CGHLSLMSFinishCodeGen.cpp 114 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
727782779278027812782278327842785278627872788278927902791279227932794279527962797279827992800280128022803280428052806280728082809281028112812281328142815281628172818281928202821282228232824282528262827282828292830283128322833283428352836283728382839284028412842284328442845284628472848284928502851285228532854285528562857285828592860286128622863286428652866286728682869287028712872287328742875287628772878287928802881288228832884288528862887288828892890289128922893289428952896289728982899290029012902290329042905290629072908290929102911291229132914291529162917291829192920292129222923292429252926292729282929293029312932293329342935293629372938293929402941294229432944294529462947294829492950295129522953295429552956295729582959296029612962296329642965296629672968296929702971297229732974297529762977297829792980298129822983298429852986298729882989299029912992299329942995299629972998299930003001300230033004300530063007300830093010301130123013301430153016301730183019302030213022302330243025302630273028302930303031303230333034303530363037303830393040304130423043304430453046304730483049305030513052305330543055305630573058305930603061306230633064306530663067306830693070307130723073307430753076307730783079308030813082308330843085308630873088308930903091309230933094309530963097309830993100310131023103310431053106310731083109311031113112311331143115311631173118311931203121312231233124312531263127312831293130313131323133313431353136313731383139314031413142314331443145314631473148314931503151315231533154315531563157315831593160316131623163316431653166316731683169317031713172317331743175317631773178317931803181318231833184318531863187318831893190319131923193
  1. ///////////////////////////////////////////////////////////////////////////////
  2. // //
  3. // CGHLSLMSFinishCodeGen.cpp //
  4. // Copyright (C) Microsoft Corporation. All rights reserved. //
  5. // This file is distributed under the University of Illinois Open Source //
  6. // License. See LICENSE.TXT for details. //
  7. // //
  8. // Implement FinishCodeGen. //
  9. // //
  10. ///////////////////////////////////////////////////////////////////////////////
  11. #include "llvm/ADT/SmallVector.h"
  12. #include "llvm/ADT/StringRef.h"
  13. #include "llvm/Analysis/DxilValueCache.h"
  14. #include "llvm/IR/CFG.h"
  15. #include "llvm/IR/Function.h"
  16. #include "llvm/IR/GetElementPtrTypeIterator.h"
  17. #include "llvm/IR/IRBuilder.h"
  18. #include "llvm/IR/InstIterator.h"
  19. #include "llvm/IR/Instructions.h"
  20. #include "llvm/IR/Module.h"
  21. #include "llvm/IR/Type.h"
  22. #include "llvm/Transforms/Utils/Cloning.h"
  23. #include "llvm/Transforms/Utils/ValueMapper.h"
  24. #include "CodeGenModule.h"
  25. #include "clang/Basic/LangOptions.h"
  26. #include "clang/Frontend/CodeGenOptions.h"
  27. #include "clang/Parse/ParseHLSL.h" // root sig would be in Parser if part of lang
  28. #include "dxc/DXIL/DxilConstants.h"
  29. #include "dxc/DXIL/DxilOperations.h"
  30. #include "dxc/DXIL/DxilResourceProperties.h"
  31. #include "dxc/DXIL/DxilTypeSystem.h"
  32. #include "dxc/DXIL/DxilUtil.h"
  33. #include "dxc/DxilRootSignature/DxilRootSignature.h"
  34. #include "dxc/HLSL/DxilExportMap.h"
  35. #include "dxc/HLSL/DxilGenerationPass.h"
  36. #include "dxc/HLSL/HLMatrixType.h"
  37. #include "dxc/HLSL/HLModule.h"
  38. #include "dxc/HLSL/HLSLExtensionsCodegenHelper.h"
  39. #include "dxc/HlslIntrinsicOp.h"
  40. #include <fenv.h>
  41. #include <memory>
  42. #include <vector>
  43. #include "CGHLSLMSHelper.h"
  44. using namespace llvm;
  45. using namespace hlsl;
  46. using namespace CGHLSLMSHelper;
  47. namespace {
  48. Value *CreateHandleFromResPtr(Value *ResPtr, HLModule &HLM,
  49. llvm::Type *HandleTy, IRBuilder<> &Builder) {
  50. Module &M = *HLM.GetModule();
  51. // Load to make sure resource only have Ld/St use so mem2reg could remove
  52. // temp resource.
  53. Value *ldObj = Builder.CreateLoad(ResPtr);
  54. Value *args[] = {ldObj};
  55. CallInst *Handle = HLM.EmitHLOperationCall(
  56. Builder, HLOpcodeGroup::HLCreateHandle, 0, HandleTy, args, M);
  57. return Handle;
  58. }
  59. CallInst *CreateAnnotateHandle(HLModule &HLM, Value *Handle,
  60. DxilResourceProperties &RP, llvm::Type *ResTy,
  61. IRBuilder<> &Builder) {
  62. Constant *RPConstant = resource_helper::getAsConstant(
  63. RP, HLM.GetOP()->GetResourcePropertiesType(), *HLM.GetShaderModel());
  64. return HLM.EmitHLOperationCall(
  65. Builder, HLOpcodeGroup::HLAnnotateHandle,
  66. (unsigned)HLOpcodeGroup::HLAnnotateHandle, Handle->getType(),
  67. {Handle, RPConstant, UndefValue::get(ResTy)}, *HLM.GetModule());
  68. }
// Lower CBV bitcast use to handle use.
// Leave the load/store.
//
// Each collected bitcast of a CBuffer/TBuffer object is replaced by the
// chain: createHandle -> annotateHandle -> cbuffer-subscript(handle, 0),
// and the original bitcast is erased.
void LowerDynamicCBVUseToHandle(
    HLModule &HLM,
    DxilObjectProperties &objectProperties) {
  Type *HandleTy = HLM.GetOP()->GetHandleType();
  Module &M = *HLM.GetModule();
  // Phase 1: collect BitCast use of CBV. Collection happens first so the
  // rewrite below does not mutate user lists while scanning objectProperties.
  SmallVector<std::pair<BitCastInst *, DxilResourceProperties>, 4> BitCasts;
  for (auto it : objectProperties.resMap) {
    DxilResourceProperties RP = it.second;
    // Only constant/texture buffers are handled here.
    if (RP.getResourceKind() != DXIL::ResourceKind::CBuffer &&
        RP.getResourceKind() != DXIL::ResourceKind::TBuffer)
      continue;
    Value *V = it.first;
    // Skip external globals.
    if (GlobalVariable *GV = dyn_cast<GlobalVariable>(V)) {
      if (GV->getLinkage() != GlobalValue::LinkageTypes::InternalLinkage)
        continue;
    }
    for (auto UserIt = V->user_begin(); UserIt != V->user_end();) {
      User *U = *(UserIt++);
      // Dead users need no lowering.
      if (U->user_empty())
        continue;
      if (BitCastInst *BCI = dyn_cast<BitCastInst>(U)) {
        BitCasts.emplace_back(std::make_pair(BCI, RP));
        continue;
      }
      DXASSERT((!isa<BitCastOperator>(U) || U->user_empty()),
               "all BitCast should be BitCastInst");
    }
  }
  // Phase 2: rewrite each collected bitcast into a handle-based subscript.
  for (auto it : BitCasts) {
    BitCastInst *BCI = it.first;
    DxilResourceProperties RP = it.second;
    IRBuilder<> B(BCI);
    // Keep the emitted calls as distinct instructions; do not constant-fold.
    B.AllowFolding = false;
    Value *ObjV = BCI->getOperand(0);
    Value *Handle = CreateHandleFromResPtr(ObjV, HLM, HandleTy, B);
    Type *ResTy = ObjV->getType()->getPointerElementType();
    Handle = CreateAnnotateHandle(HLM, Handle, RP, ResTy, B);
    // Create cb subscript at constant index 0, returning the bitcast's type.
    llvm::Type *opcodeTy = B.getInt32Ty();
    llvm::Type *idxTy = opcodeTy;
    Constant *zeroIdx = ConstantInt::get(opcodeTy, 0);
    Type *cbTy = BCI->getType();
    llvm::FunctionType *SubscriptFuncTy =
        llvm::FunctionType::get(cbTy, {opcodeTy, HandleTy, idxTy}, false);
    Function *subscriptFunc =
        GetOrCreateHLFunction(M, SubscriptFuncTy, HLOpcodeGroup::HLSubscript,
                              (unsigned)HLSubscriptOpcode::CBufferSubscript);
    Constant *opArg = ConstantInt::get(
        opcodeTy, (unsigned)HLSubscriptOpcode::CBufferSubscript);
    Value *args[] = {opArg, Handle, zeroIdx};
    Instruction *cbSubscript =
        cast<Instruction>(B.CreateCall(subscriptFunc, {args}));
    BCI->replaceAllUsesWith(cbSubscript);
    BCI->eraseFromParent();
  }
}
  129. bool IsHLSLSamplerDescType(llvm::Type *Ty) {
  130. if (llvm::StructType *ST = dyn_cast<llvm::StructType>(Ty)) {
  131. if (!ST->hasName())
  132. return false;
  133. StringRef name = ST->getName();
  134. if (name == "struct..Sampler")
  135. return true;
  136. }
  137. return false;
  138. }
  139. #ifndef NDEBUG
  140. static bool ConsumePrefix(StringRef &Str, StringRef Prefix) {
  141. if (!Str.startswith(Prefix)) return false;
  142. Str = Str.substr(Prefix.size());
  143. return true;
  144. }
  145. bool IsHLSLBufferViewType(llvm::Type *Ty) {
  146. if (llvm::StructType *ST = dyn_cast<llvm::StructType>(Ty)) {
  147. if (!ST->hasName())
  148. return false;
  149. StringRef name = ST->getName();
  150. if (!(ConsumePrefix(name, "class.") ||
  151. ConsumePrefix(name, "struct.")))
  152. return false;
  153. if (name.startswith("ConstantBuffer<") ||
  154. name.startswith("TextureBuffer<"))
  155. return true;
  156. }
  157. return false;
  158. }
  159. #endif
// Lower calls to IOP_CreateResourceFromHeap into HL handle creation.
//
// Pass 1: for each matching call, emit an HLIntrinsic producing a handle
// from the heap index and store it into a per-resource-pointer handle
// alloca (created in the entry block on first use), then erase the call.
// Pass 2: rewrite loads of the original resource pointer (reached through
// bitcasts) into loads of the handle alloca followed by a HandleToResCast.
void LowerGetResourceFromHeap(
    HLModule &HLM, std::vector<std::pair<Function *, unsigned>> &intrinsicMap) {
  llvm::Module &M = *HLM.GetModule();
  llvm::Type *HandleTy = HLM.GetOP()->GetHandleType();
  unsigned GetResFromHeapOp =
      static_cast<unsigned>(IntrinsicOp::IOP_CreateResourceFromHeap);
  // Maps the temp resource pointer to the handle alloca replacing it.
  DenseMap<Instruction *, Instruction *> ResourcePtrToHandlePtrMap;
  for (auto it : intrinsicMap) {
    unsigned opcode = it.second;
    if (opcode != GetResFromHeapOp)
      continue;
    Function *F = it.first;
    HLOpcodeGroup group = hlsl::GetHLOpcodeGroup(F);
    if (group != HLOpcodeGroup::HLIntrinsic)
      continue;
    // Iterator advances before each call is erased below.
    for (auto uit = F->user_begin(); uit != F->user_end();) {
      CallInst *CI = cast<CallInst>(*(uit++));
      // Arg 0 is this pointer.
      unsigned ArgIdx = 1;
      Instruction *ResPtr = cast<Instruction>(CI->getArgOperand(ArgIdx));
      Value *Index = CI->getArgOperand(ArgIdx + 1);
      IRBuilder<> Builder(CI);
      // Make a handle from GetResFromHeap; the i1 flag records whether the
      // pointee is a sampler descriptor type.
      Value *IsSampler = Builder.getInt1(
          IsHLSLSamplerDescType(ResPtr->getType()->getPointerElementType()));
      Value *Handle = HLM.EmitHLOperationCall(
          Builder, HLOpcodeGroup::HLIntrinsic, GetResFromHeapOp, HandleTy,
          {Index, IsSampler}, M);
      // Find (or lazily create) the handle ptr for this res ptr.
      // NOTE: this 'it' intentionally shadows the outer loop variable.
      auto it = ResourcePtrToHandlePtrMap.find(ResPtr);
      Instruction *HandlePtr = nullptr;
      if (it != ResourcePtrToHandlePtrMap.end()) {
        HandlePtr = it->second;
      } else {
        // Allocas belong in the function entry block.
        IRBuilder<> AllocaBuilder(
            ResPtr->getParent()->getParent()->getEntryBlock().begin());
        HandlePtr = AllocaBuilder.CreateAlloca(HandleTy);
        ResourcePtrToHandlePtrMap[ResPtr] = HandlePtr;
      }
      // Store handle to handle ptr.
      Builder.CreateStore(Handle, HandlePtr);
      CI->eraseFromParent();
    }
  }
  // Replace load of Resource ptr into load of handle ptr.
  for (auto it : ResourcePtrToHandlePtrMap) {
    Instruction *resPtr = it.first;
    Instruction *handlePtr = it.second;
    for (auto uit = resPtr->user_begin(); uit != resPtr->user_end();) {
      User *U = *(uit++);
      // Remaining uses must be bitcasts to resource/buffer-view ptr types.
      BitCastInst *BCI = cast<BitCastInst>(U);
      DXASSERT(
          dxilutil::IsHLSLResourceType(
              BCI->getType()->getPointerElementType()) ||
              IsHLSLBufferViewType(BCI->getType()->getPointerElementType()),
          "illegal cast of resource ptr");
      for (auto cuit = BCI->user_begin(); cuit != BCI->user_end();) {
        LoadInst *LI = cast<LoadInst>(*(cuit++));
        IRBuilder<> Builder(LI);
        // Load the handle and cast back to the type the old load produced.
        Value *Handle = Builder.CreateLoad(handlePtr);
        Value *Res =
            HLM.EmitHLOperationCall(Builder, HLOpcodeGroup::HLCast,
                                    (unsigned)HLCastOpcode::HandleToResCast,
                                    LI->getType(), {Handle}, M);
        LI->replaceAllUsesWith(Res);
        LI->eraseFromParent();
      }
      BCI->eraseFromParent();
    }
    resPtr->eraseFromParent();
  }
}
  232. void ReplaceBoolVectorSubscript(CallInst *CI) {
  233. Value *Ptr = CI->getArgOperand(0);
  234. Value *Idx = CI->getArgOperand(1);
  235. Value *IdxList[] = {ConstantInt::get(Idx->getType(), 0), Idx};
  236. for (auto It = CI->user_begin(), E = CI->user_end(); It != E;) {
  237. Instruction *user = cast<Instruction>(*(It++));
  238. IRBuilder<> Builder(user);
  239. Value *GEP = Builder.CreateInBoundsGEP(Ptr, IdxList);
  240. if (LoadInst *LI = dyn_cast<LoadInst>(user)) {
  241. Value *NewLd = Builder.CreateLoad(GEP);
  242. Value *cast = Builder.CreateZExt(NewLd, LI->getType());
  243. LI->replaceAllUsesWith(cast);
  244. LI->eraseFromParent();
  245. } else {
  246. // Must be a store inst here.
  247. StoreInst *SI = cast<StoreInst>(user);
  248. Value *V = SI->getValueOperand();
  249. Value *cast =
  250. Builder.CreateICmpNE(V, llvm::ConstantInt::get(V->getType(), 0));
  251. Builder.CreateStore(cast, GEP);
  252. SI->eraseFromParent();
  253. }
  254. }
  255. CI->eraseFromParent();
  256. }
  257. void ReplaceBoolVectorSubscript(Function *F) {
  258. for (auto It = F->user_begin(), E = F->user_end(); It != E;) {
  259. User *user = *(It++);
  260. CallInst *CI = cast<CallInst>(user);
  261. ReplaceBoolVectorSubscript(CI);
  262. }
  263. }
// Add function body for intrinsic if possible.
//
// Returns an HL function for (group, opcode) with type funcTy, copying the
// function attributes of F. MOP_Append/MOP_Consume and IOP_sincos get a
// synthesized body expressed in simpler HL operations; every other opcode
// (and stream-output Append) gets a plain bodiless HL function.
Function *CreateOpFunction(llvm::Module &M, Function *F,
                           llvm::FunctionType *funcTy, HLOpcodeGroup group,
                           unsigned opcode) {
  Function *opFunc = nullptr;
  AttributeSet attribs = F->getAttributes().getFnAttributes();
  llvm::Type *opcodeTy = llvm::Type::getInt32Ty(M.getContext());
  if (group == HLOpcodeGroup::HLIntrinsic) {
    IntrinsicOp intriOp = static_cast<IntrinsicOp>(opcode);
    switch (intriOp) {
    case IntrinsicOp::MOP_Append:
    case IntrinsicOp::MOP_Consume: {
      bool bAppend = intriOp == IntrinsicOp::MOP_Append;
      llvm::Type *handleTy = funcTy->getParamType(HLOperandIndex::kHandleOpIdx);
      // Don't generate body for OutputStream::Append.
      if (bAppend && HLModule::IsStreamOutputPtrType(handleTy)) {
        opFunc = GetOrCreateHLFunction(M, funcTy, group, opcode, attribs);
        break;
      }
      // Body synthesized below:
      //   counter = IncrementCounter/DecrementCounter(buf);
      //   buf[counter] = val;   // Append
      //   return buf[counter];  // Consume
      opFunc = GetOrCreateHLFunctionWithBody(M, funcTy, group, opcode,
                                             bAppend ? "append" : "consume");
      llvm::Type *counterTy = llvm::Type::getInt32Ty(M.getContext());
      llvm::FunctionType *IncCounterFuncTy =
          llvm::FunctionType::get(counterTy, {opcodeTy, handleTy}, false);
      unsigned counterOpcode =
          bAppend ? (unsigned)IntrinsicOp::MOP_IncrementCounter
                  : (unsigned)IntrinsicOp::MOP_DecrementCounter;
      Function *incCounterFunc = GetOrCreateHLFunction(
          M, IncCounterFuncTy, group, counterOpcode, attribs);
      llvm::Type *idxTy = counterTy;
      // Element type: the appended argument for Append, the return value
      // for Consume.
      llvm::Type *valTy =
          bAppend ? funcTy->getParamType(HLOperandIndex::kAppendValOpIndex)
                  : funcTy->getReturnType();
      // Return type for subscript should be pointer type, hence in memory
      // representation.
      llvm::Type *subscriptTy = valTy;
      bool isBoolScalarOrVector = false;
      if (!subscriptTy->isPointerTy()) {
        // Bool (i1) values use i32 in memory; converted at load/store below.
        if (subscriptTy->getScalarType()->isIntegerTy(1)) {
          isBoolScalarOrVector = true;
          llvm::Type *memReprType =
              llvm::IntegerType::get(subscriptTy->getContext(), 32);
          subscriptTy =
              subscriptTy->isVectorTy()
                  ? llvm::VectorType::get(memReprType,
                                          subscriptTy->getVectorNumElements())
                  : memReprType;
        }
        subscriptTy = llvm::PointerType::get(subscriptTy, 0);
      }
      llvm::FunctionType *SubscriptFuncTy = llvm::FunctionType::get(
          subscriptTy, {opcodeTy, handleTy, idxTy}, false);
      Function *subscriptFunc = GetOrCreateHLFunction(
          M, SubscriptFuncTy, HLOpcodeGroup::HLSubscript,
          (unsigned)HLSubscriptOpcode::DefaultSubscript, attribs);
      BasicBlock *BB =
          BasicBlock::Create(opFunc->getContext(), "Entry", opFunc);
      IRBuilder<> Builder(BB);
      auto argIter = opFunc->args().begin();
      // Skip the opcode arg.
      argIter++;
      Argument *thisArg = argIter++;
      // int counter = IncrementCounter/DecrementCounter(Buf);
      Value *incCounterOpArg = ConstantInt::get(idxTy, counterOpcode);
      Value *counter =
          Builder.CreateCall(incCounterFunc, {incCounterOpArg, thisArg});
      // Buf[counter];
      Value *subscriptOpArg = ConstantInt::get(
          idxTy, (unsigned)HLSubscriptOpcode::DefaultSubscript);
      Value *subscript =
          Builder.CreateCall(subscriptFunc, {subscriptOpArg, thisArg, counter});
      if (bAppend) {
        Argument *valArg = argIter;
        // Buf[counter] = val;
        if (valTy->isPointerTy()) {
          // Aggregate value: copy through memory.
          unsigned size = M.getDataLayout().getTypeAllocSize(
              subscript->getType()->getPointerElementType());
          Builder.CreateMemCpy(subscript, valArg, size, 1);
        } else {
          Value *storedVal = valArg;
          // Convert to memory representation.
          if (isBoolScalarOrVector)
            storedVal = Builder.CreateZExt(
                storedVal, subscriptTy->getPointerElementType(), "frombool");
          Builder.CreateStore(storedVal, subscript);
        }
        Builder.CreateRetVoid();
      } else {
        // return Buf[counter];
        if (valTy->isPointerTy())
          Builder.CreateRet(subscript);
        else {
          Value *retVal = Builder.CreateLoad(subscript);
          // Convert to register representation.
          if (isBoolScalarOrVector)
            retVal = Builder.CreateICmpNE(
                retVal, Constant::getNullValue(retVal->getType()), "tobool");
          Builder.CreateRet(retVal);
        }
      }
    } break;
    case IntrinsicOp::IOP_sincos: {
      // sincos(val, sinPtr, cosPtr) lowers to separate sin and cos calls
      // whose results are stored through the two output pointers.
      opFunc =
          GetOrCreateHLFunctionWithBody(M, funcTy, group, opcode, "sincos");
      llvm::Type *valTy =
          funcTy->getParamType(HLOperandIndex::kTrinaryOpSrc0Idx);
      llvm::FunctionType *sinFuncTy =
          llvm::FunctionType::get(valTy, {opcodeTy, valTy}, false);
      unsigned sinOp = static_cast<unsigned>(IntrinsicOp::IOP_sin);
      unsigned cosOp = static_cast<unsigned>(IntrinsicOp::IOP_cos);
      Function *sinFunc =
          GetOrCreateHLFunction(M, sinFuncTy, group, sinOp, attribs);
      Function *cosFunc =
          GetOrCreateHLFunction(M, sinFuncTy, group, cosOp, attribs);
      BasicBlock *BB =
          BasicBlock::Create(opFunc->getContext(), "Entry", opFunc);
      IRBuilder<> Builder(BB);
      auto argIter = opFunc->args().begin();
      // Skip the opcode arg.
      argIter++;
      Argument *valArg = argIter++;
      Argument *sinPtrArg = argIter++;
      Argument *cosPtrArg = argIter++;
      Value *sinOpArg = ConstantInt::get(opcodeTy, sinOp);
      Value *sinVal = Builder.CreateCall(sinFunc, {sinOpArg, valArg});
      Builder.CreateStore(sinVal, sinPtrArg);
      Value *cosOpArg = ConstantInt::get(opcodeTy, cosOp);
      Value *cosVal = Builder.CreateCall(cosFunc, {cosOpArg, valArg});
      Builder.CreateStore(cosVal, cosPtrArg);
      // Ret.
      Builder.CreateRetVoid();
    } break;
    default:
      opFunc = GetOrCreateHLFunction(M, funcTy, group, opcode, attribs);
      break;
    }
  } else if (group == HLOpcodeGroup::HLExtIntrinsic) {
    // Extension intrinsics also record the group and function names.
    llvm::StringRef fnName = F->getName();
    llvm::StringRef groupName = GetHLOpcodeGroupNameByAttr(F);
    opFunc = GetOrCreateHLFunction(M, funcTy, group, &groupName, &fnName,
                                   opcode, attribs);
  } else {
    opFunc = GetOrCreateHLFunction(M, funcTy, group, opcode, attribs);
  }
  return opFunc;
}
// Resolve the DxilResourceProperties for a resource-typed argument of an
// HL intrinsic call.
//
// Lookup order:
//  1. the argument itself in objectProperties;
//  2. the base pointer of the (possibly nested) GEP feeding the argument;
//  3. the resource attribute recorded in the field annotation of the struct
//     field that the GEP indexes into.
// Asserts if no valid properties are found.
DxilResourceProperties GetResourcePropsFromIntrinsicObjectArg(
    Value *arg, HLModule &HLM, DxilTypeSystem &typeSys,
    DxilObjectProperties &objectProperties) {
  DxilResourceProperties RP = objectProperties.GetResource(arg);
  if (RP.isValid())
    return RP;
  // Must be GEP.
  GEPOperator *GEP = cast<GEPOperator>(arg);
  // Find RP from GEP.
  Value *Ptr = GEP->getPointerOperand();
  // When Ptr is array of resource, check if it is another GEP.
  while (
      dxilutil::IsHLSLResourceType(dxilutil::GetArrayEltTy(Ptr->getType()))) {
    if (GEPOperator *ParentGEP = dyn_cast<GEPOperator>(Ptr)) {
      GEP = ParentGEP;
      Ptr = GEP->getPointerOperand();
    } else {
      break;
    }
  }
  // When ptr is array of resource, ptr could be in
  // objectProperties.
  RP = objectProperties.GetResource(Ptr);
  if (RP.isValid())
    return RP;
  // Otherwise walk the GEP's indexed types looking for a struct field that
  // carries a resource attribute in its annotation.
  DxilStructAnnotation *Anno = nullptr;
  for (auto gepIt = gep_type_begin(GEP), E = gep_type_end(GEP); gepIt != E;
       ++gepIt) {
    if (StructType *ST = dyn_cast<StructType>(*gepIt)) {
      Anno = typeSys.GetStructAnnotation(ST);
      DXASSERT(Anno, "missing type annotation");
      unsigned Index =
          cast<ConstantInt>(gepIt.getOperand())->getLimitedValue();
      DxilFieldAnnotation &fieldAnno = Anno->GetFieldAnnotation(Index);
      if (fieldAnno.HasResourceAttribute()) {
        MDNode *resAttrib = fieldAnno.GetResourceAttribute();
        DxilResourceBase R(DXIL::ResourceClass::Invalid);
        HLM.LoadDxilResourceBaseFromMDNode(resAttrib, R);
        // Load the full resource/sampler record to compute its properties.
        switch (R.GetClass()) {
        case DXIL::ResourceClass::SRV:
        case DXIL::ResourceClass::UAV: {
          DxilResource Res;
          HLM.LoadDxilResourceFromMDNode(resAttrib, Res);
          RP = resource_helper::loadPropsFromResourceBase(&Res);
        } break;
        case DXIL::ResourceClass::Sampler: {
          DxilSampler Sampler;
          HLM.LoadDxilSamplerFromMDNode(resAttrib, Sampler);
          RP = resource_helper::loadPropsFromResourceBase(&Sampler);
        } break;
        default:
          DXASSERT(0, "invalid resource attribute in filed annotation");
          break;
        }
        break;
      }
    }
  }
  DXASSERT(RP.isValid(), "invalid resource properties");
  return RP;
}
// Rewrite the HL intrinsic stub F into an "op function" whose first parameter
// is the i32 HL opcode, update every call site to pass the opcode constant,
// and finally erase F.
// Resource-typed pointer parameters are replaced by DXIL handle values; the
// handle is created and annotated at each call site so temporary resource
// variables are only ever consumed by handle creation.
// Bool-vector subscripts and double subscripts (e.g. Tex.mips[m][i]) take
// dedicated paths below.
void AddOpcodeParamForIntrinsic(
    HLModule &HLM, Function *F, unsigned opcode, llvm::Type *HandleTy,
    DxilObjectProperties &objectProperties) {
  llvm::Module &M = *HLM.GetModule();
  llvm::FunctionType *oldFuncTy = F->getFunctionType();

  SmallVector<llvm::Type *, 4> paramTyList;
  // Add the opcode param
  llvm::Type *opcodeTy = llvm::Type::getInt32Ty(M.getContext());
  paramTyList.emplace_back(opcodeTy);
  paramTyList.append(oldFuncTy->param_begin(), oldFuncTy->param_end());

  // Swap resource pointer params for the handle type in the new signature.
  for (unsigned i = 1; i < paramTyList.size(); i++) {
    llvm::Type *Ty = paramTyList[i];
    if (Ty->isPointerTy()) {
      Ty = Ty->getPointerElementType();
      if (dxilutil::IsHLSLResourceType(Ty)) {
        // Use handle type for resource type.
        // This will make sure temp object variable only used by createHandle.
        paramTyList[i] = HandleTy;
      }
    }
  }

  HLOpcodeGroup group = hlsl::GetHLOpcodeGroup(F);

  if (group == HLOpcodeGroup::HLSubscript &&
      opcode == static_cast<unsigned>(HLSubscriptOpcode::VectorSubscript)) {
    llvm::FunctionType *FT = F->getFunctionType();
    llvm::Type *VecArgTy = FT->getParamType(0);
    llvm::VectorType *VType =
        cast<llvm::VectorType>(VecArgTy->getPointerElementType());
    llvm::Type *Ty = VType->getElementType();
    DXASSERT(Ty->isIntegerTy(), "Only bool could use VectorSubscript");
    llvm::IntegerType *ITy = cast<IntegerType>(Ty);
    DXASSERT_LOCALVAR(ITy, ITy->getBitWidth() == 1,
                      "Only bool could use VectorSubscript");
    // The return type is i8*.
    // Replace all uses with i1*.
    ReplaceBoolVectorSubscript(F);
    return;
  }

  bool isDoubleSubscriptFunc =
      group == HLOpcodeGroup::HLSubscript &&
      opcode == static_cast<unsigned>(HLSubscriptOpcode::DoubleSubscript);

  llvm::Type *RetTy = oldFuncTy->getReturnType();

  if (isDoubleSubscriptFunc) {
    // Derive the new signature from the first (representative) call site of
    // the outer subscript and its single secondary-subscript user.
    CallInst *doubleSub = cast<CallInst>(*F->user_begin());

    // Change currentIdx type into coord type.
    auto U = doubleSub->user_begin();
    Value *user = *U;
    CallInst *secSub = cast<CallInst>(user);
    unsigned coordIdx = HLOperandIndex::kSubscriptIndexOpIdx;
    // opcode operand not add yet, so the index need -1.
    if (GetHLOpcodeGroupByName(secSub->getCalledFunction()) ==
        HLOpcodeGroup::NotHL)
      coordIdx -= 1;

    Value *coord = secSub->getArgOperand(coordIdx);

    llvm::Type *coordTy = coord->getType();
    paramTyList[HLOperandIndex::kSubscriptIndexOpIdx] = coordTy;
    // Add the sampleIdx or mipLevel parameter to the end.
    paramTyList.emplace_back(opcodeTy);

    // Change return type to be resource ret type.
    // opcode operand not add yet, so the index need -1.
    Value *objPtr =
        doubleSub->getArgOperand(HLOperandIndex::kSubscriptObjectOpIdx - 1);
    // Must be a GEP
    GEPOperator *objGEP = cast<GEPOperator>(objPtr);
    gep_type_iterator GEPIt = gep_type_begin(objGEP), E = gep_type_end(objGEP);
    llvm::Type *resTy = nullptr;
    while (GEPIt != E) {
      if (dxilutil::IsHLSLResourceType(*GEPIt)) {
        resTy = *GEPIt;
        break;
      }
      GEPIt++;
    }

    DXASSERT(resTy, "must find the resource type");
    // Change object type to handle type.
    paramTyList[HLOperandIndex::kSubscriptObjectOpIdx] = HandleTy;
    // Change RetTy into pointer of resource return type.
    RetTy = cast<StructType>(resTy)->getElementType(0)->getPointerTo();
  }

  llvm::FunctionType *funcTy =
      llvm::FunctionType::get(RetTy, paramTyList, oldFuncTy->isVarArg());

  Function *opFunc = CreateOpFunction(M, F, funcTy, group, opcode);
  StringRef lower = hlsl::GetHLLowerStrategy(F);
  if (!lower.empty())
    hlsl::SetHLLowerStrategy(opFunc, lower);

  DxilTypeSystem &typeSys = HLM.GetTypeSystem();

  // Rewrite every call site; iterator is advanced before the call is erased.
  for (auto user = F->user_begin(); user != F->user_end();) {
    // User must be a call.
    CallInst *oldCI = cast<CallInst>(*(user++));

    SmallVector<Value *, 4> opcodeParamList;
    Value *opcodeConst = Constant::getIntegerValue(opcodeTy, APInt(32, opcode));
    opcodeParamList.emplace_back(opcodeConst);
    opcodeParamList.append(oldCI->arg_operands().begin(),
                           oldCI->arg_operands().end());
    IRBuilder<> Builder(oldCI);

    if (isDoubleSubscriptFunc) {
      // Change obj to the resource pointer.
      Value *objVal = opcodeParamList[HLOperandIndex::kSubscriptObjectOpIdx];
      GEPOperator *objGEP = cast<GEPOperator>(objVal);
      SmallVector<Value *, 8> IndexList;
      IndexList.append(objGEP->idx_begin(), objGEP->idx_end());
      Value *lastIndex = IndexList.back();
      ConstantInt *constIndex = cast<ConstantInt>(lastIndex);
      DXASSERT_LOCALVAR(constIndex, constIndex->getLimitedValue() == 1,
                        "last index must 1");
      // Remove the last index.
      IndexList.pop_back();
      objVal = objGEP->getPointerOperand();
      DxilResourceProperties RP = GetResourcePropsFromIntrinsicObjectArg(
          objVal, HLM, typeSys, objectProperties);
      if (IndexList.size() > 1)
        objVal = Builder.CreateInBoundsGEP(objVal, IndexList);

      Value *Handle = CreateHandleFromResPtr(objVal, HLM, HandleTy, Builder);
      Type *ResTy = objVal->getType()->getPointerElementType();
      Handle = CreateAnnotateHandle(HLM, Handle, RP, ResTy, Builder);
      // Change obj to the resource pointer.
      opcodeParamList[HLOperandIndex::kSubscriptObjectOpIdx] = Handle;

      // Set idx and mipIdx.
      Value *mipIdx = opcodeParamList[HLOperandIndex::kSubscriptIndexOpIdx];
      auto U = oldCI->user_begin();
      Value *user = *U;
      CallInst *secSub = cast<CallInst>(user);
      unsigned idxOpIndex = HLOperandIndex::kSubscriptIndexOpIdx;
      if (GetHLOpcodeGroupByName(secSub->getCalledFunction()) ==
          HLOpcodeGroup::NotHL)
        idxOpIndex--;
      Value *idx = secSub->getArgOperand(idxOpIndex);

      DXASSERT(secSub->hasOneUse(), "subscript should only has one use");

      // Add the sampleIdx or mipLevel parameter to the end.
      opcodeParamList[HLOperandIndex::kSubscriptIndexOpIdx] = idx;
      opcodeParamList.emplace_back(mipIdx);
      // Insert new call before secSub to make sure idx is ready to use.
      Builder.SetInsertPoint(secSub);
    }

    // Replace each resource-pointer argument with an annotated handle.
    for (unsigned i = 1; i < opcodeParamList.size(); i++) {
      Value *arg = opcodeParamList[i];
      llvm::Type *Ty = arg->getType();
      if (Ty->isPointerTy()) {
        Ty = Ty->getPointerElementType();
        if (dxilutil::IsHLSLResourceType(Ty)) {
          DxilResourceProperties RP = GetResourcePropsFromIntrinsicObjectArg(
              arg, HLM, typeSys, objectProperties);
          // Use object type directly, not by pointer.
          // This will make sure temp object variable only used by ld/st.
          if (GEPOperator *argGEP = dyn_cast<GEPOperator>(arg)) {
            std::vector<Value *> idxList(argGEP->idx_begin(),
                                         argGEP->idx_end());
            // Create instruction to avoid GEPOperator.
            GetElementPtrInst *GEP = GetElementPtrInst::CreateInBounds(
                argGEP->getPointerOperand(), idxList);
            Builder.Insert(GEP);
            arg = GEP;
          }
          llvm::Type *ResTy = arg->getType()->getPointerElementType();
          Value *Handle = CreateHandleFromResPtr(arg, HLM, HandleTy, Builder);
          Handle = CreateAnnotateHandle(HLM, Handle, RP, ResTy, Builder);
          opcodeParamList[i] = Handle;
        }
      }
    }

    Value *CI = Builder.CreateCall(opFunc, opcodeParamList);
    if (!isDoubleSubscriptFunc) {
      // replace new call and delete the old call
      oldCI->replaceAllUsesWith(CI);
      oldCI->eraseFromParent();
    } else {
      // For double script.
      // Replace single users use with new CI.
      // The secondary subscript is folded into the new call, so both calls go.
      auto U = oldCI->user_begin();
      Value *user = *U;
      CallInst *secSub = cast<CallInst>(user);
      secSub->replaceAllUsesWith(CI);
      secSub->eraseFromParent();
      oldCI->eraseFromParent();
    }
  }
  // delete the function
  F->eraseFromParent();
}
  650. void AddOpcodeParamForIntrinsics(
  651. HLModule &HLM, std::vector<std::pair<Function *, unsigned>> &intrinsicMap,
  652. DxilObjectProperties &objectProperties) {
  653. llvm::Type *HandleTy = HLM.GetOP()->GetHandleType();
  654. for (auto mapIter : intrinsicMap) {
  655. Function *F = mapIter.first;
  656. if (F->user_empty()) {
  657. // delete the function
  658. F->eraseFromParent();
  659. continue;
  660. }
  661. unsigned opcode = mapIter.second;
  662. AddOpcodeParamForIntrinsic(HLM, F, opcode, HandleTy, objectProperties);
  663. }
  664. }
  665. } // namespace
  666. namespace {
  667. // Returns true a global value is being updated
  668. bool GlobalHasStoreUserRec(Value *V, std::set<Value *> &visited) {
  669. bool isWriteEnabled = false;
  670. if (V && visited.find(V) == visited.end()) {
  671. visited.insert(V);
  672. for (User *U : V->users()) {
  673. if (isa<StoreInst>(U)) {
  674. return true;
  675. } else if (CallInst *CI = dyn_cast<CallInst>(U)) {
  676. Function *F = CI->getCalledFunction();
  677. if (!F->isIntrinsic()) {
  678. HLOpcodeGroup hlGroup = GetHLOpcodeGroup(F);
  679. switch (hlGroup) {
  680. case HLOpcodeGroup::NotHL:
  681. return true;
  682. case HLOpcodeGroup::HLMatLoadStore: {
  683. HLMatLoadStoreOpcode opCode =
  684. static_cast<HLMatLoadStoreOpcode>(hlsl::GetHLOpcode(CI));
  685. if (opCode == HLMatLoadStoreOpcode::ColMatStore ||
  686. opCode == HLMatLoadStoreOpcode::RowMatStore)
  687. return true;
  688. break;
  689. }
  690. case HLOpcodeGroup::HLCast:
  691. case HLOpcodeGroup::HLSubscript:
  692. if (GlobalHasStoreUserRec(U, visited))
  693. return true;
  694. break;
  695. default:
  696. break;
  697. }
  698. }
  699. } else if (isa<GEPOperator>(U) || isa<PHINode>(U) || isa<SelectInst>(U)) {
  700. if (GlobalHasStoreUserRec(U, visited))
  701. return true;
  702. }
  703. }
  704. }
  705. return isWriteEnabled;
  706. }
  707. // Returns true if any of the direct user of a global is a store inst
  708. // otherwise recurse through the remaining users and check if any GEP
  709. // exists and which in turn has a store inst as user.
  710. bool GlobalHasStoreUser(GlobalVariable *GV) {
  711. std::set<Value *> visited;
  712. Value *V = cast<Value>(GV);
  713. return GlobalHasStoreUserRec(V, visited);
  714. }
  715. GlobalVariable *CreateStaticGlobal(llvm::Module *M, GlobalVariable *GV) {
  716. Constant *GC = M->getOrInsertGlobal(GV->getName().str() + ".static.copy",
  717. GV->getType()->getPointerElementType());
  718. GlobalVariable *NGV = cast<GlobalVariable>(GC);
  719. if (GV->hasInitializer()) {
  720. NGV->setInitializer(GV->getInitializer());
  721. } else {
  722. // The copy being static, it should be initialized per llvm rules
  723. NGV->setInitializer(
  724. Constant::getNullValue(GV->getType()->getPointerElementType()));
  725. }
  726. // static global should have internal linkage
  727. NGV->setLinkage(GlobalValue::InternalLinkage);
  728. return NGV;
  729. }
// For each externally-visible, non-constant global (excluding HLSL objects
// and groupshared memory) that is actually stored to, create an internal
// ".static.copy" twin, redirect all uses to it, and memcpy the original's
// value into the copy at the start of entry function EF. All such globals are
// then marked constant.
void CreateWriteEnabledStaticGlobals(llvm::Module *M, llvm::Function *EF) {
  std::vector<GlobalVariable *> worklist;
  for (GlobalVariable &GV : M->globals()) {
    if (!GV.isConstant() && GV.getLinkage() != GlobalValue::InternalLinkage &&
        // skip globals which are HLSL objects or group shared
        !dxilutil::IsHLSLObjectType(GV.getType()->getPointerElementType()) &&
        !dxilutil::IsSharedMemoryGlobal(&GV)) {
      if (GlobalHasStoreUser(&GV))
        worklist.emplace_back(&GV);
      // TODO: Ensure that constant globals aren't using initializer
      // Globals with store users are marked constant too; that is safe only
      // because their uses are redirected to the writable copy below.
      GV.setConstant(true);
    }
  }

  // Insertion point: after the allocas in EF's entry block.
  IRBuilder<> Builder(
      dxilutil::FirstNonAllocaInsertionPt(&EF->getEntryBlock()));

  for (GlobalVariable *GV : worklist) {
    GlobalVariable *NGV = CreateStaticGlobal(M, GV);
    GV->replaceAllUsesWith(NGV);
    // insert memcpy in all entryblocks
    uint64_t size = M->getDataLayout().getTypeAllocSize(
        GV->getType()->getPointerElementType());
    Builder.CreateMemCpy(NGV, GV, size, 1);
  }
}
  754. } // namespace
  755. namespace {
  756. void SetEntryFunction(HLModule &HLM, Function *Entry,
  757. clang::CodeGen::CodeGenModule &CGM) {
  758. if (Entry == nullptr) {
  759. clang::DiagnosticsEngine &Diags = CGM.getDiags();
  760. unsigned DiagID = Diags.getCustomDiagID(clang::DiagnosticsEngine::Error,
  761. "cannot find entry function %0");
  762. Diags.Report(DiagID) << CGM.getCodeGenOpts().HLSLEntryFunction;
  763. return;
  764. }
  765. HLM.SetEntryFunction(Entry);
  766. }
  767. Function *CloneFunction(Function *Orig, const llvm::Twine &Name,
  768. llvm::Module *llvmModule, hlsl::DxilTypeSystem &TypeSys,
  769. hlsl::DxilTypeSystem &SrcTypeSys) {
  770. Function *F = Function::Create(Orig->getFunctionType(),
  771. GlobalValue::LinkageTypes::ExternalLinkage,
  772. Name, llvmModule);
  773. SmallVector<ReturnInst *, 2> Returns;
  774. ValueToValueMapTy vmap;
  775. // Map params.
  776. auto entryParamIt = F->arg_begin();
  777. for (Argument &param : Orig->args()) {
  778. vmap[&param] = (entryParamIt++);
  779. }
  780. llvm::CloneFunctionInto(F, Orig, vmap, /*ModuleLevelChagnes*/ false, Returns);
  781. TypeSys.CopyFunctionAnnotation(F, Orig, SrcTypeSys);
  782. return F;
  783. }
  784. // Clone shader entry function to be called by other functions.
  785. // The original function will be used as shader entry.
  786. void CloneShaderEntry(Function *ShaderF, StringRef EntryName, HLModule &HLM) {
  787. Function *F = CloneFunction(ShaderF, "", HLM.GetModule(), HLM.GetTypeSystem(),
  788. HLM.GetTypeSystem());
  789. F->takeName(ShaderF);
  790. F->setLinkage(GlobalValue::LinkageTypes::InternalLinkage);
  791. // Set to name before mangled.
  792. ShaderF->setName(EntryName);
  793. DxilFunctionAnnotation *annot = HLM.GetFunctionAnnotation(F);
  794. DxilParameterAnnotation &cloneRetAnnot = annot->GetRetTypeAnnotation();
  795. // Clear semantic for cloned one.
  796. cloneRetAnnot.SetSemanticString("");
  797. cloneRetAnnot.SetSemanticIndexVec({});
  798. for (unsigned i = 0; i < annot->GetNumParameters(); i++) {
  799. DxilParameterAnnotation &cloneParamAnnot = annot->GetParameterAnnotation(i);
  800. // Clear semantic for cloned one.
  801. cloneParamAnnot.SetSemanticString("");
  802. cloneParamAnnot.SetSemanticIndexVec({});
  803. }
  804. }
  805. } // namespace
  806. namespace {
  807. bool IsPatchConstantFunction(
  808. const Function *F, StringMap<PatchConstantInfo> &patchConstantFunctionMap) {
  809. DXASSERT_NOMSG(F != nullptr);
  810. for (auto &&p : patchConstantFunctionMap) {
  811. if (p.second.Func == F)
  812. return true;
  813. }
  814. return false;
  815. }
// Resolve the [patchconstantfunc("...")] attribute on a hull-shader entry:
// look up the named function, attach it to the HS entry's function props, and
// diagnose missing functions, ambiguous overloads, inout parameters, and
// mismatched input/output control point counts.
void SetPatchConstantFunctionWithAttr(
    const EntryFunctionInfo &EntryFunc,
    const clang::HLSLPatchConstantFuncAttr *PatchConstantFuncAttr,
    StringMap<PatchConstantInfo> &patchConstantFunctionMap,
    std::unordered_map<Function *, std::unique_ptr<DxilFunctionProps>>
        &patchConstantFunctionPropsMap,
    HLModule &HLM, clang::CodeGen::CodeGenModule &CGM) {
  StringRef funcName = PatchConstantFuncAttr->getFunctionName();

  // Named function must have been collected as a patch-constant candidate.
  auto Entry = patchConstantFunctionMap.find(funcName);
  if (Entry == patchConstantFunctionMap.end()) {
    clang::DiagnosticsEngine &Diags = CGM.getDiags();
    unsigned DiagID = Diags.getCustomDiagID(
        clang::DiagnosticsEngine::Error, "Cannot find patchconstantfunc %0.");
    Diags.Report(PatchConstantFuncAttr->getLocation(), DiagID) << funcName;
    return;
  }

  // Multiple overloads: warn and note which one was picked.
  if (Entry->second.NumOverloads != 1) {
    clang::DiagnosticsEngine &Diags = CGM.getDiags();
    unsigned DiagID =
        Diags.getCustomDiagID(clang::DiagnosticsEngine::Warning,
                              "Multiple overloads of patchconstantfunc %0.");
    unsigned NoteID = Diags.getCustomDiagID(clang::DiagnosticsEngine::Note,
                                            "This overload was selected.");
    Diags.Report(PatchConstantFuncAttr->getLocation(), DiagID) << funcName;
    Diags.Report(Entry->second.SL, NoteID);
  }

  Function *patchConstFunc = Entry->second.Func;
  DXASSERT(
      HLM.HasDxilFunctionProps(EntryFunc.Func),
      " else AddHLSLFunctionInfo did not save the dxil function props for the "
      "HS entry.");
  DxilFunctionProps *HSProps = &HLM.GetDxilFunctionProps(EntryFunc.Func);

  HLM.SetPatchConstantFunctionForHS(EntryFunc.Func, patchConstFunc);
  DXASSERT_NOMSG(patchConstantFunctionPropsMap.count(patchConstFunc));
  // Check no inout parameter for patch constant function.
  DxilFunctionAnnotation *patchConstFuncAnnotation =
      HLM.GetFunctionAnnotation(patchConstFunc);
  for (unsigned i = 0; i < patchConstFuncAnnotation->GetNumParameters(); i++) {
    if (patchConstFuncAnnotation->GetParameterAnnotation(i)
            .GetParamInputQual() == DxilParamInputQual::Inout) {
      clang::DiagnosticsEngine &Diags = CGM.getDiags();
      unsigned DiagID = Diags.getCustomDiagID(
          clang::DiagnosticsEngine::Error,
          "Patch Constant function %0 should not have inout param.");
      Diags.Report(Entry->second.SL, DiagID) << funcName;
    }
  }

  // Input/Output control point validation.
  // A count of 0 on the patch-constant side means "unspecified" and is
  // accepted; non-zero counts must match the HS entry's counts.
  if (patchConstantFunctionPropsMap.count(patchConstFunc)) {
    const DxilFunctionProps &patchProps =
        *patchConstantFunctionPropsMap[patchConstFunc];
    if (patchProps.ShaderProps.HS.inputControlPoints != 0 &&
        patchProps.ShaderProps.HS.inputControlPoints !=
            HSProps->ShaderProps.HS.inputControlPoints) {
      clang::DiagnosticsEngine &Diags = CGM.getDiags();
      unsigned DiagID =
          Diags.getCustomDiagID(clang::DiagnosticsEngine::Error,
                                "Patch constant function's input patch input "
                                "should have %0 elements, but has %1.");
      Diags.Report(Entry->second.SL, DiagID)
          << HSProps->ShaderProps.HS.inputControlPoints
          << patchProps.ShaderProps.HS.inputControlPoints;
    }
    if (patchProps.ShaderProps.HS.outputControlPoints != 0 &&
        patchProps.ShaderProps.HS.outputControlPoints !=
            HSProps->ShaderProps.HS.outputControlPoints) {
      clang::DiagnosticsEngine &Diags = CGM.getDiags();
      unsigned DiagID =
          Diags.getCustomDiagID(clang::DiagnosticsEngine::Error,
                                "Patch constant function's output patch input "
                                "should have %0 elements, but has %1.");
      Diags.Report(Entry->second.SL, DiagID)
          << HSProps->ShaderProps.HS.outputControlPoints
          << patchProps.ShaderProps.HS.outputControlPoints;
    }
  }
}
  893. void SetPatchConstantFunction(
  894. const EntryFunctionInfo &EntryFunc,
  895. std::unordered_map<Function *, const clang::HLSLPatchConstantFuncAttr *>
  896. &HSEntryPatchConstantFuncAttr,
  897. StringMap<PatchConstantInfo> &patchConstantFunctionMap,
  898. std::unordered_map<Function *, std::unique_ptr<DxilFunctionProps>>
  899. &patchConstantFunctionPropsMap,
  900. HLModule &HLM, clang::CodeGen::CodeGenModule &CGM) {
  901. auto AttrsIter = HSEntryPatchConstantFuncAttr.find(EntryFunc.Func);
  902. DXASSERT(AttrsIter != HSEntryPatchConstantFuncAttr.end(),
  903. "we have checked this in AddHLSLFunctionInfo()");
  904. SetPatchConstantFunctionWithAttr(EntryFunc, AttrsIter->second,
  905. patchConstantFunctionMap,
  906. patchConstantFunctionPropsMap, HLM, CGM);
  907. }
  908. } // namespace
  909. namespace {
// For case like:
// cbuffer A {
//   float a;
//   int b;
//}
//
// const static struct {
//   float a;
//   int b;
//} ST = { a, b };
// Replace user of ST with a and b.
//
// GEP indexes into the const-static aggregate; InitList holds the constants
// (here: cbuffer members) the aggregate was initialized from. Rewrites the
// GEP to address the matching initializer directly. Returns false when the
// GEP does not select a sub-element (fewer than two indices), leaving the
// use in place.
bool ReplaceConstStaticGlobalUser(GEPOperator *GEP,
                                  std::vector<Constant *> &InitList,
                                  IRBuilder<> &Builder) {
  if (GEP->getNumIndices() < 2) {
    // Don't use sub element.
    return false;
  }

  SmallVector<Value *, 4> idxList;
  auto iter = GEP->idx_begin();
  // Keep the leading pointer index; the second index selects the struct
  // field and therefore picks the replacement from InitList.
  idxList.emplace_back(*(iter++));
  ConstantInt *subIdx = dyn_cast<ConstantInt>(*(iter++));
  DXASSERT(subIdx, "else dynamic indexing on struct field");
  unsigned subIdxImm = subIdx->getLimitedValue();
  DXASSERT(subIdxImm < InitList.size(), "else struct index out of bound");

  Constant *subPtr = InitList[subIdxImm];
  // Move every idx to idxList except idx for InitList.
  while (iter != GEP->idx_end()) {
    idxList.emplace_back(*(iter++));
  }
  Value *NewGEP = Builder.CreateGEP(subPtr, idxList);
  GEP->replaceAllUsesWith(NewGEP);
  return true;
}
  944. } // namespace
  945. namespace CGHLSLMSHelper {
// For each const-static global initialized from other constants (e.g. cbuffer
// members), redirect its GEP users to those constants. When every user was
// successfully replaced, the global's initializer ctor is gutted down to a
// bare ret so it compiles away.
void ReplaceConstStaticGlobals(
    std::unordered_map<GlobalVariable *, std::vector<Constant *>>
        &staticConstGlobalInitListMap,
    std::unordered_map<GlobalVariable *, Function *>
        &staticConstGlobalCtorMap) {
  for (auto &iter : staticConstGlobalInitListMap) {
    GlobalVariable *GV = iter.first;
    std::vector<Constant *> &InitList = iter.second;
    LLVMContext &Ctx = GV->getContext();
    // Do the replace.
    bool bPass = true;
    for (User *U : GV->users()) {
      IRBuilder<> Builder(Ctx);
      // GEP instructions need an insert point; constant GEP expressions don't.
      if (GetElementPtrInst *GEPInst = dyn_cast<GetElementPtrInst>(U)) {
        Builder.SetInsertPoint(GEPInst);
        bPass &= ReplaceConstStaticGlobalUser(cast<GEPOperator>(GEPInst),
                                              InitList, Builder);
      } else if (GEPOperator *GEP = dyn_cast<GEPOperator>(U)) {
        bPass &= ReplaceConstStaticGlobalUser(GEP, InitList, Builder);
      } else {
        DXASSERT(false, "invalid user of const static global");
      }
    }
    // Clear the Ctor which is useless now.
    // Only when every user was replaced; otherwise the ctor must still run.
    if (bPass) {
      Function *Ctor = staticConstGlobalCtorMap[GV];
      Ctor->getBasicBlockList().clear();
      BasicBlock *Entry = BasicBlock::Create(Ctx, "", Ctor);
      IRBuilder<> Builder(Entry);
      Builder.CreateRetVoid();
    }
  }
}
  979. } // namespace CGHLSLMSHelper
  980. namespace {
// Emit a load through Ptr (pointing at FromTy) converted to ToTy, for the
// bitcast patterns recognized below. Returns the converted value, or nullptr
// when the (FromTy, ToTy) pair is not a recognized pattern.
Value *CastLdValue(Value *Ptr, llvm::Type *FromTy, llvm::Type *ToTy,
                   IRBuilder<> &Builder) {
  if (ToTy->isVectorTy()) {
    unsigned vecSize = ToTy->getVectorNumElements();
    if (vecSize == 1 && ToTy->getVectorElementType() == FromTy) {
      Value *V = Builder.CreateLoad(Ptr);
      // ScalarToVec1Splat
      // Change scalar into vec1.
      Value *Vec1 = UndefValue::get(ToTy);
      return Builder.CreateInsertElement(Vec1, V, (uint64_t)0);
    } else if (vecSize == 1 && FromTy->isIntegerTy() &&
               ToTy->getVectorElementType()->isIntegerTy(1)) {
      // load(bitcast i32* to <1 x i1>*)
      // Rewrite to
      // insertelement(icmp ne (load i32*), 0)
      Value *IntV = Builder.CreateLoad(Ptr);
      Value *BoolV = Builder.CreateICmpNE(
          IntV, ConstantInt::get(IntV->getType(), 0), "tobool");
      Value *Vec1 = UndefValue::get(ToTy);
      return Builder.CreateInsertElement(Vec1, BoolV, (uint64_t)0);
    } else if (FromTy->isVectorTy() && vecSize == 1) {
      Value *V = Builder.CreateLoad(Ptr);
      // VectorTrunc
      // Change vector into vec1.
      // Shuffle keeps only lane 0.
      int mask[] = {0};
      return Builder.CreateShuffleVector(V, V, mask);
    } else if (FromTy->isArrayTy()) {
      llvm::Type *FromEltTy = FromTy->getArrayElementType();
      llvm::Type *ToEltTy = ToTy->getVectorElementType();
      if (FromTy->getArrayNumElements() == vecSize && FromEltTy == ToEltTy) {
        // ArrayToVector.
        // Load each array element and insert it into the result vector.
        Value *NewLd = UndefValue::get(ToTy);
        Value *zeroIdx = Builder.getInt32(0);
        for (unsigned i = 0; i < vecSize; i++) {
          Value *GEP =
              Builder.CreateInBoundsGEP(Ptr, {zeroIdx, Builder.getInt32(i)});
          Value *Elt = Builder.CreateLoad(GEP);
          NewLd = Builder.CreateInsertElement(NewLd, Elt, i);
        }
        return NewLd;
      }
    }
  } else if (FromTy == Builder.getInt1Ty()) {
    Value *V = Builder.CreateLoad(Ptr);
    // BoolCast
    // Widen the loaded i1 to the integer ToTy.
    DXASSERT_NOMSG(ToTy->isIntegerTy());
    return Builder.CreateZExt(V, ToTy);
  }

  return nullptr;
}
// Inverse of CastLdValue: convert value V (typed ToTy) back to FromTy so it
// can be stored through Ptr. Returns the converted value for the caller to
// store, or nullptr when either no pattern matched or (ArrayToVector case)
// the stores were already emitted here.
Value *CastStValue(Value *Ptr, Value *V, llvm::Type *FromTy, llvm::Type *ToTy,
                   IRBuilder<> &Builder) {
  if (ToTy->isVectorTy()) {
    unsigned vecSize = ToTy->getVectorNumElements();
    if (vecSize == 1 && ToTy->getVectorElementType() == FromTy) {
      // ScalarToVec1Splat
      // Change vec1 back to scalar.
      Value *Elt = Builder.CreateExtractElement(V, (uint64_t)0);
      return Elt;
    } else if (FromTy->isVectorTy() && vecSize == 1) {
      // VectorTrunc
      // Change vec1 into vector.
      // Should not happen.
      // Reported error at Sema::ImpCastExprToType.
      DXASSERT_NOMSG(0);
    } else if (FromTy->isArrayTy()) {
      llvm::Type *FromEltTy = FromTy->getArrayElementType();
      llvm::Type *ToEltTy = ToTy->getVectorElementType();
      if (FromTy->getArrayNumElements() == vecSize && FromEltTy == ToEltTy) {
        // ArrayToVector.
        // Scatter each vector lane into the corresponding array element.
        Value *zeroIdx = Builder.getInt32(0);
        for (unsigned i = 0; i < vecSize; i++) {
          Value *Elt = Builder.CreateExtractElement(V, i);
          Value *GEP =
              Builder.CreateInBoundsGEP(Ptr, {zeroIdx, Builder.getInt32(i)});
          Builder.CreateStore(Elt, GEP);
        }
        // The store already done.
        // Return null to ignore use of the return value.
        return nullptr;
      }
    }
  } else if (FromTy == Builder.getInt1Ty()) {
    // BoolCast
    // Change i1 to ToTy.
    DXASSERT_NOMSG(ToTy->isIntegerTy());
    Value *CastV = Builder.CreateICmpNE(V, ConstantInt::get(V->getType(), 0));
    return CastV;
  }

  return nullptr;
}
  1072. bool SimplifyBitCastLoad(LoadInst *LI, llvm::Type *FromTy, llvm::Type *ToTy,
  1073. Value *Ptr) {
  1074. IRBuilder<> Builder(LI);
  1075. // Cast FromLd to ToTy.
  1076. Value *CastV = CastLdValue(Ptr, FromTy, ToTy, Builder);
  1077. if (CastV) {
  1078. LI->replaceAllUsesWith(CastV);
  1079. return true;
  1080. } else {
  1081. return false;
  1082. }
  1083. }
  1084. bool SimplifyBitCastStore(StoreInst *SI, llvm::Type *FromTy, llvm::Type *ToTy,
  1085. Value *Ptr) {
  1086. IRBuilder<> Builder(SI);
  1087. Value *V = SI->getValueOperand();
  1088. // Cast Val to FromTy.
  1089. Value *CastV = CastStValue(Ptr, V, FromTy, ToTy, Builder);
  1090. if (CastV) {
  1091. Builder.CreateStore(CastV, Ptr);
  1092. return true;
  1093. } else {
  1094. return false;
  1095. }
  1096. }
// Rewrite a GEP whose base is a (FromTy* -> ToTy*) bitcast to index Ptr
// directly. Returns true when the GEP was replaced; the ArrayToVector and
// BoolCast cases are recognized but deliberately left unhandled here.
bool SimplifyBitCastGEP(GEPOperator *GEP, llvm::Type *FromTy, llvm::Type *ToTy,
                        Value *Ptr) {
  if (ToTy->isVectorTy()) {
    unsigned vecSize = ToTy->getVectorNumElements();
    if (vecSize == 1 && ToTy->getVectorElementType() == FromTy) {
      // ScalarToVec1Splat
      // The vec1 and the scalar address the same storage.
      GEP->replaceAllUsesWith(Ptr);
      return true;
    } else if (FromTy->isVectorTy() && vecSize == 1) {
      // VectorTrunc
      DXASSERT_NOMSG(
          !isa<llvm::VectorType>(GEP->getType()->getPointerElementType()));
      // Constant GEP expressions have no insert point; instructions do.
      IRBuilder<> Builder(FromTy->getContext());
      if (Instruction *I = dyn_cast<Instruction>(GEP))
        Builder.SetInsertPoint(I);
      std::vector<Value *> idxList(GEP->idx_begin(), GEP->idx_end());
      Value *NewGEP = Builder.CreateInBoundsGEP(Ptr, idxList);
      GEP->replaceAllUsesWith(NewGEP);
      return true;
    } else if (FromTy->isArrayTy()) {
      llvm::Type *FromEltTy = FromTy->getArrayElementType();
      llvm::Type *ToEltTy = ToTy->getVectorElementType();
      if (FromTy->getArrayNumElements() == vecSize && FromEltTy == ToEltTy) {
        // ArrayToVector.
      }
    }
  } else if (FromTy == llvm::Type::getInt1Ty(FromTy->getContext())) {
    // BoolCast
  }
  return false;
}
typedef SmallPtrSet<Instruction *, 4> SmallInstSet;

// Try to eliminate a pointer bitcast by rewriting its load/store/GEP users to
// operate on the source pointer directly. Users made dead by the rewrite are
// collected into deadInsts for the caller to erase.
void SimplifyBitCast(BitCastOperator *BC, SmallInstSet &deadInsts) {
  Value *Ptr = BC->getOperand(0);
  llvm::Type *FromTy = Ptr->getType();
  llvm::Type *ToTy = BC->getType();

  // Only pointer-to-pointer bitcasts are candidates.
  if (!FromTy->isPointerTy() || !ToTy->isPointerTy())
    return;

  FromTy = FromTy->getPointerElementType();
  ToTy = ToTy->getPointerElementType();

  // Take care case like %2 = bitcast %struct.T* %1 to <1 x float>*.
  // Drill through leading struct fields with a zero-index GEP so the pattern
  // helpers below see the underlying element type.
  bool GEPCreated = false;
  if (FromTy->isStructTy()) {
    IRBuilder<> Builder(FromTy->getContext());
    if (Instruction *I = dyn_cast<Instruction>(BC))
      Builder.SetInsertPoint(I);

    Value *zeroIdx = Builder.getInt32(0);
    unsigned nestLevel = 1;
    while (llvm::StructType *ST = dyn_cast<llvm::StructType>(FromTy)) {
      if (ST->getNumElements() == 0)
        break;
      FromTy = ST->getElementType(0);
      nestLevel++;
    }

    std::vector<Value *> idxList(nestLevel, zeroIdx);
    Ptr = Builder.CreateGEP(Ptr, idxList);
    GEPCreated = true;
  }

  for (User *U : BC->users()) {
    if (LoadInst *LI = dyn_cast<LoadInst>(U)) {
      if (SimplifyBitCastLoad(LI, FromTy, ToTy, Ptr)) {
        LI->dropAllReferences();
        deadInsts.insert(LI);
      }
    } else if (StoreInst *SI = dyn_cast<StoreInst>(U)) {
      if (SimplifyBitCastStore(SI, FromTy, ToTy, Ptr)) {
        SI->dropAllReferences();
        deadInsts.insert(SI);
      }
    } else if (GEPOperator *GEP = dyn_cast<GEPOperator>(U)) {
      if (SimplifyBitCastGEP(GEP, FromTy, ToTy, Ptr))
        if (Instruction *I = dyn_cast<Instruction>(GEP)) {
          I->dropAllReferences();
          deadInsts.insert(I);
        }
    } else if (dyn_cast<CallInst>(U)) {
      // Skip function call.
    } else if (dyn_cast<BitCastInst>(U)) {
      // Skip bitcast.
    } else if (dyn_cast<AddrSpaceCastInst>(U)) {
      // Skip addrspacecast.
    } else {
      DXASSERT(0, "not support yet");
    }
  }

  // We created a GEP instruction but didn't end up consuming it, so delete it.
  if (GEPCreated && Ptr->use_empty()) {
    if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(Ptr))
      GEP->eraseFromParent();
    else
      cast<Constant>(Ptr)->destroyConstant();
  }
}
// Host-side evaluation callback signatures used by the constant-folding
// helpers (EvalUnaryIntrinsic / EvalBinaryIntrinsic) below.
typedef float(__cdecl *FloatUnaryEvalFuncType)(float);
typedef double(__cdecl *DoubleUnaryEvalFuncType)(double);

typedef APInt(__cdecl *IntBinaryEvalFuncType)(const APInt &, const APInt &);
typedef float(__cdecl *FloatBinaryEvalFuncType)(float, float);
typedef double(__cdecl *DoubleBinaryEvalFuncType)(double, double);
  1195. Value *EvalUnaryIntrinsic(ConstantFP *fpV, FloatUnaryEvalFuncType floatEvalFunc,
  1196. DoubleUnaryEvalFuncType doubleEvalFunc) {
  1197. llvm::Type *Ty = fpV->getType();
  1198. Value *Result = nullptr;
  1199. if (Ty->isDoubleTy()) {
  1200. double dV = fpV->getValueAPF().convertToDouble();
  1201. Value *dResult = ConstantFP::get(Ty, doubleEvalFunc(dV));
  1202. Result = dResult;
  1203. } else {
  1204. DXASSERT_NOMSG(Ty->isFloatTy());
  1205. float fV = fpV->getValueAPF().convertToFloat();
  1206. Value *dResult = ConstantFP::get(Ty, floatEvalFunc(fV));
  1207. Result = dResult;
  1208. }
  1209. return Result;
  1210. }
  1211. Value *EvalBinaryIntrinsic(Constant *cV0, Constant *cV1,
  1212. FloatBinaryEvalFuncType floatEvalFunc,
  1213. DoubleBinaryEvalFuncType doubleEvalFunc,
  1214. IntBinaryEvalFuncType intEvalFunc) {
  1215. llvm::Type *Ty = cV0->getType();
  1216. Value *Result = nullptr;
  1217. if (Ty->isDoubleTy()) {
  1218. ConstantFP *fpV0 = cast<ConstantFP>(cV0);
  1219. ConstantFP *fpV1 = cast<ConstantFP>(cV1);
  1220. double dV0 = fpV0->getValueAPF().convertToDouble();
  1221. double dV1 = fpV1->getValueAPF().convertToDouble();
  1222. Value *dResult = ConstantFP::get(Ty, doubleEvalFunc(dV0, dV1));
  1223. Result = dResult;
  1224. } else if (Ty->isFloatTy()) {
  1225. ConstantFP *fpV0 = cast<ConstantFP>(cV0);
  1226. ConstantFP *fpV1 = cast<ConstantFP>(cV1);
  1227. float fV0 = fpV0->getValueAPF().convertToFloat();
  1228. float fV1 = fpV1->getValueAPF().convertToFloat();
  1229. Value *dResult = ConstantFP::get(Ty, floatEvalFunc(fV0, fV1));
  1230. Result = dResult;
  1231. } else {
  1232. DXASSERT_NOMSG(Ty->isIntegerTy());
  1233. DXASSERT_NOMSG(intEvalFunc);
  1234. ConstantInt *ciV0 = cast<ConstantInt>(cV0);
  1235. ConstantInt *ciV1 = cast<ConstantInt>(cV1);
  1236. const APInt &iV0 = ciV0->getValue();
  1237. const APInt &iV1 = ciV1->getValue();
  1238. Value *dResult = ConstantInt::get(Ty, intEvalFunc(iV0, iV1));
  1239. Result = dResult;
  1240. }
  1241. return Result;
  1242. }
  1243. Value *EvalUnaryIntrinsic(CallInst *CI, FloatUnaryEvalFuncType floatEvalFunc,
  1244. DoubleUnaryEvalFuncType doubleEvalFunc) {
  1245. Value *V = CI->getArgOperand(0);
  1246. llvm::Type *Ty = CI->getType();
  1247. Value *Result = nullptr;
  1248. if (llvm::VectorType *VT = dyn_cast<llvm::VectorType>(Ty)) {
  1249. Result = UndefValue::get(Ty);
  1250. Constant *CV = cast<Constant>(V);
  1251. IRBuilder<> Builder(CI);
  1252. for (unsigned i = 0; i < VT->getNumElements(); i++) {
  1253. ConstantFP *fpV = cast<ConstantFP>(CV->getAggregateElement(i));
  1254. Value *EltResult = EvalUnaryIntrinsic(fpV, floatEvalFunc, doubleEvalFunc);
  1255. Result = Builder.CreateInsertElement(Result, EltResult, i);
  1256. }
  1257. } else {
  1258. ConstantFP *fpV = cast<ConstantFP>(V);
  1259. Result = EvalUnaryIntrinsic(fpV, floatEvalFunc, doubleEvalFunc);
  1260. }
  1261. CI->replaceAllUsesWith(Result);
  1262. CI->eraseFromParent();
  1263. return Result;
  1264. }
  1265. Value *EvalBinaryIntrinsic(CallInst *CI, FloatBinaryEvalFuncType floatEvalFunc,
  1266. DoubleBinaryEvalFuncType doubleEvalFunc,
  1267. IntBinaryEvalFuncType intEvalFunc = nullptr) {
  1268. Value *V0 = CI->getArgOperand(0);
  1269. Value *V1 = CI->getArgOperand(1);
  1270. llvm::Type *Ty = CI->getType();
  1271. Value *Result = nullptr;
  1272. if (llvm::VectorType *VT = dyn_cast<llvm::VectorType>(Ty)) {
  1273. Result = UndefValue::get(Ty);
  1274. Constant *CV0 = cast<Constant>(V0);
  1275. Constant *CV1 = cast<Constant>(V1);
  1276. IRBuilder<> Builder(CI);
  1277. for (unsigned i = 0; i < VT->getNumElements(); i++) {
  1278. Constant *cV0 = cast<Constant>(CV0->getAggregateElement(i));
  1279. Constant *cV1 = cast<Constant>(CV1->getAggregateElement(i));
  1280. Value *EltResult = EvalBinaryIntrinsic(cV0, cV1, floatEvalFunc,
  1281. doubleEvalFunc, intEvalFunc);
  1282. Result = Builder.CreateInsertElement(Result, EltResult, i);
  1283. }
  1284. } else {
  1285. Constant *cV0 = cast<Constant>(V0);
  1286. Constant *cV1 = cast<Constant>(V1);
  1287. Result = EvalBinaryIntrinsic(cV0, cV1, floatEvalFunc, doubleEvalFunc,
  1288. intEvalFunc);
  1289. }
  1290. CI->replaceAllUsesWith(Result);
  1291. CI->eraseFromParent();
  1292. return Result;
  1293. CI->eraseFromParent();
  1294. return Result;
  1295. }
  1296. void SimpleTransformForHLDXIRInst(Instruction *I, SmallInstSet &deadInsts) {
  1297. unsigned opcode = I->getOpcode();
  1298. switch (opcode) {
  1299. case Instruction::BitCast: {
  1300. BitCastOperator *BCI = cast<BitCastOperator>(I);
  1301. SimplifyBitCast(BCI, deadInsts);
  1302. } break;
  1303. case Instruction::Load: {
  1304. LoadInst *ldInst = cast<LoadInst>(I);
  1305. DXASSERT(!HLMatrixType::isa(ldInst->getType()),
  1306. "matrix load should use HL LdStMatrix");
  1307. Value *Ptr = ldInst->getPointerOperand();
  1308. if (ConstantExpr *CE = dyn_cast_or_null<ConstantExpr>(Ptr)) {
  1309. if (BitCastOperator *BCO = dyn_cast<BitCastOperator>(CE)) {
  1310. SimplifyBitCast(BCO, deadInsts);
  1311. }
  1312. }
  1313. } break;
  1314. case Instruction::Store: {
  1315. StoreInst *stInst = cast<StoreInst>(I);
  1316. Value *V = stInst->getValueOperand();
  1317. DXASSERT_LOCALVAR(V, !HLMatrixType::isa(V->getType()),
  1318. "matrix store should use HL LdStMatrix");
  1319. Value *Ptr = stInst->getPointerOperand();
  1320. if (ConstantExpr *CE = dyn_cast<ConstantExpr>(Ptr)) {
  1321. if (BitCastOperator *BCO = dyn_cast<BitCastOperator>(CE)) {
  1322. SimplifyBitCast(BCO, deadInsts);
  1323. }
  1324. }
  1325. } break;
  1326. case Instruction::LShr:
  1327. case Instruction::AShr:
  1328. case Instruction::Shl: {
  1329. llvm::BinaryOperator *BO = cast<llvm::BinaryOperator>(I);
  1330. Value *op2 = BO->getOperand(1);
  1331. IntegerType *Ty = cast<IntegerType>(BO->getType()->getScalarType());
  1332. unsigned bitWidth = Ty->getBitWidth();
  1333. // Clamp op2 to 0 ~ bitWidth-1
  1334. if (ConstantInt *cOp2 = dyn_cast<ConstantInt>(op2)) {
  1335. unsigned iOp2 = cOp2->getLimitedValue();
  1336. unsigned clampedOp2 = iOp2 & (bitWidth - 1);
  1337. if (iOp2 != clampedOp2) {
  1338. BO->setOperand(1, ConstantInt::get(op2->getType(), clampedOp2));
  1339. }
  1340. } else {
  1341. Value *mask = ConstantInt::get(op2->getType(), bitWidth - 1);
  1342. IRBuilder<> Builder(I);
  1343. op2 = Builder.CreateAnd(op2, mask);
  1344. BO->setOperand(1, op2);
  1345. }
  1346. } break;
  1347. }
  1348. }
  1349. } // namespace
  1350. namespace CGHLSLMSHelper {
  1351. Value *TryEvalIntrinsic(CallInst *CI, IntrinsicOp intriOp,
  1352. unsigned hlslVersion) {
  1353. switch (intriOp) {
  1354. case IntrinsicOp::IOP_tan: {
  1355. return EvalUnaryIntrinsic(CI, tanf, tan);
  1356. } break;
  1357. case IntrinsicOp::IOP_tanh: {
  1358. return EvalUnaryIntrinsic(CI, tanhf, tanh);
  1359. } break;
  1360. case IntrinsicOp::IOP_sin: {
  1361. return EvalUnaryIntrinsic(CI, sinf, sin);
  1362. } break;
  1363. case IntrinsicOp::IOP_sinh: {
  1364. return EvalUnaryIntrinsic(CI, sinhf, sinh);
  1365. } break;
  1366. case IntrinsicOp::IOP_cos: {
  1367. return EvalUnaryIntrinsic(CI, cosf, cos);
  1368. } break;
  1369. case IntrinsicOp::IOP_cosh: {
  1370. return EvalUnaryIntrinsic(CI, coshf, cosh);
  1371. } break;
  1372. case IntrinsicOp::IOP_asin: {
  1373. return EvalUnaryIntrinsic(CI, asinf, asin);
  1374. } break;
  1375. case IntrinsicOp::IOP_acos: {
  1376. return EvalUnaryIntrinsic(CI, acosf, acos);
  1377. } break;
  1378. case IntrinsicOp::IOP_atan: {
  1379. return EvalUnaryIntrinsic(CI, atanf, atan);
  1380. } break;
  1381. case IntrinsicOp::IOP_atan2: {
  1382. Value *V0 = CI->getArgOperand(0);
  1383. ConstantFP *fpV0 = cast<ConstantFP>(V0);
  1384. Value *V1 = CI->getArgOperand(1);
  1385. ConstantFP *fpV1 = cast<ConstantFP>(V1);
  1386. llvm::Type *Ty = CI->getType();
  1387. Value *Result = nullptr;
  1388. if (Ty->isDoubleTy()) {
  1389. double dV0 = fpV0->getValueAPF().convertToDouble();
  1390. double dV1 = fpV1->getValueAPF().convertToDouble();
  1391. Value *atanV = ConstantFP::get(CI->getType(), atan2(dV0, dV1));
  1392. CI->replaceAllUsesWith(atanV);
  1393. Result = atanV;
  1394. } else {
  1395. DXASSERT_NOMSG(Ty->isFloatTy());
  1396. float fV0 = fpV0->getValueAPF().convertToFloat();
  1397. float fV1 = fpV1->getValueAPF().convertToFloat();
  1398. Value *atanV = ConstantFP::get(CI->getType(), atan2f(fV0, fV1));
  1399. CI->replaceAllUsesWith(atanV);
  1400. Result = atanV;
  1401. }
  1402. CI->eraseFromParent();
  1403. return Result;
  1404. } break;
  1405. case IntrinsicOp::IOP_sqrt: {
  1406. return EvalUnaryIntrinsic(CI, sqrtf, sqrt);
  1407. } break;
  1408. case IntrinsicOp::IOP_rsqrt: {
  1409. auto rsqrtF = [](float v) -> float { return 1.0 / sqrtf(v); };
  1410. auto rsqrtD = [](double v) -> double { return 1.0 / sqrt(v); };
  1411. return EvalUnaryIntrinsic(CI, rsqrtF, rsqrtD);
  1412. } break;
  1413. case IntrinsicOp::IOP_exp: {
  1414. return EvalUnaryIntrinsic(CI, expf, exp);
  1415. } break;
  1416. case IntrinsicOp::IOP_exp2: {
  1417. return EvalUnaryIntrinsic(CI, exp2f, exp2);
  1418. } break;
  1419. case IntrinsicOp::IOP_log: {
  1420. return EvalUnaryIntrinsic(CI, logf, log);
  1421. } break;
  1422. case IntrinsicOp::IOP_log10: {
  1423. return EvalUnaryIntrinsic(CI, log10f, log10);
  1424. } break;
  1425. case IntrinsicOp::IOP_log2: {
  1426. return EvalUnaryIntrinsic(CI, log2f, log2);
  1427. } break;
  1428. case IntrinsicOp::IOP_pow: {
  1429. return EvalBinaryIntrinsic(CI, powf, pow);
  1430. } break;
  1431. case IntrinsicOp::IOP_max: {
  1432. auto maxF = [](float a, float b) -> float { return a > b ? a : b; };
  1433. auto maxD = [](double a, double b) -> double { return a > b ? a : b; };
  1434. auto imaxI = [](const APInt &a, const APInt &b) -> APInt {
  1435. return a.sgt(b) ? a : b;
  1436. };
  1437. return EvalBinaryIntrinsic(CI, maxF, maxD, imaxI);
  1438. } break;
  1439. case IntrinsicOp::IOP_min: {
  1440. auto minF = [](float a, float b) -> float { return a < b ? a : b; };
  1441. auto minD = [](double a, double b) -> double { return a < b ? a : b; };
  1442. auto iminI = [](const APInt &a, const APInt &b) -> APInt {
  1443. return a.slt(b) ? a : b;
  1444. };
  1445. return EvalBinaryIntrinsic(CI, minF, minD, iminI);
  1446. } break;
  1447. case IntrinsicOp::IOP_umax: {
  1448. DXASSERT_NOMSG(
  1449. CI->getArgOperand(0)->getType()->getScalarType()->isIntegerTy());
  1450. auto umaxI = [](const APInt &a, const APInt &b) -> APInt {
  1451. return a.ugt(b) ? a : b;
  1452. };
  1453. return EvalBinaryIntrinsic(CI, nullptr, nullptr, umaxI);
  1454. } break;
  1455. case IntrinsicOp::IOP_umin: {
  1456. DXASSERT_NOMSG(
  1457. CI->getArgOperand(0)->getType()->getScalarType()->isIntegerTy());
  1458. auto uminI = [](const APInt &a, const APInt &b) -> APInt {
  1459. return a.ult(b) ? a : b;
  1460. };
  1461. return EvalBinaryIntrinsic(CI, nullptr, nullptr, uminI);
  1462. } break;
  1463. case IntrinsicOp::IOP_rcp: {
  1464. auto rcpF = [](float v) -> float { return 1.0 / v; };
  1465. auto rcpD = [](double v) -> double { return 1.0 / v; };
  1466. return EvalUnaryIntrinsic(CI, rcpF, rcpD);
  1467. } break;
  1468. case IntrinsicOp::IOP_ceil: {
  1469. return EvalUnaryIntrinsic(CI, ceilf, ceil);
  1470. } break;
  1471. case IntrinsicOp::IOP_floor: {
  1472. return EvalUnaryIntrinsic(CI, floorf, floor);
  1473. } break;
  1474. case IntrinsicOp::IOP_round: {
  1475. // round intrinsic could exhibit different behaviour for constant and
  1476. // runtime evaluations. E.g., for round(0.5): constant evaluation results in
  1477. // 1 (away from zero rounding), while runtime evaluation results in 0
  1478. // (nearest even rounding).
  1479. //
  1480. // For back compat, DXC still preserves the above behavior for language
  1481. // versions 2016 or below. However, for newer language versions, DXC now
  1482. // always use nearest even for round() intrinsic in all cases.
  1483. if (hlslVersion <= 2016) {
  1484. return EvalUnaryIntrinsic(CI, roundf, round);
  1485. } else {
  1486. auto roundingMode = fegetround();
  1487. fesetround(FE_TONEAREST);
  1488. Value *result = EvalUnaryIntrinsic(CI, nearbyintf, nearbyint);
  1489. fesetround(roundingMode);
  1490. return result;
  1491. }
  1492. } break;
  1493. case IntrinsicOp::IOP_trunc: {
  1494. return EvalUnaryIntrinsic(CI, truncf, trunc);
  1495. } break;
  1496. case IntrinsicOp::IOP_frac: {
  1497. auto fracF = [](float v) -> float { return v - floor(v); };
  1498. auto fracD = [](double v) -> double { return v - floor(v); };
  1499. return EvalUnaryIntrinsic(CI, fracF, fracD);
  1500. } break;
  1501. case IntrinsicOp::IOP_isnan: {
  1502. Value *V = CI->getArgOperand(0);
  1503. ConstantFP *fV = cast<ConstantFP>(V);
  1504. bool isNan = fV->getValueAPF().isNaN();
  1505. Constant *cNan = ConstantInt::get(CI->getType(), isNan ? 1 : 0);
  1506. CI->replaceAllUsesWith(cNan);
  1507. CI->eraseFromParent();
  1508. return cNan;
  1509. } break;
  1510. default:
  1511. return nullptr;
  1512. }
  1513. }
// Do simple transform to make later lower pass easier.
// Walks every instruction in the module through SimpleTransformForHLDXIRInst,
// then simplifies bitcast users of static globals. Instructions made dead are
// collected and deleted in two phases (dropAllReferences first, then erase)
// so mutually-referencing dead instructions can be removed safely.
void SimpleTransformForHLDXIR(llvm::Module *pM) {
  SmallInstSet deadInsts;
  for (Function &F : pM->functions()) {
    for (BasicBlock &BB : F.getBasicBlockList()) {
      for (BasicBlock::iterator Iter = BB.begin(); Iter != BB.end();) {
        // Advance the iterator before transforming: the transform may modify
        // or replace the current instruction.
        Instruction *I = (Iter++);
        if (deadInsts.count(I))
          continue; // Skip dead instructions
        SimpleTransformForHLDXIRInst(I, deadInsts);
      }
    }
  }
  // Phase 1: drop operand references to break any cross-references.
  for (Instruction *I : deadInsts)
    I->dropAllReferences();
  // Phase 2: actually erase.
  for (Instruction *I : deadInsts)
    I->eraseFromParent();
  deadInsts.clear();
  // Also simplify constant bitcast users of static globals; any instructions
  // this kills are collected into deadInsts again.
  for (GlobalVariable &GV : pM->globals()) {
    if (dxilutil::IsStaticGlobal(&GV)) {
      for (User *U : GV.users()) {
        if (BitCastOperator *BCO = dyn_cast<BitCastOperator>(U)) {
          SimplifyBitCast(BCO, deadInsts);
        }
      }
    }
  }
  for (Instruction *I : deadInsts)
    I->dropAllReferences();
  for (Instruction *I : deadInsts)
    I->eraseFromParent();
}
  1546. } // namespace CGHLSLMSHelper
  1547. namespace {
  1548. unsigned RoundToAlign(unsigned num, unsigned mod) {
  1549. // round num to next highest mod
  1550. if (mod != 0)
  1551. return mod * ((num + mod - 1) / mod);
  1552. return num;
  1553. }
  1554. // Retrieve the last scalar or vector element type.
  1555. // This has to be recursive for the nasty empty struct case.
  1556. // returns true if found, false if we must backtrack.
  1557. bool RetrieveLastElementType(Type *Ty, Type *&EltTy) {
  1558. if (Ty->isStructTy()) {
  1559. if (Ty->getStructNumElements() == 0)
  1560. return false;
  1561. for (unsigned i = Ty->getStructNumElements(); i > 0; --i) {
  1562. if (RetrieveLastElementType(Ty->getStructElementType(i - 1), EltTy))
  1563. return true;
  1564. }
  1565. } else if (Ty->isArrayTy()) {
  1566. if (RetrieveLastElementType(Ty->getArrayElementType(), EltTy))
  1567. return true;
  1568. } else if ((Ty->isVectorTy() || Ty->isSingleValueType())) {
  1569. EltTy = Ty->getScalarType();
  1570. return true;
  1571. }
  1572. return false;
  1573. }
// Here the size is CB size.
// Offset still needs to be aligned based on type since this
// is the legacy cbuffer global path.
//
// Returns the byte offset at which a constant of type Ty may be placed under
// legacy 16-byte-row packing. bCurRowIsMinPrec is in/out state threaded
// across consecutive calls: in min-precision mode a row must not mix
// min-precision and full-precision components, so a new row starts whenever
// the component precision changes.
unsigned AlignCBufferOffset(unsigned offset, unsigned size, llvm::Type *Ty,
                            bool bRowMajor, bool bMinPrecMode,
                            bool &bCurRowIsMinPrec) {
  DXASSERT(!(offset & 1), "otherwise we have an invalid offset.");
  // Arrays always begin on a fresh row.
  bool bNeedNewRow = Ty->isArrayTy();
  // In min-precision mode, a new row is needed when
  // going into or out of min-precision component type.
  if (!bNeedNewRow) {
    bool bMinPrec = false;
    if (Ty->isStructTy()) {
      if (HLMatrixType mat = HLMatrixType::dyn_cast(Ty)) {
        // A matrix spanning more than one register row must start on a new
        // row (rows for row-major, columns for column-major).
        bNeedNewRow |= !bRowMajor && mat.getNumColumns() > 1;
        bNeedNewRow |= bRowMajor && mat.getNumRows() > 1;
        bMinPrec = bMinPrecMode &&
                   mat.getElementType(false)->getScalarSizeInBits() < 32;
      } else {
        // Non-matrix structs always start on a new row.
        bNeedNewRow = true;
        if (bMinPrecMode) {
          // Need to get min-prec of last element of structure,
          // in case we pack something else into the end.
          Type *EltTy = nullptr;
          if (RetrieveLastElementType(Ty, EltTy))
            bCurRowIsMinPrec = EltTy->getScalarSizeInBits() < 32;
        }
      }
    } else {
      DXASSERT_NOMSG(Ty->isVectorTy() || Ty->isSingleValueType());
      // vector or scalar
      bMinPrec = bMinPrecMode && Ty->getScalarSizeInBits() < 32;
    }
    if (bMinPrecMode) {
      // Force a new row on any min-precision <-> full-precision transition,
      // then remember the precision of the (possibly new) current row.
      bNeedNewRow |= bCurRowIsMinPrec != bMinPrec;
      bCurRowIsMinPrec = bMinPrec;
    }
  }
  unsigned scalarSizeInBytes = Ty->getScalarSizeInBits() / 8;
  return AlignBufferOffsetInLegacy(offset, size, scalarSizeInBytes,
                                   bNeedNewRow);
}
  1616. unsigned AllocateDxilConstantBuffer(
  1617. HLCBuffer &CB,
  1618. std::unordered_map<Constant *, DxilFieldAnnotation> &constVarAnnotationMap,
  1619. bool bMinPrecMode) {
  1620. unsigned offset = 0;
  1621. // Scan user allocated constants first.
  1622. // Update offset.
  1623. for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
  1624. if (C->GetLowerBound() == UINT_MAX)
  1625. continue;
  1626. unsigned size = C->GetRangeSize();
  1627. unsigned nextOffset = size + C->GetLowerBound();
  1628. if (offset < nextOffset)
  1629. offset = nextOffset;
  1630. }
  1631. // Alloc after user allocated constants.
  1632. bool bCurRowIsMinPrec = false;
  1633. for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
  1634. if (C->GetLowerBound() != UINT_MAX)
  1635. continue;
  1636. unsigned size = C->GetRangeSize();
  1637. llvm::Type *Ty = C->GetGlobalSymbol()->getType()->getPointerElementType();
  1638. auto fieldAnnotation = constVarAnnotationMap.at(C->GetGlobalSymbol());
  1639. bool bRowMajor = HLMatrixType::isa(Ty)
  1640. ? fieldAnnotation.GetMatrixAnnotation().Orientation ==
  1641. MatrixOrientation::RowMajor
  1642. : false;
  1643. // Align offset.
  1644. offset = AlignCBufferOffset(offset, size, Ty, bRowMajor, bMinPrecMode,
  1645. bCurRowIsMinPrec);
  1646. if (C->GetLowerBound() == UINT_MAX) {
  1647. C->SetLowerBound(offset);
  1648. }
  1649. offset += size;
  1650. }
  1651. return offset;
  1652. }
  1653. void AllocateDxilConstantBuffers(
  1654. HLModule &HLM, std::unordered_map<Constant *, DxilFieldAnnotation>
  1655. &constVarAnnotationMap) {
  1656. for (unsigned i = 0; i < HLM.GetCBuffers().size(); i++) {
  1657. HLCBuffer &CB = *static_cast<HLCBuffer *>(&(HLM.GetCBuffer(i)));
  1658. unsigned size = AllocateDxilConstantBuffer(
  1659. CB, constVarAnnotationMap, HLM.GetHLOptions().bUseMinPrecision);
  1660. CB.SetSize(size);
  1661. }
  1662. }
  1663. } // namespace
  1664. namespace {
// Replace all uses of V that occur inside function F with NewV.
// Instruction users in other functions are left alone. Constant operator
// users (GEP / bitcast / global initializer) cannot be edited per-function,
// so an instruction clone is built with Builder and the replacement recurses
// into the operator's own uses.
// The user iterator is advanced before any mutation because
// replaceUsesOfWith / eraseFromParent invalidate the current use.
void ReplaceUseInFunction(Value *V, Value *NewV, Function *F,
                          IRBuilder<> &Builder) {
  for (auto U = V->user_begin(); U != V->user_end();) {
    User *user = *(U++);
    if (Instruction *I = dyn_cast<Instruction>(user)) {
      if (I->getParent()->getParent() == F) {
        // replace use with GEP if in F
        if (BitCastInst *BCI = dyn_cast<BitCastInst>(I)) {
          // A bitcast to exactly NewV's type is redundant after the
          // replacement; forward its uses straight to NewV and drop it.
          if (BCI->getType() == NewV->getType()) {
            I->replaceAllUsesWith(NewV);
            I->eraseFromParent();
            continue;
          }
        }
        I->replaceUsesOfWith(V, NewV);
      }
    } else {
      // For constant operator, create local clone which use GEP.
      // Only support GEP and bitcast.
      if (GEPOperator *GEPOp = dyn_cast<GEPOperator>(user)) {
        std::vector<Value *> idxList(GEPOp->idx_begin(), GEPOp->idx_end());
        Value *NewGEP = Builder.CreateInBoundsGEP(NewV, idxList);
        ReplaceUseInFunction(GEPOp, NewGEP, F, Builder);
      } else if (GlobalVariable *GV = dyn_cast<GlobalVariable>(user)) {
        // Change the init val into NewV with Store.
        GV->setInitializer(nullptr);
        Builder.CreateStore(NewV, GV);
      } else {
        // Must be bitcast here.
        BitCastOperator *BC = cast<BitCastOperator>(user);
        Value *NewBC = Builder.CreateBitCast(NewV, BC->getType());
        ReplaceUseInFunction(BC, NewBC, F, Builder);
      }
    }
  }
}
  1701. void MarkUsedFunctionForConst(Value *V,
  1702. std::unordered_set<Function *> &usedFunc) {
  1703. for (auto U = V->user_begin(); U != V->user_end();) {
  1704. User *user = *(U++);
  1705. if (Instruction *I = dyn_cast<Instruction>(user)) {
  1706. Function *F = I->getParent()->getParent();
  1707. usedFunc.insert(F);
  1708. } else {
  1709. // For constant operator, create local clone which use GEP.
  1710. // Only support GEP and bitcast.
  1711. if (GEPOperator *GEPOp = dyn_cast<GEPOperator>(user)) {
  1712. MarkUsedFunctionForConst(GEPOp, usedFunc);
  1713. } else if (GlobalVariable *GV = dyn_cast<GlobalVariable>(user)) {
  1714. MarkUsedFunctionForConst(GV, usedFunc);
  1715. } else {
  1716. // Must be bitcast here.
  1717. BitCastOperator *BC = cast<BitCastOperator>(user);
  1718. MarkUsedFunctionForConst(BC, usedFunc);
  1719. }
  1720. }
  1721. }
  1722. }
// Materialize the cbuffer global variable for CB (a struct, or an array of
// struct for ConstantBuffer<T> arrays) and rewrite every use of the member
// constants in every user function to go through
// HLCreateHandle -> annotated handle -> HLSubscript(CBufferSubscript) -> GEP.
// Returns false (creating nothing) when no member of the cbuffer is used.
bool CreateCBufferVariable(HLCBuffer &CB, HLModule &HLM, llvm::Type *HandleTy) {
  bool bUsed = false;
  // Build Struct for CBuffer.
  SmallVector<llvm::Type *, 4> Elements;
  for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
    Value *GV = C->GetGlobalSymbol();
    if (!GV->use_empty())
      bUsed = true;
    // Global variable must be pointer type.
    llvm::Type *Ty = GV->getType()->getPointerElementType();
    Elements.emplace_back(Ty);
  }
  // Don't create CBuffer variable for unused cbuffer.
  if (!bUsed)
    return false;
  llvm::Module &M = *HLM.GetModule();
  bool isCBArray = CB.IsArray();
  llvm::GlobalVariable *cbGV = nullptr;
  llvm::Type *cbTy = nullptr;
  // For cbuffer arrays: number of array dimensions that must be linearized
  // into the subscript index.
  unsigned cbIndexDepth = 0;
  if (!isCBArray) {
    if (CB.IsView()) {
      // ConstantBuffer<T>: layout type comes from the result type T.
      llvm::StructType *CBStructTy =
          llvm::StructType::create(CB.GetResultType(), CB.GetGlobalName());
      cbGV = new llvm::GlobalVariable(M, CBStructTy,
                                      /*IsConstant*/ true,
                                      llvm::GlobalValue::ExternalLinkage,
                                      /*InitVal*/ nullptr, CB.GetGlobalName());
      cbTy = cbGV->getType();
    } else {
      // Plain cbuffer block: layout struct is built from the member globals.
      llvm::StructType *CBStructTy =
          llvm::StructType::create(Elements, CB.GetGlobalName());
      cbGV = new llvm::GlobalVariable(M, CBStructTy, /*IsConstant*/ true,
                                      llvm::GlobalValue::ExternalLinkage,
                                      /*InitVal*/ nullptr, CB.GetGlobalName());
      cbTy = cbGV->getType();
    }
  } else {
    // For array of ConstantBuffer, create array of struct instead of struct
    // of array.
    DXASSERT(CB.GetConstants().size() == 1,
             "ConstantBuffer should have 1 constant");
    Value *GV = CB.GetConstants()[0]->GetGlobalSymbol();
    llvm::Type *CBEltTy =
        GV->getType()->getPointerElementType()->getArrayElementType();
    cbIndexDepth = 1;
    // Count nested array dimensions.
    while (CBEltTy->isArrayTy()) {
      CBEltTy = CBEltTy->getArrayElementType();
      cbIndexDepth++;
    }
    // Add one level struct type to match normal case.
    llvm::StructType *CBStructTy =
        llvm::StructType::create({CB.GetResultType()}, CB.GetGlobalName());
    llvm::ArrayType *CBArrayTy =
        llvm::ArrayType::get(CBStructTy, CB.GetRangeSize());
    cbGV = new llvm::GlobalVariable(M, CBArrayTy, /*IsConstant*/ true,
                                    llvm::GlobalValue::ExternalLinkage,
                                    /*InitVal*/ nullptr, CB.GetGlobalName());
    // Subscripting an element yields a pointer to the one-level struct.
    cbTy = llvm::PointerType::get(CBStructTy,
                                  cbGV->getType()->getPointerAddressSpace());
  }
  CB.SetGlobalSymbol(cbGV);
  llvm::Type *opcodeTy = llvm::Type::getInt32Ty(M.getContext());
  llvm::Type *idxTy = opcodeTy;
  Constant *zeroIdx = ConstantInt::get(opcodeTy, 0);
  // Args for HLCreateHandle: {resource global, index}; the index slot is
  // overwritten per-use for cbuffer arrays.
  Value *HandleArgs[] = {cbGV, zeroIdx};
  llvm::FunctionType *SubscriptFuncTy =
      llvm::FunctionType::get(cbTy, {opcodeTy, HandleTy, idxTy}, false);
  Function *subscriptFunc =
      GetOrCreateHLFunction(M, SubscriptFuncTy, HLOpcodeGroup::HLSubscript,
                            (unsigned)HLSubscriptOpcode::CBufferSubscript);
  Constant *opArg =
      ConstantInt::get(opcodeTy, (unsigned)HLSubscriptOpcode::CBufferSubscript);
  // Args for the subscript call: {opcode, handle (filled later), index}.
  Value *args[] = {opArg, nullptr, zeroIdx};
  llvm::LLVMContext &Context = M.getContext();
  llvm::Type *i32Ty = llvm::Type::getInt32Ty(Context);
  Value *zero = ConstantInt::get(i32Ty, (uint64_t)0);
  // Per-constant: its field index in the layout struct, and the set of
  // functions referencing it (so unrelated functions get no handle code).
  std::vector<Value *> indexArray(CB.GetConstants().size());
  std::vector<std::unordered_set<Function *>> constUsedFuncList(
      CB.GetConstants().size());
  for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
    Value *idx = ConstantInt::get(i32Ty, C->GetID());
    indexArray[C->GetID()] = idx;
    Value *GV = C->GetGlobalSymbol();
    MarkUsedFunctionForConst(GV, constUsedFuncList[C->GetID()]);
  }
  for (Function &F : M.functions()) {
    if (F.isDeclaration())
      continue;
    if (GetHLOpcodeGroupByName(&F) != HLOpcodeGroup::NotHL)
      continue;
    IRBuilder<> Builder(F.getEntryBlock().getFirstInsertionPt());
    // create HL subscript to make all the use of cbuffer start from it.
    HandleArgs[HLOperandIndex::kCreateHandleResourceOpIdx - 1] = cbGV;
    CallInst *Handle = HLM.EmitHLOperationCall(
        Builder, HLOpcodeGroup::HLCreateHandle, 0, HandleTy, HandleArgs, M);
    CallInst *OrigHandle = Handle;
    DxilResourceProperties RP = resource_helper::loadPropsFromResourceBase(&CB);
    Handle = CreateAnnotateHandle(HLM, Handle, RP,
                                  cbGV->getType()->getElementType(), Builder);
    args[HLOperandIndex::kSubscriptObjectOpIdx] = Handle;
    Instruction *cbSubscript =
        cast<Instruction>(Builder.CreateCall(subscriptFunc, {args}));
    // Replace constant var with GEP pGV
    for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
      Value *GV = C->GetGlobalSymbol();
      if (constUsedFuncList[C->GetID()].count(&F) == 0)
        continue; // constant not referenced from this function
      Value *idx = indexArray[C->GetID()];
      if (!isCBArray) {
        Instruction *GEP = cast<Instruction>(
            Builder.CreateInBoundsGEP(cbSubscript, {zero, idx}));
        // TODO: make sure the debug info is synced to GEP.
        // GEP->setDebugLoc(GV);
        ReplaceUseInFunction(GV, GEP, &F, Builder);
        // Delete if no use in F.
        if (GEP->user_empty())
          GEP->eraseFromParent();
      } else {
        // CBuffer-array path: each use is a constant GEP whose leading array
        // indices are linearized into the per-use subscript index.
        for (auto U = GV->user_begin(); U != GV->user_end();) {
          User *user = *(U++);
          if (user->user_empty())
            continue;
          Instruction *I = dyn_cast<Instruction>(user);
          if (I && I->getParent()->getParent() != &F)
            continue;
          // Instruction users get code built right before them; constant
          // operator users use the function-entry builder.
          IRBuilder<> *instBuilder = &Builder;
          std::unique_ptr<IRBuilder<>> B;
          if (I) {
            B = llvm::make_unique<IRBuilder<>>(I);
            instBuilder = B.get();
          }
          GEPOperator *GEPOp = cast<GEPOperator>(user);
          std::vector<Value *> idxList;
          DXASSERT(GEPOp->getNumIndices() >= 1 + cbIndexDepth,
                   "must indexing ConstantBuffer array");
          idxList.reserve(GEPOp->getNumIndices() - (cbIndexDepth - 1));
          gep_type_iterator GI = gep_type_begin(*GEPOp),
                            E = gep_type_end(*GEPOp);
          idxList.push_back(GI.getOperand());
          // change array index with 0 for struct index.
          idxList.push_back(zero);
          GI++;
          Value *arrayIdx = GI.getOperand();
          GI++;
          // Flatten multi-dimensional array indices into one linear index.
          for (unsigned curIndex = 1; GI != E && curIndex < cbIndexDepth;
               ++GI, ++curIndex) {
            arrayIdx = instBuilder->CreateMul(
                arrayIdx, Builder.getInt32(GI->getArrayNumElements()));
            arrayIdx = instBuilder->CreateAdd(arrayIdx, GI.getOperand());
          }
          // Remaining indices address members inside the element struct.
          for (; GI != E; ++GI) {
            idxList.push_back(GI.getOperand());
          }
          HandleArgs[HLOperandIndex::kCreateHandleIndexOpIdx - 1] = arrayIdx;
          CallInst *Handle =
              HLM.EmitHLOperationCall(*instBuilder,
                                      HLOpcodeGroup::HLCreateHandle, 0,
                                      HandleTy, HandleArgs, M);
          DxilResourceProperties RP =
              resource_helper::loadPropsFromResourceBase(&CB);
          Handle = CreateAnnotateHandle(HLM, Handle, RP,
                                        cbGV->getType()->getElementType(),
                                        *instBuilder);
          args[HLOperandIndex::kSubscriptObjectOpIdx] = Handle;
          args[HLOperandIndex::kSubscriptIndexOpIdx] = arrayIdx;
          Instruction *cbSubscript =
              cast<Instruction>(instBuilder->CreateCall(subscriptFunc, {args}));
          Instruction *NewGEP = cast<Instruction>(
              instBuilder->CreateInBoundsGEP(cbSubscript, idxList));
          ReplaceUseInFunction(GEPOp, NewGEP, &F, *instBuilder);
        }
      }
    }
    // Delete if no use in F.
    if (cbSubscript->user_empty()) {
      cbSubscript->eraseFromParent();
      Handle->eraseFromParent();
      OrigHandle->eraseFromParent();
    } else {
      // merge GEP use for cbSubscript.
      HLModule::MergeGepUse(cbSubscript);
    }
  }
  return true;
}
  1905. void ConstructCBufferAnnotation(
  1906. HLCBuffer &CB, DxilTypeSystem &dxilTypeSys,
  1907. std::unordered_map<Constant *, DxilFieldAnnotation> &AnnotationMap) {
  1908. Value *GV = CB.GetGlobalSymbol();
  1909. llvm::StructType *CBStructTy =
  1910. dyn_cast<llvm::StructType>(GV->getType()->getPointerElementType());
  1911. if (!CBStructTy) {
  1912. // For Array of ConstantBuffer.
  1913. llvm::ArrayType *CBArrayTy =
  1914. cast<llvm::ArrayType>(GV->getType()->getPointerElementType());
  1915. CBStructTy = cast<llvm::StructType>(CBArrayTy->getArrayElementType());
  1916. }
  1917. DxilStructAnnotation *CBAnnotation =
  1918. dxilTypeSys.AddStructAnnotation(CBStructTy);
  1919. CBAnnotation->SetCBufferSize(CB.GetSize());
  1920. // Set fieldAnnotation for each constant var.
  1921. for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
  1922. Constant *GV = C->GetGlobalSymbol();
  1923. DxilFieldAnnotation &fieldAnnotation =
  1924. CBAnnotation->GetFieldAnnotation(C->GetID());
  1925. fieldAnnotation = AnnotationMap[GV];
  1926. // This is after CBuffer allocation.
  1927. fieldAnnotation.SetCBufferOffset(C->GetLowerBound());
  1928. fieldAnnotation.SetFieldName(C->GetGlobalName());
  1929. }
  1930. }
  1931. void ConstructCBuffer(
  1932. HLModule &HLM, llvm::Type *CBufferType,
  1933. std::unordered_map<Constant *, DxilFieldAnnotation> &AnnotationMap) {
  1934. DxilTypeSystem &dxilTypeSys = HLM.GetTypeSystem();
  1935. llvm::Type *HandleTy = HLM.GetOP()->GetHandleType();
  1936. for (unsigned i = 0; i < HLM.GetCBuffers().size(); i++) {
  1937. HLCBuffer &CB = *static_cast<HLCBuffer *>(&(HLM.GetCBuffer(i)));
  1938. if (CB.GetConstants().size() == 0) {
  1939. // Create Fake variable for cbuffer which is empty.
  1940. llvm::GlobalVariable *pGV = new llvm::GlobalVariable(
  1941. *HLM.GetModule(), CBufferType, true,
  1942. llvm::GlobalValue::ExternalLinkage, nullptr, CB.GetGlobalName());
  1943. CB.SetGlobalSymbol(pGV);
  1944. } else {
  1945. bool bCreated = CreateCBufferVariable(CB, HLM, HandleTy);
  1946. if (bCreated)
  1947. ConstructCBufferAnnotation(CB, dxilTypeSys, AnnotationMap);
  1948. else {
  1949. // Create Fake variable for cbuffer which is unused.
  1950. llvm::GlobalVariable *pGV = new llvm::GlobalVariable(
  1951. *HLM.GetModule(), CBufferType, true,
  1952. llvm::GlobalValue::ExternalLinkage, nullptr, CB.GetGlobalName());
  1953. CB.SetGlobalSymbol(pGV);
  1954. }
  1955. }
  1956. // Clear the constants which useless now.
  1957. CB.GetConstants().clear();
  1958. }
  1959. }
  1960. } // namespace
  1961. namespace CGHLSLMSHelper {
  1962. // Align cbuffer offset in legacy mode (16 bytes per row).
  1963. unsigned AlignBufferOffsetInLegacy(unsigned offset, unsigned size,
  1964. unsigned scalarSizeInBytes,
  1965. bool bNeedNewRow) {
  1966. if (unsigned remainder = (offset & 0xf)) {
  1967. // Start from new row
  1968. if (remainder + size > 16 || bNeedNewRow) {
  1969. return offset + 16 - remainder;
  1970. }
  1971. // If not, naturally align data
  1972. return RoundToAlign(offset, scalarSizeInBytes);
  1973. }
  1974. return offset;
  1975. }
// Translate RayQuery constructor. From:
// %call = call %"RayQuery<flags>" @<constructor>(%"RayQuery<flags>" %ptr)
// To:
// i32 %handle = AllocateRayQuery(i32 <IntrinsicOp::IOP_AllocateRayQuery>, i32
// %flags) %gep = GEP %"RayQuery<flags>" %ptr, 0, 0 store i32* %gep, i32
// %handle ; and replace uses of %call with %ptr
void TranslateRayQueryConstructor(HLModule &HLM) {
  llvm::Module &M = *HLM.GetModule();
  // Collect matching constructors first; they are erased after rewriting.
  SmallVector<Function *, 4> Constructors;
  for (auto &F : M.functions()) {
    // Match templated RayQuery constructor instantiation by prefix and
    // signature. It should be impossible to achieve the same signature from
    // HLSL.
    if (!F.getName().startswith("\01??0?$RayQuery@$"))
      continue;
    // Constructor must return a pointer to a RayQuery struct...
    llvm::Type *Ty = F.getReturnType();
    if (!Ty->isPointerTy() ||
        !dxilutil::IsHLSLRayQueryType(Ty->getPointerElementType()))
      continue;
    // ...and take exactly one argument of that same pointer type (this).
    if (F.arg_size() != 1 || Ty != F.arg_begin()->getType())
      continue;
    Constructors.emplace_back(&F);
  }
  for (auto pConstructorFunc : Constructors) {
    llvm::IntegerType *i32Ty = llvm::Type::getInt32Ty(M.getContext());
    llvm::ConstantInt *i32Zero =
        llvm::ConstantInt::get(i32Ty, (uint64_t)0, false);
    // AllocateRayQuery HL intrinsic: i32(i32 opcode, i32 rayFlags).
    llvm::FunctionType *funcTy =
        llvm::FunctionType::get(i32Ty, {i32Ty, i32Ty}, false);
    unsigned opcode = (unsigned)IntrinsicOp::IOP_AllocateRayQuery;
    llvm::ConstantInt *opVal = llvm::ConstantInt::get(i32Ty, opcode, false);
    Function *opFunc =
        GetOrCreateHLFunction(M, funcTy, HLOpcodeGroup::HLIntrinsic, opcode);
    // Each call is erased inside the loop, so user_begin() always yields a
    // not-yet-rewritten call until the list is drained.
    while (!pConstructorFunc->user_empty()) {
      Value *V = *pConstructorFunc->user_begin();
      llvm::CallInst *CI = cast<CallInst>(V); // Must be call
      llvm::Value *pThis = CI->getArgOperand(0);
      llvm::StructType *pRQType =
          cast<llvm::StructType>(pThis->getType()->getPointerElementType());
      // The RayQuery template flags are recovered from the struct's type
      // annotation (its single integral template argument).
      DxilStructAnnotation *SA =
          HLM.GetTypeSystem().GetStructAnnotation(pRQType);
      DXASSERT(SA, "otherwise, could not find type annoation for RayQuery "
                   "specialization");
      DXASSERT(SA->GetNumTemplateArgs() == 1 &&
                   SA->GetTemplateArgAnnotation(0).IsIntegral(),
               "otherwise, RayQuery has changed, or lacks template args");
      llvm::IRBuilder<> Builder(CI);
      llvm::Value *rayFlags =
          Builder.getInt32(SA->GetTemplateArgAnnotation(0).GetIntegral());
      llvm::Value *Call =
          Builder.CreateCall(opFunc, {opVal, rayFlags}, pThis->getName());
      // Store the allocated handle into the object's first (i32) field.
      llvm::Value *GEP = Builder.CreateInBoundsGEP(pThis, {i32Zero, i32Zero});
      Builder.CreateStore(Call, GEP);
      CI->replaceAllUsesWith(pThis);
      CI->eraseFromParent();
    }
    pConstructorFunc->eraseFromParent();
  }
}
  2035. } // namespace CGHLSLMSHelper
  2036. namespace {
  2037. bool BuildImmInit(Function *Ctor) {
  2038. GlobalVariable *GV = nullptr;
  2039. SmallVector<Constant *, 4> ImmList;
  2040. bool allConst = true;
  2041. for (inst_iterator I = inst_begin(Ctor), E = inst_end(Ctor); I != E; ++I) {
  2042. if (StoreInst *SI = dyn_cast<StoreInst>(&(*I))) {
  2043. Value *V = SI->getValueOperand();
  2044. if (!isa<Constant>(V) || V->getType()->isPointerTy()) {
  2045. allConst = false;
  2046. break;
  2047. }
  2048. ImmList.emplace_back(cast<Constant>(V));
  2049. Value *Ptr = SI->getPointerOperand();
  2050. if (GEPOperator *GepOp = dyn_cast<GEPOperator>(Ptr)) {
  2051. Ptr = GepOp->getPointerOperand();
  2052. if (GlobalVariable *pGV = dyn_cast<GlobalVariable>(Ptr)) {
  2053. if (GV == nullptr)
  2054. GV = pGV;
  2055. else {
  2056. DXASSERT(GV == pGV, "else pointer mismatch");
  2057. }
  2058. }
  2059. }
  2060. } else {
  2061. if (!isa<ReturnInst>(*I)) {
  2062. allConst = false;
  2063. break;
  2064. }
  2065. }
  2066. }
  2067. if (!allConst)
  2068. return false;
  2069. if (!GV)
  2070. return false;
  2071. llvm::Type *Ty = GV->getType()->getElementType();
  2072. llvm::ArrayType *AT = dyn_cast<llvm::ArrayType>(Ty);
  2073. // TODO: support other types.
  2074. if (!AT)
  2075. return false;
  2076. if (ImmList.size() != AT->getNumElements())
  2077. return false;
  2078. Constant *Init = llvm::ConstantArray::get(AT, ImmList);
  2079. GV->setInitializer(Init);
  2080. return true;
  2081. }
  2082. } // namespace
  2083. namespace CGHLSLMSHelper {
  2084. void ProcessCtorFunctions(llvm::Module &M, StringRef globalName,
  2085. Instruction *InsertPt, bool bRemoveGlobal) {
  2086. // add global call to entry func
  2087. GlobalVariable *GV = M.getGlobalVariable(globalName);
  2088. if (!GV)
  2089. return;
  2090. ConstantArray *CA = dyn_cast<ConstantArray>(GV->getInitializer());
  2091. if (!CA)
  2092. return;
  2093. IRBuilder<> Builder(InsertPt);
  2094. for (User::op_iterator i = CA->op_begin(), e = CA->op_end(); i != e; ++i) {
  2095. if (isa<ConstantAggregateZero>(*i))
  2096. continue;
  2097. ConstantStruct *CS = cast<ConstantStruct>(*i);
  2098. if (isa<ConstantPointerNull>(CS->getOperand(1)))
  2099. continue;
  2100. // Must have a function or null ptr.
  2101. if (!isa<Function>(CS->getOperand(1)))
  2102. continue;
  2103. Function *Ctor = cast<Function>(CS->getOperand(1));
  2104. DXASSERT(Ctor->getReturnType()->isVoidTy() && Ctor->arg_size() == 0,
  2105. "function type must be void (void)");
  2106. for (inst_iterator I = inst_begin(Ctor), E = inst_end(Ctor); I != E; ++I) {
  2107. if (CallInst *CI = dyn_cast<CallInst>(&(*I))) {
  2108. Function *F = CI->getCalledFunction();
  2109. // Try to build imm initilizer.
  2110. // If not work, add global call to entry func.
  2111. if (BuildImmInit(F) == false) {
  2112. Builder.CreateCall(F);
  2113. }
  2114. } else {
  2115. DXASSERT(isa<ReturnInst>(&(*I)),
  2116. "else invalid Global constructor function");
  2117. }
  2118. }
  2119. }
  2120. // remove the GV
  2121. if (bRemoveGlobal) {
  2122. GV->eraseFromParent();
  2123. }
  2124. }
  2125. void FinishCBuffer(HLModule &HLM, llvm::Type *CBufferType,
  2126. std::unordered_map<Constant *, DxilFieldAnnotation>
  2127. &constVarAnnotationMap) {
  2128. // Allocate constant buffers.
  2129. AllocateDxilConstantBuffers(HLM, constVarAnnotationMap);
  2130. // TODO: create temp variable for constant which has store use.
  2131. // Create Global variable and type annotation for each CBuffer.
  2132. ConstructCBuffer(HLM, CBufferType, constVarAnnotationMap);
  2133. }
  2134. void AddRegBindingsForResourceInConstantBuffer(
  2135. HLModule &HLM,
  2136. llvm::DenseMap<llvm::Constant *,
  2137. llvm::SmallVector<std::pair<DXIL::ResourceClass, unsigned>,
  2138. 1>> &constantRegBindingMap) {
  2139. for (unsigned i = 0; i < HLM.GetCBuffers().size(); i++) {
  2140. HLCBuffer &CB = *static_cast<HLCBuffer *>(&(HLM.GetCBuffer(i)));
  2141. auto &Constants = CB.GetConstants();
  2142. for (unsigned j = 0; j < Constants.size(); j++) {
  2143. const std::unique_ptr<DxilResourceBase> &C = Constants[j];
  2144. Constant *CGV = C->GetGlobalSymbol();
  2145. auto &regBindings = constantRegBindingMap[CGV];
  2146. if (regBindings.empty())
  2147. continue;
  2148. unsigned Srv = UINT_MAX;
  2149. unsigned Uav = UINT_MAX;
  2150. unsigned Sampler = UINT_MAX;
  2151. for (auto it : regBindings) {
  2152. unsigned RegNum = it.second;
  2153. switch (it.first) {
  2154. case DXIL::ResourceClass::SRV:
  2155. Srv = RegNum;
  2156. break;
  2157. case DXIL::ResourceClass::UAV:
  2158. Uav = RegNum;
  2159. break;
  2160. case DXIL::ResourceClass::Sampler:
  2161. Sampler = RegNum;
  2162. break;
  2163. default:
  2164. DXASSERT(0, "invalid resource class");
  2165. break;
  2166. }
  2167. }
  2168. HLM.AddRegBinding(CB.GetID(), j, Srv, Uav, Sampler);
  2169. }
  2170. }
  2171. }
// extension codegen.
// Runs the HLSL-extensions hooks supplied by the embedding compiler:
// 1) writes extension semantic defines into the module, forwarding any
//    recorded errors/warnings through clang diagnostics, and
// 2) if the extension helper supplies a root signature via a #define,
//    compiles it and stores the serialized blob on the module (overriding
//    any root signature from a function attribute).
// NOTE(review): assumes HLSLExtensionsCodegen is non-null when this is
// called — confirm against callers.
void ExtensionCodeGen(HLModule &HLM, clang::CodeGen::CodeGenModule &CGM) {
  // Add semantic defines for extensions if any are available.
  HLSLExtensionsCodegenHelper::SemanticDefineErrorList errors =
      CGM.getCodeGenOpts().HLSLExtensionsCodegen->WriteSemanticDefines(
          HLM.GetModule());
  clang::DiagnosticsEngine &Diags = CGM.getDiags();
  for (const HLSLExtensionsCodegenHelper::SemanticDefineError &error : errors) {
    clang::DiagnosticsEngine::Level level = clang::DiagnosticsEngine::Error;
    if (error.IsWarning())
      level = clang::DiagnosticsEngine::Warning;
    // Each error carries its own message text; emit it verbatim via a
    // custom "%0" diagnostic at the recorded (raw-encoded) source location.
    unsigned DiagID = Diags.getCustomDiagID(level, "%0");
    Diags.Report(clang::SourceLocation::getFromRawEncoding(error.Location()),
                 DiagID)
        << error.Message();
  }
  // Add root signature from a #define. Overrides root signature in function
  // attribute.
  {
    using Status = HLSLExtensionsCodegenHelper::CustomRootSignature::Status;
    HLSLExtensionsCodegenHelper::CustomRootSignature customRootSig;
    HLSLExtensionsCodegenHelper::CustomRootSignature::Status status =
        CGM.getCodeGenOpts().HLSLExtensionsCodegen->GetCustomRootSignature(
            &customRootSig);
    if (status == Status::FOUND) {
      DxilRootSignatureVersion rootSigVer;
      // set root signature version.
      if (CGM.getLangOpts().RootSigMinor == 0) {
        rootSigVer = hlsl::DxilRootSignatureVersion::Version_1_0;
      } else {
        // Only root signature minor versions 0 and 1 are handled here.
        DXASSERT(CGM.getLangOpts().RootSigMinor == 1,
                 "else CGMSHLSLRuntime Constructor needs to be updated");
        rootSigVer = hlsl::DxilRootSignatureVersion::Version_1_1;
      }
      RootSignatureHandle RootSigHandle;
      CompileRootSignature(
          customRootSig.RootSignature, Diags,
          clang::SourceLocation::getFromRawEncoding(
              customRootSig.EncodedSourceLocation),
          rootSigVer, DxilRootSignatureCompilationFlags::GlobalRootSignature,
          &RootSigHandle);
      if (!RootSigHandle.IsEmpty()) {
        // Persist the serialized root signature on the module.
        RootSigHandle.EnsureSerializedAvailable();
        HLM.SetSerializedRootSignature(RootSigHandle.GetSerializedBytes(),
                                       RootSigHandle.GetSerializedSize());
      }
    }
  }
}
  2221. } // namespace CGHLSLMSHelper
  2222. namespace {
  2223. void ReportDisallowedTypeInExportParam(clang::CodeGen ::CodeGenModule &CGM,
  2224. StringRef name) {
  2225. clang::DiagnosticsEngine &Diags = CGM.getDiags();
  2226. unsigned DiagID =
  2227. Diags.getCustomDiagID(clang::DiagnosticsEngine::Error,
  2228. "Exported function %0 must not contain a "
  2229. "resource in parameter or return type.");
  2230. std::string escaped;
  2231. llvm::raw_string_ostream os(escaped);
  2232. dxilutil::PrintEscapedString(name, os);
  2233. Diags.Report(DiagID) << os.str();
  2234. }
  2235. } // namespace
  2236. namespace CGHLSLMSHelper {
// For each vertex-shader entry in clipPlaneFuncList, replace every recorded
// clip-plane value with a fresh external global named "SV_ClipPlane<i>":
// the original value is loaded at function entry, stored into the global,
// and the function props are updated to point at the global.
void FinishClipPlane(HLModule &HLM, std::vector<Function *> &clipPlaneFuncList,
                     std::unordered_map<Value *, DebugLoc> &debugInfoMap,
                     clang::CodeGen::CodeGenModule &CGM) {
  bool bDebugInfo = CGM.getCodeGenOpts().getDebugInfo() ==
                    clang::CodeGenOptions::FullDebugInfo;
  Module &M = *HLM.GetModule();
  for (Function *F : clipPlaneFuncList) {
    DxilFunctionProps &props = HLM.GetDxilFunctionProps(F);
    // Insert the load/store pairs at the top of the entry block.
    IRBuilder<> Builder(F->getEntryBlock().getFirstInsertionPt());
    for (unsigned i = 0; i < DXIL::kNumClipPlanes; i++) {
      Value *clipPlane = props.ShaderProps.VS.clipPlanes[i];
      if (!clipPlane)
        continue;
      if (bDebugInfo) {
        // Reattach the source location recorded for this clip plane so the
        // new instructions carry useful debug info.
        Builder.SetCurrentDebugLocation(debugInfoMap[clipPlane]);
      }
      llvm::Type *Ty = clipPlane->getType()->getPointerElementType();
      // Constant *zeroInit = ConstantFP::get(Ty, 0);
      GlobalVariable *GV = new llvm::GlobalVariable(
          M, Ty, /*IsConstant*/ false, // constant false to store.
          llvm::GlobalValue::ExternalLinkage,
          /*InitVal*/ nullptr, Twine("SV_ClipPlane") + Twine(i));
      // Copy the current clip-plane value into the new global.
      Value *initVal = Builder.CreateLoad(clipPlane);
      Builder.CreateStore(initVal, GV);
      props.ShaderProps.VS.clipPlanes[i] = GV;
    }
  }
}
  2265. } // namespace CGHLSLMSHelper
  2266. namespace {
// Apply -exports processing for library targets:
// 1) registers HS patch-constant functions as exported (they must survive
//    alongside their hull shaders),
// 2) runs the export map's Begin/Process/EndProcessing protocol over all
//    defined non-intrinsic functions, reporting name collisions and exports
//    that matched nothing,
// 3) applies renames: the original function takes one export name and each
//    additional name gets a clone (with cloned DxilFunctionProps for
//    entries).
void LowerExportFunctions(HLModule &HLM, clang::CodeGen::CodeGenModule &CGM,
                          dxilutil::ExportMap &exportMap,
                          StringMap<EntryFunctionInfo> &entryFunctionMap) {
  bool bIsLib = HLM.GetShaderModel()->IsLib();
  Module &M = *HLM.GetModule();
  if (bIsLib && !exportMap.empty()) {
    for (auto &it : entryFunctionMap) {
      if (HLM.HasDxilFunctionProps(it.second.Func)) {
        const DxilFunctionProps &props =
            HLM.GetDxilFunctionProps(it.second.Func);
        // Hull shader exports implicitly export their patch-constant func.
        if (props.IsHS())
          exportMap.RegisterExportedFunction(
              props.ShaderProps.HS.patchConstantFunc);
      }
    }
  }
  if (bIsLib && !exportMap.empty()) {
    exportMap.BeginProcessing();
    for (Function &f : M.functions()) {
      // Only defined, non-intrinsic, non-HL-op functions are candidates.
      if (f.isDeclaration() || f.isIntrinsic() ||
          GetHLOpcodeGroup(&f) != HLOpcodeGroup::NotHL)
        continue;
      exportMap.ProcessFunction(&f, true);
    }
    // TODO: add subobject export names here.
    if (!exportMap.EndProcessing()) {
      // Report every colliding export name...
      for (auto &name : exportMap.GetNameCollisions()) {
        clang::DiagnosticsEngine &Diags = CGM.getDiags();
        unsigned DiagID = Diags.getCustomDiagID(
            clang::DiagnosticsEngine::Error,
            "Export name collides with another export: %0");
        std::string escaped;
        llvm::raw_string_ostream os(escaped);
        dxilutil::PrintEscapedString(name, os);
        Diags.Report(DiagID) << os.str();
      }
      // ...and every export that matched no function.
      for (auto &name : exportMap.GetUnusedExports()) {
        clang::DiagnosticsEngine &Diags = CGM.getDiags();
        unsigned DiagID =
            Diags.getCustomDiagID(clang::DiagnosticsEngine::Error,
                                  "Could not find target for export: %0");
        std::string escaped;
        llvm::raw_string_ostream os(escaped);
        dxilutil::PrintEscapedString(name, os);
        Diags.Report(DiagID) << os.str();
      }
    }
  }
  for (auto &it : exportMap.GetFunctionRenames()) {
    Function *F = it.first;
    auto &renames = it.second;
    if (renames.empty())
      continue;
    // Rename the original, if necessary, then clone the rest
    if (renames.find(F->getName()) == renames.end())
      F->setName(*renames.begin());
    for (auto &itName : renames) {
      if (F->getName() != itName) {
        Function *pClone = CloneFunction(F, itName, &M, HLM.GetTypeSystem(),
                                         HLM.GetTypeSystem());
        // add DxilFunctionProps if entry
        if (HLM.HasDxilFunctionProps(F)) {
          DxilFunctionProps &props = HLM.GetDxilFunctionProps(F);
          auto newProps = llvm::make_unique<DxilFunctionProps>(props);
          HLM.AddDxilFunctionProps(pClone, newProps);
        }
      }
    }
  }
}
  2337. void CheckResourceParameters(HLModule &HLM,
  2338. clang::CodeGen::CodeGenModule &CGM) {
  2339. Module &M = *HLM.GetModule();
  2340. for (Function &f : M.functions()) {
  2341. // Skip llvm intrinsics, non-external linkage, entry/patch constant func,
  2342. // and HL intrinsics
  2343. if (!f.isIntrinsic() &&
  2344. f.getLinkage() == GlobalValue::LinkageTypes::ExternalLinkage &&
  2345. !HLM.HasDxilFunctionProps(&f) && !HLM.IsPatchConstantShader(&f) &&
  2346. GetHLOpcodeGroup(&f) == HLOpcodeGroup::NotHL) {
  2347. // Verify no resources in param/return types
  2348. if (dxilutil::ContainsHLSLObjectType(f.getReturnType())) {
  2349. ReportDisallowedTypeInExportParam(CGM, f.getName());
  2350. continue;
  2351. }
  2352. for (auto &Arg : f.args()) {
  2353. if (dxilutil::ContainsHLSLObjectType(Arg.getType())) {
  2354. ReportDisallowedTypeInExportParam(CGM, f.getName());
  2355. break;
  2356. }
  2357. }
  2358. }
  2359. }
  2360. }
  2361. } // namespace
  2362. namespace CGHLSLMSHelper {
// Fix up function linkage before lowering:
// - Non-library profiles: only the entry, the patch-constant function, and
//   declarations stay external; everything else becomes internal. An
//   unresolved external user function is a hard error (aborts early).
// - Used, defined, non-noinline functions are marked AlwaysInline.
// - Export renames/clones are applied (LowerExportFunctions); with
//   ExportShadersOnly, remaining non-shader user functions are internalized.
// - HS patch-constant functions of external hull shaders are forced
//   external.
// - Library targets (except the offline-link minor version) also reject
//   resource types in exported signatures.
void UpdateLinkage(HLModule &HLM, clang::CodeGen::CodeGenModule &CGM,
                   dxilutil::ExportMap &exportMap,
                   StringMap<EntryFunctionInfo> &entryFunctionMap,
                   StringMap<PatchConstantInfo> &patchConstantFunctionMap) {
  bool bIsLib = HLM.GetShaderModel()->IsLib();
  Module &M = *HLM.GetModule();
  // Pin entry point and constant buffers, mark everything else internal.
  for (Function &f : M.functions()) {
    if (!bIsLib) {
      if (&f == HLM.GetEntryFunction() ||
          IsPatchConstantFunction(&f, patchConstantFunctionMap) ||
          f.isDeclaration()) {
        // A plain declaration (not an llvm or HL intrinsic) can never be
        // resolved in a non-library profile: report and bail out of the
        // whole linkage update.
        if (f.isDeclaration() && !f.isIntrinsic() &&
            GetHLOpcodeGroup(&f) == HLOpcodeGroup::NotHL) {
          clang::DiagnosticsEngine &Diags = CGM.getDiags();
          unsigned DiagID = Diags.getCustomDiagID(
              clang::DiagnosticsEngine::Error,
              "External function used in non-library profile: %0");
          std::string escaped;
          llvm::raw_string_ostream os(escaped);
          dxilutil::PrintEscapedString(f.getName(), os);
          Diags.Report(DiagID) << os.str();
          return;
        }
        f.setLinkage(GlobalValue::LinkageTypes::ExternalLinkage);
      } else {
        f.setLinkage(GlobalValue::LinkageTypes::InternalLinkage);
      }
    }
    // Skip no inline functions.
    if (f.hasFnAttribute(llvm::Attribute::NoInline))
      continue;
    // Always inline for used functions.
    if (!f.user_empty() && !f.isDeclaration())
      f.addFnAttr(llvm::Attribute::AlwaysInline);
  }
  LowerExportFunctions(HLM, CGM, exportMap, entryFunctionMap);
  if (CGM.getCodeGenOpts().ExportShadersOnly) {
    for (Function &f : M.functions()) {
      // Skip declarations, intrinsics, shaders, and non-external linkage
      if (f.isDeclaration() || f.isIntrinsic() ||
          GetHLOpcodeGroup(&f) != HLOpcodeGroup::NotHL ||
          HLM.HasDxilFunctionProps(&f) || HLM.IsPatchConstantShader(&f) ||
          f.getLinkage() != GlobalValue::LinkageTypes::ExternalLinkage)
        continue;
      // Mark non-shader user functions as InternalLinkage
      f.setLinkage(GlobalValue::LinkageTypes::InternalLinkage);
    }
  }
  // Now iterate hull shaders and make sure their corresponding patch constant
  // functions are marked ExternalLinkage:
  for (Function &f : M.functions()) {
    if (f.isDeclaration() || f.isIntrinsic() ||
        GetHLOpcodeGroup(&f) != HLOpcodeGroup::NotHL ||
        f.getLinkage() != GlobalValue::LinkageTypes::ExternalLinkage ||
        !HLM.HasDxilFunctionProps(&f))
      continue;
    DxilFunctionProps &props = HLM.GetDxilFunctionProps(&f);
    if (!props.IsHS())
      continue;
    Function *PCFunc = props.ShaderProps.HS.patchConstantFunc;
    if (PCFunc->getLinkage() != GlobalValue::LinkageTypes::ExternalLinkage)
      PCFunc->setLinkage(GlobalValue::LinkageTypes::ExternalLinkage);
  }
  // Disallow resource arguments in (non-entry) function exports
  // unless offline linking target.
  if (bIsLib &&
      HLM.GetShaderModel()->GetMinor() != ShaderModel::kOfflineMinor) {
    CheckResourceParameters(HLM, CGM);
  }
}
// Final per-entry fixups.
// Non-library targets: registers Entry.Func as the module entry, optionally
// creates writable statics for /Gec back-compat, and binds the HS
// patch-constant function.
// Library targets: clones each non-raytracing entry under its export name
// and binds any [patchconstantfunc] attribute recorded for it.
void FinishEntries(
    HLModule &HLM, const EntryFunctionInfo &Entry,
    clang::CodeGen::CodeGenModule &CGM,
    StringMap<EntryFunctionInfo> &entryFunctionMap,
    std::unordered_map<Function *, const clang::HLSLPatchConstantFuncAttr *>
        &HSEntryPatchConstantFuncAttr,
    StringMap<PatchConstantInfo> &patchConstantFunctionMap,
    std::unordered_map<Function *, std::unique_ptr<DxilFunctionProps>>
        &patchConstantFunctionPropsMap) {
  bool bIsLib = HLM.GetShaderModel()->IsLib();
  // Library don't have entry.
  if (!bIsLib) {
    SetEntryFunction(HLM, Entry.Func, CGM);
    // If at this point we haven't determined the entry function it's an error.
    if (HLM.GetEntryFunction() == nullptr) {
      assert(CGM.getDiags().hasErrorOccurred() &&
             "else SetEntryFunction should have reported this condition");
      return;
    }
    // In back-compat mode (with /Gec flag) create a static global for each
    // const global to allow writing to it.
    // TODO: Verfiy the behavior of static globals in hull shader
    if (CGM.getLangOpts().EnableDX9CompatMode &&
        CGM.getLangOpts().HLSLVersion <= 2016)
      CreateWriteEnabledStaticGlobals(HLM.GetModule(), HLM.GetEntryFunction());
    if (HLM.GetShaderModel()->IsHS()) {
      SetPatchConstantFunction(Entry, HSEntryPatchConstantFuncAttr,
                               patchConstantFunctionMap,
                               patchConstantFunctionPropsMap, HLM, CGM);
    }
  } else {
    for (auto &it : entryFunctionMap) {
      // skip clone if RT entry
      if (HLM.GetDxilFunctionProps(it.second.Func).IsRay())
        continue;
      // TODO: change flattened function names to dx.entry.<name>:
      // std::string entryName = (Twine(dxilutil::EntryPrefix) +
      // it.getKey()).str();
      CloneShaderEntry(it.second.Func, it.getKey(), HLM);
      // Bind the patch-constant function recorded via attribute, if any.
      auto AttrIter = HSEntryPatchConstantFuncAttr.find(it.second.Func);
      if (AttrIter != HSEntryPatchConstantFuncAttr.end()) {
        SetPatchConstantFunctionWithAttr(
            it.second, AttrIter->second, patchConstantFunctionMap,
            patchConstantFunctionPropsMap, HLM, CGM);
      }
    }
  }
}
  2482. } // namespace CGHLSLMSHelper
  2483. namespace CGHLSLMSHelper {
  2484. void FinishIntrinsics(
  2485. HLModule &HLM, std::vector<std::pair<Function *, unsigned>> &intrinsicMap,
  2486. DxilObjectProperties &objectProperties) {
  2487. // Lower getResourceHeap before AddOpcodeParamForIntrinsics to skip automatic
  2488. // lower for getResourceFromHeap.
  2489. LowerGetResourceFromHeap(HLM, intrinsicMap);
  2490. // Lower bitcast use of CBV into cbSubscript.
  2491. LowerDynamicCBVUseToHandle(HLM, objectProperties);
  2492. // translate opcode into parameter for intrinsic functions
  2493. // Do this before CloneShaderEntry and TranslateRayQueryConstructor to avoid
  2494. // update valToResPropertiesMap for cloned inst.
  2495. AddOpcodeParamForIntrinsics(HLM, intrinsicMap, objectProperties);
  2496. }
  2497. // Add the dx.break temporary intrinsic and create Call Instructions
  2498. // to it for each branch that requires the artificial conditional.
  2499. void AddDxBreak(Module &M,
  2500. const SmallVector<llvm::BranchInst *, 16> &DxBreaks) {
  2501. if (DxBreaks.empty())
  2502. return;
  2503. // Collect functions that make use of any wave operations
  2504. // Only they will need the dx.break condition added
  2505. SmallPtrSet<Function *, 16> WaveUsers;
  2506. for (Function &F : M.functions()) {
  2507. HLOpcodeGroup opgroup = hlsl::GetHLOpcodeGroup(&F);
  2508. if (F.isDeclaration() && IsHLWaveSensitive(&F) &&
  2509. (opgroup == HLOpcodeGroup::HLIntrinsic ||
  2510. opgroup == HLOpcodeGroup::HLExtIntrinsic)) {
  2511. for (User *U : F.users()) {
  2512. CallInst *CI = cast<CallInst>(U);
  2513. WaveUsers.insert(CI->getParent()->getParent());
  2514. }
  2515. }
  2516. }
  2517. // If there are no wave users, not even the function declaration is needed
  2518. if (WaveUsers.empty())
  2519. return;
  2520. // Create the dx.break function
  2521. FunctionType *FT =
  2522. llvm::FunctionType::get(llvm::Type::getInt1Ty(M.getContext()), false);
  2523. Function *func =
  2524. cast<llvm::Function>(M.getOrInsertFunction(DXIL::kDxBreakFuncName, FT));
  2525. func->addFnAttr(Attribute::AttrKind::NoUnwind);
  2526. // For all break branches recorded previously, if the function they are in
  2527. // makes any use of a wave op, it may need to be artificially conditional.
  2528. // Make it so now. The CleanupDxBreak pass will remove those that aren't
  2529. // needed when more is known.
  2530. for (llvm::BranchInst *BI : DxBreaks) {
  2531. if (WaveUsers.count(BI->getParent()->getParent())) {
  2532. CallInst *Call = CallInst::Create(FT, func, ArrayRef<Value *>(), "", BI);
  2533. BI->setCondition(Call);
  2534. if (!BI->getMetadata(DXIL::kDxBreakMDName)) {
  2535. BI->setMetadata(DXIL::kDxBreakMDName,
  2536. llvm::MDNode::get(BI->getContext(), {}));
  2537. }
  2538. }
  2539. }
  2540. }
  2541. } // namespace CGHLSLMSHelper
  2542. namespace CGHLSLMSHelper {
  2543. ScopeInfo::ScopeInfo(Function *F) : maxRetLevel(0), bAllReturnsInIf(true) {
  2544. Scope FuncScope;
  2545. FuncScope.kind = Scope::ScopeKind::FunctionScope;
  2546. FuncScope.EndScopeBB = nullptr;
  2547. FuncScope.bWholeScopeReturned = false;
  2548. // Make it 0 to avoid check when get parent.
  2549. // All loop on scopes should check kind != FunctionScope.
  2550. FuncScope.parentScopeIndex = 0;
  2551. scopes.emplace_back(FuncScope);
  2552. scopeStack.emplace_back(0);
  2553. }
  2554. // When all returns is inside if which is not nested, the flow is still
  2555. // structurized even there're more than one return.
  2556. bool ScopeInfo::CanSkipStructurize() {
  2557. return bAllReturnsInIf && maxRetLevel < 2;
  2558. }
  2559. void ScopeInfo::AddScope(Scope::ScopeKind k, BasicBlock *endScopeBB) {
  2560. Scope Scope;
  2561. Scope.kind = k;
  2562. Scope.bWholeScopeReturned = false;
  2563. Scope.EndScopeBB = endScopeBB;
  2564. Scope.parentScopeIndex = scopeStack.back();
  2565. scopeStack.emplace_back(scopes.size());
  2566. scopes.emplace_back(Scope);
  2567. }
// Open an if-scope; endIfBB is the join block after the if statement.
void ScopeInfo::AddIf(BasicBlock *endIfBB) {
  AddScope(Scope::ScopeKind::IfScope, endIfBB);
}
// Open a switch-scope; endSwitch is the block following the switch.
void ScopeInfo::AddSwitch(BasicBlock *endSwitch) {
  AddScope(Scope::ScopeKind::SwitchScope, endSwitch);
}
// Open a loop-scope; loopContinue is the continue target and endLoop the
// block following the loop.
void ScopeInfo::AddLoop(BasicBlock *loopContinue, BasicBlock *endLoop) {
  AddScope(Scope::ScopeKind::LoopScope, endLoop);
  // AddScope just appended the new scope; record its continue block.
  scopes.back().loopContinueBB = loopContinue;
}
// Record a return located in bbWithRet as a ReturnScope pseudo-scope.
// Its parent becomes the innermost enclosing loop/switch when one exists
// (there the return can be lowered to a break); otherwise the current scope.
// Also tracks the deepest return nesting and whether any return sits inside
// a loop/switch — both consulted by CanSkipStructurize.
void ScopeInfo::AddRet(BasicBlock *bbWithRet) {
  Scope RetScope;
  RetScope.kind = Scope::ScopeKind::ReturnScope;
  RetScope.EndScopeBB = bbWithRet;
  RetScope.parentScopeIndex = scopeStack.back();
  // - 1 for function scope which is at scopeStack[0].
  unsigned retLevel = scopeStack.size() - 1;
  // save max nested level for ret.
  maxRetLevel = std::max<unsigned>(maxRetLevel, retLevel);
  bool bGotLoopOrSwitch = false;
  // Walk from the innermost scope outward looking for a loop or switch.
  for (auto it = scopeStack.rbegin(); it != scopeStack.rend(); it++) {
    unsigned idx = *it;
    Scope &S = scopes[idx];
    switch (S.kind) {
    default:
      break;
    case Scope::ScopeKind::LoopScope:
    case Scope::ScopeKind::SwitchScope:
      bGotLoopOrSwitch = true;
      // For return inside loop and switch, can just break.
      RetScope.parentScopeIndex = idx;
      break;
    }
    if (bGotLoopOrSwitch)
      break;
  }
  bAllReturnsInIf &= !bGotLoopOrSwitch;
  // return finish current scope.
  RetScope.bWholeScopeReturned = true;
  // save retScope to rets.
  rets.emplace_back(scopes.size());
  scopes.emplace_back(RetScope);
  // Don't need to put retScope to stack since it cannot nested other scopes.
}
  2612. void ScopeInfo::EndScope(bool bScopeFinishedWithRet) {
  2613. unsigned idx = scopeStack.pop_back_val();
  2614. Scope &Scope = GetScope(idx);
  2615. // If whole stmt is finished and end scope bb has not used(nothing branch to
  2616. // it). Then the whole scope is returned.
  2617. Scope.bWholeScopeReturned =
  2618. bScopeFinishedWithRet && Scope.EndScopeBB->user_empty();
  2619. }
// Return the scope record at index i. Indices are stable: scopes are only
// ever appended, never removed.
Scope &ScopeInfo::GetScope(unsigned i) { return scopes[i]; }
  2621. void ScopeInfo::LegalizeWholeReturnedScope() {
  2622. // legalize scopes which whole scope returned.
  2623. // When whole scope is returned, the endScopeBB will be deleted in codeGen.
  2624. // Here update it to parent scope's endScope.
  2625. // Since the scopes are in order, so it will automatic update to the final
  2626. // target. A->B->C will just get A->C.
  2627. for (auto &S : scopes) {
  2628. if (S.bWholeScopeReturned && S.kind != Scope::ScopeKind::ReturnScope) {
  2629. S.EndScopeBB = scopes[S.parentScopeIndex].EndScopeBB;
  2630. }
  2631. }
  2632. }
  2633. } // namespace CGHLSLMSHelper
  2634. namespace {
  2635. void updateEndScope(
  2636. ScopeInfo &ScopeInfo,
  2637. DenseMap<BasicBlock *, SmallVector<unsigned, 2>> &EndBBToScopeIndexMap,
  2638. BasicBlock *oldEndScope, BasicBlock *newEndScope) {
  2639. auto it = EndBBToScopeIndexMap.find(oldEndScope);
  2640. DXASSERT(it != EndBBToScopeIndexMap.end(),
  2641. "fail to find endScopeBB in EndBBToScopeIndexMap");
  2642. SmallVector<unsigned, 2> &scopeList = it->second;
  2643. // Don't need to update when not share endBB with other scope.
  2644. if (scopeList.size() < 2)
  2645. return;
  2646. for (unsigned i : scopeList) {
  2647. Scope &S = ScopeInfo.GetScope(i);
  2648. // Don't update return endBB, because that is the Block has return branch.
  2649. if (S.kind != Scope::ScopeKind::ReturnScope)
  2650. S.EndScopeBB = newEndScope;
  2651. }
  2652. EndBBToScopeIndexMap[newEndScope] = scopeList;
  2653. }
  2654. // Init ret value with undef to make sure it will not live thru loop inside
  2655. // callers.
  2656. // Because structurize return, the flow is controled by bIsReturned. The
  2657. // semantic is the same as multiple return, but without konwledge of
  2658. // bIsReturend, some path for structrized flow will have ret value not
  2659. // initialized.
  2660. // When function is called inside loop, ret value will live across the loop
  2661. // after inline.
  2662. void InitRetValue(BasicBlock *exitBB) {
  2663. Value *RetValPtr = nullptr;
  2664. if (ReturnInst *RI = dyn_cast<ReturnInst>(exitBB->getTerminator())) {
  2665. if (Value *RetV = RI->getReturnValue()) {
  2666. if (LoadInst *LI = dyn_cast<LoadInst>(RetV)) {
  2667. RetValPtr = LI->getPointerOperand();
  2668. }
  2669. }
  2670. }
  2671. if (!RetValPtr)
  2672. return;
  2673. if (AllocaInst *RetVAlloc = dyn_cast<AllocaInst>(RetValPtr)) {
  2674. IRBuilder<> B(RetVAlloc->getNextNode());
  2675. Type *Ty = RetVAlloc->getAllocatedType();
  2676. Value *Init = UndefValue::get(Ty);
  2677. if (Ty->isAggregateType()) {
  2678. // TODO: support aggreagate type and out parameters.
  2679. // Skip it here will cause undef on phi which the incoming path should
  2680. // never hit.
  2681. } else {
  2682. B.CreateStore(Init, RetVAlloc);
  2683. }
  2684. }
  2685. }
  2686. // For functions has multiple returns like
  2687. // float foo(float a, float b, float c) {
  2688. // float r = c;
  2689. // if (a > 0) {
  2690. // if (b > 0) {
  2691. // return -1;
  2692. // }
  2693. // ***
  2694. // }
  2695. // ...
  2696. // return r;
  2697. // }
  2698. // transform into
  2699. // float foo(float a, float b, float c) {
  2700. // bool bRet = false;
  2701. // float retV;
  2702. // float r = c;
  2703. // if (a > 0) {
  2704. // if (b > 0) {
  2705. // bRet = true;
  2706. // retV = -1;
  2707. // }
  2708. // if (!bRet) {
  2709. // ***
  2710. // }
  2711. // }
  2712. // if (!bRet) {
  2713. // ...
  2714. // retV = r;
  2715. // }
  2716. // return vRet;
  2717. // }
// Structurize the returns of F that occur inside nested control flow (see the
// pseudo-code sketch in the comment above): each early return stores true to a
// stack flag "bReturned", and every enclosing scope's exit is guarded on that
// flag so control funnels to a single function exit block.
//
// F                 - function to rewrite; must have scope info in ScopeInfo.
// ScopeInfo         - scope tree recorded during codegen for F.
// bWaveEnabledStage - when true, loop exits are emitted as artificial
//                     conditional branches so later wave-sensitive passes can
//                     recognize them (dx.break style).
// DxBreaks          - out: collects those artificial conditional branches.
void StructurizeMultiRetFunction(Function *F, ScopeInfo &ScopeInfo,
                                 bool bWaveEnabledStage,
                                 SmallVector<BranchInst *, 16> &DxBreaks) {
  // Nothing to do when returns are already trivially structured.
  if (ScopeInfo.CanSkipStructurize())
    return;
  // Get bbWithRets.
  auto &rets = ScopeInfo.GetRetScopes();

  IRBuilder<> B(F->getEntryBlock().begin());
  Scope &FunctionScope = ScopeInfo.GetScope(0);

  Type *boolTy = Type::getInt1Ty(F->getContext());
  Constant *cTrue = ConstantInt::get(boolTy, 1);
  Constant *cFalse = ConstantInt::get(boolTy, 0);
  // bool bIsReturned = false;
  AllocaInst *bIsReturned = B.CreateAlloca(boolTy, nullptr, "bReturned");
  B.CreateStore(cFalse, bIsReturned);

  // The shared function exit is the successor of the first return scope's
  // end block; make it the function scope's end block as well.
  Scope &RetScope = ScopeInfo.GetScope(rets[0]);
  BasicBlock *exitBB = RetScope.EndScopeBB->getTerminator()->getSuccessor(0);
  FunctionScope.EndScopeBB = exitBB;

  // Find alloca for return val and init it to avoid undef after guard code
  // with bIsReturned.
  InitRetValue(exitBB);

  ScopeInfo.LegalizeWholeReturnedScope();

  // Map from endScopeBB to scope index.
  // When 2 scopes share same endScopeBB, need to update endScopeBB after
  // structurize.
  DenseMap<BasicBlock *, SmallVector<unsigned, 2>> EndBBToScopeIndexMap;
  auto &scopes = ScopeInfo.GetScopes();
  for (unsigned i = 0; i < scopes.size(); i++) {
    Scope &S = scopes[i];
    EndBBToScopeIndexMap[S.EndScopeBB].emplace_back(i);
  }

  // Walk each return scope upward to the function scope, guarding every
  // enclosing scope exactly once (guardedSet dedupes shared ancestors).
  DenseSet<unsigned> guardedSet;
  for (auto it = rets.begin(); it != rets.end(); it++) {
    unsigned scopeIndex = *it;
    Scope *pCurScope = &ScopeInfo.GetScope(scopeIndex);
    Scope *pRetParentScope = &ScopeInfo.GetScope(pCurScope->parentScopeIndex);
    // skip ret not in nested control flow.
    if (pRetParentScope->kind == Scope::ScopeKind::FunctionScope)
      continue;

    do {
      BasicBlock *BB = pCurScope->EndScopeBB;
      // exit when scope is processed.
      if (guardedSet.count(scopeIndex))
        break;
      guardedSet.insert(scopeIndex);

      Scope *pParentScope = &ScopeInfo.GetScope(pCurScope->parentScopeIndex);
      BasicBlock *EndBB = pParentScope->EndScopeBB;

      // When whole scope returned, just branch to endScope of parent.
      if (pCurScope->bWholeScopeReturned) {
        // For ret, just branch to endScope of parent.
        if (pCurScope->kind == Scope::ScopeKind::ReturnScope) {
          BasicBlock *retBB = pCurScope->EndScopeBB;
          TerminatorInst *retBr = retBB->getTerminator();
          IRBuilder<> B(retBr);
          // Set bReturned to true.
          B.CreateStore(cTrue, bIsReturned);

          if (bWaveEnabledStage &&
              pParentScope->kind == Scope::ScopeKind::LoopScope) {
            // Emit an always-true conditional branch (instead of an
            // unconditional one) so wave-aware passes can later find this
            // loop exit; record it in DxBreaks.
            BranchInst *BI =
                B.CreateCondBr(cTrue, EndBB, pParentScope->loopContinueBB);
            DxBreaks.emplace_back(BI);
            retBr->eraseFromParent();
          } else {
            // Update branch target.
            retBr->setSuccessor(0, EndBB);
          }
        }
        // For other scope, do nothing. Since whole scope is returned.
        // Just flow naturally to parent scope.
      } else {
        // When only part scope returned.
        // Use bIsReturned to guard to part which not returned.
        switch (pParentScope->kind) {
        case Scope::ScopeKind::ReturnScope:
          DXASSERT(0, "return scope must get whole scope returned.");
          break;
        case Scope::ScopeKind::FunctionScope:
        case Scope::ScopeKind::IfScope: {
          // inside if.
          // if (!bReturned) {
          //   rest of if or else.
          // }
          BasicBlock *CmpBB = BasicBlock::Create(BB->getContext(),
                                                 "bReturned.cmp.false", F, BB);
          // Make BB preds go to cmpBB.
          BB->replaceAllUsesWith(CmpBB);
          // Update endscopeBB to CmpBB for scopes which has BB as endscope.
          updateEndScope(ScopeInfo, EndBBToScopeIndexMap, BB, CmpBB);
          IRBuilder<> B(CmpBB);
          Value *isRetured = B.CreateLoad(bIsReturned, "bReturned.load");
          // NOTE(review): despite the local name, this compares
          // bReturned != false, i.e. it is true when we HAVE returned; the
          // returned path branches straight to EndBB, the not-returned path
          // falls into BB (the rest of the scope). Behavior is correct.
          Value *notReturned =
              B.CreateICmpNE(isRetured, cFalse, "bReturned.not");
          B.CreateCondBr(notReturned, EndBB, BB);
        } break;
        default: {
          // inside switch/loop
          // if (bReturned) {
          //   br endOfScope.
          // }
          BasicBlock *CmpBB =
              BasicBlock::Create(BB->getContext(), "bReturned.cmp.true", F, BB);
          BasicBlock *BreakBB =
              BasicBlock::Create(BB->getContext(), "bReturned.break", F, BB);
          BB->replaceAllUsesWith(CmpBB);
          // Update endscopeBB to CmpBB for scopes which has BB as endscope.
          updateEndScope(ScopeInfo, EndBBToScopeIndexMap, BB, CmpBB);
          IRBuilder<> B(CmpBB);
          Value *isReturned = B.CreateLoad(bIsReturned, "bReturned.load");
          isReturned = B.CreateICmpEQ(isReturned, cTrue, "bReturned.true");
          B.CreateCondBr(isReturned, BreakBB, BB);

          B.SetInsertPoint(BreakBB);
          if (bWaveEnabledStage &&
              pParentScope->kind == Scope::ScopeKind::LoopScope) {
            // Same wave-visible break pattern as above.
            BranchInst *BI =
                B.CreateCondBr(cTrue, EndBB, pParentScope->loopContinueBB);
            DxBreaks.emplace_back(BI);
          } else {
            B.CreateBr(EndBB);
          }
        } break;
        }
      }

      scopeIndex = pCurScope->parentScopeIndex;
      pCurScope = &ScopeInfo.GetScope(scopeIndex);
      // done when reach function scope.
    } while (pCurScope->kind != Scope::ScopeKind::FunctionScope);
  }
}
  2846. } // namespace
  2847. namespace CGHLSLMSHelper {
  2848. void StructurizeMultiRet(Module &M, clang::CodeGen::CodeGenModule &CGM,
  2849. DenseMap<Function *, ScopeInfo> &ScopeMap,
  2850. bool bWaveEnabledStage,
  2851. SmallVector<BranchInst *, 16> &DxBreaks) {
  2852. if (CGM.getCodeGenOpts().HLSLExtensionsCodegen) {
  2853. if (!CGM.getCodeGenOpts().HLSLExtensionsCodegen->IsOptionEnabled(
  2854. "structurize-returns"))
  2855. return;
  2856. } else {
  2857. if (!CGM.getCodeGenOpts().HLSLOptimizationToggles.count(
  2858. "structurize-returns") ||
  2859. !CGM.getCodeGenOpts()
  2860. .HLSLOptimizationToggles.find("structurize-returns")
  2861. ->second)
  2862. return;
  2863. }
  2864. for (Function &F : M) {
  2865. if (F.isDeclaration())
  2866. continue;
  2867. auto it = ScopeMap.find(&F);
  2868. if (it == ScopeMap.end())
  2869. continue;
  2870. StructurizeMultiRetFunction(&F, it->second, bWaveEnabledStage, DxBreaks);
  2871. }
  2872. }
  2873. bool DxilObjectProperties::AddResource(llvm::Value *V, const hlsl::DxilResourceProperties &RP) {
  2874. if (RP.isValid()) {
  2875. DXASSERT(!GetResource(V).isValid() || GetResource(V) == RP, "otherwise, property conflict");
  2876. resMap[V] = RP;
  2877. return true;
  2878. }
  2879. return false;
  2880. }
  2881. bool DxilObjectProperties::IsResource(llvm::Value *V) {
  2882. return resMap.count(V) != 0;
  2883. }
  2884. hlsl::DxilResourceProperties DxilObjectProperties::GetResource(llvm::Value *V) {
  2885. auto it = resMap.find(V);
  2886. if (it != resMap.end())
  2887. return it->second;
  2888. return DxilResourceProperties();
  2889. }
  2890. } // namespace CGHLSLMSHelper