CGHLSLMSFinishCodeGen.cpp 109 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
727782779278027812782278327842785278627872788278927902791279227932794279527962797279827992800280128022803280428052806280728082809281028112812281328142815281628172818281928202821282228232824282528262827282828292830283128322833283428352836283728382839284028412842284328442845284628472848284928502851285228532854285528562857285828592860286128622863286428652866286728682869287028712872287328742875287628772878287928802881288228832884288528862887288828892890289128922893289428952896289728982899290029012902290329042905290629072908290929102911291229132914291529162917291829192920292129222923292429252926292729282929293029312932293329342935293629372938293929402941294229432944294529462947294829492950295129522953295429552956295729582959296029612962296329642965296629672968296929702971297229732974297529762977297829792980298129822983298429852986298729882989299029912992299329942995299629972998299930003001300230033004300530063007300830093010301130123013301430153016301730183019302030213022302330243025302630273028302930303031303230333034303530363037303830393040
  1. ///////////////////////////////////////////////////////////////////////////////
  2. // //
  3. // CGHLSLMSFinishCodeGen.cpp //
  4. // Copyright (C) Microsoft Corporation. All rights reserved. //
  5. // This file is distributed under the University of Illinois Open Source //
  6. // License. See LICENSE.TXT for details. //
  7. // //
  8. // Implement FinishCodeGen. //
  9. // //
  10. ///////////////////////////////////////////////////////////////////////////////
  11. #include "llvm/IR/Function.h"
  12. #include "llvm/IR/IRBuilder.h"
  13. #include "llvm/IR/Module.h"
  14. #include "llvm/IR/Type.h"
  15. #include "llvm/IR/Instructions.h"
  16. #include "llvm/IR/InstIterator.h"
  17. #include "llvm/IR/GetElementPtrTypeIterator.h"
  18. #include "llvm/ADT/SmallVector.h"
  19. #include "llvm/ADT/StringRef.h"
  20. #include "llvm/Analysis/DxilValueCache.h"
  21. #include "llvm/Transforms/Utils/ValueMapper.h"
  22. #include "llvm/Transforms/Utils/Cloning.h"
  23. #include "llvm/IR/CFG.h"
  24. #include "CodeGenModule.h"
  25. #include "clang/Frontend/CodeGenOptions.h"
  26. #include "clang/Basic/LangOptions.h"
  27. #include "clang/Parse/ParseHLSL.h" // root sig would be in Parser if part of lang
  28. #include "dxc/HLSL/HLModule.h"
  29. #include "dxc/HLSL/HLSLExtensionsCodegenHelper.h"
  30. #include "dxc/DXIL/DxilOperations.h"
  31. #include "dxc/HlslIntrinsicOp.h"
  32. #include "dxc/DXIL/DxilUtil.h"
  33. #include "dxc/HLSL/DxilExportMap.h"
  34. #include "dxc/DXIL/DxilResourceProperties.h"
  35. #include "dxc/DXIL/DxilTypeSystem.h"
  36. #include "dxc/DXIL/DxilConstants.h"
  37. #include "dxc/DxilRootSignature/DxilRootSignature.h"
  38. #include "dxc/HLSL/DxilGenerationPass.h"
  39. #include "dxc/HLSL/HLMatrixType.h"
  40. #include <vector>
  41. #include <memory>
  42. #include <fenv.h>
  43. #include "CGHLSLMSHelper.h"
  44. using namespace llvm;
  45. using namespace hlsl;
  46. using namespace CGHLSLMSHelper;
  47. namespace {
  48. Value *CreateHandleFromResPtr(Value *ResPtr, HLModule &HLM,
  49. llvm::Type *HandleTy, IRBuilder<> &Builder) {
  50. Module &M = *HLM.GetModule();
  51. // Load to make sure resource only have Ld/St use so mem2reg could remove
  52. // temp resource.
  53. Value *ldObj = Builder.CreateLoad(ResPtr);
  54. Value *args[] = {ldObj};
  55. CallInst *Handle = HLM.EmitHLOperationCall(
  56. Builder, HLOpcodeGroup::HLCreateHandle, 0, HandleTy, args, M);
  57. return Handle;
  58. }
  59. Value *CreateAnnotateHandle(HLModule &HLM, Value *Handle,
  60. DxilResourceProperties &RP, llvm::Type *ResTy,
  61. IRBuilder<> &Builder) {
  62. Constant *RPConstant = resource_helper::getAsConstant(
  63. RP, HLM.GetOP()->GetResourcePropertiesType(), *HLM.GetShaderModel());
  64. return HLM.EmitHLOperationCall(
  65. Builder, HLOpcodeGroup::HLAnnotateHandle,
  66. (unsigned)HLOpcodeGroup::HLAnnotateHandle, Handle->getType(),
  67. {Handle, Builder.getInt8((uint8_t)RP.Class),
  68. Builder.getInt8((uint8_t)RP.Kind), RPConstant, UndefValue::get(ResTy)},
  69. *HLM.GetModule());
  70. }
  71. void LowerGetResourceFromHeap(
  72. HLModule &HLM, std::vector<std::pair<Function *, unsigned>> &intrinsicMap) {
  73. llvm::Module &M = *HLM.GetModule();
  74. llvm::Type *HandleTy = HLM.GetOP()->GetHandleType();
  75. unsigned GetResFromHeapOp =
  76. static_cast<unsigned>(IntrinsicOp::IOP_CreateResourceFromHeap);
  77. DenseMap<Instruction *, Instruction *> ResourcePtrToHandlePtrMap;
  78. for (auto it : intrinsicMap) {
  79. unsigned opcode = it.second;
  80. if (opcode != GetResFromHeapOp)
  81. continue;
  82. Function *F = it.first;
  83. HLOpcodeGroup group = hlsl::GetHLOpcodeGroup(F);
  84. if (group != HLOpcodeGroup::HLIntrinsic)
  85. continue;
  86. for (auto uit = F->user_begin(); uit != F->user_end();) {
  87. CallInst *CI = cast<CallInst>(*(uit++));
  88. Instruction *ResPtr = cast<Instruction>(CI->getArgOperand(0));
  89. Value *Index = CI->getArgOperand(1);
  90. IRBuilder<> Builder(CI);
  91. // Make a handle from GetResFromHeap.
  92. Value *Handle =
  93. HLM.EmitHLOperationCall(Builder, HLOpcodeGroup::HLIntrinsic,
  94. GetResFromHeapOp, HandleTy, {Index}, M);
  95. // Find the handle ptr for res ptr.
  96. auto it = ResourcePtrToHandlePtrMap.find(ResPtr);
  97. Instruction *HandlePtr = nullptr;
  98. if (it != ResourcePtrToHandlePtrMap.end()) {
  99. HandlePtr = it->second;
  100. } else {
  101. IRBuilder<> AllocaBuilder(
  102. ResPtr->getParent()->getParent()->getEntryBlock().begin());
  103. HandlePtr = AllocaBuilder.CreateAlloca(HandleTy);
  104. ResourcePtrToHandlePtrMap[ResPtr] = HandlePtr;
  105. }
  106. // Store handle to handle ptr.
  107. Builder.CreateStore(Handle, HandlePtr);
  108. CI->eraseFromParent();
  109. }
  110. }
  111. // Replace load of Resource ptr into load of handel ptr.
  112. for (auto it : ResourcePtrToHandlePtrMap) {
  113. Instruction *resPtr = it.first;
  114. Instruction *handlePtr = it.second;
  115. for (auto uit = resPtr->user_begin(); uit != resPtr->user_end();) {
  116. User *U = *(uit++);
  117. BitCastInst *BCI = cast<BitCastInst>(U);
  118. DXASSERT(
  119. dxilutil::IsHLSLResourceType(BCI->getType()->getPointerElementType()),
  120. "illegal cast of resource ptr");
  121. for (auto cuit = BCI->user_begin(); cuit != BCI->user_end();) {
  122. LoadInst *LI = cast<LoadInst>(*(cuit++));
  123. IRBuilder<> Builder(LI);
  124. Value *Handle = Builder.CreateLoad(handlePtr);
  125. Value *Res =
  126. HLM.EmitHLOperationCall(Builder, HLOpcodeGroup::HLCast,
  127. (unsigned)HLCastOpcode::HandleToResCast,
  128. LI->getType(), {Handle}, M);
  129. LI->replaceAllUsesWith(Res);
  130. LI->eraseFromParent();
  131. }
  132. BCI->eraseFromParent();
  133. }
  134. resPtr->eraseFromParent();
  135. }
  136. }
  137. void ReplaceBoolVectorSubscript(CallInst *CI) {
  138. Value *Ptr = CI->getArgOperand(0);
  139. Value *Idx = CI->getArgOperand(1);
  140. Value *IdxList[] = {ConstantInt::get(Idx->getType(), 0), Idx};
  141. for (auto It = CI->user_begin(), E = CI->user_end(); It != E;) {
  142. Instruction *user = cast<Instruction>(*(It++));
  143. IRBuilder<> Builder(user);
  144. Value *GEP = Builder.CreateInBoundsGEP(Ptr, IdxList);
  145. if (LoadInst *LI = dyn_cast<LoadInst>(user)) {
  146. Value *NewLd = Builder.CreateLoad(GEP);
  147. Value *cast = Builder.CreateZExt(NewLd, LI->getType());
  148. LI->replaceAllUsesWith(cast);
  149. LI->eraseFromParent();
  150. } else {
  151. // Must be a store inst here.
  152. StoreInst *SI = cast<StoreInst>(user);
  153. Value *V = SI->getValueOperand();
  154. Value *cast =
  155. Builder.CreateICmpNE(V, llvm::ConstantInt::get(V->getType(), 0));
  156. Builder.CreateStore(cast, GEP);
  157. SI->eraseFromParent();
  158. }
  159. }
  160. CI->eraseFromParent();
  161. }
  162. void ReplaceBoolVectorSubscript(Function *F) {
  163. for (auto It = F->user_begin(), E = F->user_end(); It != E;) {
  164. User *user = *(It++);
  165. CallInst *CI = cast<CallInst>(user);
  166. ReplaceBoolVectorSubscript(CI);
  167. }
  168. }
// Add function body for intrinsic if possible.
//
// Returns the HL op function for (group, opcode) with the given signature,
// reusing an existing declaration when one matches. For Append/Consume and
// sincos it also synthesizes an IR body expressing the intrinsic in terms of
// simpler HL ops (counter increment/decrement + default subscript, or
// sin + cos + stores). All other opcodes get a bodiless declaration.
// F supplies the source function's attributes (and, for ext intrinsics, its
// name and group-name attribute).
Function *CreateOpFunction(llvm::Module &M, Function *F,
                           llvm::FunctionType *funcTy, HLOpcodeGroup group,
                           unsigned opcode) {
  Function *opFunc = nullptr;
  AttributeSet attribs = F->getAttributes().getFnAttributes();
  llvm::Type *opcodeTy = llvm::Type::getInt32Ty(M.getContext());
  if (group == HLOpcodeGroup::HLIntrinsic) {
    IntrinsicOp intriOp = static_cast<IntrinsicOp>(opcode);
    switch (intriOp) {
    case IntrinsicOp::MOP_Append:
    case IntrinsicOp::MOP_Consume: {
      bool bAppend = intriOp == IntrinsicOp::MOP_Append;
      llvm::Type *handleTy = funcTy->getParamType(HLOperandIndex::kHandleOpIdx);
      // Don't generate body for OutputStream::Append.
      if (bAppend && HLModule::IsStreamOutputPtrType(handleTy)) {
        opFunc = GetOrCreateHLFunction(M, funcTy, group, opcode, attribs);
        break;
      }
      opFunc = GetOrCreateHLFunctionWithBody(M, funcTy, group, opcode,
                                             bAppend ? "append" : "consume");
      llvm::Type *counterTy = llvm::Type::getInt32Ty(M.getContext());
      // Helper op that bumps the UAV counter and returns the slot index.
      llvm::FunctionType *IncCounterFuncTy =
          llvm::FunctionType::get(counterTy, {opcodeTy, handleTy}, false);
      unsigned counterOpcode =
          bAppend ? (unsigned)IntrinsicOp::MOP_IncrementCounter
                  : (unsigned)IntrinsicOp::MOP_DecrementCounter;
      Function *incCounterFunc =
          GetOrCreateHLFunction(M, IncCounterFuncTy, group, counterOpcode, attribs);
      llvm::Type *idxTy = counterTy;
      // Element type: the appended value's type, or Consume's return type.
      llvm::Type *valTy =
          bAppend ? funcTy->getParamType(HLOperandIndex::kAppendValOpIndex)
                  : funcTy->getReturnType();
      // Return type for subscript should be pointer type, hence in memory
      // representation
      llvm::Type *subscriptTy = valTy;
      bool isBoolScalarOrVector = false;
      if (!subscriptTy->isPointerTy()) {
        // Bools are i1 in registers but stored as i32 in memory; widen the
        // subscript's pointee accordingly and remember to convert at use.
        if (subscriptTy->getScalarType()->isIntegerTy(1)) {
          isBoolScalarOrVector = true;
          llvm::Type *memReprType =
              llvm::IntegerType::get(subscriptTy->getContext(), 32);
          subscriptTy =
              subscriptTy->isVectorTy()
                  ? llvm::VectorType::get(memReprType,
                                          subscriptTy->getVectorNumElements())
                  : memReprType;
        }
        subscriptTy = llvm::PointerType::get(subscriptTy, 0);
      }
      llvm::FunctionType *SubscriptFuncTy = llvm::FunctionType::get(
          subscriptTy, {opcodeTy, handleTy, idxTy}, false);
      Function *subscriptFunc =
          GetOrCreateHLFunction(M, SubscriptFuncTy, HLOpcodeGroup::HLSubscript,
                                (unsigned)HLSubscriptOpcode::DefaultSubscript, attribs);
      // Synthesize the body.
      BasicBlock *BB =
          BasicBlock::Create(opFunc->getContext(), "Entry", opFunc);
      IRBuilder<> Builder(BB);
      auto argIter = opFunc->args().begin();
      // Skip the opcode arg.
      argIter++;
      Argument *thisArg = argIter++;
      // int counter = IncrementCounter/DecrementCounter(Buf);
      Value *incCounterOpArg = ConstantInt::get(idxTy, counterOpcode);
      Value *counter =
          Builder.CreateCall(incCounterFunc, {incCounterOpArg, thisArg});
      // Buf[counter];
      Value *subscriptOpArg = ConstantInt::get(
          idxTy, (unsigned)HLSubscriptOpcode::DefaultSubscript);
      Value *subscript =
          Builder.CreateCall(subscriptFunc, {subscriptOpArg, thisArg, counter});
      if (bAppend) {
        Argument *valArg = argIter;
        // Buf[counter] = val;
        if (valTy->isPointerTy()) {
          // Aggregate value passed by pointer: copy it byte-wise.
          unsigned size = M.getDataLayout().getTypeAllocSize(
              subscript->getType()->getPointerElementType());
          Builder.CreateMemCpy(subscript, valArg, size, 1);
        } else {
          Value *storedVal = valArg;
          // Convert to memory representation
          if (isBoolScalarOrVector)
            storedVal = Builder.CreateZExt(
                storedVal, subscriptTy->getPointerElementType(), "frombool");
          Builder.CreateStore(storedVal, subscript);
        }
        Builder.CreateRetVoid();
      } else {
        // return Buf[counter];
        if (valTy->isPointerTy())
          Builder.CreateRet(subscript);
        else {
          Value *retVal = Builder.CreateLoad(subscript);
          // Convert to register representation
          if (isBoolScalarOrVector)
            retVal = Builder.CreateICmpNE(
                retVal, Constant::getNullValue(retVal->getType()), "tobool");
          Builder.CreateRet(retVal);
        }
      }
    } break;
    case IntrinsicOp::IOP_sincos: {
      // sincos(val, sinPtr, cosPtr) => *sinPtr = sin(val); *cosPtr = cos(val);
      opFunc =
          GetOrCreateHLFunctionWithBody(M, funcTy, group, opcode, "sincos");
      llvm::Type *valTy =
          funcTy->getParamType(HLOperandIndex::kTrinaryOpSrc0Idx);
      llvm::FunctionType *sinFuncTy =
          llvm::FunctionType::get(valTy, {opcodeTy, valTy}, false);
      unsigned sinOp = static_cast<unsigned>(IntrinsicOp::IOP_sin);
      unsigned cosOp = static_cast<unsigned>(IntrinsicOp::IOP_cos);
      // sin and cos share the same (opcode, val) -> val signature.
      Function *sinFunc = GetOrCreateHLFunction(M, sinFuncTy, group, sinOp, attribs);
      Function *cosFunc = GetOrCreateHLFunction(M, sinFuncTy, group, cosOp, attribs);
      BasicBlock *BB =
          BasicBlock::Create(opFunc->getContext(), "Entry", opFunc);
      IRBuilder<> Builder(BB);
      auto argIter = opFunc->args().begin();
      // Skip the opcode arg.
      argIter++;
      Argument *valArg = argIter++;
      Argument *sinPtrArg = argIter++;
      Argument *cosPtrArg = argIter++;
      Value *sinOpArg = ConstantInt::get(opcodeTy, sinOp);
      Value *sinVal = Builder.CreateCall(sinFunc, {sinOpArg, valArg});
      Builder.CreateStore(sinVal, sinPtrArg);
      Value *cosOpArg = ConstantInt::get(opcodeTy, cosOp);
      Value *cosVal = Builder.CreateCall(cosFunc, {cosOpArg, valArg});
      Builder.CreateStore(cosVal, cosPtrArg);
      // Ret.
      Builder.CreateRetVoid();
    } break;
    default:
      // No synthesized body for other intrinsics; declaration only.
      opFunc = GetOrCreateHLFunction(M, funcTy, group, opcode, attribs);
      break;
    }
  } else if (group == HLOpcodeGroup::HLExtIntrinsic) {
    // Extension intrinsics keep their original name and group-name attribute.
    llvm::StringRef fnName = F->getName();
    llvm::StringRef groupName = GetHLOpcodeGroupNameByAttr(F);
    opFunc =
        GetOrCreateHLFunction(M, funcTy, group, &groupName, &fnName, opcode, attribs);
  } else {
    opFunc = GetOrCreateHLFunction(M, funcTy, group, opcode, attribs);
  }
  return opFunc;
}
  313. DxilResourceProperties GetResourcePropsFromIntrinsicObjectArg(
  314. Value *arg, HLModule &HLM, DxilTypeSystem &typeSys,
  315. DenseMap<Value *, DxilResourceProperties> &valToResPropertiesMap) {
  316. DxilResourceProperties RP;
  317. RP.Class = DXIL::ResourceClass::Invalid;
  318. auto RPIt = valToResPropertiesMap.find(arg);
  319. if (RPIt != valToResPropertiesMap.end()) {
  320. RP = RPIt->second;
  321. } else {
  322. // Must be GEP.
  323. GEPOperator *GEP = cast<GEPOperator>(arg);
  324. // Find RP from GEP.
  325. Value *Ptr = GEP->getPointerOperand();
  326. // When Ptr is array of resource, check if it is another GEP.
  327. while (
  328. dxilutil::IsHLSLResourceType(dxilutil::GetArrayEltTy(Ptr->getType()))) {
  329. if (GEPOperator *ParentGEP = dyn_cast<GEPOperator>(Ptr)) {
  330. GEP = ParentGEP;
  331. Ptr = GEP->getPointerOperand();
  332. } else {
  333. break;
  334. }
  335. }
  336. RPIt = valToResPropertiesMap.find(Ptr);
  337. // When ptr is array of resource, ptr could be in
  338. // valToResPropertiesMap.
  339. if (RPIt != valToResPropertiesMap.end()) {
  340. RP = RPIt->second;
  341. } else {
  342. DxilStructAnnotation *Anno = nullptr;
  343. for (auto gepIt = gep_type_begin(GEP), E = gep_type_end(GEP); gepIt != E;
  344. ++gepIt) {
  345. if (StructType *ST = dyn_cast<StructType>(*gepIt)) {
  346. Anno = typeSys.GetStructAnnotation(ST);
  347. DXASSERT(Anno, "missing type annotation");
  348. unsigned Index =
  349. cast<ConstantInt>(gepIt.getOperand())->getLimitedValue();
  350. DxilFieldAnnotation &fieldAnno = Anno->GetFieldAnnotation(Index);
  351. if (fieldAnno.HasResourceAttribute()) {
  352. MDNode *resAttrib = fieldAnno.GetResourceAttribute();
  353. DxilResourceBase R(DXIL::ResourceClass::Invalid);
  354. HLM.LoadDxilResourceBaseFromMDNode(resAttrib, R);
  355. switch (R.GetClass()) {
  356. case DXIL::ResourceClass::SRV:
  357. case DXIL::ResourceClass::UAV: {
  358. DxilResource Res;
  359. HLM.LoadDxilResourceFromMDNode(resAttrib, Res);
  360. RP = resource_helper::loadFromResourceBase(&Res);
  361. } break;
  362. case DXIL::ResourceClass::Sampler: {
  363. DxilSampler Sampler;
  364. HLM.LoadDxilSamplerFromMDNode(resAttrib, Sampler);
  365. RP = resource_helper::loadFromResourceBase(&Sampler);
  366. } break;
  367. default:
  368. DXASSERT(0, "invalid resource attribute in filed annotation");
  369. break;
  370. }
  371. break;
  372. }
  373. }
  374. }
  375. }
  376. }
  377. DXASSERT(RP.Class != DXIL::ResourceClass::Invalid,
  378. "invalid resource properties");
  379. return RP;
  380. }
  381. void AddOpcodeParamForIntrinsic(
  382. HLModule &HLM, Function *F, unsigned opcode, llvm::Type *HandleTy,
  383. DenseMap<Value *, DxilResourceProperties> &valToResPropertiesMap) {
  384. llvm::Module &M = *HLM.GetModule();
  385. llvm::FunctionType *oldFuncTy = F->getFunctionType();
  386. SmallVector<llvm::Type *, 4> paramTyList;
  387. // Add the opcode param
  388. llvm::Type *opcodeTy = llvm::Type::getInt32Ty(M.getContext());
  389. paramTyList.emplace_back(opcodeTy);
  390. paramTyList.append(oldFuncTy->param_begin(), oldFuncTy->param_end());
  391. for (unsigned i = 1; i < paramTyList.size(); i++) {
  392. llvm::Type *Ty = paramTyList[i];
  393. if (Ty->isPointerTy()) {
  394. Ty = Ty->getPointerElementType();
  395. if (dxilutil::IsHLSLResourceType(Ty)) {
  396. // Use handle type for resource type.
  397. // This will make sure temp object variable only used by createHandle.
  398. paramTyList[i] = HandleTy;
  399. }
  400. }
  401. }
  402. HLOpcodeGroup group = hlsl::GetHLOpcodeGroup(F);
  403. if (group == HLOpcodeGroup::HLSubscript &&
  404. opcode == static_cast<unsigned>(HLSubscriptOpcode::VectorSubscript)) {
  405. llvm::FunctionType *FT = F->getFunctionType();
  406. llvm::Type *VecArgTy = FT->getParamType(0);
  407. llvm::VectorType *VType =
  408. cast<llvm::VectorType>(VecArgTy->getPointerElementType());
  409. llvm::Type *Ty = VType->getElementType();
  410. DXASSERT(Ty->isIntegerTy(), "Only bool could use VectorSubscript");
  411. llvm::IntegerType *ITy = cast<IntegerType>(Ty);
  412. DXASSERT_LOCALVAR(ITy, ITy->getBitWidth() == 1,
  413. "Only bool could use VectorSubscript");
  414. // The return type is i8*.
  415. // Replace all uses with i1*.
  416. ReplaceBoolVectorSubscript(F);
  417. return;
  418. }
  419. bool isDoubleSubscriptFunc =
  420. group == HLOpcodeGroup::HLSubscript &&
  421. opcode == static_cast<unsigned>(HLSubscriptOpcode::DoubleSubscript);
  422. llvm::Type *RetTy = oldFuncTy->getReturnType();
  423. if (isDoubleSubscriptFunc) {
  424. CallInst *doubleSub = cast<CallInst>(*F->user_begin());
  425. // Change currentIdx type into coord type.
  426. auto U = doubleSub->user_begin();
  427. Value *user = *U;
  428. CallInst *secSub = cast<CallInst>(user);
  429. unsigned coordIdx = HLOperandIndex::kSubscriptIndexOpIdx;
  430. // opcode operand not add yet, so the index need -1.
  431. if (GetHLOpcodeGroupByName(secSub->getCalledFunction()) ==
  432. HLOpcodeGroup::NotHL)
  433. coordIdx -= 1;
  434. Value *coord = secSub->getArgOperand(coordIdx);
  435. llvm::Type *coordTy = coord->getType();
  436. paramTyList[HLOperandIndex::kSubscriptIndexOpIdx] = coordTy;
  437. // Add the sampleIdx or mipLevel parameter to the end.
  438. paramTyList.emplace_back(opcodeTy);
  439. // Change return type to be resource ret type.
  440. // opcode operand not add yet, so the index need -1.
  441. Value *objPtr =
  442. doubleSub->getArgOperand(HLOperandIndex::kSubscriptObjectOpIdx - 1);
  443. // Must be a GEP
  444. GEPOperator *objGEP = cast<GEPOperator>(objPtr);
  445. gep_type_iterator GEPIt = gep_type_begin(objGEP), E = gep_type_end(objGEP);
  446. llvm::Type *resTy = nullptr;
  447. while (GEPIt != E) {
  448. if (dxilutil::IsHLSLResourceType(*GEPIt)) {
  449. resTy = *GEPIt;
  450. break;
  451. }
  452. GEPIt++;
  453. }
  454. DXASSERT(resTy, "must find the resource type");
  455. // Change object type to handle type.
  456. paramTyList[HLOperandIndex::kSubscriptObjectOpIdx] = HandleTy;
  457. // Change RetTy into pointer of resource reture type.
  458. RetTy = cast<StructType>(resTy)->getElementType(0)->getPointerTo();
  459. }
  460. llvm::FunctionType *funcTy =
  461. llvm::FunctionType::get(RetTy, paramTyList, oldFuncTy->isVarArg());
  462. Function *opFunc = CreateOpFunction(M, F, funcTy, group, opcode);
  463. StringRef lower = hlsl::GetHLLowerStrategy(F);
  464. if (!lower.empty())
  465. hlsl::SetHLLowerStrategy(opFunc, lower);
  466. DxilTypeSystem &typeSys = HLM.GetTypeSystem();
  467. for (auto user = F->user_begin(); user != F->user_end();) {
  468. // User must be a call.
  469. CallInst *oldCI = cast<CallInst>(*(user++));
  470. SmallVector<Value *, 4> opcodeParamList;
  471. Value *opcodeConst = Constant::getIntegerValue(opcodeTy, APInt(32, opcode));
  472. opcodeParamList.emplace_back(opcodeConst);
  473. opcodeParamList.append(oldCI->arg_operands().begin(),
  474. oldCI->arg_operands().end());
  475. IRBuilder<> Builder(oldCI);
  476. if (isDoubleSubscriptFunc) {
  477. // Change obj to the resource pointer.
  478. Value *objVal = opcodeParamList[HLOperandIndex::kSubscriptObjectOpIdx];
  479. GEPOperator *objGEP = cast<GEPOperator>(objVal);
  480. SmallVector<Value *, 8> IndexList;
  481. IndexList.append(objGEP->idx_begin(), objGEP->idx_end());
  482. Value *lastIndex = IndexList.back();
  483. ConstantInt *constIndex = cast<ConstantInt>(lastIndex);
  484. DXASSERT_LOCALVAR(constIndex, constIndex->getLimitedValue() == 1,
  485. "last index must 1");
  486. // Remove the last index.
  487. IndexList.pop_back();
  488. objVal = objGEP->getPointerOperand();
  489. DxilResourceProperties RP = GetResourcePropsFromIntrinsicObjectArg(
  490. objVal, HLM, typeSys, valToResPropertiesMap);
  491. if (IndexList.size() > 1)
  492. objVal = Builder.CreateInBoundsGEP(objVal, IndexList);
  493. Value *Handle = CreateHandleFromResPtr(objVal, HLM, HandleTy, Builder);
  494. Type *ResTy = objVal->getType()->getPointerElementType();
  495. Handle = CreateAnnotateHandle(HLM, Handle, RP, ResTy, Builder);
  496. // Change obj to the resource pointer.
  497. opcodeParamList[HLOperandIndex::kSubscriptObjectOpIdx] = Handle;
  498. // Set idx and mipIdx.
  499. Value *mipIdx = opcodeParamList[HLOperandIndex::kSubscriptIndexOpIdx];
  500. auto U = oldCI->user_begin();
  501. Value *user = *U;
  502. CallInst *secSub = cast<CallInst>(user);
  503. unsigned idxOpIndex = HLOperandIndex::kSubscriptIndexOpIdx;
  504. if (GetHLOpcodeGroupByName(secSub->getCalledFunction()) ==
  505. HLOpcodeGroup::NotHL)
  506. idxOpIndex--;
  507. Value *idx = secSub->getArgOperand(idxOpIndex);
  508. DXASSERT(secSub->hasOneUse(), "subscript should only has one use");
  509. // Add the sampleIdx or mipLevel parameter to the end.
  510. opcodeParamList[HLOperandIndex::kSubscriptIndexOpIdx] = idx;
  511. opcodeParamList.emplace_back(mipIdx);
  512. // Insert new call before secSub to make sure idx is ready to use.
  513. Builder.SetInsertPoint(secSub);
  514. }
  515. for (unsigned i = 1; i < opcodeParamList.size(); i++) {
  516. Value *arg = opcodeParamList[i];
  517. llvm::Type *Ty = arg->getType();
  518. if (Ty->isPointerTy()) {
  519. Ty = Ty->getPointerElementType();
  520. if (dxilutil::IsHLSLResourceType(Ty)) {
  521. DxilResourceProperties RP = GetResourcePropsFromIntrinsicObjectArg(
  522. arg, HLM, typeSys, valToResPropertiesMap);
  523. // Use object type directly, not by pointer.
  524. // This will make sure temp object variable only used by ld/st.
  525. if (GEPOperator *argGEP = dyn_cast<GEPOperator>(arg)) {
  526. std::vector<Value *> idxList(argGEP->idx_begin(),
  527. argGEP->idx_end());
  528. // Create instruction to avoid GEPOperator.
  529. GetElementPtrInst *GEP = GetElementPtrInst::CreateInBounds(
  530. argGEP->getPointerOperand(), idxList);
  531. Builder.Insert(GEP);
  532. arg = GEP;
  533. }
  534. llvm::Type *ResTy = arg->getType()->getPointerElementType();
  535. Value *Handle = CreateHandleFromResPtr(arg, HLM, HandleTy, Builder);
  536. Handle = CreateAnnotateHandle(HLM, Handle, RP, ResTy, Builder);
  537. opcodeParamList[i] = Handle;
  538. }
  539. }
  540. }
  541. Value *CI = Builder.CreateCall(opFunc, opcodeParamList);
  542. if (!isDoubleSubscriptFunc) {
  543. // replace new call and delete the old call
  544. oldCI->replaceAllUsesWith(CI);
  545. oldCI->eraseFromParent();
  546. } else {
  547. // For double script.
  548. // Replace single users use with new CI.
  549. auto U = oldCI->user_begin();
  550. Value *user = *U;
  551. CallInst *secSub = cast<CallInst>(user);
  552. secSub->replaceAllUsesWith(CI);
  553. secSub->eraseFromParent();
  554. oldCI->eraseFromParent();
  555. }
  556. }
  557. // delete the function
  558. F->eraseFromParent();
  559. }
  560. void AddOpcodeParamForIntrinsics(
  561. HLModule &HLM, std::vector<std::pair<Function *, unsigned>> &intrinsicMap,
  562. DenseMap<Value *, DxilResourceProperties> &valToResPropertiesMap) {
  563. llvm::Type *HandleTy = HLM.GetOP()->GetHandleType();
  564. for (auto mapIter : intrinsicMap) {
  565. Function *F = mapIter.first;
  566. if (F->user_empty()) {
  567. // delete the function
  568. F->eraseFromParent();
  569. continue;
  570. }
  571. unsigned opcode = mapIter.second;
  572. AddOpcodeParamForIntrinsic(HLM, F, opcode, HandleTy, valToResPropertiesMap);
  573. }
  574. }
  575. }
  576. namespace {
  577. // Returns true a global value is being updated
  578. bool GlobalHasStoreUserRec(Value *V, std::set<Value *> &visited) {
  579. bool isWriteEnabled = false;
  580. if (V && visited.find(V) == visited.end()) {
  581. visited.insert(V);
  582. for (User *U : V->users()) {
  583. if (isa<StoreInst>(U)) {
  584. return true;
  585. } else if (CallInst *CI = dyn_cast<CallInst>(U)) {
  586. Function *F = CI->getCalledFunction();
  587. if (!F->isIntrinsic()) {
  588. HLOpcodeGroup hlGroup = GetHLOpcodeGroup(F);
  589. switch (hlGroup) {
  590. case HLOpcodeGroup::NotHL:
  591. return true;
  592. case HLOpcodeGroup::HLMatLoadStore: {
  593. HLMatLoadStoreOpcode opCode =
  594. static_cast<HLMatLoadStoreOpcode>(hlsl::GetHLOpcode(CI));
  595. if (opCode == HLMatLoadStoreOpcode::ColMatStore ||
  596. opCode == HLMatLoadStoreOpcode::RowMatStore)
  597. return true;
  598. break;
  599. }
  600. case HLOpcodeGroup::HLCast:
  601. case HLOpcodeGroup::HLSubscript:
  602. if (GlobalHasStoreUserRec(U, visited))
  603. return true;
  604. break;
  605. default:
  606. break;
  607. }
  608. }
  609. } else if (isa<GEPOperator>(U) || isa<PHINode>(U) || isa<SelectInst>(U)) {
  610. if (GlobalHasStoreUserRec(U, visited))
  611. return true;
  612. }
  613. }
  614. }
  615. return isWriteEnabled;
  616. }
  617. // Returns true if any of the direct user of a global is a store inst
  618. // otherwise recurse through the remaining users and check if any GEP
  619. // exists and which in turn has a store inst as user.
  620. bool GlobalHasStoreUser(GlobalVariable *GV) {
  621. std::set<Value *> visited;
  622. Value *V = cast<Value>(GV);
  623. return GlobalHasStoreUserRec(V, visited);
  624. }
  625. GlobalVariable *CreateStaticGlobal(llvm::Module *M, GlobalVariable *GV) {
  626. Constant *GC = M->getOrInsertGlobal(GV->getName().str() + ".static.copy",
  627. GV->getType()->getPointerElementType());
  628. GlobalVariable *NGV = cast<GlobalVariable>(GC);
  629. if (GV->hasInitializer()) {
  630. NGV->setInitializer(GV->getInitializer());
  631. } else {
  632. // The copy being static, it should be initialized per llvm rules
  633. NGV->setInitializer(
  634. Constant::getNullValue(GV->getType()->getPointerElementType()));
  635. }
  636. // static global should have internal linkage
  637. NGV->setLinkage(GlobalValue::InternalLinkage);
  638. return NGV;
  639. }
// Externally-visible, non-constant globals cannot stay writable in DXIL.
// Any such global that is actually stored to gets an internal static copy:
// all uses are redirected to the copy, and the original's contents are
// memcpy'd into it at the start of the entry function.  Every global that
// passes the filter is then marked constant.
void CreateWriteEnabledStaticGlobals(llvm::Module *M, llvm::Function *EF) {
  std::vector<GlobalVariable *> worklist;
  for (GlobalVariable &GV : M->globals()) {
    if (!GV.isConstant() && GV.getLinkage() != GlobalValue::InternalLinkage &&
        // skip globals which are HLSL objects or group shared
        !dxilutil::IsHLSLObjectType(GV.getType()->getPointerElementType()) &&
        !dxilutil::IsSharedMemoryGlobal(&GV)) {
      if (GlobalHasStoreUser(&GV))
        worklist.emplace_back(&GV);
      // TODO: Ensure that constant globals aren't using initializer
      GV.setConstant(true);
    }
  }
  // Insert after the allocas of EF's entry block.
  IRBuilder<> Builder(
      dxilutil::FirstNonAllocaInsertionPt(&EF->getEntryBlock()));
  for (GlobalVariable *GV : worklist) {
    GlobalVariable *NGV = CreateStaticGlobal(M, GV);
    GV->replaceAllUsesWith(NGV);
    // Seed the copy from the original global.
    // NOTE(review): despite the original "all entryblocks" wording, only EF's
    // entry block is seeded here -- confirm other entry points are handled
    // elsewhere.
    uint64_t size = M->getDataLayout().getTypeAllocSize(
        GV->getType()->getPointerElementType());
    Builder.CreateMemCpy(NGV, GV, size, 1);
  }
}
  664. } // namespace
  665. namespace {
  666. void SetEntryFunction(HLModule &HLM, Function *Entry,
  667. clang::CodeGen::CodeGenModule &CGM) {
  668. if (Entry == nullptr) {
  669. clang::DiagnosticsEngine &Diags = CGM.getDiags();
  670. unsigned DiagID = Diags.getCustomDiagID(clang::DiagnosticsEngine::Error,
  671. "cannot find entry function %0");
  672. Diags.Report(DiagID) << CGM.getCodeGenOpts().HLSLEntryFunction;
  673. return;
  674. }
  675. HLM.SetEntryFunction(Entry);
  676. }
  677. Function *CloneFunction(Function *Orig, const llvm::Twine &Name,
  678. llvm::Module *llvmModule, hlsl::DxilTypeSystem &TypeSys,
  679. hlsl::DxilTypeSystem &SrcTypeSys) {
  680. Function *F = Function::Create(Orig->getFunctionType(),
  681. GlobalValue::LinkageTypes::ExternalLinkage,
  682. Name, llvmModule);
  683. SmallVector<ReturnInst *, 2> Returns;
  684. ValueToValueMapTy vmap;
  685. // Map params.
  686. auto entryParamIt = F->arg_begin();
  687. for (Argument &param : Orig->args()) {
  688. vmap[&param] = (entryParamIt++);
  689. }
  690. llvm::CloneFunctionInto(F, Orig, vmap, /*ModuleLevelChagnes*/ false, Returns);
  691. TypeSys.CopyFunctionAnnotation(F, Orig, SrcTypeSys);
  692. return F;
  693. }
// Clone shader entry function to be called by other functions.
// The original function will be used as shader entry.
void CloneShaderEntry(Function *ShaderF, StringRef EntryName, HLModule &HLM) {
  // The clone takes over the mangled name and becomes the internal,
  // ordinarily-callable copy.
  Function *F = CloneFunction(ShaderF, "", HLM.GetModule(), HLM.GetTypeSystem(),
                              HLM.GetTypeSystem());
  F->takeName(ShaderF);
  F->setLinkage(GlobalValue::LinkageTypes::InternalLinkage);
  // Set to name before mangled.
  ShaderF->setName(EntryName);
  // Semantics only apply to the real shader entry; strip them from the
  // clone's return and parameter annotations.
  DxilFunctionAnnotation *annot = HLM.GetFunctionAnnotation(F);
  DxilParameterAnnotation &cloneRetAnnot = annot->GetRetTypeAnnotation();
  // Clear semantic for cloned one.
  cloneRetAnnot.SetSemanticString("");
  cloneRetAnnot.SetSemanticIndexVec({});
  for (unsigned i = 0; i < annot->GetNumParameters(); i++) {
    DxilParameterAnnotation &cloneParamAnnot = annot->GetParameterAnnotation(i);
    // Clear semantic for cloned one.
    cloneParamAnnot.SetSemanticString("");
    cloneParamAnnot.SetSemanticIndexVec({});
  }
}
  715. } // namespace
  716. namespace {
  717. bool IsPatchConstantFunction(
  718. const Function *F, StringMap<PatchConstantInfo> &patchConstantFunctionMap) {
  719. DXASSERT_NOMSG(F != nullptr);
  720. for (auto &&p : patchConstantFunctionMap) {
  721. if (p.second.Func == F)
  722. return true;
  723. }
  724. return false;
  725. }
// Bind the patch-constant function named by the HS entry's
// [patchconstantfunc(...)] attribute to the entry, diagnosing: a missing
// function, ambiguous overloads, inout parameters, and input/output
// control-point counts that disagree with the hull shader's.
void SetPatchConstantFunctionWithAttr(
    const EntryFunctionInfo &EntryFunc,
    const clang::HLSLPatchConstantFuncAttr *PatchConstantFuncAttr,
    StringMap<PatchConstantInfo> &patchConstantFunctionMap,
    std::unordered_map<Function *, std::unique_ptr<DxilFunctionProps>>
        &patchConstantFunctionPropsMap,
    HLModule &HLM, clang::CodeGen::CodeGenModule &CGM) {
  StringRef funcName = PatchConstantFuncAttr->getFunctionName();
  auto Entry = patchConstantFunctionMap.find(funcName);
  // Error: no function with the attribute's name was recorded.
  if (Entry == patchConstantFunctionMap.end()) {
    clang::DiagnosticsEngine &Diags = CGM.getDiags();
    unsigned DiagID = Diags.getCustomDiagID(
        clang::DiagnosticsEngine::Error, "Cannot find patchconstantfunc %0.");
    Diags.Report(PatchConstantFuncAttr->getLocation(), DiagID) << funcName;
    return;
  }
  // Warn on multiple overloads and proceed with the one recorded in the map.
  if (Entry->second.NumOverloads != 1) {
    clang::DiagnosticsEngine &Diags = CGM.getDiags();
    unsigned DiagID =
        Diags.getCustomDiagID(clang::DiagnosticsEngine::Warning,
                              "Multiple overloads of patchconstantfunc %0.");
    unsigned NoteID = Diags.getCustomDiagID(clang::DiagnosticsEngine::Note,
                                            "This overload was selected.");
    Diags.Report(PatchConstantFuncAttr->getLocation(), DiagID) << funcName;
    Diags.Report(Entry->second.SL, NoteID);
  }
  Function *patchConstFunc = Entry->second.Func;
  DXASSERT(
      HLM.HasDxilFunctionProps(EntryFunc.Func),
      " else AddHLSLFunctionInfo did not save the dxil function props for the "
      "HS entry.");
  DxilFunctionProps *HSProps = &HLM.GetDxilFunctionProps(EntryFunc.Func);
  HLM.SetPatchConstantFunctionForHS(EntryFunc.Func, patchConstFunc);
  DXASSERT_NOMSG(patchConstantFunctionPropsMap.count(patchConstFunc));
  // Check no inout parameter for patch constant function.
  DxilFunctionAnnotation *patchConstFuncAnnotation =
      HLM.GetFunctionAnnotation(patchConstFunc);
  for (unsigned i = 0; i < patchConstFuncAnnotation->GetNumParameters(); i++) {
    if (patchConstFuncAnnotation->GetParameterAnnotation(i)
            .GetParamInputQual() == DxilParamInputQual::Inout) {
      clang::DiagnosticsEngine &Diags = CGM.getDiags();
      unsigned DiagID = Diags.getCustomDiagID(
          clang::DiagnosticsEngine::Error,
          "Patch Constant function %0 should not have inout param.");
      Diags.Report(Entry->second.SL, DiagID) << funcName;
    }
  }
  // Input/Output control point validation.
  if (patchConstantFunctionPropsMap.count(patchConstFunc)) {
    const DxilFunctionProps &patchProps =
        *patchConstantFunctionPropsMap[patchConstFunc];
    // A zero count in the patch-constant function means "unspecified" and is
    // accepted; a nonzero count must match the HS entry's.
    if (patchProps.ShaderProps.HS.inputControlPoints != 0 &&
        patchProps.ShaderProps.HS.inputControlPoints !=
            HSProps->ShaderProps.HS.inputControlPoints) {
      clang::DiagnosticsEngine &Diags = CGM.getDiags();
      unsigned DiagID =
          Diags.getCustomDiagID(clang::DiagnosticsEngine::Error,
                                "Patch constant function's input patch input "
                                "should have %0 elements, but has %1.");
      Diags.Report(Entry->second.SL, DiagID)
          << HSProps->ShaderProps.HS.inputControlPoints
          << patchProps.ShaderProps.HS.inputControlPoints;
    }
    if (patchProps.ShaderProps.HS.outputControlPoints != 0 &&
        patchProps.ShaderProps.HS.outputControlPoints !=
            HSProps->ShaderProps.HS.outputControlPoints) {
      clang::DiagnosticsEngine &Diags = CGM.getDiags();
      unsigned DiagID =
          Diags.getCustomDiagID(clang::DiagnosticsEngine::Error,
                                "Patch constant function's output patch input "
                                "should have %0 elements, but has %1.");
      Diags.Report(Entry->second.SL, DiagID)
          << HSProps->ShaderProps.HS.outputControlPoints
          << patchProps.ShaderProps.HS.outputControlPoints;
    }
  }
}
  803. void SetPatchConstantFunction(
  804. const EntryFunctionInfo &EntryFunc,
  805. std::unordered_map<Function *, const clang::HLSLPatchConstantFuncAttr *>
  806. &HSEntryPatchConstantFuncAttr,
  807. StringMap<PatchConstantInfo> &patchConstantFunctionMap,
  808. std::unordered_map<Function *, std::unique_ptr<DxilFunctionProps>>
  809. &patchConstantFunctionPropsMap,
  810. HLModule &HLM, clang::CodeGen::CodeGenModule &CGM) {
  811. auto AttrsIter = HSEntryPatchConstantFuncAttr.find(EntryFunc.Func);
  812. DXASSERT(AttrsIter != HSEntryPatchConstantFuncAttr.end(),
  813. "we have checked this in AddHLSLFunctionInfo()");
  814. SetPatchConstantFunctionWithAttr(EntryFunc, AttrsIter->second,
  815. patchConstantFunctionMap,
  816. patchConstantFunctionPropsMap, HLM, CGM);
  817. }
  818. } // namespace
  819. namespace {
// For case like:
// cbuffer A {
//   float a;
//   int b;
//}
//
// const static struct {
//   float a;
//   int b;
//} ST = { a, b };
// Replace user of ST with a and b.
//
// Rewrites one GEP user of the static struct so it addresses the matching
// cbuffer field constant from InitList directly.  Returns false when the GEP
// does not reach into a struct field (fewer than two indices).
bool ReplaceConstStaticGlobalUser(GEPOperator *GEP,
                                  std::vector<Constant *> &InitList,
                                  IRBuilder<> &Builder) {
  if (GEP->getNumIndices() < 2) {
    // Don't use sub element.
    return false;
  }
  SmallVector<Value *, 4> idxList;
  auto iter = GEP->idx_begin();
  // Keep the leading pointer-level index.
  idxList.emplace_back(*(iter++));
  // The second index selects the struct field, i.e. the InitList element.
  ConstantInt *subIdx = dyn_cast<ConstantInt>(*(iter++));
  DXASSERT(subIdx, "else dynamic indexing on struct field");
  unsigned subIdxImm = subIdx->getLimitedValue();
  DXASSERT(subIdxImm < InitList.size(), "else struct index out of bound");
  Constant *subPtr = InitList[subIdxImm];
  // Move every idx to idxList except idx for InitList.
  while (iter != GEP->idx_end()) {
    idxList.emplace_back(*(iter++));
  }
  // Re-root the GEP on the field's original constant and replace the user.
  Value *NewGEP = Builder.CreateGEP(subPtr, idxList);
  GEP->replaceAllUsesWith(NewGEP);
  return true;
}
  854. } // namespace
  855. namespace CGHLSLMSHelper {
// For each const static global initialized from cbuffer fields, rewrite its
// GEP users to address the original fields directly (see
// ReplaceConstStaticGlobalUser), then neuter the now-useless global ctor.
void ReplaceConstStaticGlobals(
    std::unordered_map<GlobalVariable *, std::vector<Constant *>>
        &staticConstGlobalInitListMap,
    std::unordered_map<GlobalVariable *, Function *>
        &staticConstGlobalCtorMap) {
  for (auto &iter : staticConstGlobalInitListMap) {
    GlobalVariable *GV = iter.first;
    std::vector<Constant *> &InitList = iter.second;
    LLVMContext &Ctx = GV->getContext();
    // Do the replace.
    bool bPass = true;
    for (User *U : GV->users()) {
      IRBuilder<> Builder(Ctx);
      if (GetElementPtrInst *GEPInst = dyn_cast<GetElementPtrInst>(U)) {
        // Real GEP instructions need the replacement emitted before them.
        Builder.SetInsertPoint(GEPInst);
        bPass &= ReplaceConstStaticGlobalUser(cast<GEPOperator>(GEPInst),
                                              InitList, Builder);
      } else if (GEPOperator *GEP = dyn_cast<GEPOperator>(U)) {
        // Constant-expression GEPs fold without an insert point.
        bPass &= ReplaceConstStaticGlobalUser(GEP, InitList, Builder);
      } else {
        DXASSERT(false, "invalid user of const static global");
      }
    }
    // Clear the Ctor which is useless now.
    if (bPass) {
      Function *Ctor = staticConstGlobalCtorMap[GV];
      // Replace the ctor body with a bare "ret void".
      Ctor->getBasicBlockList().clear();
      BasicBlock *Entry = BasicBlock::Create(Ctx, "", Ctor);
      IRBuilder<> Builder(Entry);
      Builder.CreateRetVoid();
    }
  }
}
  889. }
  890. namespace {
// Load the value at Ptr (whose pointee type is FromTy) and cast it to ToTy
// for one of the bitcast patterns this pass recognizes: scalar->vec1 splat,
// int->vec1-of-i1, vector->vec1 truncation, array->vector, and i1 widening.
// Returns the cast value, or nullptr when the FromTy/ToTy pair is not a
// recognized pattern (caller keeps the original load).
Value *CastLdValue(Value *Ptr, llvm::Type *FromTy, llvm::Type *ToTy,
                   IRBuilder<> &Builder) {
  if (ToTy->isVectorTy()) {
    unsigned vecSize = ToTy->getVectorNumElements();
    if (vecSize == 1 && ToTy->getVectorElementType() == FromTy) {
      Value *V = Builder.CreateLoad(Ptr);
      // ScalarToVec1Splat
      // Change scalar into vec1.
      Value *Vec1 = UndefValue::get(ToTy);
      return Builder.CreateInsertElement(Vec1, V, (uint64_t)0);
    } else if (vecSize == 1 && FromTy->isIntegerTy() &&
               ToTy->getVectorElementType()->isIntegerTy(1)) {
      // load(bitcast i32* to <1 x i1>*)
      // Rewrite to
      // insertelement(icmp ne (load i32*), 0)
      Value *IntV = Builder.CreateLoad(Ptr);
      Value *BoolV = Builder.CreateICmpNE(
          IntV, ConstantInt::get(IntV->getType(), 0), "tobool");
      Value *Vec1 = UndefValue::get(ToTy);
      return Builder.CreateInsertElement(Vec1, BoolV, (uint64_t)0);
    } else if (FromTy->isVectorTy() && vecSize == 1) {
      Value *V = Builder.CreateLoad(Ptr);
      // VectorTrunc
      // Change vector into vec1, keeping only element 0.
      int mask[] = {0};
      return Builder.CreateShuffleVector(V, V, mask);
    } else if (FromTy->isArrayTy()) {
      llvm::Type *FromEltTy = FromTy->getArrayElementType();
      llvm::Type *ToEltTy = ToTy->getVectorElementType();
      if (FromTy->getArrayNumElements() == vecSize && FromEltTy == ToEltTy) {
        // ArrayToVector: load each array element and insert it into the
        // result vector.
        Value *NewLd = UndefValue::get(ToTy);
        Value *zeroIdx = Builder.getInt32(0);
        for (unsigned i = 0; i < vecSize; i++) {
          Value *GEP =
              Builder.CreateInBoundsGEP(Ptr, {zeroIdx, Builder.getInt32(i)});
          Value *Elt = Builder.CreateLoad(GEP);
          NewLd = Builder.CreateInsertElement(NewLd, Elt, i);
        }
        return NewLd;
      }
    }
  } else if (FromTy == Builder.getInt1Ty()) {
    Value *V = Builder.CreateLoad(Ptr);
    // BoolCast: widen the loaded i1 to the integer ToTy.
    DXASSERT_NOMSG(ToTy->isIntegerTy());
    return Builder.CreateZExt(V, ToTy);
  }
  // Unrecognized cast pattern.
  return nullptr;
}
// Inverse of CastLdValue: cast value V (typed ToTy, the bitcast destination
// type) back to FromTy so it can be stored through Ptr.  Returns the cast
// value for the caller to store, or nullptr when either the pattern is
// unrecognized or the ArrayToVector path has already emitted the stores.
Value *CastStValue(Value *Ptr, Value *V, llvm::Type *FromTy, llvm::Type *ToTy,
                   IRBuilder<> &Builder) {
  if (ToTy->isVectorTy()) {
    unsigned vecSize = ToTy->getVectorNumElements();
    if (vecSize == 1 && ToTy->getVectorElementType() == FromTy) {
      // ScalarToVec1Splat
      // Change vec1 back to scalar.
      Value *Elt = Builder.CreateExtractElement(V, (uint64_t)0);
      return Elt;
    } else if (FromTy->isVectorTy() && vecSize == 1) {
      // VectorTrunc
      // Change vec1 into vector.
      // Should not happen.
      // Reported error at Sema::ImpCastExprToType.
      DXASSERT_NOMSG(0);
    } else if (FromTy->isArrayTy()) {
      llvm::Type *FromEltTy = FromTy->getArrayElementType();
      llvm::Type *ToEltTy = ToTy->getVectorElementType();
      if (FromTy->getArrayNumElements() == vecSize && FromEltTy == ToEltTy) {
        // ArrayToVector: scatter each vector element into the array slot.
        Value *zeroIdx = Builder.getInt32(0);
        for (unsigned i = 0; i < vecSize; i++) {
          Value *Elt = Builder.CreateExtractElement(V, i);
          Value *GEP =
              Builder.CreateInBoundsGEP(Ptr, {zeroIdx, Builder.getInt32(i)});
          Builder.CreateStore(Elt, GEP);
        }
        // The store already done.
        // Return null to ignore use of the return value.
        // NOTE(review): the caller treats nullptr as "not simplified" and
        // keeps the original store instruction -- confirm this is intended.
        return nullptr;
      }
    }
  } else if (FromTy == Builder.getInt1Ty()) {
    // BoolCast: narrow the stored integer back to i1 via != 0.
    DXASSERT_NOMSG(ToTy->isIntegerTy());
    Value *CastV = Builder.CreateICmpNE(V, ConstantInt::get(V->getType(), 0));
    return CastV;
  }
  return nullptr;
}
  982. bool SimplifyBitCastLoad(LoadInst *LI, llvm::Type *FromTy, llvm::Type *ToTy,
  983. Value *Ptr) {
  984. IRBuilder<> Builder(LI);
  985. // Cast FromLd to ToTy.
  986. Value *CastV = CastLdValue(Ptr, FromTy, ToTy, Builder);
  987. if (CastV) {
  988. LI->replaceAllUsesWith(CastV);
  989. return true;
  990. } else {
  991. return false;
  992. }
  993. }
  994. bool SimplifyBitCastStore(StoreInst *SI, llvm::Type *FromTy, llvm::Type *ToTy,
  995. Value *Ptr) {
  996. IRBuilder<> Builder(SI);
  997. Value *V = SI->getValueOperand();
  998. // Cast Val to FromTy.
  999. Value *CastV = CastStValue(Ptr, V, FromTy, ToTy, Builder);
  1000. if (CastV) {
  1001. Builder.CreateStore(CastV, Ptr);
  1002. return true;
  1003. } else {
  1004. return false;
  1005. }
  1006. }
// Try to rewrite a GEP whose base is a bitcast pointer so it indexes Ptr
// (the original pointer) directly.  Returns true when the GEP was replaced.
bool SimplifyBitCastGEP(GEPOperator *GEP, llvm::Type *FromTy, llvm::Type *ToTy,
                        Value *Ptr) {
  if (ToTy->isVectorTy()) {
    unsigned vecSize = ToTy->getVectorNumElements();
    if (vecSize == 1 && ToTy->getVectorElementType() == FromTy) {
      // ScalarToVec1Splat: a GEP into a <1 x T>* is equivalent to the T*.
      GEP->replaceAllUsesWith(Ptr);
      return true;
    } else if (FromTy->isVectorTy() && vecSize == 1) {
      // VectorTrunc
      DXASSERT_NOMSG(
          !isa<llvm::VectorType>(GEP->getType()->getPointerElementType()));
      IRBuilder<> Builder(FromTy->getContext());
      // Only real instructions need an insert point; constant-expression
      // GEPs are folded by the builder.
      if (Instruction *I = dyn_cast<Instruction>(GEP))
        Builder.SetInsertPoint(I);
      std::vector<Value *> idxList(GEP->idx_begin(), GEP->idx_end());
      Value *NewGEP = Builder.CreateInBoundsGEP(Ptr, idxList);
      GEP->replaceAllUsesWith(NewGEP);
      return true;
    } else if (FromTy->isArrayTy()) {
      llvm::Type *FromEltTy = FromTy->getArrayElementType();
      llvm::Type *ToEltTy = ToTy->getVectorElementType();
      if (FromTy->getArrayNumElements() == vecSize && FromEltTy == ToEltTy) {
        // ArrayToVector: not simplified here.
      }
    }
  } else if (FromTy == llvm::Type::getInt1Ty(FromTy->getContext())) {
    // BoolCast: not simplified here.
  }
  return false;
}
  1038. typedef SmallPtrSet<Instruction *, 4> SmallInstSet;
// Fold the load/store/GEP users of a pointer-to-pointer bitcast so they
// operate on the original pointer directly.  Instructions made dead are
// collected into deadInsts for the caller to erase; non-foldable users
// (calls, further casts) are left alone.
void SimplifyBitCast(BitCastOperator *BC, SmallInstSet &deadInsts) {
  Value *Ptr = BC->getOperand(0);
  llvm::Type *FromTy = Ptr->getType();
  llvm::Type *ToTy = BC->getType();
  // Only pointer-to-pointer bitcasts are handled.
  if (!FromTy->isPointerTy() || !ToTy->isPointerTy())
    return;
  FromTy = FromTy->getPointerElementType();
  ToTy = ToTy->getPointerElementType();
  // Take care case like %2 = bitcast %struct.T* %1 to <1 x float>*.
  // Drill through leading struct nesting with a zero-index GEP so FromTy
  // becomes the innermost first-field type.
  bool GEPCreated = false;
  if (FromTy->isStructTy()) {
    IRBuilder<> Builder(FromTy->getContext());
    if (Instruction *I = dyn_cast<Instruction>(BC))
      Builder.SetInsertPoint(I);
    Value *zeroIdx = Builder.getInt32(0);
    unsigned nestLevel = 1;
    while (llvm::StructType *ST = dyn_cast<llvm::StructType>(FromTy)) {
      if (ST->getNumElements() == 0)
        break;
      FromTy = ST->getElementType(0);
      nestLevel++;
    }
    std::vector<Value *> idxList(nestLevel, zeroIdx);
    Ptr = Builder.CreateGEP(Ptr, idxList);
    GEPCreated = true;
  }
  for (User *U : BC->users()) {
    if (LoadInst *LI = dyn_cast<LoadInst>(U)) {
      if (SimplifyBitCastLoad(LI, FromTy, ToTy, Ptr)) {
        LI->dropAllReferences();
        deadInsts.insert(LI);
      }
    } else if (StoreInst *SI = dyn_cast<StoreInst>(U)) {
      if (SimplifyBitCastStore(SI, FromTy, ToTy, Ptr)) {
        SI->dropAllReferences();
        deadInsts.insert(SI);
      }
    } else if (GEPOperator *GEP = dyn_cast<GEPOperator>(U)) {
      // Only instruction GEPs need erasure; constant-expression GEPs have no
      // parent to erase from.
      if (SimplifyBitCastGEP(GEP, FromTy, ToTy, Ptr))
        if (Instruction *I = dyn_cast<Instruction>(GEP)) {
          I->dropAllReferences();
          deadInsts.insert(I);
        }
    } else if (dyn_cast<CallInst>(U)) {
      // Skip function call.
    } else if (dyn_cast<BitCastInst>(U)) {
      // Skip bitcast.
    } else if (dyn_cast<AddrSpaceCastInst>(U)) {
      // Skip addrspacecast.
    } else {
      DXASSERT(0, "not support yet");
    }
  }
  // We created a GEP instruction but didn't end up consuming it, so delete it.
  if (GEPCreated && Ptr->use_empty()) {
    if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(Ptr))
      GEP->eraseFromParent();
    else
      cast<Constant>(Ptr)->destroyConstant();
  }
}
  1100. typedef float(__cdecl *FloatUnaryEvalFuncType)(float);
  1101. typedef double(__cdecl *DoubleUnaryEvalFuncType)(double);
  1102. typedef APInt(__cdecl *IntBinaryEvalFuncType)(const APInt &, const APInt &);
  1103. typedef float(__cdecl *FloatBinaryEvalFuncType)(float, float);
  1104. typedef double(__cdecl *DoubleBinaryEvalFuncType)(double, double);
  1105. Value *EvalUnaryIntrinsic(ConstantFP *fpV, FloatUnaryEvalFuncType floatEvalFunc,
  1106. DoubleUnaryEvalFuncType doubleEvalFunc) {
  1107. llvm::Type *Ty = fpV->getType();
  1108. Value *Result = nullptr;
  1109. if (Ty->isDoubleTy()) {
  1110. double dV = fpV->getValueAPF().convertToDouble();
  1111. Value *dResult = ConstantFP::get(Ty, doubleEvalFunc(dV));
  1112. Result = dResult;
  1113. } else {
  1114. DXASSERT_NOMSG(Ty->isFloatTy());
  1115. float fV = fpV->getValueAPF().convertToFloat();
  1116. Value *dResult = ConstantFP::get(Ty, floatEvalFunc(fV));
  1117. Result = dResult;
  1118. }
  1119. return Result;
  1120. }
  1121. Value *EvalBinaryIntrinsic(Constant *cV0, Constant *cV1,
  1122. FloatBinaryEvalFuncType floatEvalFunc,
  1123. DoubleBinaryEvalFuncType doubleEvalFunc,
  1124. IntBinaryEvalFuncType intEvalFunc) {
  1125. llvm::Type *Ty = cV0->getType();
  1126. Value *Result = nullptr;
  1127. if (Ty->isDoubleTy()) {
  1128. ConstantFP *fpV0 = cast<ConstantFP>(cV0);
  1129. ConstantFP *fpV1 = cast<ConstantFP>(cV1);
  1130. double dV0 = fpV0->getValueAPF().convertToDouble();
  1131. double dV1 = fpV1->getValueAPF().convertToDouble();
  1132. Value *dResult = ConstantFP::get(Ty, doubleEvalFunc(dV0, dV1));
  1133. Result = dResult;
  1134. } else if (Ty->isFloatTy()) {
  1135. ConstantFP *fpV0 = cast<ConstantFP>(cV0);
  1136. ConstantFP *fpV1 = cast<ConstantFP>(cV1);
  1137. float fV0 = fpV0->getValueAPF().convertToFloat();
  1138. float fV1 = fpV1->getValueAPF().convertToFloat();
  1139. Value *dResult = ConstantFP::get(Ty, floatEvalFunc(fV0, fV1));
  1140. Result = dResult;
  1141. } else {
  1142. DXASSERT_NOMSG(Ty->isIntegerTy());
  1143. DXASSERT_NOMSG(intEvalFunc);
  1144. ConstantInt *ciV0 = cast<ConstantInt>(cV0);
  1145. ConstantInt *ciV1 = cast<ConstantInt>(cV1);
  1146. const APInt &iV0 = ciV0->getValue();
  1147. const APInt &iV1 = ciV1->getValue();
  1148. Value *dResult = ConstantInt::get(Ty, intEvalFunc(iV0, iV1));
  1149. Result = dResult;
  1150. }
  1151. return Result;
  1152. }
  1153. Value *EvalUnaryIntrinsic(CallInst *CI, FloatUnaryEvalFuncType floatEvalFunc,
  1154. DoubleUnaryEvalFuncType doubleEvalFunc) {
  1155. Value *V = CI->getArgOperand(0);
  1156. llvm::Type *Ty = CI->getType();
  1157. Value *Result = nullptr;
  1158. if (llvm::VectorType *VT = dyn_cast<llvm::VectorType>(Ty)) {
  1159. Result = UndefValue::get(Ty);
  1160. Constant *CV = cast<Constant>(V);
  1161. IRBuilder<> Builder(CI);
  1162. for (unsigned i = 0; i < VT->getNumElements(); i++) {
  1163. ConstantFP *fpV = cast<ConstantFP>(CV->getAggregateElement(i));
  1164. Value *EltResult = EvalUnaryIntrinsic(fpV, floatEvalFunc, doubleEvalFunc);
  1165. Result = Builder.CreateInsertElement(Result, EltResult, i);
  1166. }
  1167. } else {
  1168. ConstantFP *fpV = cast<ConstantFP>(V);
  1169. Result = EvalUnaryIntrinsic(fpV, floatEvalFunc, doubleEvalFunc);
  1170. }
  1171. CI->replaceAllUsesWith(Result);
  1172. CI->eraseFromParent();
  1173. return Result;
  1174. }
  1175. Value *EvalBinaryIntrinsic(CallInst *CI, FloatBinaryEvalFuncType floatEvalFunc,
  1176. DoubleBinaryEvalFuncType doubleEvalFunc,
  1177. IntBinaryEvalFuncType intEvalFunc = nullptr) {
  1178. Value *V0 = CI->getArgOperand(0);
  1179. Value *V1 = CI->getArgOperand(1);
  1180. llvm::Type *Ty = CI->getType();
  1181. Value *Result = nullptr;
  1182. if (llvm::VectorType *VT = dyn_cast<llvm::VectorType>(Ty)) {
  1183. Result = UndefValue::get(Ty);
  1184. Constant *CV0 = cast<Constant>(V0);
  1185. Constant *CV1 = cast<Constant>(V1);
  1186. IRBuilder<> Builder(CI);
  1187. for (unsigned i = 0; i < VT->getNumElements(); i++) {
  1188. Constant *cV0 = cast<Constant>(CV0->getAggregateElement(i));
  1189. Constant *cV1 = cast<Constant>(CV1->getAggregateElement(i));
  1190. Value *EltResult = EvalBinaryIntrinsic(cV0, cV1, floatEvalFunc,
  1191. doubleEvalFunc, intEvalFunc);
  1192. Result = Builder.CreateInsertElement(Result, EltResult, i);
  1193. }
  1194. } else {
  1195. Constant *cV0 = cast<Constant>(V0);
  1196. Constant *cV1 = cast<Constant>(V1);
  1197. Result = EvalBinaryIntrinsic(cV0, cV1, floatEvalFunc, doubleEvalFunc,
  1198. intEvalFunc);
  1199. }
  1200. CI->replaceAllUsesWith(Result);
  1201. CI->eraseFromParent();
  1202. return Result;
  1203. CI->eraseFromParent();
  1204. return Result;
  1205. }
// Peephole transforms applied per instruction while finishing HL DXIR:
//  - BitCast: fold pointer bitcasts into their load/store/GEP users.
//  - Load/Store through a constant-expression bitcast: same folding.
//  - Shift ops: clamp the shift amount into 0..bitWidth-1, matching HLSL
//    shift semantics (an unclamped LLVM shift of >= bitWidth is undefined).
void SimpleTransformForHLDXIRInst(Instruction *I, SmallInstSet &deadInsts) {
  unsigned opcode = I->getOpcode();
  switch (opcode) {
  case Instruction::BitCast: {
    BitCastOperator *BCI = cast<BitCastOperator>(I);
    SimplifyBitCast(BCI, deadInsts);
  } break;
  case Instruction::Load: {
    LoadInst *ldInst = cast<LoadInst>(I);
    DXASSERT(!HLMatrixType::isa(ldInst->getType()),
             "matrix load should use HL LdStMatrix");
    Value *Ptr = ldInst->getPointerOperand();
    // A bitcast folded into a constant expression also needs simplifying.
    if (ConstantExpr *CE = dyn_cast_or_null<ConstantExpr>(Ptr)) {
      if (BitCastOperator *BCO = dyn_cast<BitCastOperator>(CE)) {
        SimplifyBitCast(BCO, deadInsts);
      }
    }
  } break;
  case Instruction::Store: {
    StoreInst *stInst = cast<StoreInst>(I);
    Value *V = stInst->getValueOperand();
    DXASSERT_LOCALVAR(V, !HLMatrixType::isa(V->getType()),
                      "matrix store should use HL LdStMatrix");
    Value *Ptr = stInst->getPointerOperand();
    if (ConstantExpr *CE = dyn_cast<ConstantExpr>(Ptr)) {
      if (BitCastOperator *BCO = dyn_cast<BitCastOperator>(CE)) {
        SimplifyBitCast(BCO, deadInsts);
      }
    }
  } break;
  case Instruction::LShr:
  case Instruction::AShr:
  case Instruction::Shl: {
    llvm::BinaryOperator *BO = cast<llvm::BinaryOperator>(I);
    Value *op2 = BO->getOperand(1);
    IntegerType *Ty = cast<IntegerType>(BO->getType()->getScalarType());
    unsigned bitWidth = Ty->getBitWidth();
    // Clamp op2 to 0 ~ bitWidth-1
    // (masking with bitWidth-1 assumes bitWidth is a power of two, which
    // holds for the integer widths HLSL produces).
    if (ConstantInt *cOp2 = dyn_cast<ConstantInt>(op2)) {
      // Constant amount: rewrite the operand in place if out of range.
      unsigned iOp2 = cOp2->getLimitedValue();
      unsigned clampedOp2 = iOp2 & (bitWidth - 1);
      if (iOp2 != clampedOp2) {
        BO->setOperand(1, ConstantInt::get(op2->getType(), clampedOp2));
      }
    } else {
      // Dynamic amount: mask it just before the shift.
      Value *mask = ConstantInt::get(op2->getType(), bitWidth - 1);
      IRBuilder<> Builder(I);
      op2 = Builder.CreateAnd(op2, mask);
      BO->setOperand(1, op2);
    }
  } break;
  }
}
  1259. } // namespace
  1260. namespace CGHLSLMSHelper {
  1261. Value *TryEvalIntrinsic(CallInst *CI, IntrinsicOp intriOp, unsigned hlslVersion) {
  1262. switch (intriOp) {
  1263. case IntrinsicOp::IOP_tan: {
  1264. return EvalUnaryIntrinsic(CI, tanf, tan);
  1265. } break;
  1266. case IntrinsicOp::IOP_tanh: {
  1267. return EvalUnaryIntrinsic(CI, tanhf, tanh);
  1268. } break;
  1269. case IntrinsicOp::IOP_sin: {
  1270. return EvalUnaryIntrinsic(CI, sinf, sin);
  1271. } break;
  1272. case IntrinsicOp::IOP_sinh: {
  1273. return EvalUnaryIntrinsic(CI, sinhf, sinh);
  1274. } break;
  1275. case IntrinsicOp::IOP_cos: {
  1276. return EvalUnaryIntrinsic(CI, cosf, cos);
  1277. } break;
  1278. case IntrinsicOp::IOP_cosh: {
  1279. return EvalUnaryIntrinsic(CI, coshf, cosh);
  1280. } break;
  1281. case IntrinsicOp::IOP_asin: {
  1282. return EvalUnaryIntrinsic(CI, asinf, asin);
  1283. } break;
  1284. case IntrinsicOp::IOP_acos: {
  1285. return EvalUnaryIntrinsic(CI, acosf, acos);
  1286. } break;
  1287. case IntrinsicOp::IOP_atan: {
  1288. return EvalUnaryIntrinsic(CI, atanf, atan);
  1289. } break;
  1290. case IntrinsicOp::IOP_atan2: {
  1291. Value *V0 = CI->getArgOperand(0);
  1292. ConstantFP *fpV0 = cast<ConstantFP>(V0);
  1293. Value *V1 = CI->getArgOperand(1);
  1294. ConstantFP *fpV1 = cast<ConstantFP>(V1);
  1295. llvm::Type *Ty = CI->getType();
  1296. Value *Result = nullptr;
  1297. if (Ty->isDoubleTy()) {
  1298. double dV0 = fpV0->getValueAPF().convertToDouble();
  1299. double dV1 = fpV1->getValueAPF().convertToDouble();
  1300. Value *atanV = ConstantFP::get(CI->getType(), atan2(dV0, dV1));
  1301. CI->replaceAllUsesWith(atanV);
  1302. Result = atanV;
  1303. } else {
  1304. DXASSERT_NOMSG(Ty->isFloatTy());
  1305. float fV0 = fpV0->getValueAPF().convertToFloat();
  1306. float fV1 = fpV1->getValueAPF().convertToFloat();
  1307. Value *atanV = ConstantFP::get(CI->getType(), atan2f(fV0, fV1));
  1308. CI->replaceAllUsesWith(atanV);
  1309. Result = atanV;
  1310. }
  1311. CI->eraseFromParent();
  1312. return Result;
  1313. } break;
  1314. case IntrinsicOp::IOP_sqrt: {
  1315. return EvalUnaryIntrinsic(CI, sqrtf, sqrt);
  1316. } break;
  1317. case IntrinsicOp::IOP_rsqrt: {
  1318. auto rsqrtF = [](float v) -> float { return 1.0 / sqrtf(v); };
  1319. auto rsqrtD = [](double v) -> double { return 1.0 / sqrt(v); };
  1320. return EvalUnaryIntrinsic(CI, rsqrtF, rsqrtD);
  1321. } break;
  1322. case IntrinsicOp::IOP_exp: {
  1323. return EvalUnaryIntrinsic(CI, expf, exp);
  1324. } break;
  1325. case IntrinsicOp::IOP_exp2: {
  1326. return EvalUnaryIntrinsic(CI, exp2f, exp2);
  1327. } break;
  1328. case IntrinsicOp::IOP_log: {
  1329. return EvalUnaryIntrinsic(CI, logf, log);
  1330. } break;
  1331. case IntrinsicOp::IOP_log10: {
  1332. return EvalUnaryIntrinsic(CI, log10f, log10);
  1333. } break;
  1334. case IntrinsicOp::IOP_log2: {
  1335. return EvalUnaryIntrinsic(CI, log2f, log2);
  1336. } break;
  1337. case IntrinsicOp::IOP_pow: {
  1338. return EvalBinaryIntrinsic(CI, powf, pow);
  1339. } break;
  1340. case IntrinsicOp::IOP_max: {
  1341. auto maxF = [](float a, float b) -> float { return a > b ? a : b; };
  1342. auto maxD = [](double a, double b) -> double { return a > b ? a : b; };
  1343. auto imaxI = [](const APInt &a, const APInt &b) -> APInt {
  1344. return a.sgt(b) ? a : b;
  1345. };
  1346. return EvalBinaryIntrinsic(CI, maxF, maxD, imaxI);
  1347. } break;
  1348. case IntrinsicOp::IOP_min: {
  1349. auto minF = [](float a, float b) -> float { return a < b ? a : b; };
  1350. auto minD = [](double a, double b) -> double { return a < b ? a : b; };
  1351. auto iminI = [](const APInt &a, const APInt &b) -> APInt {
  1352. return a.slt(b) ? a : b;
  1353. };
  1354. return EvalBinaryIntrinsic(CI, minF, minD, iminI);
  1355. } break;
  1356. case IntrinsicOp::IOP_umax: {
  1357. DXASSERT_NOMSG(
  1358. CI->getArgOperand(0)->getType()->getScalarType()->isIntegerTy());
  1359. auto umaxI = [](const APInt &a, const APInt &b) -> APInt {
  1360. return a.ugt(b) ? a : b;
  1361. };
  1362. return EvalBinaryIntrinsic(CI, nullptr, nullptr, umaxI);
  1363. } break;
  1364. case IntrinsicOp::IOP_umin: {
  1365. DXASSERT_NOMSG(
  1366. CI->getArgOperand(0)->getType()->getScalarType()->isIntegerTy());
  1367. auto uminI = [](const APInt &a, const APInt &b) -> APInt {
  1368. return a.ult(b) ? a : b;
  1369. };
  1370. return EvalBinaryIntrinsic(CI, nullptr, nullptr, uminI);
  1371. } break;
  1372. case IntrinsicOp::IOP_rcp: {
  1373. auto rcpF = [](float v) -> float { return 1.0 / v; };
  1374. auto rcpD = [](double v) -> double { return 1.0 / v; };
  1375. return EvalUnaryIntrinsic(CI, rcpF, rcpD);
  1376. } break;
  1377. case IntrinsicOp::IOP_ceil: {
  1378. return EvalUnaryIntrinsic(CI, ceilf, ceil);
  1379. } break;
  1380. case IntrinsicOp::IOP_floor: {
  1381. return EvalUnaryIntrinsic(CI, floorf, floor);
  1382. } break;
  1383. case IntrinsicOp::IOP_round: {
  1384. // round intrinsic could exhibit different behaviour for constant and runtime evaluations.
  1385. // E.g., for round(0.5): constant evaluation results in 1 (away from zero rounding),
  1386. // while runtime evaluation results in 0 (nearest even rounding).
  1387. //
  1388. // For back compat, DXC still preserves the above behavior for language versions 2016 or below.
  1389. // However, for newer language versions, DXC now always use nearest even for round() intrinsic in all
  1390. // cases.
  1391. if (hlslVersion <= 2016) {
  1392. return EvalUnaryIntrinsic(CI, roundf, round);
  1393. } else {
  1394. auto roundingMode = fegetround();
  1395. fesetround(FE_TONEAREST);
  1396. Value *result = EvalUnaryIntrinsic(CI, nearbyintf, nearbyint);
  1397. fesetround(roundingMode);
  1398. return result;
  1399. }
  1400. } break;
  1401. case IntrinsicOp::IOP_trunc: {
  1402. return EvalUnaryIntrinsic(CI, truncf, trunc);
  1403. } break;
  1404. case IntrinsicOp::IOP_frac: {
  1405. auto fracF = [](float v) -> float { return v - floor(v); };
  1406. auto fracD = [](double v) -> double { return v - floor(v); };
  1407. return EvalUnaryIntrinsic(CI, fracF, fracD);
  1408. } break;
  1409. case IntrinsicOp::IOP_isnan: {
  1410. Value *V = CI->getArgOperand(0);
  1411. ConstantFP *fV = cast<ConstantFP>(V);
  1412. bool isNan = fV->getValueAPF().isNaN();
  1413. Constant *cNan = ConstantInt::get(CI->getType(), isNan ? 1 : 0);
  1414. CI->replaceAllUsesWith(cNan);
  1415. CI->eraseFromParent();
  1416. return cNan;
  1417. } break;
  1418. default:
  1419. return nullptr;
  1420. }
  1421. }
  1422. // Do simple transform to make later lower pass easier.
  1423. void SimpleTransformForHLDXIR(llvm::Module *pM) {
  1424. SmallInstSet deadInsts;
  1425. for (Function &F : pM->functions()) {
  1426. for (BasicBlock &BB : F.getBasicBlockList()) {
  1427. for (BasicBlock::iterator Iter = BB.begin(); Iter != BB.end();) {
  1428. Instruction *I = (Iter++);
  1429. if (deadInsts.count(I))
  1430. continue; // Skip dead instructions
  1431. SimpleTransformForHLDXIRInst(I, deadInsts);
  1432. }
  1433. }
  1434. }
  1435. for (Instruction *I : deadInsts)
  1436. I->dropAllReferences();
  1437. for (Instruction *I : deadInsts)
  1438. I->eraseFromParent();
  1439. deadInsts.clear();
  1440. for (GlobalVariable &GV : pM->globals()) {
  1441. if (dxilutil::IsStaticGlobal(&GV)) {
  1442. for (User *U : GV.users()) {
  1443. if (BitCastOperator *BCO = dyn_cast<BitCastOperator>(U)) {
  1444. SimplifyBitCast(BCO, deadInsts);
  1445. }
  1446. }
  1447. }
  1448. }
  1449. for (Instruction *I : deadInsts)
  1450. I->dropAllReferences();
  1451. for (Instruction *I : deadInsts)
  1452. I->eraseFromParent();
  1453. }
  1454. } // namespace CGHLSLMSHelper
  1455. namespace {
  1456. unsigned RoundToAlign(unsigned num, unsigned mod) {
  1457. // round num to next highest mod
  1458. if (mod != 0)
  1459. return mod * ((num + mod - 1) / mod);
  1460. return num;
  1461. }
  1462. // Retrieve the last scalar or vector element type.
  1463. // This has to be recursive for the nasty empty struct case.
  1464. // returns true if found, false if we must backtrack.
  1465. bool RetrieveLastElementType(Type *Ty, Type *&EltTy) {
  1466. if (Ty->isStructTy()) {
  1467. if (Ty->getStructNumElements() == 0)
  1468. return false;
  1469. for (unsigned i = Ty->getStructNumElements(); i > 0; --i) {
  1470. if (RetrieveLastElementType(Ty->getStructElementType(i - 1), EltTy))
  1471. return true;
  1472. }
  1473. } else if (Ty->isArrayTy()) {
  1474. if (RetrieveLastElementType(Ty->getArrayElementType(), EltTy))
  1475. return true;
  1476. } else if ((Ty->isVectorTy() || Ty->isSingleValueType())) {
  1477. EltTy = Ty->getScalarType();
  1478. return true;
  1479. }
  1480. return false;
  1481. }
// Align `offset` (in bytes) for placing a constant of type `Ty` in a legacy
// cbuffer layout. Here the `size` is the constant's CB range size in bytes;
// offset still needs to be aligned based on type since this is the legacy
// cbuffer global path.
//   bRowMajor        - matrix orientation when Ty is a matrix.
//   bMinPrecMode     - true when the module uses min-precision types.
//   bCurRowIsMinPrec - in/out: whether the current 16-byte row holds
//                      min-precision components; updated here so successive
//                      calls can force a row break on precision changes.
// Returns the aligned offset (row breaks handled by
// AlignBufferOffsetInLegacy).
unsigned AlignCBufferOffset(unsigned offset, unsigned size, llvm::Type *Ty,
                            bool bRowMajor,
                            bool bMinPrecMode, bool &bCurRowIsMinPrec) {
  DXASSERT(!(offset & 1), "otherwise we have an invalid offset.");
  // Arrays always start on a fresh 16-byte row in legacy layout.
  bool bNeedNewRow = Ty->isArrayTy();
  // In min-precision mode, a new row is needed when
  // going into or out of min-precision component type.
  if (!bNeedNewRow) {
    bool bMinPrec = false;
    if (Ty->isStructTy()) {
      if (HLMatrixType mat = HLMatrixType::dyn_cast(Ty)) {
        // A matrix needs a new row when it occupies more than one register
        // row in its storage orientation.
        bNeedNewRow |= !bRowMajor && mat.getNumColumns() > 1;
        bNeedNewRow |= bRowMajor && mat.getNumRows() > 1;
        bMinPrec = bMinPrecMode &&
                   mat.getElementType(false)->getScalarSizeInBits() < 32;
      } else {
        // Non-matrix structs always start a new row.
        bNeedNewRow = true;
        if (bMinPrecMode) {
          // Need to get min-prec of last element of structure,
          // in case we pack something else into the end.
          Type *EltTy = nullptr;
          if (RetrieveLastElementType(Ty, EltTy))
            bCurRowIsMinPrec = EltTy->getScalarSizeInBits() < 32;
        }
      }
    } else {
      DXASSERT_NOMSG(Ty->isVectorTy() || Ty->isSingleValueType());
      // vector or scalar
      bMinPrec = bMinPrecMode && Ty->getScalarSizeInBits() < 32;
    }
    if (bMinPrecMode) {
      // Force a row break when crossing a min-precision boundary, then
      // remember the precision class of the row about to be filled.
      bNeedNewRow |= bCurRowIsMinPrec != bMinPrec;
      bCurRowIsMinPrec = bMinPrec;
    }
  }
  unsigned scalarSizeInBytes = Ty->getScalarSizeInBits() / 8;
  return AlignBufferOffsetInLegacy(offset, size, scalarSizeInBytes,
                                   bNeedNewRow);
}
  1523. unsigned
  1524. AllocateDxilConstantBuffer(HLCBuffer &CB,
  1525. std::unordered_map<Constant *, DxilFieldAnnotation>
  1526. &constVarAnnotationMap,
  1527. bool bMinPrecMode) {
  1528. unsigned offset = 0;
  1529. // Scan user allocated constants first.
  1530. // Update offset.
  1531. for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
  1532. if (C->GetLowerBound() == UINT_MAX)
  1533. continue;
  1534. unsigned size = C->GetRangeSize();
  1535. unsigned nextOffset = size + C->GetLowerBound();
  1536. if (offset < nextOffset)
  1537. offset = nextOffset;
  1538. }
  1539. // Alloc after user allocated constants.
  1540. bool bCurRowIsMinPrec = false;
  1541. for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
  1542. if (C->GetLowerBound() != UINT_MAX)
  1543. continue;
  1544. unsigned size = C->GetRangeSize();
  1545. llvm::Type *Ty = C->GetGlobalSymbol()->getType()->getPointerElementType();
  1546. auto fieldAnnotation = constVarAnnotationMap.at(C->GetGlobalSymbol());
  1547. bool bRowMajor = HLMatrixType::isa(Ty)
  1548. ? fieldAnnotation.GetMatrixAnnotation().Orientation ==
  1549. MatrixOrientation::RowMajor
  1550. : false;
  1551. // Align offset.
  1552. offset = AlignCBufferOffset(offset, size, Ty, bRowMajor, bMinPrecMode, bCurRowIsMinPrec);
  1553. if (C->GetLowerBound() == UINT_MAX) {
  1554. C->SetLowerBound(offset);
  1555. }
  1556. offset += size;
  1557. }
  1558. return offset;
  1559. }
  1560. void AllocateDxilConstantBuffers(
  1561. HLModule &HLM, std::unordered_map<Constant *, DxilFieldAnnotation>
  1562. &constVarAnnotationMap) {
  1563. for (unsigned i = 0; i < HLM.GetCBuffers().size(); i++) {
  1564. HLCBuffer &CB = *static_cast<HLCBuffer *>(&(HLM.GetCBuffer(i)));
  1565. unsigned size = AllocateDxilConstantBuffer(CB, constVarAnnotationMap,
  1566. HLM.GetHLOptions().bUseMinPrecision);
  1567. CB.SetSize(size);
  1568. }
  1569. }
  1570. } // namespace
  1571. namespace {
// Replace uses of V with NewV, but only the uses inside function F.
//  - Instruction users belonging to F get their matching operands rewritten
//    in place.
//  - Constant-operator users (only GEP and bitcast are supported) are cloned
//    as instructions through Builder, and replacement recurses on the clone.
//  - A GlobalVariable user has its initializer dropped and replaced with a
//    store of NewV emitted at Builder's insertion point.
// NOTE(review): Builder's insertion point must dominate all rewritten uses
// in F — callers pass a builder positioned at the function entry.
void ReplaceUseInFunction(Value *V, Value *NewV, Function *F,
                          IRBuilder<> &Builder) {
  // Users may be removed while iterating; advance the iterator before use.
  for (auto U = V->user_begin(); U != V->user_end();) {
    User *user = *(U++);
    if (Instruction *I = dyn_cast<Instruction>(user)) {
      if (I->getParent()->getParent() == F) {
        // replace use with GEP if in F
        for (unsigned i = 0; i < I->getNumOperands(); i++) {
          if (I->getOperand(i) == V)
            I->setOperand(i, NewV);
        }
      }
    } else {
      // For constant operator, create local clone which use GEP.
      // Only support GEP and bitcast.
      if (GEPOperator *GEPOp = dyn_cast<GEPOperator>(user)) {
        std::vector<Value *> idxList(GEPOp->idx_begin(), GEPOp->idx_end());
        Value *NewGEP = Builder.CreateInBoundsGEP(NewV, idxList);
        ReplaceUseInFunction(GEPOp, NewGEP, F, Builder);
      } else if (GlobalVariable *GV = dyn_cast<GlobalVariable>(user)) {
        // Change the init val into NewV with Store.
        GV->setInitializer(nullptr);
        Builder.CreateStore(NewV, GV);
      } else {
        // Must be bitcast here.
        BitCastOperator *BC = cast<BitCastOperator>(user);
        Value *NewBC = Builder.CreateBitCast(NewV, BC->getType());
        ReplaceUseInFunction(BC, NewBC, F, Builder);
      }
    }
  }
}
  1604. void MarkUsedFunctionForConst(Value *V,
  1605. std::unordered_set<Function *> &usedFunc) {
  1606. for (auto U = V->user_begin(); U != V->user_end();) {
  1607. User *user = *(U++);
  1608. if (Instruction *I = dyn_cast<Instruction>(user)) {
  1609. Function *F = I->getParent()->getParent();
  1610. usedFunc.insert(F);
  1611. } else {
  1612. // For constant operator, create local clone which use GEP.
  1613. // Only support GEP and bitcast.
  1614. if (GEPOperator *GEPOp = dyn_cast<GEPOperator>(user)) {
  1615. MarkUsedFunctionForConst(GEPOp, usedFunc);
  1616. } else if (GlobalVariable *GV = dyn_cast<GlobalVariable>(user)) {
  1617. MarkUsedFunctionForConst(GV, usedFunc);
  1618. } else {
  1619. // Must be bitcast here.
  1620. BitCastOperator *BC = cast<BitCastOperator>(user);
  1621. MarkUsedFunctionForConst(BC, usedFunc);
  1622. }
  1623. }
  1624. }
  1625. }
// Materialize the cbuffer global for CB and rewrite every use of its member
// constants to go through an HLCreateHandle + HLSubscript (CBufferSubscript)
// call in each non-HL function that uses them.
// For ConstantBuffer arrays, the global is an array-of-struct and each use
// gets its own handle indexed by the (flattened) array index.
// Returns false — creating nothing — when no constant in CB is used.
bool CreateCBufferVariable(HLCBuffer &CB, HLModule &HLM, llvm::Type *HandleTy) {
  bool bUsed = false;
  // Build Struct for CBuffer from the member constants' types.
  SmallVector<llvm::Type *, 4> Elements;
  for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
    Value *GV = C->GetGlobalSymbol();
    if (GV->hasNUsesOrMore(1))
      bUsed = true;
    // Global variable must be pointer type.
    llvm::Type *Ty = GV->getType()->getPointerElementType();
    Elements.emplace_back(Ty);
  }
  // Don't create CBuffer variable for unused cbuffer.
  if (!bUsed)
    return false;
  llvm::Module &M = *HLM.GetModule();
  bool isCBArray = CB.GetRangeSize() != 1;
  llvm::GlobalVariable *cbGV = nullptr;
  llvm::Type *cbTy = nullptr;
  // Number of array index levels consumed when subscripting a CB array.
  unsigned cbIndexDepth = 0;
  if (!isCBArray) {
    llvm::StructType *CBStructTy =
        llvm::StructType::create(Elements, CB.GetGlobalName());
    cbGV = new llvm::GlobalVariable(M, CBStructTy, /*IsConstant*/ true,
                                    llvm::GlobalValue::ExternalLinkage,
                                    /*InitVal*/ nullptr, CB.GetGlobalName());
    cbTy = cbGV->getType();
  } else {
    // For array of ConstantBuffer, create array of struct instead of struct
    // of array.
    DXASSERT(CB.GetConstants().size() == 1,
             "ConstantBuffer should have 1 constant");
    Value *GV = CB.GetConstants()[0]->GetGlobalSymbol();
    llvm::Type *CBEltTy =
        GV->getType()->getPointerElementType()->getArrayElementType();
    cbIndexDepth = 1;
    // Multi-dimensional ConstantBuffer arrays add one index level each.
    while (CBEltTy->isArrayTy()) {
      CBEltTy = CBEltTy->getArrayElementType();
      cbIndexDepth++;
    }
    // Add one level struct type to match normal case.
    llvm::StructType *CBStructTy =
        llvm::StructType::create({CBEltTy}, CB.GetGlobalName());
    llvm::ArrayType *CBArrayTy =
        llvm::ArrayType::get(CBStructTy, CB.GetRangeSize());
    cbGV = new llvm::GlobalVariable(M, CBArrayTy, /*IsConstant*/ true,
                                    llvm::GlobalValue::ExternalLinkage,
                                    /*InitVal*/ nullptr, CB.GetGlobalName());
    // Subscript returns a pointer to one element struct, not the array.
    cbTy = llvm::PointerType::get(CBStructTy,
                                  cbGV->getType()->getPointerAddressSpace());
  }
  CB.SetGlobalSymbol(cbGV);
  llvm::Type *opcodeTy = llvm::Type::getInt32Ty(M.getContext());
  llvm::Type *idxTy = opcodeTy;
  Constant *zeroIdx = ConstantInt::get(opcodeTy, 0);
  // Argument template for HLCreateHandle: {resource, index}; slots are
  // overwritten per use below.
  Value *HandleArgs[] = {cbGV, zeroIdx};
  llvm::FunctionType *SubscriptFuncTy =
      llvm::FunctionType::get(cbTy, {opcodeTy, HandleTy, idxTy}, false);
  Function *subscriptFunc =
      GetOrCreateHLFunction(M, SubscriptFuncTy, HLOpcodeGroup::HLSubscript,
                            (unsigned)HLSubscriptOpcode::CBufferSubscript);
  Constant *opArg =
      ConstantInt::get(opcodeTy, (unsigned)HLSubscriptOpcode::CBufferSubscript);
  // Argument template for the subscript call: {opcode, handle, index}.
  Value *args[] = {opArg, nullptr, zeroIdx};
  llvm::LLVMContext &Context = M.getContext();
  llvm::Type *i32Ty = llvm::Type::getInt32Ty(Context);
  Value *zero = ConstantInt::get(i32Ty, (uint64_t)0);
  // Per-constant: the struct-field index, and the set of functions that use
  // the constant (so unused functions get no subscript rewrite).
  std::vector<Value *> indexArray(CB.GetConstants().size());
  std::vector<std::unordered_set<Function *>> constUsedFuncList(
      CB.GetConstants().size());
  for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
    Value *idx = ConstantInt::get(i32Ty, C->GetID());
    indexArray[C->GetID()] = idx;
    Value *GV = C->GetGlobalSymbol();
    MarkUsedFunctionForConst(GV, constUsedFuncList[C->GetID()]);
  }
  for (Function &F : M.functions()) {
    if (F.isDeclaration())
      continue;
    // Skip HL operation functions themselves.
    if (GetHLOpcodeGroupByName(&F) != HLOpcodeGroup::NotHL)
      continue;
    IRBuilder<> Builder(F.getEntryBlock().getFirstInsertionPt());
    // create HL subscript to make all the use of cbuffer start from it.
    HandleArgs[HLOperandIndex::kCreateHandleResourceOpIdx - 1] = cbGV;
    CallInst *Handle = HLM.EmitHLOperationCall(
        Builder, HLOpcodeGroup::HLCreateHandle, 0, HandleTy, HandleArgs, M);
    args[HLOperandIndex::kSubscriptObjectOpIdx] = Handle;
    Instruction *cbSubscript =
        cast<Instruction>(Builder.CreateCall(subscriptFunc, {args}));
    // Replace constant var with GEP pGV
    for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
      Value *GV = C->GetGlobalSymbol();
      if (constUsedFuncList[C->GetID()].count(&F) == 0)
        continue; // Constant not used in this function.
      Value *idx = indexArray[C->GetID()];
      if (!isCBArray) {
        // Simple case: GEP off the entry-block subscript.
        Instruction *GEP = cast<Instruction>(
            Builder.CreateInBoundsGEP(cbSubscript, {zero, idx}));
        // TODO: make sure the debug info is synced to GEP.
        // GEP->setDebugLoc(GV);
        ReplaceUseInFunction(GV, GEP, &F, Builder);
        // Delete if no use in F.
        if (GEP->user_empty())
          GEP->eraseFromParent();
      } else {
        // CB array case: each GEP user needs its own handle indexed by the
        // flattened array index.
        for (auto U = GV->user_begin(); U != GV->user_end();) {
          User *user = *(U++);
          if (user->user_empty())
            continue;
          Instruction *I = dyn_cast<Instruction>(user);
          if (I && I->getParent()->getParent() != &F)
            continue;
          // Instruction users build at their own position; constant
          // operators use the entry-block builder.
          IRBuilder<> *instBuilder = &Builder;
          std::unique_ptr<IRBuilder<>> B;
          if (I) {
            B = llvm::make_unique<IRBuilder<>>(I);
            instBuilder = B.get();
          }
          GEPOperator *GEPOp = cast<GEPOperator>(user);
          std::vector<Value *> idxList;
          DXASSERT(GEPOp->getNumIndices() >= 1 + cbIndexDepth,
                   "must indexing ConstantBuffer array");
          idxList.reserve(GEPOp->getNumIndices() - (cbIndexDepth - 1));
          gep_type_iterator GI = gep_type_begin(*GEPOp),
                            E = gep_type_end(*GEPOp);
          idxList.push_back(GI.getOperand());
          // change array index with 0 for struct index.
          idxList.push_back(zero);
          GI++;
          Value *arrayIdx = GI.getOperand();
          GI++;
          // Flatten multi-dimensional indices into a single linear index.
          for (unsigned curIndex = 1; GI != E && curIndex < cbIndexDepth;
               ++GI, ++curIndex) {
            arrayIdx = instBuilder->CreateMul(
                arrayIdx, Builder.getInt32(GI->getArrayNumElements()));
            arrayIdx = instBuilder->CreateAdd(arrayIdx, GI.getOperand());
          }
          // Remaining indices address within the element struct.
          for (; GI != E; ++GI) {
            idxList.push_back(GI.getOperand());
          }
          HandleArgs[HLOperandIndex::kCreateHandleIndexOpIdx - 1] = arrayIdx;
          CallInst *Handle =
              HLM.EmitHLOperationCall(*instBuilder,
                                      HLOpcodeGroup::HLCreateHandle, 0,
                                      HandleTy, HandleArgs, M);
          args[HLOperandIndex::kSubscriptObjectOpIdx] = Handle;
          args[HLOperandIndex::kSubscriptIndexOpIdx] = arrayIdx;
          Instruction *cbSubscript =
              cast<Instruction>(instBuilder->CreateCall(subscriptFunc, {args}));
          Instruction *NewGEP = cast<Instruction>(
              instBuilder->CreateInBoundsGEP(cbSubscript, idxList));
          ReplaceUseInFunction(GEPOp, NewGEP, &F, *instBuilder);
        }
      }
    }
    // Delete if no use in F.
    if (cbSubscript->user_empty()) {
      cbSubscript->eraseFromParent();
      Handle->eraseFromParent();
    } else {
      // merge GEP use for cbSubscript.
      HLModule::MergeGepUse(cbSubscript);
    }
  }
  return true;
}
  1792. void ConstructCBufferAnnotation(
  1793. HLCBuffer &CB, DxilTypeSystem &dxilTypeSys,
  1794. std::unordered_map<Constant *, DxilFieldAnnotation> &AnnotationMap) {
  1795. Value *GV = CB.GetGlobalSymbol();
  1796. llvm::StructType *CBStructTy =
  1797. dyn_cast<llvm::StructType>(GV->getType()->getPointerElementType());
  1798. if (!CBStructTy) {
  1799. // For Array of ConstantBuffer.
  1800. llvm::ArrayType *CBArrayTy =
  1801. cast<llvm::ArrayType>(GV->getType()->getPointerElementType());
  1802. CBStructTy = cast<llvm::StructType>(CBArrayTy->getArrayElementType());
  1803. }
  1804. DxilStructAnnotation *CBAnnotation =
  1805. dxilTypeSys.AddStructAnnotation(CBStructTy);
  1806. CBAnnotation->SetCBufferSize(CB.GetSize());
  1807. // Set fieldAnnotation for each constant var.
  1808. for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
  1809. Constant *GV = C->GetGlobalSymbol();
  1810. DxilFieldAnnotation &fieldAnnotation =
  1811. CBAnnotation->GetFieldAnnotation(C->GetID());
  1812. fieldAnnotation = AnnotationMap[GV];
  1813. // This is after CBuffer allocation.
  1814. fieldAnnotation.SetCBufferOffset(C->GetLowerBound());
  1815. fieldAnnotation.SetFieldName(C->GetGlobalName());
  1816. }
  1817. }
  1818. void ConstructCBuffer(
  1819. HLModule &HLM, llvm::Type *CBufferType,
  1820. std::unordered_map<Constant *, DxilFieldAnnotation> &AnnotationMap) {
  1821. DxilTypeSystem &dxilTypeSys = HLM.GetTypeSystem();
  1822. llvm::Type *HandleTy = HLM.GetOP()->GetHandleType();
  1823. for (unsigned i = 0; i < HLM.GetCBuffers().size(); i++) {
  1824. HLCBuffer &CB = *static_cast<HLCBuffer *>(&(HLM.GetCBuffer(i)));
  1825. if (CB.GetConstants().size() == 0) {
  1826. // Create Fake variable for cbuffer which is empty.
  1827. llvm::GlobalVariable *pGV = new llvm::GlobalVariable(
  1828. *HLM.GetModule(), CBufferType, true,
  1829. llvm::GlobalValue::ExternalLinkage, nullptr, CB.GetGlobalName());
  1830. CB.SetGlobalSymbol(pGV);
  1831. } else {
  1832. bool bCreated = CreateCBufferVariable(CB, HLM, HandleTy);
  1833. if (bCreated)
  1834. ConstructCBufferAnnotation(CB, dxilTypeSys, AnnotationMap);
  1835. else {
  1836. // Create Fake variable for cbuffer which is unused.
  1837. llvm::GlobalVariable *pGV = new llvm::GlobalVariable(
  1838. *HLM.GetModule(), CBufferType, true,
  1839. llvm::GlobalValue::ExternalLinkage, nullptr, CB.GetGlobalName());
  1840. CB.SetGlobalSymbol(pGV);
  1841. }
  1842. }
  1843. // Clear the constants which useless now.
  1844. CB.GetConstants().clear();
  1845. }
  1846. }
  1847. }
  1848. namespace CGHLSLMSHelper {
  1849. // Align cbuffer offset in legacy mode (16 bytes per row).
  1850. unsigned AlignBufferOffsetInLegacy(unsigned offset, unsigned size,
  1851. unsigned scalarSizeInBytes,
  1852. bool bNeedNewRow) {
  1853. if (unsigned remainder = (offset & 0xf)) {
  1854. // Start from new row
  1855. if (remainder + size > 16 || bNeedNewRow) {
  1856. return offset + 16 - remainder;
  1857. }
  1858. // If not, naturally align data
  1859. return RoundToAlign(offset, scalarSizeInBytes);
  1860. }
  1861. return offset;
  1862. }
// Translate RayQuery constructor. From:
// %call = call %"RayQuery<flags>" @<constructor>(%"RayQuery<flags>" %ptr)
// To:
// i32 %handle = AllocateRayQuery(i32 <IntrinsicOp::IOP_AllocateRayQuery>, i32
// %flags) %gep = GEP %"RayQuery<flags>" %ptr, 0, 0 store i32* %gep, i32
// %handle ; and replace uses of %call with %ptr
void TranslateRayQueryConstructor(HLModule &HLM) {
  llvm::Module &M = *HLM.GetModule();
  // Collect candidates first; constructors are erased below, so the module's
  // function list must not be mutated while scanning it.
  SmallVector<Function *, 4> Constructors;
  for (auto &F : M.functions()) {
    // Match templated RayQuery constructor instantiation by prefix and
    // signature. It should be impossible to achieve the same signature from
    // HLSL.
    if (!F.getName().startswith("\01??0?$RayQuery@$"))
      continue;
    llvm::Type *Ty = F.getReturnType();
    if (!Ty->isPointerTy() ||
        !dxilutil::IsHLSLRayQueryType(Ty->getPointerElementType()))
      continue;
    // Constructor takes and returns the same RayQuery pointer ("this").
    if (F.arg_size() != 1 || Ty != F.arg_begin()->getType())
      continue;
    Constructors.emplace_back(&F);
  }
  for (auto pConstructorFunc : Constructors) {
    llvm::IntegerType *i32Ty = llvm::Type::getInt32Ty(M.getContext());
    llvm::ConstantInt *i32Zero =
        llvm::ConstantInt::get(i32Ty, (uint64_t)0, false);
    // AllocateRayQuery HL intrinsic: i32(i32 opcode, i32 rayFlags).
    llvm::FunctionType *funcTy =
        llvm::FunctionType::get(i32Ty, {i32Ty, i32Ty}, false);
    unsigned opcode = (unsigned)IntrinsicOp::IOP_AllocateRayQuery;
    llvm::ConstantInt *opVal = llvm::ConstantInt::get(i32Ty, opcode, false);
    Function *opFunc =
        GetOrCreateHLFunction(M, funcTy, HLOpcodeGroup::HLIntrinsic, opcode);
    // Rewrite call sites one at a time; each iteration erases one user.
    while (!pConstructorFunc->user_empty()) {
      Value *V = *pConstructorFunc->user_begin();
      llvm::CallInst *CI = cast<CallInst>(V); // Must be call
      llvm::Value *pThis = CI->getArgOperand(0);
      llvm::StructType *pRQType =
          cast<llvm::StructType>(pThis->getType()->getPointerElementType());
      DxilStructAnnotation *SA =
          HLM.GetTypeSystem().GetStructAnnotation(pRQType);
      DXASSERT(SA, "otherwise, could not find type annoation for RayQuery "
                   "specialization");
      DXASSERT(SA->GetNumTemplateArgs() == 1 &&
                   SA->GetTemplateArgAnnotation(0).IsIntegral(),
               "otherwise, RayQuery has changed, or lacks template args");
      llvm::IRBuilder<> Builder(CI);
      // The RayQuery template argument carries the ray flags passed to
      // AllocateRayQuery.
      llvm::Value *rayFlags =
          Builder.getInt32(SA->GetTemplateArgAnnotation(0).GetIntegral());
      llvm::Value *Call =
          Builder.CreateCall(opFunc, {opVal, rayFlags}, pThis->getName());
      // Store the allocated handle into the first field of the RayQuery
      // object, then make users reference the object directly.
      llvm::Value *GEP = Builder.CreateInBoundsGEP(pThis, {i32Zero, i32Zero});
      Builder.CreateStore(Call, GEP);
      CI->replaceAllUsesWith(pThis);
      CI->eraseFromParent();
    }
    pConstructorFunc->eraseFromParent();
  }
}
  1922. }
  1923. namespace {
  1924. bool BuildImmInit(Function *Ctor) {
  1925. GlobalVariable *GV = nullptr;
  1926. SmallVector<Constant *, 4> ImmList;
  1927. bool allConst = true;
  1928. for (inst_iterator I = inst_begin(Ctor), E = inst_end(Ctor); I != E; ++I) {
  1929. if (StoreInst *SI = dyn_cast<StoreInst>(&(*I))) {
  1930. Value *V = SI->getValueOperand();
  1931. if (!isa<Constant>(V) || V->getType()->isPointerTy()) {
  1932. allConst = false;
  1933. break;
  1934. }
  1935. ImmList.emplace_back(cast<Constant>(V));
  1936. Value *Ptr = SI->getPointerOperand();
  1937. if (GEPOperator *GepOp = dyn_cast<GEPOperator>(Ptr)) {
  1938. Ptr = GepOp->getPointerOperand();
  1939. if (GlobalVariable *pGV = dyn_cast<GlobalVariable>(Ptr)) {
  1940. if (GV == nullptr)
  1941. GV = pGV;
  1942. else {
  1943. DXASSERT(GV == pGV, "else pointer mismatch");
  1944. }
  1945. }
  1946. }
  1947. } else {
  1948. if (!isa<ReturnInst>(*I)) {
  1949. allConst = false;
  1950. break;
  1951. }
  1952. }
  1953. }
  1954. if (!allConst)
  1955. return false;
  1956. if (!GV)
  1957. return false;
  1958. llvm::Type *Ty = GV->getType()->getElementType();
  1959. llvm::ArrayType *AT = dyn_cast<llvm::ArrayType>(Ty);
  1960. // TODO: support other types.
  1961. if (!AT)
  1962. return false;
  1963. if (ImmList.size() != AT->getNumElements())
  1964. return false;
  1965. Constant *Init = llvm::ConstantArray::get(AT, ImmList);
  1966. GV->setInitializer(Init);
  1967. return true;
  1968. }
  1969. } // namespace
  1970. namespace CGHLSLMSHelper {
  1971. void ProcessCtorFunctions(llvm::Module &M, StringRef globalName,
  1972. Instruction *InsertPt) {
  1973. // add global call to entry func
  1974. GlobalVariable *GV = M.getGlobalVariable(globalName);
  1975. if (!GV)
  1976. return;
  1977. ConstantArray *CA = dyn_cast<ConstantArray>(GV->getInitializer());
  1978. if (!CA)
  1979. return;
  1980. IRBuilder<> Builder(InsertPt);
  1981. for (User::op_iterator i = CA->op_begin(), e = CA->op_end(); i != e; ++i) {
  1982. if (isa<ConstantAggregateZero>(*i))
  1983. continue;
  1984. ConstantStruct *CS = cast<ConstantStruct>(*i);
  1985. if (isa<ConstantPointerNull>(CS->getOperand(1)))
  1986. continue;
  1987. // Must have a function or null ptr.
  1988. if (!isa<Function>(CS->getOperand(1)))
  1989. continue;
  1990. Function *Ctor = cast<Function>(CS->getOperand(1));
  1991. DXASSERT(Ctor->getReturnType()->isVoidTy() && Ctor->arg_size() == 0,
  1992. "function type must be void (void)");
  1993. for (inst_iterator I = inst_begin(Ctor), E = inst_end(Ctor); I != E; ++I) {
  1994. if (CallInst *CI = dyn_cast<CallInst>(&(*I))) {
  1995. Function *F = CI->getCalledFunction();
  1996. // Try to build imm initilizer.
  1997. // If not work, add global call to entry func.
  1998. if (BuildImmInit(F) == false) {
  1999. Builder.CreateCall(F);
  2000. }
  2001. } else {
  2002. DXASSERT(isa<ReturnInst>(&(*I)),
  2003. "else invalid Global constructor function");
  2004. }
  2005. }
  2006. }
  2007. // remove the GV
  2008. GV->eraseFromParent();
  2009. }
// Finalize constant buffers for the module: allocate register ranges for
// each cbuffer, then emit the cbuffer global variables and their type
// annotations. constVarAnnotationMap maps each constant's global symbol to
// its field annotation.
void FinishCBuffer(
    HLModule &HLM, llvm::Type *CBufferType,
    std::unordered_map<Constant *, DxilFieldAnnotation> &constVarAnnotationMap) {
  // Allocate constant buffers.
  AllocateDxilConstantBuffers(HLM, constVarAnnotationMap);
  // TODO: create temp variable for constant which has store use.
  // Create Global variable and type annotation for each CBuffer.
  ConstructCBuffer(HLM, CBufferType, constVarAnnotationMap);
}
  2019. void AddRegBindingsForResourceInConstantBuffer(
  2020. HLModule &HLM,
  2021. llvm::DenseMap<llvm::Constant *,
  2022. llvm::SmallVector<std::pair<DXIL::ResourceClass, unsigned>,
  2023. 1>> &constantRegBindingMap) {
  2024. for (unsigned i = 0; i < HLM.GetCBuffers().size(); i++) {
  2025. HLCBuffer &CB = *static_cast<HLCBuffer *>(&(HLM.GetCBuffer(i)));
  2026. auto &Constants = CB.GetConstants();
  2027. for (unsigned j = 0; j < Constants.size(); j++) {
  2028. const std::unique_ptr<DxilResourceBase> &C = Constants[j];
  2029. Constant *CGV = C->GetGlobalSymbol();
  2030. auto &regBindings = constantRegBindingMap[CGV];
  2031. if (regBindings.empty())
  2032. continue;
  2033. unsigned Srv = UINT_MAX;
  2034. unsigned Uav = UINT_MAX;
  2035. unsigned Sampler = UINT_MAX;
  2036. for (auto it : regBindings) {
  2037. unsigned RegNum = it.second;
  2038. switch (it.first) {
  2039. case DXIL::ResourceClass::SRV:
  2040. Srv = RegNum;
  2041. break;
  2042. case DXIL::ResourceClass::UAV:
  2043. Uav = RegNum;
  2044. break;
  2045. case DXIL::ResourceClass::Sampler:
  2046. Sampler = RegNum;
  2047. break;
  2048. default:
  2049. DXASSERT(0, "invalid resource class");
  2050. break;
  2051. }
  2052. }
  2053. HLM.AddRegBinding(CB.GetID(), j, Srv, Uav, Sampler);
  2054. }
  2055. }
  2056. }
  2057. // extension codegen.
  2058. void ExtensionCodeGen(HLModule &HLM, clang::CodeGen::CodeGenModule &CGM) {
  2059. // Add semantic defines for extensions if any are available.
  2060. HLSLExtensionsCodegenHelper::SemanticDefineErrorList errors =
  2061. CGM.getCodeGenOpts().HLSLExtensionsCodegen->WriteSemanticDefines(
  2062. HLM.GetModule());
  2063. clang::DiagnosticsEngine &Diags = CGM.getDiags();
  2064. for (const HLSLExtensionsCodegenHelper::SemanticDefineError &error : errors) {
  2065. clang::DiagnosticsEngine::Level level = clang::DiagnosticsEngine::Error;
  2066. if (error.IsWarning())
  2067. level = clang::DiagnosticsEngine::Warning;
  2068. unsigned DiagID = Diags.getCustomDiagID(level, "%0");
  2069. Diags.Report(clang::SourceLocation::getFromRawEncoding(error.Location()),
  2070. DiagID)
  2071. << error.Message();
  2072. }
  2073. // Add root signature from a #define. Overrides root signature in function
  2074. // attribute.
  2075. {
  2076. using Status = HLSLExtensionsCodegenHelper::CustomRootSignature::Status;
  2077. HLSLExtensionsCodegenHelper::CustomRootSignature customRootSig;
  2078. HLSLExtensionsCodegenHelper::CustomRootSignature::Status status =
  2079. CGM.getCodeGenOpts().HLSLExtensionsCodegen->GetCustomRootSignature(
  2080. &customRootSig);
  2081. if (status == Status::FOUND) {
  2082. DxilRootSignatureVersion rootSigVer;
  2083. // set root signature version.
  2084. if (CGM.getLangOpts().RootSigMinor == 0) {
  2085. rootSigVer = hlsl::DxilRootSignatureVersion::Version_1_0;
  2086. } else {
  2087. DXASSERT(CGM.getLangOpts().RootSigMinor == 1,
  2088. "else CGMSHLSLRuntime Constructor needs to be updated");
  2089. rootSigVer = hlsl::DxilRootSignatureVersion::Version_1_1;
  2090. }
  2091. RootSignatureHandle RootSigHandle;
  2092. CompileRootSignature(
  2093. customRootSig.RootSignature, Diags,
  2094. clang::SourceLocation::getFromRawEncoding(
  2095. customRootSig.EncodedSourceLocation),
  2096. rootSigVer, DxilRootSignatureCompilationFlags::GlobalRootSignature,
  2097. &RootSigHandle);
  2098. if (!RootSigHandle.IsEmpty()) {
  2099. RootSigHandle.EnsureSerializedAvailable();
  2100. HLM.SetSerializedRootSignature(RootSigHandle.GetSerializedBytes(),
  2101. RootSigHandle.GetSerializedSize());
  2102. }
  2103. }
  2104. }
  2105. }
  2106. } // namespace CGHLSLMSHelper
  2107. namespace {
  2108. void ReportDisallowedTypeInExportParam(clang::CodeGen ::CodeGenModule &CGM,
  2109. StringRef name) {
  2110. clang::DiagnosticsEngine &Diags = CGM.getDiags();
  2111. unsigned DiagID =
  2112. Diags.getCustomDiagID(clang::DiagnosticsEngine::Error,
  2113. "Exported function %0 must not contain a "
  2114. "resource in parameter or return type.");
  2115. std::string escaped;
  2116. llvm::raw_string_ostream os(escaped);
  2117. dxilutil::PrintEscapedString(name, os);
  2118. Diags.Report(DiagID) << os.str();
  2119. }
  2120. } // namespace
  2121. namespace CGHLSLMSHelper {
  2122. void FinishClipPlane(HLModule &HLM, std::vector<Function *> &clipPlaneFuncList,
  2123. std::unordered_map<Value *, DebugLoc> &debugInfoMap,
  2124. clang::CodeGen::CodeGenModule &CGM) {
  2125. bool bDebugInfo = CGM.getCodeGenOpts().getDebugInfo() ==
  2126. clang::CodeGenOptions::FullDebugInfo;
  2127. Module &M = *HLM.GetModule();
  2128. for (Function *F : clipPlaneFuncList) {
  2129. DxilFunctionProps &props = HLM.GetDxilFunctionProps(F);
  2130. IRBuilder<> Builder(F->getEntryBlock().getFirstInsertionPt());
  2131. for (unsigned i = 0; i < DXIL::kNumClipPlanes; i++) {
  2132. Value *clipPlane = props.ShaderProps.VS.clipPlanes[i];
  2133. if (!clipPlane)
  2134. continue;
  2135. if (bDebugInfo) {
  2136. Builder.SetCurrentDebugLocation(debugInfoMap[clipPlane]);
  2137. }
  2138. llvm::Type *Ty = clipPlane->getType()->getPointerElementType();
  2139. // Constant *zeroInit = ConstantFP::get(Ty, 0);
  2140. GlobalVariable *GV = new llvm::GlobalVariable(
  2141. M, Ty, /*IsConstant*/ false, // constant false to store.
  2142. llvm::GlobalValue::ExternalLinkage,
  2143. /*InitVal*/ nullptr, Twine("SV_ClipPlane") + Twine(i));
  2144. Value *initVal = Builder.CreateLoad(clipPlane);
  2145. Builder.CreateStore(initVal, GV);
  2146. props.ShaderProps.VS.clipPlanes[i] = GV;
  2147. }
  2148. }
  2149. }
  2150. } // namespace
  2151. namespace {
// Apply export directives for library targets: register implicit exports
// (hull-shader patch-constant functions), match export directives against
// module functions, diagnose name collisions and unmatched exports, and
// finally rename/clone functions according to the computed rename sets.
void LowerExportFunctions(HLModule &HLM, clang::CodeGen::CodeGenModule &CGM,
                          dxilutil::ExportMap &exportMap,
                          StringMap<EntryFunctionInfo> &entryFunctionMap) {
  bool bIsLib = HLM.GetShaderModel()->IsLib();
  Module &M = *HLM.GetModule();
  if (bIsLib && !exportMap.empty()) {
    // A hull-shader entry implicitly exports its patch-constant function.
    for (auto &it : entryFunctionMap) {
      if (HLM.HasDxilFunctionProps(it.second.Func)) {
        const DxilFunctionProps &props =
            HLM.GetDxilFunctionProps(it.second.Func);
        if (props.IsHS())
          exportMap.RegisterExportedFunction(
              props.ShaderProps.HS.patchConstantFunc);
      }
    }
  }
  if (bIsLib && !exportMap.empty()) {
    exportMap.BeginProcessing();
    // Only user-defined functions with bodies participate in export
    // matching; intrinsics and HL helper functions are skipped.
    for (Function &f : M.functions()) {
      if (f.isDeclaration() || f.isIntrinsic() ||
          GetHLOpcodeGroup(&f) != HLOpcodeGroup::NotHL)
        continue;
      exportMap.ProcessFunction(&f, true);
    }
    // TODO: add subobject export names here.
    if (!exportMap.EndProcessing()) {
      // Report every export name claimed by more than one function.
      for (auto &name : exportMap.GetNameCollisions()) {
        clang::DiagnosticsEngine &Diags = CGM.getDiags();
        unsigned DiagID = Diags.getCustomDiagID(
            clang::DiagnosticsEngine::Error,
            "Export name collides with another export: %0");
        std::string escaped;
        llvm::raw_string_ostream os(escaped);
        dxilutil::PrintEscapedString(name, os);
        Diags.Report(DiagID) << os.str();
      }
      // Report every export directive that matched no function.
      for (auto &name : exportMap.GetUnusedExports()) {
        clang::DiagnosticsEngine &Diags = CGM.getDiags();
        unsigned DiagID =
            Diags.getCustomDiagID(clang::DiagnosticsEngine::Error,
                                  "Could not find target for export: %0");
        std::string escaped;
        llvm::raw_string_ostream os(escaped);
        dxilutil::PrintEscapedString(name, os);
        Diags.Report(DiagID) << os.str();
      }
    }
  }
  // Apply the renames: the original keeps (or takes) one name, and each
  // additional name gets a clone of the function.
  for (auto &it : exportMap.GetFunctionRenames()) {
    Function *F = it.first;
    auto &renames = it.second;
    if (renames.empty())
      continue;
    // Rename the original, if necessary, then clone the rest
    if (renames.find(F->getName()) == renames.end())
      F->setName(*renames.begin());
    for (auto &itName : renames) {
      if (F->getName() != itName) {
        Function *pClone = CloneFunction(F, itName, &M, HLM.GetTypeSystem(),
                                         HLM.GetTypeSystem());
        // add DxilFunctionProps if entry
        if (HLM.HasDxilFunctionProps(F)) {
          DxilFunctionProps &props = HLM.GetDxilFunctionProps(F);
          auto newProps = llvm::make_unique<DxilFunctionProps>(props);
          HLM.AddDxilFunctionProps(pClone, newProps);
        }
      }
    }
  }
}
  2222. void CheckResourceParameters(HLModule &HLM,
  2223. clang::CodeGen::CodeGenModule &CGM) {
  2224. Module &M = *HLM.GetModule();
  2225. for (Function &f : M.functions()) {
  2226. // Skip llvm intrinsics, non-external linkage, entry/patch constant func,
  2227. // and HL intrinsics
  2228. if (!f.isIntrinsic() &&
  2229. f.getLinkage() == GlobalValue::LinkageTypes::ExternalLinkage &&
  2230. !HLM.HasDxilFunctionProps(&f) && !HLM.IsPatchConstantShader(&f) &&
  2231. GetHLOpcodeGroup(&f) == HLOpcodeGroup::NotHL) {
  2232. // Verify no resources in param/return types
  2233. if (dxilutil::ContainsHLSLObjectType(f.getReturnType())) {
  2234. ReportDisallowedTypeInExportParam(CGM, f.getName());
  2235. continue;
  2236. }
  2237. for (auto &Arg : f.args()) {
  2238. if (dxilutil::ContainsHLSLObjectType(Arg.getType())) {
  2239. ReportDisallowedTypeInExportParam(CGM, f.getName());
  2240. break;
  2241. }
  2242. }
  2243. }
  2244. }
  2245. }
  2246. } // namespace
  2247. namespace CGHLSLMSHelper {
// Finalize linkage across the module. For non-library targets the entry,
// patch-constant function, and declarations stay external and everything
// else becomes internal (a body-less user function is an error). Used
// functions with bodies are marked AlwaysInline unless NoInline. Export
// renames are applied, /export-shaders-only internalizes non-shader
// functions, hull-shader patch-constant functions are forced external, and
// resource-typed export signatures are diagnosed for non-offline libraries.
void UpdateLinkage(HLModule &HLM, clang::CodeGen::CodeGenModule &CGM,
                   dxilutil::ExportMap &exportMap,
                   StringMap<EntryFunctionInfo> &entryFunctionMap,
                   StringMap<PatchConstantInfo> &patchConstantFunctionMap) {
  bool bIsLib = HLM.GetShaderModel()->IsLib();
  Module &M = *HLM.GetModule();
  // Pin entry point and constant buffers, mark everything else internal.
  for (Function &f : M.functions()) {
    if (!bIsLib) {
      if (&f == HLM.GetEntryFunction() ||
          IsPatchConstantFunction(&f, patchConstantFunctionMap) ||
          f.isDeclaration()) {
        // A declaration that is neither an intrinsic nor an HL helper is an
        // unresolved external — not allowed outside library profiles.
        if (f.isDeclaration() && !f.isIntrinsic() &&
            GetHLOpcodeGroup(&f) == HLOpcodeGroup::NotHL) {
          clang::DiagnosticsEngine &Diags = CGM.getDiags();
          unsigned DiagID = Diags.getCustomDiagID(
              clang::DiagnosticsEngine::Error,
              "External function used in non-library profile: %0");
          std::string escaped;
          llvm::raw_string_ostream os(escaped);
          dxilutil::PrintEscapedString(f.getName(), os);
          Diags.Report(DiagID) << os.str();
          return;
        }
        f.setLinkage(GlobalValue::LinkageTypes::ExternalLinkage);
      } else {
        f.setLinkage(GlobalValue::LinkageTypes::InternalLinkage);
      }
    }
    // Skip no inline functions.
    if (f.hasFnAttribute(llvm::Attribute::NoInline))
      continue;
    // Always inline for used functions.
    if (!f.user_empty() && !f.isDeclaration())
      f.addFnAttr(llvm::Attribute::AlwaysInline);
  }
  LowerExportFunctions(HLM, CGM, exportMap, entryFunctionMap);
  if (CGM.getCodeGenOpts().ExportShadersOnly) {
    for (Function &f : M.functions()) {
      // Skip declarations, intrinsics, shaders, and non-external linkage
      if (f.isDeclaration() || f.isIntrinsic() ||
          GetHLOpcodeGroup(&f) != HLOpcodeGroup::NotHL ||
          HLM.HasDxilFunctionProps(&f) || HLM.IsPatchConstantShader(&f) ||
          f.getLinkage() != GlobalValue::LinkageTypes::ExternalLinkage)
        continue;
      // Mark non-shader user functions as InternalLinkage
      f.setLinkage(GlobalValue::LinkageTypes::InternalLinkage);
    }
  }
  // Now iterate hull shaders and make sure their corresponding patch constant
  // functions are marked ExternalLinkage:
  for (Function &f : M.functions()) {
    if (f.isDeclaration() || f.isIntrinsic() ||
        GetHLOpcodeGroup(&f) != HLOpcodeGroup::NotHL ||
        f.getLinkage() != GlobalValue::LinkageTypes::ExternalLinkage ||
        !HLM.HasDxilFunctionProps(&f))
      continue;
    DxilFunctionProps &props = HLM.GetDxilFunctionProps(&f);
    if (!props.IsHS())
      continue;
    Function *PCFunc = props.ShaderProps.HS.patchConstantFunc;
    if (PCFunc->getLinkage() != GlobalValue::LinkageTypes::ExternalLinkage)
      PCFunc->setLinkage(GlobalValue::LinkageTypes::ExternalLinkage);
  }
  // Disallow resource arguments in (non-entry) function exports
  // unless offline linking target.
  if (bIsLib &&
      HLM.GetShaderModel()->GetMinor() != ShaderModel::kOfflineMinor) {
    CheckResourceParameters(HLM, CGM);
  }
}
// Finalize shader entry points.
// Non-library targets: set the single entry function, optionally create
// write-enabled statics for /Gec back-compat, and bind the hull shader's
// patch-constant function. Library targets: clone each entry (raytracing
// entries excepted) under its export name and bind patch-constant functions
// per entry.
void FinishEntries(
    HLModule &HLM, const EntryFunctionInfo &Entry,
    clang::CodeGen::CodeGenModule &CGM,
    StringMap<EntryFunctionInfo> &entryFunctionMap,
    std::unordered_map<Function *, const clang::HLSLPatchConstantFuncAttr *>
        &HSEntryPatchConstantFuncAttr,
    StringMap<PatchConstantInfo> &patchConstantFunctionMap,
    std::unordered_map<Function *, std::unique_ptr<DxilFunctionProps>>
        &patchConstantFunctionPropsMap) {
  bool bIsLib = HLM.GetShaderModel()->IsLib();
  // Library don't have entry.
  if (!bIsLib) {
    SetEntryFunction(HLM, Entry.Func, CGM);
    // If at this point we haven't determined the entry function it's an error.
    if (HLM.GetEntryFunction() == nullptr) {
      assert(CGM.getDiags().hasErrorOccurred() &&
             "else SetEntryFunction should have reported this condition");
      return;
    }
    // In back-compat mode (with /Gec flag) create a static global for each
    // const global to allow writing to it.
    // TODO: Verify the behavior of static globals in hull shader
    if (CGM.getLangOpts().EnableDX9CompatMode &&
        CGM.getLangOpts().HLSLVersion <= 2016)
      CreateWriteEnabledStaticGlobals(HLM.GetModule(), HLM.GetEntryFunction());
    if (HLM.GetShaderModel()->IsHS()) {
      SetPatchConstantFunction(Entry, HSEntryPatchConstantFuncAttr,
                               patchConstantFunctionMap,
                               patchConstantFunctionPropsMap, HLM, CGM);
    }
  } else {
    for (auto &it : entryFunctionMap) {
      // skip clone if RT entry
      if (HLM.GetDxilFunctionProps(it.second.Func).IsRay())
        continue;
      // TODO: change flattened function names to dx.entry.<name>:
      // std::string entryName = (Twine(dxilutil::EntryPrefix) +
      // it.getKey()).str();
      CloneShaderEntry(it.second.Func, it.getKey(), HLM);
      // Bind the patch-constant function for hull-shader entries.
      auto AttrIter = HSEntryPatchConstantFuncAttr.find(it.second.Func);
      if (AttrIter != HSEntryPatchConstantFuncAttr.end()) {
        SetPatchConstantFunctionWithAttr(
            it.second, AttrIter->second, patchConstantFunctionMap,
            patchConstantFunctionPropsMap, HLM, CGM);
      }
    }
  }
}
  2367. } // namespace
  2368. namespace CGHLSLMSHelper {
// Finalize HL intrinsic calls: lower getResourceFromHeap first (so it is not
// caught by the generic opcode lowering), then fold each intrinsic's opcode
// into its call as an explicit parameter. Ordering relative to entry cloning
// matters — see the comments below.
void FinishIntrinsics(
    HLModule &HLM, std::vector<std::pair<Function *, unsigned>> &intrinsicMap,
    DenseMap<Value *, DxilResourceProperties> &valToResPropertiesMap) {
  // Lower getResourceHeap before AddOpcodeParamForIntrinsics to skip automatic
  // lower for getResourceFromHeap.
  LowerGetResourceFromHeap(HLM, intrinsicMap);
  // translate opcode into parameter for intrinsic functions
  // Do this before CloneShaderEntry and TranslateRayQueryConstructor to avoid
  // update valToResPropertiesMap for cloned inst.
  AddOpcodeParamForIntrinsics(HLM, intrinsicMap, valToResPropertiesMap);
}
  2380. // Add the dx.break temporary intrinsic and create Call Instructions
  2381. // to it for each branch that requires the artificial conditional.
  2382. void AddDxBreak(Module &M, const SmallVector<llvm::BranchInst*, 16> &DxBreaks) {
  2383. if (DxBreaks.empty())
  2384. return;
  2385. // Collect functions that make use of any wave operations
  2386. // Only they will need the dx.break condition added
  2387. SmallPtrSet<Function *, 16> WaveUsers;
  2388. for (Function &F : M.functions()) {
  2389. HLOpcodeGroup opgroup = hlsl::GetHLOpcodeGroup(&F);
  2390. if (F.isDeclaration() && IsHLWaveSensitive(&F) &&
  2391. (opgroup == HLOpcodeGroup::HLIntrinsic || opgroup == HLOpcodeGroup::HLExtIntrinsic)) {
  2392. for (User *U : F.users()) {
  2393. CallInst *CI = cast<CallInst>(U);
  2394. WaveUsers.insert(CI->getParent()->getParent());
  2395. }
  2396. }
  2397. }
  2398. // If there are no wave users, not even the function declaration is needed
  2399. if (WaveUsers.empty())
  2400. return;
  2401. // Create the dx.break function
  2402. FunctionType *FT = llvm::FunctionType::get(llvm::Type::getInt1Ty(M.getContext()), false);
  2403. Function *func = cast<llvm::Function>(M.getOrInsertFunction(DXIL::kDxBreakFuncName, FT));
  2404. func->addFnAttr(Attribute::AttrKind::NoUnwind);
  2405. // For all break branches recorded previously, if the function they are in makes
  2406. // any use of a wave op, it may need to be artificially conditional. Make it so now.
  2407. // The CleanupDxBreak pass will remove those that aren't needed when more is known.
  2408. for(llvm::BranchInst *BI : DxBreaks) {
  2409. if (WaveUsers.count(BI->getParent()->getParent())) {
  2410. CallInst *Call = CallInst::Create(FT, func, ArrayRef<Value *>(), "", BI);
  2411. BI->setCondition(Call);
  2412. if (!BI->getMetadata(DXIL::kDxBreakMDName)) {
  2413. BI->setMetadata(DXIL::kDxBreakMDName, llvm::MDNode::get(BI->getContext(), {}));
  2414. }
  2415. }
  2416. }
  2417. }
  2418. }
  2419. namespace CGHLSLMSHelper {
  2420. ScopeInfo::ScopeInfo(Function *F) : maxRetLevel(0), bAllReturnsInIf(true) {
  2421. Scope FuncScope;
  2422. FuncScope.kind = Scope::ScopeKind::FunctionScope;
  2423. FuncScope.EndScopeBB = nullptr;
  2424. FuncScope.bWholeScopeReturned = false;
  2425. // Make it 0 to avoid check when get parent.
  2426. // All loop on scopes should check kind != FunctionScope.
  2427. FuncScope.parentScopeIndex = 0;
  2428. scopes.emplace_back(FuncScope);
  2429. scopeStack.emplace_back(0);
  2430. }
// When every return sits inside an if that is not nested, the flow is still
// structurized even when there is more than one return.
  2433. bool ScopeInfo::CanSkipStructurize() {
  2434. return bAllReturnsInIf && maxRetLevel < 2;
  2435. }
  2436. void ScopeInfo::AddScope(Scope::ScopeKind k, BasicBlock *endScopeBB) {
  2437. Scope Scope;
  2438. Scope.kind = k;
  2439. Scope.bWholeScopeReturned = false;
  2440. Scope.EndScopeBB = endScopeBB;
  2441. Scope.parentScopeIndex = scopeStack.back();
  2442. scopeStack.emplace_back(scopes.size());
  2443. scopes.emplace_back(Scope);
  2444. }
// Open an if scope; endIfBB is the join block after the if statement.
void ScopeInfo::AddIf(BasicBlock *endIfBB) {
  AddScope(Scope::ScopeKind::IfScope, endIfBB);
}
// Open a switch scope; endSwitch is the block following the switch.
void ScopeInfo::AddSwitch(BasicBlock *endSwitch) {
  AddScope(Scope::ScopeKind::SwitchScope, endSwitch);
}
// Open a loop scope; endLoop is the block after the loop, and the loop's
// continue target is recorded for break/continue lowering.
void ScopeInfo::AddLoop(BasicBlock *loopContinue, BasicBlock *endLoop) {
  AddScope(Scope::ScopeKind::LoopScope, endLoop);
  scopes.back().loopContinueBB = loopContinue;
}
// Record a return statement (located in bbWithRet) as a pseudo "return
// scope". Tracks the nesting depth of returns and whether any return sits
// inside a loop/switch, which decides if structurization can be skipped.
void ScopeInfo::AddRet(BasicBlock *bbWithRet) {
  Scope RetScope;
  RetScope.kind = Scope::ScopeKind::ReturnScope;
  RetScope.EndScopeBB = bbWithRet;
  RetScope.parentScopeIndex = scopeStack.back();
  // - 1 for function scope which is at scopeStack[0].
  unsigned retLevel = scopeStack.size() - 1;
  // save max nested level for ret.
  maxRetLevel = std::max<unsigned>(maxRetLevel, retLevel);
  bool bGotLoopOrSwitch = false;
  // Walk from the innermost scope outward; a return inside a loop or switch
  // is reparented to that scope so it can later be lowered as a break.
  for (auto it = scopeStack.rbegin(); it != scopeStack.rend(); it++) {
    unsigned idx = *it;
    Scope &S = scopes[idx];
    switch (S.kind) {
    default:
      break;
    case Scope::ScopeKind::LoopScope:
    case Scope::ScopeKind::SwitchScope:
      bGotLoopOrSwitch = true;
      // For return inside loop and switch, can just break.
      RetScope.parentScopeIndex = idx;
      break;
    }
    if (bGotLoopOrSwitch)
      break;
  }
  // Any return inside a loop/switch disqualifies the all-returns-in-if
  // fast path (see CanSkipStructurize).
  bAllReturnsInIf &= !bGotLoopOrSwitch;
  // return finish current scope.
  RetScope.bWholeScopeReturned = true;
  // save retScope to rets.
  rets.emplace_back(scopes.size());
  scopes.emplace_back(RetScope);
  // Don't need to put retScope to stack since it cannot nested other scopes.
}
// Close the innermost scope. bScopeFinishedWithRet indicates the scope's
// last statement ended in a return; combined with an unused end block this
// means the entire scope returned.
void ScopeInfo::EndScope(bool bScopeFinishedWithRet) {
  unsigned idx = scopeStack.pop_back_val();
  Scope &Scope = GetScope(idx);
  // If whole stmt is finished and end scope bb has not used(nothing branch to
  // it). Then the whole scope is returned.
  // NOTE(review): assumes EndScopeBB is non-null, i.e. this is never called
  // for the function scope (whose EndScopeBB is null) — confirm with callers.
  Scope.bWholeScopeReturned =
      bScopeFinishedWithRet && Scope.EndScopeBB->user_empty();
}
// Direct index into the scope list; indexes stay valid because scopes only
// ever grows.
Scope &ScopeInfo::GetScope(unsigned i) { return scopes[i]; }
  2498. void ScopeInfo::LegalizeWholeReturnedScope() {
  2499. // legalize scopes which whole scope returned.
  2500. // When whole scope is returned, the endScopeBB will be deleted in codeGen.
  2501. // Here update it to parent scope's endScope.
  2502. // Since the scopes are in order, so it will automatic update to the final
  2503. // target. A->B->C will just get A->C.
  2504. for (auto &S : scopes) {
  2505. if (S.bWholeScopeReturned && S.kind != Scope::ScopeKind::ReturnScope) {
  2506. S.EndScopeBB = scopes[S.parentScopeIndex].EndScopeBB;
  2507. }
  2508. }
  2509. }
  2510. } // namespace CGHLSLMSHelper
  2511. namespace {
  2512. void updateEndScope(
  2513. ScopeInfo &ScopeInfo,
  2514. DenseMap<BasicBlock *, SmallVector<unsigned, 2>> &EndBBToScopeIndexMap,
  2515. BasicBlock *oldEndScope, BasicBlock *newEndScope) {
  2516. auto it = EndBBToScopeIndexMap.find(oldEndScope);
  2517. DXASSERT(it != EndBBToScopeIndexMap.end(),
  2518. "fail to find endScopeBB in EndBBToScopeIndexMap");
  2519. SmallVector<unsigned, 2> &scopeList = it->second;
  2520. // Don't need to update when not share endBB with other scope.
  2521. if (scopeList.size() < 2)
  2522. return;
  2523. for (unsigned i : scopeList) {
  2524. Scope &S = ScopeInfo.GetScope(i);
  2525. // Don't update return endBB, because that is the Block has return branch.
  2526. if (S.kind != Scope::ScopeKind::ReturnScope)
  2527. S.EndScopeBB = newEndScope;
  2528. }
  2529. EndBBToScopeIndexMap[newEndScope] = scopeList;
  2530. }
// Initialize the return value with undef so it does not stay live across
// loops in callers.
// Structurized returns steer control flow through bIsReturned. The semantics
// match multiple returns, but without knowledge of bIsReturned some paths of
// the structurized flow would leave the return value uninitialized.
// When the function is called inside a loop, that return value would then
// live across the loop after inlining.
  2539. void InitRetValue(BasicBlock *exitBB) {
  2540. Value *RetValPtr = nullptr;
  2541. if (ReturnInst *RI = dyn_cast<ReturnInst>(exitBB->getTerminator())) {
  2542. if (Value *RetV = RI->getReturnValue()) {
  2543. if (LoadInst *LI = dyn_cast<LoadInst>(RetV)) {
  2544. RetValPtr = LI->getPointerOperand();
  2545. }
  2546. }
  2547. }
  2548. if (!RetValPtr)
  2549. return;
  2550. if (AllocaInst *RetVAlloc = dyn_cast<AllocaInst>(RetValPtr)) {
  2551. IRBuilder<> B(RetVAlloc->getNextNode());
  2552. Type *Ty = RetVAlloc->getAllocatedType();
  2553. Value *Init = UndefValue::get(Ty);
  2554. if (Ty->isAggregateType()) {
  2555. // TODO: support aggreagate type and out parameters.
  2556. // Skip it here will cause undef on phi which the incoming path should never hit.
  2557. } else {
  2558. B.CreateStore(Init, RetVAlloc);
  2559. }
  2560. }
  2561. }
  2562. // For functions has multiple returns like
  2563. // float foo(float a, float b, float c) {
  2564. // float r = c;
  2565. // if (a > 0) {
  2566. // if (b > 0) {
  2567. // return -1;
  2568. // }
  2569. // ***
  2570. // }
  2571. // ...
  2572. // return r;
  2573. // }
  2574. // transform into
  2575. // float foo(float a, float b, float c) {
  2576. // bool bRet = false;
  2577. // float retV;
  2578. // float r = c;
  2579. // if (a > 0) {
  2580. // if (b > 0) {
  2581. // bRet = true;
  2582. // retV = -1;
  2583. // }
  2584. // if (!bRet) {
  2585. // ***
  2586. // }
  2587. // }
  2588. // if (!bRet) {
  2589. // ...
  2590. // retV = r;
  2591. // }
// return retV;
  2593. // }
// Structurize a function with multiple returns (see the worked example
// above): a bReturned flag is allocated in the entry block, each return site
// sets it, and every region that follows a possibly-returned scope is
// guarded on the flag so control flows to a single exit.
void StructurizeMultiRetFunction(Function *F, ScopeInfo &ScopeInfo,
                                 bool bWaveEnabledStage,
                                 SmallVector<BranchInst *, 16> &DxBreaks) {
  if (ScopeInfo.CanSkipStructurize())
    return;
  // Get bbWithRets.
  auto &rets = ScopeInfo.GetRetScopes();
  IRBuilder<> B(F->getEntryBlock().begin());
  Scope &FunctionScope = ScopeInfo.GetScope(0);
  Type *boolTy = Type::getInt1Ty(F->getContext());
  Constant *cTrue = ConstantInt::get(boolTy, 1);
  Constant *cFalse = ConstantInt::get(boolTy, 0);
  // bool bIsReturned = false;
  AllocaInst *bIsReturned = B.CreateAlloca(boolTy, nullptr, "bReturned");
  B.CreateStore(cFalse, bIsReturned);
  // The single exit block is the successor of the first return's block; it
  // becomes the function scope's end block.
  Scope &RetScope = ScopeInfo.GetScope(rets[0]);
  BasicBlock *exitBB = RetScope.EndScopeBB->getTerminator()->getSuccessor(0);
  FunctionScope.EndScopeBB = exitBB;
  // Find alloca for return val and init it to avoid undef after guard code with
  // bIsReturned.
  InitRetValue(exitBB);
  ScopeInfo.LegalizeWholeReturnedScope();
  // Map from endScopeBB to scope index.
  // When 2 scopes share same endScopeBB, need to update endScopeBB after
  // structurize.
  DenseMap<BasicBlock *, SmallVector<unsigned, 2>> EndBBToScopeIndexMap;
  auto &scopes = ScopeInfo.GetScopes();
  for (unsigned i = 0; i < scopes.size(); i++) {
    Scope &S = scopes[i];
    EndBBToScopeIndexMap[S.EndScopeBB].emplace_back(i);
  }
  // Walk from each return scope up to the function scope, guarding each
  // partially-returned ancestor exactly once.
  DenseSet<unsigned> guardedSet;
  for (auto it = rets.begin(); it != rets.end(); it++) {
    unsigned scopeIndex = *it;
    Scope *pCurScope = &ScopeInfo.GetScope(scopeIndex);
    Scope *pRetParentScope = &ScopeInfo.GetScope(pCurScope->parentScopeIndex);
    // skip ret not in nested control flow.
    if (pRetParentScope->kind == Scope::ScopeKind::FunctionScope)
      continue;
    do {
      BasicBlock *BB = pCurScope->EndScopeBB;
      // exit when scope is processed.
      if (guardedSet.count(scopeIndex))
        break;
      guardedSet.insert(scopeIndex);
      Scope *pParentScope = &ScopeInfo.GetScope(pCurScope->parentScopeIndex);
      BasicBlock *EndBB = pParentScope->EndScopeBB;
      // When whole scope returned, just branch to endScope of parent.
      if (pCurScope->bWholeScopeReturned) {
        // For ret, just branch to endScope of parent.
        if (pCurScope->kind == Scope::ScopeKind::ReturnScope) {
          BasicBlock *retBB = pCurScope->EndScopeBB;
          TerminatorInst *retBr = retBB->getTerminator();
          IRBuilder<> B(retBr);
          // Set bReturned to true.
          B.CreateStore(cTrue, bIsReturned);
          if (bWaveEnabledStage &&
              pParentScope->kind == Scope::ScopeKind::LoopScope) {
            // In wave-enabled stages the break out of a loop gets a
            // conditional branch that AddDxBreak may later tie to dx.break.
            BranchInst *BI =
                B.CreateCondBr(cTrue, EndBB, pParentScope->loopContinueBB);
            DxBreaks.emplace_back(BI);
            retBr->eraseFromParent();
          } else {
            // Update branch target.
            retBr->setSuccessor(0, EndBB);
          }
        }
        // For other scope, do nothing. Since whole scope is returned.
        // Just flow naturally to parent scope.
      } else {
        // When only part scope returned.
        // Use bIsReturned to guard to part which not returned.
        switch (pParentScope->kind) {
        case Scope::ScopeKind::ReturnScope:
          DXASSERT(0, "return scope must get whole scope returned.");
          break;
        case Scope::ScopeKind::FunctionScope:
        case Scope::ScopeKind::IfScope: {
          // inside if.
          // if (!bReturned) {
          //   rest of if or else.
          // }
          BasicBlock *CmpBB = BasicBlock::Create(BB->getContext(),
                                                 "bReturned.cmp.false", F, BB);
          // Make BB preds go to cmpBB.
          BB->replaceAllUsesWith(CmpBB);
          // Update endscopeBB to CmpBB for scopes which has BB as endscope.
          updateEndScope(ScopeInfo, EndBBToScopeIndexMap, BB, CmpBB);
          IRBuilder<> B(CmpBB);
          Value *isRetured = B.CreateLoad(bIsReturned, "bReturned.load");
          Value *notReturned =
              B.CreateICmpNE(isRetured, cFalse, "bReturned.not");
          B.CreateCondBr(notReturned, EndBB, BB);
        } break;
        default: {
          // inside switch/loop
          // if (bReturned) {
          //   br endOfScope.
          // }
          BasicBlock *CmpBB =
              BasicBlock::Create(BB->getContext(), "bReturned.cmp.true", F, BB);
          BasicBlock *BreakBB =
              BasicBlock::Create(BB->getContext(), "bReturned.break", F, BB);
          BB->replaceAllUsesWith(CmpBB);
          // Update endscopeBB to CmpBB for scopes which has BB as endscope.
          updateEndScope(ScopeInfo, EndBBToScopeIndexMap, BB, CmpBB);
          IRBuilder<> B(CmpBB);
          Value *isReturned = B.CreateLoad(bIsReturned, "bReturned.load");
          isReturned = B.CreateICmpEQ(isReturned, cTrue, "bReturned.true");
          B.CreateCondBr(isReturned, BreakBB, BB);
          B.SetInsertPoint(BreakBB);
          if (bWaveEnabledStage &&
              pParentScope->kind == Scope::ScopeKind::LoopScope) {
            BranchInst *BI =
                B.CreateCondBr(cTrue, EndBB, pParentScope->loopContinueBB);
            DxBreaks.emplace_back(BI);
          } else {
            B.CreateBr(EndBB);
          }
        } break;
        }
      }
      scopeIndex = pCurScope->parentScopeIndex;
      pCurScope = &ScopeInfo.GetScope(scopeIndex);
      // done when reach function scope.
    } while (pCurScope->kind != Scope::ScopeKind::FunctionScope);
  }
}
  2722. } // namespace
  2723. namespace CGHLSLMSHelper {
  2724. void StructurizeMultiRet(Module &M, clang::CodeGen::CodeGenModule &CGM,
  2725. DenseMap<Function *, ScopeInfo> &ScopeMap,
  2726. bool bWaveEnabledStage,
  2727. SmallVector<BranchInst *, 16> &DxBreaks) {
  2728. if (CGM.getCodeGenOpts().HLSLExtensionsCodegen) {
  2729. if (!CGM.getCodeGenOpts().HLSLExtensionsCodegen->IsOptionEnabled("structurize-returns"))
  2730. return;
  2731. } else {
  2732. if (!CGM.getCodeGenOpts().HLSLOptimizationToggles.count("structurize-returns") ||
  2733. !CGM.getCodeGenOpts().HLSLOptimizationToggles.find("structurize-returns")->second)
  2734. return;
  2735. }
  2736. for (Function &F : M) {
  2737. if (F.isDeclaration())
  2738. continue;
  2739. auto it = ScopeMap.find(&F);
  2740. if (it == ScopeMap.end())
  2741. continue;
  2742. StructurizeMultiRetFunction(&F, it->second, bWaveEnabledStage, DxBreaks);
  2743. }
  2744. }
  2745. } // namespace CGHLSLMSHelper