CGHLSLMSFinishCodeGen.cpp 118 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
72278227922802281228222832284228522862287228822892290229122922293229422952296229722982299230023012302230323042305230623072308230923102311231223132314231523162317231823192320232123222323232423252326232723282329233023312332233323342335233623372338233923402341234223432344234523462347234823492350235123522353235423552356235723582359236023612362236323642365236623672368236923702371237223732374237523762377237823792380238123822383238423852386238723882389239023912392239323942395239623972398239924002401240224032404240524062407240824092410241124122413241424152416241724182419242024212422242324242425242624272428242924302431243224332434243524362437243824392440244124422443244424452446244724482449245024512452245324542455245624572458245924602461246224632464246524662467246824692470247124722473247424752476247724782479248024812482248324842485248624872488248924902491249224932494249524962497249824992500250125022503250425052506250725082509251025112512251325142515251625172518251925202521252225232524252525262527252825292530253125322533253425352536253725382539254025412542254325442545254625472548254925502551255225532554255525562557255825592560256125622563256425652566256725682569257025712572257325742575257625772578257925802581258225832584258525862587258825892590259125922593259425952596259725982599260026012602260326042605260626072608260926102611261226132614261526162617261826192620262126222623262426252626262726282629263026312632263326342635263626372638263926402641264226432644264526462647264826492650265126522653265426552656265726582659266026612662266326642665266626672668266926702671267226732674267526762677267826792680268126822683268426852686268726882689269026912692269326942695269626972698269927002701270227032704270527062707270827092710271127122713271427152716271727182719272027212722272327242725272627272728272927302731273227332734273527362737273827392740274127422743274427452746274727482749275027512752275327542755275627572758275927602761276227632764276527662767276827692770277127722773277427752776277
72778277927802781278227832784278527862787278827892790279127922793279427952796279727982799280028012802280328042805280628072808280928102811281228132814281528162817281828192820282128222823282428252826282728282829283028312832283328342835283628372838283928402841284228432844284528462847284828492850285128522853285428552856285728582859286028612862286328642865286628672868286928702871287228732874287528762877287828792880288128822883288428852886288728882889289028912892289328942895289628972898289929002901290229032904290529062907290829092910291129122913291429152916291729182919292029212922292329242925292629272928292929302931293229332934293529362937293829392940294129422943294429452946294729482949295029512952295329542955295629572958295929602961296229632964296529662967296829692970297129722973297429752976297729782979298029812982298329842985298629872988298929902991299229932994299529962997299829993000300130023003300430053006300730083009301030113012301330143015301630173018301930203021302230233024302530263027302830293030303130323033303430353036303730383039304030413042304330443045304630473048304930503051305230533054305530563057305830593060306130623063306430653066306730683069307030713072307330743075307630773078307930803081308230833084308530863087308830893090309130923093309430953096309730983099310031013102310331043105310631073108310931103111311231133114311531163117311831193120312131223123312431253126312731283129313031313132313331343135313631373138313931403141314231433144314531463147314831493150315131523153315431553156315731583159316031613162316331643165316631673168316931703171317231733174317531763177317831793180318131823183318431853186318731883189319031913192319331943195319631973198319932003201320232033204320532063207320832093210321132123213321432153216321732183219322032213222322332243225322632273228322932303231323232333234323532363237323832393240324132423243324432453246324732483249325032513252325332543255325632573258325932603261326232633264326532663267326832693270327132723273327432753276327
732783279328032813282328332843285328632873288328932903291329232933294
  1. ///////////////////////////////////////////////////////////////////////////////
  2. // //
  3. // CGHLSLMSFinishCodeGen.cpp //
  4. // Copyright (C) Microsoft Corporation. All rights reserved. //
  5. // This file is distributed under the University of Illinois Open Source //
  6. // License. See LICENSE.TXT for details. //
  7. // //
  8. // Implement FinishCodeGen. //
  9. // //
  10. ///////////////////////////////////////////////////////////////////////////////
  11. #include "llvm/ADT/SmallVector.h"
  12. #include "llvm/ADT/StringRef.h"
  13. #include "llvm/Analysis/DxilValueCache.h"
  14. #include "llvm/IR/CFG.h"
  15. #include "llvm/IR/Function.h"
  16. #include "llvm/IR/GetElementPtrTypeIterator.h"
  17. #include "llvm/IR/IRBuilder.h"
  18. #include "llvm/IR/InstIterator.h"
  19. #include "llvm/IR/Instructions.h"
  20. #include "llvm/IR/Module.h"
  21. #include "llvm/IR/Type.h"
  22. #include "llvm/Transforms/Utils/Cloning.h"
  23. #include "llvm/Transforms/Utils/ValueMapper.h"
  24. #include "CodeGenModule.h"
  25. #include "clang/Basic/LangOptions.h"
  26. #include "clang/Frontend/CodeGenOptions.h"
  27. #include "clang/Parse/ParseHLSL.h" // root sig would be in Parser if part of lang
  28. #include "dxc/DXIL/DxilConstants.h"
  29. #include "dxc/DXIL/DxilOperations.h"
  30. #include "dxc/DXIL/DxilResourceProperties.h"
  31. #include "dxc/DXIL/DxilTypeSystem.h"
  32. #include "dxc/DXIL/DxilUtil.h"
  33. #include "dxc/DxilRootSignature/DxilRootSignature.h"
  34. #include "dxc/HLSL/DxilExportMap.h"
  35. #include "dxc/HLSL/DxilGenerationPass.h"
  36. #include "dxc/HLSL/HLMatrixType.h"
  37. #include "dxc/HLSL/HLModule.h"
  38. #include "dxc/HLSL/HLSLExtensionsCodegenHelper.h"
  39. #include "dxc/HlslIntrinsicOp.h"
  40. #include <fenv.h>
  41. #include <memory>
  42. #include <vector>
  43. #include "CGHLSLMSHelper.h"
  44. using namespace llvm;
  45. using namespace hlsl;
  46. using namespace CGHLSLMSHelper;
  47. namespace {
  48. Value *CreateHandleFromResPtr(Value *ResPtr, HLModule &HLM,
  49. llvm::Type *HandleTy, IRBuilder<> &Builder) {
  50. Module &M = *HLM.GetModule();
  51. // Load to make sure resource only have Ld/St use so mem2reg could remove
  52. // temp resource.
  53. Value *ldObj = Builder.CreateLoad(ResPtr);
  54. Value *args[] = {ldObj};
  55. CallInst *Handle = HLM.EmitHLOperationCall(
  56. Builder, HLOpcodeGroup::HLCreateHandle, 0, HandleTy, args, M);
  57. return Handle;
  58. }
  59. CallInst *CreateAnnotateHandle(HLModule &HLM, Value *Handle,
  60. DxilResourceProperties &RP, llvm::Type *ResTy,
  61. IRBuilder<> &Builder) {
  62. Constant *RPConstant = resource_helper::getAsConstant(
  63. RP, HLM.GetOP()->GetResourcePropertiesType(), *HLM.GetShaderModel());
  64. return HLM.EmitHLOperationCall(
  65. Builder, HLOpcodeGroup::HLAnnotateHandle,
  66. (unsigned)HLOpcodeGroup::HLAnnotateHandle, Handle->getType(),
  67. {Handle, RPConstant, UndefValue::get(ResTy)}, *HLM.GetModule());
  68. }
// Lower CBV bitcast use to handle use.
// Leave the load/store.
//
// For each internal cbuffer/tbuffer object recorded in objectProperties,
// every BitCastInst user of the object is replaced by the sequence:
//   handle = HLCreateHandle(load obj)
//   handle = HLAnnotateHandle(handle, props, undef)
//   cb     = HLSubscript[CBufferSubscript](handle, 0)
// and the bitcast's uses are redirected to the subscript result.
void LowerDynamicCBVUseToHandle(
    HLModule &HLM,
    DxilObjectProperties &objectProperties) {
  Type *HandleTy = HLM.GetOP()->GetHandleType();
  Module &M = *HLM.GetModule();
  // Collect BitCast use of CBV.
  SmallVector<std::pair<BitCastInst *, DxilResourceProperties>, 4> BitCasts;
  for (auto it : objectProperties.resMap) {
    DxilResourceProperties RP = it.second;
    // Only constant-buffer and texture-buffer views are lowered here.
    if (RP.getResourceKind() != DXIL::ResourceKind::CBuffer &&
        RP.getResourceKind() != DXIL::ResourceKind::TBuffer)
      continue;
    Value *V = it.first;
    // Skip external globals.
    if (GlobalVariable *GV = dyn_cast<GlobalVariable>(V)) {
      if (GV->getLinkage() != GlobalValue::LinkageTypes::InternalLinkage)
        continue;
    }
    // Advance the iterator before inspecting U (defensive against use-list
    // changes while scanning).
    for (auto UserIt = V->user_begin(); UserIt != V->user_end();) {
      User *U = *(UserIt++);
      // Dead users need no lowering.
      if (U->user_empty())
        continue;
      if (BitCastInst *BCI = dyn_cast<BitCastInst>(U)) {
        BitCasts.emplace_back(std::make_pair(BCI, RP));
        continue;
      }
      DXASSERT((!isa<BitCastOperator>(U) || U->user_empty()),
               "all BitCast should be BitCastInst");
    }
  }
  // Rewrite the collected bitcasts (done after the scan so the use lists
  // above are not mutated mid-iteration).
  for (auto it : BitCasts) {
    BitCastInst *BCI = it.first;
    DxilResourceProperties RP = it.second;
    IRBuilder<> B(BCI);
    // NOTE(review): constant folding is disabled here — presumably to keep
    // the handle-creation sequence as explicit instructions; confirm.
    B.AllowFolding = false;
    Value *ObjV = BCI->getOperand(0);
    Value *Handle = CreateHandleFromResPtr(ObjV, HLM, HandleTy, B);
    Type *ResTy = ObjV->getType()->getPointerElementType();
    Handle = CreateAnnotateHandle(HLM, Handle, RP, ResTy, B);
    // Create cb subscript.
    llvm::Type *opcodeTy = B.getInt32Ty();
    llvm::Type *idxTy = opcodeTy;
    Constant *zeroIdx = ConstantInt::get(opcodeTy, 0);
    Type *cbTy = BCI->getType();
    llvm::FunctionType *SubscriptFuncTy =
        llvm::FunctionType::get(cbTy, {opcodeTy, HandleTy, idxTy}, false);
    Function *subscriptFunc =
        GetOrCreateHLFunction(M, SubscriptFuncTy, HLOpcodeGroup::HLSubscript,
                              (unsigned)HLSubscriptOpcode::CBufferSubscript);
    Constant *opArg = ConstantInt::get(
        opcodeTy, (unsigned)HLSubscriptOpcode::CBufferSubscript);
    Value *args[] = {opArg, Handle, zeroIdx};
    Instruction *cbSubscript =
        cast<Instruction>(B.CreateCall(subscriptFunc, {args}));
    BCI->replaceAllUsesWith(cbSubscript);
    BCI->eraseFromParent();
  }
}
  129. bool IsHLSLSamplerDescType(llvm::Type *Ty) {
  130. if (llvm::StructType *ST = dyn_cast<llvm::StructType>(Ty)) {
  131. if (!ST->hasName())
  132. return false;
  133. StringRef name = ST->getName();
  134. if (name == "struct..Sampler")
  135. return true;
  136. }
  137. return false;
  138. }
  139. #ifndef NDEBUG
  140. static bool ConsumePrefix(StringRef &Str, StringRef Prefix) {
  141. if (!Str.startswith(Prefix)) return false;
  142. Str = Str.substr(Prefix.size());
  143. return true;
  144. }
  145. bool IsHLSLBufferViewType(llvm::Type *Ty) {
  146. if (llvm::StructType *ST = dyn_cast<llvm::StructType>(Ty)) {
  147. if (!ST->hasName())
  148. return false;
  149. StringRef name = ST->getName();
  150. if (!(ConsumePrefix(name, "class.") ||
  151. ConsumePrefix(name, "struct.")))
  152. return false;
  153. if (name.startswith("ConstantBuffer<") ||
  154. name.startswith("TextureBuffer<"))
  155. return true;
  156. }
  157. return false;
  158. }
  159. #endif
  160. void LowerGetResourceFromHeap(
  161. HLModule &HLM, std::vector<std::pair<Function *, unsigned>> &intrinsicMap) {
  162. llvm::Module &M = *HLM.GetModule();
  163. llvm::Type *HandleTy = HLM.GetOP()->GetHandleType();
  164. unsigned GetResFromHeapOp =
  165. static_cast<unsigned>(IntrinsicOp::IOP_CreateResourceFromHeap);
  166. DenseMap<Instruction *, Instruction *> ResourcePtrToHandlePtrMap;
  167. for (auto it : intrinsicMap) {
  168. unsigned opcode = it.second;
  169. if (opcode != GetResFromHeapOp)
  170. continue;
  171. Function *F = it.first;
  172. HLOpcodeGroup group = hlsl::GetHLOpcodeGroup(F);
  173. if (group != HLOpcodeGroup::HLIntrinsic)
  174. continue;
  175. for (auto uit = F->user_begin(); uit != F->user_end();) {
  176. CallInst *CI = cast<CallInst>(*(uit++));
  177. // Arg 0 is this pointer.
  178. unsigned ArgIdx = 1;
  179. Instruction *ResPtr = cast<Instruction>(CI->getArgOperand(ArgIdx));
  180. Value *Index = CI->getArgOperand(ArgIdx+1);
  181. IRBuilder<> Builder(CI);
  182. // Make a handle from GetResFromHeap.
  183. Value *IsSampler = Builder.getInt1(
  184. IsHLSLSamplerDescType(ResPtr->getType()->getPointerElementType()));
  185. Value *Handle = HLM.EmitHLOperationCall(
  186. Builder, HLOpcodeGroup::HLIntrinsic, GetResFromHeapOp, HandleTy,
  187. {Index, IsSampler}, M);
  188. // Find the handle ptr for res ptr.
  189. auto it = ResourcePtrToHandlePtrMap.find(ResPtr);
  190. Instruction *HandlePtr = nullptr;
  191. if (it != ResourcePtrToHandlePtrMap.end()) {
  192. HandlePtr = it->second;
  193. } else {
  194. IRBuilder<> AllocaBuilder(
  195. ResPtr->getParent()->getParent()->getEntryBlock().begin());
  196. HandlePtr = AllocaBuilder.CreateAlloca(HandleTy);
  197. ResourcePtrToHandlePtrMap[ResPtr] = HandlePtr;
  198. }
  199. // Store handle to handle ptr.
  200. Builder.CreateStore(Handle, HandlePtr);
  201. CI->eraseFromParent();
  202. }
  203. }
  204. // Replace load of Resource ptr into load of handel ptr.
  205. for (auto it : ResourcePtrToHandlePtrMap) {
  206. Instruction *resPtr = it.first;
  207. Instruction *handlePtr = it.second;
  208. for (auto uit = resPtr->user_begin(); uit != resPtr->user_end();) {
  209. User *U = *(uit++);
  210. BitCastInst *BCI = cast<BitCastInst>(U);
  211. DXASSERT(
  212. dxilutil::IsHLSLResourceType(
  213. BCI->getType()->getPointerElementType()) ||
  214. IsHLSLBufferViewType(BCI->getType()->getPointerElementType()),
  215. "illegal cast of resource ptr");
  216. for (auto cuit = BCI->user_begin(); cuit != BCI->user_end();) {
  217. LoadInst *LI = cast<LoadInst>(*(cuit++));
  218. IRBuilder<> Builder(LI);
  219. Value *Handle = Builder.CreateLoad(handlePtr);
  220. Value *Res =
  221. HLM.EmitHLOperationCall(Builder, HLOpcodeGroup::HLCast,
  222. (unsigned)HLCastOpcode::HandleToResCast,
  223. LI->getType(), {Handle}, M);
  224. LI->replaceAllUsesWith(Res);
  225. LI->eraseFromParent();
  226. }
  227. BCI->eraseFromParent();
  228. }
  229. resPtr->eraseFromParent();
  230. }
  231. }
  232. void ReplaceBoolVectorSubscript(CallInst *CI) {
  233. Value *Ptr = CI->getArgOperand(0);
  234. Value *Idx = CI->getArgOperand(1);
  235. Value *IdxList[] = {ConstantInt::get(Idx->getType(), 0), Idx};
  236. for (auto It = CI->user_begin(), E = CI->user_end(); It != E;) {
  237. Instruction *user = cast<Instruction>(*(It++));
  238. IRBuilder<> Builder(user);
  239. Value *GEP = Builder.CreateInBoundsGEP(Ptr, IdxList);
  240. if (LoadInst *LI = dyn_cast<LoadInst>(user)) {
  241. Value *NewLd = Builder.CreateLoad(GEP);
  242. Value *cast = Builder.CreateZExt(NewLd, LI->getType());
  243. LI->replaceAllUsesWith(cast);
  244. LI->eraseFromParent();
  245. } else {
  246. // Must be a store inst here.
  247. StoreInst *SI = cast<StoreInst>(user);
  248. Value *V = SI->getValueOperand();
  249. Value *cast =
  250. Builder.CreateICmpNE(V, llvm::ConstantInt::get(V->getType(), 0));
  251. Builder.CreateStore(cast, GEP);
  252. SI->eraseFromParent();
  253. }
  254. }
  255. CI->eraseFromParent();
  256. }
  257. void ReplaceBoolVectorSubscript(Function *F) {
  258. for (auto It = F->user_begin(), E = F->user_end(); It != E;) {
  259. User *user = *(It++);
  260. CallInst *CI = cast<CallInst>(user);
  261. ReplaceBoolVectorSubscript(CI);
  262. }
  263. }
// Add function body for intrinsic if possible.
//
// Creates (or reuses) the HL function for (group, opcode) with signature
// funcTy, copying function attributes from F. For a few intrinsics the
// declaration also gets a synthesized body:
//  - MOP_Append / MOP_Consume expand into IncrementCounter/DecrementCounter
//    plus a DefaultSubscript store/load (with bool <-> i32 memory-
//    representation conversion);
//  - IOP_sincos expands into separate IOP_sin and IOP_cos calls whose
//    results are stored through the two output pointers.
// All other opcodes get a plain declaration.
Function *CreateOpFunction(llvm::Module &M, Function *F,
                           llvm::FunctionType *funcTy, HLOpcodeGroup group,
                           unsigned opcode) {
  Function *opFunc = nullptr;
  AttributeSet attribs = F->getAttributes().getFnAttributes();
  llvm::Type *opcodeTy = llvm::Type::getInt32Ty(M.getContext());
  if (group == HLOpcodeGroup::HLIntrinsic) {
    IntrinsicOp intriOp = static_cast<IntrinsicOp>(opcode);
    switch (intriOp) {
    case IntrinsicOp::MOP_Append:
    case IntrinsicOp::MOP_Consume: {
      bool bAppend = intriOp == IntrinsicOp::MOP_Append;
      llvm::Type *handleTy = funcTy->getParamType(HLOperandIndex::kHandleOpIdx);
      // Don't generate body for OutputStream::Append.
      if (bAppend && HLModule::IsStreamOutputPtrType(handleTy)) {
        opFunc = GetOrCreateHLFunction(M, funcTy, group, opcode, attribs);
        break;
      }
      opFunc = GetOrCreateHLFunctionWithBody(M, funcTy, group, opcode,
                                             bAppend ? "append" : "consume");
      llvm::Type *counterTy = llvm::Type::getInt32Ty(M.getContext());
      llvm::FunctionType *IncCounterFuncTy =
          llvm::FunctionType::get(counterTy, {opcodeTy, handleTy}, false);
      // Append bumps the counter up; Consume bumps it down.
      unsigned counterOpcode =
          bAppend ? (unsigned)IntrinsicOp::MOP_IncrementCounter
                  : (unsigned)IntrinsicOp::MOP_DecrementCounter;
      Function *incCounterFunc = GetOrCreateHLFunction(
          M, IncCounterFuncTy, group, counterOpcode, attribs);
      llvm::Type *idxTy = counterTy;
      // The element type: the appended value for Append, the return value
      // for Consume.
      llvm::Type *valTy =
          bAppend ? funcTy->getParamType(HLOperandIndex::kAppendValOpIndex)
                  : funcTy->getReturnType();
      // Return type for subscript should be pointer type, hence in memory
      // representation
      llvm::Type *subscriptTy = valTy;
      bool isBoolScalarOrVector = false;
      if (!subscriptTy->isPointerTy()) {
        // Bools are stored as i32 in memory; remember that a conversion is
        // needed at the load/store below.
        if (subscriptTy->getScalarType()->isIntegerTy(1)) {
          isBoolScalarOrVector = true;
          llvm::Type *memReprType =
              llvm::IntegerType::get(subscriptTy->getContext(), 32);
          subscriptTy =
              subscriptTy->isVectorTy()
                  ? llvm::VectorType::get(memReprType,
                                          subscriptTy->getVectorNumElements())
                  : memReprType;
        }
        subscriptTy = llvm::PointerType::get(subscriptTy, 0);
      }
      llvm::FunctionType *SubscriptFuncTy = llvm::FunctionType::get(
          subscriptTy, {opcodeTy, handleTy, idxTy}, false);
      Function *subscriptFunc = GetOrCreateHLFunction(
          M, SubscriptFuncTy, HLOpcodeGroup::HLSubscript,
          (unsigned)HLSubscriptOpcode::DefaultSubscript, attribs);
      BasicBlock *BB =
          BasicBlock::Create(opFunc->getContext(), "Entry", opFunc);
      IRBuilder<> Builder(BB);
      auto argIter = opFunc->args().begin();
      // Skip the opcode arg.
      argIter++;
      Argument *thisArg = argIter++;
      // int counter = IncrementCounter/DecrementCounter(Buf);
      Value *incCounterOpArg = ConstantInt::get(idxTy, counterOpcode);
      Value *counter =
          Builder.CreateCall(incCounterFunc, {incCounterOpArg, thisArg});
      // Buf[counter];
      Value *subscriptOpArg = ConstantInt::get(
          idxTy, (unsigned)HLSubscriptOpcode::DefaultSubscript);
      Value *subscript =
          Builder.CreateCall(subscriptFunc, {subscriptOpArg, thisArg, counter});
      if (bAppend) {
        Argument *valArg = argIter;
        // Buf[counter] = val;
        if (valTy->isPointerTy()) {
          // Aggregate value: copy it byte-wise into the buffer slot.
          unsigned size = M.getDataLayout().getTypeAllocSize(
              subscript->getType()->getPointerElementType());
          Builder.CreateMemCpy(subscript, valArg, size, 1);
        } else {
          Value *storedVal = valArg;
          // Convert to memory representation
          if (isBoolScalarOrVector)
            storedVal = Builder.CreateZExt(
                storedVal, subscriptTy->getPointerElementType(), "frombool");
          Builder.CreateStore(storedVal, subscript);
        }
        Builder.CreateRetVoid();
      } else {
        // return Buf[counter];
        if (valTy->isPointerTy())
          Builder.CreateRet(subscript);
        else {
          Value *retVal = Builder.CreateLoad(subscript);
          // Convert to register representation
          if (isBoolScalarOrVector)
            retVal = Builder.CreateICmpNE(
                retVal, Constant::getNullValue(retVal->getType()), "tobool");
          Builder.CreateRet(retVal);
        }
      }
    } break;
    case IntrinsicOp::IOP_sincos: {
      opFunc =
          GetOrCreateHLFunctionWithBody(M, funcTy, group, opcode, "sincos");
      llvm::Type *valTy =
          funcTy->getParamType(HLOperandIndex::kTrinaryOpSrc0Idx);
      llvm::FunctionType *sinFuncTy =
          llvm::FunctionType::get(valTy, {opcodeTy, valTy}, false);
      unsigned sinOp = static_cast<unsigned>(IntrinsicOp::IOP_sin);
      unsigned cosOp = static_cast<unsigned>(IntrinsicOp::IOP_cos);
      // sin and cos share the same signature, so one function type serves
      // both declarations.
      Function *sinFunc =
          GetOrCreateHLFunction(M, sinFuncTy, group, sinOp, attribs);
      Function *cosFunc =
          GetOrCreateHLFunction(M, sinFuncTy, group, cosOp, attribs);
      BasicBlock *BB =
          BasicBlock::Create(opFunc->getContext(), "Entry", opFunc);
      IRBuilder<> Builder(BB);
      auto argIter = opFunc->args().begin();
      // Skip the opcode arg.
      argIter++;
      // Args: value, sin output pointer, cos output pointer.
      Argument *valArg = argIter++;
      Argument *sinPtrArg = argIter++;
      Argument *cosPtrArg = argIter++;
      Value *sinOpArg = ConstantInt::get(opcodeTy, sinOp);
      Value *sinVal = Builder.CreateCall(sinFunc, {sinOpArg, valArg});
      Builder.CreateStore(sinVal, sinPtrArg);
      Value *cosOpArg = ConstantInt::get(opcodeTy, cosOp);
      Value *cosVal = Builder.CreateCall(cosFunc, {cosOpArg, valArg});
      Builder.CreateStore(cosVal, cosPtrArg);
      // Ret.
      Builder.CreateRetVoid();
    } break;
    default:
      // Everything else: declaration only, no synthesized body.
      opFunc = GetOrCreateHLFunction(M, funcTy, group, opcode, attribs);
      break;
    }
  } else if (group == HLOpcodeGroup::HLExtIntrinsic) {
    // Extension intrinsics carry their group/function names via attributes.
    llvm::StringRef fnName = F->getName();
    llvm::StringRef groupName = GetHLOpcodeGroupNameByAttr(F);
    opFunc = GetOrCreateHLFunction(M, funcTy, group, &groupName, &fnName,
                                   opcode, attribs);
  } else {
    opFunc = GetOrCreateHLFunction(M, funcTy, group, opcode, attribs);
  }
  return opFunc;
}
  410. DxilResourceProperties GetResourcePropsFromIntrinsicObjectArg(
  411. Value *arg, HLModule &HLM, DxilTypeSystem &typeSys,
  412. DxilObjectProperties &objectProperties) {
  413. DxilResourceProperties RP = objectProperties.GetResource(arg);
  414. if (RP.isValid())
  415. return RP;
  416. // Must be GEP.
  417. GEPOperator *GEP = cast<GEPOperator>(arg);
  418. // Find RP from GEP.
  419. Value *Ptr = GEP->getPointerOperand();
  420. // When Ptr is array of resource, check if it is another GEP.
  421. while (
  422. dxilutil::IsHLSLResourceType(dxilutil::GetArrayEltTy(Ptr->getType()))) {
  423. if (GEPOperator *ParentGEP = dyn_cast<GEPOperator>(Ptr)) {
  424. GEP = ParentGEP;
  425. Ptr = GEP->getPointerOperand();
  426. } else {
  427. break;
  428. }
  429. }
  430. // When ptr is array of resource, ptr could be in
  431. // objectProperties.
  432. RP = objectProperties.GetResource(Ptr);
  433. if (RP.isValid())
  434. return RP;
  435. DxilStructAnnotation *Anno = nullptr;
  436. for (auto gepIt = gep_type_begin(GEP), E = gep_type_end(GEP); gepIt != E;
  437. ++gepIt) {
  438. if (StructType *ST = dyn_cast<StructType>(*gepIt)) {
  439. Anno = typeSys.GetStructAnnotation(ST);
  440. DXASSERT(Anno, "missing type annotation");
  441. unsigned Index =
  442. cast<ConstantInt>(gepIt.getOperand())->getLimitedValue();
  443. DxilFieldAnnotation &fieldAnno = Anno->GetFieldAnnotation(Index);
  444. if (fieldAnno.HasResourceAttribute()) {
  445. MDNode *resAttrib = fieldAnno.GetResourceAttribute();
  446. DxilResourceBase R(DXIL::ResourceClass::Invalid);
  447. HLM.LoadDxilResourceBaseFromMDNode(resAttrib, R);
  448. switch (R.GetClass()) {
  449. case DXIL::ResourceClass::SRV:
  450. case DXIL::ResourceClass::UAV: {
  451. DxilResource Res;
  452. HLM.LoadDxilResourceFromMDNode(resAttrib, Res);
  453. RP = resource_helper::loadPropsFromResourceBase(&Res);
  454. } break;
  455. case DXIL::ResourceClass::Sampler: {
  456. DxilSampler Sampler;
  457. HLM.LoadDxilSamplerFromMDNode(resAttrib, Sampler);
  458. RP = resource_helper::loadPropsFromResourceBase(&Sampler);
  459. } break;
  460. default:
  461. DXASSERT(0, "invalid resource attribute in filed annotation");
  462. break;
  463. }
  464. break;
  465. }
  466. }
  467. }
  468. DXASSERT(RP.isValid(), "invalid resource properties");
  469. return RP;
  470. }
// Rewrites HL intrinsic function F into a variant that carries an explicit
// i32 opcode as its first parameter and takes DXIL handles in place of
// HLSL resource pointers. Every call site is rewritten to pass the opcode
// constant plus annotated handles created from the resource pointers, and
// F itself is erased once all calls are migrated.
// Special cases:
//  - VectorSubscript on bool vectors is delegated to
//    ReplaceBoolVectorSubscript (no opcode parameter added).
//  - DoubleSubscript (two chained subscript calls, e.g. tex[mip][coord])
//    merges both calls into one call taking the coord plus a trailing
//    sample/mip index.
void AddOpcodeParamForIntrinsic(
    HLModule &HLM, Function *F, unsigned opcode, llvm::Type *HandleTy,
    DxilObjectProperties &objectProperties) {
  llvm::Module &M = *HLM.GetModule();
  llvm::FunctionType *oldFuncTy = F->getFunctionType();
  SmallVector<llvm::Type *, 4> paramTyList;
  // Add the opcode param
  llvm::Type *opcodeTy = llvm::Type::getInt32Ty(M.getContext());
  paramTyList.emplace_back(opcodeTy);
  paramTyList.append(oldFuncTy->param_begin(), oldFuncTy->param_end());
  // Swap resource-pointer parameter types for the handle type.
  for (unsigned i = 1; i < paramTyList.size(); i++) {
    llvm::Type *Ty = paramTyList[i];
    if (Ty->isPointerTy()) {
      Ty = Ty->getPointerElementType();
      if (dxilutil::IsHLSLResourceType(Ty)) {
        // Use handle type for resource type.
        // This will make sure temp object variable only used by createHandle.
        paramTyList[i] = HandleTy;
      }
    }
  }
  HLOpcodeGroup group = hlsl::GetHLOpcodeGroup(F);
  if (group == HLOpcodeGroup::HLSubscript &&
      opcode == static_cast<unsigned>(HLSubscriptOpcode::VectorSubscript)) {
    llvm::FunctionType *FT = F->getFunctionType();
    llvm::Type *VecArgTy = FT->getParamType(0);
    llvm::VectorType *VType =
        cast<llvm::VectorType>(VecArgTy->getPointerElementType());
    llvm::Type *Ty = VType->getElementType();
    DXASSERT(Ty->isIntegerTy(), "Only bool could use VectorSubscript");
    llvm::IntegerType *ITy = cast<IntegerType>(Ty);
    DXASSERT_LOCALVAR(ITy, ITy->getBitWidth() == 1,
                      "Only bool could use VectorSubscript");
    // The return type is i8*.
    // Replace all uses with i1*.
    ReplaceBoolVectorSubscript(F);
    return;
  }
  bool isDoubleSubscriptFunc =
      group == HLOpcodeGroup::HLSubscript &&
      opcode == static_cast<unsigned>(HLSubscriptOpcode::DoubleSubscript);
  llvm::Type *RetTy = oldFuncTy->getReturnType();
  if (isDoubleSubscriptFunc) {
    // Inspect the first call site (and the second subscript call that uses
    // it) to work out the merged signature.
    CallInst *doubleSub = cast<CallInst>(*F->user_begin());
    // Change currentIdx type into coord type.
    auto U = doubleSub->user_begin();
    Value *user = *U;
    CallInst *secSub = cast<CallInst>(user);
    unsigned coordIdx = HLOperandIndex::kSubscriptIndexOpIdx;
    // opcode operand not add yet, so the index need -1.
    if (GetHLOpcodeGroupByName(secSub->getCalledFunction()) ==
        HLOpcodeGroup::NotHL)
      coordIdx -= 1;
    Value *coord = secSub->getArgOperand(coordIdx);
    llvm::Type *coordTy = coord->getType();
    paramTyList[HLOperandIndex::kSubscriptIndexOpIdx] = coordTy;
    // Add the sampleIdx or mipLevel parameter to the end.
    paramTyList.emplace_back(opcodeTy);
    // Change return type to be resource ret type.
    // opcode operand not add yet, so the index need -1.
    Value *objPtr =
        doubleSub->getArgOperand(HLOperandIndex::kSubscriptObjectOpIdx - 1);
    // Must be a GEP
    GEPOperator *objGEP = cast<GEPOperator>(objPtr);
    gep_type_iterator GEPIt = gep_type_begin(objGEP), E = gep_type_end(objGEP);
    llvm::Type *resTy = nullptr;
    // Walk the GEP's indexed types until the resource type is found.
    while (GEPIt != E) {
      if (dxilutil::IsHLSLResourceType(*GEPIt)) {
        resTy = *GEPIt;
        break;
      }
      GEPIt++;
    }
    DXASSERT(resTy, "must find the resource type");
    // Change object type to handle type.
    paramTyList[HLOperandIndex::kSubscriptObjectOpIdx] = HandleTy;
    // Change RetTy into pointer of resource return type.
    RetTy = cast<StructType>(resTy)->getElementType(0)->getPointerTo();
  }
  llvm::FunctionType *funcTy =
      llvm::FunctionType::get(RetTy, paramTyList, oldFuncTy->isVarArg());
  Function *opFunc = CreateOpFunction(M, F, funcTy, group, opcode);
  // Carry over the lowering strategy annotation, if any.
  StringRef lower = hlsl::GetHLLowerStrategy(F);
  if (!lower.empty())
    hlsl::SetHLLowerStrategy(opFunc, lower);
  DxilTypeSystem &typeSys = HLM.GetTypeSystem();
  // Rewrite every call site of F to call opFunc instead. The iterator is
  // advanced before the call is erased.
  for (auto user = F->user_begin(); user != F->user_end();) {
    // User must be a call.
    CallInst *oldCI = cast<CallInst>(*(user++));
    SmallVector<Value *, 4> opcodeParamList;
    Value *opcodeConst = Constant::getIntegerValue(opcodeTy, APInt(32, opcode));
    opcodeParamList.emplace_back(opcodeConst);
    opcodeParamList.append(oldCI->arg_operands().begin(),
                           oldCI->arg_operands().end());
    IRBuilder<> Builder(oldCI);
    if (isDoubleSubscriptFunc) {
      // Change obj to the resource pointer.
      Value *objVal = opcodeParamList[HLOperandIndex::kSubscriptObjectOpIdx];
      GEPOperator *objGEP = cast<GEPOperator>(objVal);
      SmallVector<Value *, 8> IndexList;
      IndexList.append(objGEP->idx_begin(), objGEP->idx_end());
      Value *lastIndex = IndexList.back();
      ConstantInt *constIndex = cast<ConstantInt>(lastIndex);
      DXASSERT_LOCALVAR(constIndex, constIndex->getLimitedValue() == 1,
                        "last index must 1");
      // Remove the last index.
      IndexList.pop_back();
      objVal = objGEP->getPointerOperand();
      DxilResourceProperties RP = GetResourcePropsFromIntrinsicObjectArg(
          objVal, HLM, typeSys, objectProperties);
      if (IndexList.size() > 1)
        objVal = Builder.CreateInBoundsGEP(objVal, IndexList);
      Value *Handle = CreateHandleFromResPtr(objVal, HLM, HandleTy, Builder);
      Type *ResTy = objVal->getType()->getPointerElementType();
      Handle = CreateAnnotateHandle(HLM, Handle, RP, ResTy, Builder);
      // Change obj to the resource pointer.
      opcodeParamList[HLOperandIndex::kSubscriptObjectOpIdx] = Handle;
      // Set idx and mipIdx.
      Value *mipIdx = opcodeParamList[HLOperandIndex::kSubscriptIndexOpIdx];
      auto U = oldCI->user_begin();
      Value *user = *U;
      CallInst *secSub = cast<CallInst>(user);
      unsigned idxOpIndex = HLOperandIndex::kSubscriptIndexOpIdx;
      if (GetHLOpcodeGroupByName(secSub->getCalledFunction()) ==
          HLOpcodeGroup::NotHL)
        idxOpIndex--;
      Value *idx = secSub->getArgOperand(idxOpIndex);
      DXASSERT(secSub->hasOneUse(), "subscript should only has one use");
      // Add the sampleIdx or mipLevel parameter to the end.
      opcodeParamList[HLOperandIndex::kSubscriptIndexOpIdx] = idx;
      opcodeParamList.emplace_back(mipIdx);
      // Insert new call before secSub to make sure idx is ready to use.
      Builder.SetInsertPoint(secSub);
    }
    // Replace each remaining resource-pointer argument with an annotated
    // handle created right before the call.
    for (unsigned i = 1; i < opcodeParamList.size(); i++) {
      Value *arg = opcodeParamList[i];
      llvm::Type *Ty = arg->getType();
      if (Ty->isPointerTy()) {
        Ty = Ty->getPointerElementType();
        if (dxilutil::IsHLSLResourceType(Ty)) {
          DxilResourceProperties RP = GetResourcePropsFromIntrinsicObjectArg(
              arg, HLM, typeSys, objectProperties);
          // Use object type directly, not by pointer.
          // This will make sure temp object variable only used by ld/st.
          if (GEPOperator *argGEP = dyn_cast<GEPOperator>(arg)) {
            std::vector<Value *> idxList(argGEP->idx_begin(),
                                         argGEP->idx_end());
            // Create instruction to avoid GEPOperator.
            GetElementPtrInst *GEP = GetElementPtrInst::CreateInBounds(
                argGEP->getPointerOperand(), idxList);
            Builder.Insert(GEP);
            arg = GEP;
          }
          llvm::Type *ResTy = arg->getType()->getPointerElementType();
          Value *Handle = CreateHandleFromResPtr(arg, HLM, HandleTy, Builder);
          Handle = CreateAnnotateHandle(HLM, Handle, RP, ResTy, Builder);
          opcodeParamList[i] = Handle;
        }
      }
    }
    Value *CI = Builder.CreateCall(opFunc, opcodeParamList);
    if (!isDoubleSubscriptFunc) {
      // replace new call and delete the old call
      oldCI->replaceAllUsesWith(CI);
      oldCI->eraseFromParent();
    } else {
      // For double script.
      // Replace single users use with new CI.
      auto U = oldCI->user_begin();
      Value *user = *U;
      CallInst *secSub = cast<CallInst>(user);
      secSub->replaceAllUsesWith(CI);
      secSub->eraseFromParent();
      oldCI->eraseFromParent();
    }
  }
  // delete the function
  F->eraseFromParent();
}
  650. void AddOpcodeParamForIntrinsics(
  651. HLModule &HLM, std::vector<std::pair<Function *, unsigned>> &intrinsicMap,
  652. DxilObjectProperties &objectProperties) {
  653. llvm::Type *HandleTy = HLM.GetOP()->GetHandleType();
  654. for (auto mapIter : intrinsicMap) {
  655. Function *F = mapIter.first;
  656. if (F->user_empty()) {
  657. // delete the function
  658. F->eraseFromParent();
  659. continue;
  660. }
  661. unsigned opcode = mapIter.second;
  662. AddOpcodeParamForIntrinsic(HLM, F, opcode, HandleTy, objectProperties);
  663. }
  664. }
  665. } // namespace
  666. namespace {
  667. // Returns true a global value is being updated
  668. bool GlobalHasStoreUserRec(Value *V, std::set<Value *> &visited) {
  669. bool isWriteEnabled = false;
  670. if (V && visited.find(V) == visited.end()) {
  671. visited.insert(V);
  672. for (User *U : V->users()) {
  673. if (isa<StoreInst>(U)) {
  674. return true;
  675. } else if (CallInst *CI = dyn_cast<CallInst>(U)) {
  676. Function *F = CI->getCalledFunction();
  677. if (!F->isIntrinsic()) {
  678. HLOpcodeGroup hlGroup = GetHLOpcodeGroup(F);
  679. switch (hlGroup) {
  680. case HLOpcodeGroup::NotHL:
  681. return true;
  682. case HLOpcodeGroup::HLMatLoadStore: {
  683. HLMatLoadStoreOpcode opCode =
  684. static_cast<HLMatLoadStoreOpcode>(hlsl::GetHLOpcode(CI));
  685. if (opCode == HLMatLoadStoreOpcode::ColMatStore ||
  686. opCode == HLMatLoadStoreOpcode::RowMatStore)
  687. return true;
  688. break;
  689. }
  690. case HLOpcodeGroup::HLCast:
  691. case HLOpcodeGroup::HLSubscript:
  692. if (GlobalHasStoreUserRec(U, visited))
  693. return true;
  694. break;
  695. default:
  696. break;
  697. }
  698. }
  699. } else if (isa<GEPOperator>(U) || isa<PHINode>(U) || isa<SelectInst>(U)) {
  700. if (GlobalHasStoreUserRec(U, visited))
  701. return true;
  702. }
  703. }
  704. }
  705. return isWriteEnabled;
  706. }
  707. // Returns true if any of the direct user of a global is a store inst
  708. // otherwise recurse through the remaining users and check if any GEP
  709. // exists and which in turn has a store inst as user.
  710. bool GlobalHasStoreUser(GlobalVariable *GV) {
  711. std::set<Value *> visited;
  712. Value *V = cast<Value>(GV);
  713. return GlobalHasStoreUserRec(V, visited);
  714. }
  715. GlobalVariable *CreateStaticGlobal(llvm::Module *M, GlobalVariable *GV) {
  716. Constant *GC = M->getOrInsertGlobal(GV->getName().str() + ".static.copy",
  717. GV->getType()->getPointerElementType());
  718. GlobalVariable *NGV = cast<GlobalVariable>(GC);
  719. if (GV->hasInitializer()) {
  720. NGV->setInitializer(GV->getInitializer());
  721. } else {
  722. // The copy being static, it should be initialized per llvm rules
  723. NGV->setInitializer(
  724. Constant::getNullValue(GV->getType()->getPointerElementType()));
  725. }
  726. // static global should have internal linkage
  727. NGV->setLinkage(GlobalValue::InternalLinkage);
  728. return NGV;
  729. }
// For each external, non-constant, non-HLSL-object, non-groupshared global
// that is written to, redirect all uses to a writable internal static copy,
// initialize the copy from the original with a memcpy at the entry of EF,
// and mark the original constant.
void CreateWriteEnabledStaticGlobals(llvm::Module *M, llvm::Function *EF) {
  std::vector<GlobalVariable *> worklist;
  for (GlobalVariable &GV : M->globals()) {
    if (!GV.isConstant() && GV.getLinkage() != GlobalValue::InternalLinkage &&
        // skip globals which are HLSL objects or group shared
        !dxilutil::IsHLSLObjectType(GV.getType()->getPointerElementType()) &&
        !dxilutil::IsSharedMemoryGlobal(&GV)) {
      // Only globals with store users need a writable copy; all matching
      // globals are marked constant either way.
      if (GlobalHasStoreUser(&GV))
        worklist.emplace_back(&GV);
      // TODO: Ensure that constant globals aren't using initializer
      GV.setConstant(true);
    }
  }
  IRBuilder<> Builder(
      dxilutil::FirstNonAllocaInsertionPt(&EF->getEntryBlock()));
  for (GlobalVariable *GV : worklist) {
    GlobalVariable *NGV = CreateStaticGlobal(M, GV);
    // Redirect all uses to the copy *before* emitting the memcpy, so the
    // memcpy below still reads from the original GV.
    GV->replaceAllUsesWith(NGV);
    // insert memcpy in all entryblocks
    uint64_t size = M->getDataLayout().getTypeAllocSize(
        GV->getType()->getPointerElementType());
    Builder.CreateMemCpy(NGV, GV, size, 1);
  }
}
  754. } // namespace
  755. namespace {
  756. void SetEntryFunction(HLModule &HLM, Function *Entry,
  757. clang::CodeGen::CodeGenModule &CGM) {
  758. if (Entry == nullptr) {
  759. clang::DiagnosticsEngine &Diags = CGM.getDiags();
  760. unsigned DiagID = Diags.getCustomDiagID(clang::DiagnosticsEngine::Error,
  761. "cannot find entry function %0");
  762. Diags.Report(DiagID) << CGM.getCodeGenOpts().HLSLEntryFunction;
  763. return;
  764. }
  765. HLM.SetEntryFunction(Entry);
  766. }
  767. Function *CloneFunction(Function *Orig, const llvm::Twine &Name,
  768. llvm::Module *llvmModule, hlsl::DxilTypeSystem &TypeSys,
  769. hlsl::DxilTypeSystem &SrcTypeSys) {
  770. Function *F = Function::Create(Orig->getFunctionType(),
  771. GlobalValue::LinkageTypes::ExternalLinkage,
  772. Name, llvmModule);
  773. SmallVector<ReturnInst *, 2> Returns;
  774. ValueToValueMapTy vmap;
  775. // Map params.
  776. auto entryParamIt = F->arg_begin();
  777. for (Argument &param : Orig->args()) {
  778. vmap[&param] = (entryParamIt++);
  779. }
  780. llvm::CloneFunctionInto(F, Orig, vmap, /*ModuleLevelChagnes*/ false, Returns);
  781. TypeSys.CopyFunctionAnnotation(F, Orig, SrcTypeSys);
  782. return F;
  783. }
  784. // Clone shader entry function to be called by other functions.
  785. // The original function will be used as shader entry.
  786. void CloneShaderEntry(Function *ShaderF, StringRef EntryName, HLModule &HLM) {
  787. Function *F = CloneFunction(ShaderF, "", HLM.GetModule(), HLM.GetTypeSystem(),
  788. HLM.GetTypeSystem());
  789. F->takeName(ShaderF);
  790. F->setLinkage(GlobalValue::LinkageTypes::InternalLinkage);
  791. // Set to name before mangled.
  792. ShaderF->setName(EntryName);
  793. DxilFunctionAnnotation *annot = HLM.GetFunctionAnnotation(F);
  794. DxilParameterAnnotation &cloneRetAnnot = annot->GetRetTypeAnnotation();
  795. // Clear semantic for cloned one.
  796. cloneRetAnnot.SetSemanticString("");
  797. cloneRetAnnot.SetSemanticIndexVec({});
  798. for (unsigned i = 0; i < annot->GetNumParameters(); i++) {
  799. DxilParameterAnnotation &cloneParamAnnot = annot->GetParameterAnnotation(i);
  800. // Clear semantic for cloned one.
  801. cloneParamAnnot.SetSemanticString("");
  802. cloneParamAnnot.SetSemanticIndexVec({});
  803. }
  804. }
  805. } // namespace
  806. namespace {
  807. bool IsPatchConstantFunction(
  808. const Function *F, StringMap<PatchConstantInfo> &patchConstantFunctionMap) {
  809. DXASSERT_NOMSG(F != nullptr);
  810. for (auto &&p : patchConstantFunctionMap) {
  811. if (p.second.Func == F)
  812. return true;
  813. }
  814. return false;
  815. }
// Resolves the patchconstantfunc named by PatchConstantFuncAttr, attaches it
// to the HS entry, and validates it: warns on multiple overloads, rejects
// inout parameters, and checks that its input/output control point counts
// agree with the HS entry's. All violations are reported as clang
// diagnostics rather than asserts.
void SetPatchConstantFunctionWithAttr(
    const EntryFunctionInfo &EntryFunc,
    const clang::HLSLPatchConstantFuncAttr *PatchConstantFuncAttr,
    StringMap<PatchConstantInfo> &patchConstantFunctionMap,
    std::unordered_map<Function *, std::unique_ptr<DxilFunctionProps>>
        &patchConstantFunctionPropsMap,
    HLModule &HLM, clang::CodeGen::CodeGenModule &CGM) {
  StringRef funcName = PatchConstantFuncAttr->getFunctionName();
  auto Entry = patchConstantFunctionMap.find(funcName);
  // The named function must have been collected earlier.
  if (Entry == patchConstantFunctionMap.end()) {
    clang::DiagnosticsEngine &Diags = CGM.getDiags();
    unsigned DiagID = Diags.getCustomDiagID(
        clang::DiagnosticsEngine::Error, "Cannot find patchconstantfunc %0.");
    Diags.Report(PatchConstantFuncAttr->getLocation(), DiagID) << funcName;
    return;
  }
  // Ambiguous overloads only warn; the collected one is used.
  if (Entry->second.NumOverloads != 1) {
    clang::DiagnosticsEngine &Diags = CGM.getDiags();
    unsigned DiagID =
        Diags.getCustomDiagID(clang::DiagnosticsEngine::Warning,
                              "Multiple overloads of patchconstantfunc %0.");
    unsigned NoteID = Diags.getCustomDiagID(clang::DiagnosticsEngine::Note,
                                            "This overload was selected.");
    Diags.Report(PatchConstantFuncAttr->getLocation(), DiagID) << funcName;
    Diags.Report(Entry->second.SL, NoteID);
  }
  Function *patchConstFunc = Entry->second.Func;
  DXASSERT(
      HLM.HasDxilFunctionProps(EntryFunc.Func),
      " else AddHLSLFunctionInfo did not save the dxil function props for the "
      "HS entry.");
  DxilFunctionProps *HSProps = &HLM.GetDxilFunctionProps(EntryFunc.Func);
  HLM.SetPatchConstantFunctionForHS(EntryFunc.Func, patchConstFunc);
  DXASSERT_NOMSG(patchConstantFunctionPropsMap.count(patchConstFunc));
  // Check no inout parameter for patch constant function.
  DxilFunctionAnnotation *patchConstFuncAnnotation =
      HLM.GetFunctionAnnotation(patchConstFunc);
  for (unsigned i = 0; i < patchConstFuncAnnotation->GetNumParameters(); i++) {
    if (patchConstFuncAnnotation->GetParameterAnnotation(i)
            .GetParamInputQual() == DxilParamInputQual::Inout) {
      clang::DiagnosticsEngine &Diags = CGM.getDiags();
      unsigned DiagID = Diags.getCustomDiagID(
          clang::DiagnosticsEngine::Error,
          "Patch Constant function %0 should not have inout param.");
      Diags.Report(Entry->second.SL, DiagID) << funcName;
    }
  }
  // Input/Output control point validation.
  if (patchConstantFunctionPropsMap.count(patchConstFunc)) {
    const DxilFunctionProps &patchProps =
        *patchConstantFunctionPropsMap[patchConstFunc];
    // A count of 0 means the patch constant function did not declare one;
    // only non-zero mismatches are errors.
    if (patchProps.ShaderProps.HS.inputControlPoints != 0 &&
        patchProps.ShaderProps.HS.inputControlPoints !=
            HSProps->ShaderProps.HS.inputControlPoints) {
      clang::DiagnosticsEngine &Diags = CGM.getDiags();
      unsigned DiagID =
          Diags.getCustomDiagID(clang::DiagnosticsEngine::Error,
                                "Patch constant function's input patch input "
                                "should have %0 elements, but has %1.");
      Diags.Report(Entry->second.SL, DiagID)
          << HSProps->ShaderProps.HS.inputControlPoints
          << patchProps.ShaderProps.HS.inputControlPoints;
    }
    if (patchProps.ShaderProps.HS.outputControlPoints != 0 &&
        patchProps.ShaderProps.HS.outputControlPoints !=
            HSProps->ShaderProps.HS.outputControlPoints) {
      clang::DiagnosticsEngine &Diags = CGM.getDiags();
      unsigned DiagID =
          Diags.getCustomDiagID(clang::DiagnosticsEngine::Error,
                                "Patch constant function's output patch input "
                                "should have %0 elements, but has %1.");
      Diags.Report(Entry->second.SL, DiagID)
          << HSProps->ShaderProps.HS.outputControlPoints
          << patchProps.ShaderProps.HS.outputControlPoints;
    }
  }
}
  893. void SetPatchConstantFunction(
  894. const EntryFunctionInfo &EntryFunc,
  895. std::unordered_map<Function *, const clang::HLSLPatchConstantFuncAttr *>
  896. &HSEntryPatchConstantFuncAttr,
  897. StringMap<PatchConstantInfo> &patchConstantFunctionMap,
  898. std::unordered_map<Function *, std::unique_ptr<DxilFunctionProps>>
  899. &patchConstantFunctionPropsMap,
  900. HLModule &HLM, clang::CodeGen::CodeGenModule &CGM) {
  901. auto AttrsIter = HSEntryPatchConstantFuncAttr.find(EntryFunc.Func);
  902. DXASSERT(AttrsIter != HSEntryPatchConstantFuncAttr.end(),
  903. "we have checked this in AddHLSLFunctionInfo()");
  904. SetPatchConstantFunctionWithAttr(EntryFunc, AttrsIter->second,
  905. patchConstantFunctionMap,
  906. patchConstantFunctionPropsMap, HLM, CGM);
  907. }
  908. } // namespace
  909. namespace {
  910. // For case like:
  911. // cbuffer A {
  912. // float a;
  913. // int b;
  914. //}
  915. //
  916. // const static struct {
  917. // float a;
  918. // int b;
  919. //} ST = { a, b };
  920. // Replace user of ST with a and b.
  921. bool ReplaceConstStaticGlobalUser(GEPOperator *GEP,
  922. std::vector<Constant *> &InitList,
  923. IRBuilder<> &Builder) {
  924. if (GEP->getNumIndices() < 2) {
  925. // Don't use sub element.
  926. return false;
  927. }
  928. SmallVector<Value *, 4> idxList;
  929. auto iter = GEP->idx_begin();
  930. idxList.emplace_back(*(iter++));
  931. ConstantInt *subIdx = dyn_cast<ConstantInt>(*(iter++));
  932. DXASSERT(subIdx, "else dynamic indexing on struct field");
  933. unsigned subIdxImm = subIdx->getLimitedValue();
  934. DXASSERT(subIdxImm < InitList.size(), "else struct index out of bound");
  935. Constant *subPtr = InitList[subIdxImm];
  936. // Move every idx to idxList except idx for InitList.
  937. while (iter != GEP->idx_end()) {
  938. idxList.emplace_back(*(iter++));
  939. }
  940. Value *NewGEP = Builder.CreateGEP(subPtr, idxList);
  941. GEP->replaceAllUsesWith(NewGEP);
  942. return true;
  943. }
  944. } // namespace
  945. namespace CGHLSLMSHelper {
  946. void ReplaceConstStaticGlobals(
  947. std::unordered_map<GlobalVariable *, std::vector<Constant *>>
  948. &staticConstGlobalInitListMap,
  949. std::unordered_map<GlobalVariable *, Function *>
  950. &staticConstGlobalCtorMap) {
  951. for (auto &iter : staticConstGlobalInitListMap) {
  952. GlobalVariable *GV = iter.first;
  953. std::vector<Constant *> &InitList = iter.second;
  954. LLVMContext &Ctx = GV->getContext();
  955. // Do the replace.
  956. bool bPass = true;
  957. for (User *U : GV->users()) {
  958. IRBuilder<> Builder(Ctx);
  959. if (GetElementPtrInst *GEPInst = dyn_cast<GetElementPtrInst>(U)) {
  960. Builder.SetInsertPoint(GEPInst);
  961. bPass &= ReplaceConstStaticGlobalUser(cast<GEPOperator>(GEPInst),
  962. InitList, Builder);
  963. } else if (GEPOperator *GEP = dyn_cast<GEPOperator>(U)) {
  964. bPass &= ReplaceConstStaticGlobalUser(GEP, InitList, Builder);
  965. } else {
  966. DXASSERT(false, "invalid user of const static global");
  967. }
  968. }
  969. // Clear the Ctor which is useless now.
  970. if (bPass) {
  971. Function *Ctor = staticConstGlobalCtorMap[GV];
  972. Ctor->getBasicBlockList().clear();
  973. BasicBlock *Entry = BasicBlock::Create(Ctx, "", Ctor);
  974. IRBuilder<> Builder(Entry);
  975. Builder.CreateRetVoid();
  976. }
  977. }
  978. }
  979. } // namespace CGHLSLMSHelper
  980. namespace {
// Loads from Ptr (whose pointee type is FromTy) and converts the loaded
// value to ToTy for the bitcast patterns this pass recognizes:
//   scalar  -> <1 x T>   splat into a one-element vector
//   iN      -> <1 x i1>  icmp-ne-zero, then insertelement
//   <N x T> -> <1 x T>   shufflevector down to one lane
//   [N x T] -> <N x T>   element-wise loads + insertelements
//   i1      -> iN        zext
// Returns nullptr when the (FromTy, ToTy) pair matches no known pattern.
Value *CastLdValue(Value *Ptr, llvm::Type *FromTy, llvm::Type *ToTy,
                   IRBuilder<> &Builder) {
  if (ToTy->isVectorTy()) {
    unsigned vecSize = ToTy->getVectorNumElements();
    if (vecSize == 1 && ToTy->getVectorElementType() == FromTy) {
      Value *V = Builder.CreateLoad(Ptr);
      // ScalarToVec1Splat
      // Change scalar into vec1.
      Value *Vec1 = UndefValue::get(ToTy);
      return Builder.CreateInsertElement(Vec1, V, (uint64_t)0);
    } else if (vecSize == 1 && FromTy->isIntegerTy() &&
               ToTy->getVectorElementType()->isIntegerTy(1)) {
      // load(bitcast i32* to <1 x i1>*)
      // Rewrite to
      // insertelement(icmp ne (load i32*), 0)
      Value *IntV = Builder.CreateLoad(Ptr);
      Value *BoolV = Builder.CreateICmpNE(
          IntV, ConstantInt::get(IntV->getType(), 0), "tobool");
      Value *Vec1 = UndefValue::get(ToTy);
      return Builder.CreateInsertElement(Vec1, BoolV, (uint64_t)0);
    } else if (FromTy->isVectorTy() && vecSize == 1) {
      Value *V = Builder.CreateLoad(Ptr);
      // VectorTrunc
      // Change vector into vec1.
      int mask[] = {0};
      return Builder.CreateShuffleVector(V, V, mask);
    } else if (FromTy->isArrayTy()) {
      llvm::Type *FromEltTy = FromTy->getArrayElementType();
      llvm::Type *ToEltTy = ToTy->getVectorElementType();
      if (FromTy->getArrayNumElements() == vecSize && FromEltTy == ToEltTy) {
        // ArrayToVector.
        // Load each array element and insert it into the result vector.
        Value *NewLd = UndefValue::get(ToTy);
        Value *zeroIdx = Builder.getInt32(0);
        for (unsigned i = 0; i < vecSize; i++) {
          Value *GEP =
              Builder.CreateInBoundsGEP(Ptr, {zeroIdx, Builder.getInt32(i)});
          Value *Elt = Builder.CreateLoad(GEP);
          NewLd = Builder.CreateInsertElement(NewLd, Elt, i);
        }
        return NewLd;
      }
    }
  } else if (FromTy == Builder.getInt1Ty()) {
    Value *V = Builder.CreateLoad(Ptr);
    // BoolCast
    DXASSERT_NOMSG(ToTy->isIntegerTy());
    return Builder.CreateZExt(V, ToTy);
  }
  return nullptr;
}
// Inverse of CastLdValue: converts value V (typed ToTy, about to be stored
// through a bitcast of Ptr) back to FromTy so it can be stored directly.
// Patterns:
//   <1 x T> -> scalar   extractelement lane 0
//   [N x T] <- <N x T>  element-wise extract + store (stores done here;
//                       returns nullptr so the caller skips its own store)
//   iN      -> i1       icmp-ne-zero
// Returns nullptr when no pattern matches (or when the store was already
// performed in the ArrayToVector case).
Value *CastStValue(Value *Ptr, Value *V, llvm::Type *FromTy, llvm::Type *ToTy,
                   IRBuilder<> &Builder) {
  if (ToTy->isVectorTy()) {
    unsigned vecSize = ToTy->getVectorNumElements();
    if (vecSize == 1 && ToTy->getVectorElementType() == FromTy) {
      // ScalarToVec1Splat
      // Change vec1 back to scalar.
      Value *Elt = Builder.CreateExtractElement(V, (uint64_t)0);
      return Elt;
    } else if (FromTy->isVectorTy() && vecSize == 1) {
      // VectorTrunc
      // Change vec1 into vector.
      // Should not happen.
      // Reported error at Sema::ImpCastExprToType.
      DXASSERT_NOMSG(0);
    } else if (FromTy->isArrayTy()) {
      llvm::Type *FromEltTy = FromTy->getArrayElementType();
      llvm::Type *ToEltTy = ToTy->getVectorElementType();
      if (FromTy->getArrayNumElements() == vecSize && FromEltTy == ToEltTy) {
        // ArrayToVector.
        // Extract each vector element and store it into the array slot.
        Value *zeroIdx = Builder.getInt32(0);
        for (unsigned i = 0; i < vecSize; i++) {
          Value *Elt = Builder.CreateExtractElement(V, i);
          Value *GEP =
              Builder.CreateInBoundsGEP(Ptr, {zeroIdx, Builder.getInt32(i)});
          Builder.CreateStore(Elt, GEP);
        }
        // The store already done.
        // Return null to ignore use of the return value.
        return nullptr;
      }
    }
  } else if (FromTy == Builder.getInt1Ty()) {
    // BoolCast
    // Change i1 to ToTy.
    DXASSERT_NOMSG(ToTy->isIntegerTy());
    Value *CastV = Builder.CreateICmpNE(V, ConstantInt::get(V->getType(), 0));
    return CastV;
  }
  return nullptr;
}
  1072. bool SimplifyBitCastLoad(LoadInst *LI, llvm::Type *FromTy, llvm::Type *ToTy,
  1073. Value *Ptr) {
  1074. IRBuilder<> Builder(LI);
  1075. // Cast FromLd to ToTy.
  1076. Value *CastV = CastLdValue(Ptr, FromTy, ToTy, Builder);
  1077. if (CastV) {
  1078. LI->replaceAllUsesWith(CastV);
  1079. return true;
  1080. } else {
  1081. return false;
  1082. }
  1083. }
  1084. bool SimplifyBitCastStore(StoreInst *SI, llvm::Type *FromTy, llvm::Type *ToTy,
  1085. Value *Ptr) {
  1086. IRBuilder<> Builder(SI);
  1087. Value *V = SI->getValueOperand();
  1088. // Cast Val to FromTy.
  1089. Value *CastV = CastStValue(Ptr, V, FromTy, ToTy, Builder);
  1090. if (CastV) {
  1091. Builder.CreateStore(CastV, Ptr);
  1092. return true;
  1093. } else {
  1094. return false;
  1095. }
  1096. }
// Rewrites a GEP through a pointer bitcast to address the original pointer
// Ptr directly. Only the ScalarToVec1Splat and VectorTrunc patterns are
// handled; the ArrayToVector and BoolCast branches are intentionally empty
// and fall through to return false (no rewrite).
bool SimplifyBitCastGEP(GEPOperator *GEP, llvm::Type *FromTy, llvm::Type *ToTy,
                        Value *Ptr) {
  if (ToTy->isVectorTy()) {
    unsigned vecSize = ToTy->getVectorNumElements();
    if (vecSize == 1 && ToTy->getVectorElementType() == FromTy) {
      // ScalarToVec1Splat
      // The GEP into a one-element vector is just the scalar pointer itself.
      GEP->replaceAllUsesWith(Ptr);
      return true;
    } else if (FromTy->isVectorTy() && vecSize == 1) {
      // VectorTrunc
      DXASSERT_NOMSG(
          !isa<llvm::VectorType>(GEP->getType()->getPointerElementType()));
      // Builder gets an insert point only when GEP is an instruction; a
      // constant-expression GEP folds without one.
      IRBuilder<> Builder(FromTy->getContext());
      if (Instruction *I = dyn_cast<Instruction>(GEP))
        Builder.SetInsertPoint(I);
      std::vector<Value *> idxList(GEP->idx_begin(), GEP->idx_end());
      Value *NewGEP = Builder.CreateInBoundsGEP(Ptr, idxList);
      GEP->replaceAllUsesWith(NewGEP);
      return true;
    } else if (FromTy->isArrayTy()) {
      llvm::Type *FromEltTy = FromTy->getArrayElementType();
      llvm::Type *ToEltTy = ToTy->getVectorElementType();
      if (FromTy->getArrayNumElements() == vecSize && FromEltTy == ToEltTy) {
        // ArrayToVector.
      }
    }
  } else if (FromTy == llvm::Type::getInt1Ty(FromTy->getContext())) {
    // BoolCast
  }
  return false;
}
  1128. typedef SmallPtrSet<Instruction *, 4> SmallInstSet;
// Rewrites the users of a pointer-to-pointer bitcast (loads, stores, GEPs)
// to operate on the original pointer with explicit value conversions,
// collecting fully-replaced instructions into deadInsts for later removal.
// Struct sources are first flattened with a leading all-zero GEP so the
// element type lines up with the cast destination.
void SimplifyBitCast(BitCastOperator *BC, SmallInstSet &deadInsts) {
  Value *Ptr = BC->getOperand(0);
  llvm::Type *FromTy = Ptr->getType();
  llvm::Type *ToTy = BC->getType();
  // Only pointer-to-pointer bitcasts are handled.
  if (!FromTy->isPointerTy() || !ToTy->isPointerTy())
    return;
  FromTy = FromTy->getPointerElementType();
  ToTy = ToTy->getPointerElementType();
  // Take care case like %2 = bitcast %struct.T* %1 to <1 x float>*.
  bool GEPCreated = false;
  if (FromTy->isStructTy()) {
    IRBuilder<> Builder(FromTy->getContext());
    if (Instruction *I = dyn_cast<Instruction>(BC))
      Builder.SetInsertPoint(I);
    Value *zeroIdx = Builder.getInt32(0);
    // Descend through nested single-field structs, counting one zero index
    // per level (plus the initial pointer-level zero).
    unsigned nestLevel = 1;
    while (llvm::StructType *ST = dyn_cast<llvm::StructType>(FromTy)) {
      if (ST->getNumElements() == 0)
        break;
      FromTy = ST->getElementType(0);
      nestLevel++;
    }
    std::vector<Value *> idxList(nestLevel, zeroIdx);
    Ptr = Builder.CreateGEP(Ptr, idxList);
    GEPCreated = true;
  }
  // Dispatch on each user kind; successfully rewritten users become dead.
  for (User *U : BC->users()) {
    if (LoadInst *LI = dyn_cast<LoadInst>(U)) {
      if (SimplifyBitCastLoad(LI, FromTy, ToTy, Ptr)) {
        LI->dropAllReferences();
        deadInsts.insert(LI);
      }
    } else if (StoreInst *SI = dyn_cast<StoreInst>(U)) {
      if (SimplifyBitCastStore(SI, FromTy, ToTy, Ptr)) {
        SI->dropAllReferences();
        deadInsts.insert(SI);
      }
    } else if (GEPOperator *GEP = dyn_cast<GEPOperator>(U)) {
      if (SimplifyBitCastGEP(GEP, FromTy, ToTy, Ptr))
        if (Instruction *I = dyn_cast<Instruction>(GEP)) {
          I->dropAllReferences();
          deadInsts.insert(I);
        }
    } else if (dyn_cast<CallInst>(U)) {
      // Skip function call.
    } else if (dyn_cast<BitCastInst>(U)) {
      // Skip bitcast.
    } else if (dyn_cast<AddrSpaceCastInst>(U)) {
      // Skip addrspacecast.
    } else {
      DXASSERT(0, "not support yet");
    }
  }
  // We created a GEP instruction but didn't end up consuming it, so delete it.
  if (GEPCreated && Ptr->use_empty()) {
    if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(Ptr))
      GEP->eraseFromParent();
    else
      cast<Constant>(Ptr)->destroyConstant();
  }
}
  1190. typedef float(__cdecl *FloatUnaryEvalFuncType)(float);
  1191. typedef double(__cdecl *DoubleUnaryEvalFuncType)(double);
  1192. typedef APInt(__cdecl *IntBinaryEvalFuncType)(const APInt &, const APInt &);
  1193. typedef float(__cdecl *FloatBinaryEvalFuncType)(float, float);
  1194. typedef double(__cdecl *DoubleBinaryEvalFuncType)(double, double);
  1195. Value *EvalUnaryIntrinsic(ConstantFP *fpV, FloatUnaryEvalFuncType floatEvalFunc,
  1196. DoubleUnaryEvalFuncType doubleEvalFunc) {
  1197. llvm::Type *Ty = fpV->getType();
  1198. Value *Result = nullptr;
  1199. if (Ty->isDoubleTy()) {
  1200. double dV = fpV->getValueAPF().convertToDouble();
  1201. Value *dResult = ConstantFP::get(Ty, doubleEvalFunc(dV));
  1202. Result = dResult;
  1203. } else {
  1204. DXASSERT_NOMSG(Ty->isFloatTy());
  1205. float fV = fpV->getValueAPF().convertToFloat();
  1206. Value *dResult = ConstantFP::get(Ty, floatEvalFunc(fV));
  1207. Result = dResult;
  1208. }
  1209. return Result;
  1210. }
  1211. Value *EvalBinaryIntrinsic(Constant *cV0, Constant *cV1,
  1212. FloatBinaryEvalFuncType floatEvalFunc,
  1213. DoubleBinaryEvalFuncType doubleEvalFunc,
  1214. IntBinaryEvalFuncType intEvalFunc) {
  1215. llvm::Type *Ty = cV0->getType();
  1216. Value *Result = nullptr;
  1217. if (Ty->isDoubleTy()) {
  1218. ConstantFP *fpV0 = cast<ConstantFP>(cV0);
  1219. ConstantFP *fpV1 = cast<ConstantFP>(cV1);
  1220. double dV0 = fpV0->getValueAPF().convertToDouble();
  1221. double dV1 = fpV1->getValueAPF().convertToDouble();
  1222. Value *dResult = ConstantFP::get(Ty, doubleEvalFunc(dV0, dV1));
  1223. Result = dResult;
  1224. } else if (Ty->isFloatTy()) {
  1225. ConstantFP *fpV0 = cast<ConstantFP>(cV0);
  1226. ConstantFP *fpV1 = cast<ConstantFP>(cV1);
  1227. float fV0 = fpV0->getValueAPF().convertToFloat();
  1228. float fV1 = fpV1->getValueAPF().convertToFloat();
  1229. Value *dResult = ConstantFP::get(Ty, floatEvalFunc(fV0, fV1));
  1230. Result = dResult;
  1231. } else {
  1232. DXASSERT_NOMSG(Ty->isIntegerTy());
  1233. DXASSERT_NOMSG(intEvalFunc);
  1234. ConstantInt *ciV0 = cast<ConstantInt>(cV0);
  1235. ConstantInt *ciV1 = cast<ConstantInt>(cV1);
  1236. const APInt &iV0 = ciV0->getValue();
  1237. const APInt &iV1 = ciV1->getValue();
  1238. Value *dResult = ConstantInt::get(Ty, intEvalFunc(iV0, iV1));
  1239. Result = dResult;
  1240. }
  1241. return Result;
  1242. }
  1243. Value *EvalUnaryIntrinsic(CallInst *CI, FloatUnaryEvalFuncType floatEvalFunc,
  1244. DoubleUnaryEvalFuncType doubleEvalFunc) {
  1245. Value *V = CI->getArgOperand(0);
  1246. llvm::Type *Ty = CI->getType();
  1247. Value *Result = nullptr;
  1248. if (llvm::VectorType *VT = dyn_cast<llvm::VectorType>(Ty)) {
  1249. Result = UndefValue::get(Ty);
  1250. Constant *CV = cast<Constant>(V);
  1251. IRBuilder<> Builder(CI);
  1252. for (unsigned i = 0; i < VT->getNumElements(); i++) {
  1253. ConstantFP *fpV = cast<ConstantFP>(CV->getAggregateElement(i));
  1254. Value *EltResult = EvalUnaryIntrinsic(fpV, floatEvalFunc, doubleEvalFunc);
  1255. Result = Builder.CreateInsertElement(Result, EltResult, i);
  1256. }
  1257. } else {
  1258. ConstantFP *fpV = cast<ConstantFP>(V);
  1259. Result = EvalUnaryIntrinsic(fpV, floatEvalFunc, doubleEvalFunc);
  1260. }
  1261. CI->replaceAllUsesWith(Result);
  1262. CI->eraseFromParent();
  1263. return Result;
  1264. }
  1265. Value *EvalBinaryIntrinsic(CallInst *CI, FloatBinaryEvalFuncType floatEvalFunc,
  1266. DoubleBinaryEvalFuncType doubleEvalFunc,
  1267. IntBinaryEvalFuncType intEvalFunc = nullptr) {
  1268. Value *V0 = CI->getArgOperand(0);
  1269. Value *V1 = CI->getArgOperand(1);
  1270. llvm::Type *Ty = CI->getType();
  1271. Value *Result = nullptr;
  1272. if (llvm::VectorType *VT = dyn_cast<llvm::VectorType>(Ty)) {
  1273. Result = UndefValue::get(Ty);
  1274. Constant *CV0 = cast<Constant>(V0);
  1275. Constant *CV1 = cast<Constant>(V1);
  1276. IRBuilder<> Builder(CI);
  1277. for (unsigned i = 0; i < VT->getNumElements(); i++) {
  1278. Constant *cV0 = cast<Constant>(CV0->getAggregateElement(i));
  1279. Constant *cV1 = cast<Constant>(CV1->getAggregateElement(i));
  1280. Value *EltResult = EvalBinaryIntrinsic(cV0, cV1, floatEvalFunc,
  1281. doubleEvalFunc, intEvalFunc);
  1282. Result = Builder.CreateInsertElement(Result, EltResult, i);
  1283. }
  1284. } else {
  1285. Constant *cV0 = cast<Constant>(V0);
  1286. Constant *cV1 = cast<Constant>(V1);
  1287. Result = EvalBinaryIntrinsic(cV0, cV1, floatEvalFunc, doubleEvalFunc,
  1288. intEvalFunc);
  1289. }
  1290. CI->replaceAllUsesWith(Result);
  1291. CI->eraseFromParent();
  1292. return Result;
  1293. CI->eraseFromParent();
  1294. return Result;
  1295. }
// Apply simple per-instruction cleanups that make later lowering passes
// easier: flatten bitcasts (including constant-expression bitcasts feeding
// loads/stores) and clamp shift amounts to the operand's bit width.
// Instructions made dead are recorded in deadInsts for the caller to erase.
void SimpleTransformForHLDXIRInst(Instruction *I, SmallInstSet &deadInsts) {
  unsigned opcode = I->getOpcode();
  switch (opcode) {
  case Instruction::BitCast: {
    BitCastOperator *BCI = cast<BitCastOperator>(I);
    SimplifyBitCast(BCI, deadInsts);
  } break;
  case Instruction::Load: {
    LoadInst *ldInst = cast<LoadInst>(I);
    DXASSERT(!HLMatrixType::isa(ldInst->getType()),
             "matrix load should use HL LdStMatrix");
    Value *Ptr = ldInst->getPointerOperand();
    // A load through a constant-expression bitcast is simplified the same
    // way as an instruction bitcast.
    if (ConstantExpr *CE = dyn_cast_or_null<ConstantExpr>(Ptr)) {
      if (BitCastOperator *BCO = dyn_cast<BitCastOperator>(CE)) {
        SimplifyBitCast(BCO, deadInsts);
      }
    }
  } break;
  case Instruction::Store: {
    StoreInst *stInst = cast<StoreInst>(I);
    Value *V = stInst->getValueOperand();
    DXASSERT_LOCALVAR(V, !HLMatrixType::isa(V->getType()),
                      "matrix store should use HL LdStMatrix");
    Value *Ptr = stInst->getPointerOperand();
    if (ConstantExpr *CE = dyn_cast<ConstantExpr>(Ptr)) {
      if (BitCastOperator *BCO = dyn_cast<BitCastOperator>(CE)) {
        SimplifyBitCast(BCO, deadInsts);
      }
    }
  } break;
  case Instruction::LShr:
  case Instruction::AShr:
  case Instruction::Shl: {
    llvm::BinaryOperator *BO = cast<llvm::BinaryOperator>(I);
    Value *op2 = BO->getOperand(1);
    IntegerType *Ty = cast<IntegerType>(BO->getType()->getScalarType());
    unsigned bitWidth = Ty->getBitWidth();
    // Clamp op2 to 0 ~ bitWidth-1
    // NOTE: the mask (bitWidth - 1) assumes a power-of-two bit width, which
    // holds for the HLSL integer types reaching this path.
    if (ConstantInt *cOp2 = dyn_cast<ConstantInt>(op2)) {
      // Constant shift amount: rewrite the operand in place if out of range.
      unsigned iOp2 = cOp2->getLimitedValue();
      unsigned clampedOp2 = iOp2 & (bitWidth - 1);
      if (iOp2 != clampedOp2) {
        BO->setOperand(1, ConstantInt::get(op2->getType(), clampedOp2));
      }
    } else {
      // Dynamic shift amount: insert an explicit mask before the shift.
      Value *mask = ConstantInt::get(op2->getType(), bitWidth - 1);
      IRBuilder<> Builder(I);
      op2 = Builder.CreateAnd(op2, mask);
      BO->setOperand(1, op2);
    }
  } break;
  }
}
  1349. } // namespace
  1350. namespace CGHLSLMSHelper {
// Try to constant-fold an HLSL intrinsic call whose arguments are constant.
// On success the call is replaced with the folded constant, erased, and the
// folded value is returned; unsupported intrinsics return nullptr.
// hlslVersion selects language-version-dependent semantics (see IOP_round).
Value *TryEvalIntrinsic(CallInst *CI, IntrinsicOp intriOp,
                        unsigned hlslVersion) {
  switch (intriOp) {
  case IntrinsicOp::IOP_tan: {
    return EvalUnaryIntrinsic(CI, tanf, tan);
  } break;
  case IntrinsicOp::IOP_tanh: {
    return EvalUnaryIntrinsic(CI, tanhf, tanh);
  } break;
  case IntrinsicOp::IOP_sin: {
    return EvalUnaryIntrinsic(CI, sinf, sin);
  } break;
  case IntrinsicOp::IOP_sinh: {
    return EvalUnaryIntrinsic(CI, sinhf, sinh);
  } break;
  case IntrinsicOp::IOP_cos: {
    return EvalUnaryIntrinsic(CI, cosf, cos);
  } break;
  case IntrinsicOp::IOP_cosh: {
    return EvalUnaryIntrinsic(CI, coshf, cosh);
  } break;
  case IntrinsicOp::IOP_asin: {
    return EvalUnaryIntrinsic(CI, asinf, asin);
  } break;
  case IntrinsicOp::IOP_acos: {
    return EvalUnaryIntrinsic(CI, acosf, acos);
  } break;
  case IntrinsicOp::IOP_atan: {
    return EvalUnaryIntrinsic(CI, atanf, atan);
  } break;
  case IntrinsicOp::IOP_atan2: {
    // atan2 is folded inline (scalar-only here, unlike the helpers above,
    // which also handle vectors).
    Value *V0 = CI->getArgOperand(0);
    ConstantFP *fpV0 = cast<ConstantFP>(V0);
    Value *V1 = CI->getArgOperand(1);
    ConstantFP *fpV1 = cast<ConstantFP>(V1);
    llvm::Type *Ty = CI->getType();
    Value *Result = nullptr;
    if (Ty->isDoubleTy()) {
      double dV0 = fpV0->getValueAPF().convertToDouble();
      double dV1 = fpV1->getValueAPF().convertToDouble();
      Value *atanV = ConstantFP::get(CI->getType(), atan2(dV0, dV1));
      CI->replaceAllUsesWith(atanV);
      Result = atanV;
    } else {
      DXASSERT_NOMSG(Ty->isFloatTy());
      float fV0 = fpV0->getValueAPF().convertToFloat();
      float fV1 = fpV1->getValueAPF().convertToFloat();
      Value *atanV = ConstantFP::get(CI->getType(), atan2f(fV0, fV1));
      CI->replaceAllUsesWith(atanV);
      Result = atanV;
    }
    CI->eraseFromParent();
    return Result;
  } break;
  case IntrinsicOp::IOP_sqrt: {
    return EvalUnaryIntrinsic(CI, sqrtf, sqrt);
  } break;
  case IntrinsicOp::IOP_rsqrt: {
    // rsqrt(x) = 1 / sqrt(x).
    auto rsqrtF = [](float v) -> float { return 1.0 / sqrtf(v); };
    auto rsqrtD = [](double v) -> double { return 1.0 / sqrt(v); };
    return EvalUnaryIntrinsic(CI, rsqrtF, rsqrtD);
  } break;
  case IntrinsicOp::IOP_exp: {
    return EvalUnaryIntrinsic(CI, expf, exp);
  } break;
  case IntrinsicOp::IOP_exp2: {
    return EvalUnaryIntrinsic(CI, exp2f, exp2);
  } break;
  case IntrinsicOp::IOP_log: {
    return EvalUnaryIntrinsic(CI, logf, log);
  } break;
  case IntrinsicOp::IOP_log10: {
    return EvalUnaryIntrinsic(CI, log10f, log10);
  } break;
  case IntrinsicOp::IOP_log2: {
    return EvalUnaryIntrinsic(CI, log2f, log2);
  } break;
  case IntrinsicOp::IOP_pow: {
    return EvalBinaryIntrinsic(CI, powf, pow);
  } break;
  case IntrinsicOp::IOP_max: {
    // Signed max for the integer path; float paths use ordinary compares.
    auto maxF = [](float a, float b) -> float { return a > b ? a : b; };
    auto maxD = [](double a, double b) -> double { return a > b ? a : b; };
    auto imaxI = [](const APInt &a, const APInt &b) -> APInt {
      return a.sgt(b) ? a : b;
    };
    return EvalBinaryIntrinsic(CI, maxF, maxD, imaxI);
  } break;
  case IntrinsicOp::IOP_min: {
    // Signed min for the integer path.
    auto minF = [](float a, float b) -> float { return a < b ? a : b; };
    auto minD = [](double a, double b) -> double { return a < b ? a : b; };
    auto iminI = [](const APInt &a, const APInt &b) -> APInt {
      return a.slt(b) ? a : b;
    };
    return EvalBinaryIntrinsic(CI, minF, minD, iminI);
  } break;
  case IntrinsicOp::IOP_umax: {
    // Unsigned integer max; float evaluators are unreachable (nullptr).
    DXASSERT_NOMSG(
        CI->getArgOperand(0)->getType()->getScalarType()->isIntegerTy());
    auto umaxI = [](const APInt &a, const APInt &b) -> APInt {
      return a.ugt(b) ? a : b;
    };
    return EvalBinaryIntrinsic(CI, nullptr, nullptr, umaxI);
  } break;
  case IntrinsicOp::IOP_umin: {
    // Unsigned integer min; float evaluators are unreachable (nullptr).
    DXASSERT_NOMSG(
        CI->getArgOperand(0)->getType()->getScalarType()->isIntegerTy());
    auto uminI = [](const APInt &a, const APInt &b) -> APInt {
      return a.ult(b) ? a : b;
    };
    return EvalBinaryIntrinsic(CI, nullptr, nullptr, uminI);
  } break;
  case IntrinsicOp::IOP_rcp: {
    // rcp(x) = 1 / x.
    auto rcpF = [](float v) -> float { return 1.0 / v; };
    auto rcpD = [](double v) -> double { return 1.0 / v; };
    return EvalUnaryIntrinsic(CI, rcpF, rcpD);
  } break;
  case IntrinsicOp::IOP_ceil: {
    return EvalUnaryIntrinsic(CI, ceilf, ceil);
  } break;
  case IntrinsicOp::IOP_floor: {
    return EvalUnaryIntrinsic(CI, floorf, floor);
  } break;
  case IntrinsicOp::IOP_round: {
    // round intrinsic could exhibit different behaviour for constant and
    // runtime evaluations. E.g., for round(0.5): constant evaluation results in
    // 1 (away from zero rounding), while runtime evaluation results in 0
    // (nearest even rounding).
    //
    // For back compat, DXC still preserves the above behavior for language
    // versions 2016 or below. However, for newer language versions, DXC now
    // always use nearest even for round() intrinsic in all cases.
    if (hlslVersion <= 2016) {
      return EvalUnaryIntrinsic(CI, roundf, round);
    } else {
      // Temporarily force round-to-nearest-even, then restore the previous
      // FP rounding mode.
      auto roundingMode = fegetround();
      fesetround(FE_TONEAREST);
      Value *result = EvalUnaryIntrinsic(CI, nearbyintf, nearbyint);
      fesetround(roundingMode);
      return result;
    }
  } break;
  case IntrinsicOp::IOP_trunc: {
    return EvalUnaryIntrinsic(CI, truncf, trunc);
  } break;
  case IntrinsicOp::IOP_frac: {
    // frac(x) = x - floor(x).
    auto fracF = [](float v) -> float { return v - floor(v); };
    auto fracD = [](double v) -> double { return v - floor(v); };
    return EvalUnaryIntrinsic(CI, fracF, fracD);
  } break;
  case IntrinsicOp::IOP_isnan: {
    // Folds to an i1-style 0/1 constant of the call's return type.
    Value *V = CI->getArgOperand(0);
    ConstantFP *fV = cast<ConstantFP>(V);
    bool isNan = fV->getValueAPF().isNaN();
    Constant *cNan = ConstantInt::get(CI->getType(), isNan ? 1 : 0);
    CI->replaceAllUsesWith(cNan);
    CI->eraseFromParent();
    return cNan;
  } break;
  default:
    // Not a foldable intrinsic; caller keeps the original call.
    return nullptr;
  }
}
// Do simple transform to make later lower pass easier.
// Runs SimpleTransformForHLDXIRInst over every instruction, erases the
// instructions it marked dead, then simplifies bitcasts of static globals.
void SimpleTransformForHLDXIR(llvm::Module *pM) {
  SmallInstSet deadInsts;
  for (Function &F : pM->functions()) {
    for (BasicBlock &BB : F.getBasicBlockList()) {
      for (BasicBlock::iterator Iter = BB.begin(); Iter != BB.end();) {
        // Advance before transforming: the current instruction may be
        // erased or have its successors rewritten.
        Instruction *I = (Iter++);
        if (deadInsts.count(I))
          continue; // Skip dead instructions
        SimpleTransformForHLDXIRInst(I, deadInsts);
      }
    }
  }
  // Drop references first so mutually-referencing dead instructions can be
  // erased safely afterwards.
  for (Instruction *I : deadInsts)
    I->dropAllReferences();
  for (Instruction *I : deadInsts)
    I->eraseFromParent();
  deadInsts.clear();
  // Second pass: simplify bitcast users of static globals (these reach the
  // globals through constant operators rather than instructions).
  for (GlobalVariable &GV : pM->globals()) {
    if (dxilutil::IsStaticGlobal(&GV)) {
      for (User *U : GV.users()) {
        if (BitCastOperator *BCO = dyn_cast<BitCastOperator>(U)) {
          SimplifyBitCast(BCO, deadInsts);
        }
      }
    }
  }
  for (Instruction *I : deadInsts)
    I->dropAllReferences();
  for (Instruction *I : deadInsts)
    I->eraseFromParent();
}
  1546. } // namespace CGHLSLMSHelper
  1547. namespace {
  1548. unsigned RoundToAlign(unsigned num, unsigned mod) {
  1549. // round num to next highest mod
  1550. if (mod != 0)
  1551. return mod * ((num + mod - 1) / mod);
  1552. return num;
  1553. }
  1554. // Retrieve the last scalar or vector element type.
  1555. // This has to be recursive for the nasty empty struct case.
  1556. // returns true if found, false if we must backtrack.
  1557. bool RetrieveLastElementType(Type *Ty, Type *&EltTy) {
  1558. if (Ty->isStructTy()) {
  1559. if (Ty->getStructNumElements() == 0)
  1560. return false;
  1561. for (unsigned i = Ty->getStructNumElements(); i > 0; --i) {
  1562. if (RetrieveLastElementType(Ty->getStructElementType(i - 1), EltTy))
  1563. return true;
  1564. }
  1565. } else if (Ty->isArrayTy()) {
  1566. if (RetrieveLastElementType(Ty->getArrayElementType(), EltTy))
  1567. return true;
  1568. } else if ((Ty->isVectorTy() || Ty->isSingleValueType())) {
  1569. EltTy = Ty->getScalarType();
  1570. return true;
  1571. }
  1572. return false;
  1573. }
// Here the size is CB size.
// Offset still needs to be aligned based on type since this
// is the legacy cbuffer global path.
// Returns the aligned offset at which a constant of type Ty (occupying
// `size` bytes) may be placed. bCurRowIsMinPrec is in/out state tracking
// whether the current 16-byte row holds min-precision data, so that
// min-precision and full-precision values are never packed into one row.
unsigned AlignCBufferOffset(unsigned offset, unsigned size, llvm::Type *Ty,
                            bool bRowMajor, bool bMinPrecMode,
                            bool &bCurRowIsMinPrec) {
  DXASSERT(!(offset & 1), "otherwise we have an invalid offset.");
  // Arrays always start on a fresh 16-byte row.
  bool bNeedNewRow = Ty->isArrayTy();
  // In min-precision mode, a new row is needed when
  // going into or out of min-precision component type.
  if (!bNeedNewRow) {
    bool bMinPrec = false;
    if (Ty->isStructTy()) {
      if (HLMatrixType mat = HLMatrixType::dyn_cast(Ty)) {
        // Matrices need a new row unless they reduce to a single
        // row/column in their storage orientation.
        bNeedNewRow |= !bRowMajor && mat.getNumColumns() > 1;
        bNeedNewRow |= bRowMajor && mat.getNumRows() > 1;
        bMinPrec = bMinPrecMode &&
                   mat.getElementType(false)->getScalarSizeInBits() < 32;
      } else {
        // Non-matrix structs always start a new row.
        bNeedNewRow = true;
        if (bMinPrecMode) {
          // Need to get min-prec of last element of structure,
          // in case we pack something else into the end.
          Type *EltTy = nullptr;
          if (RetrieveLastElementType(Ty, EltTy))
            bCurRowIsMinPrec = EltTy->getScalarSizeInBits() < 32;
        }
      }
    } else {
      DXASSERT_NOMSG(Ty->isVectorTy() || Ty->isSingleValueType());
      // vector or scalar
      bMinPrec = bMinPrecMode && Ty->getScalarSizeInBits() < 32;
    }
    if (bMinPrecMode) {
      // Force a new row on any min-precision <-> full-precision transition.
      bNeedNewRow |= bCurRowIsMinPrec != bMinPrec;
      bCurRowIsMinPrec = bMinPrec;
    }
  }
  unsigned scalarSizeInBytes = Ty->getScalarSizeInBits() / 8;
  return AlignBufferOffsetInLegacy(offset, size, scalarSizeInBytes,
                                   bNeedNewRow);
}
  1616. unsigned AllocateDxilConstantBuffer(
  1617. HLCBuffer &CB,
  1618. std::unordered_map<Constant *, DxilFieldAnnotation> &constVarAnnotationMap,
  1619. bool bMinPrecMode) {
  1620. unsigned offset = 0;
  1621. // Scan user allocated constants first.
  1622. // Update offset.
  1623. for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
  1624. if (C->GetLowerBound() == UINT_MAX)
  1625. continue;
  1626. unsigned size = C->GetRangeSize();
  1627. unsigned nextOffset = size + C->GetLowerBound();
  1628. if (offset < nextOffset)
  1629. offset = nextOffset;
  1630. }
  1631. // Alloc after user allocated constants.
  1632. bool bCurRowIsMinPrec = false;
  1633. for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
  1634. if (C->GetLowerBound() != UINT_MAX)
  1635. continue;
  1636. unsigned size = C->GetRangeSize();
  1637. llvm::Type *Ty = C->GetGlobalSymbol()->getType()->getPointerElementType();
  1638. auto fieldAnnotation = constVarAnnotationMap.at(C->GetGlobalSymbol());
  1639. bool bRowMajor = HLMatrixType::isa(Ty)
  1640. ? fieldAnnotation.GetMatrixAnnotation().Orientation ==
  1641. MatrixOrientation::RowMajor
  1642. : false;
  1643. // Align offset.
  1644. offset = AlignCBufferOffset(offset, size, Ty, bRowMajor, bMinPrecMode,
  1645. bCurRowIsMinPrec);
  1646. if (C->GetLowerBound() == UINT_MAX) {
  1647. C->SetLowerBound(offset);
  1648. }
  1649. offset += size;
  1650. }
  1651. return offset;
  1652. }
  1653. void AllocateDxilConstantBuffers(
  1654. HLModule &HLM, std::unordered_map<Constant *, DxilFieldAnnotation>
  1655. &constVarAnnotationMap) {
  1656. for (unsigned i = 0; i < HLM.GetCBuffers().size(); i++) {
  1657. HLCBuffer &CB = *static_cast<HLCBuffer *>(&(HLM.GetCBuffer(i)));
  1658. unsigned size = AllocateDxilConstantBuffer(
  1659. CB, constVarAnnotationMap, HLM.GetHLOptions().bUseMinPrecision);
  1660. CB.SetSize(size);
  1661. }
  1662. }
  1663. } // namespace
  1664. namespace {
// Replace every use of V that lives inside function F with NewV. Uses that
// go through constant operators (GEP/bitcast) are recreated as instructions
// via Builder so the replacement stays function-local; a GlobalVariable
// user has its initializer cleared and replaced with an explicit store.
void ReplaceUseInFunction(Value *V, Value *NewV, Function *F,
                          IRBuilder<> &Builder) {
  // Advance the iterator before touching each user: replacements below
  // mutate V's use list.
  for (auto U = V->user_begin(); U != V->user_end();) {
    User *user = *(U++);
    if (Instruction *I = dyn_cast<Instruction>(user)) {
      if (I->getParent()->getParent() == F) {
        // replace use with GEP if in F
        if (BitCastInst *BCI = dyn_cast<BitCastInst>(I)) {
          if (BCI->getType() == NewV->getType()) {
            // A bitcast to NewV's own type becomes redundant; forward its
            // uses directly and drop it.
            I->replaceAllUsesWith(NewV);
            I->eraseFromParent();
            continue;
          }
        }
        I->replaceUsesOfWith(V, NewV);
      }
    } else {
      // For constant operator, create local clone which use GEP.
      // Only support GEP and bitcast.
      if (GEPOperator *GEPOp = dyn_cast<GEPOperator>(user)) {
        std::vector<Value *> idxList(GEPOp->idx_begin(), GEPOp->idx_end());
        Value *NewGEP = Builder.CreateInBoundsGEP(NewV, idxList);
        ReplaceUseInFunction(GEPOp, NewGEP, F, Builder);
      } else if (GlobalVariable *GV = dyn_cast<GlobalVariable>(user)) {
        // Change the init val into NewV with Store.
        GV->setInitializer(nullptr);
        Builder.CreateStore(NewV, GV);
      } else {
        // Must be bitcast here.
        BitCastOperator *BC = cast<BitCastOperator>(user);
        Value *NewBC = Builder.CreateBitCast(NewV, BC->getType());
        ReplaceUseInFunction(BC, NewBC, F, Builder);
      }
    }
  }
}
  1701. void MarkUsedFunctionForConst(Value *V,
  1702. std::unordered_set<Function *> &usedFunc) {
  1703. for (auto U = V->user_begin(); U != V->user_end();) {
  1704. User *user = *(U++);
  1705. if (Instruction *I = dyn_cast<Instruction>(user)) {
  1706. Function *F = I->getParent()->getParent();
  1707. usedFunc.insert(F);
  1708. } else {
  1709. // For constant operator, create local clone which use GEP.
  1710. // Only support GEP and bitcast.
  1711. if (GEPOperator *GEPOp = dyn_cast<GEPOperator>(user)) {
  1712. MarkUsedFunctionForConst(GEPOp, usedFunc);
  1713. } else if (GlobalVariable *GV = dyn_cast<GlobalVariable>(user)) {
  1714. MarkUsedFunctionForConst(GV, usedFunc);
  1715. } else {
  1716. // Must be bitcast here.
  1717. BitCastOperator *BC = cast<BitCastOperator>(user);
  1718. MarkUsedFunctionForConst(BC, usedFunc);
  1719. }
  1720. }
  1721. }
  1722. }
// Create the global variable that represents a cbuffer's layout struct and
// rewrite every use of the member constants to go through HL createhandle/
// annotatehandle/subscript calls, per function. Returns false (creating
// nothing) when no member constant is used anywhere.
bool CreateCBufferVariable(HLCBuffer &CB, HLModule &HLM, llvm::Type *HandleTy) {
  bool bUsed = false;
  // Build Struct for CBuffer.
  SmallVector<llvm::Type *, 4> Elements;
  for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
    Value *GV = C->GetGlobalSymbol();
    if (!GV->use_empty())
      bUsed = true;
    // Global variable must be pointer type.
    llvm::Type *Ty = GV->getType()->getPointerElementType();
    Elements.emplace_back(Ty);
  }
  // Don't create CBuffer variable for unused cbuffer.
  if (!bUsed)
    return false;
  llvm::Module &M = *HLM.GetModule();
  bool isCBArray = CB.IsArray();
  llvm::GlobalVariable *cbGV = nullptr;
  llvm::Type *cbTy = nullptr;
  // For cbuffer arrays: how many array dimensions a GEP must consume before
  // reaching the element struct.
  unsigned cbIndexDepth = 0;
  if (!isCBArray) {
    if (CB.IsView()) {
      // ConstantBuffer<T> view: the struct type is the declared result type.
      llvm::StructType *CBStructTy =
          llvm::StructType::create(CB.GetResultType(), CB.GetGlobalName());
      cbGV = new llvm::GlobalVariable(M, CBStructTy,
                                      /*IsConstant*/ true,
                                      llvm::GlobalValue::ExternalLinkage,
                                      /*InitVal*/ nullptr, CB.GetGlobalName());
      cbTy = cbGV->getType();
    } else {
      // Plain cbuffer: build the struct from the member constant types.
      llvm::StructType *CBStructTy =
          llvm::StructType::create(Elements, CB.GetGlobalName());
      cbGV = new llvm::GlobalVariable(M, CBStructTy, /*IsConstant*/ true,
                                      llvm::GlobalValue::ExternalLinkage,
                                      /*InitVal*/ nullptr, CB.GetGlobalName());
      cbTy = cbGV->getType();
    }
  } else {
    // For array of ConstantBuffer, create array of struct instead of struct of
    // array.
    DXASSERT(CB.GetConstants().size() == 1,
             "ConstantBuffer should have 1 constant");
    Value *GV = CB.GetConstants()[0]->GetGlobalSymbol();
    llvm::Type *CBEltTy =
        GV->getType()->getPointerElementType()->getArrayElementType();
    cbIndexDepth = 1;
    // Count nested array dimensions down to the element struct.
    while (CBEltTy->isArrayTy()) {
      CBEltTy = CBEltTy->getArrayElementType();
      cbIndexDepth++;
    }
    // Add one level struct type to match normal case.
    llvm::StructType *CBStructTy =
        llvm::StructType::create({CB.GetResultType()}, CB.GetGlobalName());
    llvm::ArrayType *CBArrayTy =
        llvm::ArrayType::get(CBStructTy, CB.GetRangeSize());
    cbGV = new llvm::GlobalVariable(M, CBArrayTy, /*IsConstant*/ true,
                                    llvm::GlobalValue::ExternalLinkage,
                                    /*InitVal*/ nullptr, CB.GetGlobalName());
    // The subscript returns a pointer to one element struct, not the array.
    cbTy = llvm::PointerType::get(CBStructTy,
                                  cbGV->getType()->getPointerAddressSpace());
  }
  CB.SetGlobalSymbol(cbGV);
  llvm::Type *opcodeTy = llvm::Type::getInt32Ty(M.getContext());
  llvm::Type *idxTy = opcodeTy;
  Constant *zeroIdx = ConstantInt::get(opcodeTy, 0);
  Value *HandleArgs[] = {cbGV, zeroIdx};
  // HLSubscript function: (opcode, handle, index) -> element pointer.
  llvm::FunctionType *SubscriptFuncTy =
      llvm::FunctionType::get(cbTy, {opcodeTy, HandleTy, idxTy}, false);
  Function *subscriptFunc =
      GetOrCreateHLFunction(M, SubscriptFuncTy, HLOpcodeGroup::HLSubscript,
                            (unsigned)HLSubscriptOpcode::CBufferSubscript);
  Constant *opArg =
      ConstantInt::get(opcodeTy, (unsigned)HLSubscriptOpcode::CBufferSubscript);
  Value *args[] = {opArg, nullptr, zeroIdx};
  llvm::LLVMContext &Context = M.getContext();
  llvm::Type *i32Ty = llvm::Type::getInt32Ty(Context);
  Value *zero = ConstantInt::get(i32Ty, (uint64_t)0);
  // Precompute, per member constant, its struct index and the set of
  // functions that use it, so each function only rewrites what it needs.
  std::vector<Value *> indexArray(CB.GetConstants().size());
  std::vector<std::unordered_set<Function *>> constUsedFuncList(
      CB.GetConstants().size());
  for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
    Value *idx = ConstantInt::get(i32Ty, C->GetID());
    indexArray[C->GetID()] = idx;
    Value *GV = C->GetGlobalSymbol();
    MarkUsedFunctionForConst(GV, constUsedFuncList[C->GetID()]);
  }
  for (Function &F : M.functions()) {
    if (F.isDeclaration())
      continue;
    // Skip HL intrinsic wrapper functions themselves.
    if (GetHLOpcodeGroupByName(&F) != HLOpcodeGroup::NotHL)
      continue;
    IRBuilder<> Builder(F.getEntryBlock().getFirstInsertionPt());
    // create HL subscript to make all the use of cbuffer start from it.
    HandleArgs[HLOperandIndex::kCreateHandleResourceOpIdx - 1] = cbGV;
    CallInst *Handle = HLM.EmitHLOperationCall(
        Builder, HLOpcodeGroup::HLCreateHandle, 0, HandleTy, HandleArgs, M);
    CallInst *OrigHandle = Handle;
    DxilResourceProperties RP = resource_helper::loadPropsFromResourceBase(&CB);
    Handle = CreateAnnotateHandle(HLM, Handle, RP, cbGV->getType()->getElementType(), Builder);
    args[HLOperandIndex::kSubscriptObjectOpIdx] = Handle;
    Instruction *cbSubscript =
        cast<Instruction>(Builder.CreateCall(subscriptFunc, {args}));
    // Replace constant var with GEP pGV
    for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
      Value *GV = C->GetGlobalSymbol();
      if (constUsedFuncList[C->GetID()].count(&F) == 0)
        continue;
      Value *idx = indexArray[C->GetID()];
      if (!isCBArray) {
        Instruction *GEP = cast<Instruction>(
            Builder.CreateInBoundsGEP(cbSubscript, {zero, idx}));
        // TODO: make sure the debug info is synced to GEP.
        // GEP->setDebugLoc(GV);
        ReplaceUseInFunction(GV, GEP, &F, Builder);
        // Delete if no use in F.
        if (GEP->user_empty())
          GEP->eraseFromParent();
      } else {
        // cbuffer array: every user must be a GEP whose leading array
        // indices are folded into the subscript's dynamic index.
        for (auto U = GV->user_begin(); U != GV->user_end();) {
          User *user = *(U++);
          if (user->user_empty())
            continue;
          Instruction *I = dyn_cast<Instruction>(user);
          if (I && I->getParent()->getParent() != &F)
            continue;
          // Instruction users get a builder at their own position; constant
          // operators reuse the entry-block builder.
          IRBuilder<> *instBuilder = &Builder;
          std::unique_ptr<IRBuilder<>> B;
          if (I) {
            B = llvm::make_unique<IRBuilder<>>(I);
            instBuilder = B.get();
          }
          GEPOperator *GEPOp = cast<GEPOperator>(user);
          std::vector<Value *> idxList;
          DXASSERT(GEPOp->getNumIndices() >= 1 + cbIndexDepth,
                   "must indexing ConstantBuffer array");
          idxList.reserve(GEPOp->getNumIndices() - (cbIndexDepth - 1));
          gep_type_iterator GI = gep_type_begin(*GEPOp),
                            E = gep_type_end(*GEPOp);
          idxList.push_back(GI.getOperand());
          // change array index with 0 for struct index.
          idxList.push_back(zero);
          GI++;
          Value *arrayIdx = GI.getOperand();
          GI++;
          // Linearize multi-dimensional array indices into one flat index.
          for (unsigned curIndex = 1; GI != E && curIndex < cbIndexDepth;
               ++GI, ++curIndex) {
            arrayIdx = instBuilder->CreateMul(
                arrayIdx, Builder.getInt32(GI->getArrayNumElements()));
            arrayIdx = instBuilder->CreateAdd(arrayIdx, GI.getOperand());
          }
          // Remaining indices address within the element struct.
          for (; GI != E; ++GI) {
            idxList.push_back(GI.getOperand());
          }
          HandleArgs[HLOperandIndex::kCreateHandleIndexOpIdx - 1] = arrayIdx;
          CallInst *Handle =
              HLM.EmitHLOperationCall(*instBuilder,
                                      HLOpcodeGroup::HLCreateHandle, 0,
                                      HandleTy, HandleArgs, M);
          DxilResourceProperties RP = resource_helper::loadPropsFromResourceBase(&CB);
          Handle = CreateAnnotateHandle(HLM, Handle, RP, cbGV->getType()->getElementType(), *instBuilder);
          args[HLOperandIndex::kSubscriptObjectOpIdx] = Handle;
          args[HLOperandIndex::kSubscriptIndexOpIdx] = arrayIdx;
          Instruction *cbSubscript =
              cast<Instruction>(instBuilder->CreateCall(subscriptFunc, {args}));
          Instruction *NewGEP = cast<Instruction>(
              instBuilder->CreateInBoundsGEP(cbSubscript, idxList));
          ReplaceUseInFunction(GEPOp, NewGEP, &F, *instBuilder);
        }
      }
    }
    // Delete if no use in F.
    if (cbSubscript->user_empty()) {
      cbSubscript->eraseFromParent();
      Handle->eraseFromParent();
      OrigHandle->eraseFromParent();
    } else {
      // merge GEP use for cbSubscript.
      HLModule::MergeGepUse(cbSubscript);
    }
  }
  return true;
}
  1905. void ConstructCBufferAnnotation(
  1906. HLCBuffer &CB, DxilTypeSystem &dxilTypeSys,
  1907. std::unordered_map<Constant *, DxilFieldAnnotation> &AnnotationMap) {
  1908. Value *GV = CB.GetGlobalSymbol();
  1909. llvm::StructType *CBStructTy =
  1910. dyn_cast<llvm::StructType>(GV->getType()->getPointerElementType());
  1911. if (!CBStructTy) {
  1912. // For Array of ConstantBuffer.
  1913. llvm::ArrayType *CBArrayTy =
  1914. cast<llvm::ArrayType>(GV->getType()->getPointerElementType());
  1915. CBStructTy = cast<llvm::StructType>(CBArrayTy->getArrayElementType());
  1916. }
  1917. DxilStructAnnotation *CBAnnotation =
  1918. dxilTypeSys.AddStructAnnotation(CBStructTy);
  1919. CBAnnotation->SetCBufferSize(CB.GetSize());
  1920. // Set fieldAnnotation for each constant var.
  1921. for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
  1922. Constant *GV = C->GetGlobalSymbol();
  1923. DxilFieldAnnotation &fieldAnnotation =
  1924. CBAnnotation->GetFieldAnnotation(C->GetID());
  1925. fieldAnnotation = AnnotationMap[GV];
  1926. // This is after CBuffer allocation.
  1927. fieldAnnotation.SetCBufferOffset(C->GetLowerBound());
  1928. fieldAnnotation.SetFieldName(C->GetGlobalName());
  1929. }
  1930. }
  1931. void ConstructCBuffer(
  1932. HLModule &HLM, llvm::Type *CBufferType,
  1933. std::unordered_map<Constant *, DxilFieldAnnotation> &AnnotationMap) {
  1934. DxilTypeSystem &dxilTypeSys = HLM.GetTypeSystem();
  1935. llvm::Type *HandleTy = HLM.GetOP()->GetHandleType();
  1936. for (unsigned i = 0; i < HLM.GetCBuffers().size(); i++) {
  1937. HLCBuffer &CB = *static_cast<HLCBuffer *>(&(HLM.GetCBuffer(i)));
  1938. if (CB.GetConstants().size() == 0) {
  1939. // Create Fake variable for cbuffer which is empty.
  1940. llvm::GlobalVariable *pGV = new llvm::GlobalVariable(
  1941. *HLM.GetModule(), CBufferType, true,
  1942. llvm::GlobalValue::ExternalLinkage, nullptr, CB.GetGlobalName());
  1943. CB.SetGlobalSymbol(pGV);
  1944. } else {
  1945. bool bCreated = CreateCBufferVariable(CB, HLM, HandleTy);
  1946. if (bCreated)
  1947. ConstructCBufferAnnotation(CB, dxilTypeSys, AnnotationMap);
  1948. else {
  1949. // Create Fake variable for cbuffer which is unused.
  1950. llvm::GlobalVariable *pGV = new llvm::GlobalVariable(
  1951. *HLM.GetModule(), CBufferType, true,
  1952. llvm::GlobalValue::ExternalLinkage, nullptr, CB.GetGlobalName());
  1953. CB.SetGlobalSymbol(pGV);
  1954. }
  1955. }
  1956. // Clear the constants which useless now.
  1957. CB.GetConstants().clear();
  1958. }
  1959. }
  1960. } // namespace
  1961. namespace CGHLSLMSHelper {
  1962. // Align cbuffer offset in legacy mode (16 bytes per row).
  1963. unsigned AlignBufferOffsetInLegacy(unsigned offset, unsigned size,
  1964. unsigned scalarSizeInBytes,
  1965. bool bNeedNewRow) {
  1966. if (unsigned remainder = (offset & 0xf)) {
  1967. // Start from new row
  1968. if (remainder + size > 16 || bNeedNewRow) {
  1969. return offset + 16 - remainder;
  1970. }
  1971. // If not, naturally align data
  1972. return RoundToAlign(offset, scalarSizeInBytes);
  1973. }
  1974. return offset;
  1975. }
// Translate RayQuery constructor. From:
//   %call = call %"RayQuery<flags>" @<constructor>(%"RayQuery<flags>" %ptr)
// To:
//   i32 %handle = AllocateRayQuery(i32 <IntrinsicOp::IOP_AllocateRayQuery>,
//                                  i32 %flags)
//   %gep = GEP %"RayQuery<flags>" %ptr, 0, 0
//   store i32* %gep, i32 %handle
//   ; and replace uses of %call with %ptr
void TranslateRayQueryConstructor(HLModule &HLM) {
  llvm::Module &M = *HLM.GetModule();
  // Collect all constructor instantiations first; they are rewritten and
  // erased below.
  SmallVector<Function *, 4> Constructors;
  for (auto &F : M.functions()) {
    // Match templated RayQuery constructor instantiation by prefix and
    // signature. It should be impossible to achieve the same signature from
    // HLSL.
    if (!F.getName().startswith("\01??0?$RayQuery@$"))
      continue;
    // Constructor returns a pointer to the RayQuery object it initializes.
    llvm::Type *Ty = F.getReturnType();
    if (!Ty->isPointerTy() ||
        !dxilutil::IsHLSLRayQueryType(Ty->getPointerElementType()))
      continue;
    // Exactly one 'this' argument of the same RayQuery pointer type.
    if (F.arg_size() != 1 || Ty != F.arg_begin()->getType())
      continue;
    Constructors.emplace_back(&F);
  }
  for (auto pConstructorFunc : Constructors) {
    llvm::IntegerType *i32Ty = llvm::Type::getInt32Ty(M.getContext());
    llvm::ConstantInt *i32Zero =
        llvm::ConstantInt::get(i32Ty, (uint64_t)0, false);
    // AllocateRayQuery HL intrinsic has signature: i32(i32 opcode, i32 flags).
    llvm::FunctionType *funcTy =
        llvm::FunctionType::get(i32Ty, {i32Ty, i32Ty}, false);
    unsigned opcode = (unsigned)IntrinsicOp::IOP_AllocateRayQuery;
    llvm::ConstantInt *opVal = llvm::ConstantInt::get(i32Ty, opcode, false);
    Function *opFunc =
        GetOrCreateHLFunction(M, funcTy, HLOpcodeGroup::HLIntrinsic, opcode);
    // Rewrite every call site of this constructor, then erase it.
    while (!pConstructorFunc->user_empty()) {
      Value *V = *pConstructorFunc->user_begin();
      llvm::CallInst *CI = cast<CallInst>(V); // Must be call
      llvm::Value *pThis = CI->getArgOperand(0);
      llvm::StructType *pRQType =
          cast<llvm::StructType>(pThis->getType()->getPointerElementType());
      // The ray flags come from the RayQuery template argument, which is
      // recorded in the struct's type annotation.
      DxilStructAnnotation *SA =
          HLM.GetTypeSystem().GetStructAnnotation(pRQType);
      DXASSERT(SA, "otherwise, could not find type annoation for RayQuery "
                   "specialization");
      DXASSERT(SA->GetNumTemplateArgs() == 1 &&
                   SA->GetTemplateArgAnnotation(0).IsIntegral(),
               "otherwise, RayQuery has changed, or lacks template args");
      llvm::IRBuilder<> Builder(CI);
      llvm::Value *rayFlags =
          Builder.getInt32(SA->GetTemplateArgAnnotation(0).GetIntegral());
      llvm::Value *Call =
          Builder.CreateCall(opFunc, {opVal, rayFlags}, pThis->getName());
      // Store the allocated handle into the first field of the RayQuery
      // object, then forward uses of the constructor result to 'this'.
      llvm::Value *GEP = Builder.CreateInBoundsGEP(pThis, {i32Zero, i32Zero});
      Builder.CreateStore(Call, GEP);
      CI->replaceAllUsesWith(pThis);
      CI->eraseFromParent();
    }
    pConstructorFunc->eraseFromParent();
  }
}
  2035. } // namespace CGHLSLMSHelper
  2036. namespace {
// Try to fold a per-global initializer function that consists solely of
// constant stores into a single constant-array initializer on the target
// global. Returns true (and sets the initializer) on success; returns false
// when the ctor does anything else, stores non-constant/pointer values, or
// the global is not a supported array type.
bool BuildImmInit(Function *Ctor) {
  GlobalVariable *GV = nullptr;
  SmallVector<Constant *, 4> ImmList;
  bool allConst = true;
  for (inst_iterator I = inst_begin(Ctor), E = inst_end(Ctor); I != E; ++I) {
    if (StoreInst *SI = dyn_cast<StoreInst>(&(*I))) {
      Value *V = SI->getValueOperand();
      // Only non-pointer constants can be folded into an initializer.
      if (!isa<Constant>(V) || V->getType()->isPointerTy()) {
        allConst = false;
        break;
      }
      // Stores are collected in instruction order; this order is assumed to
      // match the array's element order when the array is built below.
      ImmList.emplace_back(cast<Constant>(V));
      Value *Ptr = SI->getPointerOperand();
      if (GEPOperator *GepOp = dyn_cast<GEPOperator>(Ptr)) {
        Ptr = GepOp->getPointerOperand();
        if (GlobalVariable *pGV = dyn_cast<GlobalVariable>(Ptr)) {
          // All stores must target the same global variable.
          if (GV == nullptr)
            GV = pGV;
          else {
            DXASSERT(GV == pGV, "else pointer mismatch");
          }
        }
      }
    } else {
      // Anything other than stores and the final return disqualifies the
      // ctor from constant folding.
      if (!isa<ReturnInst>(*I)) {
        allConst = false;
        break;
      }
    }
  }
  if (!allConst)
    return false;
  if (!GV)
    return false;
  llvm::Type *Ty = GV->getType()->getElementType();
  llvm::ArrayType *AT = dyn_cast<llvm::ArrayType>(Ty);
  // TODO: support other types.
  if (!AT)
    return false;
  // Require one store per element; partial initialization is not folded.
  if (ImmList.size() != AT->getNumElements())
    return false;
  Constant *Init = llvm::ConstantArray::get(AT, ImmList);
  GV->setInitializer(Init);
  return true;
}
  2082. void CallCtorFunctionsAtInsertPt(llvm::Module &M,
  2083. llvm::SmallVector<llvm::Function *, 2> &Ctors,
  2084. Instruction *InsertPt) {
  2085. IRBuilder<> Builder(InsertPt);
  2086. for (Function *Ctor : Ctors) {
  2087. Builder.CreateCall(Ctor);
  2088. }
  2089. }
  2090. void CollectFunctionCallers(Function *F, DenseSet<Function *> &Callers) {
  2091. // worklist size max = call depth
  2092. SmallVector<Function *, 8> worklist;
  2093. worklist.push_back(F);
  2094. // add callers
  2095. while (worklist.size()) {
  2096. Function *F = worklist.pop_back_val();
  2097. for (User *U : F->users()) {
  2098. if (CallInst *CI = dyn_cast<CallInst>(U)) {
  2099. Function *Caller = CI->getParent()->getParent();
  2100. if (Callers.insert(Caller).second == true) {
  2101. // new caller
  2102. worklist.push_back(Caller);
  2103. }
  2104. }
  2105. }
  2106. }
  2107. }
  2108. DenseSet<Function *> CollectExternalFunctionCallers(Module &M) {
  2109. DenseSet<Function *> Callers;
  2110. for (Function &F : M) {
  2111. if (!F.isIntrinsic() && F.isDeclaration() &&
  2112. hlsl::GetHLOpcodeGroup(&F) == hlsl::HLOpcodeGroup::NotHL) {
  2113. CollectFunctionCallers(&F, Callers);
  2114. }
  2115. }
  2116. return Callers;
  2117. }
  2118. // If static initializers contain calls to external functions, this can
  2119. // introduce inter-module init function ordering dependencies. Some
  2120. // dependencies may even introduce contradictions. Creating and implementing an
  2121. // intuitive standard approach to solve this is likely quite difficult. Better
  2122. // to disallow the ambiguous and unlikely case for now.
  2123. bool IsValidCtorFunction(Function *F, DenseSet<Function *> &Callers) {
  2124. return Callers.count(F) == 0;
  2125. }
  2126. void ReportInitStaticGlobalWithExternalFunction(
  2127. clang::CodeGen ::CodeGenModule &CGM, StringRef name) {
  2128. clang::DiagnosticsEngine &Diags = CGM.getDiags();
  2129. unsigned DiagID = Diags.getCustomDiagID(
  2130. clang::DiagnosticsEngine::Error,
  2131. "Initializer for static global %0 makes disallowed call to external function.");
  2132. std::string escaped;
  2133. llvm::raw_string_ostream os(escaped);
  2134. size_t end = name.find_first_of('@');
  2135. if (end != StringRef::npos)
  2136. name = name.substr(0, end);
  2137. StringRef prefix = "\01??__E";
  2138. if (name.startswith(prefix))
  2139. name = name.substr(prefix.size());
  2140. dxilutil::PrintEscapedString(name, os);
  2141. Diags.Report(DiagID) << os.str();
  2142. }
  2143. } // namespace
  2144. namespace CGHLSLMSHelper {
// Scan the global ctor array named `globalName` (llvm.global_ctors layout)
// and collect the per-global initializer functions that must be called from
// shader entry points. Initializers whose effect can be folded into a
// constant initializer (BuildImmInit) are folded instead; initializers that
// reach external functions are diagnosed as errors.
void CollectCtorFunctions(llvm::Module &M, llvm::StringRef globalName,
                          llvm::SmallVector<llvm::Function *, 2> &Ctors,
                          clang::CodeGen::CodeGenModule &CGM) {
  // add global call to entry func
  GlobalVariable *GV = M.getGlobalVariable(globalName);
  if (!GV)
    return;
  ConstantArray *CA = dyn_cast<ConstantArray>(GV->getInitializer());
  if (!CA)
    return;
  // Functions that transitively call external declarations. Ctors reaching
  // these would create cross-module init ordering dependencies (disallowed).
  DenseSet<Function *> Callers = CollectExternalFunctionCallers(M);
  for (User::op_iterator i = CA->op_begin(), e = CA->op_end(); i != e; ++i) {
    if (isa<ConstantAggregateZero>(*i))
      continue;
    // Each entry is a { priority, function, data } struct; operand 1 holds
    // the ctor function (or null).
    ConstantStruct *CS = cast<ConstantStruct>(*i);
    if (isa<ConstantPointerNull>(CS->getOperand(1)))
      continue;
    // Must have a function or null ptr.
    if (!isa<Function>(CS->getOperand(1)))
      continue;
    Function *Ctor = cast<Function>(CS->getOperand(1));
    DXASSERT(Ctor->getReturnType()->isVoidTy() && Ctor->arg_size() == 0,
             "function type must be void (void)");
    // The ctor stub is expected to contain only calls to the per-global
    // initializer functions plus a return (asserted below).
    for (inst_iterator I = inst_begin(Ctor), E = inst_end(Ctor); I != E; ++I) {
      if (CallInst *CI = dyn_cast<CallInst>(&(*I))) {
        Function *F = CI->getCalledFunction();
        // Try to build imm initilizer.
        // If not work, add global call to entry func.
        if (BuildImmInit(F) == false) {
          if (IsValidCtorFunction(F, Callers)) {
            Ctors.emplace_back(F);
          } else {
            ReportInitStaticGlobalWithExternalFunction(CGM, F->getName());
          }
        }
      } else {
        DXASSERT(isa<ReturnInst>(&(*I)),
                 "else invalid Global constructor function");
      }
    }
  }
}
// Insert calls to the collected ctor functions at the start of the entry
// function and, for hull shaders, of the patch constant function. Also
// re-copies constant initial values at the top of the patch constant
// function so it does not observe static globals mutated by the entry.
void ProcessCtorFunctions(llvm::Module &M,
                          llvm::SmallVector<llvm::Function *, 2> &Ctors,
                          llvm::Function *Entry,
                          llvm::Function *PatchConstantFn) {
  if (PatchConstantFn) {
    // static globals are independent for entry function and patch constant
    // function. Update static global in entry function will not affect
    // value in patch constant function. So just call ctors for patch
    // constant function too.
    CallCtorFunctionsAtInsertPt(
        M, Ctors, PatchConstantFn->getEntryBlock().getFirstInsertionPt());
    IRBuilder<> B(PatchConstantFn->getEntryBlock().getFirstInsertionPt());
    // For static globals which has const initialize value, copy it at
    // beginning of patch constant function to avoid use value updated by
    // entry function.
    for (GlobalVariable &GV : M.globals()) {
      if (GV.isConstant())
        continue;
      if (!GV.hasInitializer())
        continue;
      // The ctor list itself is not a user global; skip it.
      if (GV.getName() == "llvm.global_ctors")
        continue;
      Value *V = GV.getInitializer();
      if (isa<UndefValue>(V))
        continue;
      B.CreateStore(V, &GV);
    }
  }
  CallCtorFunctionsAtInsertPt(M, Ctors,
                              Entry->getEntryBlock().getFirstInsertionPt());
}
// Finalize cbuffer lowering: allocate space for every constant buffer, then
// emit the cbuffer global variables and their type annotations.
void FinishCBuffer(HLModule &HLM, llvm::Type *CBufferType,
                   std::unordered_map<Constant *, DxilFieldAnnotation>
                       &constVarAnnotationMap) {
  // Allocate constant buffers.
  AllocateDxilConstantBuffers(HLM, constVarAnnotationMap);
  // TODO: create temp variable for constant which has store use.
  // Create Global variable and type annotation for each CBuffer.
  ConstructCBuffer(HLM, CBufferType, constVarAnnotationMap);
}
  2227. void AddRegBindingsForResourceInConstantBuffer(
  2228. HLModule &HLM,
  2229. llvm::DenseMap<llvm::Constant *,
  2230. llvm::SmallVector<std::pair<DXIL::ResourceClass, unsigned>,
  2231. 1>> &constantRegBindingMap) {
  2232. for (unsigned i = 0; i < HLM.GetCBuffers().size(); i++) {
  2233. HLCBuffer &CB = *static_cast<HLCBuffer *>(&(HLM.GetCBuffer(i)));
  2234. auto &Constants = CB.GetConstants();
  2235. for (unsigned j = 0; j < Constants.size(); j++) {
  2236. const std::unique_ptr<DxilResourceBase> &C = Constants[j];
  2237. Constant *CGV = C->GetGlobalSymbol();
  2238. auto &regBindings = constantRegBindingMap[CGV];
  2239. if (regBindings.empty())
  2240. continue;
  2241. unsigned Srv = UINT_MAX;
  2242. unsigned Uav = UINT_MAX;
  2243. unsigned Sampler = UINT_MAX;
  2244. for (auto it : regBindings) {
  2245. unsigned RegNum = it.second;
  2246. switch (it.first) {
  2247. case DXIL::ResourceClass::SRV:
  2248. Srv = RegNum;
  2249. break;
  2250. case DXIL::ResourceClass::UAV:
  2251. Uav = RegNum;
  2252. break;
  2253. case DXIL::ResourceClass::Sampler:
  2254. Sampler = RegNum;
  2255. break;
  2256. default:
  2257. DXASSERT(0, "invalid resource class");
  2258. break;
  2259. }
  2260. }
  2261. HLM.AddRegBinding(CB.GetID(), j, Srv, Uav, Sampler);
  2262. }
  2263. }
  2264. }
  2265. // extension codegen.
  2266. void ExtensionCodeGen(HLModule &HLM, clang::CodeGen::CodeGenModule &CGM) {
  2267. // Add semantic defines for extensions if any are available.
  2268. HLSLExtensionsCodegenHelper::SemanticDefineErrorList errors =
  2269. CGM.getCodeGenOpts().HLSLExtensionsCodegen->WriteSemanticDefines(
  2270. HLM.GetModule());
  2271. clang::DiagnosticsEngine &Diags = CGM.getDiags();
  2272. for (const HLSLExtensionsCodegenHelper::SemanticDefineError &error : errors) {
  2273. clang::DiagnosticsEngine::Level level = clang::DiagnosticsEngine::Error;
  2274. if (error.IsWarning())
  2275. level = clang::DiagnosticsEngine::Warning;
  2276. unsigned DiagID = Diags.getCustomDiagID(level, "%0");
  2277. Diags.Report(clang::SourceLocation::getFromRawEncoding(error.Location()),
  2278. DiagID)
  2279. << error.Message();
  2280. }
  2281. // Add root signature from a #define. Overrides root signature in function
  2282. // attribute.
  2283. {
  2284. using Status = HLSLExtensionsCodegenHelper::CustomRootSignature::Status;
  2285. HLSLExtensionsCodegenHelper::CustomRootSignature customRootSig;
  2286. HLSLExtensionsCodegenHelper::CustomRootSignature::Status status =
  2287. CGM.getCodeGenOpts().HLSLExtensionsCodegen->GetCustomRootSignature(
  2288. &customRootSig);
  2289. if (status == Status::FOUND) {
  2290. DxilRootSignatureVersion rootSigVer;
  2291. // set root signature version.
  2292. if (CGM.getLangOpts().RootSigMinor == 0) {
  2293. rootSigVer = hlsl::DxilRootSignatureVersion::Version_1_0;
  2294. } else {
  2295. DXASSERT(CGM.getLangOpts().RootSigMinor == 1,
  2296. "else CGMSHLSLRuntime Constructor needs to be updated");
  2297. rootSigVer = hlsl::DxilRootSignatureVersion::Version_1_1;
  2298. }
  2299. RootSignatureHandle RootSigHandle;
  2300. CompileRootSignature(
  2301. customRootSig.RootSignature, Diags,
  2302. clang::SourceLocation::getFromRawEncoding(
  2303. customRootSig.EncodedSourceLocation),
  2304. rootSigVer, DxilRootSignatureCompilationFlags::GlobalRootSignature,
  2305. &RootSigHandle);
  2306. if (!RootSigHandle.IsEmpty()) {
  2307. RootSigHandle.EnsureSerializedAvailable();
  2308. HLM.SetSerializedRootSignature(RootSigHandle.GetSerializedBytes(),
  2309. RootSigHandle.GetSerializedSize());
  2310. }
  2311. }
  2312. }
  2313. }
  2314. } // namespace CGHLSLMSHelper
  2315. namespace {
  2316. void ReportDisallowedTypeInExportParam(clang::CodeGen ::CodeGenModule &CGM,
  2317. StringRef name) {
  2318. clang::DiagnosticsEngine &Diags = CGM.getDiags();
  2319. unsigned DiagID =
  2320. Diags.getCustomDiagID(clang::DiagnosticsEngine::Error,
  2321. "Exported function %0 must not contain a "
  2322. "resource in parameter or return type.");
  2323. std::string escaped;
  2324. llvm::raw_string_ostream os(escaped);
  2325. dxilutil::PrintEscapedString(name, os);
  2326. Diags.Report(DiagID) << os.str();
  2327. }
  2328. } // namespace
  2329. namespace CGHLSLMSHelper {
// For each vertex shader using clip planes, replace every clip-plane value
// with a dedicated global ("SV_ClipPlane<i>") that is initialized from the
// original value at the top of the entry function, and point the shader
// props at the new global.
void FinishClipPlane(HLModule &HLM, std::vector<Function *> &clipPlaneFuncList,
                     std::unordered_map<Value *, DebugLoc> &debugInfoMap,
                     clang::CodeGen::CodeGenModule &CGM) {
  bool bDebugInfo = CGM.getCodeGenOpts().getDebugInfo() ==
                    clang::CodeGenOptions::FullDebugInfo;
  Module &M = *HLM.GetModule();
  for (Function *F : clipPlaneFuncList) {
    DxilFunctionProps &props = HLM.GetDxilFunctionProps(F);
    IRBuilder<> Builder(F->getEntryBlock().getFirstInsertionPt());
    for (unsigned i = 0; i < DXIL::kNumClipPlanes; i++) {
      Value *clipPlane = props.ShaderProps.VS.clipPlanes[i];
      if (!clipPlane)
        continue;
      // Preserve the source location of the original clip-plane value.
      if (bDebugInfo) {
        Builder.SetCurrentDebugLocation(debugInfoMap[clipPlane]);
      }
      llvm::Type *Ty = clipPlane->getType()->getPointerElementType();
      // Constant *zeroInit = ConstantFP::get(Ty, 0);
      GlobalVariable *GV = new llvm::GlobalVariable(
          M, Ty, /*IsConstant*/ false, // constant false to store.
          llvm::GlobalValue::ExternalLinkage,
          /*InitVal*/ nullptr, Twine("SV_ClipPlane") + Twine(i));
      // Copy the current clip-plane value into the new global.
      Value *initVal = Builder.CreateLoad(clipPlane);
      Builder.CreateStore(initVal, GV);
      props.ShaderProps.VS.clipPlanes[i] = GV;
    }
  }
}
  2358. } // namespace CGHLSLMSHelper
  2359. namespace {
// Apply the export map for library targets: register patch constant functions
// of exported hull shaders, validate the export map against the module
// (reporting collisions and unmatched exports), then rename/clone functions
// exported under one or more different names.
void LowerExportFunctions(HLModule &HLM, clang::CodeGen::CodeGenModule &CGM,
                          dxilutil::ExportMap &exportMap,
                          StringMap<EntryFunctionInfo> &entryFunctionMap) {
  bool bIsLib = HLM.GetShaderModel()->IsLib();
  Module &M = *HLM.GetModule();
  if (bIsLib && !exportMap.empty()) {
    // Exported hull shaders implicitly export their patch constant function.
    for (auto &it : entryFunctionMap) {
      if (HLM.HasDxilFunctionProps(it.second.Func)) {
        const DxilFunctionProps &props =
            HLM.GetDxilFunctionProps(it.second.Func);
        if (props.IsHS())
          exportMap.RegisterExportedFunction(
              props.ShaderProps.HS.patchConstantFunc);
      }
    }
  }
  if (bIsLib && !exportMap.empty()) {
    exportMap.BeginProcessing();
    // Feed every user-defined function with a body into the export map.
    for (Function &f : M.functions()) {
      if (f.isDeclaration() || f.isIntrinsic() ||
          GetHLOpcodeGroup(&f) != HLOpcodeGroup::NotHL)
        continue;
      exportMap.ProcessFunction(&f, true);
    }
    // TODO: add subobject export names here.
    if (!exportMap.EndProcessing()) {
      // Report each duplicate export name...
      for (auto &name : exportMap.GetNameCollisions()) {
        clang::DiagnosticsEngine &Diags = CGM.getDiags();
        unsigned DiagID = Diags.getCustomDiagID(
            clang::DiagnosticsEngine::Error,
            "Export name collides with another export: %0");
        std::string escaped;
        llvm::raw_string_ostream os(escaped);
        dxilutil::PrintEscapedString(name, os);
        Diags.Report(DiagID) << os.str();
      }
      // ...and each export that matched no function in the module.
      for (auto &name : exportMap.GetUnusedExports()) {
        clang::DiagnosticsEngine &Diags = CGM.getDiags();
        unsigned DiagID =
            Diags.getCustomDiagID(clang::DiagnosticsEngine::Error,
                                  "Could not find target for export: %0");
        std::string escaped;
        llvm::raw_string_ostream os(escaped);
        dxilutil::PrintEscapedString(name, os);
        Diags.Report(DiagID) << os.str();
      }
    }
  }
  // Apply renames: keep (or rename) the original function for the first
  // name, clone it for every additional export name.
  for (auto &it : exportMap.GetFunctionRenames()) {
    Function *F = it.first;
    auto &renames = it.second;
    if (renames.empty())
      continue;
    // Rename the original, if necessary, then clone the rest
    if (renames.find(F->getName()) == renames.end())
      F->setName(*renames.begin());
    for (auto &itName : renames) {
      if (F->getName() != itName) {
        Function *pClone = CloneFunction(F, itName, &M, HLM.GetTypeSystem(),
                                         HLM.GetTypeSystem());
        // add DxilFunctionProps if entry
        if (HLM.HasDxilFunctionProps(F)) {
          DxilFunctionProps &props = HLM.GetDxilFunctionProps(F);
          auto newProps = llvm::make_unique<DxilFunctionProps>(props);
          HLM.AddDxilFunctionProps(pClone, newProps);
        }
      }
    }
  }
}
  2430. void CheckResourceParameters(HLModule &HLM,
  2431. clang::CodeGen::CodeGenModule &CGM) {
  2432. Module &M = *HLM.GetModule();
  2433. for (Function &f : M.functions()) {
  2434. // Skip llvm intrinsics, non-external linkage, entry/patch constant func,
  2435. // and HL intrinsics
  2436. if (!f.isIntrinsic() &&
  2437. f.getLinkage() == GlobalValue::LinkageTypes::ExternalLinkage &&
  2438. !HLM.HasDxilFunctionProps(&f) && !HLM.IsPatchConstantShader(&f) &&
  2439. GetHLOpcodeGroup(&f) == HLOpcodeGroup::NotHL) {
  2440. // Verify no resources in param/return types
  2441. if (dxilutil::ContainsHLSLObjectType(f.getReturnType())) {
  2442. ReportDisallowedTypeInExportParam(CGM, f.getName());
  2443. continue;
  2444. }
  2445. for (auto &Arg : f.args()) {
  2446. if (dxilutil::ContainsHLSLObjectType(Arg.getType())) {
  2447. ReportDisallowedTypeInExportParam(CGM, f.getName());
  2448. break;
  2449. }
  2450. }
  2451. }
  2452. }
  2453. }
  2454. } // namespace
  2455. namespace CGHLSLMSHelper {
// Fix up function linkage before lowering: for non-library profiles, keep
// only the entry/patch-constant functions external (erroring on external
// declarations); mark used functions always-inline; apply export lowering;
// optionally internalize non-shader functions (ExportShadersOnly); keep
// patch constant functions of exported hull shaders external; and validate
// resource-free signatures for library exports.
void UpdateLinkage(HLModule &HLM, clang::CodeGen::CodeGenModule &CGM,
                   dxilutil::ExportMap &exportMap,
                   StringMap<EntryFunctionInfo> &entryFunctionMap,
                   StringMap<PatchConstantInfo> &patchConstantFunctionMap) {
  bool bIsLib = HLM.GetShaderModel()->IsLib();
  Module &M = *HLM.GetModule();
  // Pin entry point and constant buffers, mark everything else internal.
  for (Function &f : M.functions()) {
    if (!bIsLib) {
      if (&f == HLM.GetEntryFunction() ||
          IsPatchConstantFunction(&f, patchConstantFunctionMap) ||
          f.isDeclaration()) {
        // A non-intrinsic, non-HL external declaration cannot be resolved in
        // a non-library profile; report and bail out.
        if (f.isDeclaration() && !f.isIntrinsic() &&
            GetHLOpcodeGroup(&f) == HLOpcodeGroup::NotHL) {
          clang::DiagnosticsEngine &Diags = CGM.getDiags();
          unsigned DiagID = Diags.getCustomDiagID(
              clang::DiagnosticsEngine::Error,
              "External function used in non-library profile: %0");
          std::string escaped;
          llvm::raw_string_ostream os(escaped);
          dxilutil::PrintEscapedString(f.getName(), os);
          Diags.Report(DiagID) << os.str();
          return;
        }
        f.setLinkage(GlobalValue::LinkageTypes::ExternalLinkage);
      } else {
        f.setLinkage(GlobalValue::LinkageTypes::InternalLinkage);
      }
    }
    // Skip no inline functions.
    if (f.hasFnAttribute(llvm::Attribute::NoInline))
      continue;
    // Always inline for used functions.
    if (!f.user_empty() && !f.isDeclaration())
      f.addFnAttr(llvm::Attribute::AlwaysInline);
  }
  LowerExportFunctions(HLM, CGM, exportMap, entryFunctionMap);
  if (CGM.getCodeGenOpts().ExportShadersOnly) {
    for (Function &f : M.functions()) {
      // Skip declarations, intrinsics, shaders, and non-external linkage
      if (f.isDeclaration() || f.isIntrinsic() ||
          GetHLOpcodeGroup(&f) != HLOpcodeGroup::NotHL ||
          HLM.HasDxilFunctionProps(&f) || HLM.IsPatchConstantShader(&f) ||
          f.getLinkage() != GlobalValue::LinkageTypes::ExternalLinkage)
        continue;
      // Mark non-shader user functions as InternalLinkage
      f.setLinkage(GlobalValue::LinkageTypes::InternalLinkage);
    }
  }
  // Now iterate hull shaders and make sure their corresponding patch constant
  // functions are marked ExternalLinkage:
  for (Function &f : M.functions()) {
    if (f.isDeclaration() || f.isIntrinsic() ||
        GetHLOpcodeGroup(&f) != HLOpcodeGroup::NotHL ||
        f.getLinkage() != GlobalValue::LinkageTypes::ExternalLinkage ||
        !HLM.HasDxilFunctionProps(&f))
      continue;
    DxilFunctionProps &props = HLM.GetDxilFunctionProps(&f);
    if (!props.IsHS())
      continue;
    Function *PCFunc = props.ShaderProps.HS.patchConstantFunc;
    if (PCFunc->getLinkage() != GlobalValue::LinkageTypes::ExternalLinkage)
      PCFunc->setLinkage(GlobalValue::LinkageTypes::ExternalLinkage);
  }
  // Disallow resource arguments in (non-entry) function exports
  // unless offline linking target.
  if (bIsLib &&
      HLM.GetShaderModel()->GetMinor() != ShaderModel::kOfflineMinor) {
    CheckResourceParameters(HLM, CGM);
  }
}
// Finalize entry points. Non-library targets: pin the single entry function
// (and the hull shader's patch constant function). Library targets: clone
// each entry under its export name and attach any [patchconstantfunc]
// attribute to the clone's props.
void FinishEntries(
    HLModule &HLM, const EntryFunctionInfo &Entry,
    clang::CodeGen::CodeGenModule &CGM,
    StringMap<EntryFunctionInfo> &entryFunctionMap,
    std::unordered_map<Function *, const clang::HLSLPatchConstantFuncAttr *>
        &HSEntryPatchConstantFuncAttr,
    StringMap<PatchConstantInfo> &patchConstantFunctionMap,
    std::unordered_map<Function *, std::unique_ptr<DxilFunctionProps>>
        &patchConstantFunctionPropsMap) {
  bool bIsLib = HLM.GetShaderModel()->IsLib();
  // Library don't have entry.
  if (!bIsLib) {
    SetEntryFunction(HLM, Entry.Func, CGM);
    // If at this point we haven't determined the entry function it's an error.
    if (HLM.GetEntryFunction() == nullptr) {
      assert(CGM.getDiags().hasErrorOccurred() &&
             "else SetEntryFunction should have reported this condition");
      return;
    }
    // In back-compat mode (with /Gec flag) create a static global for each
    // const global to allow writing to it.
    // TODO: Verfiy the behavior of static globals in hull shader
    if (CGM.getLangOpts().EnableDX9CompatMode &&
        CGM.getLangOpts().HLSLVersion <= 2016)
      CreateWriteEnabledStaticGlobals(HLM.GetModule(), HLM.GetEntryFunction());
    if (HLM.GetShaderModel()->IsHS()) {
      SetPatchConstantFunction(Entry, HSEntryPatchConstantFuncAttr,
                               patchConstantFunctionMap,
                               patchConstantFunctionPropsMap, HLM, CGM);
    }
  } else {
    for (auto &it : entryFunctionMap) {
      // skip clone if RT entry
      if (HLM.GetDxilFunctionProps(it.second.Func).IsRay())
        continue;
      // TODO: change flattened function names to dx.entry.<name>:
      // std::string entryName = (Twine(dxilutil::EntryPrefix) +
      // it.getKey()).str();
      CloneShaderEntry(it.second.Func, it.getKey(), HLM);
      // Hull-shader entries also need their patch constant function applied.
      auto AttrIter = HSEntryPatchConstantFuncAttr.find(it.second.Func);
      if (AttrIter != HSEntryPatchConstantFuncAttr.end()) {
        SetPatchConstantFunctionWithAttr(
            it.second, AttrIter->second, patchConstantFunctionMap,
            patchConstantFunctionPropsMap, HLM, CGM);
      }
    }
  }
}
  2575. } // namespace CGHLSLMSHelper
  2576. namespace CGHLSLMSHelper {
// Finalize HL intrinsic calls. Heap-resource access and dynamic CBV bitcasts
// are lowered first so the generic opcode-parameter pass below does not
// process them; then the opcode argument is injected into every remaining
// HL intrinsic call.
void FinishIntrinsics(
    HLModule &HLM, std::vector<std::pair<Function *, unsigned>> &intrinsicMap,
    DxilObjectProperties &objectProperties) {
  // Lower getResourceHeap before AddOpcodeParamForIntrinsics to skip automatic
  // lower for getResourceFromHeap.
  LowerGetResourceFromHeap(HLM, intrinsicMap);
  // Lower bitcast use of CBV into cbSubscript.
  LowerDynamicCBVUseToHandle(HLM, objectProperties);
  // translate opcode into parameter for intrinsic functions
  // Do this before CloneShaderEntry and TranslateRayQueryConstructor to avoid
  // update valToResPropertiesMap for cloned inst.
  AddOpcodeParamForIntrinsics(HLM, intrinsicMap, objectProperties);
}
// Add the dx.break temporary intrinsic and create Call Instructions
// to it for each branch that requires the artificial conditional.
void AddDxBreak(Module &M,
                const SmallVector<llvm::BranchInst *, 16> &DxBreaks) {
  if (DxBreaks.empty())
    return;
  // Collect functions that make use of any wave operations
  // Only they will need the dx.break condition added
  SmallPtrSet<Function *, 16> WaveUsers;
  for (Function &F : M.functions()) {
    HLOpcodeGroup opgroup = hlsl::GetHLOpcodeGroup(&F);
    // Only HL/extension intrinsic declarations can be wave-sensitive.
    if (F.isDeclaration() && IsHLWaveSensitive(&F) &&
        (opgroup == HLOpcodeGroup::HLIntrinsic ||
         opgroup == HLOpcodeGroup::HLExtIntrinsic)) {
      for (User *U : F.users()) {
        CallInst *CI = cast<CallInst>(U);
        WaveUsers.insert(CI->getParent()->getParent());
      }
    }
  }
  // If there are no wave users, not even the function declaration is needed
  if (WaveUsers.empty())
    return;
  // Create the dx.break function: i1() with no side effects besides
  // preventing branch simplification until CleanupDxBreak runs.
  FunctionType *FT =
      llvm::FunctionType::get(llvm::Type::getInt1Ty(M.getContext()), false);
  Function *func =
      cast<llvm::Function>(M.getOrInsertFunction(DXIL::kDxBreakFuncName, FT));
  func->addFnAttr(Attribute::AttrKind::NoUnwind);
  // For all break branches recorded previously, if the function they are in
  // makes any use of a wave op, it may need to be artificially conditional.
  // Make it so now. The CleanupDxBreak pass will remove those that aren't
  // needed when more is known.
  for (llvm::BranchInst *BI : DxBreaks) {
    if (WaveUsers.count(BI->getParent()->getParent())) {
      CallInst *Call = CallInst::Create(FT, func, ArrayRef<Value *>(), "", BI);
      BI->setCondition(Call);
      // Tag the branch so later passes can identify dx.break branches.
      if (!BI->getMetadata(DXIL::kDxBreakMDName)) {
        BI->setMetadata(DXIL::kDxBreakMDName,
                        llvm::MDNode::get(BI->getContext(), {}));
      }
    }
  }
}
  2634. } // namespace CGHLSLMSHelper
  2635. namespace CGHLSLMSHelper {
  2636. ScopeInfo::ScopeInfo(Function *F) : maxRetLevel(0), bAllReturnsInIf(true) {
  2637. Scope FuncScope;
  2638. FuncScope.kind = Scope::ScopeKind::FunctionScope;
  2639. FuncScope.EndScopeBB = nullptr;
  2640. FuncScope.bWholeScopeReturned = false;
  2641. // Make it 0 to avoid check when get parent.
  2642. // All loop on scopes should check kind != FunctionScope.
  2643. FuncScope.parentScopeIndex = 0;
  2644. scopes.emplace_back(FuncScope);
  2645. scopeStack.emplace_back(0);
  2646. }
  2647. // When all returns is inside if which is not nested, the flow is still
  2648. // structurized even there're more than one return.
  2649. bool ScopeInfo::CanSkipStructurize() {
  2650. return bAllReturnsInIf && maxRetLevel < 2;
  2651. }
  2652. void ScopeInfo::AddScope(Scope::ScopeKind k, BasicBlock *endScopeBB) {
  2653. Scope Scope;
  2654. Scope.kind = k;
  2655. Scope.bWholeScopeReturned = false;
  2656. Scope.EndScopeBB = endScopeBB;
  2657. Scope.parentScopeIndex = scopeStack.back();
  2658. scopeStack.emplace_back(scopes.size());
  2659. scopes.emplace_back(Scope);
  2660. }
  2661. void ScopeInfo::AddIf(BasicBlock *endIfBB) {
  2662. AddScope(Scope::ScopeKind::IfScope, endIfBB);
  2663. }
  2664. void ScopeInfo::AddSwitch(BasicBlock *endSwitch) {
  2665. AddScope(Scope::ScopeKind::SwitchScope, endSwitch);
  2666. }
  2667. void ScopeInfo::AddLoop(BasicBlock *loopContinue, BasicBlock *endLoop) {
  2668. AddScope(Scope::ScopeKind::LoopScope, endLoop);
  2669. scopes.back().loopContinueBB = loopContinue;
  2670. }
// Record a return located in `bbWithRet`. The return is modeled as its own
// pseudo-scope; its parent is retargeted to the innermost enclosing loop or
// switch when one exists (there a plain 'break' suffices to leave the scope).
void ScopeInfo::AddRet(BasicBlock *bbWithRet) {
  Scope RetScope;
  RetScope.kind = Scope::ScopeKind::ReturnScope;
  RetScope.EndScopeBB = bbWithRet;
  RetScope.parentScopeIndex = scopeStack.back();
  // - 1 for function scope which is at scopeStack[0].
  unsigned retLevel = scopeStack.size() - 1;
  // save max nested level for ret.
  maxRetLevel = std::max<unsigned>(maxRetLevel, retLevel);
  bool bGotLoopOrSwitch = false;
  // Walk enclosing scopes innermost-first looking for a loop or switch.
  for (auto it = scopeStack.rbegin(); it != scopeStack.rend(); it++) {
    unsigned idx = *it;
    Scope &S = scopes[idx];
    switch (S.kind) {
    default:
      break;
    case Scope::ScopeKind::LoopScope:
    case Scope::ScopeKind::SwitchScope:
      bGotLoopOrSwitch = true;
      // For return inside loop and switch, can just break.
      RetScope.parentScopeIndex = idx;
      break;
    }
    if (bGotLoopOrSwitch)
      break;
  }
  // A return inside a loop/switch means structurization cannot be skipped.
  bAllReturnsInIf &= !bGotLoopOrSwitch;
  // return finish current scope.
  RetScope.bWholeScopeReturned = true;
  // save retScope to rets.
  rets.emplace_back(scopes.size());
  scopes.emplace_back(RetScope);
  // Don't need to put retScope to stack since it cannot nested other scopes.
}
// Pop the current scope. The popped scope counts as wholly returned when its
// statement ended in a return and nothing branches to its end block.
void ScopeInfo::EndScope(bool bScopeFinishedWithRet) {
  unsigned idx = scopeStack.pop_back_val();
  Scope &Scope = GetScope(idx);
  // If whole stmt is finished and end scope bb has not used(nothing branch to
  // it). Then the whole scope is returned.
  Scope.bWholeScopeReturned =
      bScopeFinishedWithRet && Scope.EndScopeBB->user_empty();
}
// Access a scope by its index in the flat scope list.
Scope &ScopeInfo::GetScope(unsigned i) { return scopes[i]; }
// Retarget the end block of every wholly-returned (non-return) scope to its
// parent's end block, since the original end block will be deleted.
void ScopeInfo::LegalizeWholeReturnedScope() {
  // legalize scopes which whole scope returned.
  // When whole scope is returned, the endScopeBB will be deleted in codeGen.
  // Here update it to parent scope's endScope.
  // Since the scopes are in order, so it will automatic update to the final
  // target. A->B->C will just get A->C.
  for (auto &S : scopes) {
    if (S.bWholeScopeReturned && S.kind != Scope::ScopeKind::ReturnScope) {
      S.EndScopeBB = scopes[S.parentScopeIndex].EndScopeBB;
    }
  }
}
  2726. } // namespace CGHLSLMSHelper
  2727. namespace {
// Replace `oldEndScope` with `newEndScope` for every non-return scope that
// shared the old end block, and record the mapping for the new block.
void updateEndScope(
    ScopeInfo &ScopeInfo,
    DenseMap<BasicBlock *, SmallVector<unsigned, 2>> &EndBBToScopeIndexMap,
    BasicBlock *oldEndScope, BasicBlock *newEndScope) {
  auto it = EndBBToScopeIndexMap.find(oldEndScope);
  DXASSERT(it != EndBBToScopeIndexMap.end(),
           "fail to find endScopeBB in EndBBToScopeIndexMap");
  SmallVector<unsigned, 2> &scopeList = it->second;
  // Don't need to update when not share endBB with other scope.
  if (scopeList.size() < 2)
    return;
  for (unsigned i : scopeList) {
    Scope &S = ScopeInfo.GetScope(i);
    // Don't update return endBB, because that is the Block has return branch.
    if (S.kind != Scope::ScopeKind::ReturnScope)
      S.EndScopeBB = newEndScope;
  }
  EndBBToScopeIndexMap[newEndScope] = scopeList;
}
// Init ret value with undef to make sure it will not live through loops
// inside callers.
// Because returns are structurized, the flow is controlled by bIsReturned.
// The semantics are the same as multiple returns, but without knowledge of
// bIsReturned, some paths in the structurized flow would have the ret value
// uninitialized.
// When the function is called inside a loop, the ret value would then live
// across the loop after inlining.
  2755. void InitRetValue(BasicBlock *exitBB) {
  2756. Value *RetValPtr = nullptr;
  2757. if (ReturnInst *RI = dyn_cast<ReturnInst>(exitBB->getTerminator())) {
  2758. if (Value *RetV = RI->getReturnValue()) {
  2759. if (LoadInst *LI = dyn_cast<LoadInst>(RetV)) {
  2760. RetValPtr = LI->getPointerOperand();
  2761. }
  2762. }
  2763. }
  2764. if (!RetValPtr)
  2765. return;
  2766. if (AllocaInst *RetVAlloc = dyn_cast<AllocaInst>(RetValPtr)) {
  2767. IRBuilder<> B(RetVAlloc->getNextNode());
  2768. Type *Ty = RetVAlloc->getAllocatedType();
  2769. Value *Init = UndefValue::get(Ty);
  2770. if (Ty->isAggregateType()) {
  2771. // TODO: support aggreagate type and out parameters.
  2772. // Skip it here will cause undef on phi which the incoming path should
  2773. // never hit.
  2774. } else {
  2775. B.CreateStore(Init, RetVAlloc);
  2776. }
  2777. }
  2778. }
// For functions that have multiple returns, like
  2780. // float foo(float a, float b, float c) {
  2781. // float r = c;
  2782. // if (a > 0) {
  2783. // if (b > 0) {
  2784. // return -1;
  2785. // }
  2786. // ***
  2787. // }
  2788. // ...
  2789. // return r;
  2790. // }
  2791. // transform into
  2792. // float foo(float a, float b, float c) {
  2793. // bool bRet = false;
  2794. // float retV;
  2795. // float r = c;
  2796. // if (a > 0) {
  2797. // if (b > 0) {
  2798. // bRet = true;
  2799. // retV = -1;
  2800. // }
  2801. // if (!bRet) {
  2802. // ***
  2803. // }
  2804. // }
  2805. // if (!bRet) {
  2806. // ...
  2807. // retV = r;
  2808. // }
// return retV;
  2810. // }
// Rewrite a multi-return function into single-exit form.
// Every early return is replaced by "bReturned = true" plus a branch toward
// the parent scope's end block; partially-returned scopes get guard blocks
// that test bReturned and skip the not-yet-executed remainder. For wave-
// enabled stages, breaks out of loops are emitted as conditional branches
// (recorded in DxBreaks) so wave-op semantics can be preserved later.
void StructurizeMultiRetFunction(Function *F, ScopeInfo &ScopeInfo,
                                 bool bWaveEnabledStage,
                                 SmallVector<BranchInst *, 16> &DxBreaks) {
  // Nothing to do when there is at most one return in trivial control flow.
  if (ScopeInfo.CanSkipStructurize())
    return;
  // Get bbWithRets.
  auto &rets = ScopeInfo.GetRetScopes();

  IRBuilder<> B(F->getEntryBlock().begin());
  Scope &FunctionScope = ScopeInfo.GetScope(0);

  Type *boolTy = Type::getInt1Ty(F->getContext());
  Constant *cTrue = ConstantInt::get(boolTy, 1);
  Constant *cFalse = ConstantInt::get(boolTy, 0);
  // bool bIsReturned = false;
  AllocaInst *bIsReturned = B.CreateAlloca(boolTy, nullptr, "bReturned");
  B.CreateStore(cFalse, bIsReturned);

  // The single exit block is the successor of the first return scope's
  // end block; make it the function scope's end block.
  Scope &RetScope = ScopeInfo.GetScope(rets[0]);
  BasicBlock *exitBB = RetScope.EndScopeBB->getTerminator()->getSuccessor(0);
  FunctionScope.EndScopeBB = exitBB;

  // Find alloca for return val and init it to avoid undef after guard code
  // with bIsReturned.
  InitRetValue(exitBB);

  ScopeInfo.LegalizeWholeReturnedScope();

  // Map from endScopeBB to scope index.
  // When 2 scopes share same endScopeBB, need to update endScopeBB after
  // structurize.
  DenseMap<BasicBlock *, SmallVector<unsigned, 2>> EndBBToScopeIndexMap;
  auto &scopes = ScopeInfo.GetScopes();
  for (unsigned i = 0; i < scopes.size(); i++) {
    Scope &S = scopes[i];
    EndBBToScopeIndexMap[S.EndScopeBB].emplace_back(i);
  }

  // Scopes already rewritten; a scope shared by several returns is only
  // guarded once.
  DenseSet<unsigned> guardedSet;

  // Walk each return scope upward toward the function scope, rewriting
  // every enclosing scope on the way.
  for (auto it = rets.begin(); it != rets.end(); it++) {
    unsigned scopeIndex = *it;
    Scope *pCurScope = &ScopeInfo.GetScope(scopeIndex);
    Scope *pRetParentScope = &ScopeInfo.GetScope(pCurScope->parentScopeIndex);
    // skip ret not in nested control flow.
    if (pRetParentScope->kind == Scope::ScopeKind::FunctionScope)
      continue;

    do {
      BasicBlock *BB = pCurScope->EndScopeBB;
      // exit when scope is processed.
      if (guardedSet.count(scopeIndex))
        break;
      guardedSet.insert(scopeIndex);

      Scope *pParentScope = &ScopeInfo.GetScope(pCurScope->parentScopeIndex);
      BasicBlock *EndBB = pParentScope->EndScopeBB;
      // When whole scope returned, just branch to endScope of parent.
      if (pCurScope->bWholeScopeReturned) {
        // For ret, just branch to endScope of parent.
        if (pCurScope->kind == Scope::ScopeKind::ReturnScope) {
          BasicBlock *retBB = pCurScope->EndScopeBB;
          TerminatorInst *retBr = retBB->getTerminator();
          IRBuilder<> B(retBr);
          // Set bReturned to true.
          B.CreateStore(cTrue, bIsReturned);

          if (bWaveEnabledStage &&
              pParentScope->kind == Scope::ScopeKind::LoopScope) {
            // Emit an always-true conditional branch so the loop break is
            // visible to later wave-sensitive passes; record it in DxBreaks.
            BranchInst *BI =
                B.CreateCondBr(cTrue, EndBB, pParentScope->loopContinueBB);
            DxBreaks.emplace_back(BI);
            retBr->eraseFromParent();
          } else {
            // Update branch target.
            retBr->setSuccessor(0, EndBB);
          }
        }
        // For other scope, do nothing. Since whole scope is returned.
        // Just flow naturally to parent scope.
      } else {
        // When only part scope returned.
        // Use bIsReturned to guard to part which not returned.
        switch (pParentScope->kind) {
        case Scope::ScopeKind::ReturnScope:
          DXASSERT(0, "return scope must get whole scope returned.");
          break;
        case Scope::ScopeKind::FunctionScope:
        case Scope::ScopeKind::IfScope: {
          // inside if.
          // if (!bReturned) {
          //   rest of if or else.
          // }
          BasicBlock *CmpBB = BasicBlock::Create(BB->getContext(),
                                                 "bReturned.cmp.false", F, BB);
          // Make BB preds go to cmpBB.
          BB->replaceAllUsesWith(CmpBB);
          // Update endscopeBB to CmpBB for scopes which has BB as endscope.
          updateEndScope(ScopeInfo, EndBBToScopeIndexMap, BB, CmpBB);
          IRBuilder<> B(CmpBB);
          Value *isRetured = B.CreateLoad(bIsReturned, "bReturned.load");
          Value *notReturned =
              B.CreateICmpNE(isRetured, cFalse, "bReturned.not");
          // Already returned -> jump straight to parent's end block;
          // otherwise fall into the remaining code in BB.
          B.CreateCondBr(notReturned, EndBB, BB);
        } break;
        default: {
          // inside switch/loop
          // if (bReturned) {
          //   br endOfScope.
          // }
          BasicBlock *CmpBB =
              BasicBlock::Create(BB->getContext(), "bReturned.cmp.true", F, BB);
          BasicBlock *BreakBB =
              BasicBlock::Create(BB->getContext(), "bReturned.break", F, BB);
          BB->replaceAllUsesWith(CmpBB);
          // Update endscopeBB to CmpBB for scopes which has BB as endscope.
          updateEndScope(ScopeInfo, EndBBToScopeIndexMap, BB, CmpBB);
          IRBuilder<> B(CmpBB);
          Value *isReturned = B.CreateLoad(bIsReturned, "bReturned.load");
          isReturned = B.CreateICmpEQ(isReturned, cTrue, "bReturned.true");
          B.CreateCondBr(isReturned, BreakBB, BB);
          B.SetInsertPoint(BreakBB);
          if (bWaveEnabledStage &&
              pParentScope->kind == Scope::ScopeKind::LoopScope) {
            // Same wave-aware break pattern as above; branch is kept
            // conditional and recorded for later processing.
            BranchInst *BI =
                B.CreateCondBr(cTrue, EndBB, pParentScope->loopContinueBB);
            DxBreaks.emplace_back(BI);
          } else {
            B.CreateBr(EndBB);
          }
        } break;
        }
      }
      scopeIndex = pCurScope->parentScopeIndex;
      pCurScope = &ScopeInfo.GetScope(scopeIndex);
      // done when reach function scope.
    } while (pCurScope->kind != Scope::ScopeKind::FunctionScope);
  }
}
  2939. } // namespace
  2940. namespace CGHLSLMSHelper {
  2941. void StructurizeMultiRet(Module &M, clang::CodeGen::CodeGenModule &CGM,
  2942. DenseMap<Function *, ScopeInfo> &ScopeMap,
  2943. bool bWaveEnabledStage,
  2944. SmallVector<BranchInst *, 16> &DxBreaks) {
  2945. if (CGM.getCodeGenOpts().HLSLExtensionsCodegen) {
  2946. if (!CGM.getCodeGenOpts().HLSLExtensionsCodegen->IsOptionEnabled(
  2947. "structurize-returns"))
  2948. return;
  2949. } else {
  2950. if (!CGM.getCodeGenOpts().HLSLOptimizationToggles.count(
  2951. "structurize-returns") ||
  2952. !CGM.getCodeGenOpts()
  2953. .HLSLOptimizationToggles.find("structurize-returns")
  2954. ->second)
  2955. return;
  2956. }
  2957. for (Function &F : M) {
  2958. if (F.isDeclaration())
  2959. continue;
  2960. auto it = ScopeMap.find(&F);
  2961. if (it == ScopeMap.end())
  2962. continue;
  2963. StructurizeMultiRetFunction(&F, it->second, bWaveEnabledStage, DxBreaks);
  2964. }
  2965. }
  2966. bool DxilObjectProperties::AddResource(llvm::Value *V, const hlsl::DxilResourceProperties &RP) {
  2967. if (RP.isValid()) {
  2968. DXASSERT(!GetResource(V).isValid() || GetResource(V) == RP, "otherwise, property conflict");
  2969. resMap[V] = RP;
  2970. return true;
  2971. }
  2972. return false;
  2973. }
  2974. bool DxilObjectProperties::IsResource(llvm::Value *V) {
  2975. return resMap.count(V) != 0;
  2976. }
  2977. hlsl::DxilResourceProperties DxilObjectProperties::GetResource(llvm::Value *V) {
  2978. auto it = resMap.find(V);
  2979. if (it != resMap.end())
  2980. return it->second;
  2981. return DxilResourceProperties();
  2982. }
  2983. } // namespace CGHLSLMSHelper