///////////////////////////////////////////////////////////////////////////////
//                                                                           //
// CGHLSLMSFinishCodeGen.cpp                                                 //
// Copyright (C) Microsoft Corporation. All rights reserved.                 //
// This file is distributed under the University of Illinois Open Source    //
// License. See LICENSE.TXT for details.                                     //
//                                                                           //
// Implement FinishCodeGen.                                                  //
//                                                                           //
///////////////////////////////////////////////////////////////////////////////
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Type.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/GetElementPtrTypeIterator.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/StringRef.h"
#include "llvm/Analysis/DxilValueCache.h"
#include "llvm/Transforms/Utils/ValueMapper.h"
#include "llvm/Transforms/Utils/Cloning.h"
#include "llvm/IR/CFG.h"

#include "CodeGenModule.h"
#include "clang/Frontend/CodeGenOptions.h"
#include "clang/Basic/LangOptions.h"
#include "clang/Parse/ParseHLSL.h" // root sig would be in Parser if part of lang

#include "dxc/HLSL/HLModule.h"
#include "dxc/HLSL/HLSLExtensionsCodegenHelper.h"
#include "dxc/DXIL/DxilOperations.h"
#include "dxc/HlslIntrinsicOp.h"
#include "dxc/DXIL/DxilUtil.h"
#include "dxc/HLSL/DxilExportMap.h"
#include "dxc/DXIL/DxilResourceProperties.h"
#include "dxc/DXIL/DxilTypeSystem.h"
#include "dxc/DXIL/DxilConstants.h"
#include "dxc/DxilRootSignature/DxilRootSignature.h"
#include "dxc/HLSL/DxilGenerationPass.h"
#include "dxc/HLSL/HLMatrixType.h"

#include <vector>
#include <memory>
#include <fenv.h>

#include "CGHLSLMSHelper.h"

using namespace llvm;
using namespace hlsl;
using namespace CGHLSLMSHelper;

namespace {
Value *CreateHandleFromResPtr(Value *ResPtr, HLModule &HLM,
                              llvm::Type *HandleTy, IRBuilder<> &Builder) {
  Module &M = *HLM.GetModule();
  // Load so the resource only has Ld/St uses, which lets mem2reg remove the
  // temp resource.
  Value *ldObj = Builder.CreateLoad(ResPtr);
  Value *args[] = {ldObj};
  CallInst *Handle = HLM.EmitHLOperationCall(
      Builder, HLOpcodeGroup::HLCreateHandle, 0, HandleTy, args, M);
  return Handle;
}
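
// Wrap Handle in an HLAnnotateHandle call that records the resource
// properties (class, kind, and the encoded properties constant) so later
// passes can recover them from the handle alone.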
Value *CreateAnnotateHandle(HLModule &HLM, Value *Handle,
                            DxilResourceProperties &RP, llvm::Type *ResTy,
                            IRBuilder<> &Builder) {
  Constant *RPConstant = resource_helper::getAsConstant(
      RP, HLM.GetOP()->GetResourcePropertiesType(), *HLM.GetShaderModel());
  return HLM.EmitHLOperationCall(
      Builder, HLOpcodeGroup::HLAnnotateHandle,
      (unsigned)HLOpcodeGroup::HLAnnotateHandle, Handle->getType(),
      {Handle, Builder.getInt8((uint8_t)RP.Class),
       Builder.getInt8((uint8_t)RP.Kind), RPConstant, UndefValue::get(ResTy)},
      *HLM.GetModule());
}
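
// Rewrite IOP_CreateResourceFromHeap calls: each call becomes a handle
// created from the heap index and stored to a per-resource handle alloca,
// and every later load of the resource pointer is replaced by a load of
// that handle cast back to the resource type.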
void LowerGetResourceFromHeap(
    HLModule &HLM, std::vector<std::pair<Function *, unsigned>> &intrinsicMap) {
  llvm::Module &M = *HLM.GetModule();
  llvm::Type *HandleTy = HLM.GetOP()->GetHandleType();
  unsigned GetResFromHeapOp =
      static_cast<unsigned>(IntrinsicOp::IOP_CreateResourceFromHeap);
  DenseMap<Instruction *, Instruction *> ResourcePtrToHandlePtrMap;
  for (auto it : intrinsicMap) {
    unsigned opcode = it.second;
    if (opcode != GetResFromHeapOp)
      continue;
    Function *F = it.first;
    HLOpcodeGroup group = hlsl::GetHLOpcodeGroup(F);
    if (group != HLOpcodeGroup::HLIntrinsic)
      continue;
    for (auto uit = F->user_begin(); uit != F->user_end();) {
      CallInst *CI = cast<CallInst>(*(uit++));
      Instruction *ResPtr = cast<Instruction>(CI->getArgOperand(0));
      Value *Index = CI->getArgOperand(1);
      IRBuilder<> Builder(CI);
      // Make a handle from GetResFromHeap.
      Value *Handle =
          HLM.EmitHLOperationCall(Builder, HLOpcodeGroup::HLIntrinsic,
                                  GetResFromHeapOp, HandleTy, {Index}, M);
      // Find the handle ptr for the res ptr.
      auto it = ResourcePtrToHandlePtrMap.find(ResPtr);
      Instruction *HandlePtr = nullptr;
      if (it != ResourcePtrToHandlePtrMap.end()) {
        HandlePtr = it->second;
      } else {
        IRBuilder<> AllocaBuilder(
            ResPtr->getParent()->getParent()->getEntryBlock().begin());
        HandlePtr = AllocaBuilder.CreateAlloca(HandleTy);
        ResourcePtrToHandlePtrMap[ResPtr] = HandlePtr;
      }
      // Store the handle to the handle ptr.
      Builder.CreateStore(Handle, HandlePtr);
      CI->eraseFromParent();
    }
  }
  // Replace loads of the resource ptr with loads of the handle ptr.
  for (auto it : ResourcePtrToHandlePtrMap) {
    Instruction *resPtr = it.first;
    Instruction *handlePtr = it.second;
    for (auto uit = resPtr->user_begin(); uit != resPtr->user_end();) {
      User *U = *(uit++);
      BitCastInst *BCI = cast<BitCastInst>(U);
      DXASSERT(
          dxilutil::IsHLSLResourceType(BCI->getType()->getPointerElementType()),
          "illegal cast of resource ptr");
      for (auto cuit = BCI->user_begin(); cuit != BCI->user_end();) {
        LoadInst *LI = cast<LoadInst>(*(cuit++));
        IRBuilder<> Builder(LI);
        Value *Handle = Builder.CreateLoad(handlePtr);
        Value *Res =
            HLM.EmitHLOperationCall(Builder, HLOpcodeGroup::HLCast,
                                    (unsigned)HLCastOpcode::HandleToResCast,
                                    LI->getType(), {Handle}, M);
        LI->replaceAllUsesWith(Res);
        LI->eraseFromParent();
      }
      BCI->eraseFromParent();
    }
    resPtr->eraseFromParent();
  }
}
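
// A bool vector lives in memory with widened elements, so the subscript call
// returns a pointer to the memory representation. Rewrite each load/store
// through the subscript into a direct GEP on the underlying vector, with a
// zext on loads and an icmp-ne-0 on stores to convert between the register
// (i1) and memory forms.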
void ReplaceBoolVectorSubscript(CallInst *CI) {
  Value *Ptr = CI->getArgOperand(0);
  Value *Idx = CI->getArgOperand(1);
  Value *IdxList[] = {ConstantInt::get(Idx->getType(), 0), Idx};
  for (auto It = CI->user_begin(), E = CI->user_end(); It != E;) {
    Instruction *user = cast<Instruction>(*(It++));
    IRBuilder<> Builder(user);
    Value *GEP = Builder.CreateInBoundsGEP(Ptr, IdxList);
    if (LoadInst *LI = dyn_cast<LoadInst>(user)) {
      Value *NewLd = Builder.CreateLoad(GEP);
      Value *cast = Builder.CreateZExt(NewLd, LI->getType());
      LI->replaceAllUsesWith(cast);
      LI->eraseFromParent();
    } else {
      // Must be a store inst here.
      StoreInst *SI = cast<StoreInst>(user);
      Value *V = SI->getValueOperand();
      Value *cast =
          Builder.CreateICmpNE(V, llvm::ConstantInt::get(V->getType(), 0));
      Builder.CreateStore(cast, GEP);
      SI->eraseFromParent();
    }
  }
  CI->eraseFromParent();
}

void ReplaceBoolVectorSubscript(Function *F) {
  for (auto It = F->user_begin(), E = F->user_end(); It != E;) {
    User *user = *(It++);
    CallInst *CI = cast<CallInst>(user);
    ReplaceBoolVectorSubscript(CI);
  }
}
// Add function body for intrinsic if possible.
Function *CreateOpFunction(llvm::Module &M, Function *F,
                           llvm::FunctionType *funcTy, HLOpcodeGroup group,
                           unsigned opcode) {
  Function *opFunc = nullptr;
  AttributeSet attribs = F->getAttributes().getFnAttributes();
  llvm::Type *opcodeTy = llvm::Type::getInt32Ty(M.getContext());
  if (group == HLOpcodeGroup::HLIntrinsic) {
    IntrinsicOp intriOp = static_cast<IntrinsicOp>(opcode);
    switch (intriOp) {
    case IntrinsicOp::MOP_Append:
    case IntrinsicOp::MOP_Consume: {
      bool bAppend = intriOp == IntrinsicOp::MOP_Append;
      llvm::Type *handleTy = funcTy->getParamType(HLOperandIndex::kHandleOpIdx);
      // Don't generate body for OutputStream::Append.
      if (bAppend && HLModule::IsStreamOutputPtrType(handleTy)) {
        opFunc = GetOrCreateHLFunction(M, funcTy, group, opcode, attribs);
        break;
      }
      opFunc = GetOrCreateHLFunctionWithBody(M, funcTy, group, opcode,
                                             bAppend ? "append" : "consume");
      llvm::Type *counterTy = llvm::Type::getInt32Ty(M.getContext());
      llvm::FunctionType *IncCounterFuncTy =
          llvm::FunctionType::get(counterTy, {opcodeTy, handleTy}, false);
      unsigned counterOpcode =
          bAppend ? (unsigned)IntrinsicOp::MOP_IncrementCounter
                  : (unsigned)IntrinsicOp::MOP_DecrementCounter;
      Function *incCounterFunc = GetOrCreateHLFunction(
          M, IncCounterFuncTy, group, counterOpcode, attribs);
      llvm::Type *idxTy = counterTy;
      llvm::Type *valTy =
          bAppend ? funcTy->getParamType(HLOperandIndex::kAppendValOpIndex)
                  : funcTy->getReturnType();
      // The return type for subscript should be a pointer type, hence in
      // memory representation.
      llvm::Type *subscriptTy = valTy;
      bool isBoolScalarOrVector = false;
      if (!subscriptTy->isPointerTy()) {
        if (subscriptTy->getScalarType()->isIntegerTy(1)) {
          isBoolScalarOrVector = true;
          llvm::Type *memReprType =
              llvm::IntegerType::get(subscriptTy->getContext(), 32);
          subscriptTy =
              subscriptTy->isVectorTy()
                  ? llvm::VectorType::get(memReprType,
                                          subscriptTy->getVectorNumElements())
                  : memReprType;
        }
        subscriptTy = llvm::PointerType::get(subscriptTy, 0);
      }
      llvm::FunctionType *SubscriptFuncTy = llvm::FunctionType::get(
          subscriptTy, {opcodeTy, handleTy, idxTy}, false);
      Function *subscriptFunc = GetOrCreateHLFunction(
          M, SubscriptFuncTy, HLOpcodeGroup::HLSubscript,
          (unsigned)HLSubscriptOpcode::DefaultSubscript, attribs);
      BasicBlock *BB =
          BasicBlock::Create(opFunc->getContext(), "Entry", opFunc);
      IRBuilder<> Builder(BB);
      auto argIter = opFunc->args().begin();
      // Skip the opcode arg.
      argIter++;
      Argument *thisArg = argIter++;
      // int counter = IncrementCounter/DecrementCounter(Buf);
      Value *incCounterOpArg = ConstantInt::get(idxTy, counterOpcode);
      Value *counter =
          Builder.CreateCall(incCounterFunc, {incCounterOpArg, thisArg});
      // Buf[counter];
      Value *subscriptOpArg = ConstantInt::get(
          idxTy, (unsigned)HLSubscriptOpcode::DefaultSubscript);
      Value *subscript =
          Builder.CreateCall(subscriptFunc, {subscriptOpArg, thisArg, counter});
      if (bAppend) {
        Argument *valArg = argIter;
        // Buf[counter] = val;
        if (valTy->isPointerTy()) {
          unsigned size = M.getDataLayout().getTypeAllocSize(
              subscript->getType()->getPointerElementType());
          Builder.CreateMemCpy(subscript, valArg, size, 1);
        } else {
          Value *storedVal = valArg;
          // Convert to memory representation.
          if (isBoolScalarOrVector)
            storedVal = Builder.CreateZExt(
                storedVal, subscriptTy->getPointerElementType(), "frombool");
          Builder.CreateStore(storedVal, subscript);
        }
        Builder.CreateRetVoid();
      } else {
        // return Buf[counter];
        if (valTy->isPointerTy())
          Builder.CreateRet(subscript);
        else {
          Value *retVal = Builder.CreateLoad(subscript);
          // Convert to register representation.
          if (isBoolScalarOrVector)
            retVal = Builder.CreateICmpNE(
                retVal, Constant::getNullValue(retVal->getType()), "tobool");
          Builder.CreateRet(retVal);
        }
      }
    } break;
    case IntrinsicOp::IOP_sincos: {
      opFunc =
          GetOrCreateHLFunctionWithBody(M, funcTy, group, opcode, "sincos");
      llvm::Type *valTy =
          funcTy->getParamType(HLOperandIndex::kTrinaryOpSrc0Idx);
      llvm::FunctionType *sinFuncTy =
          llvm::FunctionType::get(valTy, {opcodeTy, valTy}, false);
      unsigned sinOp = static_cast<unsigned>(IntrinsicOp::IOP_sin);
      unsigned cosOp = static_cast<unsigned>(IntrinsicOp::IOP_cos);
      Function *sinFunc =
          GetOrCreateHLFunction(M, sinFuncTy, group, sinOp, attribs);
      Function *cosFunc =
          GetOrCreateHLFunction(M, sinFuncTy, group, cosOp, attribs);
      BasicBlock *BB =
          BasicBlock::Create(opFunc->getContext(), "Entry", opFunc);
      IRBuilder<> Builder(BB);
      auto argIter = opFunc->args().begin();
      // Skip the opcode arg.
      argIter++;
      Argument *valArg = argIter++;
      Argument *sinPtrArg = argIter++;
      Argument *cosPtrArg = argIter++;
      Value *sinOpArg = ConstantInt::get(opcodeTy, sinOp);
      Value *sinVal = Builder.CreateCall(sinFunc, {sinOpArg, valArg});
      Builder.CreateStore(sinVal, sinPtrArg);
      Value *cosOpArg = ConstantInt::get(opcodeTy, cosOp);
      Value *cosVal = Builder.CreateCall(cosFunc, {cosOpArg, valArg});
      Builder.CreateStore(cosVal, cosPtrArg);
      // Ret.
      Builder.CreateRetVoid();
    } break;
    default:
      opFunc = GetOrCreateHLFunction(M, funcTy, group, opcode, attribs);
      break;
    }
  } else if (group == HLOpcodeGroup::HLExtIntrinsic) {
    llvm::StringRef fnName = F->getName();
    llvm::StringRef groupName = GetHLOpcodeGroupNameByAttr(F);
    opFunc = GetOrCreateHLFunction(M, funcTy, group, &groupName, &fnName,
                                   opcode, attribs);
  } else {
    opFunc = GetOrCreateHLFunction(M, funcTy, group, opcode, attribs);
  }
  return opFunc;
}
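
// Recover the DxilResourceProperties for a resource-typed intrinsic argument.
// First try the value itself in valToResPropertiesMap; otherwise walk up
// through GEPs (including arrays of resources) and, failing that, read the
// resource attribute off the struct field annotation the GEP indexes into.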
DxilResourceProperties GetResourcePropsFromIntrinsicObjectArg(
    Value *arg, HLModule &HLM, DxilTypeSystem &typeSys,
    DenseMap<Value *, DxilResourceProperties> &valToResPropertiesMap) {
  DxilResourceProperties RP;
  RP.Class = DXIL::ResourceClass::Invalid;
  auto RPIt = valToResPropertiesMap.find(arg);
  if (RPIt != valToResPropertiesMap.end()) {
    RP = RPIt->second;
  } else {
    // Must be a GEP.
    GEPOperator *GEP = cast<GEPOperator>(arg);
    // Find RP from the GEP.
    Value *Ptr = GEP->getPointerOperand();
    // When Ptr is an array of resources, check if it is another GEP.
    while (
        dxilutil::IsHLSLResourceType(dxilutil::GetArrayEltTy(Ptr->getType()))) {
      if (GEPOperator *ParentGEP = dyn_cast<GEPOperator>(Ptr)) {
        GEP = ParentGEP;
        Ptr = GEP->getPointerOperand();
      } else {
        break;
      }
    }
    RPIt = valToResPropertiesMap.find(Ptr);
    // When ptr is an array of resources, ptr could be in
    // valToResPropertiesMap.
    if (RPIt != valToResPropertiesMap.end()) {
      RP = RPIt->second;
    } else {
      DxilStructAnnotation *Anno = nullptr;
      for (auto gepIt = gep_type_begin(GEP), E = gep_type_end(GEP); gepIt != E;
           ++gepIt) {
        if (StructType *ST = dyn_cast<StructType>(*gepIt)) {
          Anno = typeSys.GetStructAnnotation(ST);
          DXASSERT(Anno, "missing type annotation");
          unsigned Index =
              cast<ConstantInt>(gepIt.getOperand())->getLimitedValue();
          DxilFieldAnnotation &fieldAnno = Anno->GetFieldAnnotation(Index);
          if (fieldAnno.HasResourceAttribute()) {
            MDNode *resAttrib = fieldAnno.GetResourceAttribute();
            DxilResourceBase R(DXIL::ResourceClass::Invalid);
            HLM.LoadDxilResourceBaseFromMDNode(resAttrib, R);
            switch (R.GetClass()) {
            case DXIL::ResourceClass::SRV:
            case DXIL::ResourceClass::UAV: {
              DxilResource Res;
              HLM.LoadDxilResourceFromMDNode(resAttrib, Res);
              RP = resource_helper::loadFromResourceBase(&Res);
            } break;
            case DXIL::ResourceClass::Sampler: {
              DxilSampler Sampler;
              HLM.LoadDxilSamplerFromMDNode(resAttrib, Sampler);
              RP = resource_helper::loadFromResourceBase(&Sampler);
            } break;
            default:
              DXASSERT(0, "invalid resource attribute in field annotation");
              break;
            }
            break;
          }
        }
      }
    }
  }
  DXASSERT(RP.Class != DXIL::ResourceClass::Invalid,
           "invalid resource properties");
  return RP;
}
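
// Rewrite every call to the HL intrinsic F so the opcode becomes the first
// argument and resource pointer params become annotated handles; the
// double-subscript case is folded into a single call carrying both indices.
// F itself is replaced by the new overload and deleted.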
void AddOpcodeParamForIntrinsic(
    HLModule &HLM, Function *F, unsigned opcode, llvm::Type *HandleTy,
    DenseMap<Value *, DxilResourceProperties> &valToResPropertiesMap) {
  llvm::Module &M = *HLM.GetModule();
  llvm::FunctionType *oldFuncTy = F->getFunctionType();
  SmallVector<llvm::Type *, 4> paramTyList;
  // Add the opcode param.
  llvm::Type *opcodeTy = llvm::Type::getInt32Ty(M.getContext());
  paramTyList.emplace_back(opcodeTy);
  paramTyList.append(oldFuncTy->param_begin(), oldFuncTy->param_end());
  for (unsigned i = 1; i < paramTyList.size(); i++) {
    llvm::Type *Ty = paramTyList[i];
    if (Ty->isPointerTy()) {
      Ty = Ty->getPointerElementType();
      if (dxilutil::IsHLSLResourceType(Ty)) {
        // Use handle type for resource type.
        // This makes sure the temp object variable is only used by
        // createHandle.
        paramTyList[i] = HandleTy;
      }
    }
  }
  HLOpcodeGroup group = hlsl::GetHLOpcodeGroup(F);
  if (group == HLOpcodeGroup::HLSubscript &&
      opcode == static_cast<unsigned>(HLSubscriptOpcode::VectorSubscript)) {
    llvm::FunctionType *FT = F->getFunctionType();
    llvm::Type *VecArgTy = FT->getParamType(0);
    llvm::VectorType *VType =
        cast<llvm::VectorType>(VecArgTy->getPointerElementType());
    llvm::Type *Ty = VType->getElementType();
    DXASSERT(Ty->isIntegerTy(), "Only bool could use VectorSubscript");
    llvm::IntegerType *ITy = cast<IntegerType>(Ty);
    DXASSERT_LOCALVAR(ITy, ITy->getBitWidth() == 1,
                      "Only bool could use VectorSubscript");
    // The return type is i8*.
    // Replace all uses with i1*.
    ReplaceBoolVectorSubscript(F);
    return;
  }
  bool isDoubleSubscriptFunc =
      group == HLOpcodeGroup::HLSubscript &&
      opcode == static_cast<unsigned>(HLSubscriptOpcode::DoubleSubscript);
  llvm::Type *RetTy = oldFuncTy->getReturnType();
  if (isDoubleSubscriptFunc) {
    CallInst *doubleSub = cast<CallInst>(*F->user_begin());
    // Change currentIdx type into coord type.
    auto U = doubleSub->user_begin();
    Value *user = *U;
    CallInst *secSub = cast<CallInst>(user);
    unsigned coordIdx = HLOperandIndex::kSubscriptIndexOpIdx;
    // The opcode operand is not added yet, so the index needs -1.
    if (GetHLOpcodeGroupByName(secSub->getCalledFunction()) ==
        HLOpcodeGroup::NotHL)
      coordIdx -= 1;
    Value *coord = secSub->getArgOperand(coordIdx);
    llvm::Type *coordTy = coord->getType();
    paramTyList[HLOperandIndex::kSubscriptIndexOpIdx] = coordTy;
    // Add the sampleIdx or mipLevel parameter to the end.
    paramTyList.emplace_back(opcodeTy);
    // Change return type to be resource ret type.
    // The opcode operand is not added yet, so the index needs -1.
    Value *objPtr =
        doubleSub->getArgOperand(HLOperandIndex::kSubscriptObjectOpIdx - 1);
    // Must be a GEP.
    GEPOperator *objGEP = cast<GEPOperator>(objPtr);
    gep_type_iterator GEPIt = gep_type_begin(objGEP), E = gep_type_end(objGEP);
    llvm::Type *resTy = nullptr;
    while (GEPIt != E) {
      if (dxilutil::IsHLSLResourceType(*GEPIt)) {
        resTy = *GEPIt;
        break;
      }
      GEPIt++;
    }
    DXASSERT(resTy, "must find the resource type");
    // Change object type to handle type.
    paramTyList[HLOperandIndex::kSubscriptObjectOpIdx] = HandleTy;
    // Change RetTy into a pointer to the resource return type.
    RetTy = cast<StructType>(resTy)->getElementType(0)->getPointerTo();
  }
  llvm::FunctionType *funcTy =
      llvm::FunctionType::get(RetTy, paramTyList, oldFuncTy->isVarArg());
  Function *opFunc = CreateOpFunction(M, F, funcTy, group, opcode);
  StringRef lower = hlsl::GetHLLowerStrategy(F);
  if (!lower.empty())
    hlsl::SetHLLowerStrategy(opFunc, lower);
  DxilTypeSystem &typeSys = HLM.GetTypeSystem();
  for (auto user = F->user_begin(); user != F->user_end();) {
    // User must be a call.
    CallInst *oldCI = cast<CallInst>(*(user++));
    SmallVector<Value *, 4> opcodeParamList;
    Value *opcodeConst = Constant::getIntegerValue(opcodeTy, APInt(32, opcode));
    opcodeParamList.emplace_back(opcodeConst);
    opcodeParamList.append(oldCI->arg_operands().begin(),
                           oldCI->arg_operands().end());
    IRBuilder<> Builder(oldCI);
    if (isDoubleSubscriptFunc) {
      // Change obj to the resource pointer.
      Value *objVal = opcodeParamList[HLOperandIndex::kSubscriptObjectOpIdx];
      GEPOperator *objGEP = cast<GEPOperator>(objVal);
      SmallVector<Value *, 8> IndexList;
      IndexList.append(objGEP->idx_begin(), objGEP->idx_end());
      Value *lastIndex = IndexList.back();
      ConstantInt *constIndex = cast<ConstantInt>(lastIndex);
      DXASSERT_LOCALVAR(constIndex, constIndex->getLimitedValue() == 1,
                        "last index must be 1");
      // Remove the last index.
      IndexList.pop_back();
      objVal = objGEP->getPointerOperand();
      DxilResourceProperties RP = GetResourcePropsFromIntrinsicObjectArg(
          objVal, HLM, typeSys, valToResPropertiesMap);
      if (IndexList.size() > 1)
        objVal = Builder.CreateInBoundsGEP(objVal, IndexList);
      Value *Handle = CreateHandleFromResPtr(objVal, HLM, HandleTy, Builder);
      Type *ResTy = objVal->getType()->getPointerElementType();
      Handle = CreateAnnotateHandle(HLM, Handle, RP, ResTy, Builder);
      // Change obj to the resource pointer.
      opcodeParamList[HLOperandIndex::kSubscriptObjectOpIdx] = Handle;
      // Set idx and mipIdx.
      Value *mipIdx = opcodeParamList[HLOperandIndex::kSubscriptIndexOpIdx];
      auto U = oldCI->user_begin();
      Value *user = *U;
      CallInst *secSub = cast<CallInst>(user);
      unsigned idxOpIndex = HLOperandIndex::kSubscriptIndexOpIdx;
      if (GetHLOpcodeGroupByName(secSub->getCalledFunction()) ==
          HLOpcodeGroup::NotHL)
        idxOpIndex--;
      Value *idx = secSub->getArgOperand(idxOpIndex);
      DXASSERT(secSub->hasOneUse(), "subscript should only have one use");
      // Add the sampleIdx or mipLevel parameter to the end.
      opcodeParamList[HLOperandIndex::kSubscriptIndexOpIdx] = idx;
      opcodeParamList.emplace_back(mipIdx);
      // Insert the new call before secSub to make sure idx is ready to use.
      Builder.SetInsertPoint(secSub);
    }
    for (unsigned i = 1; i < opcodeParamList.size(); i++) {
      Value *arg = opcodeParamList[i];
      llvm::Type *Ty = arg->getType();
      if (Ty->isPointerTy()) {
        Ty = Ty->getPointerElementType();
        if (dxilutil::IsHLSLResourceType(Ty)) {
          DxilResourceProperties RP = GetResourcePropsFromIntrinsicObjectArg(
              arg, HLM, typeSys, valToResPropertiesMap);
          // Use the object type directly, not by pointer.
          // This makes sure the temp object variable is only used by ld/st.
          if (GEPOperator *argGEP = dyn_cast<GEPOperator>(arg)) {
            std::vector<Value *> idxList(argGEP->idx_begin(),
                                         argGEP->idx_end());
            // Create an instruction to avoid GEPOperator.
            GetElementPtrInst *GEP = GetElementPtrInst::CreateInBounds(
                argGEP->getPointerOperand(), idxList);
            Builder.Insert(GEP);
            arg = GEP;
          }
          llvm::Type *ResTy = arg->getType()->getPointerElementType();
          Value *Handle = CreateHandleFromResPtr(arg, HLM, HandleTy, Builder);
          Handle = CreateAnnotateHandle(HLM, Handle, RP, ResTy, Builder);
          opcodeParamList[i] = Handle;
        }
      }
    }
    Value *CI = Builder.CreateCall(opFunc, opcodeParamList);
    if (!isDoubleSubscriptFunc) {
      // Replace with the new call and delete the old call.
      oldCI->replaceAllUsesWith(CI);
      oldCI->eraseFromParent();
    } else {
      // For double subscript.
      // Replace the single user's uses with the new CI.
      auto U = oldCI->user_begin();
      Value *user = *U;
      CallInst *secSub = cast<CallInst>(user);
      secSub->replaceAllUsesWith(CI);
      secSub->eraseFromParent();
      oldCI->eraseFromParent();
    }
  }
  // Delete the function.
  F->eraseFromParent();
}
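
// Run AddOpcodeParamForIntrinsic over every intrinsic in the map, dropping
// intrinsics that ended up with no users.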
void AddOpcodeParamForIntrinsics(
    HLModule &HLM, std::vector<std::pair<Function *, unsigned>> &intrinsicMap,
    DenseMap<Value *, DxilResourceProperties> &valToResPropertiesMap) {
  llvm::Type *HandleTy = HLM.GetOP()->GetHandleType();
  for (auto mapIter : intrinsicMap) {
    Function *F = mapIter.first;
    if (F->user_empty()) {
      // Delete the function.
      F->eraseFromParent();
      continue;
    }
    unsigned opcode = mapIter.second;
    AddOpcodeParamForIntrinsic(HLM, F, opcode, HandleTy, valToResPropertiesMap);
  }
}
} // namespace

namespace {
// Returns true if a global value is being updated.
bool GlobalHasStoreUserRec(Value *V, std::set<Value *> &visited) {
  bool isWriteEnabled = false;
  if (V && visited.find(V) == visited.end()) {
    visited.insert(V);
    for (User *U : V->users()) {
      if (isa<StoreInst>(U)) {
        return true;
      } else if (CallInst *CI = dyn_cast<CallInst>(U)) {
        Function *F = CI->getCalledFunction();
        if (!F->isIntrinsic()) {
          HLOpcodeGroup hlGroup = GetHLOpcodeGroup(F);
          switch (hlGroup) {
          case HLOpcodeGroup::NotHL:
            return true;
          case HLOpcodeGroup::HLMatLoadStore: {
            HLMatLoadStoreOpcode opCode =
                static_cast<HLMatLoadStoreOpcode>(hlsl::GetHLOpcode(CI));
            if (opCode == HLMatLoadStoreOpcode::ColMatStore ||
                opCode == HLMatLoadStoreOpcode::RowMatStore)
              return true;
            break;
          }
          case HLOpcodeGroup::HLCast:
          case HLOpcodeGroup::HLSubscript:
            if (GlobalHasStoreUserRec(U, visited))
              return true;
            break;
          default:
            break;
          }
        }
      } else if (isa<GEPOperator>(U) || isa<PHINode>(U) || isa<SelectInst>(U)) {
        if (GlobalHasStoreUserRec(U, visited))
          return true;
      }
    }
  }
  return isWriteEnabled;
}

// Returns true if any direct user of the global is a store inst; otherwise
// recurses through the remaining users and checks whether any GEP exists
// which in turn has a store inst as a user.
bool GlobalHasStoreUser(GlobalVariable *GV) {
  std::set<Value *> visited;
  Value *V = cast<Value>(GV);
  return GlobalHasStoreUserRec(V, visited);
}
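
// Make an internal-linkage ".static.copy" clone of GV, copying its
// initializer or zero-initializing if it has none.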
GlobalVariable *CreateStaticGlobal(llvm::Module *M, GlobalVariable *GV) {
  Constant *GC = M->getOrInsertGlobal(GV->getName().str() + ".static.copy",
                                      GV->getType()->getPointerElementType());
  GlobalVariable *NGV = cast<GlobalVariable>(GC);
  if (GV->hasInitializer()) {
    NGV->setInitializer(GV->getInitializer());
  } else {
    // Being static, the copy must be initialized per LLVM rules.
    NGV->setInitializer(
        Constant::getNullValue(GV->getType()->getPointerElementType()));
  }
  // A static global should have internal linkage.
  NGV->setLinkage(GlobalValue::InternalLinkage);
  return NGV;
}
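
// For each non-internal, non-resource, non-groupshared global: if it is
// stored to, redirect all uses to a mutable static copy initialized by a
// memcpy at the entry of EF; every such global is then marked constant.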
void CreateWriteEnabledStaticGlobals(llvm::Module *M, llvm::Function *EF) {
  std::vector<GlobalVariable *> worklist;
  for (GlobalVariable &GV : M->globals()) {
    if (!GV.isConstant() && GV.getLinkage() != GlobalValue::InternalLinkage &&
        // Skip globals which are HLSL objects or group shared.
        !dxilutil::IsHLSLObjectType(GV.getType()->getPointerElementType()) &&
        !dxilutil::IsSharedMemoryGlobal(&GV)) {
      if (GlobalHasStoreUser(&GV))
        worklist.emplace_back(&GV);
      // TODO: Ensure that constant globals aren't using initializer.
      GV.setConstant(true);
    }
  }
  IRBuilder<> Builder(
      dxilutil::FirstNonAllocaInsertionPt(&EF->getEntryBlock()));
  for (GlobalVariable *GV : worklist) {
    GlobalVariable *NGV = CreateStaticGlobal(M, GV);
    GV->replaceAllUsesWith(NGV);
    // Insert a memcpy from the original global in the entry block.
    uint64_t size = M->getDataLayout().getTypeAllocSize(
        GV->getType()->getPointerElementType());
    Builder.CreateMemCpy(NGV, GV, size, 1);
  }
}
} // namespace

namespace {
void SetEntryFunction(HLModule &HLM, Function *Entry,
                      clang::CodeGen::CodeGenModule &CGM) {
  if (Entry == nullptr) {
    clang::DiagnosticsEngine &Diags = CGM.getDiags();
    unsigned DiagID = Diags.getCustomDiagID(clang::DiagnosticsEngine::Error,
                                            "cannot find entry function %0");
    Diags.Report(DiagID) << CGM.getCodeGenOpts().HLSLEntryFunction;
    return;
  }
  HLM.SetEntryFunction(Entry);
}
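
// Clone Orig into a new external function named Name, mapping parameters
// one-to-one and copying the function annotation from SrcTypeSys.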
Function *CloneFunction(Function *Orig, const llvm::Twine &Name,
                        llvm::Module *llvmModule, hlsl::DxilTypeSystem &TypeSys,
                        hlsl::DxilTypeSystem &SrcTypeSys) {
  Function *F = Function::Create(Orig->getFunctionType(),
                                 GlobalValue::LinkageTypes::ExternalLinkage,
                                 Name, llvmModule);
  SmallVector<ReturnInst *, 2> Returns;
  ValueToValueMapTy vmap;
  // Map params.
  auto entryParamIt = F->arg_begin();
  for (Argument &param : Orig->args()) {
    vmap[&param] = (entryParamIt++);
  }
  llvm::CloneFunctionInto(F, Orig, vmap, /*ModuleLevelChanges*/ false, Returns);
  TypeSys.CopyFunctionAnnotation(F, Orig, SrcTypeSys);
  return F;
}

// Clone the shader entry function to be called by other functions.
// The original function will be used as the shader entry.
void CloneShaderEntry(Function *ShaderF, StringRef EntryName, HLModule &HLM) {
  Function *F = CloneFunction(ShaderF, "", HLM.GetModule(), HLM.GetTypeSystem(),
                              HLM.GetTypeSystem());
  F->takeName(ShaderF);
  F->setLinkage(GlobalValue::LinkageTypes::InternalLinkage);
  // Set the entry back to its name from before mangling.
  ShaderF->setName(EntryName);
  DxilFunctionAnnotation *annot = HLM.GetFunctionAnnotation(F);
  DxilParameterAnnotation &cloneRetAnnot = annot->GetRetTypeAnnotation();
  // Clear the semantic for the cloned one.
  cloneRetAnnot.SetSemanticString("");
  cloneRetAnnot.SetSemanticIndexVec({});
  for (unsigned i = 0; i < annot->GetNumParameters(); i++) {
    DxilParameterAnnotation &cloneParamAnnot = annot->GetParameterAnnotation(i);
    // Clear the semantic for the cloned one.
    cloneParamAnnot.SetSemanticString("");
    cloneParamAnnot.SetSemanticIndexVec({});
  }
}
} // namespace

namespace {
bool IsPatchConstantFunction(
    const Function *F, StringMap<PatchConstantInfo> &patchConstantFunctionMap) {
  DXASSERT_NOMSG(F != nullptr);
  for (auto &&p : patchConstantFunctionMap) {
    if (p.second.Func == F)
      return true;
  }
  return false;
}
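
// Resolve the patchconstantfunc attribute on an HS entry: look up the named
// function, attach it to the entry's hull shader props, reject inout params,
// and validate that its input/output control point counts match the entry's.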
void SetPatchConstantFunctionWithAttr(
    const EntryFunctionInfo &EntryFunc,
    const clang::HLSLPatchConstantFuncAttr *PatchConstantFuncAttr,
    StringMap<PatchConstantInfo> &patchConstantFunctionMap,
    std::unordered_map<Function *, std::unique_ptr<DxilFunctionProps>>
        &patchConstantFunctionPropsMap,
    HLModule &HLM, clang::CodeGen::CodeGenModule &CGM) {
  StringRef funcName = PatchConstantFuncAttr->getFunctionName();
  auto Entry = patchConstantFunctionMap.find(funcName);
  if (Entry == patchConstantFunctionMap.end()) {
    clang::DiagnosticsEngine &Diags = CGM.getDiags();
    unsigned DiagID = Diags.getCustomDiagID(
        clang::DiagnosticsEngine::Error, "Cannot find patchconstantfunc %0.");
    Diags.Report(PatchConstantFuncAttr->getLocation(), DiagID) << funcName;
    return;
  }
  if (Entry->second.NumOverloads != 1) {
    clang::DiagnosticsEngine &Diags = CGM.getDiags();
    unsigned DiagID =
        Diags.getCustomDiagID(clang::DiagnosticsEngine::Warning,
                              "Multiple overloads of patchconstantfunc %0.");
    unsigned NoteID = Diags.getCustomDiagID(clang::DiagnosticsEngine::Note,
                                            "This overload was selected.");
    Diags.Report(PatchConstantFuncAttr->getLocation(), DiagID) << funcName;
    Diags.Report(Entry->second.SL, NoteID);
  }
  Function *patchConstFunc = Entry->second.Func;
  DXASSERT(
      HLM.HasDxilFunctionProps(EntryFunc.Func),
      " else AddHLSLFunctionInfo did not save the dxil function props for the "
      "HS entry.");
  DxilFunctionProps *HSProps = &HLM.GetDxilFunctionProps(EntryFunc.Func);
  HLM.SetPatchConstantFunctionForHS(EntryFunc.Func, patchConstFunc);
  DXASSERT_NOMSG(patchConstantFunctionPropsMap.count(patchConstFunc));
  // Check that the patch constant function has no inout parameter.
  DxilFunctionAnnotation *patchConstFuncAnnotation =
      HLM.GetFunctionAnnotation(patchConstFunc);
  for (unsigned i = 0; i < patchConstFuncAnnotation->GetNumParameters(); i++) {
    if (patchConstFuncAnnotation->GetParameterAnnotation(i)
            .GetParamInputQual() == DxilParamInputQual::Inout) {
      clang::DiagnosticsEngine &Diags = CGM.getDiags();
      unsigned DiagID = Diags.getCustomDiagID(
          clang::DiagnosticsEngine::Error,
          "Patch Constant function %0 should not have inout param.");
      Diags.Report(Entry->second.SL, DiagID) << funcName;
    }
  }
  // Input/output control point validation.
  if (patchConstantFunctionPropsMap.count(patchConstFunc)) {
    const DxilFunctionProps &patchProps =
        *patchConstantFunctionPropsMap[patchConstFunc];
    if (patchProps.ShaderProps.HS.inputControlPoints != 0 &&
        patchProps.ShaderProps.HS.inputControlPoints !=
            HSProps->ShaderProps.HS.inputControlPoints) {
      clang::DiagnosticsEngine &Diags = CGM.getDiags();
      unsigned DiagID =
          Diags.getCustomDiagID(clang::DiagnosticsEngine::Error,
                                "Patch constant function's input patch input "
                                "should have %0 elements, but has %1.");
      Diags.Report(Entry->second.SL, DiagID)
          << HSProps->ShaderProps.HS.inputControlPoints
          << patchProps.ShaderProps.HS.inputControlPoints;
    }
    if (patchProps.ShaderProps.HS.outputControlPoints != 0 &&
        patchProps.ShaderProps.HS.outputControlPoints !=
            HSProps->ShaderProps.HS.outputControlPoints) {
      clang::DiagnosticsEngine &Diags = CGM.getDiags();
      unsigned DiagID =
          Diags.getCustomDiagID(clang::DiagnosticsEngine::Error,
                                "Patch constant function's output patch input "
                                "should have %0 elements, but has %1.");
      Diags.Report(Entry->second.SL, DiagID)
          << HSProps->ShaderProps.HS.outputControlPoints
          << patchProps.ShaderProps.HS.outputControlPoints;
    }
  }
}

void SetPatchConstantFunction(
    const EntryFunctionInfo &EntryFunc,
    std::unordered_map<Function *, const clang::HLSLPatchConstantFuncAttr *>
        &HSEntryPatchConstantFuncAttr,
    StringMap<PatchConstantInfo> &patchConstantFunctionMap,
    std::unordered_map<Function *, std::unique_ptr<DxilFunctionProps>>
        &patchConstantFunctionPropsMap,
    HLModule &HLM, clang::CodeGen::CodeGenModule &CGM) {
  auto AttrsIter = HSEntryPatchConstantFuncAttr.find(EntryFunc.Func);
  DXASSERT(AttrsIter != HSEntryPatchConstantFuncAttr.end(),
           "we have checked this in AddHLSLFunctionInfo()");
  SetPatchConstantFunctionWithAttr(EntryFunc, AttrsIter->second,
                                   patchConstantFunctionMap,
                                   patchConstantFunctionPropsMap, HLM, CGM);
}
} // namespace

namespace {
// For a case like:
// cbuffer A {
//   float a;
//   int b;
// }
//
// const static struct {
//   float a;
//   int b;
// } ST = { a, b };
//
// replace users of ST with a and b.
bool ReplaceConstStaticGlobalUser(GEPOperator *GEP,
                                  std::vector<Constant *> &InitList,
                                  IRBuilder<> &Builder) {
  if (GEP->getNumIndices() < 2) {
    // Don't use sub element.
    return false;
  }
  SmallVector<Value *, 4> idxList;
  auto iter = GEP->idx_begin();
  idxList.emplace_back(*(iter++));
  ConstantInt *subIdx = dyn_cast<ConstantInt>(*(iter++));
  DXASSERT(subIdx, "else dynamic indexing on struct field");
  unsigned subIdxImm = subIdx->getLimitedValue();
  DXASSERT(subIdxImm < InitList.size(), "else struct index out of bounds");
  Constant *subPtr = InitList[subIdxImm];
  // Move every idx to idxList except the idx for InitList.
  while (iter != GEP->idx_end()) {
    idxList.emplace_back(*(iter++));
  }
  Value *NewGEP = Builder.CreateGEP(subPtr, idxList);
  GEP->replaceAllUsesWith(NewGEP);
  return true;
}
} // namespace

namespace CGHLSLMSHelper {
void ReplaceConstStaticGlobals(
    std::unordered_map<GlobalVariable *, std::vector<Constant *>>
        &staticConstGlobalInitListMap,
    std::unordered_map<GlobalVariable *, Function *>
        &staticConstGlobalCtorMap) {
  for (auto &iter : staticConstGlobalInitListMap) {
    GlobalVariable *GV = iter.first;
    std::vector<Constant *> &InitList = iter.second;
    LLVMContext &Ctx = GV->getContext();
    // Do the replace.
    bool bPass = true;
    for (User *U : GV->users()) {
      IRBuilder<> Builder(Ctx);
      if (GetElementPtrInst *GEPInst = dyn_cast<GetElementPtrInst>(U)) {
        Builder.SetInsertPoint(GEPInst);
        bPass &= ReplaceConstStaticGlobalUser(cast<GEPOperator>(GEPInst),
                                              InitList, Builder);
      } else if (GEPOperator *GEP = dyn_cast<GEPOperator>(U)) {
        bPass &= ReplaceConstStaticGlobalUser(GEP, InitList, Builder);
      } else {
        DXASSERT(false, "invalid user of const static global");
      }
    }
    // Clear the ctor, which is useless now.
    if (bPass) {
      Function *Ctor = staticConstGlobalCtorMap[GV];
      Ctor->getBasicBlockList().clear();
      BasicBlock *Entry = BasicBlock::Create(Ctx, "", Ctor);
      IRBuilder<> Builder(Entry);
      Builder.CreateRetVoid();
    }
  }
}
} // namespace CGHLSLMSHelper

namespace {
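// Emit the loads needed to read a value of FromTy through Ptr as a value of
// ToTy. Handles scalar-to-vec1 splat, int-to-bool vec1, vector truncation to
// vec1, array-to-vector, and bool zext casts; returns nullptr if the cast
// shape is not recognized.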
Value *CastLdValue(Value *Ptr, llvm::Type *FromTy, llvm::Type *ToTy,
                   IRBuilder<> &Builder) {
  if (ToTy->isVectorTy()) {
    unsigned vecSize = ToTy->getVectorNumElements();
    if (vecSize == 1 && ToTy->getVectorElementType() == FromTy) {
      Value *V = Builder.CreateLoad(Ptr);
      // ScalarToVec1Splat
      // Change scalar into vec1.
      Value *Vec1 = UndefValue::get(ToTy);
      return Builder.CreateInsertElement(Vec1, V, (uint64_t)0);
    } else if (vecSize == 1 && FromTy->isIntegerTy() &&
               ToTy->getVectorElementType()->isIntegerTy(1)) {
      // load(bitcast i32* to <1 x i1>*)
      // Rewrite to
      // insertelement(icmp ne (load i32*), 0)
      Value *IntV = Builder.CreateLoad(Ptr);
      Value *BoolV = Builder.CreateICmpNE(
          IntV, ConstantInt::get(IntV->getType(), 0), "tobool");
      Value *Vec1 = UndefValue::get(ToTy);
      return Builder.CreateInsertElement(Vec1, BoolV, (uint64_t)0);
    } else if (FromTy->isVectorTy() && vecSize == 1) {
      Value *V = Builder.CreateLoad(Ptr);
      // VectorTrunc
      // Change vector into vec1.
      int mask[] = {0};
      return Builder.CreateShuffleVector(V, V, mask);
    } else if (FromTy->isArrayTy()) {
      llvm::Type *FromEltTy = FromTy->getArrayElementType();
      llvm::Type *ToEltTy = ToTy->getVectorElementType();
      if (FromTy->getArrayNumElements() == vecSize && FromEltTy == ToEltTy) {
        // ArrayToVector.
        Value *NewLd = UndefValue::get(ToTy);
        Value *zeroIdx = Builder.getInt32(0);
        for (unsigned i = 0; i < vecSize; i++) {
          Value *GEP =
              Builder.CreateInBoundsGEP(Ptr, {zeroIdx, Builder.getInt32(i)});
          Value *Elt = Builder.CreateLoad(GEP);
          NewLd = Builder.CreateInsertElement(NewLd, Elt, i);
        }
        return NewLd;
      }
    }
  } else if (FromTy == Builder.getInt1Ty()) {
    Value *V = Builder.CreateLoad(Ptr);
    // BoolCast
    DXASSERT_NOMSG(ToTy->isIntegerTy());
    return Builder.CreateZExt(V, ToTy);
  }
  return nullptr;
}
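
// Store-side counterpart of CastLdValue: convert V from its ToTy view back
// to FromTy before it is stored through Ptr. Returns the converted value, or
// nullptr when no conversion applies (for array-to-vector the element stores
// are emitted directly here).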
Value *CastStValue(Value *Ptr, Value *V, llvm::Type *FromTy, llvm::Type *ToTy,
                   IRBuilder<> &Builder) {
  if (ToTy->isVectorTy()) {
    unsigned vecSize = ToTy->getVectorNumElements();
    if (vecSize == 1 && ToTy->getVectorElementType() == FromTy) {
      // ScalarToVec1Splat
      // Change vec1 back to scalar.
      Value *Elt = Builder.CreateExtractElement(V, (uint64_t)0);
      return Elt;
    } else if (FromTy->isVectorTy() && vecSize == 1) {
      // VectorTrunc
      // Change vec1 into vector.
      // Should not happen.
      // Reported error at Sema::ImpCastExprToType.
      DXASSERT_NOMSG(0);
    } else if (FromTy->isArrayTy()) {
      llvm::Type *FromEltTy = FromTy->getArrayElementType();
      llvm::Type *ToEltTy = ToTy->getVectorElementType();
      if (FromTy->getArrayNumElements() == vecSize && FromEltTy == ToEltTy) {
        // ArrayToVector.
        Value *zeroIdx = Builder.getInt32(0);
        for (unsigned i = 0; i < vecSize; i++) {
          Value *Elt = Builder.CreateExtractElement(V, i);
          Value *GEP =
              Builder.CreateInBoundsGEP(Ptr, {zeroIdx, Builder.getInt32(i)});
          Builder.CreateStore(Elt, GEP);
        }
        // The store is already done.
        // Return null to ignore use of the return value.
        return nullptr;
      }
    }
  } else if (FromTy == Builder.getInt1Ty()) {
    // BoolCast
    // Change i1 to ToTy.
    DXASSERT_NOMSG(ToTy->isIntegerTy());
    Value *CastV = Builder.CreateICmpNE(V, ConstantInt::get(V->getType(), 0));
    return CastV;
  }
  return nullptr;
}

bool SimplifyBitCastLoad(LoadInst *LI, llvm::Type *FromTy, llvm::Type *ToTy,
                         Value *Ptr) {
  IRBuilder<> Builder(LI);
  // Cast FromLd to ToTy.
  Value *CastV = CastLdValue(Ptr, FromTy, ToTy, Builder);
  if (CastV) {
    LI->replaceAllUsesWith(CastV);
    return true;
  } else {
    return false;
  }
}

bool SimplifyBitCastStore(StoreInst *SI, llvm::Type *FromTy, llvm::Type *ToTy,
                          Value *Ptr) {
  IRBuilder<> Builder(SI);
  Value *V = SI->getValueOperand();
  // Cast Val to FromTy.
  Value *CastV = CastStValue(Ptr, V, FromTy, ToTy, Builder);
  if (CastV) {
    Builder.CreateStore(CastV, Ptr);
    return true;
  } else {
    return false;
  }
}

bool SimplifyBitCastGEP(GEPOperator *GEP, llvm::Type *FromTy, llvm::Type *ToTy,
                        Value *Ptr) {
  if (ToTy->isVectorTy()) {
    unsigned vecSize = ToTy->getVectorNumElements();
    if (vecSize == 1 && ToTy->getVectorElementType() == FromTy) {
      // ScalarToVec1Splat
      GEP->replaceAllUsesWith(Ptr);
      return true;
    } else if (FromTy->isVectorTy() && vecSize == 1) {
      // VectorTrunc
      DXASSERT_NOMSG(
          !isa<llvm::VectorType>(GEP->getType()->getPointerElementType()));
      IRBuilder<> Builder(FromTy->getContext());
      if (Instruction *I = dyn_cast<Instruction>(GEP))
        Builder.SetInsertPoint(I);
      std::vector<Value *> idxList(GEP->idx_begin(), GEP->idx_end());
      Value *NewGEP = Builder.CreateInBoundsGEP(Ptr, idxList);
      GEP->replaceAllUsesWith(NewGEP);
      return true;
    } else if (FromTy->isArrayTy()) {
      llvm::Type *FromEltTy = FromTy->getArrayElementType();
      llvm::Type *ToEltTy = ToTy->getVectorElementType();
      if (FromTy->getArrayNumElements() == vecSize && FromEltTy == ToEltTy) {
        // ArrayToVector.
      }
    }
  } else if (FromTy == llvm::Type::getInt1Ty(FromTy->getContext())) {
    // BoolCast
  }
  return false;
}

typedef SmallPtrSet<Instruction *, 4> SmallInstSet;
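
// Fold a pointer bitcast away by rewriting its load/store/GEP users against
// the original pointer, descending through leading struct fields when the
// source is a struct. Dead users are collected in deadInsts; other user
// kinds (calls, further casts) are left untouched.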
void SimplifyBitCast(BitCastOperator *BC, SmallInstSet &deadInsts) {
  Value *Ptr = BC->getOperand(0);
  llvm::Type *FromTy = Ptr->getType();
  llvm::Type *ToTy = BC->getType();
  if (!FromTy->isPointerTy() || !ToTy->isPointerTy())
    return;
  FromTy = FromTy->getPointerElementType();
  ToTy = ToTy->getPointerElementType();
  // Take care of cases like %2 = bitcast %struct.T* %1 to <1 x float>*.
  bool GEPCreated = false;
  if (FromTy->isStructTy()) {
    IRBuilder<> Builder(FromTy->getContext());
    if (Instruction *I = dyn_cast<Instruction>(BC))
      Builder.SetInsertPoint(I);
    Value *zeroIdx = Builder.getInt32(0);
    unsigned nestLevel = 1;
    while (llvm::StructType *ST = dyn_cast<llvm::StructType>(FromTy)) {
      if (ST->getNumElements() == 0)
        break;
      FromTy = ST->getElementType(0);
      nestLevel++;
    }
    std::vector<Value *> idxList(nestLevel, zeroIdx);
    Ptr = Builder.CreateGEP(Ptr, idxList);
    GEPCreated = true;
  }
  for (User *U : BC->users()) {
    if (LoadInst *LI = dyn_cast<LoadInst>(U)) {
      if (SimplifyBitCastLoad(LI, FromTy, ToTy, Ptr)) {
        LI->dropAllReferences();
        deadInsts.insert(LI);
      }
    } else if (StoreInst *SI = dyn_cast<StoreInst>(U)) {
      if (SimplifyBitCastStore(SI, FromTy, ToTy, Ptr)) {
        SI->dropAllReferences();
        deadInsts.insert(SI);
      }
    } else if (GEPOperator *GEP = dyn_cast<GEPOperator>(U)) {
      if (SimplifyBitCastGEP(GEP, FromTy, ToTy, Ptr))
        if (Instruction *I = dyn_cast<Instruction>(GEP)) {
          I->dropAllReferences();
          deadInsts.insert(I);
        }
    } else if (dyn_cast<CallInst>(U)) {
      // Skip function call.
    } else if (dyn_cast<BitCastInst>(U)) {
      // Skip bitcast.
    } else if (dyn_cast<AddrSpaceCastInst>(U)) {
      // Skip addrspacecast.
    } else {
      DXASSERT(0, "not supported yet");
    }
  }
  // We created a GEP instruction but didn't end up consuming it, so delete it.
  if (GEPCreated && Ptr->use_empty()) {
    if (GetElementPtrInst *GEP = dyn_cast<GetElementPtrInst>(Ptr))
      GEP->eraseFromParent();
    else
      cast<Constant>(Ptr)->destroyConstant();
  }
}
typedef float(__cdecl *FloatUnaryEvalFuncType)(float);
typedef double(__cdecl *DoubleUnaryEvalFuncType)(double);
typedef APInt(__cdecl *IntBinaryEvalFuncType)(const APInt &, const APInt &);
typedef float(__cdecl *FloatBinaryEvalFuncType)(float, float);
typedef double(__cdecl *DoubleBinaryEvalFuncType)(double, double);
Value *EvalUnaryIntrinsic(ConstantFP *fpV, FloatUnaryEvalFuncType floatEvalFunc,
                          DoubleUnaryEvalFuncType doubleEvalFunc) {
  llvm::Type *Ty = fpV->getType();
  Value *Result = nullptr;
  if (Ty->isDoubleTy()) {
    double dV = fpV->getValueAPF().convertToDouble();
    Result = ConstantFP::get(Ty, doubleEvalFunc(dV));
  } else {
    DXASSERT_NOMSG(Ty->isFloatTy());
    float fV = fpV->getValueAPF().convertToFloat();
    Result = ConstantFP::get(Ty, floatEvalFunc(fV));
  }
  return Result;
}
Value *EvalBinaryIntrinsic(Constant *cV0, Constant *cV1,
                           FloatBinaryEvalFuncType floatEvalFunc,
                           DoubleBinaryEvalFuncType doubleEvalFunc,
                           IntBinaryEvalFuncType intEvalFunc) {
  llvm::Type *Ty = cV0->getType();
  Value *Result = nullptr;
  if (Ty->isDoubleTy()) {
    ConstantFP *fpV0 = cast<ConstantFP>(cV0);
    ConstantFP *fpV1 = cast<ConstantFP>(cV1);
    double dV0 = fpV0->getValueAPF().convertToDouble();
    double dV1 = fpV1->getValueAPF().convertToDouble();
    Result = ConstantFP::get(Ty, doubleEvalFunc(dV0, dV1));
  } else if (Ty->isFloatTy()) {
    ConstantFP *fpV0 = cast<ConstantFP>(cV0);
    ConstantFP *fpV1 = cast<ConstantFP>(cV1);
    float fV0 = fpV0->getValueAPF().convertToFloat();
    float fV1 = fpV1->getValueAPF().convertToFloat();
    Result = ConstantFP::get(Ty, floatEvalFunc(fV0, fV1));
  } else {
    DXASSERT_NOMSG(Ty->isIntegerTy());
    DXASSERT_NOMSG(intEvalFunc);
    ConstantInt *ciV0 = cast<ConstantInt>(cV0);
    ConstantInt *ciV1 = cast<ConstantInt>(cV1);
    const APInt &iV0 = ciV0->getValue();
    const APInt &iV1 = ciV1->getValue();
    Result = ConstantInt::get(Ty, intEvalFunc(iV0, iV1));
  }
  return Result;
}
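// CallInst overloads: fold an intrinsic call whose arguments are constants.
// Vector arguments are folded element by element and reassembled with
// insertelement; the original call is then replaced and erased.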
Value *EvalUnaryIntrinsic(CallInst *CI, FloatUnaryEvalFuncType floatEvalFunc,
                          DoubleUnaryEvalFuncType doubleEvalFunc) {
  Value *V = CI->getArgOperand(0);
  llvm::Type *Ty = CI->getType();
  Value *Result = nullptr;
  if (llvm::VectorType *VT = dyn_cast<llvm::VectorType>(Ty)) {
    Result = UndefValue::get(Ty);
    Constant *CV = cast<Constant>(V);
    IRBuilder<> Builder(CI);
    for (unsigned i = 0; i < VT->getNumElements(); i++) {
      ConstantFP *fpV = cast<ConstantFP>(CV->getAggregateElement(i));
      Value *EltResult = EvalUnaryIntrinsic(fpV, floatEvalFunc, doubleEvalFunc);
      Result = Builder.CreateInsertElement(Result, EltResult, i);
    }
  } else {
    ConstantFP *fpV = cast<ConstantFP>(V);
    Result = EvalUnaryIntrinsic(fpV, floatEvalFunc, doubleEvalFunc);
  }
  CI->replaceAllUsesWith(Result);
  CI->eraseFromParent();
  return Result;
}
Value *EvalBinaryIntrinsic(CallInst *CI, FloatBinaryEvalFuncType floatEvalFunc,
                           DoubleBinaryEvalFuncType doubleEvalFunc,
                           IntBinaryEvalFuncType intEvalFunc = nullptr) {
  Value *V0 = CI->getArgOperand(0);
  Value *V1 = CI->getArgOperand(1);
  llvm::Type *Ty = CI->getType();
  Value *Result = nullptr;
  if (llvm::VectorType *VT = dyn_cast<llvm::VectorType>(Ty)) {
    Result = UndefValue::get(Ty);
    Constant *CV0 = cast<Constant>(V0);
    Constant *CV1 = cast<Constant>(V1);
    IRBuilder<> Builder(CI);
    for (unsigned i = 0; i < VT->getNumElements(); i++) {
      Constant *cV0 = cast<Constant>(CV0->getAggregateElement(i));
      Constant *cV1 = cast<Constant>(CV1->getAggregateElement(i));
      Value *EltResult = EvalBinaryIntrinsic(cV0, cV1, floatEvalFunc,
                                             doubleEvalFunc, intEvalFunc);
      Result = Builder.CreateInsertElement(Result, EltResult, i);
    }
  } else {
    Constant *cV0 = cast<Constant>(V0);
    Constant *cV1 = cast<Constant>(V1);
    Result = EvalBinaryIntrinsic(cV0, cV1, floatEvalFunc, doubleEvalFunc,
                                 intEvalFunc);
  }
  CI->replaceAllUsesWith(Result);
  CI->eraseFromParent();
  return Result;
}
void SimpleTransformForHLDXIRInst(Instruction *I, SmallInstSet &deadInsts) {
  unsigned opcode = I->getOpcode();
  switch (opcode) {
  case Instruction::BitCast: {
    BitCastOperator *BCI = cast<BitCastOperator>(I);
    SimplifyBitCast(BCI, deadInsts);
  } break;
  case Instruction::Load: {
    LoadInst *ldInst = cast<LoadInst>(I);
    DXASSERT(!HLMatrixType::isa(ldInst->getType()),
             "matrix load should use HL LdStMatrix");
    Value *Ptr = ldInst->getPointerOperand();
    if (ConstantExpr *CE = dyn_cast_or_null<ConstantExpr>(Ptr)) {
      if (BitCastOperator *BCO = dyn_cast<BitCastOperator>(CE)) {
        SimplifyBitCast(BCO, deadInsts);
      }
    }
  } break;
  case Instruction::Store: {
    StoreInst *stInst = cast<StoreInst>(I);
    Value *V = stInst->getValueOperand();
    DXASSERT_LOCALVAR(V, !HLMatrixType::isa(V->getType()),
                      "matrix store should use HL LdStMatrix");
    Value *Ptr = stInst->getPointerOperand();
    if (ConstantExpr *CE = dyn_cast<ConstantExpr>(Ptr)) {
      if (BitCastOperator *BCO = dyn_cast<BitCastOperator>(CE)) {
        SimplifyBitCast(BCO, deadInsts);
      }
    }
  } break;
  case Instruction::LShr:
  case Instruction::AShr:
  case Instruction::Shl: {
    llvm::BinaryOperator *BO = cast<llvm::BinaryOperator>(I);
    Value *op2 = BO->getOperand(1);
    IntegerType *Ty = cast<IntegerType>(BO->getType()->getScalarType());
    unsigned bitWidth = Ty->getBitWidth();
    // Clamp op2 to 0 ~ bitWidth-1.
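    // e.g. for a 32-bit operand, "x << 33" folds to "x << 1"; bitWidth is a
    // power of two here, so the mask (bitWidth - 1) implements the clamp.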
    if (ConstantInt *cOp2 = dyn_cast<ConstantInt>(op2)) {
      unsigned iOp2 = cOp2->getLimitedValue();
      unsigned clampedOp2 = iOp2 & (bitWidth - 1);
      if (iOp2 != clampedOp2) {
        BO->setOperand(1, ConstantInt::get(op2->getType(), clampedOp2));
      }
    } else {
      Value *mask = ConstantInt::get(op2->getType(), bitWidth - 1);
      IRBuilder<> Builder(I);
      op2 = Builder.CreateAnd(op2, mask);
      BO->setOperand(1, op2);
    }
  } break;
  }
}
} // namespace
namespace CGHLSLMSHelper {
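// Try to constant-fold an HLSL intrinsic call whose arguments are known
// constants. On success the call is replaced and erased and the folded value
// is returned; unhandled intrinsics return nullptr.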
Value *TryEvalIntrinsic(CallInst *CI, IntrinsicOp intriOp,
                        unsigned hlslVersion) {
  switch (intriOp) {
  case IntrinsicOp::IOP_tan: {
    return EvalUnaryIntrinsic(CI, tanf, tan);
  } break;
  case IntrinsicOp::IOP_tanh: {
    return EvalUnaryIntrinsic(CI, tanhf, tanh);
  } break;
  case IntrinsicOp::IOP_sin: {
    return EvalUnaryIntrinsic(CI, sinf, sin);
  } break;
  case IntrinsicOp::IOP_sinh: {
    return EvalUnaryIntrinsic(CI, sinhf, sinh);
  } break;
  case IntrinsicOp::IOP_cos: {
    return EvalUnaryIntrinsic(CI, cosf, cos);
  } break;
  case IntrinsicOp::IOP_cosh: {
    return EvalUnaryIntrinsic(CI, coshf, cosh);
  } break;
  case IntrinsicOp::IOP_asin: {
    return EvalUnaryIntrinsic(CI, asinf, asin);
  } break;
  case IntrinsicOp::IOP_acos: {
    return EvalUnaryIntrinsic(CI, acosf, acos);
  } break;
  case IntrinsicOp::IOP_atan: {
    return EvalUnaryIntrinsic(CI, atanf, atan);
  } break;
  case IntrinsicOp::IOP_atan2: {
    Value *V0 = CI->getArgOperand(0);
    ConstantFP *fpV0 = cast<ConstantFP>(V0);
    Value *V1 = CI->getArgOperand(1);
    ConstantFP *fpV1 = cast<ConstantFP>(V1);
    llvm::Type *Ty = CI->getType();
    Value *Result = nullptr;
    if (Ty->isDoubleTy()) {
      double dV0 = fpV0->getValueAPF().convertToDouble();
      double dV1 = fpV1->getValueAPF().convertToDouble();
      Value *atanV = ConstantFP::get(CI->getType(), atan2(dV0, dV1));
      CI->replaceAllUsesWith(atanV);
      Result = atanV;
    } else {
      DXASSERT_NOMSG(Ty->isFloatTy());
      float fV0 = fpV0->getValueAPF().convertToFloat();
      float fV1 = fpV1->getValueAPF().convertToFloat();
      Value *atanV = ConstantFP::get(CI->getType(), atan2f(fV0, fV1));
      CI->replaceAllUsesWith(atanV);
      Result = atanV;
    }
    CI->eraseFromParent();
    return Result;
  } break;
  case IntrinsicOp::IOP_sqrt: {
    return EvalUnaryIntrinsic(CI, sqrtf, sqrt);
  } break;
  case IntrinsicOp::IOP_rsqrt: {
    auto rsqrtF = [](float v) -> float { return 1.0f / sqrtf(v); };
    auto rsqrtD = [](double v) -> double { return 1.0 / sqrt(v); };
    return EvalUnaryIntrinsic(CI, rsqrtF, rsqrtD);
  } break;
  case IntrinsicOp::IOP_exp: {
    return EvalUnaryIntrinsic(CI, expf, exp);
  } break;
  case IntrinsicOp::IOP_exp2: {
    return EvalUnaryIntrinsic(CI, exp2f, exp2);
  } break;
  case IntrinsicOp::IOP_log: {
    return EvalUnaryIntrinsic(CI, logf, log);
  } break;
  case IntrinsicOp::IOP_log10: {
    return EvalUnaryIntrinsic(CI, log10f, log10);
  } break;
  case IntrinsicOp::IOP_log2: {
    return EvalUnaryIntrinsic(CI, log2f, log2);
  } break;
  case IntrinsicOp::IOP_pow: {
    return EvalBinaryIntrinsic(CI, powf, pow);
  } break;
  case IntrinsicOp::IOP_max: {
    auto maxF = [](float a, float b) -> float { return a > b ? a : b; };
    auto maxD = [](double a, double b) -> double { return a > b ? a : b; };
    auto imaxI = [](const APInt &a, const APInt &b) -> APInt {
      return a.sgt(b) ? a : b;
    };
    return EvalBinaryIntrinsic(CI, maxF, maxD, imaxI);
  } break;
  case IntrinsicOp::IOP_min: {
    auto minF = [](float a, float b) -> float { return a < b ? a : b; };
    auto minD = [](double a, double b) -> double { return a < b ? a : b; };
    auto iminI = [](const APInt &a, const APInt &b) -> APInt {
      return a.slt(b) ? a : b;
    };
    return EvalBinaryIntrinsic(CI, minF, minD, iminI);
  } break;
  case IntrinsicOp::IOP_umax: {
    DXASSERT_NOMSG(
        CI->getArgOperand(0)->getType()->getScalarType()->isIntegerTy());
    auto umaxI = [](const APInt &a, const APInt &b) -> APInt {
      return a.ugt(b) ? a : b;
    };
    return EvalBinaryIntrinsic(CI, nullptr, nullptr, umaxI);
  } break;
  case IntrinsicOp::IOP_umin: {
    DXASSERT_NOMSG(
        CI->getArgOperand(0)->getType()->getScalarType()->isIntegerTy());
    auto uminI = [](const APInt &a, const APInt &b) -> APInt {
      return a.ult(b) ? a : b;
    };
    return EvalBinaryIntrinsic(CI, nullptr, nullptr, uminI);
  } break;
  case IntrinsicOp::IOP_rcp: {
    auto rcpF = [](float v) -> float { return 1.0f / v; };
    auto rcpD = [](double v) -> double { return 1.0 / v; };
    return EvalUnaryIntrinsic(CI, rcpF, rcpD);
  } break;
  case IntrinsicOp::IOP_ceil: {
    return EvalUnaryIntrinsic(CI, ceilf, ceil);
  } break;
  case IntrinsicOp::IOP_floor: {
    return EvalUnaryIntrinsic(CI, floorf, floor);
  } break;
  case IntrinsicOp::IOP_round: {
    // The round intrinsic can behave differently for constant and runtime
    // evaluation. E.g., for round(0.5): constant evaluation yields 1
    // (round-half-away-from-zero), while runtime evaluation yields 0
    // (round-half-to-nearest-even).
    //
    // For back compat, DXC preserves the old behavior for language versions
    // 2016 and below. For newer language versions, DXC always uses
    // round-half-to-nearest-even for the round() intrinsic.
    if (hlslVersion <= 2016) {
      return EvalUnaryIntrinsic(CI, roundf, round);
    } else {
      auto roundingMode = fegetround();
      fesetround(FE_TONEAREST);
      Value *result = EvalUnaryIntrinsic(CI, nearbyintf, nearbyint);
      fesetround(roundingMode);
      return result;
    }
  } break;
  case IntrinsicOp::IOP_trunc: {
    return EvalUnaryIntrinsic(CI, truncf, trunc);
  } break;
  case IntrinsicOp::IOP_frac: {
    auto fracF = [](float v) -> float { return v - floorf(v); };
    auto fracD = [](double v) -> double { return v - floor(v); };
    return EvalUnaryIntrinsic(CI, fracF, fracD);
  } break;
  case IntrinsicOp::IOP_isnan: {
    Value *V = CI->getArgOperand(0);
    ConstantFP *fV = cast<ConstantFP>(V);
    bool isNan = fV->getValueAPF().isNaN();
    Constant *cNan = ConstantInt::get(CI->getType(), isNan ? 1 : 0);
    CI->replaceAllUsesWith(cNan);
    CI->eraseFromParent();
    return cNan;
  } break;
  default:
    return nullptr;
  }
}
// Do simple transforms to make the later lowering passes easier.
void SimpleTransformForHLDXIR(llvm::Module *pM) {
  SmallInstSet deadInsts;
  for (Function &F : pM->functions()) {
    for (BasicBlock &BB : F.getBasicBlockList()) {
      for (BasicBlock::iterator Iter = BB.begin(); Iter != BB.end();) {
        Instruction *I = (Iter++);
        if (deadInsts.count(I))
          continue; // Skip dead instructions.
        SimpleTransformForHLDXIRInst(I, deadInsts);
      }
    }
  }
  for (Instruction *I : deadInsts)
    I->dropAllReferences();
  for (Instruction *I : deadInsts)
    I->eraseFromParent();
  deadInsts.clear();
  for (GlobalVariable &GV : pM->globals()) {
    if (dxilutil::IsStaticGlobal(&GV)) {
      for (User *U : GV.users()) {
        if (BitCastOperator *BCO = dyn_cast<BitCastOperator>(U)) {
          SimplifyBitCast(BCO, deadInsts);
        }
      }
    }
  }
  for (Instruction *I : deadInsts)
    I->dropAllReferences();
  for (Instruction *I : deadInsts)
    I->eraseFromParent();
}
} // namespace CGHLSLMSHelper
namespace {
unsigned RoundToAlign(unsigned num, unsigned mod) {
  // Round num up to the next multiple of mod.
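  // e.g. RoundToAlign(13, 4) == 16 and RoundToAlign(16, 4) == 16.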
  if (mod != 0)
    return mod * ((num + mod - 1) / mod);
  return num;
}
// Retrieve the last scalar or vector element type.
// This has to be recursive for the nasty empty struct case.
// Returns true if found, false if we must backtrack.
bool RetrieveLastElementType(Type *Ty, Type *&EltTy) {
  if (Ty->isStructTy()) {
    if (Ty->getStructNumElements() == 0)
      return false;
    for (unsigned i = Ty->getStructNumElements(); i > 0; --i) {
      if (RetrieveLastElementType(Ty->getStructElementType(i - 1), EltTy))
        return true;
    }
  } else if (Ty->isArrayTy()) {
    if (RetrieveLastElementType(Ty->getArrayElementType(), EltTy))
      return true;
  } else if (Ty->isVectorTy() || Ty->isSingleValueType()) {
    EltTy = Ty->getScalarType();
    return true;
  }
  return false;
}
// `size` here is the constant's allocation size within the CB.
// The offset still needs to be aligned based on the type since this
// is the legacy cbuffer global path.
unsigned AlignCBufferOffset(unsigned offset, unsigned size, llvm::Type *Ty,
                            bool bRowMajor, bool bMinPrecMode,
                            bool &bCurRowIsMinPrec) {
  DXASSERT(!(offset & 1), "otherwise we have an invalid offset.");
  bool bNeedNewRow = Ty->isArrayTy();
  // In min-precision mode, a new row is needed when going into or out of a
  // min-precision component type.
  if (!bNeedNewRow) {
    bool bMinPrec = false;
    if (Ty->isStructTy()) {
      if (HLMatrixType mat = HLMatrixType::dyn_cast(Ty)) {
        bNeedNewRow |= !bRowMajor && mat.getNumColumns() > 1;
        bNeedNewRow |= bRowMajor && mat.getNumRows() > 1;
        bMinPrec = bMinPrecMode &&
                   mat.getElementType(false)->getScalarSizeInBits() < 32;
      } else {
        bNeedNewRow = true;
        if (bMinPrecMode) {
          // Need the min-precision status of the last element of the
          // structure, in case we pack something else into the end.
          Type *EltTy = nullptr;
          if (RetrieveLastElementType(Ty, EltTy))
            bCurRowIsMinPrec = EltTy->getScalarSizeInBits() < 32;
        }
      }
    } else {
      DXASSERT_NOMSG(Ty->isVectorTy() || Ty->isSingleValueType());
      // Vector or scalar.
      bMinPrec = bMinPrecMode && Ty->getScalarSizeInBits() < 32;
    }
    if (bMinPrecMode) {
      bNeedNewRow |= bCurRowIsMinPrec != bMinPrec;
      bCurRowIsMinPrec = bMinPrec;
    }
  }
  unsigned scalarSizeInBytes = Ty->getScalarSizeInBits() / 8;
  return AlignBufferOffsetInLegacy(offset, size, scalarSizeInBytes,
                                   bNeedNewRow);
}
unsigned
AllocateDxilConstantBuffer(HLCBuffer &CB,
                           std::unordered_map<Constant *, DxilFieldAnnotation>
                               &constVarAnnotationMap,
                           bool bMinPrecMode) {
  unsigned offset = 0;
  // Scan user-allocated constants first and update the offset so automatic
  // allocation starts after them.
  for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
    if (C->GetLowerBound() == UINT_MAX)
      continue;
    unsigned size = C->GetRangeSize();
    unsigned nextOffset = size + C->GetLowerBound();
    if (offset < nextOffset)
      offset = nextOffset;
  }
  // Allocate the remaining constants after the user-allocated ones.
  bool bCurRowIsMinPrec = false;
  for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
    if (C->GetLowerBound() != UINT_MAX)
      continue;
    unsigned size = C->GetRangeSize();
    llvm::Type *Ty = C->GetGlobalSymbol()->getType()->getPointerElementType();
    auto fieldAnnotation = constVarAnnotationMap.at(C->GetGlobalSymbol());
    bool bRowMajor = HLMatrixType::isa(Ty)
                         ? fieldAnnotation.GetMatrixAnnotation().Orientation ==
                               MatrixOrientation::RowMajor
                         : false;
    // Align offset.
    offset = AlignCBufferOffset(offset, size, Ty, bRowMajor, bMinPrecMode,
                                bCurRowIsMinPrec);
    C->SetLowerBound(offset);
    offset += size;
  }
  return offset;
}
void AllocateDxilConstantBuffers(
    HLModule &HLM, std::unordered_map<Constant *, DxilFieldAnnotation>
                       &constVarAnnotationMap) {
  for (unsigned i = 0; i < HLM.GetCBuffers().size(); i++) {
    HLCBuffer &CB = *static_cast<HLCBuffer *>(&(HLM.GetCBuffer(i)));
    unsigned size = AllocateDxilConstantBuffer(
        CB, constVarAnnotationMap, HLM.GetHLOptions().bUseMinPrecision);
    CB.SetSize(size);
  }
}
} // namespace
namespace {
void ReplaceUseInFunction(Value *V, Value *NewV, Function *F,
                          IRBuilder<> &Builder) {
  for (auto U = V->user_begin(); U != V->user_end();) {
    User *user = *(U++);
    if (Instruction *I = dyn_cast<Instruction>(user)) {
      // Replace the use directly when the instruction is inside F.
      if (I->getParent()->getParent() == F) {
        if (BitCastInst *BCI = dyn_cast<BitCastInst>(I)) {
          if (BCI->getType() == NewV->getType()) {
            I->replaceAllUsesWith(NewV);
            I->eraseFromParent();
            continue;
          }
        }
        I->replaceUsesOfWith(V, NewV);
      }
    } else {
      // For constant operators, create a local clone that uses NewV.
      // Only GEP, global variable initializers, and bitcast are supported.
      if (GEPOperator *GEPOp = dyn_cast<GEPOperator>(user)) {
        std::vector<Value *> idxList(GEPOp->idx_begin(), GEPOp->idx_end());
        Value *NewGEP = Builder.CreateInBoundsGEP(NewV, idxList);
        ReplaceUseInFunction(GEPOp, NewGEP, F, Builder);
      } else if (GlobalVariable *GV = dyn_cast<GlobalVariable>(user)) {
        // Turn the initializer into a store of NewV.
        GV->setInitializer(nullptr);
        Builder.CreateStore(NewV, GV);
      } else {
        // Must be a bitcast here.
        BitCastOperator *BC = cast<BitCastOperator>(user);
        Value *NewBC = Builder.CreateBitCast(NewV, BC->getType());
        ReplaceUseInFunction(BC, NewBC, F, Builder);
      }
    }
  }
}
void MarkUsedFunctionForConst(Value *V,
                              std::unordered_set<Function *> &usedFunc) {
  for (auto U = V->user_begin(); U != V->user_end();) {
    User *user = *(U++);
    if (Instruction *I = dyn_cast<Instruction>(user)) {
      Function *F = I->getParent()->getParent();
      usedFunc.insert(F);
    } else {
      // For constant operators, recurse into their users.
      // Only GEP, global variable, and bitcast are expected.
      if (GEPOperator *GEPOp = dyn_cast<GEPOperator>(user)) {
        MarkUsedFunctionForConst(GEPOp, usedFunc);
      } else if (GlobalVariable *GV = dyn_cast<GlobalVariable>(user)) {
        MarkUsedFunctionForConst(GV, usedFunc);
      } else {
        // Must be a bitcast here.
        BitCastOperator *BC = cast<BitCastOperator>(user);
        MarkUsedFunctionForConst(BC, usedFunc);
      }
    }
  }
}
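// Create the cbuffer global plus, in every function that uses one of its
// constants, an HLCreateHandle/HLSubscript pair, then redirect constant uses
// through GEPs off the subscript. Returns false if no constant in CB is used.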
bool CreateCBufferVariable(HLCBuffer &CB, HLModule &HLM, llvm::Type *HandleTy) {
  bool bUsed = false;
  // Build the struct type for the CBuffer.
  SmallVector<llvm::Type *, 4> Elements;
  for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
    Value *GV = C->GetGlobalSymbol();
    if (!GV->use_empty())
      bUsed = true;
    // A global variable must have pointer type.
    llvm::Type *Ty = GV->getType()->getPointerElementType();
    Elements.emplace_back(Ty);
  }
  // Don't create a CBuffer variable for an unused cbuffer.
  if (!bUsed)
    return false;
  llvm::Module &M = *HLM.GetModule();
  bool isCBArray = CB.IsArray();
  llvm::GlobalVariable *cbGV = nullptr;
  llvm::Type *cbTy = nullptr;
  unsigned cbIndexDepth = 0;
  if (!isCBArray) {
    if (CB.IsView()) {
      llvm::StructType *CBStructTy =
          llvm::StructType::create(CB.GetResultType(), CB.GetGlobalName());
      cbGV = new llvm::GlobalVariable(M, CBStructTy,
                                      /*IsConstant*/ true,
                                      llvm::GlobalValue::ExternalLinkage,
                                      /*InitVal*/ nullptr, CB.GetGlobalName());
      cbTy = cbGV->getType();
    } else {
      llvm::StructType *CBStructTy =
          llvm::StructType::create(Elements, CB.GetGlobalName());
      cbGV = new llvm::GlobalVariable(M, CBStructTy, /*IsConstant*/ true,
                                      llvm::GlobalValue::ExternalLinkage,
                                      /*InitVal*/ nullptr, CB.GetGlobalName());
      cbTy = cbGV->getType();
    }
  } else {
    // For an array of ConstantBuffer, create an array of struct instead of a
    // struct of array.
    DXASSERT(CB.GetConstants().size() == 1,
             "ConstantBuffer should have 1 constant");
    Value *GV = CB.GetConstants()[0]->GetGlobalSymbol();
    llvm::Type *CBEltTy =
        GV->getType()->getPointerElementType()->getArrayElementType();
    cbIndexDepth = 1;
    while (CBEltTy->isArrayTy()) {
      CBEltTy = CBEltTy->getArrayElementType();
      cbIndexDepth++;
    }
    // Add one level of struct type to match the normal case.
    llvm::StructType *CBStructTy =
        llvm::StructType::create({CB.GetResultType()}, CB.GetGlobalName());
    llvm::ArrayType *CBArrayTy =
        llvm::ArrayType::get(CBStructTy, CB.GetRangeSize());
    cbGV = new llvm::GlobalVariable(M, CBArrayTy, /*IsConstant*/ true,
                                    llvm::GlobalValue::ExternalLinkage,
                                    /*InitVal*/ nullptr, CB.GetGlobalName());
    cbTy = llvm::PointerType::get(CBStructTy,
                                  cbGV->getType()->getPointerAddressSpace());
  }
  CB.SetGlobalSymbol(cbGV);
  llvm::Type *opcodeTy = llvm::Type::getInt32Ty(M.getContext());
  llvm::Type *idxTy = opcodeTy;
  Constant *zeroIdx = ConstantInt::get(opcodeTy, 0);
  Value *HandleArgs[] = {cbGV, zeroIdx};
  llvm::FunctionType *SubscriptFuncTy =
      llvm::FunctionType::get(cbTy, {opcodeTy, HandleTy, idxTy}, false);
  Function *subscriptFunc =
      GetOrCreateHLFunction(M, SubscriptFuncTy, HLOpcodeGroup::HLSubscript,
                            (unsigned)HLSubscriptOpcode::CBufferSubscript);
  Constant *opArg =
      ConstantInt::get(opcodeTy, (unsigned)HLSubscriptOpcode::CBufferSubscript);
  Value *args[] = {opArg, nullptr, zeroIdx};
  llvm::LLVMContext &Context = M.getContext();
  llvm::Type *i32Ty = llvm::Type::getInt32Ty(Context);
  Value *zero = ConstantInt::get(i32Ty, (uint64_t)0);
  std::vector<Value *> indexArray(CB.GetConstants().size());
  std::vector<std::unordered_set<Function *>> constUsedFuncList(
      CB.GetConstants().size());
  for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
    Value *idx = ConstantInt::get(i32Ty, C->GetID());
    indexArray[C->GetID()] = idx;
    Value *GV = C->GetGlobalSymbol();
    MarkUsedFunctionForConst(GV, constUsedFuncList[C->GetID()]);
  }
  for (Function &F : M.functions()) {
    if (F.isDeclaration())
      continue;
    if (GetHLOpcodeGroupByName(&F) != HLOpcodeGroup::NotHL)
      continue;
    IRBuilder<> Builder(F.getEntryBlock().getFirstInsertionPt());
    // Create the HL subscript so that every use of the cbuffer in F starts
    // from it.
    HandleArgs[HLOperandIndex::kCreateHandleResourceOpIdx - 1] = cbGV;
    CallInst *Handle = HLM.EmitHLOperationCall(
        Builder, HLOpcodeGroup::HLCreateHandle, 0, HandleTy, HandleArgs, M);
    args[HLOperandIndex::kSubscriptObjectOpIdx] = Handle;
    Instruction *cbSubscript =
        cast<Instruction>(Builder.CreateCall(subscriptFunc, {args}));
    // Replace each constant var with a GEP off the subscript.
    for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
      Value *GV = C->GetGlobalSymbol();
      if (constUsedFuncList[C->GetID()].count(&F) == 0)
        continue;
      Value *idx = indexArray[C->GetID()];
      if (!isCBArray) {
        Instruction *GEP = cast<Instruction>(
            Builder.CreateInBoundsGEP(cbSubscript, {zero, idx}));
        // TODO: make sure the debug info is synced to GEP.
        // GEP->setDebugLoc(GV);
        ReplaceUseInFunction(GV, GEP, &F, Builder);
        // Delete if no use in F.
        if (GEP->user_empty())
          GEP->eraseFromParent();
      } else {
        for (auto U = GV->user_begin(); U != GV->user_end();) {
          User *user = *(U++);
          if (user->user_empty())
            continue;
          Instruction *I = dyn_cast<Instruction>(user);
          if (I && I->getParent()->getParent() != &F)
            continue;
          IRBuilder<> *instBuilder = &Builder;
          std::unique_ptr<IRBuilder<>> B;
          if (I) {
            B = llvm::make_unique<IRBuilder<>>(I);
            instBuilder = B.get();
          }
          GEPOperator *GEPOp = cast<GEPOperator>(user);
          std::vector<Value *> idxList;
          DXASSERT(GEPOp->getNumIndices() >= 1 + cbIndexDepth,
                   "must be indexing into the ConstantBuffer array");
          idxList.reserve(GEPOp->getNumIndices() - (cbIndexDepth - 1));
          gep_type_iterator GI = gep_type_begin(*GEPOp),
                            E = gep_type_end(*GEPOp);
          idxList.push_back(GI.getOperand());
          // Replace the array index with 0 for the added struct level.
          idxList.push_back(zero);
          GI++;
          Value *arrayIdx = GI.getOperand();
          GI++;
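          // Flatten the multi-dimensional array index row-major, e.g. for a
          // hypothetical ConstantBuffer<T> CB[2][3], index [i][j] becomes
          // i*3 + j.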
          for (unsigned curIndex = 1; GI != E && curIndex < cbIndexDepth;
               ++GI, ++curIndex) {
            arrayIdx = instBuilder->CreateMul(
                arrayIdx, Builder.getInt32(GI->getArrayNumElements()));
            arrayIdx = instBuilder->CreateAdd(arrayIdx, GI.getOperand());
          }
          for (; GI != E; ++GI) {
            idxList.push_back(GI.getOperand());
          }
          HandleArgs[HLOperandIndex::kCreateHandleIndexOpIdx - 1] = arrayIdx;
          CallInst *Handle = HLM.EmitHLOperationCall(
              *instBuilder, HLOpcodeGroup::HLCreateHandle, 0, HandleTy,
              HandleArgs, M);
          args[HLOperandIndex::kSubscriptObjectOpIdx] = Handle;
          args[HLOperandIndex::kSubscriptIndexOpIdx] = arrayIdx;
          Instruction *cbSubscript =
              cast<Instruction>(instBuilder->CreateCall(subscriptFunc, {args}));
          Instruction *NewGEP = cast<Instruction>(
              instBuilder->CreateInBoundsGEP(cbSubscript, idxList));
          ReplaceUseInFunction(GEPOp, NewGEP, &F, *instBuilder);
        }
      }
    }
    // Delete if no use in F.
    if (cbSubscript->user_empty()) {
      cbSubscript->eraseFromParent();
      Handle->eraseFromParent();
    } else {
      // Merge GEP uses for cbSubscript.
      HLModule::MergeGepUse(cbSubscript);
    }
  }
  return true;
}
void ConstructCBufferAnnotation(
    HLCBuffer &CB, DxilTypeSystem &dxilTypeSys,
    std::unordered_map<Constant *, DxilFieldAnnotation> &AnnotationMap) {
  Value *GV = CB.GetGlobalSymbol();
  llvm::StructType *CBStructTy =
      dyn_cast<llvm::StructType>(GV->getType()->getPointerElementType());
  if (!CBStructTy) {
    // For an array of ConstantBuffer.
    llvm::ArrayType *CBArrayTy =
        cast<llvm::ArrayType>(GV->getType()->getPointerElementType());
    CBStructTy = cast<llvm::StructType>(CBArrayTy->getArrayElementType());
  }
  DxilStructAnnotation *CBAnnotation =
      dxilTypeSys.AddStructAnnotation(CBStructTy);
  CBAnnotation->SetCBufferSize(CB.GetSize());
  // Set the field annotation for each constant var.
  for (const std::unique_ptr<DxilResourceBase> &C : CB.GetConstants()) {
    Constant *GV = C->GetGlobalSymbol();
    DxilFieldAnnotation &fieldAnnotation =
        CBAnnotation->GetFieldAnnotation(C->GetID());
    fieldAnnotation = AnnotationMap[GV];
    // This is after CBuffer allocation.
    fieldAnnotation.SetCBufferOffset(C->GetLowerBound());
    fieldAnnotation.SetFieldName(C->GetGlobalName());
  }
}
void ConstructCBuffer(
    HLModule &HLM, llvm::Type *CBufferType,
    std::unordered_map<Constant *, DxilFieldAnnotation> &AnnotationMap) {
  DxilTypeSystem &dxilTypeSys = HLM.GetTypeSystem();
  llvm::Type *HandleTy = HLM.GetOP()->GetHandleType();
  for (unsigned i = 0; i < HLM.GetCBuffers().size(); i++) {
    HLCBuffer &CB = *static_cast<HLCBuffer *>(&(HLM.GetCBuffer(i)));
    if (CB.GetConstants().size() == 0) {
      // Create a fake variable for the empty cbuffer.
      llvm::GlobalVariable *pGV = new llvm::GlobalVariable(
          *HLM.GetModule(), CBufferType, true,
          llvm::GlobalValue::ExternalLinkage, nullptr, CB.GetGlobalName());
      CB.SetGlobalSymbol(pGV);
    } else {
      bool bCreated = CreateCBufferVariable(CB, HLM, HandleTy);
      if (bCreated)
        ConstructCBufferAnnotation(CB, dxilTypeSys, AnnotationMap);
      else {
        // Create a fake variable for the unused cbuffer.
        llvm::GlobalVariable *pGV = new llvm::GlobalVariable(
            *HLM.GetModule(), CBufferType, true,
            llvm::GlobalValue::ExternalLinkage, nullptr, CB.GetGlobalName());
        CB.SetGlobalSymbol(pGV);
      }
    }
    // Clear the constants, which are no longer needed.
    CB.GetConstants().clear();
  }
}
} // namespace
namespace CGHLSLMSHelper {
// Align cbuffer offset in legacy mode (16 bytes per row).
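// e.g. (illustrative) a 12-byte float3 at offset 20: the remainder is 4 and
// 4 + 12 <= 16, so it packs into the current row at its natural alignment;
// a 16-byte float4 at offset 20 would instead start a new row at offset 32.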
unsigned AlignBufferOffsetInLegacy(unsigned offset, unsigned size,
                                   unsigned scalarSizeInBytes,
                                   bool bNeedNewRow) {
  if (unsigned remainder = (offset & 0xf)) {
    // Start from a new row.
    if (remainder + size > 16 || bNeedNewRow) {
      return offset + 16 - remainder;
    }
    // If not, naturally align the data.
    return RoundToAlign(offset, scalarSizeInBytes);
  }
  return offset;
}
// Translate the RayQuery constructor. From:
//   %call = call %"RayQuery<flags>" @<constructor>(%"RayQuery<flags>" %ptr)
// To:
//   %handle = call i32 @AllocateRayQuery(i32 <IntrinsicOp::IOP_AllocateRayQuery>, i32 %flags)
//   %gep = GEP %"RayQuery<flags>" %ptr, 0, 0
//   store i32* %gep, i32 %handle
//   ; and replace uses of %call with %ptr
void TranslateRayQueryConstructor(HLModule &HLM) {
  llvm::Module &M = *HLM.GetModule();
  SmallVector<Function *, 4> Constructors;
  for (auto &F : M.functions()) {
    // Match the templated RayQuery constructor instantiation by prefix and
    // signature. It should be impossible to achieve the same signature from
    // HLSL.
    if (!F.getName().startswith("\01??0?$RayQuery@$"))
      continue;
    llvm::Type *Ty = F.getReturnType();
    if (!Ty->isPointerTy() ||
        !dxilutil::IsHLSLRayQueryType(Ty->getPointerElementType()))
      continue;
    if (F.arg_size() != 1 || Ty != F.arg_begin()->getType())
      continue;
    Constructors.emplace_back(&F);
  }
  for (auto pConstructorFunc : Constructors) {
    llvm::IntegerType *i32Ty = llvm::Type::getInt32Ty(M.getContext());
    llvm::ConstantInt *i32Zero =
        llvm::ConstantInt::get(i32Ty, (uint64_t)0, false);
    llvm::FunctionType *funcTy =
        llvm::FunctionType::get(i32Ty, {i32Ty, i32Ty}, false);
    unsigned opcode = (unsigned)IntrinsicOp::IOP_AllocateRayQuery;
    llvm::ConstantInt *opVal = llvm::ConstantInt::get(i32Ty, opcode, false);
    Function *opFunc =
        GetOrCreateHLFunction(M, funcTy, HLOpcodeGroup::HLIntrinsic, opcode);
    while (!pConstructorFunc->user_empty()) {
      Value *V = *pConstructorFunc->user_begin();
      llvm::CallInst *CI = cast<CallInst>(V); // Must be a call.
      llvm::Value *pThis = CI->getArgOperand(0);
      llvm::StructType *pRQType =
          cast<llvm::StructType>(pThis->getType()->getPointerElementType());
      DxilStructAnnotation *SA =
          HLM.GetTypeSystem().GetStructAnnotation(pRQType);
      DXASSERT(SA, "otherwise, could not find type annotation for RayQuery "
                   "specialization");
      DXASSERT(SA->GetNumTemplateArgs() == 1 &&
                   SA->GetTemplateArgAnnotation(0).IsIntegral(),
               "otherwise, RayQuery has changed, or lacks template args");
      llvm::IRBuilder<> Builder(CI);
      llvm::Value *rayFlags =
          Builder.getInt32(SA->GetTemplateArgAnnotation(0).GetIntegral());
      llvm::Value *Call =
          Builder.CreateCall(opFunc, {opVal, rayFlags}, pThis->getName());
      llvm::Value *GEP = Builder.CreateInBoundsGEP(pThis, {i32Zero, i32Zero});
      Builder.CreateStore(Call, GEP);
      CI->replaceAllUsesWith(pThis);
      CI->eraseFromParent();
    }
    pConstructorFunc->eraseFromParent();
  }
}
} // namespace CGHLSLMSHelper
namespace {
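// If a global constructor does nothing but store constants into one global
// array, fold those stores into a static initializer for that global so the
// constructor call can be dropped. Returns false when the pattern is not
// matched.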
bool BuildImmInit(Function *Ctor) {
  GlobalVariable *GV = nullptr;
  SmallVector<Constant *, 4> ImmList;
  bool allConst = true;
  for (inst_iterator I = inst_begin(Ctor), E = inst_end(Ctor); I != E; ++I) {
    if (StoreInst *SI = dyn_cast<StoreInst>(&(*I))) {
      Value *V = SI->getValueOperand();
      if (!isa<Constant>(V) || V->getType()->isPointerTy()) {
        allConst = false;
        break;
      }
      ImmList.emplace_back(cast<Constant>(V));
      Value *Ptr = SI->getPointerOperand();
      if (GEPOperator *GepOp = dyn_cast<GEPOperator>(Ptr)) {
        Ptr = GepOp->getPointerOperand();
        if (GlobalVariable *pGV = dyn_cast<GlobalVariable>(Ptr)) {
          if (GV == nullptr)
            GV = pGV;
          else {
            DXASSERT(GV == pGV, "else pointer mismatch");
          }
        }
      }
    } else {
      if (!isa<ReturnInst>(*I)) {
        allConst = false;
        break;
      }
    }
  }
  if (!allConst)
    return false;
  if (!GV)
    return false;
  llvm::Type *Ty = GV->getType()->getElementType();
  llvm::ArrayType *AT = dyn_cast<llvm::ArrayType>(Ty);
  // TODO: support other types.
  if (!AT)
    return false;
  if (ImmList.size() != AT->getNumElements())
    return false;
  Constant *Init = llvm::ConstantArray::get(AT, ImmList);
  GV->setInitializer(Init);
  return true;
}
} // namespace
namespace CGHLSLMSHelper {
void ProcessCtorFunctions(llvm::Module &M, StringRef globalName,
                          Instruction *InsertPt, bool bRemoveGlobal) {
  // Add calls to the global constructors to the entry function.
  GlobalVariable *GV = M.getGlobalVariable(globalName);
  if (!GV)
    return;
  ConstantArray *CA = dyn_cast<ConstantArray>(GV->getInitializer());
  if (!CA)
    return;
  IRBuilder<> Builder(InsertPt);
  for (User::op_iterator i = CA->op_begin(), e = CA->op_end(); i != e; ++i) {
    if (isa<ConstantAggregateZero>(*i))
      continue;
    ConstantStruct *CS = cast<ConstantStruct>(*i);
    if (isa<ConstantPointerNull>(CS->getOperand(1)))
      continue;
    // Must be a function or a null ptr.
    if (!isa<Function>(CS->getOperand(1)))
      continue;
    Function *Ctor = cast<Function>(CS->getOperand(1));
    DXASSERT(Ctor->getReturnType()->isVoidTy() && Ctor->arg_size() == 0,
             "function type must be void (void)");
    for (inst_iterator I = inst_begin(Ctor), E = inst_end(Ctor); I != E; ++I) {
      if (CallInst *CI = dyn_cast<CallInst>(&(*I))) {
        Function *F = CI->getCalledFunction();
        // Try to build an immediate initializer. If that doesn't work, add a
        // call in the entry function instead.
        if (!BuildImmInit(F)) {
          Builder.CreateCall(F);
        }
      } else {
        DXASSERT(isa<ReturnInst>(&(*I)),
                 "else invalid Global constructor function");
      }
    }
  }
  // Remove the GV if requested.
  if (bRemoveGlobal) {
    GV->eraseFromParent();
  }
}
void FinishCBuffer(
    HLModule &HLM, llvm::Type *CBufferType,
    std::unordered_map<Constant *, DxilFieldAnnotation> &constVarAnnotationMap) {
  // Allocate constant buffers.
  AllocateDxilConstantBuffers(HLM, constVarAnnotationMap);
  // TODO: create temp variable for constant which has store use.
  // Create a global variable and type annotation for each CBuffer.
  ConstructCBuffer(HLM, CBufferType, constVarAnnotationMap);
}
void AddRegBindingsForResourceInConstantBuffer(
    HLModule &HLM,
    llvm::DenseMap<llvm::Constant *,
                   llvm::SmallVector<std::pair<DXIL::ResourceClass, unsigned>,
                                     1>> &constantRegBindingMap) {
  for (unsigned i = 0; i < HLM.GetCBuffers().size(); i++) {
    HLCBuffer &CB = *static_cast<HLCBuffer *>(&(HLM.GetCBuffer(i)));
    auto &Constants = CB.GetConstants();
    for (unsigned j = 0; j < Constants.size(); j++) {
      const std::unique_ptr<DxilResourceBase> &C = Constants[j];
      Constant *CGV = C->GetGlobalSymbol();
      auto &regBindings = constantRegBindingMap[CGV];
      if (regBindings.empty())
        continue;
      unsigned Srv = UINT_MAX;
      unsigned Uav = UINT_MAX;
      unsigned Sampler = UINT_MAX;
      for (auto it : regBindings) {
        unsigned RegNum = it.second;
        switch (it.first) {
        case DXIL::ResourceClass::SRV:
          Srv = RegNum;
          break;
        case DXIL::ResourceClass::UAV:
          Uav = RegNum;
          break;
        case DXIL::ResourceClass::Sampler:
          Sampler = RegNum;
          break;
        default:
          DXASSERT(0, "invalid resource class");
          break;
        }
      }
      HLM.AddRegBinding(CB.GetID(), j, Srv, Uav, Sampler);
    }
  }
}
// Extension codegen.
void ExtensionCodeGen(HLModule &HLM, clang::CodeGen::CodeGenModule &CGM) {
  // Add semantic defines for extensions if any are available.
  HLSLExtensionsCodegenHelper::SemanticDefineErrorList errors =
      CGM.getCodeGenOpts().HLSLExtensionsCodegen->WriteSemanticDefines(
          HLM.GetModule());
  clang::DiagnosticsEngine &Diags = CGM.getDiags();
  for (const HLSLExtensionsCodegenHelper::SemanticDefineError &error : errors) {
    clang::DiagnosticsEngine::Level level = clang::DiagnosticsEngine::Error;
    if (error.IsWarning())
      level = clang::DiagnosticsEngine::Warning;
    unsigned DiagID = Diags.getCustomDiagID(level, "%0");
    Diags.Report(clang::SourceLocation::getFromRawEncoding(error.Location()),
                 DiagID)
        << error.Message();
  }
  // Add root signature from a #define. Overrides root signature in function
  // attribute.
  {
    using Status = HLSLExtensionsCodegenHelper::CustomRootSignature::Status;
    HLSLExtensionsCodegenHelper::CustomRootSignature customRootSig;
    Status status =
        CGM.getCodeGenOpts().HLSLExtensionsCodegen->GetCustomRootSignature(
            &customRootSig);
    if (status == Status::FOUND) {
      DxilRootSignatureVersion rootSigVer;
      // Set the root signature version.
      if (CGM.getLangOpts().RootSigMinor == 0) {
        rootSigVer = hlsl::DxilRootSignatureVersion::Version_1_0;
      } else {
        DXASSERT(CGM.getLangOpts().RootSigMinor == 1,
                 "else CGMSHLSLRuntime Constructor needs to be updated");
        rootSigVer = hlsl::DxilRootSignatureVersion::Version_1_1;
      }
      RootSignatureHandle RootSigHandle;
      CompileRootSignature(
          customRootSig.RootSignature, Diags,
          clang::SourceLocation::getFromRawEncoding(
              customRootSig.EncodedSourceLocation),
          rootSigVer, DxilRootSignatureCompilationFlags::GlobalRootSignature,
          &RootSigHandle);
      if (!RootSigHandle.IsEmpty()) {
        RootSigHandle.EnsureSerializedAvailable();
        HLM.SetSerializedRootSignature(RootSigHandle.GetSerializedBytes(),
                                       RootSigHandle.GetSerializedSize());
      }
    }
  }
}
} // namespace CGHLSLMSHelper
namespace {
void ReportDisallowedTypeInExportParam(clang::CodeGen::CodeGenModule &CGM,
                                       StringRef name) {
  clang::DiagnosticsEngine &Diags = CGM.getDiags();
  unsigned DiagID =
      Diags.getCustomDiagID(clang::DiagnosticsEngine::Error,
                            "Exported function %0 must not contain a "
                            "resource in parameter or return type.");
  std::string escaped;
  llvm::raw_string_ostream os(escaped);
  dxilutil::PrintEscapedString(name, os);
  Diags.Report(DiagID) << os.str();
}
} // namespace
namespace CGHLSLMSHelper {
void FinishClipPlane(HLModule &HLM, std::vector<Function *> &clipPlaneFuncList,
                     std::unordered_map<Value *, DebugLoc> &debugInfoMap,
                     clang::CodeGen::CodeGenModule &CGM) {
  bool bDebugInfo = CGM.getCodeGenOpts().getDebugInfo() ==
                    clang::CodeGenOptions::FullDebugInfo;
  Module &M = *HLM.GetModule();
  for (Function *F : clipPlaneFuncList) {
    DxilFunctionProps &props = HLM.GetDxilFunctionProps(F);
    IRBuilder<> Builder(F->getEntryBlock().getFirstInsertionPt());
    for (unsigned i = 0; i < DXIL::kNumClipPlanes; i++) {
      Value *clipPlane = props.ShaderProps.VS.clipPlanes[i];
      if (!clipPlane)
        continue;
      if (bDebugInfo) {
        Builder.SetCurrentDebugLocation(debugInfoMap[clipPlane]);
      }
      llvm::Type *Ty = clipPlane->getType()->getPointerElementType();
      GlobalVariable *GV = new llvm::GlobalVariable(
          M, Ty, /*IsConstant*/ false, // Not constant: it is stored to below.
          llvm::GlobalValue::ExternalLinkage,
          /*InitVal*/ nullptr, Twine("SV_ClipPlane") + Twine(i));
      Value *initVal = Builder.CreateLoad(clipPlane);
      Builder.CreateStore(initVal, GV);
      props.ShaderProps.VS.clipPlanes[i] = GV;
    }
  }
}
} // namespace CGHLSLMSHelper
namespace {
void LowerExportFunctions(HLModule &HLM, clang::CodeGen::CodeGenModule &CGM,
                          dxilutil::ExportMap &exportMap,
                          StringMap<EntryFunctionInfo> &entryFunctionMap) {
  bool bIsLib = HLM.GetShaderModel()->IsLib();
  Module &M = *HLM.GetModule();
  if (bIsLib && !exportMap.empty()) {
    for (auto &it : entryFunctionMap) {
      if (HLM.HasDxilFunctionProps(it.second.Func)) {
        const DxilFunctionProps &props =
            HLM.GetDxilFunctionProps(it.second.Func);
        if (props.IsHS())
          exportMap.RegisterExportedFunction(
              props.ShaderProps.HS.patchConstantFunc);
      }
    }
  }
  if (bIsLib && !exportMap.empty()) {
    exportMap.BeginProcessing();
    for (Function &f : M.functions()) {
      if (f.isDeclaration() || f.isIntrinsic() ||
          GetHLOpcodeGroup(&f) != HLOpcodeGroup::NotHL)
        continue;
      exportMap.ProcessFunction(&f, true);
    }
    // TODO: add subobject export names here.
    if (!exportMap.EndProcessing()) {
      for (auto &name : exportMap.GetNameCollisions()) {
        clang::DiagnosticsEngine &Diags = CGM.getDiags();
        unsigned DiagID = Diags.getCustomDiagID(
            clang::DiagnosticsEngine::Error,
            "Export name collides with another export: %0");
        std::string escaped;
        llvm::raw_string_ostream os(escaped);
        dxilutil::PrintEscapedString(name, os);
        Diags.Report(DiagID) << os.str();
      }
      for (auto &name : exportMap.GetUnusedExports()) {
        clang::DiagnosticsEngine &Diags = CGM.getDiags();
        unsigned DiagID =
            Diags.getCustomDiagID(clang::DiagnosticsEngine::Error,
                                  "Could not find target for export: %0");
        std::string escaped;
        llvm::raw_string_ostream os(escaped);
        dxilutil::PrintEscapedString(name, os);
        Diags.Report(DiagID) << os.str();
      }
    }
  }
  for (auto &it : exportMap.GetFunctionRenames()) {
    Function *F = it.first;
    auto &renames = it.second;
    if (renames.empty())
      continue;
    // Rename the original, if necessary, then clone the rest.
    if (renames.find(F->getName()) == renames.end())
      F->setName(*renames.begin());
    for (auto &itName : renames) {
      if (F->getName() != itName) {
        Function *pClone = CloneFunction(F, itName, &M, HLM.GetTypeSystem(),
                                         HLM.GetTypeSystem());
        // Add DxilFunctionProps if the function is an entry.
        if (HLM.HasDxilFunctionProps(F)) {
          DxilFunctionProps &props = HLM.GetDxilFunctionProps(F);
          auto newProps = llvm::make_unique<DxilFunctionProps>(props);
          HLM.AddDxilFunctionProps(pClone, newProps);
        }
      }
    }
  }
}
void CheckResourceParameters(HLModule &HLM,
                             clang::CodeGen::CodeGenModule &CGM) {
  Module &M = *HLM.GetModule();
  for (Function &f : M.functions()) {
    // Skip llvm intrinsics, non-external linkage, entry/patch constant func,
    // and HL intrinsics.
    if (!f.isIntrinsic() &&
        f.getLinkage() == GlobalValue::LinkageTypes::ExternalLinkage &&
        !HLM.HasDxilFunctionProps(&f) && !HLM.IsPatchConstantShader(&f) &&
        GetHLOpcodeGroup(&f) == HLOpcodeGroup::NotHL) {
      // Verify no resources in param/return types.
      if (dxilutil::ContainsHLSLObjectType(f.getReturnType())) {
        ReportDisallowedTypeInExportParam(CGM, f.getName());
        continue;
      }
      for (auto &Arg : f.args()) {
        if (dxilutil::ContainsHLSLObjectType(Arg.getType())) {
          ReportDisallowedTypeInExportParam(CGM, f.getName());
          break;
        }
      }
    }
  }
}
} // namespace
namespace CGHLSLMSHelper {
void UpdateLinkage(HLModule &HLM, clang::CodeGen::CodeGenModule &CGM,
                   dxilutil::ExportMap &exportMap,
                   StringMap<EntryFunctionInfo> &entryFunctionMap,
                   StringMap<PatchConstantInfo> &patchConstantFunctionMap) {
  bool bIsLib = HLM.GetShaderModel()->IsLib();
  Module &M = *HLM.GetModule();
  // Pin entry point and constant buffers, mark everything else internal.
  for (Function &f : M.functions()) {
    if (!bIsLib) {
      if (&f == HLM.GetEntryFunction() ||
          IsPatchConstantFunction(&f, patchConstantFunctionMap) ||
          f.isDeclaration()) {
        if (f.isDeclaration() && !f.isIntrinsic() &&
            GetHLOpcodeGroup(&f) == HLOpcodeGroup::NotHL) {
          clang::DiagnosticsEngine &Diags = CGM.getDiags();
          unsigned DiagID = Diags.getCustomDiagID(
              clang::DiagnosticsEngine::Error,
              "External function used in non-library profile: %0");
          std::string escaped;
          llvm::raw_string_ostream os(escaped);
          dxilutil::PrintEscapedString(f.getName(), os);
          Diags.Report(DiagID) << os.str();
          return;
        }
        f.setLinkage(GlobalValue::LinkageTypes::ExternalLinkage);
      } else {
        f.setLinkage(GlobalValue::LinkageTypes::InternalLinkage);
      }
    }
    // Skip noinline functions.
    if (f.hasFnAttribute(llvm::Attribute::NoInline))
      continue;
    // Mark used function definitions AlwaysInline.
    if (!f.user_empty() && !f.isDeclaration())
      f.addFnAttr(llvm::Attribute::AlwaysInline);
  }
  LowerExportFunctions(HLM, CGM, exportMap, entryFunctionMap);
  if (CGM.getCodeGenOpts().ExportShadersOnly) {
    for (Function &f : M.functions()) {
      // Skip declarations, intrinsics, shaders, and non-external linkage.
      if (f.isDeclaration() || f.isIntrinsic() ||
          GetHLOpcodeGroup(&f) != HLOpcodeGroup::NotHL ||
          HLM.HasDxilFunctionProps(&f) || HLM.IsPatchConstantShader(&f) ||
          f.getLinkage() != GlobalValue::LinkageTypes::ExternalLinkage)
        continue;
      // Mark non-shader user functions as InternalLinkage.
      f.setLinkage(GlobalValue::LinkageTypes::InternalLinkage);
    }
  }
  // Now iterate hull shaders and make sure their corresponding patch constant
  // functions are marked ExternalLinkage:
  for (Function &f : M.functions()) {
    if (f.isDeclaration() || f.isIntrinsic() ||
        GetHLOpcodeGroup(&f) != HLOpcodeGroup::NotHL ||
        f.getLinkage() != GlobalValue::LinkageTypes::ExternalLinkage ||
        !HLM.HasDxilFunctionProps(&f))
      continue;
    DxilFunctionProps &props = HLM.GetDxilFunctionProps(&f);
    if (!props.IsHS())
      continue;
    Function *PCFunc = props.ShaderProps.HS.patchConstantFunc;
    if (PCFunc->getLinkage() != GlobalValue::LinkageTypes::ExternalLinkage)
      PCFunc->setLinkage(GlobalValue::LinkageTypes::ExternalLinkage);
  }
  // Disallow resource arguments in (non-entry) function exports
  // unless the target is offline linking.
  if (bIsLib &&
      HLM.GetShaderModel()->GetMinor() != ShaderModel::kOfflineMinor) {
    CheckResourceParameters(HLM, CGM);
  }
}
void FinishEntries(
    HLModule &HLM, const EntryFunctionInfo &Entry,
    clang::CodeGen::CodeGenModule &CGM,
    StringMap<EntryFunctionInfo> &entryFunctionMap,
    std::unordered_map<Function *, const clang::HLSLPatchConstantFuncAttr *>
        &HSEntryPatchConstantFuncAttr,
    StringMap<PatchConstantInfo> &patchConstantFunctionMap,
    std::unordered_map<Function *, std::unique_ptr<DxilFunctionProps>>
        &patchConstantFunctionPropsMap) {
  bool bIsLib = HLM.GetShaderModel()->IsLib();
  // Libraries don't have a single entry point.
  if (!bIsLib) {
    SetEntryFunction(HLM, Entry.Func, CGM);
    // If at this point we haven't determined the entry function, it's an
    // error.
    if (HLM.GetEntryFunction() == nullptr) {
      assert(CGM.getDiags().hasErrorOccurred() &&
             "else SetEntryFunction should have reported this condition");
      return;
    }
    // In back-compat mode (with the /Gec flag) create a static global for
    // each const global to allow writing to it.
    // TODO: Verify the behavior of static globals in hull shaders.
    if (CGM.getLangOpts().EnableDX9CompatMode &&
        CGM.getLangOpts().HLSLVersion <= 2016)
      CreateWriteEnabledStaticGlobals(HLM.GetModule(), HLM.GetEntryFunction());
    if (HLM.GetShaderModel()->IsHS()) {
      SetPatchConstantFunction(Entry, HSEntryPatchConstantFuncAttr,
                               patchConstantFunctionMap,
                               patchConstantFunctionPropsMap, HLM, CGM);
    }
  } else {
    for (auto &it : entryFunctionMap) {
      // Skip the clone for raytracing entries.
      if (HLM.GetDxilFunctionProps(it.second.Func).IsRay())
        continue;
      // TODO: change flattened function names to dx.entry.<name>:
      // std::string entryName = (Twine(dxilutil::EntryPrefix) +
      // it.getKey()).str();
      CloneShaderEntry(it.second.Func, it.getKey(), HLM);
      auto AttrIter = HSEntryPatchConstantFuncAttr.find(it.second.Func);
      if (AttrIter != HSEntryPatchConstantFuncAttr.end()) {
        SetPatchConstantFunctionWithAttr(
            it.second, AttrIter->second, patchConstantFunctionMap,
            patchConstantFunctionPropsMap, HLM, CGM);
      }
    }
  }
}
} // namespace CGHLSLMSHelper
namespace CGHLSLMSHelper {
void FinishIntrinsics(
    HLModule &HLM, std::vector<std::pair<Function *, unsigned>> &intrinsicMap,
    DenseMap<Value *, DxilResourceProperties> &valToResPropertiesMap) {
  // Lower getResourceFromHeap before AddOpcodeParamForIntrinsics to skip the
  // automatic lowering for getResourceFromHeap.
  LowerGetResourceFromHeap(HLM, intrinsicMap);
  // Translate the opcode into a parameter for intrinsic functions.
  // Do this before CloneShaderEntry and TranslateRayQueryConstructor to avoid
  // updating valToResPropertiesMap for cloned instructions.
  AddOpcodeParamForIntrinsics(HLM, intrinsicMap, valToResPropertiesMap);
}
// Add the dx.break temporary intrinsic and create call instructions
// to it for each branch that requires the artificial conditional.
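// Illustrative shape of the rewrite for a recorded break branch inside a
// wave-sensitive function (assuming the branch carries a placeholder
// condition from earlier codegen; label names hypothetical):
//   %cond = call i1 @dx.break()
//   br i1 %cond, label %loop.exit, label %fallthrough, !dx.break !0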
  2398. void AddDxBreak(Module &M, const SmallVector<llvm::BranchInst*, 16> &DxBreaks) {
  2399. if (DxBreaks.empty())
  2400. return;
  2401. // Collect functions that make use of any wave operations
  2402. // Only they will need the dx.break condition added
  2403. SmallPtrSet<Function *, 16> WaveUsers;
  2404. for (Function &F : M.functions()) {
  2405. HLOpcodeGroup opgroup = hlsl::GetHLOpcodeGroup(&F);
  2406. if (F.isDeclaration() && IsHLWaveSensitive(&F) &&
  2407. (opgroup == HLOpcodeGroup::HLIntrinsic || opgroup == HLOpcodeGroup::HLExtIntrinsic)) {
  2408. for (User *U : F.users()) {
  2409. CallInst *CI = cast<CallInst>(U);
  2410. WaveUsers.insert(CI->getParent()->getParent());
  2411. }
  2412. }
  2413. }
  2414. // If there are no wave users, not even the function declaration is needed
  2415. if (WaveUsers.empty())
  2416. return;
  2417. // Create the dx.break function
  2418. FunctionType *FT = llvm::FunctionType::get(llvm::Type::getInt1Ty(M.getContext()), false);
  2419. Function *func = cast<llvm::Function>(M.getOrInsertFunction(DXIL::kDxBreakFuncName, FT));
  2420. func->addFnAttr(Attribute::AttrKind::NoUnwind);
  2421. // For all break branches recorded previously, if the function they are in makes
  2422. // any use of a wave op, it may need to be artificially conditional. Make it so now.
  2423. // The CleanupDxBreak pass will remove those that aren't needed when more is known.
  2424. for(llvm::BranchInst *BI : DxBreaks) {
  2425. if (WaveUsers.count(BI->getParent()->getParent())) {
  2426. CallInst *Call = CallInst::Create(FT, func, ArrayRef<Value *>(), "", BI);
  2427. BI->setCondition(Call);
  2428. if (!BI->getMetadata(DXIL::kDxBreakMDName)) {
  2429. BI->setMetadata(DXIL::kDxBreakMDName, llvm::MDNode::get(BI->getContext(), {}));
  2430. }
  2431. }
  2432. }
  2433. }
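// Illustrative sketch (added for exposition): the branches recorded in
// DxBreaks are created by StructurizeMultiRetFunction below as trivially
// conditional breaks,
//   br i1 true, label %endOfScope, label %loopContinue
// and AddDxBreak reconditions them on the opaque call:
//   %c = call i1 @dx.break()
//   br i1 %c, label %endOfScope, label %loopContinue, !dx.break !{}
// (metadata name per DXIL::kDxBreakMDName). The call keeps the break from
// being folded away until wave sensitivity is fully known.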
} // namespace CGHLSLMSHelper
namespace CGHLSLMSHelper {
ScopeInfo::ScopeInfo(Function *F) : maxRetLevel(0), bAllReturnsInIf(true) {
  Scope FuncScope;
  FuncScope.kind = Scope::ScopeKind::FunctionScope;
  FuncScope.EndScopeBB = nullptr;
  FuncScope.bWholeScopeReturned = false;
  // Use index 0 so no special check is needed when getting the parent; every
  // loop over scopes must stop at kind == FunctionScope instead.
  FuncScope.parentScopeIndex = 0;
  scopes.emplace_back(FuncScope);
  scopeStack.emplace_back(0);
}
// When every return is inside an if that is not nested, the flow is still
// structured even when there is more than one return.
bool ScopeInfo::CanSkipStructurize() {
  return bAllReturnsInIf && maxRetLevel < 2;
}
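// Example (illustrative HLSL, added for exposition): structurization can be
// skipped for
//   float f(float a) {
//     if (a > 0)
//       return 1;   // retLevel == 1, inside a non-nested if
//     return 0;     // retLevel == 0
//   }
// since bAllReturnsInIf stays true and maxRetLevel < 2, while a return inside
// "if (a > 0) { if (b > 0) return 1; }" raises maxRetLevel to 2 and forces
// the transform.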
void ScopeInfo::AddScope(Scope::ScopeKind k, BasicBlock *endScopeBB) {
  Scope S;
  S.kind = k;
  S.bWholeScopeReturned = false;
  S.EndScopeBB = endScopeBB;
  S.parentScopeIndex = scopeStack.back();
  scopeStack.emplace_back(scopes.size());
  scopes.emplace_back(S);
}
void ScopeInfo::AddIf(BasicBlock *endIfBB) {
  AddScope(Scope::ScopeKind::IfScope, endIfBB);
}
void ScopeInfo::AddSwitch(BasicBlock *endSwitch) {
  AddScope(Scope::ScopeKind::SwitchScope, endSwitch);
}
void ScopeInfo::AddLoop(BasicBlock *loopContinue, BasicBlock *endLoop) {
  AddScope(Scope::ScopeKind::LoopScope, endLoop);
  scopes.back().loopContinueBB = loopContinue;
}
void ScopeInfo::AddRet(BasicBlock *bbWithRet) {
  Scope RetScope;
  RetScope.kind = Scope::ScopeKind::ReturnScope;
  RetScope.EndScopeBB = bbWithRet;
  RetScope.parentScopeIndex = scopeStack.back();
  // Subtract 1 for the function scope, which sits at scopeStack[0].
  unsigned retLevel = scopeStack.size() - 1;
  // Track the maximum nesting level over all returns.
  maxRetLevel = std::max<unsigned>(maxRetLevel, retLevel);
  bool bGotLoopOrSwitch = false;
  for (auto it = scopeStack.rbegin(); it != scopeStack.rend(); it++) {
    unsigned idx = *it;
    Scope &S = scopes[idx];
    switch (S.kind) {
    default:
      break;
    case Scope::ScopeKind::LoopScope:
    case Scope::ScopeKind::SwitchScope:
      // A return inside a loop or switch can be lowered to a break, so
      // reparent the return to the innermost enclosing loop/switch.
      bGotLoopOrSwitch = true;
      RetScope.parentScopeIndex = idx;
      break;
    }
    if (bGotLoopOrSwitch)
      break;
  }
  bAllReturnsInIf &= !bGotLoopOrSwitch;
  // A return finishes its own scope.
  RetScope.bWholeScopeReturned = true;
  // Record the return scope in rets.
  rets.emplace_back(scopes.size());
  scopes.emplace_back(RetScope);
  // No need to push RetScope onto the stack since it cannot nest other scopes.
}
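// Worked example (illustrative, added for exposition): while visiting
//   for (...) {     // scopeStack = [func, loop]
//     if (a) {      // scopeStack = [func, loop, if]
//       return 1;   // retLevel = 2; bGotLoopOrSwitch = true
//     }
//   }
// the return is reparented onto the LoopScope rather than the IfScope, and
// bAllReturnsInIf becomes false, so StructurizeMultiRetFunction can later
// lower this return with a guarded break out of the loop.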
void ScopeInfo::EndScope(bool bScopeFinishedWithRet) {
  unsigned idx = scopeStack.pop_back_val();
  Scope &S = GetScope(idx);
  // If the whole statement finished with a return and the end-scope BB is
  // unused (nothing branches to it), then the whole scope has returned.
  S.bWholeScopeReturned =
      bScopeFinishedWithRet && S.EndScopeBB->user_empty();
}
Scope &ScopeInfo::GetScope(unsigned i) { return scopes[i]; }
void ScopeInfo::LegalizeWholeReturnedScope() {
  // Legalize scopes whose whole body has returned. When a whole scope
  // returns, its endScopeBB will be deleted in codegen, so update it here to
  // the parent scope's endScopeBB. Because the scopes are stored in order,
  // the update propagates automatically to the final target: A->B->C
  // collapses directly to A->C.
  for (auto &S : scopes) {
    if (S.bWholeScopeReturned && S.kind != Scope::ScopeKind::ReturnScope) {
      S.EndScopeBB = scopes[S.parentScopeIndex].EndScopeBB;
    }
  }
}
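// Usage sketch (an assumption about how the codegen visitor drives ScopeInfo;
// this call pattern is not part of this file):
//   ScopeInfo SI(F);
//   SI.AddIf(endIfBB);                           // entering "if (...) {"
//   SI.AddRet(retBB);                            // "return ..." inside the if
//   SI.EndScope(/*bScopeFinishedWithRet*/ true); // leaving the if
//   ...
//   if (!SI.CanSkipStructurize())
//     StructurizeMultiRetFunction(F, SI, ...);   // rewrite the returns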
} // namespace CGHLSLMSHelper
namespace {
void updateEndScope(
    ScopeInfo &ScopeInfo,
    DenseMap<BasicBlock *, SmallVector<unsigned, 2>> &EndBBToScopeIndexMap,
    BasicBlock *oldEndScope, BasicBlock *newEndScope) {
  auto it = EndBBToScopeIndexMap.find(oldEndScope);
  DXASSERT(it != EndBBToScopeIndexMap.end(),
           "fail to find endScopeBB in EndBBToScopeIndexMap");
  SmallVector<unsigned, 2> &scopeList = it->second;
  // No update is needed when the endBB is not shared with another scope.
  if (scopeList.size() < 2)
    return;
  for (unsigned i : scopeList) {
    Scope &S = ScopeInfo.GetScope(i);
    // Don't update a ReturnScope's endBB: that block holds the return branch
    // itself.
    if (S.kind != Scope::ScopeKind::ReturnScope)
      S.EndScopeBB = newEndScope;
  }
  EndBBToScopeIndexMap[newEndScope] = scopeList;
}
// Initialize the return value with undef so it will not stay live across
// loops in callers.
// After returns are structurized, the flow is controlled by bIsReturned. The
// semantics are the same as with multiple returns, but without knowledge of
// bIsReturned some paths in the structurized flow leave the return value
// uninitialized. When the function is called inside a loop, the return value
// would then live across the loop after inlining.
void InitRetValue(BasicBlock *exitBB) {
  Value *RetValPtr = nullptr;
  if (ReturnInst *RI = dyn_cast<ReturnInst>(exitBB->getTerminator())) {
    if (Value *RetV = RI->getReturnValue()) {
      if (LoadInst *LI = dyn_cast<LoadInst>(RetV)) {
        RetValPtr = LI->getPointerOperand();
      }
    }
  }
  if (!RetValPtr)
    return;
  if (AllocaInst *RetVAlloc = dyn_cast<AllocaInst>(RetValPtr)) {
    IRBuilder<> B(RetVAlloc->getNextNode());
    Type *Ty = RetVAlloc->getAllocatedType();
    Value *Init = UndefValue::get(Ty);
    if (Ty->isAggregateType()) {
      // TODO: support aggregate types and out parameters. Skipping them here
      // causes undef on phis whose incoming paths should never be hit.
    } else {
      B.CreateStore(Init, RetVAlloc);
    }
  }
}
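// Sketch of the intended effect (assumed IR shape, added for exposition):
// given an exit block ending in "ret float %v" where %v is loaded from
// "%retv = alloca float", InitRetValue inserts
//   store float undef, float* %retv
// right after the alloca, so every structurized path defines %retv before it
// is read and %retv does not stay live across a caller's loop after inlining.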
// A function with multiple returns such as
//   float foo(float a, float b, float c) {
//     float r = c;
//     if (a > 0) {
//       if (b > 0) {
//         return -1;
//       }
//       ***
//     }
//     ...
//     return r;
//   }
// is transformed into
//   float foo(float a, float b, float c) {
//     bool bRet = false;
//     float retV;
//     float r = c;
//     if (a > 0) {
//       if (b > 0) {
//         bRet = true;
//         retV = -1;
//       }
//       if (!bRet) {
//         ***
//       }
//     }
//     if (!bRet) {
//       ...
//       retV = r;
//     }
//     return retV;
//   }
void StructurizeMultiRetFunction(Function *F, ScopeInfo &ScopeInfo,
                                 bool bWaveEnabledStage,
                                 SmallVector<BranchInst *, 16> &DxBreaks) {
  if (ScopeInfo.CanSkipStructurize())
    return;
  // Get the scopes that contain returns.
  auto &rets = ScopeInfo.GetRetScopes();
  IRBuilder<> B(F->getEntryBlock().begin());
  Scope &FunctionScope = ScopeInfo.GetScope(0);
  Type *boolTy = Type::getInt1Ty(F->getContext());
  Constant *cTrue = ConstantInt::get(boolTy, 1);
  Constant *cFalse = ConstantInt::get(boolTy, 0);
  // bool bIsReturned = false;
  AllocaInst *bIsReturned = B.CreateAlloca(boolTy, nullptr, "bReturned");
  B.CreateStore(cFalse, bIsReturned);
  Scope &RetScope = ScopeInfo.GetScope(rets[0]);
  BasicBlock *exitBB = RetScope.EndScopeBB->getTerminator()->getSuccessor(0);
  FunctionScope.EndScopeBB = exitBB;
  // Find the alloca for the return value and initialize it, to avoid undef
  // values once the code is guarded with bIsReturned.
  InitRetValue(exitBB);
  ScopeInfo.LegalizeWholeReturnedScope();
  // Map from endScopeBB to scope indices. When two scopes share the same
  // endScopeBB, the endScopeBB must be updated after structurizing.
  DenseMap<BasicBlock *, SmallVector<unsigned, 2>> EndBBToScopeIndexMap;
  auto &scopes = ScopeInfo.GetScopes();
  for (unsigned i = 0; i < scopes.size(); i++) {
    Scope &S = scopes[i];
    EndBBToScopeIndexMap[S.EndScopeBB].emplace_back(i);
  }
  DenseSet<unsigned> guardedSet;
  for (auto it = rets.begin(); it != rets.end(); it++) {
    unsigned scopeIndex = *it;
    Scope *pCurScope = &ScopeInfo.GetScope(scopeIndex);
    Scope *pRetParentScope = &ScopeInfo.GetScope(pCurScope->parentScopeIndex);
    // Skip returns that are not inside nested control flow.
    if (pRetParentScope->kind == Scope::ScopeKind::FunctionScope)
      continue;
    do {
      BasicBlock *BB = pCurScope->EndScopeBB;
      // Exit once this scope has already been processed.
      if (guardedSet.count(scopeIndex))
        break;
      guardedSet.insert(scopeIndex);
      Scope *pParentScope = &ScopeInfo.GetScope(pCurScope->parentScopeIndex);
      BasicBlock *EndBB = pParentScope->EndScopeBB;
      if (pCurScope->bWholeScopeReturned) {
        // When the whole scope has returned and this is a ReturnScope, just
        // branch to the parent's endScope.
        if (pCurScope->kind == Scope::ScopeKind::ReturnScope) {
          BasicBlock *retBB = pCurScope->EndScopeBB;
          TerminatorInst *retBr = retBB->getTerminator();
          IRBuilder<> B(retBr);
          // Set bReturned to true.
          B.CreateStore(cTrue, bIsReturned);
          if (bWaveEnabledStage &&
              pParentScope->kind == Scope::ScopeKind::LoopScope) {
            BranchInst *BI =
                B.CreateCondBr(cTrue, EndBB, pParentScope->loopContinueBB);
            DxBreaks.emplace_back(BI);
            retBr->eraseFromParent();
          } else {
            // Update the branch target.
            retBr->setSuccessor(0, EndBB);
          }
        }
        // For any other scope kind do nothing: since the whole scope has
        // returned, control simply flows on to the parent scope.
      } else {
        // Only part of the scope returned, so use bIsReturned to guard the
        // part that has not returned.
        switch (pParentScope->kind) {
        case Scope::ScopeKind::ReturnScope:
          DXASSERT(0, "return scope must get whole scope returned.");
          break;
        case Scope::ScopeKind::FunctionScope:
        case Scope::ScopeKind::IfScope: {
          // Inside an if:
          //   if (!bReturned) {
          //     rest of the if or else.
          //   }
          BasicBlock *CmpBB = BasicBlock::Create(BB->getContext(),
                                                 "bReturned.cmp.false", F, BB);
          // Make BB's predecessors branch to CmpBB instead.
          BB->replaceAllUsesWith(CmpBB);
          // Update endScopeBB to CmpBB for scopes that have BB as endScope.
          updateEndScope(ScopeInfo, EndBBToScopeIndexMap, BB, CmpBB);
          IRBuilder<> B(CmpBB);
          Value *isReturned = B.CreateLoad(bIsReturned, "bReturned.load");
          Value *returned =
              B.CreateICmpNE(isReturned, cFalse, "bReturned.not");
          // If the function has already returned, jump past the rest of the
          // scope; otherwise fall through into BB.
          B.CreateCondBr(returned, EndBB, BB);
        } break;
        default: {
          // Inside a switch or loop:
          //   if (bReturned) {
          //     br endOfScope;
          //   }
          BasicBlock *CmpBB =
              BasicBlock::Create(BB->getContext(), "bReturned.cmp.true", F, BB);
          BasicBlock *BreakBB =
              BasicBlock::Create(BB->getContext(), "bReturned.break", F, BB);
          BB->replaceAllUsesWith(CmpBB);
          // Update endScopeBB to CmpBB for scopes that have BB as endScope.
          updateEndScope(ScopeInfo, EndBBToScopeIndexMap, BB, CmpBB);
          IRBuilder<> B(CmpBB);
          Value *isReturned = B.CreateLoad(bIsReturned, "bReturned.load");
          isReturned = B.CreateICmpEQ(isReturned, cTrue, "bReturned.true");
          B.CreateCondBr(isReturned, BreakBB, BB);
          B.SetInsertPoint(BreakBB);
          if (bWaveEnabledStage &&
              pParentScope->kind == Scope::ScopeKind::LoopScope) {
            BranchInst *BI =
                B.CreateCondBr(cTrue, EndBB, pParentScope->loopContinueBB);
            DxBreaks.emplace_back(BI);
          } else {
            B.CreateBr(EndBB);
          }
        } break;
        }
      }
      scopeIndex = pCurScope->parentScopeIndex;
      pCurScope = &ScopeInfo.GetScope(scopeIndex);
      // Done when we reach the function scope.
    } while (pCurScope->kind != Scope::ScopeKind::FunctionScope);
  }
}
} // namespace
namespace CGHLSLMSHelper {
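// Entry point for the structurize-returns transform. The transform is opt-in:
// with an HLSL extensions codegen helper present, its "structurize-returns"
// option must be enabled; otherwise the "structurize-returns" toggle must be
// present and enabled in HLSLOptimizationToggles (typically surfaced on the
// dxc command line as -opt-enable structurize-returns).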
void StructurizeMultiRet(Module &M, clang::CodeGen::CodeGenModule &CGM,
                         DenseMap<Function *, ScopeInfo> &ScopeMap,
                         bool bWaveEnabledStage,
                         SmallVector<BranchInst *, 16> &DxBreaks) {
  if (CGM.getCodeGenOpts().HLSLExtensionsCodegen) {
    if (!CGM.getCodeGenOpts().HLSLExtensionsCodegen->IsOptionEnabled(
            "structurize-returns"))
      return;
  } else {
    if (!CGM.getCodeGenOpts().HLSLOptimizationToggles.count(
            "structurize-returns") ||
        !CGM.getCodeGenOpts()
             .HLSLOptimizationToggles.find("structurize-returns")
             ->second)
      return;
  }
  for (Function &F : M) {
    if (F.isDeclaration())
      continue;
    auto it = ScopeMap.find(&F);
    if (it == ScopeMap.end())
      continue;
    StructurizeMultiRetFunction(&F, it->second, bWaveEnabledStage, DxBreaks);
  }
}
} // namespace CGHLSLMSHelper