///////////////////////////////////////////////////////////////////////////////
//                                                                           //
// DxilCondenseResources.cpp                                                 //
// Copyright (C) Microsoft Corporation. All rights reserved.                 //
// This file is distributed under the University of Illinois Open Source     //
// License. See LICENSE.TXT for details.                                     //
//                                                                           //
// Provides a pass to make resource IDs zero-based and dense.                //
//                                                                           //
///////////////////////////////////////////////////////////////////////////////
#include "dxc/HLSL/DxilGenerationPass.h"
#include "dxc/DXIL/DxilOperations.h"
#include "dxc/DXIL/DxilSignatureElement.h"
#include "dxc/DXIL/DxilModule.h"
#include "dxc/Support/Global.h"
#include "dxc/DXIL/DxilTypeSystem.h"
#include "dxc/DXIL/DxilInstructions.h"
#include "dxc/HLSL/DxilSpanAllocator.h"
#include "dxc/HLSL/HLMatrixType.h"
#include "dxc/DXIL/DxilUtil.h"
#include "dxc/HLSL/HLModule.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/InstIterator.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/PassManager.h"
#include "llvm/IR/DebugInfo.h"
#include "llvm/ADT/BitVector.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/Pass.h"
#include "llvm/Transforms/Utils/Local.h"
#include <memory>
#include <unordered_set>

using namespace llvm;
using namespace hlsl;

// Resource rangeID remap.
namespace {

struct ResourceID {
  DXIL::ResourceClass Class; // Resource class.
  unsigned ID;               // Resource ID, as specified on entry.

  bool operator<(const ResourceID &other) const {
    if (Class < other.Class)
      return true;
    if (Class > other.Class)
      return false;
    if (ID < other.ID)
      return true;
    return false;
  }
};

struct RemapEntry {
  ResourceID ResID;           // Resource identity, as specified on entry.
  DxilResourceBase *Resource; // In-memory resource representation.
  unsigned Index; // Index in resource vector - new ID for the resource.
};

typedef std::map<ResourceID, RemapEntry> RemapEntryCollection;
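
// Record a remap entry for each resource whose current ID differs from its
// dense, zero-based position in the resource vector.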
template <typename TResource>
void BuildRewrites(const std::vector<std::unique_ptr<TResource>> &Rs,
                   RemapEntryCollection &C) {
  const unsigned s = (unsigned)Rs.size();
  for (unsigned i = 0; i < s; ++i) {
    const std::unique_ptr<TResource> &R = Rs[i];
    if (R->GetID() != i) {
      ResourceID RId = {R->GetClass(), R->GetID()};
      RemapEntry RE = {RId, R.get(), i};
      C[RId] = RE;
    }
  }
}
// Build m_rewrites, returns 'true' if any rewrites are needed.
bool BuildRewriteMap(RemapEntryCollection &rewrites, DxilModule &DM) {
  BuildRewrites(DM.GetCBuffers(), rewrites);
  BuildRewrites(DM.GetSRVs(), rewrites);
  BuildRewrites(DM.GetUAVs(), rewrites);
  BuildRewrites(DM.GetSamplers(), rewrites);
  return !rewrites.empty();
}

void ApplyRewriteMapOnResTable(RemapEntryCollection &rewrites, DxilModule &DM) {
  for (auto &entry : rewrites) {
    entry.second.Resource->SetID(entry.second.Index);
  }
}

} // namespace
class DxilResourceRegisterAllocator {
private:
  SpacesAllocator<unsigned, hlsl::DxilCBuffer> m_reservedCBufferRegisters;
  SpacesAllocator<unsigned, hlsl::DxilSampler> m_reservedSamplerRegisters;
  SpacesAllocator<unsigned, hlsl::DxilResource> m_reservedUAVRegisters;
  SpacesAllocator<unsigned, hlsl::DxilResource> m_reservedSRVRegisters;

  template <typename T>
  static void GatherReservedRegisters(
      const std::vector<std::unique_ptr<T>> &ResourceList,
      SpacesAllocator<unsigned, T> &SAlloc) {
    for (auto &res : ResourceList) {
      if (res->IsAllocated()) {
        typename SpacesAllocator<unsigned, T>::Allocator &Alloc =
            SAlloc.Get(res->GetSpaceID());
        Alloc.ForceInsertAndClobber(res.get(), res->GetLowerBound(),
                                    res->GetUpperBound());
        if (res->IsUnbounded())
          Alloc.SetUnbounded(res.get());
      }
    }
  }
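
  // Two-phase allocation: explicitly bound resources are inserted first
  // (reporting overlaps and duplicate unbounded ranges), then any remaining
  // resources are placed into free ranges of AutoBindingSpace, respecting the
  // reserved registers gathered above.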
  template <typename T>
  static bool
  AllocateRegisters(const std::vector<std::unique_ptr<T>> &resourceList,
                    LLVMContext &Ctx,
                    SpacesAllocator<unsigned, T> &ReservedRegisters,
                    unsigned AutoBindingSpace) {
    bool bChanged = false;
    SpacesAllocator<unsigned, T> SAlloc;

    // Reserve explicitly allocated resources.
    for (auto &res : resourceList) {
      const unsigned space = res->GetSpaceID();
      typename SpacesAllocator<unsigned, T>::Allocator &alloc =
          SAlloc.Get(space);
      typename SpacesAllocator<unsigned, T>::Allocator &reservedAlloc =
          ReservedRegisters.Get(space);
      if (res->IsAllocated()) {
        const unsigned reg = res->GetLowerBound();
        const T *conflict = nullptr;
        if (res->IsUnbounded()) {
          const T *unbounded = alloc.GetUnbounded();
          if (unbounded) {
            Ctx.emitError(Twine("more than one unbounded resource (") +
                          unbounded->GetGlobalName() + (" and ") +
                          res->GetGlobalName() + (") in space ") +
                          Twine(space));
          } else {
            conflict = alloc.Insert(res.get(), reg, res->GetUpperBound());
            if (!conflict) {
              alloc.SetUnbounded(res.get());
              reservedAlloc.SetUnbounded(res.get());
            }
          }
        } else {
          conflict = alloc.Insert(res.get(), reg, res->GetUpperBound());
        }
        if (conflict) {
          Ctx.emitError(((res->IsUnbounded()) ? Twine("unbounded ")
                                              : Twine("")) +
                        Twine("resource ") + res->GetGlobalName() +
                        Twine(" at register ") + Twine(reg) +
                        Twine(" overlaps with resource ") +
                        conflict->GetGlobalName() + Twine(" at register ") +
                        Twine(conflict->GetLowerBound()) + Twine(", space ") +
                        Twine(space));
        } else {
          // Also add this to the reserved (unallocatable) range, if it wasn't
          // already there.
          reservedAlloc.ForceInsertAndClobber(res.get(), res->GetLowerBound(),
                                              res->GetUpperBound());
        }
      }
    }

    // Allocate unallocated resources.
    const unsigned space = AutoBindingSpace;
    typename SpacesAllocator<unsigned, T>::Allocator &alloc0 =
        SAlloc.Get(space);
    typename SpacesAllocator<unsigned, T>::Allocator &reservedAlloc0 =
        ReservedRegisters.Get(space);
    for (auto &res : resourceList) {
      if (res->IsAllocated())
        continue;
      DXASSERT(res->GetSpaceID() == 0,
               "otherwise non-zero space has no user register assignment");
      unsigned reg = 0;
      unsigned end = 0;
      bool allocateSpaceFound = false;
      if (res->IsUnbounded()) {
        if (alloc0.GetUnbounded() != nullptr) {
          const T *unbounded = alloc0.GetUnbounded();
          Ctx.emitError(Twine("more than one unbounded resource (") +
                        unbounded->GetGlobalName() + Twine(" and ") +
                        res->GetGlobalName() + Twine(") in space ") +
                        Twine(space));
          continue;
        }
        if (reservedAlloc0.FindForUnbounded(reg)) {
          end = UINT_MAX;
          allocateSpaceFound = true;
        }
      } else if (reservedAlloc0.Find(res->GetRangeSize(), reg)) {
        end = reg + res->GetRangeSize() - 1;
        allocateSpaceFound = true;
      }
      if (allocateSpaceFound) {
        bool success = reservedAlloc0.Insert(res.get(), reg, end) == nullptr;
        DXASSERT_NOMSG(success);
        success = alloc0.Insert(res.get(), reg, end) == nullptr;
        DXASSERT_NOMSG(success);
        if (res->IsUnbounded()) {
          alloc0.SetUnbounded(res.get());
          reservedAlloc0.SetUnbounded(res.get());
        }
        res->SetLowerBound(reg);
        res->SetSpaceID(space);
        bChanged = true;
      } else {
        Ctx.emitError(((res->IsUnbounded()) ? Twine("unbounded ")
                                            : Twine("")) +
                      Twine("resource ") + res->GetGlobalName() +
                      Twine(" could not be allocated"));
      }
    }
    return bChanged;
  }
public:
  void GatherReservedRegisters(DxilModule &DM) {
    // For backcompat with FXC, shader models 5.0 and below will not
    // auto-allocate resources at a register explicitly assigned to even an
    // unused resource.
    if (DM.GetLegacyResourceReservation()) {
      GatherReservedRegisters(DM.GetCBuffers(), m_reservedCBufferRegisters);
      GatherReservedRegisters(DM.GetSamplers(), m_reservedSamplerRegisters);
      GatherReservedRegisters(DM.GetUAVs(), m_reservedUAVRegisters);
      GatherReservedRegisters(DM.GetSRVs(), m_reservedSRVRegisters);
    }
  }
  bool AllocateRegisters(DxilModule &DM) {
    uint32_t AutoBindingSpace = DM.GetAutoBindingSpace();
    if (AutoBindingSpace == UINT_MAX) {
      // For libraries, we don't allocate unless AutoBindingSpace is set.
      if (DM.GetShaderModel()->IsLib())
        return false;
      // For shaders, we allocate in space 0 by default.
      AutoBindingSpace = 0;
    }
    bool bChanged = false;
    bChanged |= AllocateRegisters(DM.GetCBuffers(), DM.GetCtx(),
                                  m_reservedCBufferRegisters, AutoBindingSpace);
    bChanged |= AllocateRegisters(DM.GetSamplers(), DM.GetCtx(),
                                  m_reservedSamplerRegisters, AutoBindingSpace);
    bChanged |= AllocateRegisters(DM.GetUAVs(), DM.GetCtx(),
                                  m_reservedUAVRegisters, AutoBindingSpace);
    bChanged |= AllocateRegisters(DM.GetSRVs(), DM.GetCtx(),
                                  m_reservedSRVRegisters, AutoBindingSpace);
    return bChanged;
  }
};
class DxilCondenseResources : public ModulePass {
private:
  RemapEntryCollection m_rewrites;

public:
  static char ID; // Pass identification, replacement for typeid
  explicit DxilCondenseResources() : ModulePass(ID) {}

  const char *getPassName() const override { return "DXIL Condense Resources"; }

  bool runOnModule(Module &M) override {
    DxilModule &DM = M.GetOrCreateDxilModule();
    // Skip lib.
    if (DM.GetShaderModel()->IsLib())
      return false;

    // Gather reserved resource registers while we still have
    // unused resources that might have explicit register assignments.
    DxilResourceRegisterAllocator ResourceRegisterAllocator;
    ResourceRegisterAllocator.GatherReservedRegisters(DM);

    // Remove unused resources.
    DM.RemoveUnusedResources();

    // Make sure all resource types are dense; build a map of rewrites.
    if (BuildRewriteMap(m_rewrites, DM)) {
      // Rewrite all instructions that refer to resources in the map.
      ApplyRewriteMap(DM);
    }

    bool hasResource = DM.GetCBuffers().size() || DM.GetUAVs().size() ||
                       DM.GetSRVs().size() || DM.GetSamplers().size();
    if (hasResource) {
      if (!DM.GetShaderModel()->IsLib()) {
        ResourceRegisterAllocator.AllocateRegisters(DM);
        PatchCreateHandle(DM);
      }
    }
    return true;
  }

  DxilResourceBase &GetFirstRewrite() const {
    DXASSERT_NOMSG(!m_rewrites.empty());
    return *m_rewrites.begin()->second.Resource;
  }
private:
  void ApplyRewriteMap(DxilModule &DM);
  // Add the resource's lower bound to the createHandle index operand.
  void PatchCreateHandle(DxilModule &DM);
};
void DxilCondenseResources::ApplyRewriteMap(DxilModule &DM) {
  for (Function &F : DM.GetModule()->functions()) {
    if (F.isDeclaration()) {
      continue;
    }
    for (inst_iterator iter = inst_begin(F), E = inst_end(F); iter != E;
         ++iter) {
      llvm::Instruction &I = *iter;
      DxilInst_CreateHandle CH(&I);
      if (!CH)
        continue;

      ResourceID RId;
      RId.Class = (DXIL::ResourceClass)CH.get_resourceClass_val();
      RId.ID = (unsigned)llvm::dyn_cast<llvm::ConstantInt>(CH.get_rangeId())
                   ->getZExtValue();
      RemapEntryCollection::iterator it = m_rewrites.find(RId);
      if (it == m_rewrites.end()) {
        continue;
      }

      CallInst *CI = cast<CallInst>(&I);
      Value *newRangeID = DM.GetOP()->GetU32Const(it->second.Index);
      CI->setArgOperand(DXIL::OperandIndex::kCreateHandleResIDOpIdx,
                        newRangeID);
    }
  }

  ApplyRewriteMapOnResTable(m_rewrites, DM);
}

namespace {
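// Rebase the createHandle index operand by the resource's allocated lower
// bound, so a range-relative index becomes register-relative. Illustrative
// example: for a range allocated starting at register 3, index i becomes
// i + 3 (folded to a constant when i is constant).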
void PatchLowerBoundOfCreateHandle(CallInst *handle, DxilModule &DM) {
  DxilInst_CreateHandle createHandle(handle);
  DXASSERT_NOMSG(createHandle);

  DXIL::ResourceClass ResClass =
      static_cast<DXIL::ResourceClass>(createHandle.get_resourceClass_val());
  // Dynamic rangeId is not supported - skip and let validation report the
  // error.
  if (!isa<ConstantInt>(createHandle.get_rangeId()))
    return;

  unsigned rangeId =
      cast<ConstantInt>(createHandle.get_rangeId())->getLimitedValue();

  DxilResourceBase *res = nullptr;
  switch (ResClass) {
  case DXIL::ResourceClass::SRV:
    res = &DM.GetSRV(rangeId);
    break;
  case DXIL::ResourceClass::UAV:
    res = &DM.GetUAV(rangeId);
    break;
  case DXIL::ResourceClass::CBuffer:
    res = &DM.GetCBuffer(rangeId);
    break;
  case DXIL::ResourceClass::Sampler:
    res = &DM.GetSampler(rangeId);
    break;
  default:
    DXASSERT(0, "invalid res class");
    return;
  }

  IRBuilder<> Builder(handle);
  unsigned lowBound = res->GetLowerBound();
  if (lowBound) {
    Value *Index = createHandle.get_index();
    if (ConstantInt *cIndex = dyn_cast<ConstantInt>(Index)) {
      unsigned newIdx = lowBound + cIndex->getLimitedValue();
      handle->setArgOperand(DXIL::OperandIndex::kCreateHandleResIndexOpIdx,
                            Builder.getInt32(newIdx));
    } else {
      Value *newIdx = Builder.CreateAdd(Index, Builder.getInt32(lowBound));
      handle->setArgOperand(DXIL::OperandIndex::kCreateHandleResIndexOpIdx,
                            newIdx);
    }
  }
}
} // namespace
void DxilCondenseResources::PatchCreateHandle(DxilModule &DM) {
  Function *createHandle = DM.GetOP()->GetOpFunc(DXIL::OpCode::CreateHandle,
                                                 Type::getVoidTy(DM.GetCtx()));

  for (User *U : createHandle->users()) {
    PatchLowerBoundOfCreateHandle(cast<CallInst>(U), DM);
  }
}

char DxilCondenseResources::ID = 0;

bool llvm::AreDxilResourcesDense(llvm::Module *M,
                                 hlsl::DxilResourceBase **ppNonDense) {
  DxilModule &DM = M->GetOrCreateDxilModule();
  RemapEntryCollection rewrites;
  if (BuildRewriteMap(rewrites, DM)) {
    *ppNonDense = rewrites.begin()->second.Resource;
    return false;
  } else {
    *ppNonDense = nullptr;
    return true;
  }
}

ModulePass *llvm::createDxilCondenseResourcesPass() {
  return new DxilCondenseResources();
}

INITIALIZE_PASS(DxilCondenseResources, "hlsl-dxil-condense",
                "DXIL Condense Resources", false, false)
namespace {

class DxilLowerCreateHandleForLib : public ModulePass {
private:
  RemapEntryCollection m_rewrites;
  DxilModule *m_DM;
  bool m_HasDbgInfo;
  bool m_bIsLib;
  bool m_bLegalizationFailed;

public:
  static char ID; // Pass identification, replacement for typeid
  explicit DxilLowerCreateHandleForLib() : ModulePass(ID) {}

  const char *getPassName() const override {
    return "DXIL Lower createHandleForLib";
  }
  bool runOnModule(Module &M) override {
    DxilModule &DM = M.GetOrCreateDxilModule();
    m_DM = &DM;
    // Clear llvm.used so that unused resources can be removed.
    m_DM->ClearLLVMUsed();
    m_bIsLib = DM.GetShaderModel()->IsLib();
    m_bLegalizationFailed = false;

    bool bChanged = false;
    unsigned numResources = DM.GetCBuffers().size() + DM.GetUAVs().size() +
                            DM.GetSRVs().size() + DM.GetSamplers().size();
    if (!numResources)
      return false;

    // Switch tbuffers to SRVs, as they have been treated as cbuffers up to
    // this point.
    if (DM.GetCBuffers().size())
      bChanged = PatchTBuffers(DM) || bChanged;

    // Gather reserved resource registers while we still have
    // unused resources that might have explicit register assignments.
    DxilResourceRegisterAllocator ResourceRegisterAllocator;
    ResourceRegisterAllocator.GatherReservedRegisters(DM);

    // Remove unused resources.
    DM.RemoveResourcesWithUnusedSymbols();

    unsigned newResources = DM.GetCBuffers().size() + DM.GetUAVs().size() +
                            DM.GetSRVs().size() + DM.GetSamplers().size();
    bChanged = bChanged || (numResources != newResources);

    if (0 == newResources)
      return bChanged;

    bChanged |= ResourceRegisterAllocator.AllocateRegisters(DM);

    if (m_bIsLib &&
        DM.GetShaderModel()->GetMinor() == ShaderModel::kOfflineMinor)
      return bChanged;

    // Make sure no select on resource.
    bChanged |= RemovePhiOnResource();

    if (m_bIsLib || m_bLegalizationFailed)
      return bChanged;

    bChanged = true;

    // Load up debug information, to cross-reference values and the
    // instructions used to load them.
    m_HasDbgInfo = getDebugMetadataVersionFromModule(M) != 0;

    GenerateDxilResourceHandles();

    if (DM.GetOP()->UseMinPrecision())
      UpdateStructTypeForLegacyLayout();

    // Change resource symbol into undef.
    UpdateResourceSymbols();

    // Remove unused createHandleForLib functions.
    dxilutil::RemoveUnusedFunctions(M, DM.GetEntryFunction(),
                                    DM.GetPatchConstantFunction(), m_bIsLib);

    return bChanged;
  }
private:
  bool RemovePhiOnResource();
  void UpdateResourceSymbols();
  void TranslateDxilResourceUses(DxilResourceBase &res);
  void GenerateDxilResourceHandles();
  void UpdateStructTypeForLegacyLayout();
  // Switch CBuffer to SRV for TBuffers.
  bool PatchTBuffers(DxilModule &DM);
  void PatchTBufferUse(Value *V, DxilModule &DM);
};

} // namespace
// Phi on resource.
namespace {

typedef std::unordered_map<Value *, Value *> ValueToValueMap;
typedef llvm::SetVector<Value *> ValueSetVector;
typedef llvm::SmallVector<Value *, 4> IndexVector;
typedef std::unordered_map<Value *, IndexVector> ValueToIdxMap;

//#define SUPPORT_SELECT_ON_ALLOCA

// Errors:
class ResourceUseErrors {
  bool m_bErrorsReported;

public:
  ResourceUseErrors() : m_bErrorsReported(false) {}
  enum ErrorCode {
    // Collision between use of one resource GV and another.
    // All uses must be guaranteed to resolve to only one GV.
    // Additionally, when writing a resource to an alloca, all uses
    // of that alloca are considered to resolve to a single GV.
    GVConflicts,

    // Static global resources are disallowed for libraries at this time.
    // For non-library targets, they should have been eliminated already.
    StaticGVUsed,

    // User function calls with resource params or return type are
    // currently disallowed for libraries.
    UserCallsWithResources,

    // When searching up from a store pointer looking for the alloca,
    // we encountered an unexpected value type.
    UnexpectedValuesFromStorePointer,

    // When remapping values to be replaced, we add them to RemappedValues
    // so we don't use dead values stored in other sets/maps. Circular
    // remaps that should not happen are added to RemappingCyclesDetected.
    RemappingCyclesDetected,

    // Without SUPPORT_SELECT_ON_ALLOCA, phi/select on an alloca-based
    // pointer is disallowed, since this scenario is still untested.
    // This error also covers any other unknown alloca pointer uses.
    // Supported:
    //   alloca (-> gep)? -> load -> ...
    //   alloca (-> gep)? -> store.
    // Unsupported without SUPPORT_SELECT_ON_ALLOCA:
    //   alloca (-> gep)? -> phi/select -> ...
    AllocaUserDisallowed,

#ifdef SUPPORT_SELECT_ON_ALLOCA
    // Conflict in select/phi between GV pointer and alloca pointer. This
    // algorithm can't handle this case.
    AllocaSelectConflict,
#endif

    ErrorCodeCount
  };

  const StringRef ErrorText[ErrorCodeCount] = {
      "local resource not guaranteed to map to unique global resource.",
      "static global resource use is disallowed for library functions.",
      "exported library functions cannot have resource parameters or return value.",
      "internal error: unexpected instruction type when looking for alloca from store.",
      "internal error: cycles detected in value remapping.",
      "phi/select disallowed on pointers to local resources."
#ifdef SUPPORT_SELECT_ON_ALLOCA
      ,"unable to resolve merge of global and local resource pointers."
#endif
  };
  ValueSetVector ErrorSets[ErrorCodeCount];

  // Ultimately, the goal of ErrorUsers is to mark all create handles
  // so we don't try to report errors on them again later.
  std::unordered_set<Value *> ErrorUsers; // users of error values
  bool AddErrorUsers(Value *V) {
    auto it = ErrorUsers.insert(V);
    if (!it.second)
      return false; // already there

    if (isa<GEPOperator>(V) || isa<LoadInst>(V) || isa<PHINode>(V) ||
        isa<SelectInst>(V) || isa<AllocaInst>(V)) {
      for (auto U : V->users()) {
        AddErrorUsers(U);
      }
    } else if (isa<StoreInst>(V)) {
      AddErrorUsers(cast<StoreInst>(V)->getPointerOperand());
    }
    // create handle will be marked, but users not followed
    return true;
  }
  void ReportError(ErrorCode ec, Value *V) {
    DXASSERT_NOMSG(ec < ErrorCodeCount);
    if (!ErrorSets[ec].insert(V))
      return; // Error already reported
    AddErrorUsers(V);
    m_bErrorsReported = true;
    if (Instruction *I = dyn_cast<Instruction>(V)) {
      dxilutil::EmitErrorOnInstruction(I, ErrorText[ec]);
    } else {
      StringRef Name = V->getName();
      std::string escName;
      if (isa<Function>(V)) {
        llvm::raw_string_ostream os(escName);
        dxilutil::PrintEscapedString(Name, os);
        os.flush();
        Name = escName;
      }
      Twine msg = Twine(ErrorText[ec]) + " Value: " + Name;
      V->getContext().emitError(msg);
    }
  }

  bool ErrorsReported() { return m_bErrorsReported; }
};
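
// Example (illustrative): for a pointer to [4 x [3 x <resource type>]],
// CountArrayDimensions returns 2 and, when requested, fills dims with {4, 3}.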
unsigned CountArrayDimensions(Type *Ty,
                              // Optionally collect dimensions
                              SmallVector<unsigned, 4> *dims = nullptr) {
  if (Ty->isPointerTy())
    Ty = Ty->getPointerElementType();
  unsigned dim = 0;
  if (dims)
    dims->clear();
  while (Ty->isArrayTy()) {
    if (dims)
      dims->push_back(Ty->getArrayNumElements());
    dim++;
    Ty = Ty->getArrayElementType();
  }
  return dim;
}
// Helper class for legalizing resource use.
// Convert select/phi on resources to select/phi on index to GEP on GV.
// Convert resource alloca to index alloca.
// Assumes createHandleForLib has no select/phi.
class LegalizeResourceUseHelper {
  // Change:
  //   gep1 = GEP gRes, i1
  //   res1 = load gep1
  //   gep2 = GEP gRes, i2
  //   gep3 = GEP gRes, i3
  //   gep4 = phi gep2, gep3          <-- handle select/phi on GEP
  //   res4 = load gep4
  //   res5 = phi res1, res4
  //   res6 = load GEP gRes, 23       <-- handle constant GepExpression
  //   res = select cnd2, res5, res6
  //   handle = createHandleForLib(res)
  // To:
  //   i4 = phi i2, i3
  //   i5 = phi i1, i4
  //   i6 = select cnd, i5, 23
  //   gep = GEP gRes, i6
  //   res = load gep
  //   handle = createHandleForLib(res)
  //
  // Also handles alloca:
  //   resArray = alloca [2 x Resource]
  //   gep1 = GEP gRes, i1
  //   res1 = load gep1
  //   gep2 = GEP gRes, i2
  //   gep3 = GEP gRes, i3
  //   phi4 = phi gep2, gep3
  //   res4 = load phi4
  //   gep5 = GEP resArray, 0
  //   gep6 = GEP resArray, 1
  //   store gep5, res1
  //   store gep6, res4
  //   gep7 = GEP resArray, i7        <-- dynamically index array
  //   res = load gep7
  //   handle = createHandleForLib(res)
  // Desired result:
  //   idxArray = alloca [2 x i32]
  //   phi4 = phi i2, i3
  //   gep5 = GEP idxArray, 0
  //   gep6 = GEP idxArray, 1
  //   store gep5, i1
  //   store gep6, phi4
  //   gep7 = GEP idxArray, i7
  //   gep8 = GEP gRes, gep7
  //   res = load gep8
  //   handle = createHandleForLib(res)
  //
  // Also handles multi-dim resource index and multi-dim resource array
  // allocas.
  //
  // Basic algorithm:
  // - recursively mark each GV user with GV (ValueToResourceGV)
  //   - verify only one GV used for any given value
  // - handle allocas by searching up from store for alloca
  //   - then recursively mark alloca users
  // - ResToIdxReplacement keeps track of the vector of indices that
  //   will be used to replace a given resource value or pointer
  // - Next, create selects/phis for indices corresponding to
  //   selects/phis on resource pointers or values
  //   - leave incoming index values undef for now
  // - Create index allocas to replace resource allocas
  // - Create GEPs on index allocas to replace GEPs on resource allocas
  // - Create index loads on index allocas to replace loads on resource alloca
  //   GEPs
  // - Fill in replacements for GEPs on resource GVs
  //   - copy replacement index vectors to corresponding loads
  // - Create index stores to replace resource stores to alloca/GEPs
  // - Update selects/phis incoming index values
  // - SimplifyMerges: replace index phis/selects on same value with that value
  //   - RemappedValues[phi/select] set to replacement value
  //   - use LookupValue from now on when reading from ResToIdxReplacement
  // - Update handles by replacing load/GEP chains that go through select/phi
  //   with direct GV GEP + load, with select/phi on GEP indices instead.
public:
  ResourceUseErrors m_Errors;

  ValueToValueMap ValueToResourceGV;
  ValueToIdxMap ResToIdxReplacement;

  // Value sets we can use to iterate
  ValueSetVector Selects, GEPs, Stores, Handles;
  ValueSetVector Allocas, AllocaGEPs, AllocaLoads;
#ifdef SUPPORT_SELECT_ON_ALLOCA
  ValueSetVector AllocaSelects;
#endif

  std::unordered_set<Value *> NonUniformSet;

  // New index selects created by pass, so we can try simplifying later
  ValueSetVector NewSelects;

  // Values that have been replaced with other values need remapping
  ValueToValueMap RemappedValues;

  // Things to clean up if no users:
  std::unordered_set<Instruction *> CleanupInsts;

  GlobalVariable *LookupResourceGV(Value *V) {
    auto itGV = ValueToResourceGV.find(V);
    if (itGV == ValueToResourceGV.end())
      return nullptr;
    return cast<GlobalVariable>(itGV->second);
  }

  // Follow RemappedValues, return input if not remapped
  Value *LookupValue(Value *V) {
    auto it = RemappedValues.find(V);
    SmallPtrSet<Value *, 4> visited;
    while (it != RemappedValues.end()) {
      // Cycles should not happen, but are bad if they do.
      if (visited.count(it->second)) {
        DXASSERT(false, "otherwise, circular remapping");
        m_Errors.ReportError(ResourceUseErrors::RemappingCyclesDetected, V);
        break;
      }
      V = it->second;
      it = RemappedValues.find(V);
      if (it != RemappedValues.end())
        visited.insert(V);
    }
    return V;
  }
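
  // A load is "trivial" when every user is a createHandleForLib call; such a
  // load needs no index rewriting.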
  bool AreLoadUsersTrivial(LoadInst *LI) {
    for (auto U : LI->users()) {
      if (CallInst *CI = dyn_cast<CallInst>(U)) {
        Function *F = CI->getCalledFunction();
        DxilModule &DM = F->getParent()->GetDxilModule();
        hlsl::OP *hlslOP = DM.GetOP();
        if (hlslOP->IsDxilOpFunc(F)) {
          hlsl::OP::OpCodeClass opClass;
          if (hlslOP->GetOpCodeClass(F, opClass) &&
              opClass == DXIL::OpCodeClass::CreateHandleForLib) {
            continue;
          }
        }
      }
      return false;
    }
    return true;
  }

  // This is used to quickly skip the common case where no work is needed.
  bool AreGEPUsersTrivial(GEPOperator *GEP) {
    if (GlobalVariable *GV = LookupResourceGV(GEP)) {
      if (GEP->getPointerOperand() != GV)
        return false;
    }
    for (auto U : GEP->users()) {
      if (LoadInst *LI = dyn_cast<LoadInst>(U)) {
        if (AreLoadUsersTrivial(LI))
          continue;
      }
      return false;
    }
    return true;
  }
  // AssignResourceGVFromStore is used on the pointer being stored to.
  // Follow GEP/Phi/Select up to the Alloca, then CollectResourceGVUsers on
  // the Alloca.
  void AssignResourceGVFromStore(GlobalVariable *GV, Value *V,
                                 SmallPtrSet<Value *, 4> &visited,
                                 bool bNonUniform) {
    // Prevent cycles as we search up
    if (visited.count(V) != 0)
      return;
    // Verify and skip if already processed
    auto it = ValueToResourceGV.find(V);
    if (it != ValueToResourceGV.end()) {
      if (it->second != GV) {
        m_Errors.ReportError(ResourceUseErrors::GVConflicts, V);
      }
      return;
    }
    if (AllocaInst *AI = dyn_cast<AllocaInst>(V)) {
      CollectResourceGVUsers(GV, AI, /*bAlloca*/ true, bNonUniform);
      return;
    } else if (GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
      // follow the pointer up
      AssignResourceGVFromStore(GV, GEP->getPointerOperand(), visited,
                                bNonUniform);
      return;
    } else if (PHINode *Phi = dyn_cast<PHINode>(V)) {
#ifdef SUPPORT_SELECT_ON_ALLOCA
      // follow all incoming values
      for (auto it : Phi->operand_values())
        AssignResourceGVFromStore(GV, it, visited, bNonUniform);
#else
      m_Errors.ReportError(ResourceUseErrors::AllocaUserDisallowed, V);
#endif
      return;
    } else if (SelectInst *Sel = dyn_cast<SelectInst>(V)) {
#ifdef SUPPORT_SELECT_ON_ALLOCA
      // follow all incoming values
      AssignResourceGVFromStore(GV, Sel->getTrueValue(), visited, bNonUniform);
      AssignResourceGVFromStore(GV, Sel->getFalseValue(), visited, bNonUniform);
#else
      m_Errors.ReportError(ResourceUseErrors::AllocaUserDisallowed, V);
#endif
      return;
    } else if (isa<GlobalVariable>(V) &&
               cast<GlobalVariable>(V)->getLinkage() ==
                   GlobalVariable::LinkageTypes::InternalLinkage) {
      // This is writing to a global static, which is disallowed at this point.
      m_Errors.ReportError(ResourceUseErrors::StaticGVUsed, V);
      return;
    } else {
      // Most likely storing to an output parameter.
      m_Errors.ReportError(ResourceUseErrors::UserCallsWithResources, V);
      return;
    }
    return;
  }
  // Recursively mark values with GV, following users.
  // Starting value V should be GV itself.
  // Reports a GVConflicts error if a value or its uses resolve to more than
  // one GV in the map.
  void CollectResourceGVUsers(GlobalVariable *GV, Value *V,
                              bool bAlloca = false, bool bNonUniform = false) {
    // Recursively tag value V and its users as using GV.
    auto it = ValueToResourceGV.find(V);
    if (it != ValueToResourceGV.end()) {
      if (it->second != GV) {
        m_Errors.ReportError(ResourceUseErrors::GVConflicts, V);
#ifdef SUPPORT_SELECT_ON_ALLOCA
      } else {
        // if select/phi, make sure bAlloca is consistent
        if (isa<PHINode>(V) || isa<SelectInst>(V))
          if ((bAlloca && AllocaSelects.count(V) == 0) ||
              (!bAlloca && Selects.count(V) == 0))
            m_Errors.ReportError(ResourceUseErrors::AllocaSelectConflict, V);
#endif
      }
      return;
    }
    ValueToResourceGV[V] = GV;
    if (GV == V) {
      // Just add and recurse users
      // make sure bAlloca is clear for users
      bAlloca = false;
    } else if (GEPOperator *GEP = dyn_cast<GEPOperator>(V)) {
      if (bAlloca)
        AllocaGEPs.insert(GEP);
      else if (!AreGEPUsersTrivial(GEP))
        GEPs.insert(GEP);
      else
        return; // Optimization: skip trivial GV->GEP->load->createHandle
      if (GetElementPtrInst *GEPInst = dyn_cast<GetElementPtrInst>(GEP)) {
        if (DxilMDHelper::IsMarkedNonUniform(GEPInst))
          bNonUniform = true;
      }
    } else if (LoadInst *LI = dyn_cast<LoadInst>(V)) {
      if (bAlloca)
        AllocaLoads.insert(LI);
      // clear bAlloca for users
      bAlloca = false;
      if (bNonUniform)
        NonUniformSet.insert(LI);
    } else if (StoreInst *SI = dyn_cast<StoreInst>(V)) {
      Stores.insert(SI);
      if (!bAlloca) {
        // Find and mark allocas this store could be storing to
        SmallPtrSet<Value *, 4> visited;
        AssignResourceGVFromStore(GV, SI->getPointerOperand(), visited,
                                  bNonUniform);
      }
      return;
    } else if (PHINode *Phi = dyn_cast<PHINode>(V)) {
      if (bAlloca) {
#ifdef SUPPORT_SELECT_ON_ALLOCA
        AllocaSelects.insert(Phi);
#else
        m_Errors.ReportError(ResourceUseErrors::AllocaUserDisallowed, V);
#endif
      } else {
        Selects.insert(Phi);
      }
    } else if (SelectInst *Sel = dyn_cast<SelectInst>(V)) {
      if (bAlloca) {
#ifdef SUPPORT_SELECT_ON_ALLOCA
        AllocaSelects.insert(Sel);
#else
        m_Errors.ReportError(ResourceUseErrors::AllocaUserDisallowed, V);
#endif
      } else {
        Selects.insert(Sel);
      }
    } else if (AllocaInst *AI = dyn_cast<AllocaInst>(V)) {
      Allocas.insert(AI);
      // set bAlloca for users
      bAlloca = true;
    } else if (Constant *C = dyn_cast<Constant>(V)) {
      // skip @llvm.used entry
      return;
    } else if (bAlloca) {
      m_Errors.ReportError(ResourceUseErrors::AllocaUserDisallowed, V);
    } else {
      // Must be createHandleForLib or user function call.
      CallInst *CI = cast<CallInst>(V);
      Function *F = CI->getCalledFunction();
      DxilModule &DM = GV->getParent()->GetDxilModule();
      hlsl::OP *hlslOP = DM.GetOP();
      if (hlslOP->IsDxilOpFunc(F)) {
        hlsl::OP::OpCodeClass opClass;
        if (hlslOP->GetOpCodeClass(F, opClass) &&
            opClass == DXIL::OpCodeClass::CreateHandleForLib) {
          Handles.insert(CI);
          if (bNonUniform)
            NonUniformSet.insert(CI);
          return;
        }
      }
      // This could be user call with resource param, which is disallowed for
      // lib_6_3.
      m_Errors.ReportError(ResourceUseErrors::UserCallsWithResources, V);
      return;
    }

    // Recurse users
    for (auto U : V->users())
      CollectResourceGVUsers(GV, U, bAlloca, bNonUniform);
    return;
  }
  // Remove conflicting values from sets before
  // transforming the remainder.
  void RemoveConflictingValue(Value *V) {
    bool bRemoved = false;
    if (isa<GEPOperator>(V)) {
      bRemoved = GEPs.remove(V) || AllocaGEPs.remove(V);
    } else if (isa<LoadInst>(V)) {
      bRemoved = AllocaLoads.remove(V);
    } else if (isa<StoreInst>(V)) {
      bRemoved = Stores.remove(V);
    } else if (isa<PHINode>(V) || isa<SelectInst>(V)) {
      bRemoved = Selects.remove(V);
#ifdef SUPPORT_SELECT_ON_ALLOCA
      bRemoved |= AllocaSelects.remove(V);
#endif
    } else if (isa<AllocaInst>(V)) {
      bRemoved = Allocas.remove(V);
    } else if (isa<CallInst>(V)) {
      bRemoved = Handles.remove(V);
      return; // don't recurse
    }
    if (bRemoved) {
      // Recurse users
      for (auto U : V->users())
        RemoveConflictingValue(U);
    }
  }

  void RemoveConflicts() {
    for (auto V : m_Errors.ErrorSets[ResourceUseErrors::GVConflicts]) {
      RemoveConflictingValue(V);
      ValueToResourceGV.erase(V);
    }
  }
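
  // For each phi/select on a resource pointer or value, create matching i32
  // index phis/selects, one per array dimension of the associated resource GV.
  // Incoming values start as undef and are wired up later in UpdateSelects.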
  void CreateSelects() {
    if (Selects.empty()
#ifdef SUPPORT_SELECT_ON_ALLOCA
        && AllocaSelects.empty()
#endif
    )
      return;
    LLVMContext &Ctx =
#ifdef SUPPORT_SELECT_ON_ALLOCA
        Selects.empty() ? AllocaSelects[0]->getContext() :
#endif
                        Selects[0]->getContext();
    Type *i32Ty = IntegerType::getInt32Ty(Ctx);
#ifdef SUPPORT_SELECT_ON_ALLOCA
    for (auto &SelectSet : {Selects, AllocaSelects}) {
      bool bAlloca = !(&SelectSet == &Selects);
#else
    for (auto &SelectSet : {Selects}) {
#endif
      for (auto pValue : SelectSet) {
        Type *SelectTy = i32Ty;
#ifdef SUPPORT_SELECT_ON_ALLOCA
        // For alloca case, type needs to match dimensionality of incoming
        // value.
        if (bAlloca) {
          // TODO: Not sure if this case will actually work
          // (or whether it can even be generated from HLSL)
          Type *Ty = pValue->getType();
          SmallVector<unsigned, 4> dims;
          unsigned dim = CountArrayDimensions(Ty, &dims);
          for (unsigned i = 0; i < dim; i++)
            SelectTy = ArrayType::get(SelectTy, (uint64_t)dims[dim - i - 1]);
          if (Ty->isPointerTy())
            SelectTy = PointerType::get(SelectTy, 0);
        }
#endif
        Value *UndefValue = UndefValue::get(SelectTy);
        if (PHINode *Phi = dyn_cast<PHINode>(pValue)) {
          GlobalVariable *GV = LookupResourceGV(Phi);
          if (!GV)
            continue; // skip value removed due to conflict
          IRBuilder<> PhiBuilder(Phi);
          unsigned gvDim = CountArrayDimensions(GV->getType());
          IndexVector &idxVector = ResToIdxReplacement[Phi];
          idxVector.resize(gvDim, nullptr);
          unsigned numIncoming = Phi->getNumIncomingValues();
          for (unsigned i = 0; i < gvDim; i++) {
            PHINode *newPhi = PhiBuilder.CreatePHI(SelectTy, numIncoming);
            NewSelects.insert(newPhi);
            idxVector[i] = newPhi;
            for (unsigned j = 0; j < numIncoming; j++) {
              // Set incoming values to undef until next pass
              newPhi->addIncoming(UndefValue, Phi->getIncomingBlock(j));
            }
          }
        } else if (SelectInst *Sel = dyn_cast<SelectInst>(pValue)) {
          GlobalVariable *GV = LookupResourceGV(Sel);
          if (!GV)
            continue; // skip value removed due to conflict
          IRBuilder<> Builder(Sel);
          unsigned gvDim = CountArrayDimensions(GV->getType());
          IndexVector &idxVector = ResToIdxReplacement[Sel];
          idxVector.resize(gvDim, nullptr);
          for (unsigned i = 0; i < gvDim; i++) {
            Value *newSel = Builder.CreateSelect(Sel->getCondition(),
                                                 UndefValue, UndefValue);
            NewSelects.insert(newSel);
            idxVector[i] = newSel;
          }
        } else {
          DXASSERT(false, "otherwise, non-select/phi in Selects set");
        }
      }
    }
  }
  // Create index allocas to replace resource allocas
  void CreateIndexAllocas() {
    if (Allocas.empty())
      return;
    Type *i32Ty = IntegerType::getInt32Ty(Allocas[0]->getContext());
    for (auto pValue : Allocas) {
      AllocaInst *pAlloca = cast<AllocaInst>(pValue);
      GlobalVariable *GV = LookupResourceGV(pAlloca);
      if (!GV)
        continue; // skip value removed due to conflict
      IRBuilder<> AllocaBuilder(pAlloca);
      unsigned gvDim = CountArrayDimensions(GV->getType());
      SmallVector<unsigned, 4> dimVector;
      unsigned allocaTyDim = CountArrayDimensions(pAlloca->getType(),
                                                  &dimVector);
      Type *pIndexType = i32Ty;
      for (unsigned i = 0; i < allocaTyDim; i++) {
        pIndexType = ArrayType::get(pIndexType, dimVector[allocaTyDim - i - 1]);
      }
      Value *arraySize = pAlloca->getArraySize();
      IndexVector &idxVector = ResToIdxReplacement[pAlloca];
      idxVector.resize(gvDim, nullptr);
      for (unsigned i = 0; i < gvDim; i++) {
        AllocaInst *pIdxAlloca =
            AllocaBuilder.CreateAlloca(pIndexType, arraySize);
        pIdxAlloca->setAlignment(4);
        idxVector[i] = pIdxAlloca;
      }
    }
  }
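
  // Mirror each GEP on a resource alloca with GEPs on the corresponding index
  // allocas, reusing the same GEP indices (illustrative: GEP resArray, 0, j
  // becomes GEP idxArray, 0, j for each index alloca).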
  // Add corresponding GEPs for index allocas
  IndexVector &ReplaceAllocaGEP(GetElementPtrInst *GEP) {
    IndexVector &idxVector = ResToIdxReplacement[GEP];
    if (!idxVector.empty())
      return idxVector;

    Value *Ptr = GEP->getPointerOperand();

    // Recurse for partial GEPs
    IndexVector &ptrIndices =
        isa<GetElementPtrInst>(Ptr)
            ? ReplaceAllocaGEP(cast<GetElementPtrInst>(Ptr))
            : ResToIdxReplacement[Ptr];

    IRBuilder<> Builder(GEP);
    SmallVector<Value *, 4> gepIndices;
    for (auto it = GEP->idx_begin(), idxEnd = GEP->idx_end(); it != idxEnd;
         it++)
      gepIndices.push_back(*it);
    idxVector.resize(ptrIndices.size(), nullptr);
    for (unsigned i = 0; i < ptrIndices.size(); i++) {
      idxVector[i] = Builder.CreateInBoundsGEP(ptrIndices[i], gepIndices);
    }
    return idxVector;
  }

  void ReplaceAllocaGEPs() {
    for (auto V : AllocaGEPs) {
      ReplaceAllocaGEP(cast<GetElementPtrInst>(V));
    }
  }

  void ReplaceAllocaLoads() {
    for (auto V : AllocaLoads) {
      LoadInst *LI = cast<LoadInst>(V);
      Value *Ptr = LI->getPointerOperand();
      IRBuilder<> Builder(LI);
      IndexVector &idxVector = ResToIdxReplacement[V];
      IndexVector &ptrIndices = ResToIdxReplacement[Ptr];
      idxVector.resize(ptrIndices.size(), nullptr);
      for (unsigned i = 0; i < ptrIndices.size(); i++) {
        idxVector[i] = Builder.CreateLoad(ptrIndices[i]);
      }
    }
  }
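
  // Index vectors for GEPs on the resource GV itself: start from the GV's
  // dimension count (or from the partial GEP/phi/select the pointer chains
  // from), default unspecified leading indices to 0, then append this GEP's
  // own indices while skipping its leading zero.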
  // Add GEP to ResToIdxReplacement with indices from incoming + GEP
  IndexVector &ReplaceGVGEPs(GEPOperator *GEP) {
    IndexVector &idxVector = ResToIdxReplacement[GEP];
    // Skip if already done
    // (we recurse into partial GEP and iterate all GEPs)
    if (!idxVector.empty())
      return idxVector;

    Type *i32Ty = IntegerType::getInt32Ty(GEP->getContext());
    Constant *Zero = Constant::getIntegerValue(i32Ty, APInt(32, 0));

    Value *Ptr = GEP->getPointerOperand();
    unsigned idx = 0;
    if (GlobalVariable *GV = dyn_cast<GlobalVariable>(Ptr)) {
      unsigned gvDim = CountArrayDimensions(GV->getType());
      idxVector.resize(gvDim, Zero);
    } else if (isa<GEPOperator>(Ptr) || isa<PHINode>(Ptr) ||
               isa<SelectInst>(Ptr)) {
      // Recurse for partial GEPs
      IndexVector &ptrIndices = isa<GEPOperator>(Ptr)
                                    ? ReplaceGVGEPs(cast<GEPOperator>(Ptr))
                                    : ResToIdxReplacement[Ptr];
      unsigned ptrDim = CountArrayDimensions(Ptr->getType());
      unsigned gvDim = ptrIndices.size();
      DXASSERT(
          ptrDim <= gvDim,
          "otherwise incoming pointer has more dimensions than associated GV");
      unsigned gepStart = gvDim - ptrDim;
      // Copy indices and add ours
      idxVector.resize(ptrIndices.size(), Zero);
      for (; idx < gepStart; idx++)
        idxVector[idx] = ptrIndices[idx];
    }
    if (GEP->hasIndices()) {
      auto itIdx = GEP->idx_begin();
      ++itIdx; // Always skip leading zero (we don't support GV+n pointer arith)
      while (itIdx != GEP->idx_end())
        idxVector[idx++] = *itIdx++;
    }
    return idxVector;
  }

  // Add GEPs to ResToIdxReplacement and update loads
  void ReplaceGVGEPs() {
    if (GEPs.empty())
      return;
    for (auto V : GEPs) {
      GEPOperator *GEP = cast<GEPOperator>(V);
      IndexVector &gepVector = ReplaceGVGEPs(GEP);
      for (auto U : GEP->users()) {
        if (LoadInst *LI = dyn_cast<LoadInst>(U)) {
          // Just copy incoming indices
          ResToIdxReplacement[LI] = gepVector;
        }
      }
    }
  }
  // Create new index stores for incoming indices
  void ReplaceStores() {
    // generate stores of incoming indices to corresponding index pointers
    if (Stores.empty())
      return;
    for (auto V : Stores) {
      StoreInst *SI = cast<StoreInst>(V);
      IRBuilder<> Builder(SI);
      IndexVector &idxVector = ResToIdxReplacement[SI];
      Value *Ptr = SI->getPointerOperand();
      Value *Val = SI->getValueOperand();
      IndexVector &ptrIndices = ResToIdxReplacement[Ptr];
      IndexVector &valIndices = ResToIdxReplacement[Val];
      DXASSERT_NOMSG(ptrIndices.size() == valIndices.size());
      idxVector.resize(ptrIndices.size(), nullptr);
      for (unsigned i = 0; i < idxVector.size(); i++) {
        idxVector[i] = Builder.CreateStore(valIndices[i], ptrIndices[i]);
      }
    }
  }

  // For each Phi/Select: update matching incoming values for new phis
  void UpdateSelects() {
    for (auto V : Selects) {
      // update incoming index values corresponding to incoming resource values
      IndexVector &idxVector = ResToIdxReplacement[V];
      Instruction *I = cast<Instruction>(V);
      unsigned numOperands = I->getNumOperands();
      unsigned startOp = isa<PHINode>(V) ? 0 : 1;
      for (unsigned iOp = startOp; iOp < numOperands; iOp++) {
        IndexVector &incomingIndices = ResToIdxReplacement[I->getOperand(iOp)];
        DXASSERT_NOMSG(idxVector.size() == incomingIndices.size());
        for (unsigned i = 0; i < idxVector.size(); i++) {
          // must be instruction (phi/select)
          Instruction *indexI = cast<Instruction>(idxVector[i]);
          indexI->setOperand(iOp, incomingIndices[i]);
        }
        // Now clear incoming operand (adding to cleanup) to break cycles
        if (Instruction *OpI = dyn_cast<Instruction>(I->getOperand(iOp)))
          CleanupInsts.insert(OpI);
        I->setOperand(iOp, UndefValue::get(I->getType()));
      }
    }
  }
  // ReplaceHandles
  // - iterate handles
  //   - insert GEP using new indices associated with resource value
  //   - load resource from new GEP
  //   - replace resource use in createHandleForLib with new load
  // Assumes: no users of handle are phi/select or store
  void ReplaceHandles() {
    if (Handles.empty())
      return;
    Type *i32Ty = IntegerType::getInt32Ty(Handles[0]->getContext());
    Constant *Zero = Constant::getIntegerValue(i32Ty, APInt(32, 0));
    for (auto V : Handles) {
      CallInst *CI = cast<CallInst>(V);
      DxilInst_CreateHandleForLib createHandle(CI);
      Value *res = createHandle.get_Resource();
      // Skip extra work if nothing between load and create handle
      if (LoadInst *LI = dyn_cast<LoadInst>(res)) {
        Value *Ptr = LI->getPointerOperand();
        if (GEPOperator *GEP = dyn_cast<GEPOperator>(Ptr))
          Ptr = GEP->getPointerOperand();
        if (isa<GlobalVariable>(Ptr))
          continue;
      }
      GlobalVariable *GV = LookupResourceGV(res);
      if (!GV)
        continue; // skip value removed due to conflict
      IRBuilder<> Builder(CI);
      IndexVector &idxVector = ResToIdxReplacement[res];
      DXASSERT(idxVector.size() == CountArrayDimensions(GV->getType()),
               "replacements empty or invalid");
      SmallVector<Value *, 4> gepIndices;
      gepIndices.push_back(Zero);
      for (auto idxVal : idxVector)
        gepIndices.push_back(LookupValue(idxVal));
      Value *GEP = Builder.CreateInBoundsGEP(GV, gepIndices);
      // Mark new GEP instruction non-uniform if necessary
      if (NonUniformSet.count(res) != 0 || NonUniformSet.count(CI) != 0)
        if (GetElementPtrInst *GEPInst = dyn_cast<GetElementPtrInst>(GEP))
          DxilMDHelper::MarkNonUniform(GEPInst);
      LoadInst *LI = Builder.CreateLoad(GEP);
      createHandle.set_Resource(LI);
      if (Instruction *resI = dyn_cast<Instruction>(res))
        CleanupInsts.insert(resI);
    }
  }
  // Delete unused CleanupInsts, restarting when changed
  // Return true if something was deleted
  bool CleanupUnusedValues() {
    // - delete unused CleanupInsts, restarting when changed
    bool bAnyChanges = false;
    bool bChanged = false;
    do {
      bChanged = false;
      for (auto it = CleanupInsts.begin(); it != CleanupInsts.end();) {
        Instruction *I = *(it++);
        if (I->user_empty()) {
          // Add the instruction's operands to CleanupInsts so they are
          // considered for deletion as well.
          for (unsigned iOp = 0; iOp < I->getNumOperands(); iOp++) {
            if (Instruction *opI = dyn_cast<Instruction>(I->getOperand(iOp)))
              CleanupInsts.insert(opI);
          }
          I->eraseFromParent();
          CleanupInsts.erase(I);
          bChanged = true;
        }
      }
      if (bChanged)
        bAnyChanges = true;
    } while (bChanged);
    return bAnyChanges;
  }
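
  // Collapse any newly created index phi/select whose incoming values all
  // resolve to the same value, recording the replacement in RemappedValues.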
  void SimplifyMerges() {
    // Loop if changed
    bool bChanged = false;
    do {
      bChanged = false;
      for (auto V : NewSelects) {
        if (LookupValue(V) != V)
          continue;
        Instruction *I = cast<Instruction>(V);
        unsigned startOp = isa<PHINode>(I) ? 0 : 1;
        Value *newV = dxilutil::MergeSelectOnSameValue(
            cast<Instruction>(V), startOp, I->getNumOperands());
        if (newV) {
          RemappedValues[V] = newV;
          bChanged = true;
        }
      }
    } while (bChanged);
  }
  void CleanupDeadInsts() {
    // Assuming everything was successful:
    // delete stores to allocas to remove cycles
    for (auto V : Stores) {
      StoreInst *SI = cast<StoreInst>(V);
      if (Instruction *I = dyn_cast<Instruction>(SI->getValueOperand()))
        CleanupInsts.insert(I);
      if (Instruction *I = dyn_cast<Instruction>(SI->getPointerOperand()))
        CleanupInsts.insert(I);
      SI->eraseFromParent();
    }
    CleanupUnusedValues();
  }
  void VerifyComplete(DxilModule &DM) {
    // Check that all handles now resolve to a global variable; otherwise,
    // they are likely loading from a resource function parameter, which
    // is disallowed.
    hlsl::OP *hlslOP = DM.GetOP();
    for (Function &F : DM.GetModule()->functions()) {
      if (hlslOP->IsDxilOpFunc(&F)) {
        hlsl::OP::OpCodeClass opClass;
        if (hlslOP->GetOpCodeClass(&F, opClass) &&
            opClass == DXIL::OpCodeClass::CreateHandleForLib) {
          for (auto U : F.users()) {
            CallInst *CI = cast<CallInst>(U);
            if (m_Errors.ErrorUsers.count(CI))
              continue; // Error already reported
            DxilInst_CreateHandleForLib createHandle(CI);
            Value *res = createHandle.get_Resource();
            LoadInst *LI = dyn_cast<LoadInst>(res);
            if (LI) {
              Value *Ptr = LI->getPointerOperand();
              if (GEPOperator *GEP = dyn_cast<GEPOperator>(Ptr))
                Ptr = GEP->getPointerOperand();
              if (isa<GlobalVariable>(Ptr))
                continue;
            }
            // Handle wasn't processed.
            // Right now, the most likely cause is a user call with resources,
            // but this should be updated if there are other reasons for this
            // to happen.
            m_Errors.ReportError(ResourceUseErrors::UserCallsWithResources, U);
          }
        }
      }
    }
  }
  // Fix resource global variable properties: force an external constant
  bool SetExternalConstant(GlobalVariable *GV) {
    if (GV->hasInitializer() || !GV->isConstant() ||
        GV->getLinkage() != GlobalVariable::LinkageTypes::ExternalLinkage) {
      GV->setInitializer(nullptr);
      GV->setConstant(true);
      GV->setLinkage(GlobalVariable::LinkageTypes::ExternalLinkage);
      return true;
    }
    return false;
  }
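  // Collect resource global variable users for every resource class
  // (CBuffers, SRVs, UAVs, Samplers), normalizing each GV to an external
  // constant first. Returns true if any GV was changed.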
  bool CollectResources(DxilModule &DM) {
    bool bChanged = false;
    for (const auto &res : DM.GetCBuffers()) {
      if (GlobalVariable *GV = dyn_cast<GlobalVariable>(res->GetGlobalSymbol())) {
        bChanged |= SetExternalConstant(GV);
        CollectResourceGVUsers(GV, GV);
      }
    }
    for (const auto &res : DM.GetSRVs()) {
      if (GlobalVariable *GV = dyn_cast<GlobalVariable>(res->GetGlobalSymbol())) {
        bChanged |= SetExternalConstant(GV);
        CollectResourceGVUsers(GV, GV);
      }
    }
    for (const auto &res : DM.GetUAVs()) {
      if (GlobalVariable *GV = dyn_cast<GlobalVariable>(res->GetGlobalSymbol())) {
        bChanged |= SetExternalConstant(GV);
        CollectResourceGVUsers(GV, GV);
      }
    }
    for (const auto &res : DM.GetSamplers()) {
      if (GlobalVariable *GV = dyn_cast<GlobalVariable>(res->GetGlobalSymbol())) {
        bChanged |= SetExternalConstant(GV);
        CollectResourceGVUsers(GV, GV);
      }
    }
    return bChanged;
  }
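  // Run the full legalization sequence; skip final cleanup if any errors
  // were reported along the way.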
  void DoTransform() {
    RemoveConflicts();
    CreateSelects();
    CreateIndexAllocas();
    ReplaceAllocaGEPs();
    ReplaceAllocaLoads();
    ReplaceGVGEPs();
    ReplaceStores();
    UpdateSelects();
    SimplifyMerges();
    ReplaceHandles();
    if (!m_Errors.ErrorsReported())
      CleanupDeadInsts();
  }

  bool ErrorsReported() {
    return m_Errors.ErrorsReported();
  }

  bool runOnModule(llvm::Module &M) {
    DxilModule &DM = M.GetOrCreateDxilModule();
    bool bChanged = CollectResources(DM);
    // If no selects or allocas are involved, there isn't anything to do
    if (Selects.empty() && Allocas.empty())
      return bChanged;
    DoTransform();
    VerifyComplete(DM);
    return true;
  }
};
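// Module pass wrapper that runs LegalizeResourceUseHelper over a module.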
class DxilLegalizeResources : public ModulePass {
public:
  static char ID; // Pass identification, replacement for typeid
  explicit DxilLegalizeResources() : ModulePass(ID) {}

  const char *getPassName() const override {
    return "DXIL Legalize Resource Use";
  }

  bool runOnModule(Module &M) override {
    LegalizeResourceUseHelper helper;
    return helper.runOnModule(M);
  }

private:
};

} // namespace

char DxilLegalizeResources::ID = 0;

ModulePass *llvm::createDxilLegalizeResources() {
  return new DxilLegalizeResources();
}

INITIALIZE_PASS(DxilLegalizeResources,
                "hlsl-dxil-legalize-resources",
                "DXIL legalize resource use", false, false)
bool DxilLowerCreateHandleForLib::RemovePhiOnResource() {
  LegalizeResourceUseHelper helper;
  bool bChanged = helper.runOnModule(*m_DM->GetModule());
  if (helper.ErrorsReported())
    m_bLegalizationFailed = true;
  return bChanged;
}
// LegacyLayout.
namespace {

StructType *UpdateStructTypeForLegacyLayout(StructType *ST, bool IsCBuf,
                                            DxilTypeSystem &TypeSys, Module &M);
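// Update a single field type for legacy layout: arrays and vectors recurse on
// their element type, matrices are lowered to an array of row vectors (padded
// to 4 columns for cbuffers), nested structs recurse, and half/small-integer
// scalars are widened to 32-bit types.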
Type *UpdateFieldTypeForLegacyLayout(Type *Ty, bool IsCBuf,
                                     DxilFieldAnnotation &annotation,
                                     DxilTypeSystem &TypeSys, Module &M) {
  DXASSERT(!Ty->isPointerTy(), "struct field should not be a pointer");
  if (Ty->isArrayTy()) {
    Type *EltTy = Ty->getArrayElementType();
    Type *UpdatedTy =
        UpdateFieldTypeForLegacyLayout(EltTy, IsCBuf, annotation, TypeSys, M);
    if (EltTy == UpdatedTy)
      return Ty;
    else
      return ArrayType::get(UpdatedTy, Ty->getArrayNumElements());
  } else if (dxilutil::IsHLSLMatrixType(Ty)) {
    DXASSERT(annotation.HasMatrixAnnotation(), "must be a matrix");
    HLMatrixType MatTy = HLMatrixType::cast(Ty);
    unsigned rows = MatTy.getNumRows();
    unsigned cols = MatTy.getNumColumns();
    Type *EltTy = MatTy.getElementTypeForReg();
    // Get cols and rows from annotation.
    const DxilMatrixAnnotation &matrix = annotation.GetMatrixAnnotation();
    if (matrix.Orientation == MatrixOrientation::RowMajor) {
      rows = matrix.Rows;
      cols = matrix.Cols;
    } else {
      DXASSERT(matrix.Orientation == MatrixOrientation::ColumnMajor, "");
      cols = matrix.Rows;
      rows = matrix.Cols;
    }
    // A cbuffer matrix row must be aligned to 4 * 4 bytes.
    if (IsCBuf)
      cols = 4;
    EltTy =
        UpdateFieldTypeForLegacyLayout(EltTy, IsCBuf, annotation, TypeSys, M);
    Type *rowTy = VectorType::get(EltTy, cols);
    return ArrayType::get(rowTy, rows);
  } else if (StructType *ST = dyn_cast<StructType>(Ty)) {
    return UpdateStructTypeForLegacyLayout(ST, IsCBuf, TypeSys, M);
  } else if (Ty->isVectorTy()) {
    Type *EltTy = Ty->getVectorElementType();
    Type *UpdatedTy =
        UpdateFieldTypeForLegacyLayout(EltTy, IsCBuf, annotation, TypeSys, M);
    if (EltTy == UpdatedTy)
      return Ty;
    else
      return VectorType::get(UpdatedTy, Ty->getVectorNumElements());
  } else {
    Type *i32Ty = Type::getInt32Ty(Ty->getContext());
    // Basic types.
    if (Ty->isHalfTy()) {
      return Type::getFloatTy(Ty->getContext());
    } else if (IntegerType *ITy = dyn_cast<IntegerType>(Ty)) {
      if (ITy->getBitWidth() < 32)
        return i32Ty;
      else
        return Ty;
    } else
      return Ty;
  }
}
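// Rebuild a struct type with legacy-layout field types. Returns the original
// type if nothing changed; otherwise creates (or reuses) a
// "dx.alignment.legacy."-prefixed struct and clones its annotation.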
StructType *UpdateStructTypeForLegacyLayout(StructType *ST, bool IsCBuf,
                                            DxilTypeSystem &TypeSys,
                                            Module &M) {
  bool bUpdated = false;
  unsigned fieldsCount = ST->getNumElements();
  std::vector<Type *> fieldTypes(fieldsCount);
  DxilStructAnnotation *SA = TypeSys.GetStructAnnotation(ST);
  DXASSERT(SA, "must have annotation for struct type");
  for (unsigned i = 0; i < fieldsCount; i++) {
    Type *EltTy = ST->getElementType(i);
    Type *UpdatedTy = UpdateFieldTypeForLegacyLayout(
        EltTy, IsCBuf, SA->GetFieldAnnotation(i), TypeSys, M);
    fieldTypes[i] = UpdatedTy;
    if (EltTy != UpdatedTy)
      bUpdated = true;
  }
  if (!bUpdated) {
    return ST;
  } else {
    std::string legacyName = "dx.alignment.legacy." + ST->getName().str();
    if (StructType *legacyST = M.getTypeByName(legacyName))
      return legacyST;
    StructType *NewST =
        StructType::create(ST->getContext(), fieldTypes, legacyName);
    DxilStructAnnotation *NewSA = TypeSys.AddStructAnnotation(NewST);
    // Clone annotation.
    *NewSA = *SA;
    return NewST;
  }
}
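// Update the element struct type of a resource's global symbol to the legacy
// layout, creating a new "<name>_legacy" global and erasing the old one.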
void UpdateStructTypeForLegacyLayout(DxilResourceBase &Res,
                                     DxilTypeSystem &TypeSys, Module &M) {
  Constant *Symbol = Res.GetGlobalSymbol();
  Type *ElemTy = Symbol->getType()->getPointerElementType();
  bool IsResourceArray = Res.GetRangeSize() != 1;
  if (IsResourceArray) {
    // Support Array of struct buffer.
    if (ElemTy->isArrayTy())
      ElemTy = ElemTy->getArrayElementType();
  }
  StructType *ST = cast<StructType>(ElemTy);
  if (ST->isOpaque()) {
    DXASSERT(Res.GetClass() == DxilResourceBase::Class::CBuffer,
             "Only cbuffer can have opaque struct.");
    return;
  }
  Type *UpdatedST =
      UpdateStructTypeForLegacyLayout(ST, IsResourceArray, TypeSys, M);
  if (ST != UpdatedST) {
    Type *Ty = Symbol->getType()->getPointerElementType();
    if (IsResourceArray) {
      // Support Array of struct buffer.
      if (Ty->isArrayTy()) {
        UpdatedST = ArrayType::get(UpdatedST, Ty->getArrayNumElements());
      }
    }
    GlobalVariable *NewGV = cast<GlobalVariable>(
        M.getOrInsertGlobal(Symbol->getName().str() + "_legacy", UpdatedST));
    Res.SetGlobalSymbol(NewGV);
    // Delete old GV.
    for (auto UserIt = Symbol->user_begin(); UserIt != Symbol->user_end();) {
      Value *User = *(UserIt++);
      if (Instruction *I = dyn_cast<Instruction>(User)) {
        if (!User->user_empty())
          I->replaceAllUsesWith(UndefValue::get(I->getType()));
        I->eraseFromParent();
      } else {
        ConstantExpr *CE = cast<ConstantExpr>(User);
        if (!CE->user_empty())
          CE->replaceAllUsesWith(UndefValue::get(CE->getType()));
      }
    }
    Symbol->removeDeadConstantUsers();
    if (GlobalVariable *GV = dyn_cast<GlobalVariable>(Symbol))
      GV->eraseFromParent();
  }
}
void UpdateStructTypeForLegacyLayoutOnDM(DxilModule &DM) {
  DxilTypeSystem &TypeSys = DM.GetTypeSystem();
  Module &M = *DM.GetModule();
  for (auto &CBuf : DM.GetCBuffers()) {
    UpdateStructTypeForLegacyLayout(*CBuf.get(), TypeSys, M);
  }
  for (auto &UAV : DM.GetUAVs()) {
    if (UAV->GetKind() == DxilResourceBase::Kind::StructuredBuffer)
      UpdateStructTypeForLegacyLayout(*UAV.get(), TypeSys, M);
  }
  for (auto &SRV : DM.GetSRVs()) {
    if (SRV->GetKind() == DxilResourceBase::Kind::StructuredBuffer)
      UpdateStructTypeForLegacyLayout(*SRV.get(), TypeSys, M);
  }
}

} // namespace

void DxilLowerCreateHandleForLib::UpdateStructTypeForLegacyLayout() {
  UpdateStructTypeForLegacyLayoutOnDM(*m_DM);
}
// Change the resource symbol to undef if it is no longer needed.
void DxilLowerCreateHandleForLib::UpdateResourceSymbols() {
  std::vector<GlobalVariable *> &LLVMUsed = m_DM->GetLLVMUsed();
  auto UpdateResourceSymbol = [&LLVMUsed, this](DxilResourceBase *res) {
    if (GlobalVariable *GV = dyn_cast<GlobalVariable>(res->GetGlobalSymbol())) {
      GV->removeDeadConstantUsers();
      DXASSERT(GV->user_empty(), "else resource not lowered");
      res->SetGlobalSymbol(UndefValue::get(GV->getType()));
      if (m_HasDbgInfo)
        LLVMUsed.emplace_back(GV);
    }
  };
  for (auto &&C : m_DM->GetCBuffers()) {
    UpdateResourceSymbol(C.get());
  }
  for (auto &&Srv : m_DM->GetSRVs()) {
    UpdateResourceSymbol(Srv.get());
  }
  for (auto &&Uav : m_DM->GetUAVs()) {
    UpdateResourceSymbol(Uav.get());
  }
  for (auto &&S : m_DM->GetSamplers()) {
    UpdateResourceSymbol(S.get());
  }
}
// Lower createHandleForLib
namespace {

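// Replace every createHandleForLib user of a resource load with the given
// handle, then erase the load itself.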
void ReplaceResourceUserWithHandle(LoadInst *Res, Value *handle) {
  for (auto resUser = Res->user_begin(); resUser != Res->user_end();) {
    Value *V = *(resUser++);
    CallInst *CI = dyn_cast<CallInst>(V);
    DxilInst_CreateHandleForLib createHandle(CI);
    DXASSERT(createHandle, "must be createHandle");
    CI->replaceAllUsesWith(handle);
    CI->eraseFromParent();
  }
  Res->eraseFromParent();
}
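// Find the DIGlobalVariable debug info entry attached to a resource global,
// if any.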
DIGlobalVariable *FindGlobalVariableDebugInfo(GlobalVariable *GV,
                                              DebugInfoFinder &DbgInfoFinder) {
  struct GlobalFinder {
    GlobalVariable *GV;
    bool operator()(llvm::DIGlobalVariable *const arg) const {
      return arg->getVariable() == GV;
    }
  };
  GlobalFinder F = {GV};
  DebugInfoFinder::global_variable_iterator Found =
      std::find_if(DbgInfoFinder.global_variables().begin(),
                   DbgInfoFinder.global_variables().end(), F);
  if (Found != DbgInfoFinder.global_variables().end()) {
    return *Found;
  }
  return nullptr;
}

} // namespace
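// Lower all uses of a resource's global symbol to DXIL CreateHandle calls:
// non-array resources get one handle per defined function, while indexed
// (GEP) uses get a handle computed from the flattened array index plus the
// resource's lower bound.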
void DxilLowerCreateHandleForLib::TranslateDxilResourceUses(
    DxilResourceBase &res) {
  OP *hlslOP = m_DM->GetOP();
  Function *createHandle = hlslOP->GetOpFunc(
      OP::OpCode::CreateHandle, llvm::Type::getVoidTy(m_DM->GetCtx()));
  Value *opArg = hlslOP->GetU32Const((unsigned)OP::OpCode::CreateHandle);
  bool isViewResource = res.GetClass() == DXIL::ResourceClass::SRV ||
                        res.GetClass() == DXIL::ResourceClass::UAV;
  bool isROV = isViewResource && static_cast<DxilResource &>(res).IsROV();
  std::string handleName =
      (res.GetGlobalName() + Twine("_") + Twine(res.GetResClassName())).str();
  if (isViewResource)
    handleName += (Twine("_") + Twine(res.GetResDimName())).str();
  if (isROV)
    handleName += "_ROV";
  Value *resClassArg = hlslOP->GetU8Const(
      static_cast<std::underlying_type<DxilResourceBase::Class>::type>(
          res.GetClass()));
  Value *resIDArg = hlslOP->GetU32Const(res.GetID());
  // resLowerBound will be added after allocation in DxilCondenseResources.
  Value *resLowerBound = hlslOP->GetU32Const(res.GetLowerBound());
  Value *isUniformRes = hlslOP->GetI1Const(0);
  Value *GV = res.GetGlobalSymbol();
  DXASSERT(isa<GlobalValue>(GV),
           "DxilLowerCreateHandleForLib cannot deal with unused resources.");
  Module *pM = m_DM->GetModule();
  // TODO: add debug info to create handle.
  DIVariable *DIV = nullptr;
  DILocation *DL = nullptr;
  if (m_HasDbgInfo) {
    DebugInfoFinder &Finder = m_DM->GetOrCreateDebugInfoFinder();
    DIV = FindGlobalVariableDebugInfo(cast<GlobalVariable>(GV), Finder);
    if (DIV)
      // TODO: how to get col?
      DL =
          DILocation::get(pM->getContext(), DIV->getLine(), 1, DIV->getScope());
  }
  bool isResArray = res.GetRangeSize() > 1;
  std::unordered_map<Function *, Instruction *> handleMapOnFunction;
  Value *createHandleArgs[] = {opArg, resClassArg, resIDArg, resLowerBound,
                               isUniformRes};
  for (iplist<Function>::iterator F : pM->getFunctionList()) {
    if (!F->isDeclaration()) {
      if (!isResArray) {
        IRBuilder<> Builder(dxilutil::FirstNonAllocaInsertionPt(F));
        if (m_HasDbgInfo) {
          // TODO: set debug info.
          // Builder.SetCurrentDebugLocation(DL);
        }
        handleMapOnFunction[F] =
            Builder.CreateCall(createHandle, createHandleArgs, handleName);
      }
    }
  }
  for (auto U = GV->user_begin(), E = GV->user_end(); U != E;) {
    User *user = *(U++);
    // Skip unused user.
    if (user->user_empty())
      continue;
    if (LoadInst *ldInst = dyn_cast<LoadInst>(user)) {
      Function *userF = ldInst->getParent()->getParent();
      DXASSERT(handleMapOnFunction.count(userF), "must exist");
      Value *handle = handleMapOnFunction[userF];
      ReplaceResourceUserWithHandle(ldInst, handle);
    } else {
      DXASSERT(dyn_cast<GEPOperator>(user) != nullptr,
               "else AddOpcodeParamForIntrinsic in CodeGen did not patch uses "
               "to only have ld/st refer to temp object");
      GEPOperator *GEP = cast<GEPOperator>(user);
      Value *idx = nullptr;
      if (GEP->getNumIndices() == 2) {
        // One-dimensional array of resources.
        idx = (GEP->idx_begin() + 1)->get();
      } else {
        gep_type_iterator GEPIt = gep_type_begin(GEP), E = gep_type_end(GEP);
        // Must be an instruction for a multi-dimensional array.
        std::unique_ptr<IRBuilder<>> Builder;
        if (GetElementPtrInst *GEPInst = dyn_cast<GetElementPtrInst>(GEP)) {
          Builder = llvm::make_unique<IRBuilder<>>(GEPInst);
        } else {
          Builder = llvm::make_unique<IRBuilder<>>(GV->getContext());
        }
        for (; GEPIt != E; ++GEPIt) {
          if (GEPIt->isArrayTy()) {
            unsigned arraySize = GEPIt->getArrayNumElements();
            Value *tmpIdx = GEPIt.getOperand();
            if (idx == nullptr)
              idx = tmpIdx;
            else {
              idx = Builder->CreateMul(idx, Builder->getInt32(arraySize));
              idx = Builder->CreateAdd(idx, tmpIdx);
            }
          }
        }
      }
      createHandleArgs[DXIL::OperandIndex::kCreateHandleResIndexOpIdx] = idx;
      createHandleArgs[DXIL::OperandIndex::kCreateHandleIsUniformOpIdx] =
          isUniformRes;
      Value *handle = nullptr;
      if (GetElementPtrInst *GEPInst = dyn_cast<GetElementPtrInst>(GEP)) {
        IRBuilder<> Builder = IRBuilder<>(GEPInst);
        if (DxilMDHelper::IsMarkedNonUniform(GEPInst)) {
          // Mark nonUniform.
          createHandleArgs[DXIL::OperandIndex::kCreateHandleIsUniformOpIdx] =
              hlslOP->GetI1Const(1);
          // Clear nonUniform on GEP.
          GEPInst->setMetadata(DxilMDHelper::kDxilNonUniformAttributeMDName,
                               nullptr);
        }
        createHandleArgs[DXIL::OperandIndex::kCreateHandleResIndexOpIdx] =
            Builder.CreateAdd(idx, resLowerBound);
        handle = Builder.CreateCall(createHandle, createHandleArgs, handleName);
      }
      for (auto GEPU = GEP->user_begin(), GEPE = GEP->user_end();
           GEPU != GEPE;) {
        // Must be a load inst.
        LoadInst *ldInst = cast<LoadInst>(*(GEPU++));
        if (handle) {
          ReplaceResourceUserWithHandle(ldInst, handle);
        } else {
          IRBuilder<> Builder = IRBuilder<>(ldInst);
          createHandleArgs[DXIL::OperandIndex::kCreateHandleResIndexOpIdx] =
              Builder.CreateAdd(idx, resLowerBound);
          Value *localHandle =
              Builder.CreateCall(createHandle, createHandleArgs, handleName);
          ReplaceResourceUserWithHandle(ldInst, localHandle);
        }
      }
      if (Instruction *I = dyn_cast<Instruction>(GEP)) {
        I->eraseFromParent();
      }
    }
  }
  // Erase unused handles.
  for (auto It : handleMapOnFunction) {
    Instruction *I = It.second;
    if (I->user_empty())
      I->eraseFromParent();
  }
}
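// Generate handles for all resources in the module, in class order.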
void DxilLowerCreateHandleForLib::GenerateDxilResourceHandles() {
  for (size_t i = 0; i < m_DM->GetCBuffers().size(); i++) {
    DxilCBuffer &C = m_DM->GetCBuffer(i);
    TranslateDxilResourceUses(C);
  }
  // Create sampler handles first; they may be used by SRV operations.
  for (size_t i = 0; i < m_DM->GetSamplers().size(); i++) {
    DxilSampler &S = m_DM->GetSampler(i);
    TranslateDxilResourceUses(S);
  }
  for (size_t i = 0; i < m_DM->GetSRVs().size(); i++) {
    DxilResource &SRV = m_DM->GetSRV(i);
    TranslateDxilResourceUses(SRV);
  }
  for (size_t i = 0; i < m_DM->GetUAVs().size(); i++) {
    DxilResource &UAV = m_DM->GetUAV(i);
    TranslateDxilResourceUses(UAV);
  }
}
// TBuffer.
namespace {

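// Initialize an SRV-side DxilResource from a tbuffer's DxilCBuffer, copying
// its ID, binding, symbol, and name.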
void InitTBuffer(const DxilCBuffer *pSource, DxilResource *pDest) {
  pDest->SetKind(pSource->GetKind());
  pDest->SetCompType(DXIL::ComponentType::U32);
  pDest->SetSampleCount(0);
  pDest->SetElementStride(0);
  pDest->SetGloballyCoherent(false);
  pDest->SetHasCounter(false);
  pDest->SetRW(false);
  pDest->SetROV(false);
  pDest->SetID(pSource->GetID());
  pDest->SetSpaceID(pSource->GetSpaceID());
  pDest->SetLowerBound(pSource->GetLowerBound());
  pDest->SetRangeSize(pSource->GetRangeSize());
  pDest->SetGlobalSymbol(pSource->GetGlobalSymbol());
  pDest->SetGlobalName(pSource->GetGlobalName());
  pDest->SetHandle(pSource->GetHandle());
}
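// Rewrite cbuffer-style loads through a tbuffer handle into typed BufferLoad
// calls, reconstructing 64-bit components from pairs of 32-bit values.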
void PatchTBufferLoad(CallInst *handle, DxilModule &DM) {
  hlsl::OP *hlslOP = DM.GetOP();
  llvm::LLVMContext &Ctx = DM.GetCtx();
  Type *doubleTy = Type::getDoubleTy(Ctx);
  Type *i64Ty = Type::getInt64Ty(Ctx);
  // Replace corresponding cbuffer loads with typed buffer loads
  for (auto U = handle->user_begin(); U != handle->user_end();) {
    CallInst *I = cast<CallInst>(*(U++));
    DXASSERT(I && OP::IsDxilOpFuncCallInst(I),
             "otherwise unexpected user of CreateHandle value");
    DXIL::OpCode opcode = OP::GetDxilOpFuncCallInst(I);
    if (opcode == DXIL::OpCode::CBufferLoadLegacy) {
      DxilInst_CBufferLoadLegacy cbLoad(I);
      // Replace with appropriate buffer load instruction
      IRBuilder<> Builder(I);
      opcode = OP::OpCode::BufferLoad;
      Type *Ty = Type::getInt32Ty(Ctx);
      Function *BufLoad = hlslOP->GetOpFunc(opcode, Ty);
      Constant *opArg = hlslOP->GetU32Const((unsigned)opcode);
      Value *undefI = UndefValue::get(Type::getInt32Ty(Ctx));
      Value *offset = cbLoad.get_regIndex();
      CallInst *load =
          Builder.CreateCall(BufLoad, {opArg, handle, offset, undefI});
      // Find extractvalue uses of the cbuffer load; replace them and generate
      // bitcasts as necessary
      for (auto LU = I->user_begin(); LU != I->user_end();) {
        ExtractValueInst *evInst = dyn_cast<ExtractValueInst>(*(LU++));
        DXASSERT(evInst && evInst->getNumIndices() == 1,
                 "user of cbuffer load result should be extractvalue");
        uint64_t idx = evInst->getIndices()[0];
        Type *EltTy = evInst->getType();
        IRBuilder<> EEBuilder(evInst);
        Value *result = nullptr;
        if (EltTy != Ty) {
          // Extract two values and DXIL::OpCode::MakeDouble or construct i64
          if ((EltTy == doubleTy) || (EltTy == i64Ty)) {
            DXASSERT(idx < 2, "64-bit component index out of range");
            // This assumes big endian order in tbuffer elements (is this
            // correct?)
            Value *low = EEBuilder.CreateExtractValue(load, idx * 2);
            Value *high = EEBuilder.CreateExtractValue(load, idx * 2 + 1);
            if (EltTy == doubleTy) {
              opcode = OP::OpCode::MakeDouble;
              Function *MakeDouble = hlslOP->GetOpFunc(opcode, doubleTy);
              Constant *opArg = hlslOP->GetU32Const((unsigned)opcode);
              result = EEBuilder.CreateCall(MakeDouble, {opArg, low, high});
            } else {
              high = EEBuilder.CreateZExt(high, i64Ty);
              low = EEBuilder.CreateZExt(low, i64Ty);
              high = EEBuilder.CreateShl(high, hlslOP->GetU64Const(32));
              result = EEBuilder.CreateOr(high, low);
            }
          } else {
            result = EEBuilder.CreateExtractValue(load, idx);
            result = EEBuilder.CreateBitCast(result, EltTy);
          }
        } else {
          result = EEBuilder.CreateExtractValue(load, idx);
        }
        evInst->replaceAllUsesWith(result);
        evInst->eraseFromParent();
      }
    } else if (opcode == DXIL::OpCode::CBufferLoad) {
      // TODO: Handle this, or prevent this for tbuffer
      DXASSERT(false, "otherwise CBufferLoad used for tbuffer rather than "
                      "CBufferLoadLegacy");
    } else {
      DXASSERT(false, "otherwise unexpected user of CreateHandle value");
    }
    I->eraseFromParent();
  }
}
} // namespace

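// Recursively patch users of a tbuffer global: DXIL op calls get their loads
// rewritten; other users are traversed.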
void DxilLowerCreateHandleForLib::PatchTBufferUse(Value *V, DxilModule &DM) {
  for (User *U : V->users()) {
    if (CallInst *CI = dyn_cast<CallInst>(U)) {
      // Patch dxil call.
      if (hlsl::OP::IsDxilOpFuncCallInst(CI))
        PatchTBufferLoad(CI, DM);
    } else {
      PatchTBufferUse(U, DM);
    }
  }
}
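// Convert tbuffer cbuffers into SRVs: clone each tbuffer as an SRV entry,
// patch its loads, and detach the old cbuffer's global symbol so it can be
// removed later.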
bool DxilLowerCreateHandleForLib::PatchTBuffers(DxilModule &DM) {
  bool bChanged = false;
  // move tbuffer resources to SRVs
  unsigned offset = DM.GetSRVs().size();
  Module &M = *DM.GetModule();
  for (auto it = DM.GetCBuffers().begin(); it != DM.GetCBuffers().end(); it++) {
    DxilCBuffer *CB = it->get();
    if (CB->GetKind() == DXIL::ResourceKind::TBuffer) {
      auto srv = make_unique<DxilResource>();
      InitTBuffer(CB, srv.get());
      srv->SetID(offset++);
      DM.AddSRV(std::move(srv));
      GlobalVariable *GV = dyn_cast<GlobalVariable>(CB->GetGlobalSymbol());
      if (GV == nullptr)
        continue;
      PatchTBufferUse(GV, DM);
      // Set global symbol for cbuffer to an unused value so it can be removed
      // in RemoveUnusedResourceSymbols.
      Type *Ty = GV->getType()->getElementType();
      GlobalVariable *NewGV = new GlobalVariable(
          M, Ty, GV->isConstant(), GV->getLinkage(), /*Initializer*/ nullptr,
          GV->getName(),
          /*InsertBefore*/ nullptr, GV->getThreadLocalMode(),
          GV->getType()->getAddressSpace(), GV->isExternallyInitialized());
      CB->SetGlobalSymbol(NewGV);
      bChanged = true;
    }
  }
  return bChanged;
}
char DxilLowerCreateHandleForLib::ID = 0;

ModulePass *llvm::createDxilLowerCreateHandleForLibPass() {
  return new DxilLowerCreateHandleForLib();
}

INITIALIZE_PASS(DxilLowerCreateHandleForLib, "hlsl-dxil-lower-handle-for-lib",
                "DXIL Lower createHandleForLib", false, false)
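// Allocates resource registers for libraries when an auto-binding space is
// specified; shaders use DxilCondenseResources instead.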
class DxilAllocateResourcesForLib : public ModulePass {
private:
  RemapEntryCollection m_rewrites;

public:
  static char ID; // Pass identification, replacement for typeid
  explicit DxilAllocateResourcesForLib()
      : ModulePass(ID), m_AutoBindingSpace(UINT_MAX) {}

  void applyOptions(PassOptions O) override {
    GetPassOptionUInt32(O, "auto-binding-space", &m_AutoBindingSpace, UINT_MAX);
  }
  const char *getPassName() const override { return "DXIL Condense Resources"; }

  bool runOnModule(Module &M) override {
    DxilModule &DM = M.GetOrCreateDxilModule();
    // Must specify a default space, and must apply to library.
    // Use DxilCondenseResources instead for shaders.
    if ((m_AutoBindingSpace == UINT_MAX) || !DM.GetShaderModel()->IsLib())
      return false;
    bool hasResource = DM.GetCBuffers().size() || DM.GetUAVs().size() ||
                       DM.GetSRVs().size() || DM.GetSamplers().size();
    if (hasResource) {
      DM.SetAutoBindingSpace(m_AutoBindingSpace);
      DxilResourceRegisterAllocator ResourceRegisterAllocator;
      ResourceRegisterAllocator.AllocateRegisters(DM);
    }
    return true;
  }

private:
  uint32_t m_AutoBindingSpace;
};

char DxilAllocateResourcesForLib::ID = 0;

ModulePass *llvm::createDxilAllocateResourcesForLibPass() {
  return new DxilAllocateResourcesForLib();
}

INITIALIZE_PASS(DxilAllocateResourcesForLib,
                "hlsl-dxil-allocate-resources-for-lib",
                "DXIL Allocate Resources For Library", false, false)