// Copyright (c) 2017 Google Inc.
// Modifications Copyright (C) 2024 Advanced Micro Devices, Inc. All rights
// reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "source/opt/ir_context.h"

#include <cstring>

#include "OpenCLDebugInfo100.h"
#include "source/latest_version_glsl_std_450_header.h"
#include "source/opt/log.h"
#include "source/opt/reflect.h"

namespace spvtools {
namespace opt {
namespace {

constexpr int kSpvDecorateTargetIdInIdx = 0;
constexpr int kSpvDecorateDecorationInIdx = 1;
constexpr int kSpvDecorateBuiltinInIdx = 2;
constexpr int kEntryPointInterfaceInIdx = 3;
constexpr int kEntryPointFunctionIdInIdx = 1;
constexpr int kEntryPointExecutionModelInIdx = 0;

// Constants for OpenCL.DebugInfo.100 / NonSemantic.Shader.DebugInfo.100
// extension instructions.
constexpr uint32_t kDebugFunctionOperandFunctionIndex = 13;
constexpr uint32_t kDebugGlobalVariableOperandVariableIndex = 11;

}  // namespace

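// Builds each analysis requested in |set| that is not already valid; analyses
// that are already marked valid are left untouched.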
void IRContext::BuildInvalidAnalyses(IRContext::Analysis set) {
  set = Analysis(set & ~valid_analyses_);

  if (set & kAnalysisDefUse) {
    BuildDefUseManager();
  }
  if (set & kAnalysisInstrToBlockMapping) {
    BuildInstrToBlockMapping();
  }
  if (set & kAnalysisDecorations) {
    BuildDecorationManager();
  }
  if (set & kAnalysisCFG) {
    BuildCFG();
  }
  if (set & kAnalysisDominatorAnalysis) {
    ResetDominatorAnalysis();
  }
  if (set & kAnalysisLoopAnalysis) {
    ResetLoopAnalysis();
  }
  if (set & kAnalysisBuiltinVarId) {
    ResetBuiltinAnalysis();
  }
  if (set & kAnalysisNameMap) {
    BuildIdToNameMap();
  }
  if (set & kAnalysisScalarEvolution) {
    BuildScalarEvolutionAnalysis();
  }
  if (set & kAnalysisRegisterPressure) {
    BuildRegPressureAnalysis();
  }
  if (set & kAnalysisValueNumberTable) {
    BuildValueNumberTable();
  }
  if (set & kAnalysisStructuredCFG) {
    BuildStructuredCFGAnalysis();
  }
  if (set & kAnalysisIdToFuncMapping) {
    BuildIdToFuncMapping();
  }
  if (set & kAnalysisConstants) {
    BuildConstantManager();
  }
  if (set & kAnalysisTypes) {
    BuildTypeManager();
  }
  if (set & kAnalysisDebugInfo) {
    BuildDebugInfoManager();
  }
  if (set & kAnalysisLiveness) {
    BuildLivenessManager();
  }
}

void IRContext::InvalidateAnalysesExceptFor(
    IRContext::Analysis preserved_analyses) {
  uint32_t analyses_to_invalidate = valid_analyses_ & (~preserved_analyses);
  InvalidateAnalyses(static_cast<IRContext::Analysis>(analyses_to_invalidate));
}

void IRContext::InvalidateAnalyses(IRContext::Analysis analyses_to_invalidate) {
  // The ConstantManager and DebugInfoManager contain Type pointers. If the
  // TypeManager goes away, the ConstantManager and DebugInfoManager have to
  // go away.
  if (analyses_to_invalidate & kAnalysisTypes) {
    analyses_to_invalidate |= kAnalysisConstants;
    analyses_to_invalidate |= kAnalysisDebugInfo;
  }
  // The dominator analysis holds the pseudo entry and exit nodes from the CFG.
  // Also, if the CFG changes, the dominators may change as well, so the
  // dominator analysis should be invalidated too.
  if (analyses_to_invalidate & kAnalysisCFG) {
    analyses_to_invalidate |= kAnalysisDominatorAnalysis;
  }

  if (analyses_to_invalidate & kAnalysisDefUse) {
    def_use_mgr_.reset(nullptr);
  }
  if (analyses_to_invalidate & kAnalysisInstrToBlockMapping) {
    instr_to_block_.clear();
  }
  if (analyses_to_invalidate & kAnalysisDecorations) {
    decoration_mgr_.reset(nullptr);
  }
  if (analyses_to_invalidate & kAnalysisCombinators) {
    combinator_ops_.clear();
  }
  if (analyses_to_invalidate & kAnalysisBuiltinVarId) {
    builtin_var_id_map_.clear();
  }
  if (analyses_to_invalidate & kAnalysisCFG) {
    cfg_.reset(nullptr);
  }
  if (analyses_to_invalidate & kAnalysisDominatorAnalysis) {
    dominator_trees_.clear();
    post_dominator_trees_.clear();
  }
  if (analyses_to_invalidate & kAnalysisNameMap) {
    id_to_name_.reset(nullptr);
  }
  if (analyses_to_invalidate & kAnalysisValueNumberTable) {
    vn_table_.reset(nullptr);
  }
  if (analyses_to_invalidate & kAnalysisStructuredCFG) {
    struct_cfg_analysis_.reset(nullptr);
  }
  if (analyses_to_invalidate & kAnalysisIdToFuncMapping) {
    id_to_func_.clear();
  }
  if (analyses_to_invalidate & kAnalysisConstants) {
    constant_mgr_.reset(nullptr);
  }
  if (analyses_to_invalidate & kAnalysisLiveness) {
    liveness_mgr_.reset(nullptr);
  }
  if (analyses_to_invalidate & kAnalysisTypes) {
    type_mgr_.reset(nullptr);
  }
  if (analyses_to_invalidate & kAnalysisDebugInfo) {
    debug_info_mgr_.reset(nullptr);
  }

  valid_analyses_ = Analysis(valid_analyses_ & ~analyses_to_invalidate);
}

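// Removes |inst| from the module and updates every analysis that is currently
// valid. Returns the instruction that followed |inst| in its list, or nullptr
// if |inst| was not part of a list; in that case |inst| is turned into an
// OpNop instead of being deleted.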
Instruction* IRContext::KillInst(Instruction* inst) {
  if (!inst) {
    return nullptr;
  }

  KillNamesAndDecorates(inst);
  KillOperandFromDebugInstructions(inst);

  if (AreAnalysesValid(kAnalysisDefUse)) {
    analysis::DefUseManager* def_use_mgr = get_def_use_mgr();
    def_use_mgr->ClearInst(inst);
    for (auto& l_inst : inst->dbg_line_insts()) def_use_mgr->ClearInst(&l_inst);
  }
  if (AreAnalysesValid(kAnalysisInstrToBlockMapping)) {
    instr_to_block_.erase(inst);
  }
  if (AreAnalysesValid(kAnalysisDecorations)) {
    if (inst->IsDecoration()) {
      decoration_mgr_->RemoveDecoration(inst);
    }
  }
  if (AreAnalysesValid(kAnalysisDebugInfo)) {
    get_debug_info_mgr()->ClearDebugScopeAndInlinedAtUses(inst);
    get_debug_info_mgr()->ClearDebugInfo(inst);
  }
  if (type_mgr_ && IsTypeInst(inst->opcode())) {
    type_mgr_->RemoveId(inst->result_id());
  }
  if (constant_mgr_ && IsConstantInst(inst->opcode())) {
    constant_mgr_->RemoveId(inst->result_id());
  }
  if (inst->opcode() == spv::Op::OpCapability ||
      inst->opcode() == spv::Op::OpExtension) {
    // We reset the feature manager instead of updating it because updating it
    // would be just as much work: we would have to remove all capabilities
    // implied by this capability that are not also implied by the remaining
    // OpCapability instructions. We could update extensions incrementally,
    // but we will do that only if it becomes necessary.
    ResetFeatureManager();
  }

  RemoveFromIdToName(inst);

  Instruction* next_instruction = nullptr;
  if (inst->IsInAList()) {
    next_instruction = inst->NextNode();
    inst->RemoveFromList();
    delete inst;
  } else {
    // Needed for instructions that are not part of a list, such as OpLabel,
    // OpFunction, and OpFunctionEnd.
    inst->ToNop();
  }
  return next_instruction;
}

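// Kills every instruction in [begin, end) for which |condition| returns true.
// Returns true if at least one instruction was removed.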
bool IRContext::KillInstructionIf(Module::inst_iterator begin,
                                  Module::inst_iterator end,
                                  std::function<bool(Instruction*)> condition) {
  bool removed = false;
  for (auto it = begin; it != end;) {
    if (!condition(&*it)) {
      ++it;
      continue;
    }

    removed = true;

    // `it` is an iterator on an intrusive list. Next is invalidated on the
    // current node when an instruction is killed. The iterator must be moved
    // forward before deleting the node.
    auto instruction = &*it;
    ++it;
    KillInst(instruction);
  }

  return removed;
}

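// Collects into |to_kill| every non-semantic instruction that transitively
// uses the result id of |inst|.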
void IRContext::CollectNonSemanticTree(
    Instruction* inst, std::unordered_set<Instruction*>* to_kill) {
  if (!inst->HasResultId()) return;
  // Debug[No]Line result id is not used, so we are done.
  if (inst->IsDebugLineInst()) return;

  std::vector<Instruction*> work_list;
  std::unordered_set<Instruction*> seen;
  work_list.push_back(inst);

  while (!work_list.empty()) {
    auto* i = work_list.back();
    work_list.pop_back();
    get_def_use_mgr()->ForEachUser(
        i, [&work_list, to_kill, &seen](Instruction* user) {
          if (user->IsNonSemanticInstruction() && seen.insert(user).second) {
            work_list.push_back(user);
            to_kill->insert(user);
          }
        });
  }
}

bool IRContext::KillDef(uint32_t id) {
  Instruction* def = get_def_use_mgr()->GetDef(id);
  if (def != nullptr) {
    KillInst(def);
    return true;
  }
  return false;
}

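// Removes every OpCapability instruction that declares |capability| and, if
// one was removed, drops the capability from the feature manager as well.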
bool IRContext::RemoveCapability(spv::Capability capability) {
  const bool removed = KillInstructionIf(
      module()->capability_begin(), module()->capability_end(),
      [capability](Instruction* inst) {
        return static_cast<spv::Capability>(inst->GetSingleWordOperand(0)) ==
               capability;
      });

  if (removed && feature_mgr_ != nullptr) {
    feature_mgr_->RemoveCapability(capability);
  }

  return removed;
}

bool IRContext::RemoveExtension(Extension extension) {
  const std::string_view extensionName = ExtensionToString(extension);
  const bool removed = KillInstructionIf(
      module()->extension_begin(), module()->extension_end(),
      [&extensionName](Instruction* inst) {
        return inst->GetOperand(0).AsString() == extensionName;
      });

  if (removed && feature_mgr_ != nullptr) {
    feature_mgr_->RemoveExtension(extension);
  }

  return removed;
}

bool IRContext::ReplaceAllUsesWith(uint32_t before, uint32_t after) {
  return ReplaceAllUsesWithPredicate(before, after,
                                     [](Instruction*) { return true; });
}

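// Replaces every use of id |before| with |after| in users that satisfy
// |predicate|. A use index smaller than the user's result/type id count refers
// to its type id (result ids are never rewritten); the remaining indices are
// translated to in-operand positions.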
bool IRContext::ReplaceAllUsesWithPredicate(
    uint32_t before, uint32_t after,
    const std::function<bool(Instruction*)>& predicate) {
  if (before == after) return false;

  if (AreAnalysesValid(kAnalysisDebugInfo)) {
    get_debug_info_mgr()->ReplaceAllUsesInDebugScopeWithPredicate(before, after,
                                                                  predicate);
  }

  // Ensure that |after| has been registered as def.
  assert(get_def_use_mgr()->GetDef(after) &&
         "'after' is not a registered def.");

  std::vector<std::pair<Instruction*, uint32_t>> uses_to_update;
  get_def_use_mgr()->ForEachUse(
      before, [&predicate, &uses_to_update](Instruction* user, uint32_t index) {
        if (predicate(user)) {
          uses_to_update.emplace_back(user, index);
        }
      });

  Instruction* prev = nullptr;
  for (auto p : uses_to_update) {
    Instruction* user = p.first;
    uint32_t index = p.second;
    if (prev == nullptr || prev != user) {
      ForgetUses(user);
      prev = user;
    }
    const uint32_t type_result_id_count =
        (user->result_id() != 0) + (user->type_id() != 0);
    if (index < type_result_id_count) {
      // Update the type_id. Note that result id is immutable so it should
      // never be updated.
      if (user->type_id() != 0 && index == 0) {
        user->SetResultType(after);
      } else if (user->type_id() == 0) {
        SPIRV_ASSERT(consumer_, false,
                     "Result type id considered as use while the instruction "
                     "doesn't have a result type id.");
        (void)consumer_;  // Makes the compiler happy for release build.
      } else {
        SPIRV_ASSERT(consumer_, false,
                     "Trying to set the immutable result id.");
      }
    } else {
      // Update an in-operand.
      uint32_t in_operand_pos = index - type_result_id_count;
      // Make the modification in the instruction.
      user->SetInOperand(in_operand_pos, {after});
    }
    AnalyzeUses(user);
  }

  return true;
}

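// Consistency check used while debugging: rebuilds each analysis that is
// currently marked valid and compares it against the cached one. Compiles to a
// no-op unless SPIRV_CHECK_CONTEXT is defined.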
bool IRContext::IsConsistent() {
#ifndef SPIRV_CHECK_CONTEXT
  return true;
#else
  if (AreAnalysesValid(kAnalysisDefUse)) {
    analysis::DefUseManager new_def_use(module());
    if (!CompareAndPrintDifferences(*get_def_use_mgr(), new_def_use)) {
      return false;
    }
  }

  if (AreAnalysesValid(kAnalysisIdToFuncMapping)) {
    for (auto& fn : *module_) {
      if (id_to_func_[fn.result_id()] != &fn) {
        return false;
      }
    }
  }

  if (AreAnalysesValid(kAnalysisInstrToBlockMapping)) {
    for (auto& func : *module()) {
      for (auto& block : func) {
        if (!block.WhileEachInst([this, &block](Instruction* inst) {
              if (get_instr_block(inst) != &block) {
                return false;
              }
              return true;
            }))
          return false;
      }
    }
  }

  if (!CheckCFG()) {
    return false;
  }

  if (AreAnalysesValid(kAnalysisDecorations)) {
    analysis::DecorationManager* dec_mgr = get_decoration_mgr();
    analysis::DecorationManager current(module());
    if (*dec_mgr != current) {
      return false;
    }
  }

  if (feature_mgr_ != nullptr) {
    FeatureManager current(grammar_);
    current.Analyze(module());
    if (current != *feature_mgr_) {
      return false;
    }
  }
  return true;
#endif
}

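// Removes the use records of |inst| from the analyses that track them. Callers
// pair this with AnalyzeUses below: forget the uses before rewriting an
// instruction's operands and re-analyze them afterwards.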
void IRContext::ForgetUses(Instruction* inst) {
  if (AreAnalysesValid(kAnalysisDefUse)) {
    get_def_use_mgr()->EraseUseRecordsOfOperandIds(inst);
  }
  if (AreAnalysesValid(kAnalysisDecorations)) {
    if (inst->IsDecoration()) {
      get_decoration_mgr()->RemoveDecoration(inst);
    }
  }
  if (AreAnalysesValid(kAnalysisDebugInfo)) {
    get_debug_info_mgr()->ClearDebugInfo(inst);
  }
  RemoveFromIdToName(inst);
}

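// Registers the (possibly updated) uses of |inst| with every valid analysis.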
void IRContext::AnalyzeUses(Instruction* inst) {
  if (AreAnalysesValid(kAnalysisDefUse)) {
    get_def_use_mgr()->AnalyzeInstUse(inst);
  }
  if (AreAnalysesValid(kAnalysisDecorations)) {
    if (inst->IsDecoration()) {
      get_decoration_mgr()->AddDecoration(inst);
    }
  }
  if (AreAnalysesValid(kAnalysisDebugInfo)) {
    get_debug_info_mgr()->AnalyzeDebugInst(inst);
  }
  if (id_to_name_ && (inst->opcode() == spv::Op::OpName ||
                      inst->opcode() == spv::Op::OpMemberName)) {
    id_to_name_->insert({inst->GetSingleWordInOperand(0), inst});
  }
}

void IRContext::KillNamesAndDecorates(uint32_t id) {
  analysis::DecorationManager* dec_mgr = get_decoration_mgr();
  dec_mgr->RemoveDecorationsFrom(id);

  std::vector<Instruction*> name_to_kill;
  for (auto name : GetNames(id)) {
    name_to_kill.push_back(name.second);
  }
  for (Instruction* name_inst : name_to_kill) {
    KillInst(name_inst);
  }
}

void IRContext::KillNamesAndDecorates(Instruction* inst) {
  const uint32_t rId = inst->result_id();
  if (rId == 0) return;
  KillNamesAndDecorates(rId);
}

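// Replaces references to the result id of |inst| in DebugFunction and
// DebugGlobalVariable extension instructions with the DebugInfoNone id, so the
// debug info no longer points at an instruction that is being removed.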
void IRContext::KillOperandFromDebugInstructions(Instruction* inst) {
  const auto opcode = inst->opcode();
  const uint32_t id = inst->result_id();
  // Kill id of OpFunction from DebugFunction.
  if (opcode == spv::Op::OpFunction) {
    for (auto it = module()->ext_inst_debuginfo_begin();
         it != module()->ext_inst_debuginfo_end(); ++it) {
      if (it->GetOpenCL100DebugOpcode() != OpenCLDebugInfo100DebugFunction)
        continue;
      auto& operand = it->GetOperand(kDebugFunctionOperandFunctionIndex);
      if (operand.words[0] == id) {
        operand.words[0] =
            get_debug_info_mgr()->GetDebugInfoNone()->result_id();
        get_def_use_mgr()->AnalyzeInstUse(&*it);
      }
    }
  }
  // Kill id of OpVariable for global variable from DebugGlobalVariable.
  if (opcode == spv::Op::OpVariable || IsConstantInst(opcode)) {
    for (auto it = module()->ext_inst_debuginfo_begin();
         it != module()->ext_inst_debuginfo_end(); ++it) {
      if (it->GetCommonDebugOpcode() != CommonDebugInfoDebugGlobalVariable)
        continue;
      auto& operand = it->GetOperand(kDebugGlobalVariableOperandVariableIndex);
      if (operand.words[0] == id) {
        operand.words[0] =
            get_debug_info_mgr()->GetDebugInfoNone()->result_id();
        get_def_use_mgr()->AnalyzeInstUse(&*it);
      }
    }
  }
}

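// Records the core opcodes that are treated as combinators (side-effect-free
// instructions) when |capability| is enabled. Core opcodes are stored under
// key 0 in |combinator_ops_|; extended instruction sets use their import id as
// the key (see AddCombinatorsForExtension below).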
void IRContext::AddCombinatorsForCapability(uint32_t capability) {
  spv::Capability cap = spv::Capability(capability);
  if (cap == spv::Capability::Shader) {
    combinator_ops_[0].insert({(uint32_t)spv::Op::OpNop,
                               (uint32_t)spv::Op::OpUndef,
                               (uint32_t)spv::Op::OpConstant,
                               (uint32_t)spv::Op::OpConstantTrue,
                               (uint32_t)spv::Op::OpConstantFalse,
                               (uint32_t)spv::Op::OpConstantComposite,
                               (uint32_t)spv::Op::OpConstantSampler,
                               (uint32_t)spv::Op::OpConstantNull,
                               (uint32_t)spv::Op::OpTypeVoid,
                               (uint32_t)spv::Op::OpTypeBool,
                               (uint32_t)spv::Op::OpTypeInt,
                               (uint32_t)spv::Op::OpTypeFloat,
                               (uint32_t)spv::Op::OpTypeVector,
                               (uint32_t)spv::Op::OpTypeMatrix,
                               (uint32_t)spv::Op::OpTypeImage,
                               (uint32_t)spv::Op::OpTypeSampler,
                               (uint32_t)spv::Op::OpTypeSampledImage,
                               (uint32_t)spv::Op::OpTypeAccelerationStructureNV,
                               (uint32_t)spv::Op::OpTypeAccelerationStructureKHR,
                               (uint32_t)spv::Op::OpTypeRayQueryKHR,
                               (uint32_t)spv::Op::OpTypeHitObjectNV,
                               (uint32_t)spv::Op::OpTypeArray,
                               (uint32_t)spv::Op::OpTypeRuntimeArray,
                               (uint32_t)spv::Op::OpTypeNodePayloadArrayAMDX,
                               (uint32_t)spv::Op::OpTypeStruct,
                               (uint32_t)spv::Op::OpTypeOpaque,
                               (uint32_t)spv::Op::OpTypePointer,
                               (uint32_t)spv::Op::OpTypeFunction,
                               (uint32_t)spv::Op::OpTypeEvent,
                               (uint32_t)spv::Op::OpTypeDeviceEvent,
                               (uint32_t)spv::Op::OpTypeReserveId,
                               (uint32_t)spv::Op::OpTypeQueue,
                               (uint32_t)spv::Op::OpTypePipe,
                               (uint32_t)spv::Op::OpTypeForwardPointer,
                               (uint32_t)spv::Op::OpVariable,
                               (uint32_t)spv::Op::OpImageTexelPointer,
                               (uint32_t)spv::Op::OpLoad,
                               (uint32_t)spv::Op::OpAccessChain,
                               (uint32_t)spv::Op::OpInBoundsAccessChain,
                               (uint32_t)spv::Op::OpArrayLength,
                               (uint32_t)spv::Op::OpVectorExtractDynamic,
                               (uint32_t)spv::Op::OpVectorInsertDynamic,
                               (uint32_t)spv::Op::OpVectorShuffle,
                               (uint32_t)spv::Op::OpCompositeConstruct,
                               (uint32_t)spv::Op::OpCompositeExtract,
                               (uint32_t)spv::Op::OpCompositeInsert,
                               (uint32_t)spv::Op::OpCopyLogical,
                               (uint32_t)spv::Op::OpCopyObject,
                               (uint32_t)spv::Op::OpTranspose,
                               (uint32_t)spv::Op::OpSampledImage,
                               (uint32_t)spv::Op::OpImageSampleImplicitLod,
                               (uint32_t)spv::Op::OpImageSampleExplicitLod,
                               (uint32_t)spv::Op::OpImageSampleDrefImplicitLod,
                               (uint32_t)spv::Op::OpImageSampleDrefExplicitLod,
                               (uint32_t)spv::Op::OpImageSampleProjImplicitLod,
                               (uint32_t)spv::Op::OpImageSampleProjExplicitLod,
                               (uint32_t)spv::Op::OpImageSampleProjDrefImplicitLod,
                               (uint32_t)spv::Op::OpImageSampleProjDrefExplicitLod,
                               (uint32_t)spv::Op::OpImageFetch,
                               (uint32_t)spv::Op::OpImageGather,
                               (uint32_t)spv::Op::OpImageDrefGather,
                               (uint32_t)spv::Op::OpImageRead,
                               (uint32_t)spv::Op::OpImage,
                               (uint32_t)spv::Op::OpImageQueryFormat,
                               (uint32_t)spv::Op::OpImageQueryOrder,
                               (uint32_t)spv::Op::OpImageQuerySizeLod,
                               (uint32_t)spv::Op::OpImageQuerySize,
                               (uint32_t)spv::Op::OpImageQueryLevels,
                               (uint32_t)spv::Op::OpImageQuerySamples,
                               (uint32_t)spv::Op::OpConvertFToU,
                               (uint32_t)spv::Op::OpConvertFToS,
                               (uint32_t)spv::Op::OpConvertSToF,
                               (uint32_t)spv::Op::OpConvertUToF,
                               (uint32_t)spv::Op::OpUConvert,
                               (uint32_t)spv::Op::OpSConvert,
                               (uint32_t)spv::Op::OpFConvert,
                               (uint32_t)spv::Op::OpQuantizeToF16,
                               (uint32_t)spv::Op::OpBitcast,
                               (uint32_t)spv::Op::OpSNegate,
                               (uint32_t)spv::Op::OpFNegate,
                               (uint32_t)spv::Op::OpIAdd,
                               (uint32_t)spv::Op::OpFAdd,
                               (uint32_t)spv::Op::OpISub,
                               (uint32_t)spv::Op::OpFSub,
                               (uint32_t)spv::Op::OpIMul,
                               (uint32_t)spv::Op::OpFMul,
                               (uint32_t)spv::Op::OpUDiv,
                               (uint32_t)spv::Op::OpSDiv,
                               (uint32_t)spv::Op::OpFDiv,
                               (uint32_t)spv::Op::OpUMod,
                               (uint32_t)spv::Op::OpSRem,
                               (uint32_t)spv::Op::OpSMod,
                               (uint32_t)spv::Op::OpFRem,
                               (uint32_t)spv::Op::OpFMod,
                               (uint32_t)spv::Op::OpVectorTimesScalar,
                               (uint32_t)spv::Op::OpMatrixTimesScalar,
                               (uint32_t)spv::Op::OpVectorTimesMatrix,
                               (uint32_t)spv::Op::OpMatrixTimesVector,
                               (uint32_t)spv::Op::OpMatrixTimesMatrix,
                               (uint32_t)spv::Op::OpOuterProduct,
                               (uint32_t)spv::Op::OpDot,
                               (uint32_t)spv::Op::OpIAddCarry,
                               (uint32_t)spv::Op::OpISubBorrow,
                               (uint32_t)spv::Op::OpUMulExtended,
                               (uint32_t)spv::Op::OpSMulExtended,
                               (uint32_t)spv::Op::OpAny,
                               (uint32_t)spv::Op::OpAll,
                               (uint32_t)spv::Op::OpIsNan,
                               (uint32_t)spv::Op::OpIsInf,
                               (uint32_t)spv::Op::OpLogicalEqual,
                               (uint32_t)spv::Op::OpLogicalNotEqual,
                               (uint32_t)spv::Op::OpLogicalOr,
                               (uint32_t)spv::Op::OpLogicalAnd,
                               (uint32_t)spv::Op::OpLogicalNot,
                               (uint32_t)spv::Op::OpSelect,
                               (uint32_t)spv::Op::OpIEqual,
                               (uint32_t)spv::Op::OpINotEqual,
                               (uint32_t)spv::Op::OpUGreaterThan,
                               (uint32_t)spv::Op::OpSGreaterThan,
                               (uint32_t)spv::Op::OpUGreaterThanEqual,
                               (uint32_t)spv::Op::OpSGreaterThanEqual,
                               (uint32_t)spv::Op::OpULessThan,
                               (uint32_t)spv::Op::OpSLessThan,
                               (uint32_t)spv::Op::OpULessThanEqual,
                               (uint32_t)spv::Op::OpSLessThanEqual,
                               (uint32_t)spv::Op::OpFOrdEqual,
                               (uint32_t)spv::Op::OpFUnordEqual,
                               (uint32_t)spv::Op::OpFOrdNotEqual,
                               (uint32_t)spv::Op::OpFUnordNotEqual,
                               (uint32_t)spv::Op::OpFOrdLessThan,
                               (uint32_t)spv::Op::OpFUnordLessThan,
                               (uint32_t)spv::Op::OpFOrdGreaterThan,
                               (uint32_t)spv::Op::OpFUnordGreaterThan,
                               (uint32_t)spv::Op::OpFOrdLessThanEqual,
                               (uint32_t)spv::Op::OpFUnordLessThanEqual,
                               (uint32_t)spv::Op::OpFOrdGreaterThanEqual,
                               (uint32_t)spv::Op::OpFUnordGreaterThanEqual,
                               (uint32_t)spv::Op::OpShiftRightLogical,
                               (uint32_t)spv::Op::OpShiftRightArithmetic,
                               (uint32_t)spv::Op::OpShiftLeftLogical,
                               (uint32_t)spv::Op::OpBitwiseOr,
                               (uint32_t)spv::Op::OpBitwiseXor,
                               (uint32_t)spv::Op::OpBitwiseAnd,
                               (uint32_t)spv::Op::OpNot,
                               (uint32_t)spv::Op::OpBitFieldInsert,
                               (uint32_t)spv::Op::OpBitFieldSExtract,
                               (uint32_t)spv::Op::OpBitFieldUExtract,
                               (uint32_t)spv::Op::OpBitReverse,
                               (uint32_t)spv::Op::OpBitCount,
                               (uint32_t)spv::Op::OpPhi,
                               (uint32_t)spv::Op::OpImageSparseSampleImplicitLod,
                               (uint32_t)spv::Op::OpImageSparseSampleExplicitLod,
                               (uint32_t)spv::Op::OpImageSparseSampleDrefImplicitLod,
                               (uint32_t)spv::Op::OpImageSparseSampleDrefExplicitLod,
                               (uint32_t)spv::Op::OpImageSparseSampleProjImplicitLod,
                               (uint32_t)spv::Op::OpImageSparseSampleProjExplicitLod,
                               (uint32_t)spv::Op::OpImageSparseSampleProjDrefImplicitLod,
                               (uint32_t)spv::Op::OpImageSparseSampleProjDrefExplicitLod,
                               (uint32_t)spv::Op::OpImageSparseFetch,
                               (uint32_t)spv::Op::OpImageSparseGather,
                               (uint32_t)spv::Op::OpImageSparseDrefGather,
                               (uint32_t)spv::Op::OpImageSparseTexelsResident,
                               (uint32_t)spv::Op::OpImageSparseRead,
                               (uint32_t)spv::Op::OpSizeOf});
  }
}

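// Records the combinator instructions of the imported extended instruction set
// |extension|, keyed by its result id. Only GLSL.std.450 is modeled; any other
// set gets an empty entry, meaning none of its instructions are treated as
// combinators.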
void IRContext::AddCombinatorsForExtension(Instruction* extension) {
  assert(extension->opcode() == spv::Op::OpExtInstImport &&
         "Expecting an import of an extension's instruction set.");
  const std::string extension_name = extension->GetInOperand(0).AsString();
  if (extension_name == "GLSL.std.450") {
    combinator_ops_[extension->result_id()] = {
        (uint32_t)GLSLstd450Round,
        (uint32_t)GLSLstd450RoundEven,
        (uint32_t)GLSLstd450Trunc,
        (uint32_t)GLSLstd450FAbs,
        (uint32_t)GLSLstd450SAbs,
        (uint32_t)GLSLstd450FSign,
        (uint32_t)GLSLstd450SSign,
        (uint32_t)GLSLstd450Floor,
        (uint32_t)GLSLstd450Ceil,
        (uint32_t)GLSLstd450Fract,
        (uint32_t)GLSLstd450Radians,
        (uint32_t)GLSLstd450Degrees,
        (uint32_t)GLSLstd450Sin,
        (uint32_t)GLSLstd450Cos,
        (uint32_t)GLSLstd450Tan,
        (uint32_t)GLSLstd450Asin,
        (uint32_t)GLSLstd450Acos,
        (uint32_t)GLSLstd450Atan,
        (uint32_t)GLSLstd450Sinh,
        (uint32_t)GLSLstd450Cosh,
        (uint32_t)GLSLstd450Tanh,
        (uint32_t)GLSLstd450Asinh,
        (uint32_t)GLSLstd450Acosh,
        (uint32_t)GLSLstd450Atanh,
        (uint32_t)GLSLstd450Atan2,
        (uint32_t)GLSLstd450Pow,
        (uint32_t)GLSLstd450Exp,
        (uint32_t)GLSLstd450Log,
        (uint32_t)GLSLstd450Exp2,
        (uint32_t)GLSLstd450Log2,
        (uint32_t)GLSLstd450Sqrt,
        (uint32_t)GLSLstd450InverseSqrt,
        (uint32_t)GLSLstd450Determinant,
        (uint32_t)GLSLstd450MatrixInverse,
        (uint32_t)GLSLstd450ModfStruct,
        (uint32_t)GLSLstd450FMin,
        (uint32_t)GLSLstd450UMin,
        (uint32_t)GLSLstd450SMin,
        (uint32_t)GLSLstd450FMax,
        (uint32_t)GLSLstd450UMax,
        (uint32_t)GLSLstd450SMax,
        (uint32_t)GLSLstd450FClamp,
        (uint32_t)GLSLstd450UClamp,
        (uint32_t)GLSLstd450SClamp,
        (uint32_t)GLSLstd450FMix,
        (uint32_t)GLSLstd450IMix,
        (uint32_t)GLSLstd450Step,
        (uint32_t)GLSLstd450SmoothStep,
        (uint32_t)GLSLstd450Fma,
        (uint32_t)GLSLstd450FrexpStruct,
        (uint32_t)GLSLstd450Ldexp,
        (uint32_t)GLSLstd450PackSnorm4x8,
        (uint32_t)GLSLstd450PackUnorm4x8,
        (uint32_t)GLSLstd450PackSnorm2x16,
        (uint32_t)GLSLstd450PackUnorm2x16,
        (uint32_t)GLSLstd450PackHalf2x16,
        (uint32_t)GLSLstd450PackDouble2x32,
        (uint32_t)GLSLstd450UnpackSnorm2x16,
        (uint32_t)GLSLstd450UnpackUnorm2x16,
        (uint32_t)GLSLstd450UnpackHalf2x16,
        (uint32_t)GLSLstd450UnpackSnorm4x8,
        (uint32_t)GLSLstd450UnpackUnorm4x8,
        (uint32_t)GLSLstd450UnpackDouble2x32,
        (uint32_t)GLSLstd450Length,
        (uint32_t)GLSLstd450Distance,
        (uint32_t)GLSLstd450Cross,
        (uint32_t)GLSLstd450Normalize,
        (uint32_t)GLSLstd450FaceForward,
        (uint32_t)GLSLstd450Reflect,
        (uint32_t)GLSLstd450Refract,
        (uint32_t)GLSLstd450FindILsb,
        (uint32_t)GLSLstd450FindSMsb,
        (uint32_t)GLSLstd450FindUMsb,
        (uint32_t)GLSLstd450InterpolateAtCentroid,
        (uint32_t)GLSLstd450InterpolateAtSample,
        (uint32_t)GLSLstd450InterpolateAtOffset,
        (uint32_t)GLSLstd450NMin,
        (uint32_t)GLSLstd450NMax,
        (uint32_t)GLSLstd450NClamp};
  } else {
    // Map the result id to the empty set.
    combinator_ops_[extension->result_id()];
  }
}

void IRContext::InitializeCombinators() {
  for (auto capability : get_feature_mgr()->GetCapabilities()) {
    AddCombinatorsForCapability(uint32_t(capability));
  }

  for (auto& extension : module()->ext_inst_imports()) {
    AddCombinatorsForExtension(&extension);
  }

  valid_analyses_ |= kAnalysisCombinators;
}

void IRContext::RemoveFromIdToName(const Instruction* inst) {
  if (id_to_name_ && (inst->opcode() == spv::Op::OpName ||
                      inst->opcode() == spv::Op::OpMemberName)) {
    auto range = id_to_name_->equal_range(inst->GetSingleWordInOperand(0));
    for (auto it = range.first; it != range.second; ++it) {
      if (it->second == inst) {
        id_to_name_->erase(it);
        break;
      }
    }
  }
}

LoopDescriptor* IRContext::GetLoopDescriptor(const Function* f) {
  if (!AreAnalysesValid(kAnalysisLoopAnalysis)) {
    ResetLoopAnalysis();
  }

  std::unordered_map<const Function*, LoopDescriptor>::iterator it =
      loop_descriptors_.find(f);

  if (it == loop_descriptors_.end()) {
    return &loop_descriptors_
                .emplace(std::make_pair(f, LoopDescriptor(this, f)))
                .first->second;
  }

  return &it->second;
}

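// Returns the id of an existing Input-storage-class OpVariable decorated with
// BuiltIn |builtin|, or 0 if the module does not contain one.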
uint32_t IRContext::FindBuiltinInputVar(uint32_t builtin) {
  for (auto& a : module_->annotations()) {
    if (spv::Op(a.opcode()) != spv::Op::OpDecorate) continue;
    if (spv::Decoration(a.GetSingleWordInOperand(
            kSpvDecorateDecorationInIdx)) != spv::Decoration::BuiltIn)
      continue;
    if (a.GetSingleWordInOperand(kSpvDecorateBuiltinInIdx) != builtin) continue;
    uint32_t target_id = a.GetSingleWordInOperand(kSpvDecorateTargetIdInIdx);
    Instruction* b_var = get_def_use_mgr()->GetDef(target_id);
    if (b_var->opcode() != spv::Op::OpVariable) continue;
    if (spv::StorageClass(b_var->GetSingleWordInOperand(0)) !=
        spv::StorageClass::Input)
      continue;
    return target_id;
  }
  return 0;
}

void IRContext::AddVarToEntryPoints(uint32_t var_id) {
  uint32_t ocnt = 0;
  for (auto& e : module()->entry_points()) {
    bool found = false;
    e.ForEachInOperand([&ocnt, &found, &var_id](const uint32_t* idp) {
      if (ocnt >= kEntryPointInterfaceInIdx) {
        if (*idp == var_id) found = true;
      }
      ++ocnt;
    });
    if (!found) {
      e.AddOperand({SPV_OPERAND_TYPE_ID, {var_id}});
      get_def_use_mgr()->AnalyzeInstDefUse(&e);
    }
  }
}

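// Returns the id of the Input variable for |builtin|, creating the variable,
// its BuiltIn decoration, and the entry-point interface entries if the module
// does not already contain one. The result is cached in |builtin_var_id_map_|.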
uint32_t IRContext::GetBuiltinInputVarId(uint32_t builtin) {
  if (!AreAnalysesValid(kAnalysisBuiltinVarId)) ResetBuiltinAnalysis();
  // If cached, return it.
  std::unordered_map<uint32_t, uint32_t>::iterator it =
      builtin_var_id_map_.find(builtin);
  if (it != builtin_var_id_map_.end()) return it->second;

  // Look for one in shader
  uint32_t var_id = FindBuiltinInputVar(builtin);
  if (var_id == 0) {
    // If not found, create it
    // TODO(greg-lunarg): Add support for all builtins
    analysis::TypeManager* type_mgr = get_type_mgr();
    analysis::Type* reg_type;
    switch (spv::BuiltIn(builtin)) {
      case spv::BuiltIn::FragCoord: {
        analysis::Float float_ty(32);
        analysis::Type* reg_float_ty = type_mgr->GetRegisteredType(&float_ty);
        analysis::Vector v4float_ty(reg_float_ty, 4);
        reg_type = type_mgr->GetRegisteredType(&v4float_ty);
        break;
      }
      case spv::BuiltIn::VertexIndex:
      case spv::BuiltIn::InstanceIndex:
      case spv::BuiltIn::PrimitiveId:
      case spv::BuiltIn::InvocationId:
      case spv::BuiltIn::SubgroupLocalInvocationId: {
        analysis::Integer uint_ty(32, false);
        reg_type = type_mgr->GetRegisteredType(&uint_ty);
        break;
      }
      case spv::BuiltIn::GlobalInvocationId:
      case spv::BuiltIn::LaunchIdNV: {
        analysis::Integer uint_ty(32, false);
        analysis::Type* reg_uint_ty = type_mgr->GetRegisteredType(&uint_ty);
        analysis::Vector v3uint_ty(reg_uint_ty, 3);
        reg_type = type_mgr->GetRegisteredType(&v3uint_ty);
        break;
      }
      case spv::BuiltIn::TessCoord: {
        analysis::Float float_ty(32);
        analysis::Type* reg_float_ty = type_mgr->GetRegisteredType(&float_ty);
        analysis::Vector v3float_ty(reg_float_ty, 3);
        reg_type = type_mgr->GetRegisteredType(&v3float_ty);
        break;
      }
      case spv::BuiltIn::SubgroupLtMask: {
        analysis::Integer uint_ty(32, false);
        analysis::Type* reg_uint_ty = type_mgr->GetRegisteredType(&uint_ty);
        analysis::Vector v4uint_ty(reg_uint_ty, 4);
        reg_type = type_mgr->GetRegisteredType(&v4uint_ty);
        break;
      }
      default: {
        assert(false && "unhandled builtin");
        return 0;
      }
    }
    uint32_t type_id = type_mgr->GetTypeInstruction(reg_type);
    uint32_t varTyPtrId =
        type_mgr->FindPointerToType(type_id, spv::StorageClass::Input);
    // TODO(1841): Handle id overflow.
    var_id = TakeNextId();
    std::unique_ptr<Instruction> newVarOp(
        new Instruction(this, spv::Op::OpVariable, varTyPtrId, var_id,
                        {{spv_operand_type_t::SPV_OPERAND_TYPE_LITERAL_INTEGER,
                          {uint32_t(spv::StorageClass::Input)}}}));
    get_def_use_mgr()->AnalyzeInstDefUse(&*newVarOp);
    module()->AddGlobalValue(std::move(newVarOp));
    get_decoration_mgr()->AddDecorationVal(
        var_id, uint32_t(spv::Decoration::BuiltIn), builtin);
    AddVarToEntryPoints(var_id);
  }
  builtin_var_id_map_[builtin] = var_id;
  return var_id;
}

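// Pushes onto |todo| the ids of all functions referenced by |func|: direct
// OpFunctionCall callees plus the function operands of the cooperative-matrix
// instructions that take a function id (per-element op, reduce, and the
// optional DecodeFunc of OpCooperativeMatrixLoadTensorNV).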
void IRContext::AddCalls(const Function* func, std::queue<uint32_t>* todo) {
  for (auto bi = func->begin(); bi != func->end(); ++bi)
    for (auto ii = bi->begin(); ii != bi->end(); ++ii) {
      if (ii->opcode() == spv::Op::OpFunctionCall)
        todo->push(ii->GetSingleWordInOperand(0));
      if (ii->opcode() == spv::Op::OpCooperativeMatrixPerElementOpNV)
        todo->push(ii->GetSingleWordInOperand(1));
      if (ii->opcode() == spv::Op::OpCooperativeMatrixReduceNV)
        todo->push(ii->GetSingleWordInOperand(2));
      if (ii->opcode() == spv::Op::OpCooperativeMatrixLoadTensorNV) {
        const auto memory_operands_index = 3;
        auto mask = ii->GetSingleWordInOperand(memory_operands_index);
        uint32_t count = 1;
        if (mask & uint32_t(spv::MemoryAccessMask::Aligned)) ++count;
        if (mask & uint32_t(spv::MemoryAccessMask::MakePointerAvailableKHR))
          ++count;
        if (mask & uint32_t(spv::MemoryAccessMask::MakePointerVisibleKHR))
          ++count;
        const auto tensor_operands_index = memory_operands_index + count;
        mask = ii->GetSingleWordInOperand(tensor_operands_index);
        count = 1;
        if (mask & uint32_t(spv::TensorAddressingOperandsMask::TensorView))
          ++count;
        if (mask & uint32_t(spv::TensorAddressingOperandsMask::DecodeFunc)) {
          todo->push(ii->GetSingleWordInOperand(tensor_operands_index + count));
        }
      }
    }
}

bool IRContext::ProcessEntryPointCallTree(ProcessFunction& pfn) {
  // Collect all of the entry points as the roots.
  std::queue<uint32_t> roots;
  for (auto& e : module()->entry_points()) {
    roots.push(e.GetSingleWordInOperand(kEntryPointFunctionIdInIdx));
  }
  return ProcessCallTreeFromRoots(pfn, &roots);
}

bool IRContext::ProcessReachableCallTree(ProcessFunction& pfn) {
  std::queue<uint32_t> roots;

  // Add all entry points since they can be reached from outside the module.
  for (auto& e : module()->entry_points())
    roots.push(e.GetSingleWordInOperand(kEntryPointFunctionIdInIdx));

  // Add all exported functions since they can be reached from outside the
  // module.
  for (auto& a : annotations()) {
    // TODO: Handle group decorations as well. They are currently not generated
    // by any front end, but could be in the future.
    if (a.opcode() == spv::Op::OpDecorate) {
      if (spv::Decoration(a.GetSingleWordOperand(1)) ==
          spv::Decoration::LinkageAttributes) {
        uint32_t lastOperand = a.NumOperands() - 1;
        if (spv::LinkageType(a.GetSingleWordOperand(lastOperand)) ==
            spv::LinkageType::Export) {
          uint32_t id = a.GetSingleWordOperand(0);
          if (GetFunction(id)) {
            roots.push(id);
          }
        }
      }
    }
  }

  return ProcessCallTreeFromRoots(pfn, &roots);
}

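// Applies |pfn| to every function reachable from the ids in |roots|, visiting
// each function at most once. Returns true if any invocation of |pfn| reported
// a modification.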
bool IRContext::ProcessCallTreeFromRoots(ProcessFunction& pfn,
                                         std::queue<uint32_t>* roots) {
  // Process call tree
  bool modified = false;
  std::unordered_set<uint32_t> done;

  while (!roots->empty()) {
    const uint32_t fi = roots->front();
    roots->pop();
    if (done.insert(fi).second) {
      Function* fn = GetFunction(fi);
      assert(fn && "Trying to process a function that does not exist.");
      modified = pfn(fn) || modified;
      AddCalls(fn, roots);
    }
  }
  return modified;
}

void IRContext::CollectCallTreeFromRoots(unsigned entryId,
                                         std::unordered_set<uint32_t>* funcs) {
  std::queue<uint32_t> roots;
  roots.push(entryId);
  while (!roots.empty()) {
    const uint32_t fi = roots.front();
    roots.pop();
    funcs->insert(fi);
    Function* fn = GetFunction(fi);
    AddCalls(fn, &roots);
  }
}

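// Forwards |message| to the message consumer, if one is registered. Walks
// backwards from |inst| to the nearest preceding debug line instruction in the
// same basic block to attach a source file name, line, and column.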
void IRContext::EmitErrorMessage(std::string message, Instruction* inst) {
  if (!consumer()) {
    return;
  }

  Instruction* line_inst = inst;
  while (line_inst != nullptr) {  // Stop at the beginning of the basic block.
    if (!line_inst->dbg_line_insts().empty()) {
      line_inst = &line_inst->dbg_line_insts().back();
      if (line_inst->IsNoLine()) {
        line_inst = nullptr;
      }
      break;
    }
    line_inst = line_inst->PreviousNode();
  }

  uint32_t line_number = 0;
  uint32_t col_number = 0;
  std::string source;
  if (line_inst != nullptr) {
    Instruction* file_name =
        get_def_use_mgr()->GetDef(line_inst->GetSingleWordInOperand(0));
    source = file_name->GetInOperand(0).AsString();

    // Get the line number and column number.
    line_number = line_inst->GetSingleWordInOperand(1);
    col_number = line_inst->GetSingleWordInOperand(2);
  }

  message +=
      "\n " + inst->PrettyPrint(SPV_BINARY_TO_TEXT_OPTION_FRIENDLY_NAMES);
  consumer()(SPV_MSG_ERROR, source.c_str(), {line_number, col_number, 0},
             message.c_str());
}

// Gets the dominator analysis for function |f|.
DominatorAnalysis* IRContext::GetDominatorAnalysis(const Function* f) {
  if (!AreAnalysesValid(kAnalysisDominatorAnalysis)) {
    ResetDominatorAnalysis();
  }

  if (dominator_trees_.find(f) == dominator_trees_.end()) {
    dominator_trees_[f].InitializeTree(*cfg(), f);
  }

  return &dominator_trees_[f];
}

// Gets the postdominator analysis for function |f|.
PostDominatorAnalysis* IRContext::GetPostDominatorAnalysis(const Function* f) {
  if (!AreAnalysesValid(kAnalysisDominatorAnalysis)) {
    ResetDominatorAnalysis();
  }

  if (post_dominator_trees_.find(f) == post_dominator_trees_.end()) {
    post_dominator_trees_[f].InitializeTree(*cfg(), f);
  }

  return &post_dominator_trees_[f];
}

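// Verifies that the predecessor lists recorded in the CFG analysis match the
// successors actually present in each function. On a mismatch, both lists are
// printed to stderr and false is returned.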
bool IRContext::CheckCFG() {
  std::unordered_map<uint32_t, std::vector<uint32_t>> real_preds;
  if (!AreAnalysesValid(kAnalysisCFG)) {
    return true;
  }

  for (Function& function : *module()) {
    for (const auto& bb : function) {
      bb.ForEachSuccessorLabel([&bb, &real_preds](const uint32_t lab_id) {
        real_preds[lab_id].push_back(bb.id());
      });
    }

    for (auto& bb : function) {
      std::vector<uint32_t> preds = cfg()->preds(bb.id());
      std::vector<uint32_t> real = real_preds[bb.id()];
      std::sort(preds.begin(), preds.end());
      std::sort(real.begin(), real.end());

      bool same = true;
      if (preds.size() != real.size()) {
        same = false;
      }

      for (size_t i = 0; i < real.size() && same; i++) {
        if (preds[i] != real[i]) {
          same = false;
        }
      }

      if (!same) {
        std::cerr << "Predecessors for " << bb.id() << " are different:\n";

        std::cerr << "Real:";
        for (uint32_t i : real) {
          std::cerr << ' ' << i;
        }
        std::cerr << std::endl;

        std::cerr << "Recorded:";
        for (uint32_t i : preds) {
          std::cerr << ' ' << i;
        }
        std::cerr << std::endl;
      }
      if (!same) return false;
    }
  }

  return true;
}

bool IRContext::IsReachable(const opt::BasicBlock& bb) {
  auto enclosing_function = bb.GetParent();
  return GetDominatorAnalysis(enclosing_function)
      ->Dominates(enclosing_function->entry().get(), &bb);
}

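// Returns the execution model (shader stage) declared by the module's entry
// points, or spv::ExecutionModel::Max if there are none. Emits an error if the
// entry points do not all use the same execution model.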
spv::ExecutionModel IRContext::GetStage() {
  const auto& entry_points = module()->entry_points();
  if (entry_points.empty()) {
    return spv::ExecutionModel::Max;
  }

  uint32_t stage = entry_points.begin()->GetSingleWordInOperand(
      kEntryPointExecutionModelInIdx);
  auto it = std::find_if(
      entry_points.begin(), entry_points.end(), [stage](const Instruction& x) {
        return x.GetSingleWordInOperand(kEntryPointExecutionModelInIdx) !=
               stage;
      });
  if (it != entry_points.end()) {
    EmitErrorMessage("Mixed stage shader module not supported", &(*it));
  }

  return static_cast<spv::ExecutionModel>(stage);
}

}  // namespace opt
}  // namespace spvtools