aggressive_dead_code_elim_pass.cpp 40 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112
  1. // Copyright (c) 2017 The Khronos Group Inc.
  2. // Copyright (c) 2017 Valve Corporation
  3. // Copyright (c) 2017 LunarG Inc.
  4. // Copyright (c) 2018-2021 Google LLC
  5. //
  6. // Licensed under the Apache License, Version 2.0 (the "License");
  7. // you may not use this file except in compliance with the License.
  8. // You may obtain a copy of the License at
  9. //
  10. // http://www.apache.org/licenses/LICENSE-2.0
  11. //
  12. // Unless required by applicable law or agreed to in writing, software
  13. // distributed under the License is distributed on an "AS IS" BASIS,
  14. // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15. // See the License for the specific language governing permissions and
  16. // limitations under the License.
  17. #include "source/opt/aggressive_dead_code_elim_pass.h"
  18. #include <memory>
  19. #include <stack>
  20. #include "source/cfa.h"
  21. #include "source/latest_version_glsl_std_450_header.h"
  22. #include "source/opt/eliminate_dead_functions_util.h"
  23. #include "source/opt/ir_builder.h"
  24. #include "source/opt/iterator.h"
  25. #include "source/opt/reflect.h"
  26. #include "source/spirv_constant.h"
  27. #include "source/util/string_utils.h"
  28. namespace spvtools {
  29. namespace opt {
  30. namespace {
  31. constexpr uint32_t kTypePointerStorageClassInIdx = 0;
  32. constexpr uint32_t kEntryPointFunctionIdInIdx = 1;
  33. constexpr uint32_t kSelectionMergeMergeBlockIdInIdx = 0;
  34. constexpr uint32_t kLoopMergeContinueBlockIdInIdx = 1;
  35. constexpr uint32_t kCopyMemoryTargetAddrInIdx = 0;
  36. constexpr uint32_t kCopyMemorySourceAddrInIdx = 1;
  37. constexpr uint32_t kLoadSourceAddrInIdx = 0;
  38. constexpr uint32_t kDebugDeclareOperandVariableIndex = 5;
  39. constexpr uint32_t kGlobalVariableVariableIndex = 12;
  40. // Sorting functor to present annotation instructions in an easy-to-process
  41. // order. The functor orders by opcode first and falls back on unique id
  42. // ordering if both instructions have the same opcode.
  43. //
  44. // Desired priority:
  45. // spv::Op::OpGroupDecorate
  46. // spv::Op::OpGroupMemberDecorate
  47. // spv::Op::OpDecorate
  48. // spv::Op::OpMemberDecorate
  49. // spv::Op::OpDecorateId
  50. // spv::Op::OpDecorateStringGOOGLE
  51. // spv::Op::OpDecorationGroup
  52. struct DecorationLess {
  53. bool operator()(const Instruction* lhs, const Instruction* rhs) const {
  54. assert(lhs && rhs);
  55. spv::Op lhsOp = lhs->opcode();
  56. spv::Op rhsOp = rhs->opcode();
  57. if (lhsOp != rhsOp) {
  58. #define PRIORITY_CASE(opcode) \
  59. if (lhsOp == opcode && rhsOp != opcode) return true; \
  60. if (rhsOp == opcode && lhsOp != opcode) return false;
  61. // OpGroupDecorate and OpGroupMember decorate are highest priority to
  62. // eliminate dead targets early and simplify subsequent checks.
  63. PRIORITY_CASE(spv::Op::OpGroupDecorate)
  64. PRIORITY_CASE(spv::Op::OpGroupMemberDecorate)
  65. PRIORITY_CASE(spv::Op::OpDecorate)
  66. PRIORITY_CASE(spv::Op::OpMemberDecorate)
  67. PRIORITY_CASE(spv::Op::OpDecorateId)
  68. PRIORITY_CASE(spv::Op::OpDecorateStringGOOGLE)
  69. // OpDecorationGroup is lowest priority to ensure use/def chains remain
  70. // usable for instructions that target this group.
  71. PRIORITY_CASE(spv::Op::OpDecorationGroup)
  72. #undef PRIORITY_CASE
  73. }
  74. // Fall back to maintain total ordering (compare unique ids).
  75. return *lhs < *rhs;
  76. }
  77. };
  78. } // namespace
  79. bool AggressiveDCEPass::IsVarOfStorage(uint32_t varId,
  80. spv::StorageClass storageClass) {
  81. if (varId == 0) return false;
  82. const Instruction* varInst = get_def_use_mgr()->GetDef(varId);
  83. const spv::Op op = varInst->opcode();
  84. if (op != spv::Op::OpVariable) return false;
  85. const uint32_t varTypeId = varInst->type_id();
  86. const Instruction* varTypeInst = get_def_use_mgr()->GetDef(varTypeId);
  87. if (varTypeInst->opcode() != spv::Op::OpTypePointer) return false;
  88. return spv::StorageClass(varTypeInst->GetSingleWordInOperand(
  89. kTypePointerStorageClassInIdx)) == storageClass;
  90. }
  91. bool AggressiveDCEPass::IsLocalVar(uint32_t varId, Function* func) {
  92. if (IsVarOfStorage(varId, spv::StorageClass::Function)) {
  93. return true;
  94. }
  95. if (!IsVarOfStorage(varId, spv::StorageClass::Private) &&
  96. !IsVarOfStorage(varId, spv::StorageClass::Workgroup)) {
  97. return false;
  98. }
  99. // For a variable in the Private or WorkGroup storage class, the variable will
  100. // get a new instance for every call to an entry point. If the entry point
  101. // does not have a call, then no other function can read or write to that
  102. // instance of the variable.
  103. return IsEntryPointWithNoCalls(func);
  104. }
  105. void AggressiveDCEPass::AddStores(Function* func, uint32_t ptrId) {
  106. get_def_use_mgr()->ForEachUser(ptrId, [this, ptrId, func](Instruction* user) {
  107. // If the user is not a part of |func|, skip it.
  108. BasicBlock* blk = context()->get_instr_block(user);
  109. if (blk && blk->GetParent() != func) return;
  110. switch (user->opcode()) {
  111. case spv::Op::OpAccessChain:
  112. case spv::Op::OpInBoundsAccessChain:
  113. case spv::Op::OpCopyObject:
  114. this->AddStores(func, user->result_id());
  115. break;
  116. case spv::Op::OpLoad:
  117. break;
  118. case spv::Op::OpCopyMemory:
  119. case spv::Op::OpCopyMemorySized:
  120. if (user->GetSingleWordInOperand(kCopyMemoryTargetAddrInIdx) == ptrId) {
  121. AddToWorklist(user);
  122. }
  123. break;
  124. // If default, assume it stores e.g. frexp, modf, function call
  125. case spv::Op::OpStore:
  126. default:
  127. AddToWorklist(user);
  128. break;
  129. }
  130. });
  131. }
  132. bool AggressiveDCEPass::AllExtensionsSupported() const {
  133. // If any extension not in allowlist, return false
  134. for (auto& ei : get_module()->extensions()) {
  135. const std::string extName = ei.GetInOperand(0).AsString();
  136. if (extensions_allowlist_.find(extName) == extensions_allowlist_.end())
  137. return false;
  138. }
  139. // Only allow NonSemantic.Shader.DebugInfo.100, we cannot safely optimise
  140. // around unknown extended instruction sets even if they are non-semantic
  141. for (auto& inst : context()->module()->ext_inst_imports()) {
  142. assert(inst.opcode() == spv::Op::OpExtInstImport &&
  143. "Expecting an import of an extension's instruction set.");
  144. const std::string extension_name = inst.GetInOperand(0).AsString();
  145. if (spvtools::utils::starts_with(extension_name, "NonSemantic.") &&
  146. extension_name != "NonSemantic.Shader.DebugInfo.100") {
  147. return false;
  148. }
  149. }
  150. return true;
  151. }
  152. bool AggressiveDCEPass::IsTargetDead(Instruction* inst) {
  153. const uint32_t tId = inst->GetSingleWordInOperand(0);
  154. Instruction* tInst = get_def_use_mgr()->GetDef(tId);
  155. if (IsAnnotationInst(tInst->opcode())) {
  156. // This must be a decoration group. We go through annotations in a specific
  157. // order. So if this is not used by any group or group member decorates, it
  158. // is dead.
  159. assert(tInst->opcode() == spv::Op::OpDecorationGroup);
  160. bool dead = true;
  161. get_def_use_mgr()->ForEachUser(tInst, [&dead](Instruction* user) {
  162. if (user->opcode() == spv::Op::OpGroupDecorate ||
  163. user->opcode() == spv::Op::OpGroupMemberDecorate)
  164. dead = false;
  165. });
  166. return dead;
  167. }
  168. return !IsLive(tInst);
  169. }
  170. void AggressiveDCEPass::ProcessLoad(Function* func, uint32_t varId) {
  171. // Only process locals
  172. if (!IsLocalVar(varId, func)) return;
  173. // Return if already processed
  174. if (live_local_vars_.find(varId) != live_local_vars_.end()) return;
  175. // Mark all stores to varId as live
  176. AddStores(func, varId);
  177. // Cache varId as processed
  178. live_local_vars_.insert(varId);
  179. }
  180. void AggressiveDCEPass::AddBranch(uint32_t labelId, BasicBlock* bp) {
  181. std::unique_ptr<Instruction> newBranch(
  182. new Instruction(context(), spv::Op::OpBranch, 0, 0,
  183. {{spv_operand_type_t::SPV_OPERAND_TYPE_ID, {labelId}}}));
  184. context()->AnalyzeDefUse(&*newBranch);
  185. context()->set_instr_block(&*newBranch, bp);
  186. bp->AddInstruction(std::move(newBranch));
  187. }
// Marks as live the break (and, for loops, continue) branches of the
// structured construct headed by |mergeInst|'s block. A branch into the
// construct's merge or continue block must be kept even though its own block
// may otherwise look dead, because it defines the construct's shape.
void AggressiveDCEPass::AddBreaksAndContinuesToWorklist(
    Instruction* mergeInst) {
  assert(mergeInst->opcode() == spv::Op::OpSelectionMerge ||
         mergeInst->opcode() == spv::Op::OpLoopMerge);

  BasicBlock* header = context()->get_instr_block(mergeInst);
  const uint32_t mergeId = mergeInst->GetSingleWordInOperand(0);
  // Any branch to the merge block that originates inside the construct is a
  // break out of it.
  get_def_use_mgr()->ForEachUser(mergeId, [header, this](Instruction* user) {
    if (!user->IsBranch()) return;
    BasicBlock* block = context()->get_instr_block(user);
    if (BlockIsInConstruct(header, block)) {
      // This is a break from the loop.
      AddToWorklist(user);
      // Add branch's merge if there is one.
      Instruction* userMerge = GetMergeInstruction(user);
      if (userMerge != nullptr) AddToWorklist(userMerge);
    }
  });

  if (mergeInst->opcode() != spv::Op::OpLoopMerge) {
    return;
  }

  // For loops we need to find the continues as well.
  const uint32_t contId =
      mergeInst->GetSingleWordInOperand(kLoopMergeContinueBlockIdInIdx);
  get_def_use_mgr()->ForEachUser(contId, [&contId, this](Instruction* user) {
    spv::Op op = user->opcode();
    if (op == spv::Op::OpBranchConditional || op == spv::Op::OpSwitch) {
      // A conditional branch or switch can only be a continue if it does not
      // have a merge instruction or its merge block is not the continue block.
      Instruction* hdrMerge = GetMergeInstruction(user);
      if (hdrMerge != nullptr &&
          hdrMerge->opcode() == spv::Op::OpSelectionMerge) {
        uint32_t hdrMergeId =
            hdrMerge->GetSingleWordInOperand(kSelectionMergeMergeBlockIdInIdx);
        if (hdrMergeId == contId) return;
        // Need to mark merge instruction too
        AddToWorklist(hdrMerge);
      }
    } else if (op == spv::Op::OpBranch) {
      // An unconditional branch can only be a continue if it is not
      // branching to its own merge block.
      BasicBlock* blk = context()->get_instr_block(user);
      Instruction* hdrBranch = GetHeaderBranch(blk);
      if (hdrBranch == nullptr) return;
      Instruction* hdrMerge = GetMergeInstruction(hdrBranch);
      // A loop header branching to the continue block is a back edge, not a
      // selection exit, so it is kept.
      if (hdrMerge->opcode() == spv::Op::OpLoopMerge) return;
      uint32_t hdrMergeId =
          hdrMerge->GetSingleWordInOperand(kSelectionMergeMergeBlockIdInIdx);
      if (contId == hdrMergeId) return;
    } else {
      // Non-branch users of the continue label (e.g. the OpLoopMerge itself)
      // are not continues.
      return;
    }
    AddToWorklist(user);
  });
}
  242. bool AggressiveDCEPass::AggressiveDCE(Function* func) {
  243. std::list<BasicBlock*> structured_order;
  244. cfg()->ComputeStructuredOrder(func, &*func->begin(), &structured_order);
  245. live_local_vars_.clear();
  246. InitializeWorkList(func, structured_order);
  247. ProcessWorkList(func);
  248. return KillDeadInstructions(func, structured_order);
  249. }
// Queues every non-live instruction in |structured_order| for deletion
// (labels are always kept) and patches the CFG so the remaining blocks stay
// structurally valid. Returns true if anything was queued.
bool AggressiveDCEPass::KillDeadInstructions(
    const Function* func, std::list<BasicBlock*>& structured_order) {
  bool modified = false;
  for (auto bi = structured_order.begin(); bi != structured_order.end();) {
    uint32_t merge_block_id = 0;
    (*bi)->ForEachInst([this, &modified, &merge_block_id](Instruction* inst) {
      if (IsLive(inst)) return;
      if (inst->opcode() == spv::Op::OpLabel) return;
      // If dead instruction is selection merge, remember merge block
      // for new branch at end of block
      if (inst->opcode() == spv::Op::OpSelectionMerge ||
          inst->opcode() == spv::Op::OpLoopMerge)
        merge_block_id = inst->GetSingleWordInOperand(0);
      to_kill_.push_back(inst);
      modified = true;
    });
    // If a structured if or loop was deleted, add a branch to its merge
    // block, and traverse to the merge block and continue processing there.
    // We know the block still exists because the label is not deleted.
    if (merge_block_id != 0) {
      AddBranch(merge_block_id, *bi);
      // Skip ahead in the structured order until we reach the merge block;
      // everything in between belonged to the deleted construct.
      for (++bi; (*bi)->id() != merge_block_id; ++bi) {
      }

      auto merge_terminator = (*bi)->terminator();
      if (merge_terminator->opcode() == spv::Op::OpUnreachable) {
        // The merge was unreachable. This is undefined behaviour so just
        // return (or return an undef). Then mark the new return as live.
        auto func_ret_type_inst = get_def_use_mgr()->GetDef(func->type_id());
        if (func_ret_type_inst->opcode() == spv::Op::OpTypeVoid) {
          merge_terminator->SetOpcode(spv::Op::OpReturn);
        } else {
          // Find an undef for the return value and make sure it gets kept by
          // the pass.
          auto undef_id = Type2Undef(func->type_id());
          auto undef = get_def_use_mgr()->GetDef(undef_id);
          live_insts_.Set(undef->unique_id());
          merge_terminator->SetOpcode(spv::Op::OpReturnValue);
          merge_terminator->SetInOperands({{SPV_OPERAND_TYPE_ID, {undef_id}}});
          get_def_use_mgr()->AnalyzeInstUse(merge_terminator);
        }
        live_insts_.Set(merge_terminator->unique_id());
      }
    } else {
      Instruction* inst = (*bi)->terminator();
      if (!IsLive(inst)) {
        // If the terminator is not live, this block has no live instructions,
        // and it will be unreachable.
        AddUnreachable(*bi);
      }
      ++bi;
    }
  }
  return modified;
}
  304. void AggressiveDCEPass::ProcessWorkList(Function* func) {
  305. while (!worklist_.empty()) {
  306. Instruction* live_inst = worklist_.front();
  307. worklist_.pop();
  308. AddOperandsToWorkList(live_inst);
  309. MarkBlockAsLive(live_inst);
  310. MarkLoadedVariablesAsLive(func, live_inst);
  311. AddDecorationsToWorkList(live_inst);
  312. AddDebugInstructionsToWorkList(live_inst);
  313. }
  314. }
  315. void AggressiveDCEPass::AddDebugScopeToWorkList(const Instruction* inst) {
  316. auto scope = inst->GetDebugScope();
  317. auto lex_scope_id = scope.GetLexicalScope();
  318. if (lex_scope_id != kNoDebugScope)
  319. AddToWorklist(get_def_use_mgr()->GetDef(lex_scope_id));
  320. auto inlined_at_id = scope.GetInlinedAt();
  321. if (inlined_at_id != kNoInlinedAt)
  322. AddToWorklist(get_def_use_mgr()->GetDef(inlined_at_id));
  323. }
  324. void AggressiveDCEPass::AddDebugInstructionsToWorkList(
  325. const Instruction* inst) {
  326. for (auto& line_inst : inst->dbg_line_insts()) {
  327. if (line_inst.IsDebugLineInst()) {
  328. AddOperandsToWorkList(&line_inst);
  329. }
  330. AddDebugScopeToWorkList(&line_inst);
  331. }
  332. AddDebugScopeToWorkList(inst);
  333. }
  334. void AggressiveDCEPass::AddDecorationsToWorkList(const Instruction* inst) {
  335. // Add OpDecorateId instructions that apply to this instruction to the work
  336. // list. We use the decoration manager to look through the group
  337. // decorations to get to the OpDecorate* instructions themselves.
  338. auto decorations =
  339. get_decoration_mgr()->GetDecorationsFor(inst->result_id(), false);
  340. for (Instruction* dec : decorations) {
  341. // We only care about OpDecorateId instructions because the are the only
  342. // decorations that will reference an id that will have to be kept live
  343. // because of that use.
  344. if (dec->opcode() != spv::Op::OpDecorateId) {
  345. continue;
  346. }
  347. if (spv::Decoration(dec->GetSingleWordInOperand(1)) ==
  348. spv::Decoration::HlslCounterBufferGOOGLE) {
  349. // These decorations should not force the use id to be live. It will be
  350. // removed if either the target or the in operand are dead.
  351. continue;
  352. }
  353. AddToWorklist(dec);
  354. }
  355. }
  356. void AggressiveDCEPass::MarkLoadedVariablesAsLive(Function* func,
  357. Instruction* inst) {
  358. std::vector<uint32_t> live_variables = GetLoadedVariables(inst);
  359. for (uint32_t var_id : live_variables) {
  360. ProcessLoad(func, var_id);
  361. }
  362. }
  363. std::vector<uint32_t> AggressiveDCEPass::GetLoadedVariables(Instruction* inst) {
  364. if (inst->opcode() == spv::Op::OpFunctionCall) {
  365. return GetLoadedVariablesFromFunctionCall(inst);
  366. }
  367. uint32_t var_id = GetLoadedVariableFromNonFunctionCalls(inst);
  368. if (var_id == 0) {
  369. return {};
  370. }
  371. return {var_id};
  372. }
  373. uint32_t AggressiveDCEPass::GetLoadedVariableFromNonFunctionCalls(
  374. Instruction* inst) {
  375. std::vector<uint32_t> live_variables;
  376. if (inst->IsAtomicWithLoad()) {
  377. return GetVariableId(inst->GetSingleWordInOperand(kLoadSourceAddrInIdx));
  378. }
  379. switch (inst->opcode()) {
  380. case spv::Op::OpLoad:
  381. case spv::Op::OpImageTexelPointer:
  382. return GetVariableId(inst->GetSingleWordInOperand(kLoadSourceAddrInIdx));
  383. case spv::Op::OpCopyMemory:
  384. case spv::Op::OpCopyMemorySized:
  385. return GetVariableId(
  386. inst->GetSingleWordInOperand(kCopyMemorySourceAddrInIdx));
  387. default:
  388. break;
  389. }
  390. switch (inst->GetCommonDebugOpcode()) {
  391. case CommonDebugInfoDebugDeclare:
  392. return inst->GetSingleWordOperand(kDebugDeclareOperandVariableIndex);
  393. case CommonDebugInfoDebugValue: {
  394. analysis::DebugInfoManager* debug_info_mgr =
  395. context()->get_debug_info_mgr();
  396. return debug_info_mgr->GetVariableIdOfDebugValueUsedForDeclare(inst);
  397. }
  398. default:
  399. break;
  400. }
  401. return 0;
  402. }
  403. std::vector<uint32_t> AggressiveDCEPass::GetLoadedVariablesFromFunctionCall(
  404. const Instruction* inst) {
  405. assert(inst->opcode() == spv::Op::OpFunctionCall);
  406. std::vector<uint32_t> live_variables;
  407. inst->ForEachInId([this, &live_variables](const uint32_t* operand_id) {
  408. if (!IsPtr(*operand_id)) return;
  409. uint32_t var_id = GetVariableId(*operand_id);
  410. live_variables.push_back(var_id);
  411. });
  412. return live_variables;
  413. }
  414. uint32_t AggressiveDCEPass::GetVariableId(uint32_t ptr_id) {
  415. assert(IsPtr(ptr_id) &&
  416. "Cannot get the variable when input is not a pointer.");
  417. uint32_t varId = 0;
  418. (void)GetPtr(ptr_id, &varId);
  419. return varId;
  420. }
// Given a live instruction |inst|, keeps alive the pieces of control flow
// its containing block needs: the block label, the terminator or merge
// label, enclosing loop constructs, and break/continue branches.
void AggressiveDCEPass::MarkBlockAsLive(Instruction* inst) {
  BasicBlock* basic_block = context()->get_instr_block(inst);
  if (basic_block == nullptr) {
    // |inst| is not inside a basic block (e.g. a global); nothing to do.
    return;
  }

  // If we intend to keep this instruction, we need the block label and
  // block terminator to have a valid block for the instruction.
  AddToWorklist(basic_block->GetLabelInst());

  // We need to mark the successors blocks that follow as live. If this is
  // header of the merge construct, the construct may be folded, but we will
  // definitely need the merge label. If it is not a construct, the terminator
  // must be live, and the successor blocks will be marked as live when
  // processing the terminator.
  uint32_t merge_id = basic_block->MergeBlockIdIfAny();
  if (merge_id == 0) {
    AddToWorklist(basic_block->terminator());
  } else {
    AddToWorklist(context()->get_def_use_mgr()->GetDef(merge_id));
  }

  // Mark the structured control flow constructs that contains this block as
  // live. If |inst| is an instruction in the loop header, then it is part of
  // the loop, so the loop construct must be live. We exclude the label because
  // it does not matter how many times it is executed. This could be extended
  // to more instructions, but we will need it for now.
  if (inst->opcode() != spv::Op::OpLabel)
    MarkLoopConstructAsLiveIfLoopHeader(basic_block);

  // If a later header branches around this block, that branch (and its merge)
  // must be kept so the block remains reachable.
  Instruction* next_branch_inst = GetBranchForNextHeader(basic_block);
  if (next_branch_inst != nullptr) {
    AddToWorklist(next_branch_inst);
    Instruction* mergeInst = GetMergeInstruction(next_branch_inst);
    AddToWorklist(mergeInst);
  }

  // A live merge instruction keeps the breaks/continues of its construct.
  if (inst->opcode() == spv::Op::OpLoopMerge ||
      inst->opcode() == spv::Op::OpSelectionMerge) {
    AddBreaksAndContinuesToWorklist(inst);
  }
}
  458. void AggressiveDCEPass::MarkLoopConstructAsLiveIfLoopHeader(
  459. BasicBlock* basic_block) {
  460. // If this is the header for a loop, then loop structure needs to keep as well
  461. // because the loop header is also part of the loop.
  462. Instruction* merge_inst = basic_block->GetLoopMergeInst();
  463. if (merge_inst != nullptr) {
  464. AddToWorklist(basic_block->terminator());
  465. AddToWorklist(merge_inst);
  466. }
  467. }
  468. void AggressiveDCEPass::AddOperandsToWorkList(const Instruction* inst) {
  469. inst->ForEachInId([this](const uint32_t* iid) {
  470. Instruction* inInst = get_def_use_mgr()->GetDef(*iid);
  471. AddToWorklist(inInst);
  472. });
  473. if (inst->type_id() != 0) {
  474. AddToWorklist(get_def_use_mgr()->GetDef(inst->type_id()));
  475. }
  476. }
// Seeds the worklist for |func| with everything that is unconditionally
// live: the function definition, its parameters, the first block, and every
// instruction with externally visible side effects. Stores/copies to local
// variables are NOT seeded; they only become live if the variable is read.
void AggressiveDCEPass::InitializeWorkList(
    Function* func, std::list<BasicBlock*>& structured_order) {
  AddToWorklist(&func->DefInst());
  MarkFunctionParameterAsLive(func);
  MarkFirstBlockAsLive(func);

  // Add instructions with external side effects to the worklist. Also add
  // branches that are not attached to a structured construct.
  // TODO(s-perron): The handling of branch seems to be adhoc. This needs to be
  // cleaned up.
  for (auto& bi : structured_order) {
    for (auto ii = bi->begin(); ii != bi->end(); ++ii) {
      spv::Op op = ii->opcode();
      // Branches become live only via the liveness of the constructs they
      // belong to, handled elsewhere.
      if (ii->IsBranch()) {
        continue;
      }
      switch (op) {
        case spv::Op::OpStore: {
          // A store is a seed only when its target is not local to |func|.
          uint32_t var_id = 0;
          (void)GetPtr(&*ii, &var_id);
          if (!IsLocalVar(var_id, func)) AddToWorklist(&*ii);
        } break;
        case spv::Op::OpCopyMemory:
        case spv::Op::OpCopyMemorySized: {
          // Same rule as OpStore, but the written pointer is the copy target.
          uint32_t var_id = 0;
          uint32_t target_addr_id =
              ii->GetSingleWordInOperand(kCopyMemoryTargetAddrInIdx);
          (void)GetPtr(target_addr_id, &var_id);
          if (!IsLocalVar(var_id, func)) AddToWorklist(&*ii);
        } break;
        case spv::Op::OpLoopMerge:
        case spv::Op::OpSelectionMerge:
        case spv::Op::OpUnreachable:
          // Merge instructions and unreachable terminators are live only if
          // their construct/block is live; never seed them directly.
          break;
        default: {
          // Function calls, atomics, function params, function returns, etc.
          if (!ii->IsOpcodeSafeToDelete()) {
            AddToWorklist(&*ii);
          }
        } break;
      }
    }
  }
}
  520. void AggressiveDCEPass::InitializeModuleScopeLiveInstructions() {
  521. // Keep all execution modes.
  522. for (auto& exec : get_module()->execution_modes()) {
  523. AddToWorklist(&exec);
  524. }
  525. // Keep all entry points.
  526. for (auto& entry : get_module()->entry_points()) {
  527. if (!preserve_interface_) {
  528. live_insts_.Set(entry.unique_id());
  529. // The actual function is live always.
  530. AddToWorklist(
  531. get_def_use_mgr()->GetDef(entry.GetSingleWordInOperand(1u)));
  532. for (uint32_t i = 3; i < entry.NumInOperands(); ++i) {
  533. auto* var = get_def_use_mgr()->GetDef(entry.GetSingleWordInOperand(i));
  534. auto storage_class = var->GetSingleWordInOperand(0u);
  535. // Vulkan support outputs without an associated input, but not inputs
  536. // without an associated output. Don't remove outputs unless explicitly
  537. // allowed.
  538. if (!remove_outputs_ &&
  539. spv::StorageClass(storage_class) == spv::StorageClass::Output) {
  540. AddToWorklist(var);
  541. }
  542. }
  543. } else {
  544. AddToWorklist(&entry);
  545. }
  546. }
  547. for (auto& anno : get_module()->annotations()) {
  548. if (anno.opcode() == spv::Op::OpDecorate) {
  549. // Keep workgroup size.
  550. if (spv::Decoration(anno.GetSingleWordInOperand(1u)) ==
  551. spv::Decoration::BuiltIn &&
  552. spv::BuiltIn(anno.GetSingleWordInOperand(2u)) ==
  553. spv::BuiltIn::WorkgroupSize) {
  554. AddToWorklist(&anno);
  555. }
  556. if (context()->preserve_bindings()) {
  557. // Keep all bindings.
  558. if ((spv::Decoration(anno.GetSingleWordInOperand(1u)) ==
  559. spv::Decoration::DescriptorSet) ||
  560. (spv::Decoration(anno.GetSingleWordInOperand(1u)) ==
  561. spv::Decoration::Binding)) {
  562. AddToWorklist(&anno);
  563. }
  564. }
  565. if (context()->preserve_spec_constants()) {
  566. // Keep all specialization constant instructions
  567. if (spv::Decoration(anno.GetSingleWordInOperand(1u)) ==
  568. spv::Decoration::SpecId) {
  569. AddToWorklist(&anno);
  570. }
  571. }
  572. }
  573. }
  574. // For each DebugInfo GlobalVariable keep all operands except the Variable.
  575. // Later, if the variable is killed with KillInst(), we will set the operand
  576. // to DebugInfoNone. Create and save DebugInfoNone now for this possible
  577. // later use. This is slightly unoptimal, but it avoids generating it during
  578. // instruction killing when the module is not consistent.
  579. bool debug_global_seen = false;
  580. for (auto& dbg : get_module()->ext_inst_debuginfo()) {
  581. if (dbg.GetCommonDebugOpcode() != CommonDebugInfoDebugGlobalVariable)
  582. continue;
  583. debug_global_seen = true;
  584. dbg.ForEachInId([this](const uint32_t* iid) {
  585. Instruction* in_inst = get_def_use_mgr()->GetDef(*iid);
  586. if (in_inst->opcode() == spv::Op::OpVariable) return;
  587. AddToWorklist(in_inst);
  588. });
  589. }
  590. if (debug_global_seen) {
  591. auto dbg_none = context()->get_debug_info_mgr()->GetDebugInfoNone();
  592. AddToWorklist(dbg_none);
  593. }
  594. // Add top level DebugInfo to worklist
  595. for (auto& dbg : get_module()->ext_inst_debuginfo()) {
  596. auto op = dbg.GetShader100DebugOpcode();
  597. if (op == NonSemanticShaderDebugInfo100DebugCompilationUnit ||
  598. op == NonSemanticShaderDebugInfo100DebugEntryPoint ||
  599. op == NonSemanticShaderDebugInfo100DebugSourceContinued) {
  600. AddToWorklist(&dbg);
  601. }
  602. }
  603. }
// Entry point of the pass. Bails out (without change) on modules the
// analysis cannot handle safely, then eliminates dead functions, runs
// per-function ADCE, removes dead global values, and finally kills the
// queued instructions and cleans up the CFG.
Pass::Status AggressiveDCEPass::ProcessImpl() {
  // Current functionality assumes shader capability
  // TODO(greg-lunarg): Handle additional capabilities
  if (!context()->get_feature_mgr()->HasCapability(spv::Capability::Shader))
    return Status::SuccessWithoutChange;

  // Current functionality assumes relaxed logical addressing (see
  // instruction.h)
  // TODO(greg-lunarg): Handle non-logical addressing
  if (context()->get_feature_mgr()->HasCapability(spv::Capability::Addresses))
    return Status::SuccessWithoutChange;

  // The variable pointer extension is no longer needed to use the capability,
  // so we have to look for the capability.
  if (context()->get_feature_mgr()->HasCapability(
          spv::Capability::VariablePointersStorageBuffer))
    return Status::SuccessWithoutChange;

  // If any extensions in the module are not explicitly supported,
  // return unmodified.
  if (!AllExtensionsSupported()) return Status::SuccessWithoutChange;

  // Eliminate Dead functions.
  bool modified = EliminateDeadFunctions();

  InitializeModuleScopeLiveInstructions();

  // Run |AggressiveDCE| on the remaining functions. The order does not matter,
  // since |AggressiveDCE| is intra-procedural. This can mean that function
  // will become dead if all function call to them are removed. These dead
  // function will still be in the module after this pass. We expect this to be
  // rare.
  for (Function& fp : *context()->module()) {
    modified |= AggressiveDCE(&fp);
  }

  // If the decoration manager is kept live then the context will try to keep it
  // up to date. ADCE deals with group decorations by changing the operands in
  // |OpGroupDecorate| instruction directly without informing the decoration
  // manager. This can put it in an invalid state which will cause an error
  // when the context tries to update it. To avoid this problem invalidate
  // the decoration manager upfront.
  //
  // We kill it at now because it is used when processing the entry point
  // functions.
  context()->InvalidateAnalyses(IRContext::Analysis::kAnalysisDecorations);

  // Process module-level instructions. Now that all live instructions have
  // been marked, it is safe to remove dead global values.
  modified |= ProcessGlobalValues();

  assert((to_kill_.empty() || modified) &&
         "A dead instruction was identified, but no change recorded.");

  // Kill all dead instructions.
  for (auto inst : to_kill_) {
    context()->KillInst(inst);
  }

  // Cleanup all CFG including all unreachable blocks.
  for (Function& fp : *context()->module()) {
    modified |= CFGCleanup(&fp);
  }

  return modified ? Status::SuccessWithChange : Status::SuccessWithoutChange;
}
  658. bool AggressiveDCEPass::EliminateDeadFunctions() {
  659. // Identify live functions first. Those that are not live
  660. // are dead.
  661. std::unordered_set<const Function*> live_function_set;
  662. ProcessFunction mark_live = [&live_function_set](Function* fp) {
  663. live_function_set.insert(fp);
  664. return false;
  665. };
  666. context()->ProcessReachableCallTree(mark_live);
  667. bool modified = false;
  668. for (auto funcIter = get_module()->begin();
  669. funcIter != get_module()->end();) {
  670. if (live_function_set.count(&*funcIter) == 0) {
  671. modified = true;
  672. funcIter =
  673. eliminatedeadfunctionsutil::EliminateFunction(context(), &funcIter);
  674. } else {
  675. ++funcIter;
  676. }
  677. }
  678. return modified;
  679. }
bool AggressiveDCEPass::ProcessGlobalValues() {
  // Remove debug and annotation statements referencing dead instructions.
  // This must be done before killing the instructions, otherwise there are
  // dead objects in the def/use database.
  bool modified = false;
  // Walk the names section, deleting OpNames whose target is dead.
  Instruction* instruction = &*get_module()->debug2_begin();
  while (instruction) {
    if (instruction->opcode() != spv::Op::OpName) {
      instruction = instruction->NextNode();
      continue;
    }
    if (IsTargetDead(instruction)) {
      // KillInst returns the following instruction, so the walk resumes
      // correctly after a deletion.
      instruction = context()->KillInst(instruction);
      modified = true;
    } else {
      instruction = instruction->NextNode();
    }
  }
  // This code removes all unnecessary decorations safely (see #1174). It also
  // does so in a more efficient manner than deleting them only as the targets
  // are deleted.
  std::vector<Instruction*> annotations;
  for (auto& inst : get_module()->annotations()) annotations.push_back(&inst);
  // Sort so that (per the OpDecorationGroup case below) everything that can
  // target a decoration group is visited before the group itself.
  std::sort(annotations.begin(), annotations.end(), DecorationLess());
  for (auto annotation : annotations) {
    switch (annotation->opcode()) {
      case spv::Op::OpDecorate:
      case spv::Op::OpMemberDecorate:
      case spv::Op::OpDecorateStringGOOGLE:
      case spv::Op::OpMemberDecorateStringGOOGLE:
        // Single-target decorations: dead target implies dead decoration.
        if (IsTargetDead(annotation)) {
          context()->KillInst(annotation);
          modified = true;
        }
        break;
      case spv::Op::OpDecorateId:
        if (IsTargetDead(annotation)) {
          context()->KillInst(annotation);
          modified = true;
        } else {
          if (spv::Decoration(annotation->GetSingleWordInOperand(1)) ==
              spv::Decoration::HlslCounterBufferGOOGLE) {
            // HlslCounterBuffer will reference an id other than the target.
            // If that id is dead, then the decoration can be removed as well.
            uint32_t counter_buffer_id = annotation->GetSingleWordInOperand(2);
            Instruction* counter_buffer_inst =
                get_def_use_mgr()->GetDef(counter_buffer_id);
            if (!IsLive(counter_buffer_inst)) {
              context()->KillInst(annotation);
              modified = true;
            }
          }
        }
        break;
      case spv::Op::OpGroupDecorate: {
        // Go through the targets of this group decorate. Remove each dead
        // target. If all targets are dead, remove this decoration.
        // Operand 0 is the decoration group; targets start at operand 1.
        bool dead = true;
        bool removed_operand = false;
        for (uint32_t i = 1; i < annotation->NumOperands();) {
          Instruction* opInst =
              get_def_use_mgr()->GetDef(annotation->GetSingleWordOperand(i));
          if (!IsLive(opInst)) {
            // Don't increment |i|.
            annotation->RemoveOperand(i);
            modified = true;
            removed_operand = true;
          } else {
            i++;
            dead = false;
          }
        }
        if (dead) {
          context()->KillInst(annotation);
          modified = true;
        } else if (removed_operand) {
          // Operands were edited in place, so refresh the def/use records.
          context()->UpdateDefUse(annotation);
        }
        break;
      }
      case spv::Op::OpGroupMemberDecorate: {
        // Go through the targets of this group member decorate. Remove each
        // dead target (and member index). If all targets are dead, remove this
        // decoration.
        // Targets come in (id, member index) pairs starting at operand 1.
        bool dead = true;
        bool removed_operand = false;
        for (uint32_t i = 1; i < annotation->NumOperands();) {
          Instruction* opInst =
              get_def_use_mgr()->GetDef(annotation->GetSingleWordOperand(i));
          if (!IsLive(opInst)) {
            // Don't increment |i|. Remove the member index first so that |i|
            // still addresses the id afterwards.
            annotation->RemoveOperand(i + 1);
            annotation->RemoveOperand(i);
            modified = true;
            removed_operand = true;
          } else {
            i += 2;
            dead = false;
          }
        }
        if (dead) {
          context()->KillInst(annotation);
          modified = true;
        } else if (removed_operand) {
          context()->UpdateDefUse(annotation);
        }
        break;
      }
      case spv::Op::OpDecorationGroup:
        // By the time we hit decoration groups we've checked everything that
        // can target them. So if they have no uses they must be dead.
        if (get_def_use_mgr()->NumUsers(annotation) == 0) {
          context()->KillInst(annotation);
          modified = true;
        }
        break;
      default:
        // Every opcode that can appear in the annotations section should be
        // handled by a case above.
        assert(false);
        break;
    }
  }
  // Queue dead extended-instruction debug info for removal.
  for (auto& dbg : get_module()->ext_inst_debuginfo()) {
    if (IsLive(&dbg)) continue;
    // Save GlobalVariable if its variable is live, otherwise null out variable
    // index
    if (dbg.GetCommonDebugOpcode() == CommonDebugInfoDebugGlobalVariable) {
      auto var_id = dbg.GetSingleWordOperand(kGlobalVariableVariableIndex);
      Instruction* var_inst = get_def_use_mgr()->GetDef(var_id);
      if (IsLive(var_inst)) continue;
      // Redirect the variable operand to DebugInfoNone; the Forget/Analyze
      // pair keeps the def-use manager consistent across the in-place edit.
      context()->ForgetUses(&dbg);
      dbg.SetOperand(
          kGlobalVariableVariableIndex,
          {context()->get_debug_info_mgr()->GetDebugInfoNone()->result_id()});
      context()->AnalyzeUses(&dbg);
      continue;
    }
    to_kill_.push_back(&dbg);
    modified = true;
  }
  // Since ADCE is disabled for non-shaders, we don't check for export linkage
  // attributes here.
  for (auto& val : get_module()->types_values()) {
    if (!IsLive(&val)) {
      // Save forwarded pointer if pointer is live since closure does not mark
      // this live as it does not have a result id. This is a little too
      // conservative since it is not known if the structure type that needed
      // it is still live. TODO(greg-lunarg): Only save if needed.
      if (val.opcode() == spv::Op::OpTypeForwardPointer) {
        uint32_t ptr_ty_id = val.GetSingleWordInOperand(0);
        Instruction* ptr_ty_inst = get_def_use_mgr()->GetDef(ptr_ty_id);
        if (IsLive(ptr_ty_inst)) continue;
      }
      to_kill_.push_back(&val);
      modified = true;
    }
  }
  if (!preserve_interface_) {
    // Remove the dead interface variables from the entry point interface list.
    for (auto& entry : get_module()->entry_points()) {
      std::vector<Operand> new_operands;
      for (uint32_t i = 0; i < entry.NumInOperands(); ++i) {
        if (i < 3) {
          // Execution model, function id and name are always valid.
          new_operands.push_back(entry.GetInOperand(i));
        } else {
          // Keep only the interface variables that are still live.
          auto* var =
              get_def_use_mgr()->GetDef(entry.GetSingleWordInOperand(i));
          if (IsLive(var)) {
            new_operands.push_back(entry.GetInOperand(i));
          }
        }
      }
      if (new_operands.size() != entry.NumInOperands()) {
        entry.SetInOperands(std::move(new_operands));
        get_def_use_mgr()->UpdateDefUse(&entry);
      }
    }
  }
  return modified;
}
  860. Pass::Status AggressiveDCEPass::Process() {
  861. // Initialize extensions allowlist
  862. InitExtensions();
  863. return ProcessImpl();
  864. }
  865. void AggressiveDCEPass::InitExtensions() {
  866. extensions_allowlist_.clear();
  867. extensions_allowlist_.insert({
  868. "SPV_AMD_shader_explicit_vertex_parameter",
  869. "SPV_AMD_shader_trinary_minmax",
  870. "SPV_AMD_gcn_shader",
  871. "SPV_KHR_shader_ballot",
  872. "SPV_AMD_shader_ballot",
  873. "SPV_AMD_gpu_shader_half_float",
  874. "SPV_KHR_shader_draw_parameters",
  875. "SPV_KHR_subgroup_vote",
  876. "SPV_KHR_8bit_storage",
  877. "SPV_KHR_16bit_storage",
  878. "SPV_KHR_device_group",
  879. "SPV_KHR_multiview",
  880. "SPV_NVX_multiview_per_view_attributes",
  881. "SPV_NV_viewport_array2",
  882. "SPV_NV_stereo_view_rendering",
  883. "SPV_NV_sample_mask_override_coverage",
  884. "SPV_NV_geometry_shader_passthrough",
  885. "SPV_AMD_texture_gather_bias_lod",
  886. "SPV_KHR_storage_buffer_storage_class",
  887. // SPV_KHR_variable_pointers
  888. // Currently do not support extended pointer expressions
  889. "SPV_AMD_gpu_shader_int16",
  890. "SPV_KHR_post_depth_coverage",
  891. "SPV_KHR_shader_atomic_counter_ops",
  892. "SPV_EXT_shader_stencil_export",
  893. "SPV_EXT_shader_viewport_index_layer",
  894. "SPV_AMD_shader_image_load_store_lod",
  895. "SPV_AMD_shader_fragment_mask",
  896. "SPV_EXT_fragment_fully_covered",
  897. "SPV_AMD_gpu_shader_half_float_fetch",
  898. "SPV_GOOGLE_decorate_string",
  899. "SPV_GOOGLE_hlsl_functionality1",
  900. "SPV_GOOGLE_user_type",
  901. "SPV_NV_shader_subgroup_partitioned",
  902. "SPV_EXT_demote_to_helper_invocation",
  903. "SPV_EXT_descriptor_indexing",
  904. "SPV_NV_fragment_shader_barycentric",
  905. "SPV_NV_compute_shader_derivatives",
  906. "SPV_NV_shader_image_footprint",
  907. "SPV_NV_shading_rate",
  908. "SPV_NV_mesh_shader",
  909. "SPV_NV_ray_tracing",
  910. "SPV_KHR_ray_tracing",
  911. "SPV_KHR_ray_query",
  912. "SPV_EXT_fragment_invocation_density",
  913. "SPV_EXT_physical_storage_buffer",
  914. "SPV_KHR_terminate_invocation",
  915. "SPV_KHR_shader_clock",
  916. "SPV_KHR_vulkan_memory_model",
  917. "SPV_KHR_subgroup_uniform_control_flow",
  918. "SPV_KHR_integer_dot_product",
  919. "SPV_EXT_shader_image_int64",
  920. "SPV_KHR_non_semantic_info",
  921. "SPV_KHR_uniform_group_instructions",
  922. "SPV_KHR_fragment_shader_barycentric",
  923. });
  924. }
  925. Instruction* AggressiveDCEPass::GetHeaderBranch(BasicBlock* blk) {
  926. if (blk == nullptr) {
  927. return nullptr;
  928. }
  929. BasicBlock* header_block = GetHeaderBlock(blk);
  930. if (header_block == nullptr) {
  931. return nullptr;
  932. }
  933. return header_block->terminator();
  934. }
  935. BasicBlock* AggressiveDCEPass::GetHeaderBlock(BasicBlock* blk) const {
  936. if (blk == nullptr) {
  937. return nullptr;
  938. }
  939. BasicBlock* header_block = nullptr;
  940. if (blk->IsLoopHeader()) {
  941. header_block = blk;
  942. } else {
  943. uint32_t header =
  944. context()->GetStructuredCFGAnalysis()->ContainingConstruct(blk->id());
  945. header_block = context()->get_instr_block(header);
  946. }
  947. return header_block;
  948. }
  949. Instruction* AggressiveDCEPass::GetMergeInstruction(Instruction* inst) {
  950. BasicBlock* bb = context()->get_instr_block(inst);
  951. if (bb == nullptr) {
  952. return nullptr;
  953. }
  954. return bb->GetMergeInst();
  955. }
  956. Instruction* AggressiveDCEPass::GetBranchForNextHeader(BasicBlock* blk) {
  957. if (blk == nullptr) {
  958. return nullptr;
  959. }
  960. if (blk->IsLoopHeader()) {
  961. uint32_t header =
  962. context()->GetStructuredCFGAnalysis()->ContainingConstruct(blk->id());
  963. blk = context()->get_instr_block(header);
  964. }
  965. return GetHeaderBranch(blk);
  966. }
  967. void AggressiveDCEPass::MarkFunctionParameterAsLive(const Function* func) {
  968. func->ForEachParam(
  969. [this](const Instruction* param) {
  970. AddToWorklist(const_cast<Instruction*>(param));
  971. },
  972. false);
  973. }
  974. bool AggressiveDCEPass::BlockIsInConstruct(BasicBlock* header_block,
  975. BasicBlock* bb) {
  976. if (bb == nullptr || header_block == nullptr) {
  977. return false;
  978. }
  979. uint32_t current_header = bb->id();
  980. while (current_header != 0) {
  981. if (current_header == header_block->id()) return true;
  982. current_header = context()->GetStructuredCFGAnalysis()->ContainingConstruct(
  983. current_header);
  984. }
  985. return false;
  986. }
  987. bool AggressiveDCEPass::IsEntryPointWithNoCalls(Function* func) {
  988. auto cached_result = entry_point_with_no_calls_cache_.find(func->result_id());
  989. if (cached_result != entry_point_with_no_calls_cache_.end()) {
  990. return cached_result->second;
  991. }
  992. bool result = IsEntryPoint(func) && !HasCall(func);
  993. entry_point_with_no_calls_cache_[func->result_id()] = result;
  994. return result;
  995. }
  996. bool AggressiveDCEPass::IsEntryPoint(Function* func) {
  997. for (const Instruction& entry_point : get_module()->entry_points()) {
  998. uint32_t entry_point_id =
  999. entry_point.GetSingleWordInOperand(kEntryPointFunctionIdInIdx);
  1000. if (entry_point_id == func->result_id()) {
  1001. return true;
  1002. }
  1003. }
  1004. return false;
  1005. }
  1006. bool AggressiveDCEPass::HasCall(Function* func) {
  1007. return !func->WhileEachInst([](Instruction* inst) {
  1008. return inst->opcode() != spv::Op::OpFunctionCall;
  1009. });
  1010. }
  1011. void AggressiveDCEPass::MarkFirstBlockAsLive(Function* func) {
  1012. BasicBlock* first_block = &*func->begin();
  1013. MarkBlockAsLive(first_block->GetLabelInst());
  1014. }
  1015. void AggressiveDCEPass::AddUnreachable(BasicBlock*& block) {
  1016. InstructionBuilder builder(
  1017. context(), block,
  1018. IRContext::kAnalysisInstrToBlockMapping | IRContext::kAnalysisDefUse);
  1019. builder.AddUnreachable();
  1020. }
  1021. } // namespace opt
  1022. } // namespace spvtools