// dead_branch_elim_pass.cpp
  1. // Copyright (c) 2017 The Khronos Group Inc.
  2. // Copyright (c) 2017 Valve Corporation
  3. // Copyright (c) 2017 LunarG Inc.
  4. // Copyright (c) 2018 Google Inc.
  5. //
  6. // Licensed under the Apache License, Version 2.0 (the "License");
  7. // you may not use this file except in compliance with the License.
  8. // You may obtain a copy of the License at
  9. //
  10. // http://www.apache.org/licenses/LICENSE-2.0
  11. //
  12. // Unless required by applicable law or agreed to in writing, software
  13. // distributed under the License is distributed on an "AS IS" BASIS,
  14. // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15. // See the License for the specific language governing permissions and
  16. // limitations under the License.
  17. #include "source/opt/dead_branch_elim_pass.h"
  18. #include <list>
  19. #include <memory>
  20. #include <vector>
  21. #include "source/cfa.h"
  22. #include "source/opt/ir_context.h"
  23. #include "source/opt/iterator.h"
  24. #include "source/opt/struct_cfg_analysis.h"
  25. #include "source/util/make_unique.h"
  26. namespace spvtools {
  27. namespace opt {
namespace {
// In-operand indices of the true/false target labels of an
// OpBranchConditional instruction (in-operand 0 is the condition).
constexpr uint32_t kBranchCondTrueLabIdInIdx = 1;
constexpr uint32_t kBranchCondFalseLabIdInIdx = 2;
}  // namespace
  32. bool DeadBranchElimPass::GetConstCondition(uint32_t condId, bool* condVal) {
  33. bool condIsConst;
  34. Instruction* cInst = get_def_use_mgr()->GetDef(condId);
  35. switch (cInst->opcode()) {
  36. case spv::Op::OpConstantNull:
  37. case spv::Op::OpConstantFalse: {
  38. *condVal = false;
  39. condIsConst = true;
  40. } break;
  41. case spv::Op::OpConstantTrue: {
  42. *condVal = true;
  43. condIsConst = true;
  44. } break;
  45. case spv::Op::OpLogicalNot: {
  46. bool negVal;
  47. condIsConst =
  48. GetConstCondition(cInst->GetSingleWordInOperand(0), &negVal);
  49. if (condIsConst) *condVal = !negVal;
  50. } break;
  51. default: { condIsConst = false; } break;
  52. }
  53. return condIsConst;
  54. }
  55. bool DeadBranchElimPass::GetConstInteger(uint32_t selId, uint32_t* selVal) {
  56. Instruction* sInst = get_def_use_mgr()->GetDef(selId);
  57. uint32_t typeId = sInst->type_id();
  58. Instruction* typeInst = get_def_use_mgr()->GetDef(typeId);
  59. if (!typeInst || (typeInst->opcode() != spv::Op::OpTypeInt)) return false;
  60. // TODO(greg-lunarg): Support non-32 bit ints
  61. if (typeInst->GetSingleWordInOperand(0) != 32) return false;
  62. if (sInst->opcode() == spv::Op::OpConstant) {
  63. *selVal = sInst->GetSingleWordInOperand(0);
  64. return true;
  65. } else if (sInst->opcode() == spv::Op::OpConstantNull) {
  66. *selVal = 0;
  67. return true;
  68. }
  69. return false;
  70. }
  71. void DeadBranchElimPass::AddBranch(uint32_t labelId, BasicBlock* bp) {
  72. assert(get_def_use_mgr()->GetDef(labelId) != nullptr);
  73. std::unique_ptr<Instruction> newBranch(
  74. new Instruction(context(), spv::Op::OpBranch, 0, 0,
  75. {{spv_operand_type_t::SPV_OPERAND_TYPE_ID, {labelId}}}));
  76. context()->AnalyzeDefUse(&*newBranch);
  77. context()->set_instr_block(&*newBranch, bp);
  78. bp->AddInstruction(std::move(newBranch));
  79. }
  80. BasicBlock* DeadBranchElimPass::GetParentBlock(uint32_t id) {
  81. return context()->get_instr_block(get_def_use_mgr()->GetDef(id));
  82. }
// Walks |func|'s CFG depth-first from the entry block, following only the
// statically-known successor of any terminator whose condition (or switch
// selector) folds to a constant. Every reached block is recorded in
// |live_blocks|. Branches found to have a single live target are queued and
// simplified afterwards, innermost constructs first. Returns true if any
// branch was simplified.
bool DeadBranchElimPass::MarkLiveBlocks(
    Function* func, std::unordered_set<BasicBlock*>* live_blocks) {
  std::vector<std::pair<BasicBlock*, uint32_t>> conditions_to_simplify;
  std::unordered_set<BasicBlock*> blocks_with_backedge;
  std::vector<BasicBlock*> stack;
  stack.push_back(&*func->begin());
  bool modified = false;
  while (!stack.empty()) {
    BasicBlock* block = stack.back();
    stack.pop_back();
    // Live blocks doubles as visited set.
    if (!live_blocks->insert(block).second) continue;
    uint32_t cont_id = block->ContinueBlockIdIfAny();
    if (cont_id != 0) {
      // |block| is a loop header: record the blocks that branch back to it so
      // their back edges are not simplified away below.
      AddBlocksWithBackEdge(cont_id, block->id(), block->MergeBlockIdIfAny(),
                            &blocks_with_backedge);
    }
    Instruction* terminator = block->terminator();
    uint32_t live_lab_id = 0;
    // Check if the terminator has a single valid successor.
    if (terminator->opcode() == spv::Op::OpBranchConditional) {
      bool condVal;
      if (GetConstCondition(terminator->GetSingleWordInOperand(0u), &condVal)) {
        live_lab_id = terminator->GetSingleWordInOperand(
            condVal ? kBranchCondTrueLabIdInIdx : kBranchCondFalseLabIdInIdx);
      }
    } else if (terminator->opcode() == spv::Op::OpSwitch) {
      uint32_t sel_val;
      if (GetConstInteger(terminator->GetSingleWordInOperand(0u), &sel_val)) {
        // Search switch operands for selector value, set live_lab_id to
        // corresponding label, use default if not found.
        // In-operands are: selector (0), default label (1), then alternating
        // (case literal, case label) pairs.
        uint32_t icnt = 0;
        uint32_t case_val;
        terminator->WhileEachInOperand(
            [&icnt, &case_val, &sel_val, &live_lab_id](const uint32_t* idp) {
              if (icnt == 1) {
                // Start with default label.
                live_lab_id = *idp;
              } else if (icnt > 1) {
                if (icnt % 2 == 0) {
                  // Even operand index: remember the case literal; its label
                  // follows at the next (odd) index.
                  case_val = *idp;
                } else {
                  if (case_val == sel_val) {
                    live_lab_id = *idp;
                    return false;
                  }
                }
              }
              ++icnt;
              return true;
            });
      }
    }
    // Don't simplify back edges unless it becomes a branch to the header. Every
    // loop must have exactly one back edge to the loop header, so we cannot
    // remove it.
    bool simplify = false;
    if (live_lab_id != 0) {
      if (!blocks_with_backedge.count(block)) {
        // This is not a back edge.
        simplify = true;
      } else {
        const auto& struct_cfg_analysis = context()->GetStructuredCFGAnalysis();
        uint32_t header_id = struct_cfg_analysis->ContainingLoop(block->id());
        if (live_lab_id == header_id) {
          // The new branch will be a branch to the header.
          simplify = true;
        }
      }
    }
    if (simplify) {
      // Only the live target remains reachable from this block.
      conditions_to_simplify.push_back({block, live_lab_id});
      stack.push_back(GetParentBlock(live_lab_id));
    } else {
      // All successors are live.
      const auto* const_block = block;
      const_block->ForEachSuccessorLabel([&stack, this](const uint32_t label) {
        stack.push_back(GetParentBlock(label));
      });
    }
  }
  // Traverse |conditions_to_simplify| in reverse order. This is done so that
  // we simplify nested constructs before simplifying the constructs that
  // contain them.
  for (auto b = conditions_to_simplify.rbegin();
       b != conditions_to_simplify.rend(); ++b) {
    modified |= SimplifyBranch(b->first, b->second);
  }
  return modified;
}
// Replaces |block|'s terminator, whose only live target is |live_lab_id|,
// with a direct branch to that target. If the block heads a selection whose
// switch has a nested break, the switch is kept but trimmed to the live
// case; otherwise the merge instruction is dropped, or moved to the first
// remaining exit from the construct if a break still needs it. Returns true
// if the block was changed.
bool DeadBranchElimPass::SimplifyBranch(BasicBlock* block,
                                        uint32_t live_lab_id) {
  Instruction* merge_inst = block->GetMergeInst();
  Instruction* terminator = block->terminator();
  if (merge_inst && merge_inst->opcode() == spv::Op::OpSelectionMerge) {
    if (merge_inst->NextNode()->opcode() == spv::Op::OpSwitch &&
        SwitchHasNestedBreak(block->id())) {
      if (terminator->NumInOperands() == 2) {
        // We cannot remove the branch, and it already has a single case, so no
        // work to do.
        return false;
      }
      // We have to keep the switch because it has a nest break, so we
      // remove all cases except for the live one.
      Instruction::OperandList new_operands;
      new_operands.push_back(terminator->GetInOperand(0));
      new_operands.push_back({SPV_OPERAND_TYPE_ID, {live_lab_id}});
      terminator->SetInOperands(std::move(new_operands));
      context()->UpdateDefUse(terminator);
    } else {
      // Check if the merge instruction is still needed because of a
      // non-nested break from the construct. Move the merge instruction if
      // it is still needed.
      StructuredCFGAnalysis* cfg_analysis =
          context()->GetStructuredCFGAnalysis();
      Instruction* first_break = FindFirstExitFromSelectionMerge(
          live_lab_id, merge_inst->GetSingleWordInOperand(0),
          cfg_analysis->LoopMergeBlock(live_lab_id),
          cfg_analysis->LoopContinueBlock(live_lab_id),
          cfg_analysis->SwitchMergeBlock(live_lab_id));
      AddBranch(live_lab_id, block);
      context()->KillInst(terminator);
      if (first_break == nullptr) {
        // No break remains; the selection construct disappears entirely.
        context()->KillInst(merge_inst);
      } else {
        // Re-home the merge instruction just before the first remaining
        // break so the construct stays structurally valid.
        merge_inst->RemoveFromList();
        first_break->InsertBefore(std::unique_ptr<Instruction>(merge_inst));
        context()->set_instr_block(merge_inst,
                                   context()->get_instr_block(first_break));
      }
    }
  } else {
    AddBranch(live_lab_id, block);
    context()->KillInst(terminator);
  }
  return true;
}
  220. void DeadBranchElimPass::MarkUnreachableStructuredTargets(
  221. const std::unordered_set<BasicBlock*>& live_blocks,
  222. std::unordered_set<BasicBlock*>* unreachable_merges,
  223. std::unordered_map<BasicBlock*, BasicBlock*>* unreachable_continues) {
  224. for (auto block : live_blocks) {
  225. if (auto merge_id = block->MergeBlockIdIfAny()) {
  226. BasicBlock* merge_block = GetParentBlock(merge_id);
  227. if (!live_blocks.count(merge_block)) {
  228. unreachable_merges->insert(merge_block);
  229. }
  230. if (auto cont_id = block->ContinueBlockIdIfAny()) {
  231. BasicBlock* cont_block = GetParentBlock(cont_id);
  232. if (!live_blocks.count(cont_block)) {
  233. (*unreachable_continues)[cont_block] = block;
  234. }
  235. }
  236. }
  237. }
  238. }
// Rewrites the OpPhi instructions of every live block in |func| so they only
// reference live incoming edges. Edges from blocks in |unreachable_continues|
// are kept (with undef values) when required to preserve the loop's back
// edge. A phi left with a single incoming value is replaced by that value.
// Returns true if any phi was changed.
bool DeadBranchElimPass::FixPhiNodesInLiveBlocks(
    Function* func, const std::unordered_set<BasicBlock*>& live_blocks,
    const std::unordered_map<BasicBlock*, BasicBlock*>& unreachable_continues) {
  bool modified = false;
  for (auto& block : *func) {
    if (live_blocks.count(&block)) {
      for (auto iter = block.begin(); iter != block.end();) {
        // Phis are required to be at the start of the block, so stop at the
        // first non-phi instruction.
        if (iter->opcode() != spv::Op::OpPhi) {
          break;
        }
        bool changed = false;
        bool backedge_added = false;
        Instruction* inst = &*iter;
        std::vector<Operand> operands;
        // Build a complete set of operands (not just input operands). Start
        // with type and result id operands.
        operands.push_back(inst->GetOperand(0u));
        operands.push_back(inst->GetOperand(1u));
        // Iterate through the incoming labels and determine which to keep
        // and/or modify. If there in an unreachable continue block, there will
        // be an edge from that block to the header. We need to keep it to
        // maintain the structured control flow. If the header has more that 2
        // incoming edges, then the OpPhi must have an entry for that edge.
        // However, if there is only one other incoming edge, the OpPhi can be
        // eliminated.
        // In-operands alternate (value, label); |i| indexes the labels.
        for (uint32_t i = 1; i < inst->NumInOperands(); i += 2) {
          BasicBlock* inc = GetParentBlock(inst->GetSingleWordInOperand(i));
          auto cont_iter = unreachable_continues.find(inc);
          if (cont_iter != unreachable_continues.end() &&
              cont_iter->second == &block && inst->NumInOperands() > 4) {
            if (get_def_use_mgr()
                    ->GetDef(inst->GetSingleWordInOperand(i - 1))
                    ->opcode() == spv::Op::OpUndef) {
              // Already undef incoming value, no change necessary.
              operands.push_back(inst->GetInOperand(i - 1));
              operands.push_back(inst->GetInOperand(i));
              backedge_added = true;
            } else {
              // Replace incoming value with undef if this phi exists in the
              // loop header. Otherwise, this edge is not live since the
              // unreachable continue block will be replaced with an
              // unconditional branch to the header only.
              operands.emplace_back(
                  SPV_OPERAND_TYPE_ID,
                  std::initializer_list<uint32_t>{Type2Undef(inst->type_id())});
              operands.push_back(inst->GetInOperand(i));
              changed = true;
              backedge_added = true;
            }
          } else if (live_blocks.count(inc) && inc->IsSuccessor(&block)) {
            // Keep live incoming edge.
            operands.push_back(inst->GetInOperand(i - 1));
            operands.push_back(inst->GetInOperand(i));
          } else {
            // Remove incoming edge.
            changed = true;
          }
        }
        if (changed) {
          modified = true;
          uint32_t continue_id = block.ContinueBlockIdIfAny();
          if (!backedge_added && continue_id != 0 &&
              unreachable_continues.count(GetParentBlock(continue_id)) &&
              operands.size() > 4) {
            // Changed the backedge to branch from the continue block instead
            // of a successor of the continue block. Add an entry to the phi to
            // provide an undef for the continue block. Since the successor of
            // the continue must also be unreachable (dominated by the continue
            // block), any entry for the original backedge has been removed
            // from the phi operands.
            operands.emplace_back(
                SPV_OPERAND_TYPE_ID,
                std::initializer_list<uint32_t>{Type2Undef(inst->type_id())});
            operands.emplace_back(SPV_OPERAND_TYPE_ID,
                                  std::initializer_list<uint32_t>{continue_id});
          }
          // Either replace the phi with a single value or rebuild the phi out
          // of |operands|.
          //
          // We always have type and result id operands. So this phi has a
          // single source if there are two more operands beyond those.
          if (operands.size() == 4) {
            // First input data operands is at index 2.
            uint32_t replId = operands[2u].words[0];
            context()->KillNamesAndDecorates(inst->result_id());
            context()->ReplaceAllUsesWith(inst->result_id(), replId);
            iter = context()->KillInst(&*inst);
          } else {
            // We've rewritten the operands, so first instruct the def/use
            // manager to forget uses in the phi before we replace them. After
            // replacing operands update the def/use manager by re-analyzing
            // the used ids in this phi.
            get_def_use_mgr()->EraseUseRecordsOfOperandIds(inst);
            inst->ReplaceOperands(operands);
            get_def_use_mgr()->AnalyzeInstUse(inst);
            ++iter;
          }
        } else {
          ++iter;
        }
      }
    }
  }
  return modified;
}
// Removes or rewrites the dead blocks of |func|: unreachable continue targets
// are reduced to a label plus an unconditional branch back to their loop
// header, unreachable merge blocks to a label plus OpUnreachable (both must
// survive to keep the structured constructs valid), and all other dead blocks
// are erased outright. Returns true if anything changed.
bool DeadBranchElimPass::EraseDeadBlocks(
    Function* func, const std::unordered_set<BasicBlock*>& live_blocks,
    const std::unordered_set<BasicBlock*>& unreachable_merges,
    const std::unordered_map<BasicBlock*, BasicBlock*>& unreachable_continues) {
  bool modified = false;
  for (auto ebi = func->begin(); ebi != func->end();) {
    if (unreachable_continues.count(&*ebi)) {
      // |cont_id| is the id of the loop header this continue block maps to.
      uint32_t cont_id = unreachable_continues.find(&*ebi)->second->id();
      // Skip the rewrite if the block is already exactly label + branch to
      // the header.
      if (ebi->begin() != ebi->tail() ||
          ebi->terminator()->opcode() != spv::Op::OpBranch ||
          ebi->terminator()->GetSingleWordInOperand(0u) != cont_id) {
        // Make unreachable, but leave the label.
        KillAllInsts(&*ebi, false);
        // Add unconditional branch to header.
        assert(unreachable_continues.count(&*ebi));
        ebi->AddInstruction(MakeUnique<Instruction>(
            context(), spv::Op::OpBranch, 0, 0,
            std::initializer_list<Operand>{{SPV_OPERAND_TYPE_ID, {cont_id}}}));
        get_def_use_mgr()->AnalyzeInstUse(&*ebi->tail());
        context()->set_instr_block(&*ebi->tail(), &*ebi);
        modified = true;
      }
      ++ebi;
    } else if (unreachable_merges.count(&*ebi)) {
      // Skip the rewrite if the block is already exactly label +
      // OpUnreachable.
      if (ebi->begin() != ebi->tail() ||
          ebi->terminator()->opcode() != spv::Op::OpUnreachable) {
        // Make unreachable, but leave the label.
        KillAllInsts(&*ebi, false);
        // Add unreachable terminator.
        ebi->AddInstruction(
            MakeUnique<Instruction>(context(), spv::Op::OpUnreachable, 0, 0,
                                    std::initializer_list<Operand>{}));
        context()->AnalyzeUses(ebi->terminator());
        context()->set_instr_block(ebi->terminator(), &*ebi);
        modified = true;
      }
      ++ebi;
    } else if (!live_blocks.count(&*ebi)) {
      // Kill this block.
      KillAllInsts(&*ebi);
      ebi = ebi.Erase();
      modified = true;
    } else {
      ++ebi;
    }
  }
  return modified;
}
  392. bool DeadBranchElimPass::EliminateDeadBranches(Function* func) {
  393. if (func->IsDeclaration()) {
  394. return false;
  395. }
  396. bool modified = false;
  397. std::unordered_set<BasicBlock*> live_blocks;
  398. modified |= MarkLiveBlocks(func, &live_blocks);
  399. std::unordered_set<BasicBlock*> unreachable_merges;
  400. std::unordered_map<BasicBlock*, BasicBlock*> unreachable_continues;
  401. MarkUnreachableStructuredTargets(live_blocks, &unreachable_merges,
  402. &unreachable_continues);
  403. modified |= FixPhiNodesInLiveBlocks(func, live_blocks, unreachable_continues);
  404. modified |= EraseDeadBlocks(func, live_blocks, unreachable_merges,
  405. unreachable_continues);
  406. return modified;
  407. }
  408. void DeadBranchElimPass::FixBlockOrder() {
  409. context()->BuildInvalidAnalyses(IRContext::kAnalysisCFG |
  410. IRContext::kAnalysisDominatorAnalysis);
  411. // Reorders blocks according to DFS of dominator tree.
  412. ProcessFunction reorder_dominators = [this](Function* function) {
  413. DominatorAnalysis* dominators = context()->GetDominatorAnalysis(function);
  414. std::vector<BasicBlock*> blocks;
  415. for (auto iter = dominators->GetDomTree().begin();
  416. iter != dominators->GetDomTree().end(); ++iter) {
  417. if (iter->id() != 0) {
  418. blocks.push_back(iter->bb_);
  419. }
  420. }
  421. for (uint32_t i = 1; i < blocks.size(); ++i) {
  422. function->MoveBasicBlockToAfter(blocks[i]->id(), blocks[i - 1]);
  423. }
  424. return true;
  425. };
  426. // Reorders blocks according to structured order.
  427. ProcessFunction reorder_structured = [](Function* function) {
  428. function->ReorderBasicBlocksInStructuredOrder();
  429. return true;
  430. };
  431. // Structured order is more intuitive so use it where possible.
  432. if (context()->get_feature_mgr()->HasCapability(spv::Capability::Shader)) {
  433. context()->ProcessReachableCallTree(reorder_structured);
  434. } else {
  435. context()->ProcessReachableCallTree(reorder_dominators);
  436. }
  437. }
  438. Pass::Status DeadBranchElimPass::Process() {
  439. // Do not process if module contains OpGroupDecorate. Additional
  440. // support required in KillNamesAndDecorates().
  441. // TODO(greg-lunarg): Add support for OpGroupDecorate
  442. for (auto& ai : get_module()->annotations())
  443. if (ai.opcode() == spv::Op::OpGroupDecorate)
  444. return Status::SuccessWithoutChange;
  445. // Process all entry point functions
  446. ProcessFunction pfn = [this](Function* fp) {
  447. return EliminateDeadBranches(fp);
  448. };
  449. bool modified = context()->ProcessReachableCallTree(pfn);
  450. if (modified) FixBlockOrder();
  451. return modified ? Status::SuccessWithChange : Status::SuccessWithoutChange;
  452. }
// Walks the control flow forward from |start_block_id| inside the selection
// construct whose merge block is |merge_block_id|, skipping over nested
// constructs, and returns the first branch instruction that can exit the
// construct early (a conditional "break"). Returns nullptr if the walk
// reaches the merge block (or the enclosing loop's merge/continue block)
// without finding one. |loop_merge_id|, |loop_continue_id| and
// |switch_merge_id| identify the targets of the enclosing constructs so
// branches to them can be distinguished from in-construct edges.
Instruction* DeadBranchElimPass::FindFirstExitFromSelectionMerge(
    uint32_t start_block_id, uint32_t merge_block_id, uint32_t loop_merge_id,
    uint32_t loop_continue_id, uint32_t switch_merge_id) {
  // To find the "first" exit, we follow branches looking for a conditional
  // branch that is not in a nested construct and is not the header of a new
  // construct. We follow the control flow from |start_block_id| to find the
  // first one.
  while (start_block_id != merge_block_id && start_block_id != loop_merge_id &&
         start_block_id != loop_continue_id) {
    BasicBlock* start_block = context()->get_instr_block(start_block_id);
    Instruction* branch = start_block->terminator();
    uint32_t next_block_id = 0;
    switch (branch->opcode()) {
      case spv::Op::OpBranchConditional:
        // A conditional branch with a merge instruction heads a nested
        // construct: skip over it via its merge block.
        next_block_id = start_block->MergeBlockIdIfAny();
        if (next_block_id == 0) {
          // If a possible target is the |loop_merge_id| or |loop_continue_id|,
          // which are not the current merge node, then we continue the search
          // with the other target.
          for (uint32_t i = 1; i < 3; i++) {
            if (branch->GetSingleWordInOperand(i) == loop_merge_id &&
                loop_merge_id != merge_block_id) {
              next_block_id = branch->GetSingleWordInOperand(3 - i);
              break;
            }
            if (branch->GetSingleWordInOperand(i) == loop_continue_id &&
                loop_continue_id != merge_block_id) {
              next_block_id = branch->GetSingleWordInOperand(3 - i);
              break;
            }
            if (branch->GetSingleWordInOperand(i) == switch_merge_id &&
                switch_merge_id != merge_block_id) {
              next_block_id = branch->GetSingleWordInOperand(3 - i);
              break;
            }
          }
          if (next_block_id == 0) {
            // Neither target escapes to an enclosing construct, so this is
            // the conditional break we are looking for.
            return branch;
          }
        }
        break;
      case spv::Op::OpSwitch:
        next_block_id = start_block->MergeBlockIdIfAny();
        if (next_block_id == 0) {
          // A switch with no merge instructions can have at most 5 targets:
          //   a. |merge_block_id|
          //   b. |loop_merge_id|
          //   c. |loop_continue_id|
          //   d. |switch_merge_id|
          //   e. 1 block inside the current region.
          //
          // Note that because this is a switch, |merge_block_id| must equal
          // |switch_merge_id|.
          //
          // This leads to a number of cases of what to do.
          //
          // 1. Does not jump to a block inside of the current construct. In
          //    this case, there is not conditional break, so we should return
          //    |nullptr|.
          //
          // 2. Jumps to |merge_block_id| and a block inside the current
          //    construct. In this case, this branch conditionally break to the
          //    end of the current construct, so return the current branch.
          //
          // 3. Otherwise, this branch may break, but not to the current merge
          //    block. So we continue with the block that is inside the loop.
          bool found_break = false;
          for (uint32_t i = 1; i < branch->NumInOperands(); i += 2) {
            uint32_t target = branch->GetSingleWordInOperand(i);
            if (target == merge_block_id) {
              found_break = true;
            } else if (target != loop_merge_id && target != loop_continue_id) {
              next_block_id = branch->GetSingleWordInOperand(i);
            }
          }
          if (next_block_id == 0) {
            // Case 1.
            return nullptr;
          }
          if (found_break) {
            // Case 2.
            return branch;
          }
          // The fall through is case 3.
        }
        break;
      case spv::Op::OpBranch:
        // Need to check if this is the header of a loop nested in the
        // selection construct.
        next_block_id = start_block->MergeBlockIdIfAny();
        if (next_block_id == 0) {
          next_block_id = branch->GetSingleWordInOperand(0);
        }
        break;
      default:
        // Any other terminator (return, unreachable, ...) cannot break to
        // the merge block.
        return nullptr;
    }
    start_block_id = next_block_id;
  }
  return nullptr;
}
  554. void DeadBranchElimPass::AddBlocksWithBackEdge(
  555. uint32_t cont_id, uint32_t header_id, uint32_t merge_id,
  556. std::unordered_set<BasicBlock*>* blocks_with_back_edges) {
  557. std::unordered_set<uint32_t> visited;
  558. visited.insert(cont_id);
  559. visited.insert(header_id);
  560. visited.insert(merge_id);
  561. std::vector<uint32_t> work_list;
  562. work_list.push_back(cont_id);
  563. while (!work_list.empty()) {
  564. uint32_t bb_id = work_list.back();
  565. work_list.pop_back();
  566. BasicBlock* bb = context()->get_instr_block(bb_id);
  567. bool has_back_edge = false;
  568. bb->ForEachSuccessorLabel([header_id, &visited, &work_list,
  569. &has_back_edge](uint32_t* succ_label_id) {
  570. if (visited.insert(*succ_label_id).second) {
  571. work_list.push_back(*succ_label_id);
  572. }
  573. if (*succ_label_id == header_id) {
  574. has_back_edge = true;
  575. }
  576. });
  577. if (has_back_edge) {
  578. blocks_with_back_edges->insert(bb);
  579. }
  580. }
  581. }
  582. bool DeadBranchElimPass::SwitchHasNestedBreak(uint32_t switch_header_id) {
  583. std::vector<BasicBlock*> block_in_construct;
  584. BasicBlock* start_block = context()->get_instr_block(switch_header_id);
  585. uint32_t merge_block_id = start_block->MergeBlockIdIfAny();
  586. StructuredCFGAnalysis* cfg_analysis = context()->GetStructuredCFGAnalysis();
  587. return !get_def_use_mgr()->WhileEachUser(
  588. merge_block_id,
  589. [this, cfg_analysis, switch_header_id](Instruction* inst) {
  590. if (!inst->IsBranch()) {
  591. return true;
  592. }
  593. BasicBlock* bb = context()->get_instr_block(inst);
  594. if (bb->id() == switch_header_id) {
  595. return true;
  596. }
  597. return (cfg_analysis->ContainingConstruct(inst) == switch_header_id &&
  598. bb->GetMergeInst() == nullptr);
  599. });
  600. }
  601. } // namespace opt
  602. } // namespace spvtools