dead_branch_elim_pass.cpp 25 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651
  1. // Copyright (c) 2017 The Khronos Group Inc.
  2. // Copyright (c) 2017 Valve Corporation
  3. // Copyright (c) 2017 LunarG Inc.
  4. // Copyright (c) 2018 Google Inc.
  5. //
  6. // Licensed under the Apache License, Version 2.0 (the "License");
  7. // you may not use this file except in compliance with the License.
  8. // You may obtain a copy of the License at
  9. //
  10. // http://www.apache.org/licenses/LICENSE-2.0
  11. //
  12. // Unless required by applicable law or agreed to in writing, software
  13. // distributed under the License is distributed on an "AS IS" BASIS,
  14. // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15. // See the License for the specific language governing permissions and
  16. // limitations under the License.
  17. #include "source/opt/dead_branch_elim_pass.h"
  18. #include <list>
  19. #include <memory>
  20. #include <vector>
  21. #include "source/cfa.h"
  22. #include "source/opt/ir_context.h"
  23. #include "source/opt/struct_cfg_analysis.h"
  24. #include "source/util/make_unique.h"
  25. namespace spvtools {
  26. namespace opt {
namespace {
// Indices of the target-label operands of OpBranchConditional:
// in-operand 0 is the condition, 1 is the "true" label, 2 is the "false"
// label.  Used to pick the single live successor of a constant branch.
constexpr uint32_t kBranchCondTrueLabIdInIdx = 1;
constexpr uint32_t kBranchCondFalseLabIdInIdx = 2;
}  // namespace
  31. bool DeadBranchElimPass::GetConstCondition(uint32_t condId, bool* condVal) {
  32. bool condIsConst;
  33. Instruction* cInst = get_def_use_mgr()->GetDef(condId);
  34. switch (cInst->opcode()) {
  35. case spv::Op::OpConstantNull:
  36. case spv::Op::OpConstantFalse: {
  37. *condVal = false;
  38. condIsConst = true;
  39. } break;
  40. case spv::Op::OpConstantTrue: {
  41. *condVal = true;
  42. condIsConst = true;
  43. } break;
  44. case spv::Op::OpLogicalNot: {
  45. bool negVal;
  46. condIsConst =
  47. GetConstCondition(cInst->GetSingleWordInOperand(0), &negVal);
  48. if (condIsConst) *condVal = !negVal;
  49. } break;
  50. default: { condIsConst = false; } break;
  51. }
  52. return condIsConst;
  53. }
  54. bool DeadBranchElimPass::GetConstInteger(uint32_t selId, uint32_t* selVal) {
  55. Instruction* sInst = get_def_use_mgr()->GetDef(selId);
  56. uint32_t typeId = sInst->type_id();
  57. Instruction* typeInst = get_def_use_mgr()->GetDef(typeId);
  58. if (!typeInst || (typeInst->opcode() != spv::Op::OpTypeInt)) return false;
  59. // TODO(greg-lunarg): Support non-32 bit ints
  60. if (typeInst->GetSingleWordInOperand(0) != 32) return false;
  61. if (sInst->opcode() == spv::Op::OpConstant) {
  62. *selVal = sInst->GetSingleWordInOperand(0);
  63. return true;
  64. } else if (sInst->opcode() == spv::Op::OpConstantNull) {
  65. *selVal = 0;
  66. return true;
  67. }
  68. return false;
  69. }
  70. void DeadBranchElimPass::AddBranch(uint32_t labelId, BasicBlock* bp) {
  71. assert(get_def_use_mgr()->GetDef(labelId) != nullptr);
  72. std::unique_ptr<Instruction> newBranch(
  73. new Instruction(context(), spv::Op::OpBranch, 0, 0,
  74. {{spv_operand_type_t::SPV_OPERAND_TYPE_ID, {labelId}}}));
  75. context()->AnalyzeDefUse(&*newBranch);
  76. context()->set_instr_block(&*newBranch, bp);
  77. bp->AddInstruction(std::move(newBranch));
  78. }
  79. BasicBlock* DeadBranchElimPass::GetParentBlock(uint32_t id) {
  80. return context()->get_instr_block(get_def_use_mgr()->GetDef(id));
  81. }
// Computes the set of blocks of |func| that remain reachable once constant
// conditional branches and constant switches are folded to their single live
// target, and performs those simplifications.  |live_blocks| doubles as the
// DFS visited set, so on return it holds exactly the live blocks.  Returns
// true if any terminator was simplified.
bool DeadBranchElimPass::MarkLiveBlocks(
    Function* func, std::unordered_set<BasicBlock*>* live_blocks) {
  // Simplifications are recorded during the walk and applied afterwards so
  // the CFG is not mutated while it is being traversed.
  std::vector<std::pair<BasicBlock*, uint32_t>> conditions_to_simplify;
  std::unordered_set<BasicBlock*> blocks_with_backedge;
  std::vector<BasicBlock*> stack;
  stack.push_back(&*func->begin());
  bool modified = false;
  while (!stack.empty()) {
    BasicBlock* block = stack.back();
    stack.pop_back();

    // Live blocks doubles as visited set.
    if (!live_blocks->insert(block).second) continue;

    uint32_t cont_id = block->ContinueBlockIdIfAny();
    if (cont_id != 0) {
      // |block| is a loop header: record which blocks of this loop carry a
      // back edge to it, since a loop's single back edge must be preserved.
      AddBlocksWithBackEdge(cont_id, block->id(), block->MergeBlockIdIfAny(),
                            &blocks_with_backedge);
    }

    Instruction* terminator = block->terminator();
    uint32_t live_lab_id = 0;
    // Check if the terminator has a single valid successor.
    if (terminator->opcode() == spv::Op::OpBranchConditional) {
      bool condVal;
      if (GetConstCondition(terminator->GetSingleWordInOperand(0u),
                            &condVal)) {
        live_lab_id = terminator->GetSingleWordInOperand(
            condVal ? kBranchCondTrueLabIdInIdx : kBranchCondFalseLabIdInIdx);
      }
    } else if (terminator->opcode() == spv::Op::OpSwitch) {
      uint32_t sel_val;
      if (GetConstInteger(terminator->GetSingleWordInOperand(0u), &sel_val)) {
        // Search switch operands for selector value, set live_lab_id to
        // corresponding label, use default if not found.
        // In-operand layout: 0 = selector, 1 = default label, then
        // alternating (case literal, case label) pairs.
        uint32_t icnt = 0;
        uint32_t case_val;
        terminator->WhileEachInOperand(
            [&icnt, &case_val, &sel_val, &live_lab_id](const uint32_t* idp) {
              if (icnt == 1) {
                // Start with default label.
                live_lab_id = *idp;
              } else if (icnt > 1) {
                if (icnt % 2 == 0) {
                  // Even index: a case literal; remember it for the label
                  // that follows.
                  case_val = *idp;
                } else {
                  if (case_val == sel_val) {
                    live_lab_id = *idp;
                    // Stop scanning: the matching case wins over default.
                    return false;
                  }
                }
              }
              ++icnt;
              return true;
            });
      }
    }

    // Don't simplify back edges unless it becomes a branch to the header.
    // Every loop must have exactly one back edge to the loop header, so we
    // cannot remove it.
    bool simplify = false;
    if (live_lab_id != 0) {
      if (!blocks_with_backedge.count(block)) {
        // This is not a back edge.
        simplify = true;
      } else {
        const auto& struct_cfg_analysis = context()->GetStructuredCFGAnalysis();
        uint32_t header_id = struct_cfg_analysis->ContainingLoop(block->id());
        if (live_lab_id == header_id) {
          // The new branch will be a branch to the header.
          simplify = true;
        }
      }
    }

    if (simplify) {
      conditions_to_simplify.push_back({block, live_lab_id});
      // Only the single live successor is traversed.
      stack.push_back(GetParentBlock(live_lab_id));
    } else {
      // All successors are live.
      const auto* const_block = block;
      const_block->ForEachSuccessorLabel([&stack, this](const uint32_t label) {
        stack.push_back(GetParentBlock(label));
      });
    }
  }

  // Traverse |conditions_to_simplify| in reverse order. This is done so that
  // we simplify nested constructs before simplifying the constructs that
  // contain them.
  for (auto b = conditions_to_simplify.rbegin();
       b != conditions_to_simplify.rend(); ++b) {
    modified |= SimplifyBranch(b->first, b->second);
  }

  return modified;
}
// Replaces the conditional terminator of |block| with an unconditional
// branch to |live_lab_id|, or — when the terminator is a switch that must
// be kept because of a nested break — prunes it down to the single live
// case.  Also kills or relocates the selection merge instruction as needed.
// Returns true if anything was changed.
bool DeadBranchElimPass::SimplifyBranch(BasicBlock* block,
                                        uint32_t live_lab_id) {
  Instruction* merge_inst = block->GetMergeInst();
  Instruction* terminator = block->terminator();
  if (merge_inst && merge_inst->opcode() == spv::Op::OpSelectionMerge) {
    if (merge_inst->NextNode()->opcode() == spv::Op::OpSwitch &&
        SwitchHasNestedBreak(block->id())) {
      if (terminator->NumInOperands() == 2) {
        // We cannot remove the branch, and it already has a single case, so
        // no work to do.
        return false;
      }
      // We have to keep the switch because it has a nest break, so we
      // remove all cases except for the live one.  Keeping only the selector
      // (operand 0) and the live label as the default target.
      Instruction::OperandList new_operands;
      new_operands.push_back(terminator->GetInOperand(0));
      new_operands.push_back({SPV_OPERAND_TYPE_ID, {live_lab_id}});
      terminator->SetInOperands(std::move(new_operands));
      context()->UpdateDefUse(terminator);
    } else {
      // Check if the merge instruction is still needed because of a
      // non-nested break from the construct. Move the merge instruction if
      // it is still needed.
      StructuredCFGAnalysis* cfg_analysis =
          context()->GetStructuredCFGAnalysis();
      Instruction* first_break = FindFirstExitFromSelectionMerge(
          live_lab_id, merge_inst->GetSingleWordInOperand(0),
          cfg_analysis->LoopMergeBlock(live_lab_id),
          cfg_analysis->LoopContinueBlock(live_lab_id),
          cfg_analysis->SwitchMergeBlock(live_lab_id));
      AddBranch(live_lab_id, block);
      context()->KillInst(terminator);
      if (first_break == nullptr) {
        // No remaining exit uses the merge block: the merge instruction is
        // dead.
        context()->KillInst(merge_inst);
      } else {
        // Re-home the merge instruction just before the first remaining
        // conditional exit so the construct stays structured.
        merge_inst->RemoveFromList();
        first_break->InsertBefore(std::unique_ptr<Instruction>(merge_inst));
        context()->set_instr_block(merge_inst,
                                   context()->get_instr_block(first_break));
      }
    }
  } else {
    // No selection merge to maintain: simply replace the terminator.
    AddBranch(live_lab_id, block);
    context()->KillInst(terminator);
  }
  return true;
}
  219. void DeadBranchElimPass::MarkUnreachableStructuredTargets(
  220. const std::unordered_set<BasicBlock*>& live_blocks,
  221. std::unordered_set<BasicBlock*>* unreachable_merges,
  222. std::unordered_map<BasicBlock*, BasicBlock*>* unreachable_continues) {
  223. for (auto block : live_blocks) {
  224. if (auto merge_id = block->MergeBlockIdIfAny()) {
  225. BasicBlock* merge_block = GetParentBlock(merge_id);
  226. if (!live_blocks.count(merge_block)) {
  227. unreachable_merges->insert(merge_block);
  228. }
  229. if (auto cont_id = block->ContinueBlockIdIfAny()) {
  230. BasicBlock* cont_block = GetParentBlock(cont_id);
  231. if (!live_blocks.count(cont_block)) {
  232. (*unreachable_continues)[cont_block] = block;
  233. }
  234. }
  235. }
  236. }
  237. }
// Rewrites the OpPhi instructions of every live block of |func| so that they
// only reference live incoming edges.  Edges from an unreachable continue
// block into its loop header are kept (with an undef value) to preserve the
// structured back edge.  Phis reduced to a single incoming value are replaced
// by that value.  Returns true if any phi was changed.
bool DeadBranchElimPass::FixPhiNodesInLiveBlocks(
    Function* func, const std::unordered_set<BasicBlock*>& live_blocks,
    const std::unordered_map<BasicBlock*, BasicBlock*>& unreachable_continues) {
  bool modified = false;
  for (auto& block : *func) {
    if (live_blocks.count(&block)) {
      for (auto iter = block.begin(); iter != block.end();) {
        // Phis are required to be at the start of a block, so stop at the
        // first non-phi instruction.
        if (iter->opcode() != spv::Op::OpPhi) {
          break;
        }

        bool changed = false;
        bool backedge_added = false;
        Instruction* inst = &*iter;
        std::vector<Operand> operands;
        // Build a complete set of operands (not just input operands). Start
        // with type and result id operands.
        operands.push_back(inst->GetOperand(0u));
        operands.push_back(inst->GetOperand(1u));
        // Iterate through the incoming labels and determine which to keep
        // and/or modify.  If there in an unreachable continue block, there
        // will be an edge from that block to the header.  We need to keep it
        // to maintain the structured control flow.  If the header has more
        // that 2 incoming edges, then the OpPhi must have an entry for that
        // edge.  However, if there is only one other incoming edge, the OpPhi
        // can be eliminated.
        // In-operands alternate (value, predecessor-label); |i| walks the
        // label positions.
        for (uint32_t i = 1; i < inst->NumInOperands(); i += 2) {
          BasicBlock* inc = GetParentBlock(inst->GetSingleWordInOperand(i));
          auto cont_iter = unreachable_continues.find(inc);
          if (cont_iter != unreachable_continues.end() &&
              cont_iter->second == &block && inst->NumInOperands() > 4) {
            if (get_def_use_mgr()
                    ->GetDef(inst->GetSingleWordInOperand(i - 1))
                    ->opcode() == spv::Op::OpUndef) {
              // Already undef incoming value, no change necessary.
              operands.push_back(inst->GetInOperand(i - 1));
              operands.push_back(inst->GetInOperand(i));
              backedge_added = true;
            } else {
              // Replace incoming value with undef if this phi exists in the
              // loop header. Otherwise, this edge is not live since the
              // unreachable continue block will be replaced with an
              // unconditional branch to the header only.
              operands.emplace_back(
                  SPV_OPERAND_TYPE_ID,
                  std::initializer_list<uint32_t>{Type2Undef(inst->type_id())});
              operands.push_back(inst->GetInOperand(i));
              changed = true;
              backedge_added = true;
            }
          } else if (live_blocks.count(inc) && inc->IsSuccessor(&block)) {
            // Keep live incoming edge.
            operands.push_back(inst->GetInOperand(i - 1));
            operands.push_back(inst->GetInOperand(i));
          } else {
            // Remove incoming edge.
            changed = true;
          }
        }

        if (changed) {
          modified = true;
          uint32_t continue_id = block.ContinueBlockIdIfAny();
          if (!backedge_added && continue_id != 0 &&
              unreachable_continues.count(GetParentBlock(continue_id)) &&
              operands.size() > 4) {
            // Changed the backedge to branch from the continue block instead
            // of a successor of the continue block. Add an entry to the phi
            // to provide an undef for the continue block. Since the successor
            // of the continue must also be unreachable (dominated by the
            // continue block), any entry for the original backedge has been
            // removed from the phi operands.
            operands.emplace_back(
                SPV_OPERAND_TYPE_ID,
                std::initializer_list<uint32_t>{Type2Undef(inst->type_id())});
            operands.emplace_back(SPV_OPERAND_TYPE_ID,
                                  std::initializer_list<uint32_t>{continue_id});
          }

          // Either replace the phi with a single value or rebuild the phi out
          // of |operands|.
          //
          // We always have type and result id operands. So this phi has a
          // single source if there are two more operands beyond those.
          if (operands.size() == 4) {
            // First input data operands is at index 2.
            uint32_t replId = operands[2u].words[0];
            context()->KillNamesAndDecorates(inst->result_id());
            context()->ReplaceAllUsesWith(inst->result_id(), replId);
            iter = context()->KillInst(&*inst);
          } else {
            // We've rewritten the operands, so first instruct the def/use
            // manager to forget uses in the phi before we replace them.
            // After replacing operands update the def/use manager by
            // re-analyzing the used ids in this phi.
            get_def_use_mgr()->EraseUseRecordsOfOperandIds(inst);
            inst->ReplaceOperands(operands);
            get_def_use_mgr()->AnalyzeInstUse(inst);
            ++iter;
          }
        } else {
          ++iter;
        }
      }
    }
  }
  return modified;
}
// Erases or neuters the dead blocks of |func|.  Unreachable continue blocks
// are reduced to a lone branch back to their loop header; unreachable merge
// blocks are reduced to OpUnreachable (both must keep their labels to stay
// referenced by their construct's merge instruction); all other dead blocks
// are removed entirely.  Returns true if anything changed.
bool DeadBranchElimPass::EraseDeadBlocks(
    Function* func, const std::unordered_set<BasicBlock*>& live_blocks,
    const std::unordered_set<BasicBlock*>& unreachable_merges,
    const std::unordered_map<BasicBlock*, BasicBlock*>& unreachable_continues) {
  bool modified = false;
  for (auto ebi = func->begin(); ebi != func->end();) {
    if (unreachable_continues.count(&*ebi)) {
      // |cont_id| is the loop header this continue block must branch to.
      uint32_t cont_id = unreachable_continues.find(&*ebi)->second->id();
      // Skip the rewrite if the block is already exactly a single branch to
      // the header.
      if (ebi->begin() != ebi->tail() ||
          ebi->terminator()->opcode() != spv::Op::OpBranch ||
          ebi->terminator()->GetSingleWordInOperand(0u) != cont_id) {
        // Make unreachable, but leave the label.
        KillAllInsts(&*ebi, false);
        // Add unconditional branch to header.
        assert(unreachable_continues.count(&*ebi));
        ebi->AddInstruction(MakeUnique<Instruction>(
            context(), spv::Op::OpBranch, 0, 0,
            std::initializer_list<Operand>{{SPV_OPERAND_TYPE_ID, {cont_id}}}));
        get_def_use_mgr()->AnalyzeInstUse(&*ebi->tail());
        context()->set_instr_block(&*ebi->tail(), &*ebi);
        modified = true;
      }
      ++ebi;
    } else if (unreachable_merges.count(&*ebi)) {
      // Skip the rewrite if the block is already exactly OpUnreachable.
      if (ebi->begin() != ebi->tail() ||
          ebi->terminator()->opcode() != spv::Op::OpUnreachable) {
        // Make unreachable, but leave the label.
        KillAllInsts(&*ebi, false);
        // Add unreachable terminator.
        ebi->AddInstruction(
            MakeUnique<Instruction>(context(), spv::Op::OpUnreachable, 0, 0,
                                    std::initializer_list<Operand>{}));
        context()->AnalyzeUses(ebi->terminator());
        context()->set_instr_block(ebi->terminator(), &*ebi);
        modified = true;
      }
      ++ebi;
    } else if (!live_blocks.count(&*ebi)) {
      // Kill this block.
      KillAllInsts(&*ebi);
      ebi = ebi.Erase();
      modified = true;
    } else {
      ++ebi;
    }
  }
  return modified;
}
  391. bool DeadBranchElimPass::EliminateDeadBranches(Function* func) {
  392. if (func->IsDeclaration()) {
  393. return false;
  394. }
  395. bool modified = false;
  396. std::unordered_set<BasicBlock*> live_blocks;
  397. modified |= MarkLiveBlocks(func, &live_blocks);
  398. std::unordered_set<BasicBlock*> unreachable_merges;
  399. std::unordered_map<BasicBlock*, BasicBlock*> unreachable_continues;
  400. MarkUnreachableStructuredTargets(live_blocks, &unreachable_merges,
  401. &unreachable_continues);
  402. modified |= FixPhiNodesInLiveBlocks(func, live_blocks, unreachable_continues);
  403. modified |= EraseDeadBlocks(func, live_blocks, unreachable_merges,
  404. unreachable_continues);
  405. return modified;
  406. }
  407. void DeadBranchElimPass::FixBlockOrder() {
  408. context()->BuildInvalidAnalyses(IRContext::kAnalysisCFG |
  409. IRContext::kAnalysisDominatorAnalysis);
  410. // Reorders blocks according to DFS of dominator tree.
  411. ProcessFunction reorder_dominators = [this](Function* function) {
  412. DominatorAnalysis* dominators = context()->GetDominatorAnalysis(function);
  413. std::vector<BasicBlock*> blocks;
  414. for (auto iter = dominators->GetDomTree().begin();
  415. iter != dominators->GetDomTree().end(); ++iter) {
  416. if (iter->id() != 0) {
  417. blocks.push_back(iter->bb_);
  418. }
  419. }
  420. for (uint32_t i = 1; i < blocks.size(); ++i) {
  421. function->MoveBasicBlockToAfter(blocks[i]->id(), blocks[i - 1]);
  422. }
  423. return true;
  424. };
  425. // Reorders blocks according to structured order.
  426. ProcessFunction reorder_structured = [](Function* function) {
  427. function->ReorderBasicBlocksInStructuredOrder();
  428. return true;
  429. };
  430. // Structured order is more intuitive so use it where possible.
  431. if (context()->get_feature_mgr()->HasCapability(spv::Capability::Shader)) {
  432. context()->ProcessReachableCallTree(reorder_structured);
  433. } else {
  434. context()->ProcessReachableCallTree(reorder_dominators);
  435. }
  436. }
  437. Pass::Status DeadBranchElimPass::Process() {
  438. // Do not process if module contains OpGroupDecorate. Additional
  439. // support required in KillNamesAndDecorates().
  440. // TODO(greg-lunarg): Add support for OpGroupDecorate
  441. for (auto& ai : get_module()->annotations())
  442. if (ai.opcode() == spv::Op::OpGroupDecorate)
  443. return Status::SuccessWithoutChange;
  444. // Process all entry point functions
  445. ProcessFunction pfn = [this](Function* fp) {
  446. return EliminateDeadBranches(fp);
  447. };
  448. bool modified = context()->ProcessReachableCallTree(pfn);
  449. if (modified) FixBlockOrder();
  450. return modified ? Status::SuccessWithChange : Status::SuccessWithoutChange;
  451. }
// Walks the control flow from |start_block_id| looking for the first branch
// that conditionally exits the selection construct whose merge block is
// |merge_block_id|, without entering nested constructs.  |loop_merge_id|,
// |loop_continue_id|, and |switch_merge_id| are the structured exits of the
// enclosing loop/switch (0 if none) and bound the walk.  Returns the exiting
// branch instruction, or nullptr if no such branch exists.
Instruction* DeadBranchElimPass::FindFirstExitFromSelectionMerge(
    uint32_t start_block_id, uint32_t merge_block_id, uint32_t loop_merge_id,
    uint32_t loop_continue_id, uint32_t switch_merge_id) {
  // To find the "first" exit, we follow branches looking for a conditional
  // branch that is not in a nested construct and is not the header of a new
  // construct.  We follow the control flow from |start_block_id| to find the
  // first one.
  while (start_block_id != merge_block_id && start_block_id != loop_merge_id &&
         start_block_id != loop_continue_id) {
    BasicBlock* start_block = context()->get_instr_block(start_block_id);
    Instruction* branch = start_block->terminator();
    uint32_t next_block_id = 0;
    switch (branch->opcode()) {
      case spv::Op::OpBranchConditional:
        // If this block is itself a header, skip over its whole construct by
        // jumping to its merge block.
        next_block_id = start_block->MergeBlockIdIfAny();
        if (next_block_id == 0) {
          // If a possible target is the |loop_merge_id| or |loop_continue_id|,
          // which are not the current merge node, then we continue the search
          // with the other target.
          for (uint32_t i = 1; i < 3; i++) {
            if (branch->GetSingleWordInOperand(i) == loop_merge_id &&
                loop_merge_id != merge_block_id) {
              next_block_id = branch->GetSingleWordInOperand(3 - i);
              break;
            }
            if (branch->GetSingleWordInOperand(i) == loop_continue_id &&
                loop_continue_id != merge_block_id) {
              next_block_id = branch->GetSingleWordInOperand(3 - i);
              break;
            }
            if (branch->GetSingleWordInOperand(i) == switch_merge_id &&
                switch_merge_id != merge_block_id) {
              next_block_id = branch->GetSingleWordInOperand(3 - i);
              break;
            }
          }
          if (next_block_id == 0) {
            // Neither target is a structured exit of an enclosing construct:
            // this is the conditional exit we are looking for.
            return branch;
          }
        }
        break;
      case spv::Op::OpSwitch:
        next_block_id = start_block->MergeBlockIdIfAny();
        if (next_block_id == 0) {
          // A switch with no merge instructions can have at most 5 targets:
          //   a. |merge_block_id|
          //   b. |loop_merge_id|
          //   c. |loop_continue_id|
          //   d. |switch_merge_id|
          //   e. 1 block inside the current region.
          //
          // Note that because this is a switch, |merge_block_id| must equal
          // |switch_merge_id|.
          //
          // This leads to a number of cases of what to do.
          //
          // 1. Does not jump to a block inside of the current construct.  In
          //    this case, there is not conditional break, so we should return
          //    |nullptr|.
          //
          // 2. Jumps to |merge_block_id| and a block inside the current
          //    construct.  In this case, this branch conditionally break to
          //    the end of the current construct, so return the current
          //    branch.
          //
          // 3. Otherwise, this branch may break, but not to the current merge
          //    block.  So we continue with the block that is inside the loop.
          bool found_break = false;
          for (uint32_t i = 1; i < branch->NumInOperands(); i += 2) {
            uint32_t target = branch->GetSingleWordInOperand(i);
            if (target == merge_block_id) {
              found_break = true;
            } else if (target != loop_merge_id && target != loop_continue_id) {
              next_block_id = branch->GetSingleWordInOperand(i);
            }
          }
          if (next_block_id == 0) {
            // Case 1.
            return nullptr;
          }
          if (found_break) {
            // Case 2.
            return branch;
          }
          // The fall through is case 3.
        }
        break;
      case spv::Op::OpBranch:
        // Need to check if this is the header of a loop nested in the
        // selection construct.
        next_block_id = start_block->MergeBlockIdIfAny();
        if (next_block_id == 0) {
          next_block_id = branch->GetSingleWordInOperand(0);
        }
        break;
      default:
        // Terminators like OpReturn/OpUnreachable cannot exit to the merge.
        return nullptr;
    }
    start_block_id = next_block_id;
  }
  return nullptr;
}
  553. void DeadBranchElimPass::AddBlocksWithBackEdge(
  554. uint32_t cont_id, uint32_t header_id, uint32_t merge_id,
  555. std::unordered_set<BasicBlock*>* blocks_with_back_edges) {
  556. std::unordered_set<uint32_t> visited;
  557. visited.insert(cont_id);
  558. visited.insert(header_id);
  559. visited.insert(merge_id);
  560. std::vector<uint32_t> work_list;
  561. work_list.push_back(cont_id);
  562. while (!work_list.empty()) {
  563. uint32_t bb_id = work_list.back();
  564. work_list.pop_back();
  565. BasicBlock* bb = context()->get_instr_block(bb_id);
  566. bool has_back_edge = false;
  567. bb->ForEachSuccessorLabel([header_id, &visited, &work_list,
  568. &has_back_edge](uint32_t* succ_label_id) {
  569. if (visited.insert(*succ_label_id).second) {
  570. work_list.push_back(*succ_label_id);
  571. }
  572. if (*succ_label_id == header_id) {
  573. has_back_edge = true;
  574. }
  575. });
  576. if (has_back_edge) {
  577. blocks_with_back_edges->insert(bb);
  578. }
  579. }
  580. }
  581. bool DeadBranchElimPass::SwitchHasNestedBreak(uint32_t switch_header_id) {
  582. std::vector<BasicBlock*> block_in_construct;
  583. BasicBlock* start_block = context()->get_instr_block(switch_header_id);
  584. uint32_t merge_block_id = start_block->MergeBlockIdIfAny();
  585. StructuredCFGAnalysis* cfg_analysis = context()->GetStructuredCFGAnalysis();
  586. return !get_def_use_mgr()->WhileEachUser(
  587. merge_block_id,
  588. [this, cfg_analysis, switch_header_id](Instruction* inst) {
  589. if (!inst->IsBranch()) {
  590. return true;
  591. }
  592. BasicBlock* bb = context()->get_instr_block(inst);
  593. if (bb->id() == switch_header_id) {
  594. return true;
  595. }
  596. return (cfg_analysis->ContainingConstruct(inst) == switch_header_id &&
  597. bb->GetMergeInst() == nullptr);
  598. });
  599. }
  600. } // namespace opt
  601. } // namespace spvtools