validate_cfg.cpp 49 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
712781279
  1. // Copyright (c) 2015-2016 The Khronos Group Inc.
  2. //
  3. // Licensed under the Apache License, Version 2.0 (the "License");
  4. // you may not use this file except in compliance with the License.
  5. // You may obtain a copy of the License at
  6. //
  7. // http://www.apache.org/licenses/LICENSE-2.0
  8. //
  9. // Unless required by applicable law or agreed to in writing, software
  10. // distributed under the License is distributed on an "AS IS" BASIS,
  11. // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. // See the License for the specific language governing permissions and
  13. // limitations under the License.
#include <algorithm>
#include <cassert>
#include <functional>
#include <iostream>
#include <map>
#include <string>
#include <tuple>
#include <unordered_map>
#include <unordered_set>
#include <utility>
#include <vector>

#include "source/cfa.h"
#include "source/opcode.h"
#include "source/spirv_constant.h"
#include "source/spirv_validator_options.h"
#include "source/val/basic_block.h"
#include "source/val/construct.h"
#include "source/val/function.h"
#include "source/val/validate.h"
#include "source/val/validation_state.h"
  33. namespace spvtools {
  34. namespace val {
  35. namespace {
// Validates an OpPhi instruction: operands must come in (value, block) pairs,
// the result type is restricted, and the incoming blocks must correspond 1:1
// with the unique predecessors of the phi's basic block.
spv_result_t ValidatePhi(ValidationState_t& _, const Instruction* inst) {
  auto block = inst->block();
  // Words 0..2 are opcode/result-type/result-id; the remainder are the
  // (incoming value, predecessor label) operand pairs.
  size_t num_in_ops = inst->words().size() - 3;
  if (num_in_ops % 2 != 0) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "OpPhi does not have an equal number of incoming values and "
              "basic blocks.";
  }
  if (_.IsVoidType(inst->type_id())) {
    return _.diag(SPV_ERROR_INVALID_DATA, inst)
           << "OpPhi must not have void result type";
  }
  // Under the Logical addressing model, pointer-valued phis require one of
  // the variable-pointers capabilities.
  if (_.IsPointerType(inst->type_id()) &&
      _.addressing_model() == spv::AddressingModel::Logical) {
    if (!_.features().variable_pointers) {
      return _.diag(SPV_ERROR_INVALID_DATA, inst)
             << "Using pointers with OpPhi requires capability "
             << "VariablePointers or VariablePointersStorageBuffer";
    }
  }
  const Instruction* type_inst = _.FindDef(inst->type_id());
  assert(type_inst);
  const spv::Op type_opcode = type_inst->opcode();
  // Image/sampler result types are rejected, except while still running
  // pre-legalization HLSL or when bindless textures (NV) are enabled.
  if (!_.options()->before_hlsl_legalization &&
      !_.HasCapability(spv::Capability::BindlessTextureNV)) {
    if (type_opcode == spv::Op::OpTypeSampledImage ||
        (_.HasCapability(spv::Capability::Shader) &&
         (type_opcode == spv::Op::OpTypeImage ||
          type_opcode == spv::Op::OpTypeSampler))) {
      return _.diag(SPV_ERROR_INVALID_ID, inst)
             << "Result type cannot be Op" << spvOpcodeString(type_opcode);
    }
  }
  // Create a uniqued vector of predecessor ids for comparison against
  // incoming values. OpBranchConditional %cond %label %label produces two
  // predecessors in the CFG.
  std::vector<uint32_t> pred_ids;
  std::transform(block->predecessors()->begin(), block->predecessors()->end(),
                 std::back_inserter(pred_ids),
                 [](const BasicBlock* b) { return b->id(); });
  std::sort(pred_ids.begin(), pred_ids.end());
  pred_ids.erase(std::unique(pred_ids.begin(), pred_ids.end()), pred_ids.end());
  size_t num_edges = num_in_ops / 2;
  if (num_edges != pred_ids.size()) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "OpPhi's number of incoming blocks (" << num_edges
           << ") does not match block's predecessor count ("
           << block->predecessors()->size() << ").";
  }
  std::unordered_set<uint32_t> observed_predecessors;
  // Operand words alternate starting at word 3: odd indices carry incoming
  // values, even indices carry the corresponding predecessor labels.
  for (size_t i = 3; i < inst->words().size(); ++i) {
    auto inc_id = inst->word(i);
    if (i % 2 == 1) {
      // Incoming value type must match the phi result type.
      auto inc_type_id = _.GetTypeId(inc_id);
      if (inst->type_id() != inc_type_id) {
        return _.diag(SPV_ERROR_INVALID_ID, inst)
               << "OpPhi's result type <id> " << _.getIdName(inst->type_id())
               << " does not match incoming value <id> " << _.getIdName(inc_id)
               << " type <id> " << _.getIdName(inc_type_id) << ".";
      }
    } else {
      if (_.GetIdOpcode(inc_id) != spv::Op::OpLabel) {
        return _.diag(SPV_ERROR_INVALID_ID, inst)
               << "OpPhi's incoming basic block <id> " << _.getIdName(inc_id)
               << " is not an OpLabel.";
      }
      // Incoming basic block must be an immediate predecessor of the phi's
      // block. pred_ids is sorted above, so binary_search is valid here.
      if (!std::binary_search(pred_ids.begin(), pred_ids.end(), inc_id)) {
        return _.diag(SPV_ERROR_INVALID_ID, inst)
               << "OpPhi's incoming basic block <id> " << _.getIdName(inc_id)
               << " is not a predecessor of <id> " << _.getIdName(block->id())
               << ".";
      }
      // We must not have already seen this predecessor as one of the phi's
      // operands.
      if (observed_predecessors.count(inc_id) != 0) {
        return _.diag(SPV_ERROR_INVALID_ID, inst)
               << "OpPhi references incoming basic block <id> "
               << _.getIdName(inc_id) << " multiple times.";
      }
      // Note the fact that we have now observed this predecessor.
      observed_predecessors.insert(inc_id);
    }
  }
  return SPV_SUCCESS;
}
  124. spv_result_t ValidateBranch(ValidationState_t& _, const Instruction* inst) {
  125. // target operands must be OpLabel
  126. const auto id = inst->GetOperandAs<uint32_t>(0);
  127. const auto target = _.FindDef(id);
  128. if (!target || spv::Op::OpLabel != target->opcode()) {
  129. return _.diag(SPV_ERROR_INVALID_ID, inst)
  130. << "'Target Label' operands for OpBranch must be the ID "
  131. "of an OpLabel instruction";
  132. }
  133. return SPV_SUCCESS;
  134. }
// Validates OpBranchConditional: a scalar boolean condition, OpLabel targets,
// optional branch-weight literals, and (SPIR-V 1.6+) distinct labels.
spv_result_t ValidateBranchConditional(ValidationState_t& _,
                                       const Instruction* inst) {
  // num_operands is either 3 or 5 --- if 5, the last two need to be literal
  // integers
  const auto num_operands = inst->operands().size();
  if (num_operands != 3 && num_operands != 5) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "OpBranchConditional requires either 3 or 5 parameters";
  }
  // grab the condition operand and check that it is a bool
  const auto cond_id = inst->GetOperandAs<uint32_t>(0);
  const auto cond_op = _.FindDef(cond_id);
  if (!cond_op || !cond_op->type_id() ||
      !_.IsBoolScalarType(cond_op->type_id())) {
    return _.diag(SPV_ERROR_INVALID_ID, inst) << "Condition operand for "
                                                 "OpBranchConditional must be "
                                                 "of boolean type";
  }
  // target operands must be OpLabel
  // note that we don't need to check that the target labels are in the same
  // function,
  // PerformCfgChecks already checks for that
  const auto true_id = inst->GetOperandAs<uint32_t>(1);
  const auto true_target = _.FindDef(true_id);
  if (!true_target || spv::Op::OpLabel != true_target->opcode()) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "The 'True Label' operand for OpBranchConditional must be the "
              "ID of an OpLabel instruction";
  }
  const auto false_id = inst->GetOperandAs<uint32_t>(2);
  const auto false_target = _.FindDef(false_id);
  if (!false_target || spv::Op::OpLabel != false_target->opcode()) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "The 'False Label' operand for OpBranchConditional must be the "
              "ID of an OpLabel instruction";
  }
  // A similar requirement for SPV_KHR_maximal_reconvergence is deferred until
  // entry point call trees have been recorded.
  if (_.version() >= SPV_SPIRV_VERSION_WORD(1, 6) && true_id == false_id) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "In SPIR-V 1.6 or later, True Label and False Label must be "
              "different labels";
  }
  return SPV_SUCCESS;
}
  180. spv_result_t ValidateSwitch(ValidationState_t& _, const Instruction* inst) {
  181. const auto num_operands = inst->operands().size();
  182. // At least two operands (selector, default), any more than that are
  183. // literal/target.
  184. const auto sel_type_id = _.GetOperandTypeId(inst, 0);
  185. if (!_.IsIntScalarType(sel_type_id)) {
  186. return _.diag(SPV_ERROR_INVALID_ID, inst)
  187. << "Selector type must be OpTypeInt";
  188. }
  189. const auto default_label = _.FindDef(inst->GetOperandAs<uint32_t>(1));
  190. if (default_label->opcode() != spv::Op::OpLabel) {
  191. return _.diag(SPV_ERROR_INVALID_ID, inst)
  192. << "Default must be an OpLabel instruction";
  193. }
  194. // target operands must be OpLabel
  195. for (size_t i = 2; i < num_operands; i += 2) {
  196. // literal, id
  197. const auto id = inst->GetOperandAs<uint32_t>(i + 1);
  198. const auto target = _.FindDef(id);
  199. if (!target || spv::Op::OpLabel != target->opcode()) {
  200. return _.diag(SPV_ERROR_INVALID_ID, inst)
  201. << "'Target Label' operands for OpSwitch must be IDs of an "
  202. "OpLabel instruction";
  203. }
  204. }
  205. return SPV_SUCCESS;
  206. }
  207. spv_result_t ValidateReturnValue(ValidationState_t& _,
  208. const Instruction* inst) {
  209. const auto value_id = inst->GetOperandAs<uint32_t>(0);
  210. const auto value = _.FindDef(value_id);
  211. if (!value || !value->type_id()) {
  212. return _.diag(SPV_ERROR_INVALID_ID, inst)
  213. << "OpReturnValue Value <id> " << _.getIdName(value_id)
  214. << " does not represent a value.";
  215. }
  216. auto value_type = _.FindDef(value->type_id());
  217. if (!value_type || spv::Op::OpTypeVoid == value_type->opcode()) {
  218. return _.diag(SPV_ERROR_INVALID_ID, inst)
  219. << "OpReturnValue value's type <id> "
  220. << _.getIdName(value->type_id()) << " is missing or void.";
  221. }
  222. if (_.addressing_model() == spv::AddressingModel::Logical &&
  223. (spv::Op::OpTypePointer == value_type->opcode() ||
  224. spv::Op::OpTypeUntypedPointerKHR == value_type->opcode()) &&
  225. !_.features().variable_pointers && !_.options()->relax_logical_pointer) {
  226. return _.diag(SPV_ERROR_INVALID_ID, inst)
  227. << "OpReturnValue value's type <id> "
  228. << _.getIdName(value->type_id())
  229. << " is a pointer, which is invalid in the Logical addressing "
  230. "model.";
  231. }
  232. const auto function = inst->function();
  233. const auto return_type = _.FindDef(function->GetResultTypeId());
  234. if (!return_type || return_type->id() != value_type->id()) {
  235. return _.diag(SPV_ERROR_INVALID_ID, inst)
  236. << "OpReturnValue Value <id> " << _.getIdName(value_id)
  237. << "s type does not match OpFunction's return type.";
  238. }
  239. return SPV_SUCCESS;
  240. }
  241. uint32_t operator>>(const spv::LoopControlShift& lhs,
  242. const spv::LoopControlShift& rhs) {
  243. return uint32_t(lhs) >> uint32_t(rhs);
  244. }
// Validates OpLoopMerge: Merge Block and Continue Target must be distinct
// OpLabels (the merge may not be the header itself), and the Loop Control
// mask must not combine mutually exclusive hints.
spv_result_t ValidateLoopMerge(ValidationState_t& _, const Instruction* inst) {
  const auto merge_id = inst->GetOperandAs<uint32_t>(0);
  const auto merge = _.FindDef(merge_id);
  if (!merge || merge->opcode() != spv::Op::OpLabel) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "Merge Block " << _.getIdName(merge_id) << " must be an OpLabel";
  }
  if (merge_id == inst->block()->id()) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "Merge Block may not be the block containing the OpLoopMerge\n";
  }
  const auto continue_id = inst->GetOperandAs<uint32_t>(1);
  const auto continue_target = _.FindDef(continue_id);
  if (!continue_target || continue_target->opcode() != spv::Op::OpLabel) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "Continue Target " << _.getIdName(continue_id)
           << " must be an OpLabel";
  }
  if (merge_id == continue_id) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "Merge Block and Continue Target must be different ids";
  }
  // Loop Control is a bit mask; the file-local operator>> extracts single
  // bits by shift name.
  const auto loop_control = inst->GetOperandAs<spv::LoopControlShift>(2);
  if ((loop_control >> spv::LoopControlShift::Unroll) & 0x1 &&
      (loop_control >> spv::LoopControlShift::DontUnroll) & 0x1) {
    return _.diag(SPV_ERROR_INVALID_DATA, inst)
           << "Unroll and DontUnroll loop controls must not both be specified";
  }
  if ((loop_control >> spv::LoopControlShift::DontUnroll) & 0x1 &&
      (loop_control >> spv::LoopControlShift::PeelCount) & 0x1) {
    return _.diag(SPV_ERROR_INVALID_DATA, inst) << "PeelCount and DontUnroll "
                                                   "loop controls must not "
                                                   "both be specified";
  }
  if ((loop_control >> spv::LoopControlShift::DontUnroll) & 0x1 &&
      (loop_control >> spv::LoopControlShift::PartialCount) & 0x1) {
    return _.diag(SPV_ERROR_INVALID_DATA, inst) << "PartialCount and "
                                                   "DontUnroll loop controls "
                                                   "must not both be specified";
  }
  // Each set control bit below contributes one extra literal operand, in bit
  // order; |operand| tracks the operand index of the literal belonging to the
  // bit currently being examined.
  uint32_t operand = 3;
  if ((loop_control >> spv::LoopControlShift::DependencyLength) & 0x1) {
    ++operand;
  }
  if ((loop_control >> spv::LoopControlShift::MinIterations) & 0x1) {
    ++operand;
  }
  if ((loop_control >> spv::LoopControlShift::MaxIterations) & 0x1) {
    ++operand;
  }
  if ((loop_control >> spv::LoopControlShift::IterationMultiple) & 0x1) {
    // IterationMultiple's literal must be present and non-zero.
    if (inst->operands().size() < operand ||
        inst->GetOperandAs<uint32_t>(operand) == 0) {
      return _.diag(SPV_ERROR_INVALID_DATA, inst) << "IterationMultiple loop "
                                                     "control operand must be "
                                                     "greater than zero";
    }
    ++operand;
  }
  if ((loop_control >> spv::LoopControlShift::PeelCount) & 0x1) {
    ++operand;
  }
  if ((loop_control >> spv::LoopControlShift::PartialCount) & 0x1) {
    ++operand;
  }
  // That the right number of operands is present is checked by the parser. The
  // above code tracks operands for expanded validation checking in the future.
  return SPV_SUCCESS;
}
  314. } // namespace
  315. void printDominatorList(const BasicBlock& b) {
  316. std::cout << b.id() << " is dominated by: ";
  317. const BasicBlock* bb = &b;
  318. while (bb->immediate_dominator() != bb) {
  319. bb = bb->immediate_dominator();
  320. std::cout << bb->id() << " ";
  321. }
  322. }
  323. #define CFG_ASSERT(ASSERT_FUNC, TARGET) \
  324. if (spv_result_t rcode = ASSERT_FUNC(_, TARGET)) return rcode
  325. spv_result_t FirstBlockAssert(ValidationState_t& _, uint32_t target) {
  326. if (_.current_function().IsFirstBlock(target)) {
  327. return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(_.current_function().id()))
  328. << "First block " << _.getIdName(target) << " of function "
  329. << _.getIdName(_.current_function().id()) << " is targeted by block "
  330. << _.getIdName(_.current_function().current_block()->id());
  331. }
  332. return SPV_SUCCESS;
  333. }
  334. spv_result_t MergeBlockAssert(ValidationState_t& _, uint32_t merge_block) {
  335. if (_.current_function().IsBlockType(merge_block, kBlockTypeMerge)) {
  336. return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(_.current_function().id()))
  337. << "Block " << _.getIdName(merge_block)
  338. << " is already a merge block for another header";
  339. }
  340. return SPV_SUCCESS;
  341. }
  342. /// Update the continue construct's exit blocks once the backedge blocks are
  343. /// identified in the CFG.
  344. void UpdateContinueConstructExitBlocks(
  345. Function& function,
  346. const std::vector<std::pair<uint32_t, uint32_t>>& back_edges) {
  347. auto& constructs = function.constructs();
  348. // TODO(umar): Think of a faster way to do this
  349. for (auto& edge : back_edges) {
  350. uint32_t back_edge_block_id;
  351. uint32_t loop_header_block_id;
  352. std::tie(back_edge_block_id, loop_header_block_id) = edge;
  353. auto is_this_header = [=](Construct& c) {
  354. return c.type() == ConstructType::kLoop &&
  355. c.entry_block()->id() == loop_header_block_id;
  356. };
  357. for (auto construct : constructs) {
  358. if (is_this_header(construct)) {
  359. Construct* continue_construct =
  360. construct.corresponding_constructs().back();
  361. assert(continue_construct->type() == ConstructType::kContinue);
  362. BasicBlock* back_edge_block;
  363. std::tie(back_edge_block, std::ignore) =
  364. function.GetBlock(back_edge_block_id);
  365. continue_construct->set_exit(back_edge_block);
  366. }
  367. }
  368. }
  369. }
  370. std::tuple<std::string, std::string, std::string> ConstructNames(
  371. ConstructType type) {
  372. std::string construct_name, header_name, exit_name;
  373. switch (type) {
  374. case ConstructType::kSelection:
  375. construct_name = "selection";
  376. header_name = "selection header";
  377. exit_name = "merge block";
  378. break;
  379. case ConstructType::kLoop:
  380. construct_name = "loop";
  381. header_name = "loop header";
  382. exit_name = "merge block";
  383. break;
  384. case ConstructType::kContinue:
  385. construct_name = "continue";
  386. header_name = "continue target";
  387. exit_name = "back-edge block";
  388. break;
  389. case ConstructType::kCase:
  390. construct_name = "case";
  391. header_name = "case entry block";
  392. exit_name = "case exit block";
  393. break;
  394. default:
  395. assert(1 == 0 && "Not defined type");
  396. }
  397. return std::make_tuple(construct_name, header_name, exit_name);
  398. }
  399. /// Constructs an error message for construct validation errors
  400. std::string ConstructErrorString(const Construct& construct,
  401. const std::string& header_string,
  402. const std::string& exit_string,
  403. const std::string& dominate_text) {
  404. std::string construct_name, header_name, exit_name;
  405. std::tie(construct_name, header_name, exit_name) =
  406. ConstructNames(construct.type());
  407. // TODO(umar): Add header block for continue constructs to error message
  408. return "The " + construct_name + " construct with the " + header_name + " " +
  409. header_string + " " + dominate_text + " the " + exit_name + " " +
  410. exit_string;
  411. }
// Finds the fall through case construct of |target_block| and records it in
// |case_fall_through|. Returns SPV_ERROR_INVALID_CFG if the case construct
// headed by |target_block| branches to multiple case constructs.
spv_result_t FindCaseFallThrough(
    ValidationState_t& _, BasicBlock* target_block, uint32_t* case_fall_through,
    const Construct& switch_construct,
    const std::unordered_set<uint32_t>& case_targets) {
  const auto* merge = switch_construct.exit_block();
  // Iterative DFS from |target_block|; the case construct's interior is the
  // set of blocks structurally dominated by |target_block|.
  std::vector<BasicBlock*> stack;
  stack.push_back(target_block);
  std::unordered_set<const BasicBlock*> visited;
  bool target_reachable = target_block->structurally_reachable();
  while (!stack.empty()) {
    auto block = stack.back();
    stack.pop_back();
    if (block == merge) continue;
    if (!visited.insert(block).second) continue;
    if (target_reachable && block->structurally_reachable() &&
        target_block->structurally_dominates(*block)) {
      // Still in the case construct.
      for (auto successor : *block->successors()) {
        stack.push_back(successor);
      }
    } else {
      // Exiting the case construct to non-merge block.
      if (!case_targets.count(block->id())) {
        // We have already filtered out the following:
        // * The switch's merge
        // * Other case targets
        // * Blocks in the same case construct
        //
        // So the only remaining valid branches are the structured exits from
        // the overall selection construct of the switch.
        if (switch_construct.IsStructuredExit(_, block)) {
          continue;
        }
        return _.diag(SPV_ERROR_INVALID_CFG, target_block->label())
               << "Case construct that targets "
               << _.getIdName(target_block->id())
               << " has invalid branch to block " << _.getIdName(block->id())
               << " (not another case construct, corresponding merge, outer "
                  "loop merge or outer loop continue)";
      }
      // Record the single permitted fall-through target; a second distinct
      // one is an error.
      if (*case_fall_through == 0u) {
        if (target_block != block) {
          *case_fall_through = block->id();
        }
      } else if (*case_fall_through != block->id()) {
        // Case construct has at most one branch to another case construct.
        return _.diag(SPV_ERROR_INVALID_CFG, target_block->label())
               << "Case construct that targets "
               << _.getIdName(target_block->id())
               << " has branches to multiple other case construct targets "
               << _.getIdName(*case_fall_through) << " and "
               << _.getIdName(block->id());
      }
    }
  }
  return SPV_SUCCESS;
}
// Validates the structured rules for a switch construct: the header must
// structurally dominate every case construct, fall-through is allowed only to
// the immediately following target in the OpSwitch operand list, and each
// case construct may be the fall-through target of at most one other case.
spv_result_t StructuredSwitchChecks(ValidationState_t& _, Function* function,
                                    const Construct& switch_construct) {
  const auto* header = switch_construct.entry_block();
  const auto* merge = switch_construct.exit_block();
  const auto* switch_inst = header->terminator();
  // OpSwitch operands: [0] = selector, [1] = default label, then alternating
  // (literal, target label) pairs — so labels sit at odd operand indices.
  std::unordered_set<uint32_t> case_targets;
  for (uint32_t i = 1; i < switch_inst->operands().size(); i += 2) {
    uint32_t target = switch_inst->GetOperandAs<uint32_t>(i);
    if (target != merge->id()) case_targets.insert(target);
  }
  // Tracks how many times each case construct is targeted by another case
  // construct.
  std::map<uint32_t, uint32_t> num_fall_through_targeted;
  uint32_t default_case_fall_through = 0u;
  uint32_t default_target = switch_inst->GetOperandAs<uint32_t>(1u);
  // Detect whether the default label also appears as a non-default target.
  bool default_appears_multiple_times = false;
  for (uint32_t i = 3; i < switch_inst->operands().size(); i += 2) {
    if (default_target == switch_inst->GetOperandAs<uint32_t>(i)) {
      default_appears_multiple_times = true;
      break;
    }
  }
  // Memoizes each target's computed fall-through so repeated labels in the
  // operand list are only analyzed once.
  std::unordered_map<uint32_t, uint32_t> seen_to_fall_through;
  for (uint32_t i = 1; i < switch_inst->operands().size(); i += 2) {
    uint32_t target = switch_inst->GetOperandAs<uint32_t>(i);
    if (target == merge->id()) continue;
    uint32_t case_fall_through = 0u;
    auto seen_iter = seen_to_fall_through.find(target);
    if (seen_iter == seen_to_fall_through.end()) {
      const auto target_block = function->GetBlock(target).first;
      // OpSwitch must dominate all its case constructs.
      if (header->structurally_reachable() &&
          target_block->structurally_reachable() &&
          !header->structurally_dominates(*target_block)) {
        return _.diag(SPV_ERROR_INVALID_CFG, header->label())
               << "Switch header " << _.getIdName(header->id())
               << " does not structurally dominate its case construct "
               << _.getIdName(target);
      }
      if (auto error = FindCaseFallThrough(_, target_block, &case_fall_through,
                                           switch_construct, case_targets)) {
        return error;
      }
      // Track how many time the fall through case has been targeted.
      if (case_fall_through != 0u) {
        auto where = num_fall_through_targeted.lower_bound(case_fall_through);
        if (where == num_fall_through_targeted.end() ||
            where->first != case_fall_through) {
          num_fall_through_targeted.insert(
              where, std::make_pair(case_fall_through, 1));
        } else {
          where->second++;
        }
      }
      seen_to_fall_through.insert(std::make_pair(target, case_fall_through));
    } else {
      case_fall_through = seen_iter->second;
    }
    // Falling through to a unique default is treated as falling through to
    // whatever the default itself falls through to.
    if (case_fall_through == default_target &&
        !default_appears_multiple_times) {
      case_fall_through = default_case_fall_through;
    }
    if (case_fall_through != 0u) {
      bool is_default = i == 1;
      if (is_default) {
        default_case_fall_through = case_fall_through;
      } else {
        // Allow code like:
        // case x:
        // case y:
        // ...
        // case z:
        //
        // Where x and y target the same block and fall through to z.
        uint32_t j = i;
        while ((j + 2 < switch_inst->operands().size()) &&
               target == switch_inst->GetOperandAs<uint32_t>(j + 2)) {
          j += 2;
        }
        // If Target T1 branches to Target T2, or if Target T1 branches to the
        // Default target and the Default target branches to Target T2, then T1
        // must immediately precede T2 in the list of OpSwitch Target operands.
        if ((switch_inst->operands().size() < j + 2) ||
            (case_fall_through != switch_inst->GetOperandAs<uint32_t>(j + 2))) {
          return _.diag(SPV_ERROR_INVALID_CFG, switch_inst)
                 << "Case construct that targets " << _.getIdName(target)
                 << " has branches to the case construct that targets "
                 << _.getIdName(case_fall_through)
                 << ", but does not immediately precede it in the "
                    "OpSwitch's target list";
        }
      }
    }
  }
  // Each case construct must be branched to by at most one other case
  // construct.
  for (const auto& pair : num_fall_through_targeted) {
    if (pair.second > 1) {
      return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(pair.first))
             << "Multiple case constructs have branches to the case construct "
                "that targets "
             << _.getIdName(pair.first);
    }
  }
  return SPV_SUCCESS;
}
// Validates that all CFG divergences (i.e. conditional branch or switch) are
// structured correctly. Either divergence is preceded by a merge instruction
// or the divergence introduces at most one unseen label.
spv_result_t ValidateStructuredSelections(
    ValidationState_t& _, const std::vector<const BasicBlock*>& postorder) {
  // Labels already introduced by an earlier merge/branch in reverse-postorder
  // traversal.
  std::unordered_set<uint32_t> seen;
  for (auto iter = postorder.rbegin(); iter != postorder.rend(); ++iter) {
    const auto* block = *iter;
    const auto* terminator = block->terminator();
    if (!terminator) continue;
    // Instructions are stored contiguously in ordered_instructions(), so the
    // instruction immediately preceding the terminator is the candidate
    // merge instruction.
    const auto index = terminator - &_.ordered_instructions()[0];
    auto* merge = &_.ordered_instructions()[index - 1];
    // Marks merges and continues as seen.
    if (merge->opcode() == spv::Op::OpSelectionMerge) {
      seen.insert(merge->GetOperandAs<uint32_t>(0));
    } else if (merge->opcode() == spv::Op::OpLoopMerge) {
      seen.insert(merge->GetOperandAs<uint32_t>(0));
      seen.insert(merge->GetOperandAs<uint32_t>(1));
    } else {
      // Only track the pointer if it is a merge instruction.
      merge = nullptr;
    }
    // Skip unreachable blocks.
    if (!block->structurally_reachable()) continue;
    if (terminator->opcode() == spv::Op::OpBranchConditional) {
      const auto true_label = terminator->GetOperandAs<uint32_t>(1);
      const auto false_label = terminator->GetOperandAs<uint32_t>(2);
      // Mark the upcoming blocks as seen now, but only error out if this block
      // was missing a merge instruction and both labels hadn't been seen
      // previously.
      const bool true_label_unseen = seen.insert(true_label).second;
      const bool false_label_unseen = seen.insert(false_label).second;
      if ((!merge || merge->opcode() == spv::Op::OpLoopMerge) &&
          true_label_unseen && false_label_unseen) {
        return _.diag(SPV_ERROR_INVALID_CFG, terminator)
               << "Selection must be structured";
      }
    } else if (terminator->opcode() == spv::Op::OpSwitch) {
      if (!merge) {
        return _.diag(SPV_ERROR_INVALID_CFG, terminator)
               << "OpSwitch must be preceded by an OpSelectionMerge "
                  "instruction";
      }
      // Mark the targets as seen.
      for (uint32_t i = 1; i < terminator->operands().size(); i += 2) {
        const auto target = terminator->GetOperandAs<uint32_t>(i);
        seen.insert(target);
      }
    }
  }
  return SPV_SUCCESS;
}
// Validates the structured control-flow rules for |function|.
//
// |back_edges| holds (latch block id, loop header id) pairs discovered on the
// structural CFG.  |postorder| is a structural-CFG postorder of the blocks,
// forwarded to ValidateStructuredSelections.
//
// Checks, in order: every back edge targets a loop header; each structurally
// reachable loop header has exactly one latch block; and for every construct:
// the header structurally dominates the merge, merge blocks are strictly
// dominated, continue targets post-dominate their headers, all construct
// exits are structured, non-header blocks are entered only from inside the
// construct, nested headers keep their merge block inside the enclosing
// construct, continue targets are entered only from inside the loop, and
// OpSwitch case constructs satisfy StructuredSwitchChecks.
spv_result_t StructuredControlFlowChecks(
    ValidationState_t& _, Function* function,
    const std::vector<std::pair<uint32_t, uint32_t>>& back_edges,
    const std::vector<const BasicBlock*>& postorder) {
  /// Check all backedges target only loop headers and have exactly one
  /// back-edge branching to it

  // Map a loop header to blocks with back-edges to the loop header.
  std::map<uint32_t, std::unordered_set<uint32_t>> loop_latch_blocks;
  for (auto back_edge : back_edges) {
    uint32_t back_edge_block;
    uint32_t header_block;
    std::tie(back_edge_block, header_block) = back_edge;
    if (!function->IsBlockType(header_block, kBlockTypeLoop)) {
      return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(back_edge_block))
             << "Back-edges (" << _.getIdName(back_edge_block) << " -> "
             << _.getIdName(header_block)
             << ") can only be formed between a block and a loop header.";
    }
    loop_latch_blocks[header_block].insert(back_edge_block);
  }

  // Check the loop headers have exactly one back-edge branching to it
  for (BasicBlock* loop_header : function->ordered_blocks()) {
    if (!loop_header->structurally_reachable()) continue;
    if (!loop_header->is_type(kBlockTypeLoop)) continue;
    auto loop_header_id = loop_header->id();
    auto num_latch_blocks = loop_latch_blocks[loop_header_id].size();
    if (num_latch_blocks != 1) {
      return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(loop_header_id))
             << "Loop header " << _.getIdName(loop_header_id)
             << " is targeted by " << num_latch_blocks
             << " back-edge blocks but the standard requires exactly one";
    }
  }

  // Check construct rules
  for (const Construct& construct : function->constructs()) {
    auto header = construct.entry_block();
    if (!header->structurally_reachable()) continue;
    auto merge = construct.exit_block();

    // Every construct must have recorded an exit block by now; a missing one
    // is a validator-internal inconsistency, not a module error.
    if (!merge) {
      std::string construct_name, header_name, exit_name;
      std::tie(construct_name, header_name, exit_name) =
          ConstructNames(construct.type());
      return _.diag(SPV_ERROR_INTERNAL, _.FindDef(header->id()))
             << "Construct " + construct_name + " with " + header_name + " " +
                    _.getIdName(header->id()) + " does not have a " +
                    exit_name + ". This may be a bug in the validator.";
    }

    // If the header is reachable, the merge is guaranteed to be structurally
    // reachable.
    if (!header->structurally_dominates(*merge)) {
      return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(merge->id()))
             << ConstructErrorString(construct, _.getIdName(header->id()),
                                     _.getIdName(merge->id()),
                                     "does not structurally dominate");
    }

    // If it's really a merge block for a selection or loop, then it must be
    // *strictly* structurally dominated by the header.
    if (construct.ExitBlockIsMergeBlock() && (header == merge)) {
      return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(merge->id()))
             << ConstructErrorString(construct, _.getIdName(header->id()),
                                     _.getIdName(merge->id()),
                                     "does not strictly structurally dominate");
    }

    // Check post-dominance for continue constructs. But dominance and
    // post-dominance only make sense when the construct is reachable.
    if (construct.type() == ConstructType::kContinue) {
      if (!merge->structurally_postdominates(*header)) {
        return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(merge->id()))
               << ConstructErrorString(construct, _.getIdName(header->id()),
                                       _.getIdName(merge->id()),
                                       "is not structurally post dominated by");
      }
    }

    Construct::ConstructBlockSet construct_blocks = construct.blocks(function);
    std::string construct_name, header_name, exit_name;
    std::tie(construct_name, header_name, exit_name) =
        ConstructNames(construct.type());
    for (auto block : construct_blocks) {
      // Check that all exits from the construct are via structured exits.
      for (auto succ : *block->successors()) {
        if (!construct_blocks.count(succ) &&
            !construct.IsStructuredExit(_, succ)) {
          return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(block->id()))
                 << "block <ID> " << _.getIdName(block->id()) << " exits the "
                 << construct_name << " headed by <ID> "
                 << _.getIdName(header->id())
                 << ", but not via a structured exit";
        }
      }
      if (block == header) continue;
      // Check that for all non-header blocks, all predecessors are within this
      // construct.
      for (auto pred : *block->predecessors()) {
        if (pred->structurally_reachable() && !construct_blocks.count(pred)) {
          return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(pred->id()))
                 << "block <ID> " << pred->id() << " branches to the "
                 << construct_name << " construct, but not to the "
                 << header_name << " <ID> " << header->id();
        }
      }

      // A nested header's merge block must itself live inside this construct.
      if (block->is_type(BlockType::kBlockTypeSelection) ||
          block->is_type(BlockType::kBlockTypeLoop)) {
        // The merge instruction, if present, is the instruction immediately
        // preceding the block's terminator in the ordered instruction list.
        size_t index = (block->terminator() - &_.ordered_instructions()[0]) - 1;
        const auto& merge_inst = _.ordered_instructions()[index];
        if (merge_inst.opcode() == spv::Op::OpSelectionMerge ||
            merge_inst.opcode() == spv::Op::OpLoopMerge) {
          uint32_t merge_id = merge_inst.GetOperandAs<uint32_t>(0);
          auto merge_block = function->GetBlock(merge_id).first;
          if (merge_block->structurally_reachable() &&
              !construct_blocks.count(merge_block)) {
            return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(block->id()))
                   << "Header block " << _.getIdName(block->id())
                   << " is contained in the " << construct_name
                   << " construct headed by " << _.getIdName(header->id())
                   << ", but its merge block " << _.getIdName(merge_id)
                   << " is not";
          }
        }
      }
    }

    if (construct.type() == ConstructType::kLoop) {
      // If the continue target differs from the loop header, then check that
      // all edges into the continue construct come from within the loop.
      const auto index = header->terminator() - &_.ordered_instructions()[0];
      const auto& merge_inst = _.ordered_instructions()[index - 1];
      const auto continue_id = merge_inst.GetOperandAs<uint32_t>(1);
      const auto* continue_inst = _.FindDef(continue_id);
      // OpLabel instructions aren't stored as part of the basic block for
      // legacy reasons. Grab the next instruction and use its block pointer
      // instead.
      const auto next_index =
          (continue_inst - &_.ordered_instructions()[0]) + 1;
      const auto& next_inst = _.ordered_instructions()[next_index];
      const auto* continue_target = next_inst.block();
      if (header->id() != continue_id) {
        for (auto pred : *continue_target->predecessors()) {
          if (!pred->structurally_reachable()) {
            continue;
          }
          // Ignore back-edges from within the continue construct.
          bool is_back_edge = false;
          for (auto back_edge : back_edges) {
            uint32_t back_edge_block;
            uint32_t header_block;
            std::tie(back_edge_block, header_block) = back_edge;
            if (header_block == continue_id && back_edge_block == pred->id())
              is_back_edge = true;
          }
          if (!construct_blocks.count(pred) && !is_back_edge) {
            return _.diag(SPV_ERROR_INVALID_CFG, pred->terminator())
                   << "Block " << _.getIdName(pred->id())
                   << " branches to the loop continue target "
                   << _.getIdName(continue_id)
                   << ", but is not contained in the associated loop construct "
                   << _.getIdName(header->id());
          }
        }
      }
    }

    // Checks rules for case constructs.
    if (construct.type() == ConstructType::kSelection &&
        header->terminator()->opcode() == spv::Op::OpSwitch) {
      if (auto error = StructuredSwitchChecks(_, function, construct)) {
        return error;
      }
    }
  }

  if (auto error = ValidateStructuredSelections(_, postorder)) {
    return error;
  }
  return SPV_SUCCESS;
}
  802. spv_result_t MaximalReconvergenceChecks(ValidationState_t& _) {
  803. // Find all the entry points with the MaximallyReconvergencesKHR execution
  804. // mode.
  805. std::unordered_set<uint32_t> maximal_funcs;
  806. std::unordered_set<uint32_t> maximal_entry_points;
  807. for (auto entry_point : _.entry_points()) {
  808. const auto* exec_modes = _.GetExecutionModes(entry_point);
  809. if (exec_modes &&
  810. exec_modes->count(spv::ExecutionMode::MaximallyReconvergesKHR)) {
  811. maximal_entry_points.insert(entry_point);
  812. maximal_funcs.insert(entry_point);
  813. }
  814. }
  815. if (maximal_entry_points.empty()) {
  816. return SPV_SUCCESS;
  817. }
  818. // Find all the functions reachable from a maximal reconvergence entry point.
  819. for (const auto& func : _.functions()) {
  820. const auto& entry_points = _.EntryPointReferences(func.id());
  821. for (auto id : entry_points) {
  822. if (maximal_entry_points.count(id)) {
  823. maximal_funcs.insert(func.id());
  824. break;
  825. }
  826. }
  827. }
  828. // Check for conditional branches with the same true and false targets.
  829. for (const auto& inst : _.ordered_instructions()) {
  830. if (inst.opcode() == spv::Op::OpBranchConditional) {
  831. const auto true_id = inst.GetOperandAs<uint32_t>(1);
  832. const auto false_id = inst.GetOperandAs<uint32_t>(2);
  833. if (true_id == false_id && maximal_funcs.count(inst.function()->id())) {
  834. return _.diag(SPV_ERROR_INVALID_ID, &inst)
  835. << "In entry points using the MaximallyReconvergesKHR execution "
  836. "mode, True Label and False Label must be different labels";
  837. }
  838. }
  839. }
  840. // Check for invalid multiple predecessors. Only loop headers, continue
  841. // targets, merge targets or switch targets or defaults may have multiple
  842. // unique predecessors.
  843. for (const auto& func : _.functions()) {
  844. if (!maximal_funcs.count(func.id())) continue;
  845. for (const auto* block : func.ordered_blocks()) {
  846. std::unordered_set<uint32_t> unique_preds;
  847. const auto* preds = block->predecessors();
  848. if (!preds) continue;
  849. for (const auto* pred : *preds) {
  850. unique_preds.insert(pred->id());
  851. }
  852. if (unique_preds.size() < 2) continue;
  853. const auto* terminator = block->terminator();
  854. const auto index = terminator - &_.ordered_instructions()[0];
  855. const auto* pre_terminator = &_.ordered_instructions()[index - 1];
  856. if (pre_terminator->opcode() == spv::Op::OpLoopMerge) continue;
  857. const auto* label = _.FindDef(block->id());
  858. bool ok = false;
  859. for (const auto& pair : label->uses()) {
  860. const auto* use_inst = pair.first;
  861. switch (use_inst->opcode()) {
  862. case spv::Op::OpSelectionMerge:
  863. case spv::Op::OpLoopMerge:
  864. case spv::Op::OpSwitch:
  865. ok = true;
  866. break;
  867. default:
  868. break;
  869. }
  870. }
  871. if (!ok) {
  872. return _.diag(SPV_ERROR_INVALID_CFG, label)
  873. << "In entry points using the MaximallyReconvergesKHR "
  874. "execution mode, this basic block must not have multiple "
  875. "unique predecessors";
  876. }
  877. }
  878. }
  879. return SPV_SUCCESS;
  880. }
  881. spv_result_t PerformCfgChecks(ValidationState_t& _) {
  882. for (auto& function : _.functions()) {
  883. // Check all referenced blocks are defined within a function
  884. if (function.undefined_block_count() != 0) {
  885. std::string undef_blocks("{");
  886. bool first = true;
  887. for (auto undefined_block : function.undefined_blocks()) {
  888. undef_blocks += _.getIdName(undefined_block);
  889. if (!first) {
  890. undef_blocks += " ";
  891. }
  892. first = false;
  893. }
  894. return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(function.id()))
  895. << "Block(s) " << undef_blocks << "}"
  896. << " are referenced but not defined in function "
  897. << _.getIdName(function.id());
  898. }
  899. // Set each block's immediate dominator.
  900. //
  901. // We want to analyze all the blocks in the function, even in degenerate
  902. // control flow cases including unreachable blocks. So use the augmented
  903. // CFG to ensure we cover all the blocks.
  904. std::vector<const BasicBlock*> postorder;
  905. auto ignore_block = [](const BasicBlock*) {};
  906. auto no_terminal_blocks = [](const BasicBlock*) { return false; };
  907. if (!function.ordered_blocks().empty()) {
  908. /// calculate dominators
  909. CFA<BasicBlock>::DepthFirstTraversal(
  910. function.first_block(), function.AugmentedCFGSuccessorsFunction(),
  911. ignore_block, [&](const BasicBlock* b) { postorder.push_back(b); },
  912. no_terminal_blocks);
  913. auto edges = CFA<BasicBlock>::CalculateDominators(
  914. postorder, function.AugmentedCFGPredecessorsFunction());
  915. for (auto edge : edges) {
  916. if (edge.first != edge.second)
  917. edge.first->SetImmediateDominator(edge.second);
  918. }
  919. }
  920. auto& blocks = function.ordered_blocks();
  921. if (!blocks.empty()) {
  922. // Check if the order of blocks in the binary appear before the blocks
  923. // they dominate
  924. for (auto block = begin(blocks) + 1; block != end(blocks); ++block) {
  925. if (auto idom = (*block)->immediate_dominator()) {
  926. if (idom != function.pseudo_entry_block() &&
  927. block == std::find(begin(blocks), block, idom)) {
  928. return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(idom->id()))
  929. << "Block " << _.getIdName((*block)->id())
  930. << " appears in the binary before its dominator "
  931. << _.getIdName(idom->id());
  932. }
  933. }
  934. }
  935. // If we have structured control flow, check that no block has a control
  936. // flow nesting depth larger than the limit.
  937. if (_.HasCapability(spv::Capability::Shader)) {
  938. const int control_flow_nesting_depth_limit =
  939. _.options()->universal_limits_.max_control_flow_nesting_depth;
  940. for (auto block = begin(blocks); block != end(blocks); ++block) {
  941. if (function.GetBlockDepth(*block) >
  942. control_flow_nesting_depth_limit) {
  943. return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef((*block)->id()))
  944. << "Maximum Control Flow nesting depth exceeded.";
  945. }
  946. }
  947. }
  948. }
  949. /// Structured control flow checks are only required for shader capabilities
  950. if (_.HasCapability(spv::Capability::Shader)) {
  951. // Calculate structural dominance.
  952. postorder.clear();
  953. std::vector<const BasicBlock*> postdom_postorder;
  954. std::vector<std::pair<uint32_t, uint32_t>> back_edges;
  955. if (!function.ordered_blocks().empty()) {
  956. /// calculate dominators
  957. CFA<BasicBlock>::DepthFirstTraversal(
  958. function.first_block(),
  959. function.AugmentedStructuralCFGSuccessorsFunction(), ignore_block,
  960. [&](const BasicBlock* b) { postorder.push_back(b); },
  961. no_terminal_blocks);
  962. auto edges = CFA<BasicBlock>::CalculateDominators(
  963. postorder, function.AugmentedStructuralCFGPredecessorsFunction());
  964. for (auto edge : edges) {
  965. if (edge.first != edge.second)
  966. edge.first->SetImmediateStructuralDominator(edge.second);
  967. }
  968. /// calculate post dominators
  969. CFA<BasicBlock>::DepthFirstTraversal(
  970. function.pseudo_exit_block(),
  971. function.AugmentedStructuralCFGPredecessorsFunction(), ignore_block,
  972. [&](const BasicBlock* b) { postdom_postorder.push_back(b); },
  973. no_terminal_blocks);
  974. auto postdom_edges = CFA<BasicBlock>::CalculateDominators(
  975. postdom_postorder,
  976. function.AugmentedStructuralCFGSuccessorsFunction());
  977. for (auto edge : postdom_edges) {
  978. edge.first->SetImmediateStructuralPostDominator(edge.second);
  979. }
  980. /// calculate back edges.
  981. CFA<BasicBlock>::DepthFirstTraversal(
  982. function.pseudo_entry_block(),
  983. function.AugmentedStructuralCFGSuccessorsFunction(), ignore_block,
  984. ignore_block,
  985. [&](const BasicBlock* from, const BasicBlock* to) {
  986. // A back edge must be a real edge. Since the augmented successors
  987. // contain structural edges, filter those from consideration.
  988. for (const auto* succ : *(from->successors())) {
  989. if (succ == to) back_edges.emplace_back(from->id(), to->id());
  990. }
  991. },
  992. no_terminal_blocks);
  993. }
  994. UpdateContinueConstructExitBlocks(function, back_edges);
  995. if (auto error =
  996. StructuredControlFlowChecks(_, &function, back_edges, postorder))
  997. return error;
  998. }
  999. }
  1000. if (auto error = MaximalReconvergenceChecks(_)) {
  1001. return error;
  1002. }
  1003. return SPV_SUCCESS;
  1004. }
// Per-instruction CFG construction pass.  As instructions stream by, this
// registers basic blocks, merge declarations, and block terminators with the
// current function, and records execution-model limitations implied by some
// terminators (OpKill, OpTerminateInvocation, ...).
spv_result_t CfgPass(ValidationState_t& _, const Instruction* inst) {
  spv::Op opcode = inst->opcode();
  switch (opcode) {
    case spv::Op::OpLabel:
      // Starts a new basic block in the current function.
      if (auto error = _.current_function().RegisterBlock(inst->id()))
        return error;

      // TODO(github:1661) This should be done in the
      // ValidationState::RegisterInstruction method but because of the order of
      // passes the OpLabel ends up not being part of the basic block it starts.
      _.current_function().current_block()->set_label(inst);
      break;
    case spv::Op::OpLoopMerge: {
      // Operand 0: merge block, operand 1: continue target.
      uint32_t merge_block = inst->GetOperandAs<uint32_t>(0);
      uint32_t continue_block = inst->GetOperandAs<uint32_t>(1);
      CFG_ASSERT(MergeBlockAssert, merge_block);

      if (auto error = _.current_function().RegisterLoopMerge(merge_block,
                                                              continue_block))
        return error;
    } break;
    case spv::Op::OpSelectionMerge: {
      uint32_t merge_block = inst->GetOperandAs<uint32_t>(0);
      CFG_ASSERT(MergeBlockAssert, merge_block);

      if (auto error = _.current_function().RegisterSelectionMerge(merge_block))
        return error;
    } break;
    case spv::Op::OpBranch: {
      // Unconditional branch: ends the block with a single successor.
      uint32_t target = inst->GetOperandAs<uint32_t>(0);
      CFG_ASSERT(FirstBlockAssert, target);

      _.current_function().RegisterBlockEnd({target});
    } break;
    case spv::Op::OpBranchConditional: {
      // Operand 0 is the condition; operands 1 and 2 are the targets.
      uint32_t tlabel = inst->GetOperandAs<uint32_t>(1);
      uint32_t flabel = inst->GetOperandAs<uint32_t>(2);
      CFG_ASSERT(FirstBlockAssert, tlabel);
      CFG_ASSERT(FirstBlockAssert, flabel);

      _.current_function().RegisterBlockEnd({tlabel, flabel});
    } break;

    case spv::Op::OpSwitch: {
      // Operand 0 is the selector, operand 1 the default label; after that,
      // case labels sit at every later odd index ((literal, label) pairs).
      std::vector<uint32_t> cases;
      for (size_t i = 1; i < inst->operands().size(); i += 2) {
        uint32_t target = inst->GetOperandAs<uint32_t>(i);
        CFG_ASSERT(FirstBlockAssert, target);
        cases.push_back(target);
      }
      _.current_function().RegisterBlockEnd({cases});
    } break;
    case spv::Op::OpReturn: {
      // OpReturn (without a value) is only valid in a void function.
      const uint32_t return_type = _.current_function().GetResultTypeId();
      const Instruction* return_type_inst = _.FindDef(return_type);
      assert(return_type_inst);
      if (return_type_inst->opcode() != spv::Op::OpTypeVoid)
        return _.diag(SPV_ERROR_INVALID_CFG, inst)
               << "OpReturn can only be called from a function with void "
               << "return type.";
      _.current_function().RegisterBlockEnd(std::vector<uint32_t>());
      break;
    }
    case spv::Op::OpKill:
    case spv::Op::OpReturnValue:
    case spv::Op::OpUnreachable:
    case spv::Op::OpTerminateInvocation:
    case spv::Op::OpIgnoreIntersectionKHR:
    case spv::Op::OpTerminateRayKHR:
    case spv::Op::OpEmitMeshTasksEXT:
      // Terminators with no successors: the block simply ends here.
      _.current_function().RegisterBlockEnd(std::vector<uint32_t>());
      // Ops with dedicated passes check for the Execution Model there
      if (opcode == spv::Op::OpKill) {
        _.current_function().RegisterExecutionModelLimitation(
            spv::ExecutionModel::Fragment,
            "OpKill requires Fragment execution model");
      }
      if (opcode == spv::Op::OpTerminateInvocation) {
        _.current_function().RegisterExecutionModelLimitation(
            spv::ExecutionModel::Fragment,
            "OpTerminateInvocation requires Fragment execution model");
      }
      if (opcode == spv::Op::OpIgnoreIntersectionKHR) {
        _.current_function().RegisterExecutionModelLimitation(
            spv::ExecutionModel::AnyHitKHR,
            "OpIgnoreIntersectionKHR requires AnyHitKHR execution model");
      }
      if (opcode == spv::Op::OpTerminateRayKHR) {
        _.current_function().RegisterExecutionModelLimitation(
            spv::ExecutionModel::AnyHitKHR,
            "OpTerminateRayKHR requires AnyHitKHR execution model");
      }
      break;
    default:
      break;
  }
  return SPV_SUCCESS;
}
  1097. void ReachabilityPass(ValidationState_t& _) {
  1098. for (auto& f : _.functions()) {
  1099. std::vector<BasicBlock*> stack;
  1100. auto entry = f.first_block();
  1101. // Skip function declarations.
  1102. if (entry) stack.push_back(entry);
  1103. while (!stack.empty()) {
  1104. auto block = stack.back();
  1105. stack.pop_back();
  1106. if (block->reachable()) continue;
  1107. block->set_reachable(true);
  1108. for (auto succ : *block->successors()) {
  1109. stack.push_back(succ);
  1110. }
  1111. }
  1112. }
  1113. // Repeat for structural reachability.
  1114. for (auto& f : _.functions()) {
  1115. std::vector<BasicBlock*> stack;
  1116. auto entry = f.first_block();
  1117. // Skip function declarations.
  1118. if (entry) stack.push_back(entry);
  1119. while (!stack.empty()) {
  1120. auto block = stack.back();
  1121. stack.pop_back();
  1122. if (block->structurally_reachable()) continue;
  1123. block->set_structurally_reachable(true);
  1124. for (auto succ : *block->structural_successors()) {
  1125. stack.push_back(succ);
  1126. }
  1127. }
  1128. }
  1129. }
  1130. spv_result_t ControlFlowPass(ValidationState_t& _, const Instruction* inst) {
  1131. switch (inst->opcode()) {
  1132. case spv::Op::OpPhi:
  1133. if (auto error = ValidatePhi(_, inst)) return error;
  1134. break;
  1135. case spv::Op::OpBranch:
  1136. if (auto error = ValidateBranch(_, inst)) return error;
  1137. break;
  1138. case spv::Op::OpBranchConditional:
  1139. if (auto error = ValidateBranchConditional(_, inst)) return error;
  1140. break;
  1141. case spv::Op::OpReturnValue:
  1142. if (auto error = ValidateReturnValue(_, inst)) return error;
  1143. break;
  1144. case spv::Op::OpSwitch:
  1145. if (auto error = ValidateSwitch(_, inst)) return error;
  1146. break;
  1147. case spv::Op::OpLoopMerge:
  1148. if (auto error = ValidateLoopMerge(_, inst)) return error;
  1149. break;
  1150. default:
  1151. break;
  1152. }
  1153. return SPV_SUCCESS;
  1154. }
  1155. } // namespace val
  1156. } // namespace spvtools