// validate_cfg.cpp

// Copyright (c) 2015-2016 The Khronos Group Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include <algorithm>
#include <cassert>
#include <functional>
#include <iostream>
#include <iterator>
#include <map>
#include <string>
#include <tuple>
#include <unordered_map>
#include <unordered_set>
#include <utility>
#include <vector>

#include "source/cfa.h"
#include "source/opcode.h"
#include "source/spirv_target_env.h"
#include "source/spirv_validator_options.h"
#include "source/val/basic_block.h"
#include "source/val/construct.h"
#include "source/val/function.h"
#include "source/val/validate.h"
#include "source/val/validation_state.h"

namespace spvtools {
namespace val {
namespace {
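
// Validates an OpPhi instruction: its result type and that each of its
// (value, basic block) operand pairs matches that type and one of the
// enclosing block's predecessors.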
spv_result_t ValidatePhi(ValidationState_t& _, const Instruction* inst) {
  auto block = inst->block();
  size_t num_in_ops = inst->words().size() - 3;
  if (num_in_ops % 2 != 0) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "OpPhi does not have an equal number of incoming values and "
              "basic blocks.";
  }

  if (_.IsVoidType(inst->type_id())) {
    return _.diag(SPV_ERROR_INVALID_DATA, inst)
           << "OpPhi must not have void result type";
  }
  if (_.IsPointerType(inst->type_id()) &&
      _.addressing_model() == SpvAddressingModelLogical) {
    if (!_.features().variable_pointers &&
        !_.features().variable_pointers_storage_buffer) {
      return _.diag(SPV_ERROR_INVALID_DATA, inst)
             << "Using pointers with OpPhi requires capability "
             << "VariablePointers or VariablePointersStorageBuffer";
    }
  }

  const Instruction* type_inst = _.FindDef(inst->type_id());
  assert(type_inst);
  const SpvOp type_opcode = type_inst->opcode();
  if (!_.options()->before_hlsl_legalization) {
    if (type_opcode == SpvOpTypeSampledImage ||
        (_.HasCapability(SpvCapabilityShader) &&
         (type_opcode == SpvOpTypeImage || type_opcode == SpvOpTypeSampler))) {
      return _.diag(SPV_ERROR_INVALID_ID, inst)
             << "Result type cannot be Op" << spvOpcodeString(type_opcode);
    }
  }

  // Create a uniqued vector of predecessor ids for comparison against
  // incoming values. OpBranchConditional %cond %label %label produces two
  // predecessors in the CFG.
  std::vector<uint32_t> pred_ids;
  std::transform(block->predecessors()->begin(), block->predecessors()->end(),
                 std::back_inserter(pred_ids),
                 [](const BasicBlock* b) { return b->id(); });
  std::sort(pred_ids.begin(), pred_ids.end());
  pred_ids.erase(std::unique(pred_ids.begin(), pred_ids.end()), pred_ids.end());

  size_t num_edges = num_in_ops / 2;
  if (num_edges != pred_ids.size()) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "OpPhi's number of incoming blocks (" << num_edges
           << ") does not match block's predecessor count ("
           << block->predecessors()->size() << ").";
  }

  std::unordered_set<uint32_t> observed_predecessors;

  for (size_t i = 3; i < inst->words().size(); ++i) {
    auto inc_id = inst->word(i);
    if (i % 2 == 1) {
      // Incoming value type must match the phi result type.
      auto inc_type_id = _.GetTypeId(inc_id);
      if (inst->type_id() != inc_type_id) {
        return _.diag(SPV_ERROR_INVALID_ID, inst)
               << "OpPhi's result type <id> " << _.getIdName(inst->type_id())
               << " does not match incoming value <id> " << _.getIdName(inc_id)
               << " type <id> " << _.getIdName(inc_type_id) << ".";
      }
    } else {
      if (_.GetIdOpcode(inc_id) != SpvOpLabel) {
        return _.diag(SPV_ERROR_INVALID_ID, inst)
               << "OpPhi's incoming basic block <id> " << _.getIdName(inc_id)
               << " is not an OpLabel.";
      }

      // Incoming basic block must be an immediate predecessor of the phi's
      // block.
      if (!std::binary_search(pred_ids.begin(), pred_ids.end(), inc_id)) {
        return _.diag(SPV_ERROR_INVALID_ID, inst)
               << "OpPhi's incoming basic block <id> " << _.getIdName(inc_id)
               << " is not a predecessor of <id> " << _.getIdName(block->id())
               << ".";
      }

      // We must not have already seen this predecessor as one of the phi's
      // operands.
      if (observed_predecessors.count(inc_id) != 0) {
        return _.diag(SPV_ERROR_INVALID_ID, inst)
               << "OpPhi references incoming basic block <id> "
               << _.getIdName(inc_id) << " multiple times.";
      }

      // Note the fact that we have now observed this predecessor.
      observed_predecessors.insert(inc_id);
    }
  }

  return SPV_SUCCESS;
}
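
// Validates that the 'Target Label' operand of OpBranch names an OpLabel.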
spv_result_t ValidateBranch(ValidationState_t& _, const Instruction* inst) {
  // target operands must be OpLabel
  const auto id = inst->GetOperandAs<uint32_t>(0);
  const auto target = _.FindDef(id);
  if (!target || SpvOpLabel != target->opcode()) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "'Target Label' operands for OpBranch must be the ID "
              "of an OpLabel instruction";
  }

  return SPV_SUCCESS;
}
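
// Validates OpBranchConditional: its operand count, its boolean condition,
// and that both target operands name OpLabel instructions.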
spv_result_t ValidateBranchConditional(ValidationState_t& _,
                                       const Instruction* inst) {
  // num_operands is either 3 or 5 --- if 5, the last two need to be literal
  // integers
  const auto num_operands = inst->operands().size();
  if (num_operands != 3 && num_operands != 5) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "OpBranchConditional requires either 3 or 5 parameters";
  }

  // grab the condition operand and check that it is a bool
  const auto cond_id = inst->GetOperandAs<uint32_t>(0);
  const auto cond_op = _.FindDef(cond_id);
  if (!cond_op || !cond_op->type_id() ||
      !_.IsBoolScalarType(cond_op->type_id())) {
    return _.diag(SPV_ERROR_INVALID_ID, inst) << "Condition operand for "
                                                 "OpBranchConditional must be "
                                                 "of boolean type";
  }

  // target operands must be OpLabel
  // note that we don't need to check that the target labels are in the same
  // function; PerformCfgChecks already checks for that
  const auto true_id = inst->GetOperandAs<uint32_t>(1);
  const auto true_target = _.FindDef(true_id);
  if (!true_target || SpvOpLabel != true_target->opcode()) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "The 'True Label' operand for OpBranchConditional must be the "
              "ID of an OpLabel instruction";
  }

  const auto false_id = inst->GetOperandAs<uint32_t>(2);
  const auto false_target = _.FindDef(false_id);
  if (!false_target || SpvOpLabel != false_target->opcode()) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "The 'False Label' operand for OpBranchConditional must be the "
              "ID of an OpLabel instruction";
  }

  return SPV_SUCCESS;
}
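
// Validates that every 'Target Label' operand of OpSwitch names an OpLabel.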
spv_result_t ValidateSwitch(ValidationState_t& _, const Instruction* inst) {
  const auto num_operands = inst->operands().size();
  // At least two operands (selector, default); any further operands come in
  // literal/target pairs.
  // target operands must be OpLabel
  for (size_t i = 2; i < num_operands; i += 2) {
    // literal, id
    const auto id = inst->GetOperandAs<uint32_t>(i + 1);
    const auto target = _.FindDef(id);
    if (!target || SpvOpLabel != target->opcode()) {
      return _.diag(SPV_ERROR_INVALID_ID, inst)
             << "'Target Label' operands for OpSwitch must be IDs of an "
                "OpLabel instruction";
    }
  }

  return SPV_SUCCESS;
}
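
// Validates OpReturnValue: the returned <id> must be a non-void value whose
// type matches the enclosing OpFunction's return type and, under the Logical
// addressing model, must not be a pointer unless variable pointers or the
// relaxed-logical-pointer option allow it.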
spv_result_t ValidateReturnValue(ValidationState_t& _,
                                 const Instruction* inst) {
  const auto value_id = inst->GetOperandAs<uint32_t>(0);
  const auto value = _.FindDef(value_id);
  if (!value || !value->type_id()) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "OpReturnValue Value <id> '" << _.getIdName(value_id)
           << "' does not represent a value.";
  }
  auto value_type = _.FindDef(value->type_id());
  if (!value_type || SpvOpTypeVoid == value_type->opcode()) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "OpReturnValue value's type <id> '"
           << _.getIdName(value->type_id()) << "' is missing or void.";
  }

  const bool uses_variable_pointer =
      _.features().variable_pointers ||
      _.features().variable_pointers_storage_buffer;

  if (_.addressing_model() == SpvAddressingModelLogical &&
      SpvOpTypePointer == value_type->opcode() && !uses_variable_pointer &&
      !_.options()->relax_logical_pointer) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "OpReturnValue value's type <id> '"
           << _.getIdName(value->type_id())
           << "' is a pointer, which is invalid in the Logical addressing "
              "model.";
  }

  const auto function = inst->function();
  const auto return_type = _.FindDef(function->GetResultTypeId());
  if (!return_type || return_type->id() != value_type->id()) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "OpReturnValue Value <id> '" << _.getIdName(value_id)
           << "'s type does not match OpFunction's return type.";
  }

  return SPV_SUCCESS;
}
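
// Validates OpLoopMerge: the merge block and continue target must be
// distinct OpLabels, the merge block may not be the block containing the
// OpLoopMerge, and mutually exclusive loop control bits must not be combined.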
spv_result_t ValidateLoopMerge(ValidationState_t& _, const Instruction* inst) {
  const auto merge_id = inst->GetOperandAs<uint32_t>(0);
  const auto merge = _.FindDef(merge_id);
  if (!merge || merge->opcode() != SpvOpLabel) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "Merge Block " << _.getIdName(merge_id) << " must be an OpLabel";
  }
  if (merge_id == inst->block()->id()) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "Merge Block may not be the block containing the OpLoopMerge\n";
  }

  const auto continue_id = inst->GetOperandAs<uint32_t>(1);
  const auto continue_target = _.FindDef(continue_id);
  if (!continue_target || continue_target->opcode() != SpvOpLabel) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "Continue Target " << _.getIdName(continue_id)
           << " must be an OpLabel";
  }

  if (merge_id == continue_id) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "Merge Block and Continue Target must be different ids";
  }

  const auto loop_control = inst->GetOperandAs<uint32_t>(2);
  if ((loop_control >> SpvLoopControlUnrollShift) & 0x1 &&
      (loop_control >> SpvLoopControlDontUnrollShift) & 0x1) {
    return _.diag(SPV_ERROR_INVALID_DATA, inst)
           << "Unroll and DontUnroll loop controls must not both be specified";
  }
  if ((loop_control >> SpvLoopControlDontUnrollShift) & 0x1 &&
      (loop_control >> SpvLoopControlPeelCountShift) & 0x1) {
    return _.diag(SPV_ERROR_INVALID_DATA, inst) << "PeelCount and DontUnroll "
                                                   "loop controls must not "
                                                   "both be specified";
  }
  if ((loop_control >> SpvLoopControlDontUnrollShift) & 0x1 &&
      (loop_control >> SpvLoopControlPartialCountShift) & 0x1) {
    return _.diag(SPV_ERROR_INVALID_DATA, inst) << "PartialCount and "
                                                   "DontUnroll loop controls "
                                                   "must not both be specified";
  }

  uint32_t operand = 3;
  if ((loop_control >> SpvLoopControlDependencyLengthShift) & 0x1) {
    ++operand;
  }
  if ((loop_control >> SpvLoopControlMinIterationsShift) & 0x1) {
    ++operand;
  }
  if ((loop_control >> SpvLoopControlMaxIterationsShift) & 0x1) {
    ++operand;
  }
  if ((loop_control >> SpvLoopControlIterationMultipleShift) & 0x1) {
    if (inst->operands().size() < operand ||
        inst->GetOperandAs<uint32_t>(operand) == 0) {
      return _.diag(SPV_ERROR_INVALID_DATA, inst) << "IterationMultiple loop "
                                                     "control operand must be "
                                                     "greater than zero";
    }
    ++operand;
  }
  if ((loop_control >> SpvLoopControlPeelCountShift) & 0x1) {
    ++operand;
  }
  if ((loop_control >> SpvLoopControlPartialCountShift) & 0x1) {
    ++operand;
  }

  // That the right number of operands is present is checked by the parser. The
  // above code tracks operands for expanded validation checking in the future.

  return SPV_SUCCESS;
}

}  // namespace
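
// Debugging helper that prints the chain of immediate dominators of |b| to
// std::cout.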
void printDominatorList(const BasicBlock& b) {
  std::cout << b.id() << " is dominated by: ";
  const BasicBlock* bb = &b;
  while (bb->immediate_dominator() != bb) {
    bb = bb->immediate_dominator();
    std::cout << bb->id() << " ";
  }
}
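
// Runs |ASSERT_FUNC| on |TARGET| and returns from the enclosing function if
// it reports an error.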
#define CFG_ASSERT(ASSERT_FUNC, TARGET) \
  if (spv_result_t rcode = ASSERT_FUNC(_, TARGET)) return rcode
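
// Reports an error if |target| is the first block of the current function;
// the entry block may not be the target of a branch.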
spv_result_t FirstBlockAssert(ValidationState_t& _, uint32_t target) {
  if (_.current_function().IsFirstBlock(target)) {
    return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(_.current_function().id()))
           << "First block " << _.getIdName(target) << " of function "
           << _.getIdName(_.current_function().id()) << " is targeted by block "
           << _.getIdName(_.current_function().current_block()->id());
  }
  return SPV_SUCCESS;
}
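
// Reports an error if |merge_block| has already been declared as the merge
// block of another header.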
spv_result_t MergeBlockAssert(ValidationState_t& _, uint32_t merge_block) {
  if (_.current_function().IsBlockType(merge_block, kBlockTypeMerge)) {
    return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(_.current_function().id()))
           << "Block " << _.getIdName(merge_block)
           << " is already a merge block for another header";
  }
  return SPV_SUCCESS;
}

/// Update the continue construct's exit blocks once the backedge blocks are
/// identified in the CFG.
void UpdateContinueConstructExitBlocks(
    Function& function,
    const std::vector<std::pair<uint32_t, uint32_t>>& back_edges) {
  auto& constructs = function.constructs();
  // TODO(umar): Think of a faster way to do this
  for (auto& edge : back_edges) {
    uint32_t back_edge_block_id;
    uint32_t loop_header_block_id;
    std::tie(back_edge_block_id, loop_header_block_id) = edge;

    auto is_this_header = [=](Construct& c) {
      return c.type() == ConstructType::kLoop &&
             c.entry_block()->id() == loop_header_block_id;
    };

    for (auto construct : constructs) {
      if (is_this_header(construct)) {
        Construct* continue_construct =
            construct.corresponding_constructs().back();
        assert(continue_construct->type() == ConstructType::kContinue);

        BasicBlock* back_edge_block;
        std::tie(back_edge_block, std::ignore) =
            function.GetBlock(back_edge_block_id);
        continue_construct->set_exit(back_edge_block);
      }
    }
  }
}
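
// Returns the names used in error messages for a construct of the given
// type: the construct itself, its header block, and its exit block.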
std::tuple<std::string, std::string, std::string> ConstructNames(
    ConstructType type) {
  std::string construct_name, header_name, exit_name;

  switch (type) {
    case ConstructType::kSelection:
      construct_name = "selection";
      header_name = "selection header";
      exit_name = "merge block";
      break;
    case ConstructType::kLoop:
      construct_name = "loop";
      header_name = "loop header";
      exit_name = "merge block";
      break;
    case ConstructType::kContinue:
      construct_name = "continue";
      header_name = "continue target";
      exit_name = "back-edge block";
      break;
    case ConstructType::kCase:
      construct_name = "case";
      header_name = "case entry block";
      exit_name = "case exit block";
      break;
    default:
      assert(1 == 0 && "Not defined type");
  }

  return std::make_tuple(construct_name, header_name, exit_name);
}

/// Constructs an error message for construct validation errors
std::string ConstructErrorString(const Construct& construct,
                                 const std::string& header_string,
                                 const std::string& exit_string,
                                 const std::string& dominate_text) {
  std::string construct_name, header_name, exit_name;
  std::tie(construct_name, header_name, exit_name) =
      ConstructNames(construct.type());

  // TODO(umar): Add header block for continue constructs to error message
  return "The " + construct_name + " construct with the " + header_name + " " +
         header_string + " " + dominate_text + " the " + exit_name + " " +
         exit_string;
}

// Finds the fall through case construct of |target_block| and records it in
// |case_fall_through|. Returns SPV_ERROR_INVALID_CFG if the case construct
// headed by |target_block| branches to multiple case constructs.
spv_result_t FindCaseFallThrough(
    ValidationState_t& _, BasicBlock* target_block, uint32_t* case_fall_through,
    const BasicBlock* merge, const std::unordered_set<uint32_t>& case_targets,
    Function* function) {
  std::vector<BasicBlock*> stack;
  stack.push_back(target_block);
  std::unordered_set<const BasicBlock*> visited;
  bool target_reachable = target_block->reachable();
  int target_depth = function->GetBlockDepth(target_block);
  while (!stack.empty()) {
    auto block = stack.back();
    stack.pop_back();

    if (block == merge) continue;

    if (!visited.insert(block).second) continue;

    if (target_reachable && block->reachable() &&
        target_block->dominates(*block)) {
      // Still in the case construct.
      for (auto successor : *block->successors()) {
        stack.push_back(successor);
      }
    } else {
      // Exiting the case construct to non-merge block.
      if (!case_targets.count(block->id())) {
        int depth = function->GetBlockDepth(block);
        if ((depth < target_depth) ||
            (depth == target_depth && block->is_type(kBlockTypeContinue))) {
          continue;
        }

        return _.diag(SPV_ERROR_INVALID_CFG, target_block->label())
               << "Case construct that targets "
               << _.getIdName(target_block->id())
               << " has invalid branch to block " << _.getIdName(block->id())
               << " (not another case construct, corresponding merge, outer "
                  "loop merge or outer loop continue)";
      }

      if (*case_fall_through == 0u) {
        if (target_block != block) {
          *case_fall_through = block->id();
        }
      } else if (*case_fall_through != block->id()) {
        // Case construct has at most one branch to another case construct.
        return _.diag(SPV_ERROR_INVALID_CFG, target_block->label())
               << "Case construct that targets "
               << _.getIdName(target_block->id())
               << " has branches to multiple other case construct targets "
               << _.getIdName(*case_fall_through) << " and "
               << _.getIdName(block->id());
      }
    }
  }

  return SPV_SUCCESS;
}
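
// Validates the structured control flow rules for an OpSwitch: the selection
// header must dominate every case construct, a fall-through branch must go to
// the immediately following target, and each case construct may be targeted
// by at most one other case construct.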
spv_result_t StructuredSwitchChecks(ValidationState_t& _, Function* function,
                                    const Instruction* switch_inst,
                                    const BasicBlock* header,
                                    const BasicBlock* merge) {
  std::unordered_set<uint32_t> case_targets;
  for (uint32_t i = 1; i < switch_inst->operands().size(); i += 2) {
    uint32_t target = switch_inst->GetOperandAs<uint32_t>(i);
    if (target != merge->id()) case_targets.insert(target);
  }
  // Tracks how many times each case construct is targeted by another case
  // construct.
  std::map<uint32_t, uint32_t> num_fall_through_targeted;
  uint32_t default_case_fall_through = 0u;
  uint32_t default_target = switch_inst->GetOperandAs<uint32_t>(1u);
  bool default_appears_multiple_times = false;
  for (uint32_t i = 3; i < switch_inst->operands().size(); i += 2) {
    if (default_target == switch_inst->GetOperandAs<uint32_t>(i)) {
      default_appears_multiple_times = true;
      break;
    }
  }
  std::unordered_map<uint32_t, uint32_t> seen_to_fall_through;
  for (uint32_t i = 1; i < switch_inst->operands().size(); i += 2) {
    uint32_t target = switch_inst->GetOperandAs<uint32_t>(i);
    if (target == merge->id()) continue;

    uint32_t case_fall_through = 0u;
    auto seen_iter = seen_to_fall_through.find(target);
    if (seen_iter == seen_to_fall_through.end()) {
      const auto target_block = function->GetBlock(target).first;
      // OpSwitch must dominate all its case constructs.
      if (header->reachable() && target_block->reachable() &&
          !header->dominates(*target_block)) {
        return _.diag(SPV_ERROR_INVALID_CFG, header->label())
               << "Selection header " << _.getIdName(header->id())
               << " does not dominate its case construct "
               << _.getIdName(target);
      }

      if (auto error = FindCaseFallThrough(_, target_block, &case_fall_through,
                                           merge, case_targets, function)) {
        return error;
      }

      // Track how many times the fall through case has been targeted.
      if (case_fall_through != 0u) {
        auto where = num_fall_through_targeted.lower_bound(case_fall_through);
        if (where == num_fall_through_targeted.end() ||
            where->first != case_fall_through) {
          num_fall_through_targeted.insert(
              where, std::make_pair(case_fall_through, 1));
        } else {
          where->second++;
        }
      }
      seen_to_fall_through.insert(std::make_pair(target, case_fall_through));
    } else {
      case_fall_through = seen_iter->second;
    }

    if (case_fall_through == default_target &&
        !default_appears_multiple_times) {
      case_fall_through = default_case_fall_through;
    }
    if (case_fall_through != 0u) {
      bool is_default = i == 1;
      if (is_default) {
        default_case_fall_through = case_fall_through;
      } else {
        // Allow code like:
        // case x:
        // case y:
        // ...
        // case z:
        //
        // Where x and y target the same block and fall through to z.
        uint32_t j = i;
        while ((j + 2 < switch_inst->operands().size()) &&
               target == switch_inst->GetOperandAs<uint32_t>(j + 2)) {
          j += 2;
        }
        // If Target T1 branches to Target T2, or if Target T1 branches to the
        // Default target and the Default target branches to Target T2, then T1
        // must immediately precede T2 in the list of OpSwitch Target operands.
        if ((switch_inst->operands().size() < j + 2) ||
            (case_fall_through != switch_inst->GetOperandAs<uint32_t>(j + 2))) {
          return _.diag(SPV_ERROR_INVALID_CFG, switch_inst)
                 << "Case construct that targets " << _.getIdName(target)
                 << " has branches to the case construct that targets "
                 << _.getIdName(case_fall_through)
                 << ", but does not immediately precede it in the "
                    "OpSwitch's target list";
        }
      }
    }
  }

  // Each case construct must be branched to by at most one other case
  // construct.
  for (const auto& pair : num_fall_through_targeted) {
    if (pair.second > 1) {
      return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(pair.first))
             << "Multiple case constructs have branches to the case construct "
                "that targets "
             << _.getIdName(pair.first);
    }
  }

  return SPV_SUCCESS;
}

// Validates that all CFG divergences (i.e. conditional branch or switch) are
// structured correctly: either the divergence is preceded by a merge
// instruction, or it introduces at most one unseen label.
spv_result_t ValidateStructuredSelections(
    ValidationState_t& _, const std::vector<const BasicBlock*>& postorder) {
  std::unordered_set<uint32_t> seen;
  for (auto iter = postorder.rbegin(); iter != postorder.rend(); ++iter) {
    const auto* block = *iter;
    const auto* terminator = block->terminator();
    if (!terminator) continue;
    const auto index = terminator - &_.ordered_instructions()[0];
    auto* merge = &_.ordered_instructions()[index - 1];
    // Marks merges and continues as seen.
    if (merge->opcode() == SpvOpSelectionMerge) {
      seen.insert(merge->GetOperandAs<uint32_t>(0));
    } else if (merge->opcode() == SpvOpLoopMerge) {
      seen.insert(merge->GetOperandAs<uint32_t>(0));
      seen.insert(merge->GetOperandAs<uint32_t>(1));
    } else {
      // Only track the pointer if it is a merge instruction.
      merge = nullptr;
    }

    // Skip unreachable blocks.
    if (!block->reachable()) continue;

    if (terminator->opcode() == SpvOpBranchConditional) {
      const auto true_label = terminator->GetOperandAs<uint32_t>(1);
      const auto false_label = terminator->GetOperandAs<uint32_t>(2);
      // Mark the upcoming blocks as seen now, but only error out if this block
      // was missing a merge instruction and both labels hadn't been seen
      // previously.
      const bool both_unseen =
          seen.insert(true_label).second && seen.insert(false_label).second;
      if (!merge && both_unseen) {
        return _.diag(SPV_ERROR_INVALID_CFG, terminator)
               << "Selection must be structured";
      }
    } else if (terminator->opcode() == SpvOpSwitch) {
      uint32_t count = 0;
      // Mark the targets as seen now, but only error out if this block was
      // missing a merge instruction and there were multiple unseen labels.
      for (uint32_t i = 1; i < terminator->operands().size(); i += 2) {
        const auto target = terminator->GetOperandAs<uint32_t>(i);
        if (seen.insert(target).second) {
          count++;
        }
      }

      if (!merge && count > 1) {
        return _.diag(SPV_ERROR_INVALID_CFG, terminator)
               << "Selection must be structured";
      }
    }
  }

  return SPV_SUCCESS;
}
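
// Checks the structured control flow rules for a function: back-edge and
// loop header requirements, dominance and post-dominance of construct exits,
// structured construct exits, and OpSwitch-specific rules.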
spv_result_t StructuredControlFlowChecks(
    ValidationState_t& _, Function* function,
    const std::vector<std::pair<uint32_t, uint32_t>>& back_edges,
    const std::vector<const BasicBlock*>& postorder) {
  /// Check that back-edges only target loop headers, and that each loop
  /// header has exactly one back-edge branching to it.

  // Map a loop header to blocks with back-edges to the loop header.
  std::map<uint32_t, std::unordered_set<uint32_t>> loop_latch_blocks;
  for (auto back_edge : back_edges) {
    uint32_t back_edge_block;
    uint32_t header_block;
    std::tie(back_edge_block, header_block) = back_edge;
    if (!function->IsBlockType(header_block, kBlockTypeLoop)) {
      return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(back_edge_block))
             << "Back-edges (" << _.getIdName(back_edge_block) << " -> "
             << _.getIdName(header_block)
             << ") can only be formed between a block and a loop header.";
    }
    loop_latch_blocks[header_block].insert(back_edge_block);
  }

  // Check that each loop header has exactly one back-edge branching to it.
  for (BasicBlock* loop_header : function->ordered_blocks()) {
    if (!loop_header->reachable()) continue;
    if (!loop_header->is_type(kBlockTypeLoop)) continue;
    auto loop_header_id = loop_header->id();
    auto num_latch_blocks = loop_latch_blocks[loop_header_id].size();
    if (num_latch_blocks != 1) {
      return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(loop_header_id))
             << "Loop header " << _.getIdName(loop_header_id)
             << " is targeted by " << num_latch_blocks
             << " back-edge blocks but the standard requires exactly one";
    }
  }

  // Check construct rules
  for (const Construct& construct : function->constructs()) {
    auto header = construct.entry_block();
    auto merge = construct.exit_block();
    if (header->reachable() && !merge) {
      std::string construct_name, header_name, exit_name;
      std::tie(construct_name, header_name, exit_name) =
          ConstructNames(construct.type());
      return _.diag(SPV_ERROR_INTERNAL, _.FindDef(header->id()))
             << "Construct " + construct_name + " with " + header_name + " " +
                    _.getIdName(header->id()) + " does not have a " +
                    exit_name + ". This may be a bug in the validator.";
    }

    // If the exit block is reachable then it must be dominated by the header.
    if (merge && merge->reachable()) {
      if (!header->dominates(*merge)) {
        return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(merge->id()))
               << ConstructErrorString(construct, _.getIdName(header->id()),
                                       _.getIdName(merge->id()),
                                       "does not dominate");
      }
      // If it's really a merge block for a selection or loop, then it must be
      // *strictly* dominated by the header.
      if (construct.ExitBlockIsMergeBlock() && (header == merge)) {
        return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(merge->id()))
               << ConstructErrorString(construct, _.getIdName(header->id()),
                                       _.getIdName(merge->id()),
                                       "does not strictly dominate");
      }
    }

    // Check post-dominance for continue constructs. But dominance and
    // post-dominance only make sense when the construct is reachable.
    if (header->reachable() && construct.type() == ConstructType::kContinue) {
      if (!merge->postdominates(*header)) {
        return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(merge->id()))
               << ConstructErrorString(construct, _.getIdName(header->id()),
                                       _.getIdName(merge->id()),
                                       "is not post dominated by");
      }
    }

    Construct::ConstructBlockSet construct_blocks = construct.blocks(function);
    std::string construct_name, header_name, exit_name;
    std::tie(construct_name, header_name, exit_name) =
        ConstructNames(construct.type());
    for (auto block : construct_blocks) {
      // Check that all exits from the construct are via structured exits.
      for (auto succ : *block->successors()) {
        if (block->reachable() && !construct_blocks.count(succ) &&
            !construct.IsStructuredExit(_, succ)) {
          return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(block->id()))
                 << "block <ID> " << _.getIdName(block->id()) << " exits the "
                 << construct_name << " headed by <ID> "
                 << _.getIdName(header->id())
                 << ", but not via a structured exit";
        }
      }
      if (block == header) continue;
      // Check that for all non-header blocks, all predecessors are within this
      // construct.
      for (auto pred : *block->predecessors()) {
        if (pred->reachable() && !construct_blocks.count(pred)) {
          return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(pred->id()))
                 << "block <ID> " << pred->id() << " branches to the "
                 << construct_name << " construct, but not to the "
                 << header_name << " <ID> " << header->id();
        }
      }

      if (block->is_type(BlockType::kBlockTypeSelection) ||
          block->is_type(BlockType::kBlockTypeLoop)) {
        size_t index = (block->terminator() - &_.ordered_instructions()[0]) - 1;
        const auto& merge_inst = _.ordered_instructions()[index];
        if (merge_inst.opcode() == SpvOpSelectionMerge ||
            merge_inst.opcode() == SpvOpLoopMerge) {
          uint32_t merge_id = merge_inst.GetOperandAs<uint32_t>(0);
          auto merge_block = function->GetBlock(merge_id).first;
          if (merge_block->reachable() &&
              !construct_blocks.count(merge_block)) {
            return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(block->id()))
                   << "Header block " << _.getIdName(block->id())
                   << " is contained in the " << construct_name
                   << " construct headed by " << _.getIdName(header->id())
                   << ", but its merge block " << _.getIdName(merge_id)
                   << " is not";
          }
        }
      }
    }

    // Checks rules for case constructs.
    if (construct.type() == ConstructType::kSelection &&
        header->terminator()->opcode() == SpvOpSwitch) {
      const auto terminator = header->terminator();
      if (auto error =
              StructuredSwitchChecks(_, function, terminator, header, merge)) {
        return error;
      }
    }
  }

  if (auto error = ValidateStructuredSelections(_, postorder)) {
    return error;
  }

  return SPV_SUCCESS;
}
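
// Enforces the stricter WebGPU CFG rules: every block must be reachable
// except for degenerate unreachable merge blocks and continue targets, whose
// required shape is checked below.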
spv_result_t PerformWebGPUCfgChecks(ValidationState_t& _, Function* function) {
  for (auto& block : function->ordered_blocks()) {
    if (block->reachable()) continue;
    if (block->is_type(kBlockTypeMerge)) {
      // 1. Find the referencing merge and confirm that it is reachable.
      BasicBlock* merge_header = function->GetMergeHeader(block);
      assert(merge_header != nullptr);
      if (!merge_header->reachable()) {
        return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(block->id()))
               << "For WebGPU, unreachable merge-blocks must be referenced by "
                  "a reachable merge instruction.";
      }

      // 2. Check that the only instructions are OpLabel and OpUnreachable.
      auto* label_inst = block->label();
      auto* terminator_inst = block->terminator();
      assert(label_inst != nullptr);
      assert(terminator_inst != nullptr);

      if (terminator_inst->opcode() != SpvOpUnreachable) {
        return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(block->id()))
               << "For WebGPU, unreachable merge-blocks must terminate with "
                  "OpUnreachable.";
      }

      auto label_idx = label_inst - &_.ordered_instructions()[0];
      auto terminator_idx = terminator_inst - &_.ordered_instructions()[0];
      if (label_idx + 1 != terminator_idx) {
        return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(block->id()))
               << "For WebGPU, unreachable merge-blocks must only contain an "
                  "OpLabel and OpUnreachable instruction.";
      }

      // 3. Use the label instruction to confirm there are no uses by branches.
      for (auto use : label_inst->uses()) {
        const auto* use_inst = use.first;
        if (spvOpcodeIsBranch(use_inst->opcode())) {
          return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(block->id()))
                 << "For WebGPU, unreachable merge-blocks cannot be the target "
                    "of a branch.";
        }
      }
    } else if (block->is_type(kBlockTypeContinue)) {
      // 1. Find referencing loop and confirm that it is reachable.
      std::vector<BasicBlock*> continue_headers =
          function->GetContinueHeaders(block);
      if (continue_headers.empty()) {
        return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(block->id()))
               << "For WebGPU, unreachable continue-target must be referenced "
                  "by a loop instruction.";
      }

      std::vector<BasicBlock*> reachable_headers(continue_headers.size());
      auto iter =
          std::copy_if(continue_headers.begin(), continue_headers.end(),
                       reachable_headers.begin(),
                       [](BasicBlock* header) { return header->reachable(); });
      reachable_headers.resize(std::distance(reachable_headers.begin(), iter));

      if (reachable_headers.empty()) {
        return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(block->id()))
               << "For WebGPU, unreachable continue-target must be referenced "
                  "by a reachable loop instruction.";
      }

      // 2. Check that the only instructions are OpLabel and OpBranch.
      auto* label_inst = block->label();
      auto* terminator_inst = block->terminator();
      assert(label_inst != nullptr);
      assert(terminator_inst != nullptr);

      if (terminator_inst->opcode() != SpvOpBranch) {
        return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(block->id()))
               << "For WebGPU, unreachable continue-target must terminate with "
                  "OpBranch.";
      }

      auto label_idx = label_inst - &_.ordered_instructions()[0];
      auto terminator_idx = terminator_inst - &_.ordered_instructions()[0];
      if (label_idx + 1 != terminator_idx) {
        return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(block->id()))
               << "For WebGPU, unreachable continue-target must only contain "
                  "an OpLabel and an OpBranch instruction.";
      }

      // 3. Use the label instruction to confirm there are no uses by branches.
      for (auto use : label_inst->uses()) {
        const auto* use_inst = use.first;
        if (spvOpcodeIsBranch(use_inst->opcode())) {
          return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(block->id()))
                 << "For WebGPU, unreachable continue-target cannot be the "
                    "target of a branch.";
        }
      }

      // 4. Confirm that continue-target has a back edge to a reachable loop
      // header block.
      auto branch_target = terminator_inst->GetOperandAs<uint32_t>(0);
      for (auto* continue_header : reachable_headers) {
        if (branch_target != continue_header->id()) {
          return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(block->id()))
                 << "For WebGPU, unreachable continue-target must only have a "
                    "back edge to a single reachable loop instruction.";
        }
      }
    } else {
      return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(block->id()))
             << "For WebGPU, all blocks must be reachable, unless they are "
             << "degenerate cases of merge-block or continue-target.";
    }
  }
  return SPV_SUCCESS;
}
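
// Runs the per-function CFG checks: undefined blocks, dominator and
// post-dominator computation, binary block ordering, nesting depth limits,
// and (for shaders) the structured control flow rules.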
spv_result_t PerformCfgChecks(ValidationState_t& _) {
  for (auto& function : _.functions()) {
    // Check all referenced blocks are defined within a function
    if (function.undefined_block_count() != 0) {
      std::string undef_blocks("{");
      bool first = true;
      for (auto undefined_block : function.undefined_blocks()) {
        undef_blocks += _.getIdName(undefined_block);
        if (!first) {
          undef_blocks += " ";
        }
        first = false;
      }
      return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(function.id()))
             << "Block(s) " << undef_blocks << "}"
             << " are referenced but not defined in function "
             << _.getIdName(function.id());
    }

    // Set each block's immediate dominator and immediate postdominator,
    // and find all back-edges.
    //
    // We want to analyze all the blocks in the function, even in degenerate
    // control flow cases including unreachable blocks. So use the augmented
    // CFG to ensure we cover all the blocks.
    std::vector<const BasicBlock*> postorder;
    std::vector<const BasicBlock*> postdom_postorder;
    std::vector<std::pair<uint32_t, uint32_t>> back_edges;
    auto ignore_block = [](const BasicBlock*) {};
    auto ignore_edge = [](const BasicBlock*, const BasicBlock*) {};
    if (!function.ordered_blocks().empty()) {
      /// calculate dominators
      CFA<BasicBlock>::DepthFirstTraversal(
          function.first_block(), function.AugmentedCFGSuccessorsFunction(),
          ignore_block, [&](const BasicBlock* b) { postorder.push_back(b); },
          ignore_edge);
      auto edges = CFA<BasicBlock>::CalculateDominators(
          postorder, function.AugmentedCFGPredecessorsFunction());
      for (auto edge : edges) {
        if (edge.first != edge.second)
          edge.first->SetImmediateDominator(edge.second);
      }

      /// calculate post dominators
      CFA<BasicBlock>::DepthFirstTraversal(
          function.pseudo_exit_block(),
          function.AugmentedCFGPredecessorsFunction(), ignore_block,
          [&](const BasicBlock* b) { postdom_postorder.push_back(b); },
          ignore_edge);
      auto postdom_edges = CFA<BasicBlock>::CalculateDominators(
          postdom_postorder, function.AugmentedCFGSuccessorsFunction());
      for (auto edge : postdom_edges) {
        edge.first->SetImmediatePostDominator(edge.second);
      }

      /// calculate back edges.
      CFA<BasicBlock>::DepthFirstTraversal(
          function.pseudo_entry_block(),
          function
              .AugmentedCFGSuccessorsFunctionIncludingHeaderToContinueEdge(),
          ignore_block, ignore_block,
          [&](const BasicBlock* from, const BasicBlock* to) {
            back_edges.emplace_back(from->id(), to->id());
          });
    }
    UpdateContinueConstructExitBlocks(function, back_edges);

    auto& blocks = function.ordered_blocks();
    if (!blocks.empty()) {
      // Check that blocks appear in the binary before the blocks they
      // dominate.
      for (auto block = begin(blocks) + 1; block != end(blocks); ++block) {
        if (auto idom = (*block)->immediate_dominator()) {
          if (idom != function.pseudo_entry_block() &&
              block == std::find(begin(blocks), block, idom)) {
            return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(idom->id()))
                   << "Block " << _.getIdName((*block)->id())
                   << " appears in the binary before its dominator "
                   << _.getIdName(idom->id());
          }
        }

        // For WebGPU check that all unreachable blocks are degenerate cases
        // for merge-block or continue-target.
        if (spvIsWebGPUEnv(_.context()->target_env)) {
          spv_result_t result = PerformWebGPUCfgChecks(_, &function);
          if (result != SPV_SUCCESS) return result;
        }
      }

      // If we have structured control flow, check that no block has a control
      // flow nesting depth larger than the limit.
      if (_.HasCapability(SpvCapabilityShader)) {
        const int control_flow_nesting_depth_limit =
            _.options()->universal_limits_.max_control_flow_nesting_depth;
        for (auto block = begin(blocks); block != end(blocks); ++block) {
          if (function.GetBlockDepth(*block) >
              control_flow_nesting_depth_limit) {
            return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef((*block)->id()))
                   << "Maximum Control Flow nesting depth exceeded.";
          }
        }
      }
    }

    /// Structured control flow checks are only required for shader
    /// capabilities
    if (_.HasCapability(SpvCapabilityShader)) {
      if (auto error =
              StructuredControlFlowChecks(_, &function, back_edges, postorder))
        return error;
    }
  }
  return SPV_SUCCESS;
}
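
// Records CFG information as instructions are visited: block boundaries,
// merge declarations, branch targets, and execution model limitations of
// block terminators.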
spv_result_t CfgPass(ValidationState_t& _, const Instruction* inst) {
  SpvOp opcode = inst->opcode();
  switch (opcode) {
    case SpvOpLabel:
      if (auto error = _.current_function().RegisterBlock(inst->id()))
        return error;

      // TODO(github:1661) This should be done in the
      // ValidationState::RegisterInstruction method but because of the order
      // of passes the OpLabel ends up not being part of the basic block it
      // starts.
      _.current_function().current_block()->set_label(inst);
      break;
    case SpvOpLoopMerge: {
      uint32_t merge_block = inst->GetOperandAs<uint32_t>(0);
      uint32_t continue_block = inst->GetOperandAs<uint32_t>(1);
      CFG_ASSERT(MergeBlockAssert, merge_block);

      if (auto error = _.current_function().RegisterLoopMerge(merge_block,
                                                              continue_block))
        return error;
    } break;
    case SpvOpSelectionMerge: {
      uint32_t merge_block = inst->GetOperandAs<uint32_t>(0);
      CFG_ASSERT(MergeBlockAssert, merge_block);

      if (auto error = _.current_function().RegisterSelectionMerge(merge_block))
        return error;
    } break;
    case SpvOpBranch: {
      uint32_t target = inst->GetOperandAs<uint32_t>(0);
      CFG_ASSERT(FirstBlockAssert, target);

      _.current_function().RegisterBlockEnd({target});
    } break;
    case SpvOpBranchConditional: {
      uint32_t tlabel = inst->GetOperandAs<uint32_t>(1);
      uint32_t flabel = inst->GetOperandAs<uint32_t>(2);
      CFG_ASSERT(FirstBlockAssert, tlabel);
      CFG_ASSERT(FirstBlockAssert, flabel);

      _.current_function().RegisterBlockEnd({tlabel, flabel});
    } break;
    case SpvOpSwitch: {
      std::vector<uint32_t> cases;
      for (size_t i = 1; i < inst->operands().size(); i += 2) {
        uint32_t target = inst->GetOperandAs<uint32_t>(i);
        CFG_ASSERT(FirstBlockAssert, target);
        cases.push_back(target);
      }
      _.current_function().RegisterBlockEnd({cases});
    } break;
    case SpvOpReturn: {
      const uint32_t return_type = _.current_function().GetResultTypeId();
      const Instruction* return_type_inst = _.FindDef(return_type);
      assert(return_type_inst);
      if (return_type_inst->opcode() != SpvOpTypeVoid)
        return _.diag(SPV_ERROR_INVALID_CFG, inst)
               << "OpReturn can only be called from a function with void "
               << "return type.";
      _.current_function().RegisterBlockEnd(std::vector<uint32_t>());
      break;
    }
    case SpvOpKill:
    case SpvOpReturnValue:
    case SpvOpUnreachable:
    case SpvOpTerminateInvocation:
    case SpvOpIgnoreIntersectionKHR:
    case SpvOpTerminateRayKHR:
      _.current_function().RegisterBlockEnd(std::vector<uint32_t>());
      if (opcode == SpvOpKill) {
        _.current_function().RegisterExecutionModelLimitation(
            SpvExecutionModelFragment,
            "OpKill requires Fragment execution model");
      }
      if (opcode == SpvOpTerminateInvocation) {
        _.current_function().RegisterExecutionModelLimitation(
            SpvExecutionModelFragment,
            "OpTerminateInvocation requires Fragment execution model");
      }
      if (opcode == SpvOpIgnoreIntersectionKHR) {
        _.current_function().RegisterExecutionModelLimitation(
            SpvExecutionModelAnyHitKHR,
            "OpIgnoreIntersectionKHR requires AnyHit execution model");
      }
      if (opcode == SpvOpTerminateRayKHR) {
        _.current_function().RegisterExecutionModelLimitation(
            SpvExecutionModelAnyHitKHR,
            "OpTerminateRayKHR requires AnyHit execution model");
      }
      break;
    default:
      break;
  }
  return SPV_SUCCESS;
}
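
// Marks every block reachable from the entry block of each function via a
// depth-first traversal of block successors.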
void ReachabilityPass(ValidationState_t& _) {
  for (auto& f : _.functions()) {
    std::vector<BasicBlock*> stack;
    auto entry = f.first_block();

    // Skip function declarations.
    if (entry) stack.push_back(entry);

    while (!stack.empty()) {
      auto block = stack.back();
      stack.pop_back();

      if (block->reachable()) continue;

      block->set_reachable(true);
      for (auto succ : *block->successors()) {
        stack.push_back(succ);
      }
    }
  }
}
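
// Dispatches per-instruction control flow validation to the checks above.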
spv_result_t ControlFlowPass(ValidationState_t& _, const Instruction* inst) {
  switch (inst->opcode()) {
    case SpvOpPhi:
      if (auto error = ValidatePhi(_, inst)) return error;
      break;
    case SpvOpBranch:
      if (auto error = ValidateBranch(_, inst)) return error;
      break;
    case SpvOpBranchConditional:
      if (auto error = ValidateBranchConditional(_, inst)) return error;
      break;
    case SpvOpReturnValue:
      if (auto error = ValidateReturnValue(_, inst)) return error;
      break;
    case SpvOpSwitch:
      if (auto error = ValidateSwitch(_, inst)) return error;
      break;
    case SpvOpLoopMerge:
      if (auto error = ValidateLoopMerge(_, inst)) return error;
      break;
    default:
      break;
  }
  return SPV_SUCCESS;
}

}  // namespace val
}  // namespace spvtools