// validate_cfg.cpp

// Copyright (c) 2015-2016 The Khronos Group Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "source/val/validate.h"

#include <algorithm>
#include <cassert>
#include <functional>
#include <iostream>
#include <iterator>
#include <map>
#include <string>
#include <tuple>
#include <unordered_map>
#include <unordered_set>
#include <utility>
#include <vector>

#include "source/cfa.h"
#include "source/opcode.h"
#include "source/spirv_target_env.h"
#include "source/spirv_validator_options.h"
#include "source/val/basic_block.h"
#include "source/val/construct.h"
#include "source/val/function.h"
#include "source/val/validation_state.h"

namespace spvtools {
namespace val {
namespace {
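// Validates an OpPhi instruction. Operand layout per the SPIR-V spec:
//   %result = OpPhi %type %value0 %parent0 %value1 %parent1 ...
// inst->words() holds the opcode word, the result type id and the result id
// ahead of the (value, parent) pairs, hence the "- 3" below.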
spv_result_t ValidatePhi(ValidationState_t& _, const Instruction* inst) {
  auto block = inst->block();
  size_t num_in_ops = inst->words().size() - 3;
  if (num_in_ops % 2 != 0) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "OpPhi does not have an equal number of incoming values and "
              "basic blocks.";
  }

  const Instruction* type_inst = _.FindDef(inst->type_id());
  assert(type_inst);

  const SpvOp type_opcode = type_inst->opcode();
  if (type_opcode == SpvOpTypePointer &&
      _.addressing_model() == SpvAddressingModelLogical) {
    if (!_.features().variable_pointers &&
        !_.features().variable_pointers_storage_buffer) {
      return _.diag(SPV_ERROR_INVALID_DATA, inst)
             << "Using pointers with OpPhi requires capability "
             << "VariablePointers or VariablePointersStorageBuffer";
    }
  }

  // Create a uniqued vector of predecessor ids for comparison against
  // incoming values. OpBranchConditional %cond %label %label produces two
  // predecessors in the CFG.
  std::vector<uint32_t> pred_ids;
  std::transform(block->predecessors()->begin(), block->predecessors()->end(),
                 std::back_inserter(pred_ids),
                 [](const BasicBlock* b) { return b->id(); });
  std::sort(pred_ids.begin(), pred_ids.end());
  pred_ids.erase(std::unique(pred_ids.begin(), pred_ids.end()), pred_ids.end());

  size_t num_edges = num_in_ops / 2;
  if (num_edges != pred_ids.size()) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "OpPhi's number of incoming blocks (" << num_edges
           << ") does not match block's predecessor count ("
           << block->predecessors()->size() << ").";
  }

  for (size_t i = 3; i < inst->words().size(); ++i) {
    auto inc_id = inst->word(i);
    if (i % 2 == 1) {
      // Incoming value type must match the phi result type.
      auto inc_type_id = _.GetTypeId(inc_id);
      if (inst->type_id() != inc_type_id) {
        return _.diag(SPV_ERROR_INVALID_ID, inst)
               << "OpPhi's result type <id> " << _.getIdName(inst->type_id())
               << " does not match incoming value <id> " << _.getIdName(inc_id)
               << " type <id> " << _.getIdName(inc_type_id) << ".";
      }
    } else {
      if (_.GetIdOpcode(inc_id) != SpvOpLabel) {
        return _.diag(SPV_ERROR_INVALID_ID, inst)
               << "OpPhi's incoming basic block <id> " << _.getIdName(inc_id)
               << " is not an OpLabel.";
      }

      // Incoming basic block must be an immediate predecessor of the phi's
      // block.
      if (!std::binary_search(pred_ids.begin(), pred_ids.end(), inc_id)) {
        return _.diag(SPV_ERROR_INVALID_ID, inst)
               << "OpPhi's incoming basic block <id> " << _.getIdName(inc_id)
               << " is not a predecessor of <id> " << _.getIdName(block->id())
               << ".";
      }
    }
  }

  return SPV_SUCCESS;
}

spv_result_t ValidateBranch(ValidationState_t& _, const Instruction* inst) {
  // target operands must be OpLabel
  const auto id = inst->GetOperandAs<uint32_t>(0);
  const auto target = _.FindDef(id);
  if (!target || SpvOpLabel != target->opcode()) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "'Target Label' operands for OpBranch must be the ID "
              "of an OpLabel instruction";
  }
  return SPV_SUCCESS;
}

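// Validates OpBranchConditional. Operand layout per the SPIR-V spec:
//   OpBranchConditional %condition %true_label %false_label [weight0 weight1]
// so the instruction has either 3 operands or 5 (when the two optional
// literal branch weights are present).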
spv_result_t ValidateBranchConditional(ValidationState_t& _,
                                       const Instruction* inst) {
  // num_operands is either 3 or 5 --- if 5, the last two need to be literal
  // integers
  const auto num_operands = inst->operands().size();
  if (num_operands != 3 && num_operands != 5) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "OpBranchConditional requires either 3 or 5 parameters";
  }

  // grab the condition operand and check that it is a bool
  const auto cond_id = inst->GetOperandAs<uint32_t>(0);
  const auto cond_op = _.FindDef(cond_id);
  if (!cond_op || !cond_op->type_id() ||
      !_.IsBoolScalarType(cond_op->type_id())) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "Condition operand for OpBranchConditional must be "
              "of boolean type";
  }

  // target operands must be OpLabel
  // note that we don't need to check that the target labels are in the same
  // function, PerformCfgChecks already checks for that
  const auto true_id = inst->GetOperandAs<uint32_t>(1);
  const auto true_target = _.FindDef(true_id);
  if (!true_target || SpvOpLabel != true_target->opcode()) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "The 'True Label' operand for OpBranchConditional must be the "
              "ID of an OpLabel instruction";
  }

  const auto false_id = inst->GetOperandAs<uint32_t>(2);
  const auto false_target = _.FindDef(false_id);
  if (!false_target || SpvOpLabel != false_target->opcode()) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "The 'False Label' operand for OpBranchConditional must be the "
              "ID of an OpLabel instruction";
  }
  return SPV_SUCCESS;
}

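// Validates OpSwitch. Operand layout per the SPIR-V spec:
//   OpSwitch %selector %default literal0 %target0 literal1 %target1 ...
// Every target label, including each case target, must be an OpLabel.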
spv_result_t ValidateSwitch(ValidationState_t& _, const Instruction* inst) {
  const auto num_operands = inst->operands().size();
  // The first two operands are the selector and the default label; any
  // remaining operands come in (literal, target label) pairs.
  // target operands must be OpLabel
  for (size_t i = 2; i < num_operands; i += 2) {
    // literal, id
    const auto id = inst->GetOperandAs<uint32_t>(i + 1);
    const auto target = _.FindDef(id);
    if (!target || SpvOpLabel != target->opcode()) {
      return _.diag(SPV_ERROR_INVALID_ID, inst)
             << "'Target Label' operands for OpSwitch must be IDs of an "
                "OpLabel instruction";
    }
  }
  return SPV_SUCCESS;
}

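// Validates OpReturnValue: the returned <id> must be a value with a non-void
// type, may not be a pointer under the Logical addressing model (unless
// variable pointers or relaxed logical pointers are enabled), and its type
// must match the enclosing OpFunction's return type.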
spv_result_t ValidateReturnValue(ValidationState_t& _,
                                 const Instruction* inst) {
  const auto value_id = inst->GetOperandAs<uint32_t>(0);
  const auto value = _.FindDef(value_id);
  if (!value || !value->type_id()) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "OpReturnValue Value <id> '" << _.getIdName(value_id)
           << "' does not represent a value.";
  }
  auto value_type = _.FindDef(value->type_id());
  if (!value_type || SpvOpTypeVoid == value_type->opcode()) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "OpReturnValue value's type <id> '"
           << _.getIdName(value->type_id()) << "' is missing or void.";
  }

  const bool uses_variable_pointer =
      _.features().variable_pointers ||
      _.features().variable_pointers_storage_buffer;

  if (_.addressing_model() == SpvAddressingModelLogical &&
      SpvOpTypePointer == value_type->opcode() && !uses_variable_pointer &&
      !_.options()->relax_logical_pointer) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "OpReturnValue value's type <id> '"
           << _.getIdName(value->type_id())
           << "' is a pointer, which is invalid in the Logical addressing "
              "model.";
  }

  const auto function = inst->function();
  const auto return_type = _.FindDef(function->GetResultTypeId());
  if (!return_type || return_type->id() != value_type->id()) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "OpReturnValue Value <id> '" << _.getIdName(value_id)
           << "'s type does not match OpFunction's return type.";
  }

  return SPV_SUCCESS;
}

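// Validates OpLoopMerge. Operand layout per the SPIR-V spec:
//   OpLoopMerge %merge_block %continue_target LoopControl [parameters...]
// The merge block and continue target must be distinct OpLabels, the merge
// block may not be the block containing the OpLoopMerge, and mutually
// exclusive loop-control bits (Unroll/DontUnroll, PeelCount/DontUnroll,
// PartialCount/DontUnroll) must not be combined.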
spv_result_t ValidateLoopMerge(ValidationState_t& _, const Instruction* inst) {
  const auto merge_id = inst->GetOperandAs<uint32_t>(0);
  const auto merge = _.FindDef(merge_id);
  if (!merge || merge->opcode() != SpvOpLabel) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "Merge Block " << _.getIdName(merge_id) << " must be an OpLabel";
  }
  if (merge_id == inst->block()->id()) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "Merge Block may not be the block containing the OpLoopMerge\n";
  }

  const auto continue_id = inst->GetOperandAs<uint32_t>(1);
  const auto continue_target = _.FindDef(continue_id);
  if (!continue_target || continue_target->opcode() != SpvOpLabel) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "Continue Target " << _.getIdName(continue_id)
           << " must be an OpLabel";
  }
  if (merge_id == continue_id) {
    return _.diag(SPV_ERROR_INVALID_ID, inst)
           << "Merge Block and Continue Target must be different ids";
  }

  const auto loop_control = inst->GetOperandAs<uint32_t>(2);
  if ((loop_control >> SpvLoopControlUnrollShift) & 0x1 &&
      (loop_control >> SpvLoopControlDontUnrollShift) & 0x1) {
    return _.diag(SPV_ERROR_INVALID_DATA, inst)
           << "Unroll and DontUnroll loop controls must not both be specified";
  }
  if ((loop_control >> SpvLoopControlDontUnrollShift) & 0x1 &&
      (loop_control >> SpvLoopControlPeelCountShift) & 0x1) {
    return _.diag(SPV_ERROR_INVALID_DATA, inst)
           << "PeelCount and DontUnroll loop controls must not "
              "both be specified";
  }
  if ((loop_control >> SpvLoopControlDontUnrollShift) & 0x1 &&
      (loop_control >> SpvLoopControlPartialCountShift) & 0x1) {
    return _.diag(SPV_ERROR_INVALID_DATA, inst)
           << "PartialCount and DontUnroll loop controls "
              "must not both be specified";
  }

  uint32_t operand = 3;
  if ((loop_control >> SpvLoopControlDependencyLengthShift) & 0x1) {
    ++operand;
  }
  if ((loop_control >> SpvLoopControlMinIterationsShift) & 0x1) {
    ++operand;
  }
  if ((loop_control >> SpvLoopControlMaxIterationsShift) & 0x1) {
    ++operand;
  }
  if ((loop_control >> SpvLoopControlIterationMultipleShift) & 0x1) {
    if (inst->operands().size() < operand ||
        inst->GetOperandAs<uint32_t>(operand) == 0) {
      return _.diag(SPV_ERROR_INVALID_DATA, inst)
             << "IterationMultiple loop control operand must be "
                "greater than zero";
    }
    ++operand;
  }
  if ((loop_control >> SpvLoopControlPeelCountShift) & 0x1) {
    ++operand;
  }
  if ((loop_control >> SpvLoopControlPartialCountShift) & 0x1) {
    ++operand;
  }

  // The parser checks that the right number of operands is present. The code
  // above tracks the operand index so that expanded validation can be added
  // in the future.
  return SPV_SUCCESS;
}

}  // namespace

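// Debugging helper that prints the chain of immediate dominators of |b| to
// stdout.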
void printDominatorList(const BasicBlock& b) {
  std::cout << b.id() << " is dominated by: ";
  const BasicBlock* bb = &b;
  while (bb->immediate_dominator() != bb) {
    bb = bb->immediate_dominator();
    std::cout << bb->id() << " ";
  }
}

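// CFG_ASSERT runs one of the *Assert helpers below against the current
// function and returns the resulting diagnostic, if any. It assumes a
// ValidationState_t named |_| is in scope at the call site (see CfgPass).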
#define CFG_ASSERT(ASSERT_FUNC, TARGET) \
  if (spv_result_t rcode = ASSERT_FUNC(_, TARGET)) return rcode

spv_result_t FirstBlockAssert(ValidationState_t& _, uint32_t target) {
  if (_.current_function().IsFirstBlock(target)) {
    return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(_.current_function().id()))
           << "First block " << _.getIdName(target) << " of function "
           << _.getIdName(_.current_function().id()) << " is targeted by block "
           << _.getIdName(_.current_function().current_block()->id());
  }
  return SPV_SUCCESS;
}

spv_result_t MergeBlockAssert(ValidationState_t& _, uint32_t merge_block) {
  if (_.current_function().IsBlockType(merge_block, kBlockTypeMerge)) {
    return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(_.current_function().id()))
           << "Block " << _.getIdName(merge_block)
           << " is already a merge block for another header";
  }
  return SPV_SUCCESS;
}

/// Update the continue construct's exit blocks once the backedge blocks are
/// identified in the CFG.
void UpdateContinueConstructExitBlocks(
    Function& function,
    const std::vector<std::pair<uint32_t, uint32_t>>& back_edges) {
  auto& constructs = function.constructs();
  // TODO(umar): Think of a faster way to do this
  for (auto& edge : back_edges) {
    uint32_t back_edge_block_id;
    uint32_t loop_header_block_id;
    std::tie(back_edge_block_id, loop_header_block_id) = edge;

    auto is_this_header = [=](Construct& c) {
      return c.type() == ConstructType::kLoop &&
             c.entry_block()->id() == loop_header_block_id;
    };

    for (auto construct : constructs) {
      if (is_this_header(construct)) {
        Construct* continue_construct =
            construct.corresponding_constructs().back();
        assert(continue_construct->type() == ConstructType::kContinue);

        BasicBlock* back_edge_block;
        std::tie(back_edge_block, std::ignore) =
            function.GetBlock(back_edge_block_id);
        continue_construct->set_exit(back_edge_block);
      }
    }
  }
}

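// Returns the human-readable names used in diagnostics for a construct of
// |type|: the construct itself, its header block, and its exit block.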
std::tuple<std::string, std::string, std::string> ConstructNames(
    ConstructType type) {
  std::string construct_name, header_name, exit_name;

  switch (type) {
    case ConstructType::kSelection:
      construct_name = "selection";
      header_name = "selection header";
      exit_name = "merge block";
      break;
    case ConstructType::kLoop:
      construct_name = "loop";
      header_name = "loop header";
      exit_name = "merge block";
      break;
    case ConstructType::kContinue:
      construct_name = "continue";
      header_name = "continue target";
      exit_name = "back-edge block";
      break;
    case ConstructType::kCase:
      construct_name = "case";
      header_name = "case entry block";
      exit_name = "case exit block";
      break;
    default:
      assert(1 == 0 && "Not defined type");
  }

  return std::make_tuple(construct_name, header_name, exit_name);
}

/// Constructs an error message for construct validation errors
std::string ConstructErrorString(const Construct& construct,
                                 const std::string& header_string,
                                 const std::string& exit_string,
                                 const std::string& dominate_text) {
  std::string construct_name, header_name, exit_name;
  std::tie(construct_name, header_name, exit_name) =
      ConstructNames(construct.type());

  // TODO(umar): Add header block for continue constructs to error message
  return "The " + construct_name + " construct with the " + header_name + " " +
         header_string + " " + dominate_text + " the " + exit_name + " " +
         exit_string;
}

// Finds the fall through case construct of |target_block| and records it in
// |case_fall_through|. Returns SPV_ERROR_INVALID_CFG if the case construct
// headed by |target_block| branches to multiple case constructs.
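// The search is a depth-first traversal from |target_block| over its
// successors, staying inside the case construct (blocks dominated by
// |target_block|); the first block reached outside the construct that is
// another case target is recorded as the fall-through.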
spv_result_t FindCaseFallThrough(
    ValidationState_t& _, BasicBlock* target_block, uint32_t* case_fall_through,
    const BasicBlock* merge, const std::unordered_set<uint32_t>& case_targets,
    Function* function) {
  std::vector<BasicBlock*> stack;
  stack.push_back(target_block);
  std::unordered_set<const BasicBlock*> visited;
  bool target_reachable = target_block->reachable();
  int target_depth = function->GetBlockDepth(target_block);
  while (!stack.empty()) {
    auto block = stack.back();
    stack.pop_back();

    if (block == merge) continue;

    if (!visited.insert(block).second) continue;

    if (target_reachable && block->reachable() &&
        target_block->dominates(*block)) {
      // Still in the case construct.
      for (auto successor : *block->successors()) {
        stack.push_back(successor);
      }
    } else {
      // Exiting the case construct to non-merge block.
      if (!case_targets.count(block->id())) {
        int depth = function->GetBlockDepth(block);
        if ((depth < target_depth) ||
            (depth == target_depth && block->is_type(kBlockTypeContinue))) {
          continue;
        }

        return _.diag(SPV_ERROR_INVALID_CFG, target_block->label())
               << "Case construct that targets "
               << _.getIdName(target_block->id())
               << " has invalid branch to block " << _.getIdName(block->id())
               << " (not another case construct, corresponding merge, outer "
                  "loop merge or outer loop continue)";
      }

      if (*case_fall_through == 0u) {
        if (target_block != block) {
          *case_fall_through = block->id();
        }
      } else if (*case_fall_through != block->id()) {
        // Case construct has at most one branch to another case construct.
        return _.diag(SPV_ERROR_INVALID_CFG, target_block->label())
               << "Case construct that targets "
               << _.getIdName(target_block->id())
               << " has branches to multiple other case construct targets "
               << _.getIdName(*case_fall_through) << " and "
               << _.getIdName(block->id());
      }
    }
  }

  return SPV_SUCCESS;
}

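// Checks the structured-control-flow rules for an OpSwitch: the selection
// header must dominate every case construct, each case construct may fall
// through to at most one other case construct, a fall-through target must
// immediately follow its source in the OpSwitch target list, and each case
// construct may be targeted by at most one other case construct.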
spv_result_t StructuredSwitchChecks(ValidationState_t& _, Function* function,
                                    const Instruction* switch_inst,
                                    const BasicBlock* header,
                                    const BasicBlock* merge) {
  std::unordered_set<uint32_t> case_targets;
  for (uint32_t i = 1; i < switch_inst->operands().size(); i += 2) {
    uint32_t target = switch_inst->GetOperandAs<uint32_t>(i);
    if (target != merge->id()) case_targets.insert(target);
  }
  // Tracks how many times each case construct is targeted by another case
  // construct.
  std::map<uint32_t, uint32_t> num_fall_through_targeted;
  uint32_t default_case_fall_through = 0u;
  uint32_t default_target = switch_inst->GetOperandAs<uint32_t>(1u);
  bool default_appears_multiple_times = false;
  for (uint32_t i = 3; i < switch_inst->operands().size(); i += 2) {
    if (default_target == switch_inst->GetOperandAs<uint32_t>(i)) {
      default_appears_multiple_times = true;
      break;
    }
  }
  std::unordered_map<uint32_t, uint32_t> seen_to_fall_through;
  for (uint32_t i = 1; i < switch_inst->operands().size(); i += 2) {
    uint32_t target = switch_inst->GetOperandAs<uint32_t>(i);
    if (target == merge->id()) continue;

    uint32_t case_fall_through = 0u;
    auto seen_iter = seen_to_fall_through.find(target);
    if (seen_iter == seen_to_fall_through.end()) {
      const auto target_block = function->GetBlock(target).first;
      // OpSwitch must dominate all its case constructs.
      if (header->reachable() && target_block->reachable() &&
          !header->dominates(*target_block)) {
        return _.diag(SPV_ERROR_INVALID_CFG, header->label())
               << "Selection header " << _.getIdName(header->id())
               << " does not dominate its case construct "
               << _.getIdName(target);
      }

      if (auto error = FindCaseFallThrough(_, target_block, &case_fall_through,
                                           merge, case_targets, function)) {
        return error;
      }

      // Track how many times the fall through case has been targeted.
      if (case_fall_through != 0u) {
        auto where = num_fall_through_targeted.lower_bound(case_fall_through);
        if (where == num_fall_through_targeted.end() ||
            where->first != case_fall_through) {
          num_fall_through_targeted.insert(
              where, std::make_pair(case_fall_through, 1));
        } else {
          where->second++;
        }
      }
      seen_to_fall_through.insert(std::make_pair(target, case_fall_through));
    } else {
      case_fall_through = seen_iter->second;
    }

    if (case_fall_through == default_target &&
        !default_appears_multiple_times) {
      case_fall_through = default_case_fall_through;
    }
    if (case_fall_through != 0u) {
      bool is_default = i == 1;
      if (is_default) {
        default_case_fall_through = case_fall_through;
      } else {
        // Allow code like:
        // case x:
        // case y:
        //   ...
        // case z:
        //
        // Where x and y target the same block and fall through to z.
        uint32_t j = i;
        while ((j + 2 < switch_inst->operands().size()) &&
               target == switch_inst->GetOperandAs<uint32_t>(j + 2)) {
          j += 2;
        }
        // If Target T1 branches to Target T2, or if Target T1 branches to the
        // Default target and the Default target branches to Target T2, then T1
        // must immediately precede T2 in the list of OpSwitch Target operands.
        if ((switch_inst->operands().size() < j + 2) ||
            (case_fall_through != switch_inst->GetOperandAs<uint32_t>(j + 2))) {
          return _.diag(SPV_ERROR_INVALID_CFG, switch_inst)
                 << "Case construct that targets " << _.getIdName(target)
                 << " has branches to the case construct that targets "
                 << _.getIdName(case_fall_through)
                 << ", but does not immediately precede it in the "
                    "OpSwitch's target list";
        }
      }
    }
  }

  // Each case construct must be branched to by at most one other case
  // construct.
  for (const auto& pair : num_fall_through_targeted) {
    if (pair.second > 1) {
      return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(pair.first))
             << "Multiple case constructs have branches to the case construct "
                "that targets "
             << _.getIdName(pair.first);
    }
  }

  return SPV_SUCCESS;
}

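// Validates the structured-control-flow rules for a function: every back-edge
// must target a loop header, every reachable loop header must have exactly
// one back-edge, construct headers must dominate their merge blocks, continue
// constructs must be post-dominated by their back-edge block, blocks may only
// leave a construct through a structured exit, non-header construct blocks
// may only be entered from within the construct, and OpSwitch selections must
// satisfy StructuredSwitchChecks.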
spv_result_t StructuredControlFlowChecks(
    ValidationState_t& _, Function* function,
    const std::vector<std::pair<uint32_t, uint32_t>>& back_edges) {
  /// Check that back-edges only target loop headers and that each loop header
  /// has exactly one back-edge branching to it.

  // Map a loop header to blocks with back-edges to the loop header.
  std::map<uint32_t, std::unordered_set<uint32_t>> loop_latch_blocks;
  for (auto back_edge : back_edges) {
    uint32_t back_edge_block;
    uint32_t header_block;
    std::tie(back_edge_block, header_block) = back_edge;
    if (!function->IsBlockType(header_block, kBlockTypeLoop)) {
      return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(back_edge_block))
             << "Back-edges (" << _.getIdName(back_edge_block) << " -> "
             << _.getIdName(header_block)
             << ") can only be formed between a block and a loop header.";
    }
    loop_latch_blocks[header_block].insert(back_edge_block);
  }

  // Check that each loop header has exactly one back-edge branching to it.
  for (BasicBlock* loop_header : function->ordered_blocks()) {
    if (!loop_header->reachable()) continue;
    if (!loop_header->is_type(kBlockTypeLoop)) continue;
    auto loop_header_id = loop_header->id();
    auto num_latch_blocks = loop_latch_blocks[loop_header_id].size();
    if (num_latch_blocks != 1) {
      return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(loop_header_id))
             << "Loop header " << _.getIdName(loop_header_id)
             << " is targeted by " << num_latch_blocks
             << " back-edge blocks but the standard requires exactly one";
    }
  }

  // Check construct rules
  for (const Construct& construct : function->constructs()) {
    auto header = construct.entry_block();
    auto merge = construct.exit_block();

    if (header->reachable() && !merge) {
      std::string construct_name, header_name, exit_name;
      std::tie(construct_name, header_name, exit_name) =
          ConstructNames(construct.type());
      return _.diag(SPV_ERROR_INTERNAL, _.FindDef(header->id()))
             << "Construct " + construct_name + " with " + header_name + " " +
                    _.getIdName(header->id()) + " does not have a " +
                    exit_name + ". This may be a bug in the validator.";
    }

    // If the exit block is reachable then it's dominated by the
    // header.
    if (merge && merge->reachable()) {
      if (!header->dominates(*merge)) {
        return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(merge->id()))
               << ConstructErrorString(construct, _.getIdName(header->id()),
                                       _.getIdName(merge->id()),
                                       "does not dominate");
      }
      // If it's really a merge block for a selection or loop, then it must be
      // *strictly* dominated by the header.
      if (construct.ExitBlockIsMergeBlock() && (header == merge)) {
        return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(merge->id()))
               << ConstructErrorString(construct, _.getIdName(header->id()),
                                       _.getIdName(merge->id()),
                                       "does not strictly dominate");
      }
    }

    // Check post-dominance for continue constructs. But dominance and
    // post-dominance only make sense when the construct is reachable.
    if (header->reachable() && construct.type() == ConstructType::kContinue) {
      if (!merge->postdominates(*header)) {
        return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(merge->id()))
               << ConstructErrorString(construct, _.getIdName(header->id()),
                                       _.getIdName(merge->id()),
                                       "is not post dominated by");
      }
    }

    Construct::ConstructBlockSet construct_blocks = construct.blocks(function);
    for (auto block : construct_blocks) {
      std::string construct_name, header_name, exit_name;
      std::tie(construct_name, header_name, exit_name) =
          ConstructNames(construct.type());
      // Check that all exits from the construct are via structured exits.
      for (auto succ : *block->successors()) {
        if (block->reachable() && !construct_blocks.count(succ) &&
            !construct.IsStructuredExit(_, succ)) {
          return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(block->id()))
                 << "block <ID> " << _.getIdName(block->id()) << " exits the "
                 << construct_name << " headed by <ID> "
                 << _.getIdName(header->id())
                 << ", but not via a structured exit";
        }
      }
      if (block == header) continue;
      // Check that for all non-header blocks, all predecessors are within this
      // construct.
      for (auto pred : *block->predecessors()) {
        if (pred->reachable() && !construct_blocks.count(pred)) {
          return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(pred->id()))
                 << "block <ID> " << pred->id() << " branches to the "
                 << construct_name << " construct, but not to the "
                 << header_name << " <ID> " << header->id();
        }
      }
    }

    // Check rules for case constructs.
    if (construct.type() == ConstructType::kSelection &&
        header->terminator()->opcode() == SpvOpSwitch) {
      const auto terminator = header->terminator();
      if (auto error =
              StructuredSwitchChecks(_, function, terminator, header, merge)) {
        return error;
      }
    }
  }
  return SPV_SUCCESS;
}

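// Enforces the extra WebGPU CFG restrictions: every block must be reachable,
// except for degenerate unreachable merge blocks (only an OpLabel followed by
// OpUnreachable) and unreachable continue targets (only an OpLabel followed
// by an OpBranch back to a single reachable loop header), neither of which
// may be the target of a branch.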
spv_result_t PerformWebGPUCfgChecks(ValidationState_t& _, Function* function) {
  for (auto& block : function->ordered_blocks()) {
    if (block->reachable()) continue;
    if (block->is_type(kBlockTypeMerge)) {
      // 1. Find the referencing merge and confirm that it is reachable.
      BasicBlock* merge_header = function->GetMergeHeader(block);
      assert(merge_header != nullptr);
      if (!merge_header->reachable()) {
        return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(block->id()))
               << "For WebGPU, unreachable merge-blocks must be referenced by "
                  "a reachable merge instruction.";
      }

      // 2. Check that the only instructions are OpLabel and OpUnreachable.
      auto* label_inst = block->label();
      auto* terminator_inst = block->terminator();
      assert(label_inst != nullptr);
      assert(terminator_inst != nullptr);

      if (terminator_inst->opcode() != SpvOpUnreachable) {
        return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(block->id()))
               << "For WebGPU, unreachable merge-blocks must terminate with "
                  "OpUnreachable.";
      }

      auto label_idx = label_inst - &_.ordered_instructions()[0];
      auto terminator_idx = terminator_inst - &_.ordered_instructions()[0];
      if (label_idx + 1 != terminator_idx) {
        return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(block->id()))
               << "For WebGPU, unreachable merge-blocks must only contain an "
                  "OpLabel and OpUnreachable instruction.";
      }

      // 3. Use the label instruction to confirm there are no uses by branches.
      for (auto use : label_inst->uses()) {
        const auto* use_inst = use.first;
        if (spvOpcodeIsBranch(use_inst->opcode())) {
          return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(block->id()))
                 << "For WebGPU, unreachable merge-blocks cannot be the target "
                    "of a branch.";
        }
      }
    } else if (block->is_type(kBlockTypeContinue)) {
      // 1. Find the referencing loop and confirm that it is reachable.
      std::vector<BasicBlock*> continue_headers =
          function->GetContinueHeaders(block);
      if (continue_headers.empty()) {
        return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(block->id()))
               << "For WebGPU, unreachable continue-target must be referenced "
                  "by a loop instruction.";
      }

      std::vector<BasicBlock*> reachable_headers(continue_headers.size());
      auto iter =
          std::copy_if(continue_headers.begin(), continue_headers.end(),
                       reachable_headers.begin(),
                       [](BasicBlock* header) { return header->reachable(); });
      reachable_headers.resize(std::distance(reachable_headers.begin(), iter));

      if (reachable_headers.empty()) {
        return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(block->id()))
               << "For WebGPU, unreachable continue-target must be referenced "
                  "by a reachable loop instruction.";
      }

      // 2. Check that the only instructions are OpLabel and OpBranch.
      auto* label_inst = block->label();
      auto* terminator_inst = block->terminator();
      assert(label_inst != nullptr);
      assert(terminator_inst != nullptr);

      if (terminator_inst->opcode() != SpvOpBranch) {
        return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(block->id()))
               << "For WebGPU, unreachable continue-target must terminate with "
                  "OpBranch.";
      }

      auto label_idx = label_inst - &_.ordered_instructions()[0];
      auto terminator_idx = terminator_inst - &_.ordered_instructions()[0];
      if (label_idx + 1 != terminator_idx) {
        return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(block->id()))
               << "For WebGPU, unreachable continue-target must only contain "
                  "an OpLabel and an OpBranch instruction.";
      }

      // 3. Use the label instruction to confirm there are no uses by branches.
      for (auto use : label_inst->uses()) {
        const auto* use_inst = use.first;
        if (spvOpcodeIsBranch(use_inst->opcode())) {
          return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(block->id()))
                 << "For WebGPU, unreachable continue-target cannot be the "
                    "target of a branch.";
        }
      }

      // 4. Confirm that the continue-target has a back edge to a reachable
      //    loop header block.
      auto branch_target = terminator_inst->GetOperandAs<uint32_t>(0);
      for (auto* continue_header : reachable_headers) {
        if (branch_target != continue_header->id()) {
          return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(block->id()))
                 << "For WebGPU, unreachable continue-target must only have a "
                    "back edge to a single reachable loop instruction.";
        }
      }
    } else {
      return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(block->id()))
             << "For WebGPU, all blocks must be reachable, unless they are "
             << "degenerate cases of merge-block or continue-target.";
    }
  }
  return SPV_SUCCESS;
}

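// Top-level CFG validation, run once per function after all instructions have
// been registered: verifies that every referenced block is defined, computes
// dominators and post-dominators over the augmented CFG, collects back-edges,
// checks binary block ordering against dominance, enforces the control-flow
// nesting-depth limit, and runs the structured-control-flow checks for Shader
// capabilities (plus the WebGPU checks for WebGPU environments).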
spv_result_t PerformCfgChecks(ValidationState_t& _) {
  for (auto& function : _.functions()) {
    // Check that all referenced blocks are defined within a function
    if (function.undefined_block_count() != 0) {
      std::string undef_blocks("{");
      bool first = true;
      for (auto undefined_block : function.undefined_blocks()) {
        if (!first) {
          undef_blocks += " ";
        }
        undef_blocks += _.getIdName(undefined_block);
        first = false;
      }
      return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(function.id()))
             << "Block(s) " << undef_blocks << "}"
             << " are referenced but not defined in function "
             << _.getIdName(function.id());
    }

    // Set each block's immediate dominator and immediate postdominator,
    // and find all back-edges.
    //
    // We want to analyze all the blocks in the function, even in degenerate
    // control flow cases including unreachable blocks. So use the augmented
    // CFG to ensure we cover all the blocks.
    std::vector<const BasicBlock*> postorder;
    std::vector<const BasicBlock*> postdom_postorder;
    std::vector<std::pair<uint32_t, uint32_t>> back_edges;
    auto ignore_block = [](const BasicBlock*) {};
    auto ignore_edge = [](const BasicBlock*, const BasicBlock*) {};
    if (!function.ordered_blocks().empty()) {
      /// calculate dominators
      CFA<BasicBlock>::DepthFirstTraversal(
          function.first_block(), function.AugmentedCFGSuccessorsFunction(),
          ignore_block, [&](const BasicBlock* b) { postorder.push_back(b); },
          ignore_edge);
      auto edges = CFA<BasicBlock>::CalculateDominators(
          postorder, function.AugmentedCFGPredecessorsFunction());
      for (auto edge : edges) {
        if (edge.first != edge.second)
          edge.first->SetImmediateDominator(edge.second);
      }

      /// calculate post dominators
      CFA<BasicBlock>::DepthFirstTraversal(
          function.pseudo_exit_block(),
          function.AugmentedCFGPredecessorsFunction(), ignore_block,
          [&](const BasicBlock* b) { postdom_postorder.push_back(b); },
          ignore_edge);
      auto postdom_edges = CFA<BasicBlock>::CalculateDominators(
          postdom_postorder, function.AugmentedCFGSuccessorsFunction());
      for (auto edge : postdom_edges) {
        edge.first->SetImmediatePostDominator(edge.second);
      }

      /// calculate back edges.
      CFA<BasicBlock>::DepthFirstTraversal(
          function.pseudo_entry_block(),
          function
              .AugmentedCFGSuccessorsFunctionIncludingHeaderToContinueEdge(),
          ignore_block, ignore_block,
          [&](const BasicBlock* from, const BasicBlock* to) {
            back_edges.emplace_back(from->id(), to->id());
          });
    }
    UpdateContinueConstructExitBlocks(function, back_edges);

    auto& blocks = function.ordered_blocks();
    if (!blocks.empty()) {
      // Check that no block appears in the binary before its dominator.
      for (auto block = begin(blocks) + 1; block != end(blocks); ++block) {
        if (auto idom = (*block)->immediate_dominator()) {
          if (idom != function.pseudo_entry_block() &&
              block == std::find(begin(blocks), block, idom)) {
            return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef(idom->id()))
                   << "Block " << _.getIdName((*block)->id())
                   << " appears in the binary before its dominator "
                   << _.getIdName(idom->id());
          }
        }
        // For WebGPU, check that all unreachable blocks are degenerate cases
        // for merge-block or continue-target.
        if (spvIsWebGPUEnv(_.context()->target_env)) {
          spv_result_t result = PerformWebGPUCfgChecks(_, &function);
          if (result != SPV_SUCCESS) return result;
        }
      }

      // If we have structured control flow, check that no block has a control
      // flow nesting depth larger than the limit.
      if (_.HasCapability(SpvCapabilityShader)) {
        const int control_flow_nesting_depth_limit =
            _.options()->universal_limits_.max_control_flow_nesting_depth;
        for (auto block = begin(blocks); block != end(blocks); ++block) {
          if (function.GetBlockDepth(*block) >
              control_flow_nesting_depth_limit) {
            return _.diag(SPV_ERROR_INVALID_CFG, _.FindDef((*block)->id()))
                   << "Maximum Control Flow nesting depth exceeded.";
          }
        }
      }
    }

    /// Structured control flow checks are only required for shader
    /// capabilities
    if (_.HasCapability(SpvCapabilityShader)) {
      if (auto error = StructuredControlFlowChecks(_, &function, back_edges))
        return error;
    }
  }
  return SPV_SUCCESS;
}

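// Per-instruction pass that records the CFG structure as instructions are
// visited: it registers block starts (OpLabel), merge declarations
// (OpLoopMerge / OpSelectionMerge) and block terminators, building the data
// that PerformCfgChecks later consumes.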
spv_result_t CfgPass(ValidationState_t& _, const Instruction* inst) {
  SpvOp opcode = inst->opcode();
  switch (opcode) {
    case SpvOpLabel:
      if (auto error = _.current_function().RegisterBlock(inst->id()))
        return error;

      // TODO(github:1661) This should be done in the
      // ValidationState::RegisterInstruction method but because of the order
      // of passes the OpLabel ends up not being part of the basic block it
      // starts.
      _.current_function().current_block()->set_label(inst);
      break;
    case SpvOpLoopMerge: {
      uint32_t merge_block = inst->GetOperandAs<uint32_t>(0);
      uint32_t continue_block = inst->GetOperandAs<uint32_t>(1);
      CFG_ASSERT(MergeBlockAssert, merge_block);

      if (auto error = _.current_function().RegisterLoopMerge(merge_block,
                                                              continue_block))
        return error;
    } break;
    case SpvOpSelectionMerge: {
      uint32_t merge_block = inst->GetOperandAs<uint32_t>(0);
      CFG_ASSERT(MergeBlockAssert, merge_block);

      if (auto error = _.current_function().RegisterSelectionMerge(merge_block))
        return error;
    } break;
    case SpvOpBranch: {
      uint32_t target = inst->GetOperandAs<uint32_t>(0);
      CFG_ASSERT(FirstBlockAssert, target);

      _.current_function().RegisterBlockEnd({target}, opcode);
    } break;
    case SpvOpBranchConditional: {
      uint32_t tlabel = inst->GetOperandAs<uint32_t>(1);
      uint32_t flabel = inst->GetOperandAs<uint32_t>(2);
      CFG_ASSERT(FirstBlockAssert, tlabel);
      CFG_ASSERT(FirstBlockAssert, flabel);

      _.current_function().RegisterBlockEnd({tlabel, flabel}, opcode);
    } break;
    case SpvOpSwitch: {
      std::vector<uint32_t> cases;
      for (size_t i = 1; i < inst->operands().size(); i += 2) {
        uint32_t target = inst->GetOperandAs<uint32_t>(i);
        CFG_ASSERT(FirstBlockAssert, target);
        cases.push_back(target);
      }
      _.current_function().RegisterBlockEnd({cases}, opcode);
    } break;
    case SpvOpReturn: {
      const uint32_t return_type = _.current_function().GetResultTypeId();
      const Instruction* return_type_inst = _.FindDef(return_type);
      assert(return_type_inst);
      if (return_type_inst->opcode() != SpvOpTypeVoid)
        return _.diag(SPV_ERROR_INVALID_CFG, inst)
               << "OpReturn can only be called from a function with void "
               << "return type.";
    }
    // Fallthrough.
    case SpvOpKill:
    case SpvOpReturnValue:
    case SpvOpUnreachable:
      _.current_function().RegisterBlockEnd(std::vector<uint32_t>(), opcode);
      if (opcode == SpvOpKill) {
        _.current_function().RegisterExecutionModelLimitation(
            SpvExecutionModelFragment,
            "OpKill requires Fragment execution model");
      }
      break;
    default:
      break;
  }
  return SPV_SUCCESS;
}

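// Per-instruction pass that dispatches control-flow instructions to their
// opcode-specific validators defined above.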
spv_result_t ControlFlowPass(ValidationState_t& _, const Instruction* inst) {
  switch (inst->opcode()) {
    case SpvOpPhi:
      if (auto error = ValidatePhi(_, inst)) return error;
      break;
    case SpvOpBranch:
      if (auto error = ValidateBranch(_, inst)) return error;
      break;
    case SpvOpBranchConditional:
      if (auto error = ValidateBranchConditional(_, inst)) return error;
      break;
    case SpvOpReturnValue:
      if (auto error = ValidateReturnValue(_, inst)) return error;
      break;
    case SpvOpSwitch:
      if (auto error = ValidateSwitch(_, inst)) return error;
      break;
    case SpvOpLoopMerge:
      if (auto error = ValidateLoopMerge(_, inst)) return error;
      break;
    default:
      break;
  }
  return SPV_SUCCESS;
}

}  // namespace val
}  // namespace spvtools