validate_atomics.cpp

// Copyright (c) 2017 Google Inc.
// Modifications Copyright (C) 2020 Advanced Micro Devices, Inc. All rights
// reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Validates correctness of atomic SPIR-V instructions.
#include <tuple>  // for std::tie

#include "source/opcode.h"
#include "source/spirv_target_env.h"
#include "source/util/bitutils.h"
#include "source/val/instruction.h"
#include "source/val/validate.h"
#include "source/val/validate_memory_semantics.h"
#include "source/val/validate_scopes.h"
#include "source/val/validation_state.h"

namespace {

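// Helpers used by AtomicsPass below: the first checks the storage classes
// permitted by the universal validation rules, the rest classify atomic
// opcodes by the kind of Result Type they may declare.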
bool IsStorageClassAllowedByUniversalRules(spv::StorageClass storage_class) {
  switch (storage_class) {
    case spv::StorageClass::Uniform:
    case spv::StorageClass::StorageBuffer:
    case spv::StorageClass::Workgroup:
    case spv::StorageClass::CrossWorkgroup:
    case spv::StorageClass::Generic:
    case spv::StorageClass::AtomicCounter:
    case spv::StorageClass::Image:
    case spv::StorageClass::Function:
    case spv::StorageClass::PhysicalStorageBuffer:
    case spv::StorageClass::TaskPayloadWorkgroupEXT:
      return true;
      break;
    default:
      return false;
  }
}

bool HasReturnType(spv::Op opcode) {
  switch (opcode) {
    case spv::Op::OpAtomicStore:
    case spv::Op::OpAtomicFlagClear:
      return false;
      break;
    default:
      return true;
  }
}

bool HasOnlyFloatReturnType(spv::Op opcode) {
  switch (opcode) {
    case spv::Op::OpAtomicFAddEXT:
    case spv::Op::OpAtomicFMinEXT:
    case spv::Op::OpAtomicFMaxEXT:
      return true;
      break;
    default:
      return false;
  }
}

bool HasOnlyIntReturnType(spv::Op opcode) {
  switch (opcode) {
    case spv::Op::OpAtomicCompareExchange:
    case spv::Op::OpAtomicCompareExchangeWeak:
    case spv::Op::OpAtomicIIncrement:
    case spv::Op::OpAtomicIDecrement:
    case spv::Op::OpAtomicIAdd:
    case spv::Op::OpAtomicISub:
    case spv::Op::OpAtomicSMin:
    case spv::Op::OpAtomicUMin:
    case spv::Op::OpAtomicSMax:
    case spv::Op::OpAtomicUMax:
    case spv::Op::OpAtomicAnd:
    case spv::Op::OpAtomicOr:
    case spv::Op::OpAtomicXor:
      return true;
      break;
    default:
      return false;
  }
}

bool HasIntOrFloatReturnType(spv::Op opcode) {
  switch (opcode) {
    case spv::Op::OpAtomicLoad:
    case spv::Op::OpAtomicExchange:
      return true;
      break;
    default:
      return false;
  }
}

bool HasOnlyBoolReturnType(spv::Op opcode) {
  switch (opcode) {
    case spv::Op::OpAtomicFlagTestAndSet:
      return true;
      break;
    default:
      return false;
  }
}

}  // namespace

namespace spvtools {
namespace val {

// Validates correctness of atomic instructions.
spv_result_t AtomicsPass(ValidationState_t& _, const Instruction* inst) {
  const spv::Op opcode = inst->opcode();
  switch (opcode) {
    case spv::Op::OpAtomicLoad:
    case spv::Op::OpAtomicStore:
    case spv::Op::OpAtomicExchange:
    case spv::Op::OpAtomicFAddEXT:
    case spv::Op::OpAtomicCompareExchange:
    case spv::Op::OpAtomicCompareExchangeWeak:
    case spv::Op::OpAtomicIIncrement:
    case spv::Op::OpAtomicIDecrement:
    case spv::Op::OpAtomicIAdd:
    case spv::Op::OpAtomicISub:
    case spv::Op::OpAtomicSMin:
    case spv::Op::OpAtomicUMin:
    case spv::Op::OpAtomicFMinEXT:
    case spv::Op::OpAtomicSMax:
    case spv::Op::OpAtomicUMax:
    case spv::Op::OpAtomicFMaxEXT:
    case spv::Op::OpAtomicAnd:
    case spv::Op::OpAtomicOr:
    case spv::Op::OpAtomicXor:
    case spv::Op::OpAtomicFlagTestAndSet:
    case spv::Op::OpAtomicFlagClear: {
      const uint32_t result_type = inst->type_id();
      // Validate the Result Type first so that later checks can simply
      // compare the pointee type against it (when the instruction has one).
      if (HasReturnType(opcode)) {
        if (HasOnlyFloatReturnType(opcode) &&
            (!(_.HasCapability(spv::Capability::AtomicFloat16VectorNV) &&
               _.IsFloat16Vector2Or4Type(result_type)) &&
             !_.IsFloatScalarType(result_type))) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": expected Result Type to be float scalar type";
        } else if (HasOnlyIntReturnType(opcode) &&
                   !_.IsIntScalarType(result_type)) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": expected Result Type to be integer scalar type";
        } else if (HasIntOrFloatReturnType(opcode) &&
                   !_.IsFloatScalarType(result_type) &&
                   !(opcode == spv::Op::OpAtomicExchange &&
                     _.HasCapability(spv::Capability::AtomicFloat16VectorNV) &&
                     _.IsFloat16Vector2Or4Type(result_type)) &&
                   !_.IsIntScalarType(result_type)) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": expected Result Type to be integer or float scalar type";
        } else if (HasOnlyBoolReturnType(opcode) &&
                   !_.IsBoolScalarType(result_type)) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": expected Result Type to be bool scalar type";
        }
      }
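
      // Operand indexing: when the instruction produces a result, operands 0
      // and 1 are the Result Type and Result <id>, so Pointer is operand 2;
      // OpAtomicStore and OpAtomicFlagClear have no result and start at 0.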
      uint32_t operand_index = HasReturnType(opcode) ? 2 : 0;
      const uint32_t pointer_type = _.GetOperandTypeId(inst, operand_index++);
      uint32_t data_type = 0;
      spv::StorageClass storage_class;
      if (!_.GetPointerTypeInfo(pointer_type, &data_type, &storage_class)) {
        return _.diag(SPV_ERROR_INVALID_DATA, inst)
               << spvOpcodeString(opcode)
               << ": expected Pointer to be a pointer type";
      }

      // If the pointer is an untyped pointer, get the data type elsewhere.
      if (data_type == 0) {
        switch (opcode) {
          case spv::Op::OpAtomicLoad:
          case spv::Op::OpAtomicExchange:
          case spv::Op::OpAtomicFAddEXT:
          case spv::Op::OpAtomicCompareExchange:
          case spv::Op::OpAtomicCompareExchangeWeak:
          case spv::Op::OpAtomicIIncrement:
          case spv::Op::OpAtomicIDecrement:
          case spv::Op::OpAtomicIAdd:
          case spv::Op::OpAtomicISub:
          case spv::Op::OpAtomicSMin:
          case spv::Op::OpAtomicUMin:
          case spv::Op::OpAtomicFMinEXT:
          case spv::Op::OpAtomicSMax:
          case spv::Op::OpAtomicUMax:
          case spv::Op::OpAtomicFMaxEXT:
          case spv::Op::OpAtomicAnd:
          case spv::Op::OpAtomicOr:
          case spv::Op::OpAtomicXor:
            data_type = inst->type_id();
            break;
          case spv::Op::OpAtomicFlagTestAndSet:
          case spv::Op::OpAtomicFlagClear:
            return _.diag(SPV_ERROR_INVALID_ID, inst)
                   << "Untyped pointers are not supported by atomic flag "
                      "instructions";
            break;
          case spv::Op::OpAtomicStore:
            data_type = _.FindDef(inst->GetOperandAs<uint32_t>(3))->type_id();
            break;
          default:
            break;
        }
      }

      // Can't use result_type because OpAtomicStore doesn't have a result.
      if (_.IsIntScalarType(data_type) && _.GetBitWidth(data_type) == 64 &&
          !_.HasCapability(spv::Capability::Int64Atomics)) {
        return _.diag(SPV_ERROR_INVALID_DATA, inst)
               << spvOpcodeString(opcode)
               << ": 64-bit atomics require the Int64Atomics capability";
      }

      // Validate storage class against universal rules.
      if (!IsStorageClassAllowedByUniversalRules(storage_class)) {
        return _.diag(SPV_ERROR_INVALID_DATA, inst)
               << spvOpcodeString(opcode)
               << ": storage class forbidden by universal validation rules.";
      }

      // Then Shader rules.
      if (_.HasCapability(spv::Capability::Shader)) {
        // Vulkan environment rule.
        if (spvIsVulkanEnv(_.context()->target_env)) {
          if ((storage_class != spv::StorageClass::Uniform) &&
              (storage_class != spv::StorageClass::StorageBuffer) &&
              (storage_class != spv::StorageClass::Workgroup) &&
              (storage_class != spv::StorageClass::Image) &&
              (storage_class != spv::StorageClass::PhysicalStorageBuffer) &&
              (storage_class != spv::StorageClass::TaskPayloadWorkgroupEXT)) {
            return _.diag(SPV_ERROR_INVALID_DATA, inst)
                   << _.VkErrorID(4686) << spvOpcodeString(opcode)
                   << ": Vulkan spec only allows storage classes for atomic to "
                      "be: Uniform, Workgroup, Image, StorageBuffer, "
                      "PhysicalStorageBuffer or TaskPayloadWorkgroupEXT.";
          }
        } else if (storage_class == spv::StorageClass::Function) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": Function storage class forbidden when the Shader "
                    "capability is declared.";
        }
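
        // Width-specific capability checks for the floating-point atomics:
        // 2- and 4-component 16-bit float vector forms are gated on
        // AtomicFloat16VectorNV, while the scalar forms need the matching
        // AtomicFloat*AddEXT / AtomicFloat*MinMaxEXT capability.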
        if (opcode == spv::Op::OpAtomicFAddEXT) {
          // The Result Type has already been checked to be a float type above.
          if (_.GetBitWidth(result_type) == 16) {
            if (_.IsFloat16Vector2Or4Type(result_type)) {
              if (!_.HasCapability(spv::Capability::AtomicFloat16VectorNV))
                return _.diag(SPV_ERROR_INVALID_DATA, inst)
                       << spvOpcodeString(opcode)
                       << ": float vector atomics require the "
                          "AtomicFloat16VectorNV capability";
            } else {
              if (!_.HasCapability(spv::Capability::AtomicFloat16AddEXT)) {
                return _.diag(SPV_ERROR_INVALID_DATA, inst)
                       << spvOpcodeString(opcode)
                       << ": float add atomics require the AtomicFloat16AddEXT "
                          "capability";
              }
            }
          }
          if ((_.GetBitWidth(result_type) == 32) &&
              (!_.HasCapability(spv::Capability::AtomicFloat32AddEXT))) {
            return _.diag(SPV_ERROR_INVALID_DATA, inst)
                   << spvOpcodeString(opcode)
                   << ": float add atomics require the AtomicFloat32AddEXT "
                      "capability";
          }
          if ((_.GetBitWidth(result_type) == 64) &&
              (!_.HasCapability(spv::Capability::AtomicFloat64AddEXT))) {
            return _.diag(SPV_ERROR_INVALID_DATA, inst)
                   << spvOpcodeString(opcode)
                   << ": float add atomics require the AtomicFloat64AddEXT "
                      "capability";
          }
        } else if (opcode == spv::Op::OpAtomicFMinEXT ||
                   opcode == spv::Op::OpAtomicFMaxEXT) {
          if (_.GetBitWidth(result_type) == 16) {
            if (_.IsFloat16Vector2Or4Type(result_type)) {
              if (!_.HasCapability(spv::Capability::AtomicFloat16VectorNV))
                return _.diag(SPV_ERROR_INVALID_DATA, inst)
                       << spvOpcodeString(opcode)
                       << ": float vector atomics require the "
                          "AtomicFloat16VectorNV capability";
            } else {
              if (!_.HasCapability(spv::Capability::AtomicFloat16MinMaxEXT)) {
                return _.diag(SPV_ERROR_INVALID_DATA, inst)
                       << spvOpcodeString(opcode)
                       << ": float min/max atomics require the "
                          "AtomicFloat16MinMaxEXT capability";
              }
            }
          }
          if ((_.GetBitWidth(result_type) == 32) &&
              (!_.HasCapability(spv::Capability::AtomicFloat32MinMaxEXT))) {
            return _.diag(SPV_ERROR_INVALID_DATA, inst)
                   << spvOpcodeString(opcode)
                   << ": float min/max atomics require the "
                      "AtomicFloat32MinMaxEXT capability";
          }
          if ((_.GetBitWidth(result_type) == 64) &&
              (!_.HasCapability(spv::Capability::AtomicFloat64MinMaxEXT))) {
            return _.diag(SPV_ERROR_INVALID_DATA, inst)
                   << spvOpcodeString(opcode)
                   << ": float min/max atomics require the "
                      "AtomicFloat64MinMaxEXT capability";
          }
        }
      }

      // And finally OpenCL environment rules.
      if (spvIsOpenCLEnv(_.context()->target_env)) {
        if ((storage_class != spv::StorageClass::Function) &&
            (storage_class != spv::StorageClass::Workgroup) &&
            (storage_class != spv::StorageClass::CrossWorkgroup) &&
            (storage_class != spv::StorageClass::Generic)) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": storage class must be Function, Workgroup, "
                    "CrossWorkgroup or Generic in the OpenCL environment.";
        }

        if (_.context()->target_env == SPV_ENV_OPENCL_1_2) {
          if (storage_class == spv::StorageClass::Generic) {
            return _.diag(SPV_ERROR_INVALID_DATA, inst)
                   << "Storage class cannot be Generic in the OpenCL 1.2 "
                      "environment";
          }
        }
      }

      // Some opcodes do not require the pointee type to match the Result
      // Type, so they are checked separately here.
      if (opcode == spv::Op::OpAtomicFlagTestAndSet ||
          opcode == spv::Op::OpAtomicFlagClear) {
        if (!_.IsIntScalarType(data_type) || _.GetBitWidth(data_type) != 32) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": expected Pointer to point to a value of 32-bit integer "
                    "type";
        }
      } else if (opcode == spv::Op::OpAtomicStore) {
        if (!_.IsFloatScalarType(data_type) && !_.IsIntScalarType(data_type)) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": expected Pointer to be a pointer to integer or float "
                 << "scalar type";
        }
      } else if (data_type != result_type) {
        return _.diag(SPV_ERROR_INVALID_DATA, inst)
               << spvOpcodeString(opcode)
               << ": expected Pointer to point to a value of type Result "
                  "Type";
      }
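
      // Every atomic instruction takes a Memory Scope and at least one Memory
      // Semantics operand after the Pointer; the compare-exchange forms take
      // a second (Unequal) Memory Semantics operand.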
      auto memory_scope = inst->GetOperandAs<const uint32_t>(operand_index++);
      if (auto error = ValidateMemoryScope(_, inst, memory_scope)) {
        return error;
      }

      const auto equal_semantics_index = operand_index++;
      if (auto error = ValidateMemorySemantics(_, inst, equal_semantics_index,
                                               memory_scope))
        return error;

      if (opcode == spv::Op::OpAtomicCompareExchange ||
          opcode == spv::Op::OpAtomicCompareExchangeWeak) {
        const auto unequal_semantics_index = operand_index++;
        if (auto error = ValidateMemorySemantics(
                _, inst, unequal_semantics_index, memory_scope))
          return error;

        // Volatile bits must match for equal and unequal semantics. Previous
        // checks guarantee they are 32-bit constants, but we need to recheck
        // whether they are evaluatable constants.
        bool is_int32 = false;
        bool is_equal_const = false;
        bool is_unequal_const = false;
        uint32_t equal_value = 0;
        uint32_t unequal_value = 0;
        std::tie(is_int32, is_equal_const, equal_value) = _.EvalInt32IfConst(
            inst->GetOperandAs<uint32_t>(equal_semantics_index));
        std::tie(is_int32, is_unequal_const, unequal_value) =
            _.EvalInt32IfConst(
                inst->GetOperandAs<uint32_t>(unequal_semantics_index));
        if (is_equal_const && is_unequal_const &&
            ((equal_value & uint32_t(spv::MemorySemanticsMask::Volatile)) ^
             (unequal_value & uint32_t(spv::MemorySemanticsMask::Volatile)))) {
          return _.diag(SPV_ERROR_INVALID_ID, inst)
                 << "Volatile mask setting must match for Equal and Unequal "
                    "memory semantics";
        }
      }
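
      // Finally validate the remaining value operands: OpAtomicStore's Value
      // is operand 3, the other value-carrying atomics take Value as the next
      // operand, and compare-exchange also takes a Comparator of Result Type.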
      if (opcode == spv::Op::OpAtomicStore) {
        const uint32_t value_type = _.GetOperandTypeId(inst, 3);
        if (value_type != data_type) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": expected Value type and the type pointed to by "
                    "Pointer to be the same";
        }
      } else if (opcode != spv::Op::OpAtomicLoad &&
                 opcode != spv::Op::OpAtomicIIncrement &&
                 opcode != spv::Op::OpAtomicIDecrement &&
                 opcode != spv::Op::OpAtomicFlagTestAndSet &&
                 opcode != spv::Op::OpAtomicFlagClear) {
        const uint32_t value_type = _.GetOperandTypeId(inst, operand_index++);
        if (value_type != result_type) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": expected Value to be of type Result Type";
        }
      }

      if (opcode == spv::Op::OpAtomicCompareExchange ||
          opcode == spv::Op::OpAtomicCompareExchangeWeak) {
        const uint32_t comparator_type =
            _.GetOperandTypeId(inst, operand_index++);
        if (comparator_type != result_type) {
          return _.diag(SPV_ERROR_INVALID_DATA, inst)
                 << spvOpcodeString(opcode)
                 << ": expected Comparator to be of type Result Type";
        }
      }

      break;
    }

    default:
      break;
  }

  return SPV_SUCCESS;
}

}  // namespace val
}  // namespace spvtools