// val_atomics_test.cpp — SPIR-V atomic-instruction validation tests.
// (Pasted line-number gutter / viewer residue removed.)
// Copyright (c) 2017 Google Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
  14. #include <sstream>
  15. #include <string>
  16. #include "gmock/gmock.h"
  17. #include "test/unit_spirv.h"
  18. #include "test/val/val_fixtures.h"
  19. namespace spvtools {
  20. namespace val {
  21. namespace {
  22. using ::testing::HasSubstr;
  23. using ::testing::Not;
  24. using ValidateAtomics = spvtest::ValidateBase<bool>;
  25. std::string GenerateShaderCodeImpl(
  26. const std::string& body, const std::string& capabilities_and_extensions,
  27. const std::string& definitions, const std::string& memory_model,
  28. const std::string& execution) {
  29. std::ostringstream ss;
  30. ss << R"(
  31. OpCapability Shader
  32. )";
  33. ss << capabilities_and_extensions;
  34. ss << "OpMemoryModel Logical " << memory_model << "\n";
  35. ss << execution;
  36. ss << R"(
  37. %void = OpTypeVoid
  38. %func = OpTypeFunction %void
  39. %bool = OpTypeBool
  40. %f32 = OpTypeFloat 32
  41. %u32 = OpTypeInt 32 0
  42. %f32vec4 = OpTypeVector %f32 4
  43. %f32_0 = OpConstant %f32 0
  44. %f32_1 = OpConstant %f32 1
  45. %u32_0 = OpConstant %u32 0
  46. %u32_1 = OpConstant %u32 1
  47. %f32vec4_0000 = OpConstantComposite %f32vec4 %f32_0 %f32_0 %f32_0 %f32_0
  48. %cross_device = OpConstant %u32 0
  49. %device = OpConstant %u32 1
  50. %workgroup = OpConstant %u32 2
  51. %subgroup = OpConstant %u32 3
  52. %invocation = OpConstant %u32 4
  53. %queuefamily = OpConstant %u32 5
  54. %relaxed = OpConstant %u32 0
  55. %acquire = OpConstant %u32 2
  56. %release = OpConstant %u32 4
  57. %acquire_release = OpConstant %u32 8
  58. %acquire_and_release = OpConstant %u32 6
  59. %sequentially_consistent = OpConstant %u32 16
  60. %acquire_release_uniform_workgroup = OpConstant %u32 328
  61. %f32_ptr = OpTypePointer Workgroup %f32
  62. %f32_var = OpVariable %f32_ptr Workgroup
  63. %u32_ptr = OpTypePointer Workgroup %u32
  64. %u32_var = OpVariable %u32_ptr Workgroup
  65. %f32vec4_ptr = OpTypePointer Workgroup %f32vec4
  66. %f32vec4_var = OpVariable %f32vec4_ptr Workgroup
  67. %f32_ptr_function = OpTypePointer Function %f32
  68. )";
  69. ss << definitions;
  70. ss << R"(
  71. %main = OpFunction %void None %func
  72. %main_entry = OpLabel
  73. )";
  74. ss << body;
  75. ss << R"(
  76. OpReturn
  77. OpFunctionEnd)";
  78. return ss.str();
  79. }
  80. std::string GenerateShaderCode(
  81. const std::string& body,
  82. const std::string& capabilities_and_extensions = "",
  83. const std::string& memory_model = "GLSL450") {
  84. const std::string execution = R"(
  85. OpEntryPoint Fragment %main "main"
  86. OpExecutionMode %main OriginUpperLeft
  87. )";
  88. const std::string defintions = R"(
  89. %u64 = OpTypeInt 64 0
  90. %s64 = OpTypeInt 64 1
  91. %u64_1 = OpConstant %u64 1
  92. %s64_1 = OpConstant %s64 1
  93. %u64_ptr = OpTypePointer Workgroup %u64
  94. %s64_ptr = OpTypePointer Workgroup %s64
  95. %u64_var = OpVariable %u64_ptr Workgroup
  96. %s64_var = OpVariable %s64_ptr Workgroup
  97. )";
  98. return GenerateShaderCodeImpl(
  99. body, "OpCapability Int64\n" + capabilities_and_extensions, defintions,
  100. memory_model, execution);
  101. }
  102. std::string GenerateShaderComputeCode(
  103. const std::string& body,
  104. const std::string& capabilities_and_extensions = "",
  105. const std::string& memory_model = "GLSL450") {
  106. const std::string execution = R"(
  107. OpEntryPoint GLCompute %main "main"
  108. OpExecutionMode %main LocalSize 32 1 1
  109. )";
  110. const std::string defintions = R"(
  111. %u64 = OpTypeInt 64 0
  112. %s64 = OpTypeInt 64 1
  113. %u64_1 = OpConstant %u64 1
  114. %s64_1 = OpConstant %s64 1
  115. %u64_ptr = OpTypePointer Workgroup %u64
  116. %s64_ptr = OpTypePointer Workgroup %s64
  117. %u64_var = OpVariable %u64_ptr Workgroup
  118. %s64_var = OpVariable %s64_ptr Workgroup
  119. )";
  120. return GenerateShaderCodeImpl(
  121. body, "OpCapability Int64\n" + capabilities_and_extensions, defintions,
  122. memory_model, execution);
  123. }
  124. std::string GenerateKernelCode(
  125. const std::string& body,
  126. const std::string& capabilities_and_extensions = "") {
  127. std::ostringstream ss;
  128. ss << R"(
  129. OpCapability Addresses
  130. OpCapability Kernel
  131. OpCapability Linkage
  132. OpCapability Int64
  133. )";
  134. ss << capabilities_and_extensions;
  135. ss << R"(
  136. OpMemoryModel Physical32 OpenCL
  137. %void = OpTypeVoid
  138. %func = OpTypeFunction %void
  139. %bool = OpTypeBool
  140. %f32 = OpTypeFloat 32
  141. %u32 = OpTypeInt 32 0
  142. %u64 = OpTypeInt 64 0
  143. %f32vec4 = OpTypeVector %f32 4
  144. %f32_0 = OpConstant %f32 0
  145. %f32_1 = OpConstant %f32 1
  146. %u32_0 = OpConstant %u32 0
  147. %u32_1 = OpConstant %u32 1
  148. %u64_1 = OpConstant %u64 1
  149. %f32vec4_0000 = OpConstantComposite %f32vec4 %f32_0 %f32_0 %f32_0 %f32_0
  150. %cross_device = OpConstant %u32 0
  151. %device = OpConstant %u32 1
  152. %workgroup = OpConstant %u32 2
  153. %subgroup = OpConstant %u32 3
  154. %invocation = OpConstant %u32 4
  155. %relaxed = OpConstant %u32 0
  156. %acquire = OpConstant %u32 2
  157. %release = OpConstant %u32 4
  158. %acquire_release = OpConstant %u32 8
  159. %acquire_and_release = OpConstant %u32 6
  160. %sequentially_consistent = OpConstant %u32 16
  161. %acquire_release_uniform_workgroup = OpConstant %u32 328
  162. %acquire_release_atomic_counter_workgroup = OpConstant %u32 1288
  163. %f32_ptr = OpTypePointer Workgroup %f32
  164. %f32_var = OpVariable %f32_ptr Workgroup
  165. %u32_ptr = OpTypePointer Workgroup %u32
  166. %u32_var = OpVariable %u32_ptr Workgroup
  167. %u64_ptr = OpTypePointer Workgroup %u64
  168. %u64_var = OpVariable %u64_ptr Workgroup
  169. %f32vec4_ptr = OpTypePointer Workgroup %f32vec4
  170. %f32vec4_var = OpVariable %f32vec4_ptr Workgroup
  171. %f32_ptr_function = OpTypePointer Function %f32
  172. %f32_ptr_uniformconstant = OpTypePointer UniformConstant %f32
  173. %f32_uc_var = OpVariable %f32_ptr_uniformconstant UniformConstant
  174. %f32_ptr_image = OpTypePointer Image %f32
  175. %f32_im_var = OpVariable %f32_ptr_image Image
  176. %main = OpFunction %void None %func
  177. %main_entry = OpLabel
  178. )";
  179. ss << body;
  180. ss << R"(
  181. OpReturn
  182. OpFunctionEnd)";
  183. return ss.str();
  184. }
  185. TEST_F(ValidateAtomics, AtomicLoadShaderSuccess) {
  186. const std::string body = R"(
  187. %val1 = OpAtomicLoad %u32 %u32_var %device %relaxed
  188. %val2 = OpAtomicLoad %u32 %u32_var %workgroup %acquire
  189. %val3 = OpAtomicLoad %u64 %u64_var %subgroup %sequentially_consistent
  190. )";
  191. CompileSuccessfully(GenerateShaderCode(body));
  192. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
  193. }
  194. TEST_F(ValidateAtomics, AtomicLoadKernelSuccess) {
  195. const std::string body = R"(
  196. %val1 = OpAtomicLoad %f32 %f32_var %device %relaxed
  197. %val2 = OpAtomicLoad %u32 %u32_var %workgroup %sequentially_consistent
  198. %val3 = OpAtomicLoad %u64 %u64_var %subgroup %acquire
  199. )";
  200. CompileSuccessfully(GenerateKernelCode(body));
  201. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
  202. }
  203. TEST_F(ValidateAtomics, AtomicLoadInt32VulkanSuccess) {
  204. const std::string body = R"(
  205. %val1 = OpAtomicLoad %u32 %u32_var %device %relaxed
  206. %val2 = OpAtomicLoad %u32 %u32_var %workgroup %acquire
  207. )";
  208. CompileSuccessfully(GenerateShaderComputeCode(body), SPV_ENV_VULKAN_1_0);
  209. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  210. }
// OpAtomicIAdd with a float Result Type must be rejected: the result of an
// integer atomic must be an integer scalar.
TEST_F(ValidateAtomics, AtomicAddIntVulkanWrongType1) {
  const std::string body = R"(
%val1 = OpAtomicIAdd %f32 %f32_var %device %relaxed %f32_1
)";
  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicIAdd: "
                        "expected Result Type to be int scalar type"));
}
// OpAtomicIAdd with a vector Result Type must also be rejected.
// NOTE(review): this path expects "integer scalar type" while the scalar
// float path expects "int scalar type" — both substrings mirror the
// validator's current wording for each case.
TEST_F(ValidateAtomics, AtomicAddIntVulkanWrongType2) {
  const std::string body = R"(
%val1 = OpAtomicIAdd %f32vec4 %f32vec4_var %device %relaxed %f32_1
)";
  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicIAdd: "
                        "expected Result Type to be integer scalar type"));
}
// OpAtomicFAddEXT without any AtomicFloat*AddEXT capability declared is a
// capability error, reported before any operand checking.
TEST_F(ValidateAtomics, AtomicAddFloatVulkan) {
  const std::string body = R"(
%val1 = OpAtomicFAddEXT %f32 %f32_var %device %relaxed %f32_1
)";
  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_CAPABILITY, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("Opcode AtomicFAddEXT requires one of these capabilities: "
                "AtomicFloat32AddEXT AtomicFloat64AddEXT"));
}
// With AtomicFloat32AddEXT declared, OpAtomicFAddEXT must still reject any
// non-float-scalar Result Type. Three variants: float vector, 32-bit int,
// and 64-bit int — all produce the same "float scalar type" diagnostic.
TEST_F(ValidateAtomics, AtomicAddFloatVulkanWrongType1) {
  const std::string body = R"(
%val1 = OpAtomicFAddEXT %f32vec4 %f32vec4_var %device %relaxed %f32_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat32AddEXT
OpExtension "SPV_EXT_shader_atomic_float_add"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFAddEXT: "
                        "expected Result Type to be float scalar type"));
}

// As above, but with a 32-bit unsigned integer Result Type.
TEST_F(ValidateAtomics, AtomicAddFloatVulkanWrongType2) {
  const std::string body = R"(
%val1 = OpAtomicFAddEXT %u32 %u32_var %device %relaxed %u32_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat32AddEXT
OpExtension "SPV_EXT_shader_atomic_float_add"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFAddEXT: "
                        "expected Result Type to be float scalar type"));
}

// As above, but with a 64-bit unsigned integer Result Type.
TEST_F(ValidateAtomics, AtomicAddFloatVulkanWrongType3) {
  const std::string body = R"(
%val1 = OpAtomicFAddEXT %u64 %u64_var %device %relaxed %u64_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat32AddEXT
OpExtension "SPV_EXT_shader_atomic_float_add"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFAddEXT: "
                        "expected Result Type to be float scalar type"));
}
// Declaring only the 64-bit capability (AtomicFloat64AddEXT) is not enough
// for a 32-bit float add: the bit width must match the declared capability.
TEST_F(ValidateAtomics, AtomicAddFloatVulkanWrongCapability) {
  const std::string body = R"(
%val1 = OpAtomicFAddEXT %f32 %f32_var %device %relaxed %f32_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat64AddEXT
OpExtension "SPV_EXT_shader_atomic_float_add"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFAddEXT: float add atomics "
                        "require the AtomicFloat32AddEXT capability"));
}
// With the matching AtomicFloat32AddEXT capability and extension declared,
// a 32-bit OpAtomicFAddEXT validates cleanly under Vulkan 1.0.
TEST_F(ValidateAtomics, AtomicAddFloatVulkanSuccess) {
  const std::string body = R"(
%val1 = OpAtomicFAddEXT %f32 %f32_var %device %relaxed %f32_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat32AddEXT
OpExtension "SPV_EXT_shader_atomic_float_add"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}
// Plain float atomic loads (no float-add extension required) are valid
// under Vulkan 1.0.
TEST_F(ValidateAtomics, AtomicLoadFloatVulkan) {
  const std::string body = R"(
%val1 = OpAtomicLoad %f32 %f32_var %device %relaxed
%val2 = OpAtomicLoad %f32 %f32_var %workgroup %acquire
)";
  CompileSuccessfully(GenerateShaderComputeCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}
  317. TEST_F(ValidateAtomics, AtomicStoreFloatVulkan) {
  318. const std::string body = R"(
  319. OpAtomicStore %f32_var %device %relaxed %f32_1
  320. )";
  321. CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  322. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  323. }
  324. TEST_F(ValidateAtomics, AtomicExchangeFloatVulkan) {
  325. const std::string body = R"(
  326. %val2 = OpAtomicExchange %f32 %f32_var %device %relaxed %f32_0
  327. )";
  328. CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  329. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  330. }
// 64-bit atomic loads validate under Vulkan when Int64Atomics is declared.
TEST_F(ValidateAtomics, AtomicLoadInt64WithCapabilityVulkanSuccess) {
  const std::string body = R"(
%val1 = OpAtomicLoad %u64 %u64_var %device %relaxed
%val2 = OpAtomicLoad %u64 %u64_var %workgroup %acquire
)";
  CompileSuccessfully(
      GenerateShaderComputeCode(body, "OpCapability Int64Atomics\n"),
      SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}
// Without Int64Atomics, the same 64-bit atomic loads must be rejected.
TEST_F(ValidateAtomics, AtomicLoadInt64WithoutCapabilityVulkan) {
  const std::string body = R"(
%val1 = OpAtomicLoad %u64 %u64_var %device %relaxed
%val2 = OpAtomicLoad %u64 %u64_var %workgroup %acquire
)";
  CompileSuccessfully(GenerateShaderComputeCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("64-bit atomics require the Int64Atomics capability"));
}
// OpenCL permits atomics on Function-storage-class pointers, unlike the
// shader environments tested below.
TEST_F(ValidateAtomics, AtomicStoreOpenCLFunctionPointerStorageTypeSuccess) {
  const std::string body = R"(
%f32_var_function = OpVariable %f32_ptr_function Function
OpAtomicStore %f32_var_function %device %relaxed %f32_1
)";
  CompileSuccessfully(GenerateKernelCode(body), SPV_ENV_OPENCL_1_2);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_OPENCL_1_2));
}
// Vulkan restricts atomic pointers to specific storage classes; Function
// storage is rejected with VUID-StandaloneSpirv-None-04686.
TEST_F(ValidateAtomics, AtomicStoreVulkanFunctionPointerStorageType) {
  const std::string body = R"(
%f32_var_function = OpVariable %f32_ptr_function Function
OpAtomicStore %f32_var_function %device %relaxed %f32_1
)";
  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              AnyVUID("VUID-StandaloneSpirv-None-04686"));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicStore: Vulkan spec only allows storage classes for "
                "atomic to be: Uniform, Workgroup, Image, StorageBuffer, or "
                "PhysicalStorageBuffer."));
}
// In the generic (non-Vulkan) shader environment, Function-storage atomics
// are also rejected, with a Shader-capability-specific message.
TEST_F(ValidateAtomics, AtomicStoreFunctionPointerStorageType) {
  const std::string body = R"(
%f32_var_function = OpVariable %f32_ptr_function Function
OpAtomicStore %f32_var_function %device %relaxed %f32_1
)";
  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicStore: Function storage class forbidden when "
                        "the Shader capability is declared."));
}
  385. // TODO([email protected]): the corresponding check fails Vulkan CTS,
  386. // reenable once fixed.
  387. TEST_F(ValidateAtomics, DISABLED_AtomicLoadVulkanSubgroup) {
  388. const std::string body = R"(
  389. %val1 = OpAtomicLoad %u32 %u32_var %subgroup %acquire
  390. )";
  391. CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  392. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  393. EXPECT_THAT(getDiagnosticString(),
  394. HasSubstr("AtomicLoad: in Vulkan environment memory scope is "
  395. "limited to Device, Workgroup and Invocation"));
  396. }
// Vulkan forbids Release-flavoured memory semantics on OpAtomicLoad
// (VUID-StandaloneSpirv-OpAtomicLoad-04731). Three variants: Release,
// AcquireRelease and SequentiallyConsistent, all sharing one diagnostic.
TEST_F(ValidateAtomics, AtomicLoadVulkanRelease) {
  const std::string body = R"(
%val1 = OpAtomicLoad %u32 %u32_var %workgroup %release
)";
  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              AnyVUID("VUID-StandaloneSpirv-OpAtomicLoad-04731"));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("Vulkan spec disallows OpAtomicLoad with Memory Semantics "
                "Release, AcquireRelease and SequentiallyConsistent"));
}

// AcquireRelease variant of the check above.
TEST_F(ValidateAtomics, AtomicLoadVulkanAcquireRelease) {
  const std::string body = R"(
%val1 = OpAtomicLoad %u32 %u32_var %workgroup %acquire_release
)";
  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              AnyVUID("VUID-StandaloneSpirv-OpAtomicLoad-04731"));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("Vulkan spec disallows OpAtomicLoad with Memory Semantics "
                "Release, AcquireRelease and SequentiallyConsistent"));
}

// SequentiallyConsistent variant of the check above.
TEST_F(ValidateAtomics, AtomicLoadVulkanSequentiallyConsistent) {
  const std::string body = R"(
%val1 = OpAtomicLoad %u32 %u32_var %workgroup %sequentially_consistent
)";
  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              AnyVUID("VUID-StandaloneSpirv-OpAtomicLoad-04731"));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("Vulkan spec disallows OpAtomicLoad with Memory Semantics "
                "Release, AcquireRelease and SequentiallyConsistent"));
}
  436. TEST_F(ValidateAtomics, AtomicLoadShaderFloat) {
  437. const std::string body = R"(
  438. %val1 = OpAtomicLoad %f32 %f32_var %device %relaxed
  439. )";
  440. CompileSuccessfully(GenerateShaderCode(body));
  441. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
  442. }
// A single 64-bit atomic load without Int64Atomics fails under Vulkan with
// an instruction-specific diagnostic.
TEST_F(ValidateAtomics, AtomicLoadVulkanInt64) {
  const std::string body = R"(
%val1 = OpAtomicLoad %u64 %u64_var %device %relaxed
)";
  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr(
          "AtomicLoad: 64-bit atomics require the Int64Atomics capability"));
}
// With Int64Atomics declared, the full set of 64-bit atomic ops (min/max,
// bitwise, add, exchange, compare-exchange, load, store) validates on both
// unsigned and signed 64-bit types.
TEST_F(ValidateAtomics, VK_KHR_shader_atomic_int64Success) {
  const std::string body = R"(
%val1 = OpAtomicUMin %u64 %u64_var %device %relaxed %u64_1
%val2 = OpAtomicUMax %u64 %u64_var %device %relaxed %u64_1
%val3 = OpAtomicSMin %u64 %u64_var %device %relaxed %u64_1
%val4 = OpAtomicSMax %u64 %u64_var %device %relaxed %u64_1
%val5 = OpAtomicAnd %u64 %u64_var %device %relaxed %u64_1
%val6 = OpAtomicOr %u64 %u64_var %device %relaxed %u64_1
%val7 = OpAtomicXor %u64 %u64_var %device %relaxed %u64_1
%val8 = OpAtomicIAdd %u64 %u64_var %device %relaxed %u64_1
%val9 = OpAtomicExchange %u64 %u64_var %device %relaxed %u64_1
%val10 = OpAtomicCompareExchange %u64 %u64_var %device %relaxed %relaxed %u64_1 %u64_1
%val11 = OpAtomicUMin %s64 %s64_var %device %relaxed %s64_1
%val12 = OpAtomicUMax %s64 %s64_var %device %relaxed %s64_1
%val13 = OpAtomicSMin %s64 %s64_var %device %relaxed %s64_1
%val14 = OpAtomicSMax %s64 %s64_var %device %relaxed %s64_1
%val15 = OpAtomicAnd %s64 %s64_var %device %relaxed %s64_1
%val16 = OpAtomicOr %s64 %s64_var %device %relaxed %s64_1
%val17 = OpAtomicXor %s64 %s64_var %device %relaxed %s64_1
%val18 = OpAtomicIAdd %s64 %s64_var %device %relaxed %s64_1
%val19 = OpAtomicExchange %s64 %s64_var %device %relaxed %s64_1
%val20 = OpAtomicCompareExchange %s64 %s64_var %device %relaxed %relaxed %s64_1 %s64_1
%val21 = OpAtomicLoad %u64 %u64_var %device %relaxed
%val22 = OpAtomicLoad %s64 %s64_var %device %relaxed
OpAtomicStore %u64_var %device %relaxed %u64_1
OpAtomicStore %s64_var %device %relaxed %s64_1
)";
  CompileSuccessfully(GenerateShaderCode(body, "OpCapability Int64Atomics\n"),
                      SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}
// Any 64-bit atomic op (here OpAtomicUMin) without Int64Atomics is rejected.
TEST_F(ValidateAtomics, VK_KHR_shader_atomic_int64MissingCapability) {
  const std::string body = R"(
%val1 = OpAtomicUMin %u64 %u64_var %device %relaxed %u64_1
)";
  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr(
          "AtomicUMin: 64-bit atomics require the Int64Atomics capability"));
}
// OpAtomicLoad Result Type must be an int or float scalar; a vector type
// is rejected even in the kernel environment.
TEST_F(ValidateAtomics, AtomicLoadWrongResultType) {
  const std::string body = R"(
%val1 = OpAtomicLoad %f32vec4 %f32vec4_var %device %relaxed
)";
  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicLoad: "
                        "expected Result Type to be int or float scalar type"));
}
// Passing a pointer *type* (%f32_ptr) where a pointer *value* is expected
// is caught as an invalid-ID error; the diagnostic names the type operand.
TEST_F(ValidateAtomics, AtomicLoadWrongPointerType) {
  const std::string body = R"(
%val1 = OpAtomicLoad %f32 %f32_ptr %device %relaxed
)";
  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_ID, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("Operand 27[%_ptr_Workgroup_float] cannot be a type"));
}
// The pointee type must match Result Type: loading %u32 through a float
// pointer is rejected.
TEST_F(ValidateAtomics, AtomicLoadWrongPointerDataType) {
  const std::string body = R"(
%val1 = OpAtomicLoad %u32 %f32_var %device %relaxed
)";
  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicLoad: "
                "expected Pointer to point to a value of type Result Type"));
}
// The Memory Scope operand must be a 32-bit int; a float constant fails.
TEST_F(ValidateAtomics, AtomicLoadWrongScopeType) {
  const std::string body = R"(
%val1 = OpAtomicLoad %f32 %f32_var %f32_1 %relaxed
)";
  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicLoad: expected scope to be a 32-bit int"));
}
// The Memory Semantics operand must also be a 32-bit int; a 64-bit
// constant (%u64_1) fails.
TEST_F(ValidateAtomics, AtomicLoadWrongMemorySemanticsType) {
  const std::string body = R"(
%val1 = OpAtomicLoad %f32 %f32_var %device %u64_1
)";
  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicLoad: expected Memory Semantics to be a 32-bit int"));
}
// OpAtomicStore on float and int variables validates cleanly in a Kernel
// (OpenCL-style) module.
TEST_F(ValidateAtomics, AtomicStoreKernelSuccess) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
OpAtomicStore %u32_var %subgroup %release %u32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}

// OpAtomicStore with Release and SequentiallyConsistent semantics validates
// cleanly in a Shader module (universal environment).
TEST_F(ValidateAtomics, AtomicStoreShaderSuccess) {
  const std::string body = R"(
OpAtomicStore %u32_var %device %release %u32_1
OpAtomicStore %u32_var %subgroup %sequentially_consistent %u32_1
)";

  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}

// Release semantics on OpAtomicStore are allowed under Vulkan 1.0 rules.
TEST_F(ValidateAtomics, AtomicStoreVulkanSuccess) {
  const std::string body = R"(
OpAtomicStore %u32_var %device %release %u32_1
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}

// Vulkan forbids Acquire semantics on OpAtomicStore
// (VUID-StandaloneSpirv-OpAtomicStore-04730).
TEST_F(ValidateAtomics, AtomicStoreVulkanAcquire) {
  const std::string body = R"(
OpAtomicStore %u32_var %device %acquire %u32_1
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              AnyVUID("VUID-StandaloneSpirv-OpAtomicStore-04730"));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("Vulkan spec disallows OpAtomicStore with Memory Semantics "
                "Acquire, AcquireRelease and SequentiallyConsistent"));
}

// Vulkan forbids AcquireRelease semantics on OpAtomicStore (same VUID).
TEST_F(ValidateAtomics, AtomicStoreVulkanAcquireRelease) {
  const std::string body = R"(
OpAtomicStore %u32_var %device %acquire_release %u32_1
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              AnyVUID("VUID-StandaloneSpirv-OpAtomicStore-04730"));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("Vulkan spec disallows OpAtomicStore with Memory Semantics "
                "Acquire, AcquireRelease and SequentiallyConsistent"));
}

// Vulkan forbids SequentiallyConsistent semantics on OpAtomicStore
// (same VUID).
TEST_F(ValidateAtomics, AtomicStoreVulkanSequentiallyConsistent) {
  const std::string body = R"(
OpAtomicStore %u32_var %device %sequentially_consistent %u32_1
)";

  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              AnyVUID("VUID-StandaloneSpirv-OpAtomicStore-04730"));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("Vulkan spec disallows OpAtomicStore with Memory Semantics "
                "Acquire, AcquireRelease and SequentiallyConsistent"));
}
// OpAtomicStore: the Pointer operand must be an OpTypePointer value;
// a plain float constant is rejected.
TEST_F(ValidateAtomics, AtomicStoreWrongPointerType) {
  const std::string body = R"(
OpAtomicStore %f32_1 %device %relaxed %f32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicStore: expected Pointer to be of type OpTypePointer"));
}

// OpAtomicStore: the pointee must be an int or float scalar; a vec4
// pointee is rejected.
TEST_F(ValidateAtomics, AtomicStoreWrongPointerDataType) {
  const std::string body = R"(
OpAtomicStore %f32vec4_var %device %relaxed %f32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicStore: "
                "expected Pointer to be a pointer to int or float scalar "
                "type"));
}

// OpAtomicStore: an Image storage-class pointer is rejected when validating
// for the OpenCL environment (only Function/Workgroup/CrossWorkGroup/Generic
// are allowed there).
TEST_F(ValidateAtomics, AtomicStoreWrongPointerStorageTypeForOpenCL) {
  const std::string body = R"(
OpAtomicStore %f32_im_var %device %relaxed %f32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_OPENCL_1_2));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicStore: storage class must be Function, Workgroup, "
                "CrossWorkGroup or Generic in the OpenCL environment."));
}

// OpAtomicStore: a UniformConstant pointer is rejected by the universal
// (environment-independent) rules.
TEST_F(ValidateAtomics, AtomicStoreWrongPointerStorageType) {
  const std::string body = R"(
OpAtomicStore %f32_uc_var %device %relaxed %f32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicStore: storage class forbidden by universal "
                        "validation rules."));
}

// OpAtomicStore: the scope operand must be a 32-bit int; the diagnostic is
// also expected to carry the offending instruction's disassembly.
TEST_F(ValidateAtomics, AtomicStoreWrongScopeType) {
  const std::string body = R"(
OpAtomicStore %f32_var %f32_1 %relaxed %f32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicStore: expected scope to be a 32-bit int\n "
                        "OpAtomicStore %28 %float_1 %uint_0_1 %float_1\n"));
}

// OpAtomicStore: the Memory Semantics operand must be a 32-bit int, not a
// float constant.
TEST_F(ValidateAtomics, AtomicStoreWrongMemorySemanticsType) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %f32_1 %f32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicStore: expected Memory Semantics to be a 32-bit int"));
}

// OpAtomicStore: the stored Value's type (u32) must equal the pointee
// type (f32).
TEST_F(ValidateAtomics, AtomicStoreWrongValueType) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %u32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicStore: "
                "expected Value type and the type pointed to by Pointer to "
                "be the same"));
}
// OpAtomicExchange on a 32-bit int validates cleanly in a Shader module.
TEST_F(ValidateAtomics, AtomicExchangeShaderSuccess) {
  const std::string body = R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val2 = OpAtomicExchange %u32 %u32_var %device %relaxed %u32_0
)";

  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}

// OpAtomicExchange on both float and int variables validates cleanly in a
// Kernel module.
TEST_F(ValidateAtomics, AtomicExchangeKernelSuccess) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicExchange %f32 %f32_var %device %relaxed %f32_0
OpAtomicStore %u32_var %device %relaxed %u32_1
%val4 = OpAtomicExchange %u32 %u32_var %device %relaxed %u32_0
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}

// Float OpAtomicExchange is accepted in a Shader module (unlike
// OpAtomicCompareExchange, which requires an int result — see below).
TEST_F(ValidateAtomics, AtomicExchangeShaderFloat) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicExchange %f32 %f32_var %device %relaxed %f32_0
)";

  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}

// OpAtomicExchange: Result Type must be an int or float scalar; a vec4
// result is rejected.
TEST_F(ValidateAtomics, AtomicExchangeWrongResultType) {
  const std::string body = R"(
OpStore %f32vec4_var %f32vec4_0000
%val2 = OpAtomicExchange %f32vec4 %f32vec4_var %device %relaxed %f32vec4_0000
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicExchange: "
                        "expected Result Type to be int or float scalar type"));
}

// OpAtomicExchange: passing a pointer *type* id as the Pointer operand is
// rejected during id validation.
TEST_F(ValidateAtomics, AtomicExchangeWrongPointerType) {
  const std::string body = R"(
%val2 = OpAtomicExchange %f32 %f32vec4_ptr %device %relaxed %f32vec4_0000
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_ID, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("Operand 33[%_ptr_Workgroup_v4float] cannot be a "
                        "type"));
}

// OpAtomicExchange: the pointee type (vec4) must match Result Type (f32).
TEST_F(ValidateAtomics, AtomicExchangeWrongPointerDataType) {
  const std::string body = R"(
OpStore %f32vec4_var %f32vec4_0000
%val2 = OpAtomicExchange %f32 %f32vec4_var %device %relaxed %f32vec4_0000
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicExchange: "
                "expected Pointer to point to a value of type Result Type"));
}

// OpAtomicExchange: the scope operand must be a 32-bit int.
TEST_F(ValidateAtomics, AtomicExchangeWrongScopeType) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicExchange %f32 %f32_var %f32_1 %relaxed %f32_0
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicExchange: expected scope to be a 32-bit int"));
}

// OpAtomicExchange: the Memory Semantics operand must be a 32-bit int.
TEST_F(ValidateAtomics, AtomicExchangeWrongMemorySemanticsType) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicExchange %f32 %f32_var %device %f32_1 %f32_0
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr(
          "AtomicExchange: expected Memory Semantics to be a 32-bit int"));
}

// OpAtomicExchange: the Value operand (u32) must have the Result Type (f32).
TEST_F(ValidateAtomics, AtomicExchangeWrongValueType) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicExchange %f32 %f32_var %device %relaxed %u32_0
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicExchange: "
                        "expected Value to be of type Result Type"));
}
// OpAtomicCompareExchange on a 32-bit int validates cleanly in a Shader
// module.
TEST_F(ValidateAtomics, AtomicCompareExchangeShaderSuccess) {
  const std::string body = R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val2 = OpAtomicCompareExchange %u32 %u32_var %device %relaxed %relaxed %u32_0 %u32_0
)";

  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}

// OpAtomicCompareExchange on float and int validates cleanly in a Kernel
// module (float compare-exchange is allowed outside Shader).
TEST_F(ValidateAtomics, AtomicCompareExchangeKernelSuccess) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicCompareExchange %f32 %f32_var %device %relaxed %relaxed %f32_0 %f32_1
OpAtomicStore %u32_var %device %relaxed %u32_1
%val4 = OpAtomicCompareExchange %u32 %u32_var %device %relaxed %relaxed %u32_0 %u32_0
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}

// In a Shader module OpAtomicCompareExchange must produce an int scalar;
// a float Result Type is rejected.
TEST_F(ValidateAtomics, AtomicCompareExchangeShaderFloat) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val1 = OpAtomicCompareExchange %f32 %f32_var %device %relaxed %relaxed %f32_0 %f32_1
)";

  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicCompareExchange: "
                        "expected Result Type to be int scalar type"));
}

// OpAtomicCompareExchange: a vec4 Result Type is rejected even in a Kernel
// module (must be int or float scalar).
TEST_F(ValidateAtomics, AtomicCompareExchangeWrongResultType) {
  const std::string body = R"(
OpStore %f32vec4_var %f32vec4_0000
%val2 = OpAtomicCompareExchange %f32vec4 %f32vec4_var %device %relaxed %relaxed %f32vec4_0000 %f32vec4_0000
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicCompareExchange: "
                        "expected Result Type to be int or float scalar type"));
}

// OpAtomicCompareExchange: a pointer *type* id as the Pointer operand is
// rejected during id validation.
TEST_F(ValidateAtomics, AtomicCompareExchangeWrongPointerType) {
  const std::string body = R"(
%val2 = OpAtomicCompareExchange %f32 %f32vec4_ptr %device %relaxed %relaxed %f32vec4_0000 %f32vec4_0000
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_ID, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("Operand 33[%_ptr_Workgroup_v4float] cannot be a "
                        "type"));
}

// OpAtomicCompareExchange: the pointee (vec4) must match Result Type (f32).
TEST_F(ValidateAtomics, AtomicCompareExchangeWrongPointerDataType) {
  const std::string body = R"(
OpStore %f32vec4_var %f32vec4_0000
%val2 = OpAtomicCompareExchange %f32 %f32vec4_var %device %relaxed %relaxed %f32_0 %f32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicCompareExchange: "
                "expected Pointer to point to a value of type Result Type"));
}

// OpAtomicCompareExchange: the scope operand must be a 32-bit int.
TEST_F(ValidateAtomics, AtomicCompareExchangeWrongScopeType) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicCompareExchange %f32 %f32_var %f32_1 %relaxed %relaxed %f32_0 %f32_0
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicCompareExchange: expected scope to be a 32-bit "
                        "int"));
}

// OpAtomicCompareExchange: the Equal semantics operand must be a 32-bit int.
TEST_F(ValidateAtomics, AtomicCompareExchangeWrongMemorySemanticsType1) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicCompareExchange %f32 %f32_var %device %f32_1 %relaxed %f32_0 %f32_0
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicCompareExchange: expected Memory Semantics to "
                        "be a 32-bit int"));
}

// OpAtomicCompareExchange: the Unequal semantics operand must be a 32-bit
// int.
TEST_F(ValidateAtomics, AtomicCompareExchangeWrongMemorySemanticsType2) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicCompareExchange %f32 %f32_var %device %relaxed %f32_1 %f32_0 %f32_0
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicCompareExchange: expected Memory Semantics to "
                        "be a 32-bit int"));
}

// OpAtomicCompareExchange: Release (and AcquireRelease) semantics are
// forbidden on the Unequal operand.
TEST_F(ValidateAtomics, AtomicCompareExchangeUnequalRelease) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicCompareExchange %f32 %f32_var %device %relaxed %release %f32_0 %f32_0
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicCompareExchange: Memory Semantics Release and "
                        "AcquireRelease cannot be used for operand Unequal"));
}

// OpAtomicCompareExchange: the Value operand (u32) must have the Result
// Type (f32).
TEST_F(ValidateAtomics, AtomicCompareExchangeWrongValueType) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicCompareExchange %f32 %f32_var %device %relaxed %relaxed %u32_0 %f32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicCompareExchange: "
                        "expected Value to be of type Result Type"));
}

// OpAtomicCompareExchange: the Comparator operand (u32) must have the
// Result Type (f32).
TEST_F(ValidateAtomics, AtomicCompareExchangeWrongComparatorType) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicCompareExchange %f32 %f32_var %device %relaxed %relaxed %f32_0 %u32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicCompareExchange: "
                        "expected Comparator to be of type Result Type"));
}
// OpAtomicCompareExchangeWeak on a 32-bit int validates cleanly in a Kernel
// module.
TEST_F(ValidateAtomics, AtomicCompareExchangeWeakSuccess) {
  const std::string body = R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val4 = OpAtomicCompareExchangeWeak %u32 %u32_var %device %relaxed %relaxed %u32_0 %u32_0
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}

// OpAtomicCompareExchangeWeak: unlike the strong form in a Kernel module,
// the weak form requires an int scalar Result Type; float is rejected.
TEST_F(ValidateAtomics, AtomicCompareExchangeWeakWrongResultType) {
  const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicCompareExchangeWeak %f32 %f32_var %device %relaxed %relaxed %f32_0 %f32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicCompareExchangeWeak: "
                        "expected Result Type to be int scalar type"));
}
// Smoke test: every atomic read-modify-write arithmetic/logical opcode on a
// 32-bit int validates cleanly in a Kernel module, across several memory
// semantics.
TEST_F(ValidateAtomics, AtomicArithmeticsSuccess) {
  const std::string body = R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_release
%val2 = OpAtomicIDecrement %u32 %u32_var %device %acquire_release
%val3 = OpAtomicIAdd %u32 %u32_var %device %acquire_release %u32_1
%val4 = OpAtomicISub %u32 %u32_var %device %acquire_release %u32_1
%val5 = OpAtomicUMin %u32 %u32_var %device %acquire_release %u32_1
%val6 = OpAtomicUMax %u32 %u32_var %device %acquire_release %u32_1
%val7 = OpAtomicSMin %u32 %u32_var %device %sequentially_consistent %u32_1
%val8 = OpAtomicSMax %u32 %u32_var %device %sequentially_consistent %u32_1
%val9 = OpAtomicAnd %u32 %u32_var %device %sequentially_consistent %u32_1
%val10 = OpAtomicOr %u32 %u32_var %device %sequentially_consistent %u32_1
%val11 = OpAtomicXor %u32 %u32_var %device %sequentially_consistent %u32_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}

// OpAtomicFlagClear / OpAtomicFlagTestAndSet on a 32-bit int variable
// validate cleanly in a Kernel module.
TEST_F(ValidateAtomics, AtomicFlagsSuccess) {
  const std::string body = R"(
OpAtomicFlagClear %u32_var %device %release
%val1 = OpAtomicFlagTestAndSet %bool %u32_var %device %relaxed
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
// OpAtomicFlagTestAndSet: Result Type must be bool; u32 is rejected.
TEST_F(ValidateAtomics, AtomicFlagTestAndSetWrongResultType) {
  const std::string body = R"(
%val1 = OpAtomicFlagTestAndSet %u32 %u32_var %device %relaxed
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFlagTestAndSet: "
                        "expected Result Type to be bool scalar type"));
}

// OpAtomicFlagTestAndSet: the Pointer operand must be an OpTypePointer
// value; a plain int constant is rejected.
TEST_F(ValidateAtomics, AtomicFlagTestAndSetNotPointer) {
  const std::string body = R"(
%val1 = OpAtomicFlagTestAndSet %bool %u32_1 %device %relaxed
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFlagTestAndSet: "
                        "expected Pointer to be of type OpTypePointer"));
}

// OpAtomicFlagTestAndSet: the pointee must be a 32-bit int; a float pointee
// is rejected.
TEST_F(ValidateAtomics, AtomicFlagTestAndSetNotIntPointer) {
  const std::string body = R"(
%val1 = OpAtomicFlagTestAndSet %bool %f32_var %device %relaxed
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicFlagTestAndSet: "
                "expected Pointer to point to a value of 32-bit int type"));
}

// OpAtomicFlagTestAndSet: a 64-bit int pointee is also rejected (must be
// exactly 32-bit).
TEST_F(ValidateAtomics, AtomicFlagTestAndSetNotInt32Pointer) {
  const std::string body = R"(
%val1 = OpAtomicFlagTestAndSet %bool %u64_var %device %relaxed
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicFlagTestAndSet: "
                "expected Pointer to point to a value of 32-bit int type"));
}

// OpAtomicFlagTestAndSet: the scope operand must be a 32-bit int; a 64-bit
// int constant is rejected.
TEST_F(ValidateAtomics, AtomicFlagTestAndSetWrongScopeType) {
  const std::string body = R"(
%val1 = OpAtomicFlagTestAndSet %bool %u32_var %u64_1 %relaxed
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicFlagTestAndSet: expected scope to be a 32-bit int"));
}

// OpAtomicFlagTestAndSet: the Memory Semantics operand must be a 32-bit int.
TEST_F(ValidateAtomics, AtomicFlagTestAndSetWrongMemorySemanticsType) {
  const std::string body = R"(
%val1 = OpAtomicFlagTestAndSet %bool %u32_var %device %u64_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFlagTestAndSet: "
                        "expected Memory Semantics to be a 32-bit int"));
}
// OpAtomicFlagClear: Acquire (and AcquireRelease) memory semantics are
// forbidden.
TEST_F(ValidateAtomics, AtomicFlagClearAcquire) {
  const std::string body = R"(
OpAtomicFlagClear %u32_var %device %acquire
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("Memory Semantics Acquire and AcquireRelease cannot be "
                        "used with AtomicFlagClear"));
}

// OpAtomicFlagClear: the Pointer operand must be an OpTypePointer value.
TEST_F(ValidateAtomics, AtomicFlagClearNotPointer) {
  const std::string body = R"(
OpAtomicFlagClear %u32_1 %device %relaxed
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFlagClear: "
                        "expected Pointer to be of type OpTypePointer"));
}

// OpAtomicFlagClear: the pointee must be a 32-bit int; float is rejected.
TEST_F(ValidateAtomics, AtomicFlagClearNotIntPointer) {
  const std::string body = R"(
OpAtomicFlagClear %f32_var %device %relaxed
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicFlagClear: "
                "expected Pointer to point to a value of 32-bit int type"));
}

// OpAtomicFlagClear: a 64-bit int pointee is rejected (must be exactly
// 32-bit).
TEST_F(ValidateAtomics, AtomicFlagClearNotInt32Pointer) {
  const std::string body = R"(
OpAtomicFlagClear %u64_var %device %relaxed
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicFlagClear: "
                "expected Pointer to point to a value of 32-bit int type"));
}

// OpAtomicFlagClear: the scope operand must be a 32-bit int; the diagnostic
// is also expected to carry the offending instruction's disassembly.
TEST_F(ValidateAtomics, AtomicFlagClearWrongScopeType) {
  const std::string body = R"(
OpAtomicFlagClear %u32_var %u64_1 %relaxed
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFlagClear: expected scope to be a 32-bit "
                        "int\n OpAtomicFlagClear %30 %ulong_1 %uint_0_1\n"));
}

// OpAtomicFlagClear: the Memory Semantics operand must be a 32-bit int.
TEST_F(ValidateAtomics, AtomicFlagClearWrongMemorySemanticsType) {
  const std::string body = R"(
OpAtomicFlagClear %u32_var %device %u64_1
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr(
          "AtomicFlagClear: expected Memory Semantics to be a 32-bit int"));
}
// Memory Semantics may set at most one ordering bit; combining Acquire and
// Release separately (rather than the AcquireRelease bit) is rejected.
TEST_F(ValidateAtomics, AtomicIIncrementAcquireAndRelease) {
  const std::string body = R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_and_release
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicIIncrement: Memory Semantics can have at most "
                        "one of the following bits set: Acquire, Release, "
                        "AcquireRelease or SequentiallyConsistent"));
}

// UniformMemory semantics are fine in a Shader module (Shader capability is
// present there).
TEST_F(ValidateAtomics, AtomicUniformMemorySemanticsShader) {
  const std::string body = R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_release_uniform_workgroup
)";

  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}

// UniformMemory semantics require the Shader capability, which a Kernel
// module lacks.
TEST_F(ValidateAtomics, AtomicUniformMemorySemanticsKernel) {
  const std::string body = R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_release_uniform_workgroup
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicIIncrement: Memory Semantics UniformMemory "
                        "requires capability Shader"));
}

// Lack of the AtomicStorage capability is intentionally ignored, see
// https://github.com/KhronosGroup/glslang/issues/1618 for the reasoning why.
TEST_F(ValidateAtomics, AtomicCounterMemorySemanticsNoCapability) {
  const std::string body = R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val1 = OpAtomicIIncrement %u32 %u32_var %device
%acquire_release_atomic_counter_workgroup
)";

  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}

// AtomicCounterMemory semantics with the AtomicStorage capability declared
// validates cleanly.
TEST_F(ValidateAtomics, AtomicCounterMemorySemanticsWithCapability) {
  const std::string body = R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_release_atomic_counter_workgroup
)";

  CompileSuccessfully(GenerateKernelCode(body, "OpCapability AtomicStorage\n"));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
  1122. TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicLoad) {
  1123. const std::string body = R"(
  1124. %ld = OpAtomicLoad %u32 %u32_var %workgroup %sequentially_consistent
  1125. )";
  1126. const std::string extra = R"(
  1127. OpCapability VulkanMemoryModelKHR
  1128. OpExtension "SPV_KHR_vulkan_memory_model"
  1129. )";
  1130. CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
  1131. SPV_ENV_UNIVERSAL_1_3);
  1132. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1133. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1134. EXPECT_THAT(getDiagnosticString(),
  1135. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1136. "used with the VulkanKHR memory model."));
  1137. }
  1138. TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicStore) {
  1139. const std::string body = R"(
  1140. OpAtomicStore %u32_var %workgroup %sequentially_consistent %u32_0
  1141. )";
  1142. const std::string extra = R"(
  1143. OpCapability VulkanMemoryModelKHR
  1144. OpExtension "SPV_KHR_vulkan_memory_model"
  1145. )";
  1146. CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
  1147. SPV_ENV_UNIVERSAL_1_3);
  1148. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1149. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1150. EXPECT_THAT(getDiagnosticString(),
  1151. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1152. "used with the VulkanKHR memory model."));
  1153. }
  1154. TEST_F(ValidateAtomics,
  1155. VulkanMemoryModelBanSequentiallyConsistentAtomicExchange) {
  1156. const std::string body = R"(
  1157. %ex = OpAtomicExchange %u32 %u32_var %workgroup %sequentially_consistent %u32_0
  1158. )";
  1159. const std::string extra = R"(
  1160. OpCapability VulkanMemoryModelKHR
  1161. OpExtension "SPV_KHR_vulkan_memory_model"
  1162. )";
  1163. CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
  1164. SPV_ENV_UNIVERSAL_1_3);
  1165. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1166. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1167. EXPECT_THAT(getDiagnosticString(),
  1168. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1169. "used with the VulkanKHR memory model."));
  1170. }
  1171. TEST_F(ValidateAtomics,
  1172. VulkanMemoryModelBanSequentiallyConsistentAtomicCompareExchangeEqual) {
  1173. const std::string body = R"(
  1174. %ex = OpAtomicCompareExchange %u32 %u32_var %workgroup %sequentially_consistent %relaxed %u32_0 %u32_0
  1175. )";
  1176. const std::string extra = R"(
  1177. OpCapability VulkanMemoryModelKHR
  1178. OpExtension "SPV_KHR_vulkan_memory_model"
  1179. )";
  1180. CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
  1181. SPV_ENV_UNIVERSAL_1_3);
  1182. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1183. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1184. EXPECT_THAT(getDiagnosticString(),
  1185. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1186. "used with the VulkanKHR memory model."));
  1187. }
  1188. TEST_F(ValidateAtomics,
  1189. VulkanMemoryModelBanSequentiallyConsistentAtomicCompareExchangeUnequal) {
  1190. const std::string body = R"(
  1191. %ex = OpAtomicCompareExchange %u32 %u32_var %workgroup %relaxed %sequentially_consistent %u32_0 %u32_0
  1192. )";
  1193. const std::string extra = R"(
  1194. OpCapability VulkanMemoryModelKHR
  1195. OpExtension "SPV_KHR_vulkan_memory_model"
  1196. )";
  1197. CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
  1198. SPV_ENV_UNIVERSAL_1_3);
  1199. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1200. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1201. EXPECT_THAT(getDiagnosticString(),
  1202. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1203. "used with the VulkanKHR memory model."));
  1204. }
  1205. TEST_F(ValidateAtomics,
  1206. VulkanMemoryModelBanSequentiallyConsistentAtomicIIncrement) {
  1207. const std::string body = R"(
  1208. %inc = OpAtomicIIncrement %u32 %u32_var %workgroup %sequentially_consistent
  1209. )";
  1210. const std::string extra = R"(
  1211. OpCapability VulkanMemoryModelKHR
  1212. OpExtension "SPV_KHR_vulkan_memory_model"
  1213. )";
  1214. CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
  1215. SPV_ENV_UNIVERSAL_1_3);
  1216. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1217. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1218. EXPECT_THAT(getDiagnosticString(),
  1219. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1220. "used with the VulkanKHR memory model."));
  1221. }
// SequentiallyConsistent memory semantics must be rejected on
// OpAtomicIDecrement when the module uses the VulkanKHR memory model.
TEST_F(ValidateAtomics,
VulkanMemoryModelBanSequentiallyConsistentAtomicIDecrement) {
const std::string body = R"(
%dec = OpAtomicIDecrement %u32 %u32_var %workgroup %sequentially_consistent
)";
// Enable the Vulkan memory model so the ban applies.
const std::string extra = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
SPV_ENV_UNIVERSAL_1_3);
EXPECT_EQ(SPV_ERROR_INVALID_DATA,
ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
EXPECT_THAT(getDiagnosticString(),
HasSubstr("SequentiallyConsistent memory semantics cannot be "
"used with the VulkanKHR memory model."));
}
// SequentiallyConsistent memory semantics must be rejected on OpAtomicIAdd
// when the module uses the VulkanKHR memory model.
TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicIAdd) {
const std::string body = R"(
%add = OpAtomicIAdd %u32 %u32_var %workgroup %sequentially_consistent %u32_0
)";
// Enable the Vulkan memory model so the ban applies.
const std::string extra = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
SPV_ENV_UNIVERSAL_1_3);
EXPECT_EQ(SPV_ERROR_INVALID_DATA,
ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
EXPECT_THAT(getDiagnosticString(),
HasSubstr("SequentiallyConsistent memory semantics cannot be "
"used with the VulkanKHR memory model."));
}
// SequentiallyConsistent memory semantics must be rejected on OpAtomicISub
// when the module uses the VulkanKHR memory model.
TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicISub) {
const std::string body = R"(
%sub = OpAtomicISub %u32 %u32_var %workgroup %sequentially_consistent %u32_0
)";
// Enable the Vulkan memory model so the ban applies.
const std::string extra = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
SPV_ENV_UNIVERSAL_1_3);
EXPECT_EQ(SPV_ERROR_INVALID_DATA,
ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
EXPECT_THAT(getDiagnosticString(),
HasSubstr("SequentiallyConsistent memory semantics cannot be "
"used with the VulkanKHR memory model."));
}
// SequentiallyConsistent memory semantics must be rejected on OpAtomicSMin
// when the module uses the VulkanKHR memory model.
TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicSMin) {
const std::string body = R"(
%min = OpAtomicSMin %u32 %u32_var %workgroup %sequentially_consistent %u32_0
)";
// Enable the Vulkan memory model so the ban applies.
const std::string extra = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
SPV_ENV_UNIVERSAL_1_3);
EXPECT_EQ(SPV_ERROR_INVALID_DATA,
ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
EXPECT_THAT(getDiagnosticString(),
HasSubstr("SequentiallyConsistent memory semantics cannot be "
"used with the VulkanKHR memory model."));
}
// SequentiallyConsistent memory semantics must be rejected on OpAtomicUMin
// when the module uses the VulkanKHR memory model.
TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicUMin) {
const std::string body = R"(
%min = OpAtomicUMin %u32 %u32_var %workgroup %sequentially_consistent %u32_0
)";
// Enable the Vulkan memory model so the ban applies.
const std::string extra = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
SPV_ENV_UNIVERSAL_1_3);
EXPECT_EQ(SPV_ERROR_INVALID_DATA,
ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
EXPECT_THAT(getDiagnosticString(),
HasSubstr("SequentiallyConsistent memory semantics cannot be "
"used with the VulkanKHR memory model."));
}
// SequentiallyConsistent memory semantics must be rejected on OpAtomicSMax
// when the module uses the VulkanKHR memory model.
TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicSMax) {
const std::string body = R"(
%max = OpAtomicSMax %u32 %u32_var %workgroup %sequentially_consistent %u32_0
)";
// Enable the Vulkan memory model so the ban applies.
const std::string extra = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
SPV_ENV_UNIVERSAL_1_3);
EXPECT_EQ(SPV_ERROR_INVALID_DATA,
ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
EXPECT_THAT(getDiagnosticString(),
HasSubstr("SequentiallyConsistent memory semantics cannot be "
"used with the VulkanKHR memory model."));
}
// SequentiallyConsistent memory semantics must be rejected on OpAtomicUMax
// when the module uses the VulkanKHR memory model.
TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicUMax) {
const std::string body = R"(
%max = OpAtomicUMax %u32 %u32_var %workgroup %sequentially_consistent %u32_0
)";
// Enable the Vulkan memory model so the ban applies.
const std::string extra = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
SPV_ENV_UNIVERSAL_1_3);
EXPECT_EQ(SPV_ERROR_INVALID_DATA,
ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
EXPECT_THAT(getDiagnosticString(),
HasSubstr("SequentiallyConsistent memory semantics cannot be "
"used with the VulkanKHR memory model."));
}
// SequentiallyConsistent memory semantics must be rejected on OpAtomicAnd
// when the module uses the VulkanKHR memory model.
TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicAnd) {
const std::string body = R"(
%and = OpAtomicAnd %u32 %u32_var %workgroup %sequentially_consistent %u32_0
)";
// Enable the Vulkan memory model so the ban applies.
const std::string extra = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
SPV_ENV_UNIVERSAL_1_3);
EXPECT_EQ(SPV_ERROR_INVALID_DATA,
ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
EXPECT_THAT(getDiagnosticString(),
HasSubstr("SequentiallyConsistent memory semantics cannot be "
"used with the VulkanKHR memory model."));
}
// SequentiallyConsistent memory semantics must be rejected on OpAtomicOr
// when the module uses the VulkanKHR memory model.
TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicOr) {
const std::string body = R"(
%or = OpAtomicOr %u32 %u32_var %workgroup %sequentially_consistent %u32_0
)";
// Enable the Vulkan memory model so the ban applies.
const std::string extra = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
SPV_ENV_UNIVERSAL_1_3);
EXPECT_EQ(SPV_ERROR_INVALID_DATA,
ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
EXPECT_THAT(getDiagnosticString(),
HasSubstr("SequentiallyConsistent memory semantics cannot be "
"used with the VulkanKHR memory model."));
}
// SequentiallyConsistent memory semantics must be rejected on OpAtomicXor
// when the module uses the VulkanKHR memory model.
TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicXor) {
const std::string body = R"(
%xor = OpAtomicXor %u32 %u32_var %workgroup %sequentially_consistent %u32_0
)";
// Enable the Vulkan memory model so the ban applies.
const std::string extra = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
SPV_ENV_UNIVERSAL_1_3);
EXPECT_EQ(SPV_ERROR_INVALID_DATA,
ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
EXPECT_THAT(getDiagnosticString(),
HasSubstr("SequentiallyConsistent memory semantics cannot be "
"used with the VulkanKHR memory model."));
}
// Using the OutputMemoryKHR memory-semantics bit without declaring the
// VulkanMemoryModelKHR capability must fail validation.
// Semantics value 4100 = 0x1004 = OutputMemoryKHR (0x1000) | Release (0x4).
TEST_F(ValidateAtomics, OutputMemoryKHRRequiresVulkanMemoryModelKHR) {
const std::string text = R"(
OpCapability Shader
OpMemoryModel Logical GLSL450
OpEntryPoint Fragment %1 "func"
OpExecutionMode %1 OriginUpperLeft
%2 = OpTypeVoid
%3 = OpTypeInt 32 0
%semantics = OpConstant %3 4100
%5 = OpTypeFunction %2
%workgroup = OpConstant %3 2
%ptr = OpTypePointer Workgroup %3
%var = OpVariable %ptr Workgroup
%1 = OpFunction %2 None %5
%7 = OpLabel
OpAtomicStore %var %workgroup %semantics %workgroup
OpReturn
OpFunctionEnd
)";
CompileSuccessfully(text);
EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(getDiagnosticString(),
HasSubstr("AtomicStore: Memory Semantics OutputMemoryKHR "
"requires capability VulkanMemoryModelKHR"));
}
// Using the MakeAvailableKHR memory-semantics bit without declaring the
// VulkanMemoryModelKHR capability must fail validation.
// Semantics value 8196 = 0x2004 = MakeAvailableKHR (0x2000) | Release (0x4).
TEST_F(ValidateAtomics, MakeAvailableKHRRequiresVulkanMemoryModelKHR) {
const std::string text = R"(
OpCapability Shader
OpMemoryModel Logical GLSL450
OpEntryPoint Fragment %1 "func"
OpExecutionMode %1 OriginUpperLeft
%2 = OpTypeVoid
%3 = OpTypeInt 32 0
%semantics = OpConstant %3 8196
%5 = OpTypeFunction %2
%workgroup = OpConstant %3 2
%ptr = OpTypePointer Workgroup %3
%var = OpVariable %ptr Workgroup
%1 = OpFunction %2 None %5
%7 = OpLabel
OpAtomicStore %var %workgroup %semantics %workgroup
OpReturn
OpFunctionEnd
)";
CompileSuccessfully(text);
EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(getDiagnosticString(),
HasSubstr("AtomicStore: Memory Semantics MakeAvailableKHR "
"requires capability VulkanMemoryModelKHR"));
}
// Using the MakeVisibleKHR memory-semantics bit without declaring the
// VulkanMemoryModelKHR capability must fail validation.
// Semantics value 16386 = 0x4002 = MakeVisibleKHR (0x4000) | Acquire (0x2).
TEST_F(ValidateAtomics, MakeVisibleKHRRequiresVulkanMemoryModelKHR) {
const std::string text = R"(
OpCapability Shader
OpMemoryModel Logical GLSL450
OpEntryPoint Fragment %1 "func"
OpExecutionMode %1 OriginUpperLeft
%2 = OpTypeVoid
%3 = OpTypeInt 32 0
%semantics = OpConstant %3 16386
%5 = OpTypeFunction %2
%workgroup = OpConstant %3 2
%ptr = OpTypePointer Workgroup %3
%var = OpVariable %ptr Workgroup
%1 = OpFunction %2 None %5
%7 = OpLabel
%ld = OpAtomicLoad %3 %var %workgroup %semantics
OpReturn
OpFunctionEnd
)";
CompileSuccessfully(text);
EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(getDiagnosticString(),
HasSubstr("AtomicLoad: Memory Semantics MakeVisibleKHR requires "
"capability VulkanMemoryModelKHR"));
}
// MakeAvailableKHR must be accompanied by Release or AcquireRelease.
// Semantics value 8448 = 0x2100 = MakeAvailableKHR (0x2000) |
// WorkgroupMemory (0x100) — no release-class bit, so validation must fail.
TEST_F(ValidateAtomics, MakeAvailableKHRRequiresReleaseSemantics) {
const std::string text = R"(
OpCapability Shader
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
OpMemoryModel Logical VulkanKHR
OpEntryPoint Fragment %1 "func"
OpExecutionMode %1 OriginUpperLeft
%2 = OpTypeVoid
%3 = OpTypeInt 32 0
%semantics = OpConstant %3 8448
%5 = OpTypeFunction %2
%workgroup = OpConstant %3 2
%ptr = OpTypePointer Workgroup %3
%var = OpVariable %ptr Workgroup
%1 = OpFunction %2 None %5
%7 = OpLabel
OpAtomicStore %var %workgroup %semantics %workgroup
OpReturn
OpFunctionEnd
)";
CompileSuccessfully(text, SPV_ENV_UNIVERSAL_1_3);
EXPECT_EQ(SPV_ERROR_INVALID_DATA,
ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("AtomicStore: MakeAvailableKHR Memory Semantics also requires "
"either Release or AcquireRelease Memory Semantics"));
}
// MakeVisibleKHR must be accompanied by Acquire or AcquireRelease.
// Semantics value 16640 = 0x4100 = MakeVisibleKHR (0x4000) |
// WorkgroupMemory (0x100) — no acquire-class bit, so validation must fail.
TEST_F(ValidateAtomics, MakeVisibleKHRRequiresAcquireSemantics) {
const std::string text = R"(
OpCapability Shader
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
OpMemoryModel Logical VulkanKHR
OpEntryPoint Fragment %1 "func"
OpExecutionMode %1 OriginUpperLeft
%2 = OpTypeVoid
%3 = OpTypeInt 32 0
%semantics = OpConstant %3 16640
%5 = OpTypeFunction %2
%workgroup = OpConstant %3 2
%ptr = OpTypePointer Workgroup %3
%var = OpVariable %ptr Workgroup
%1 = OpFunction %2 None %5
%7 = OpLabel
%ld = OpAtomicLoad %3 %var %workgroup %semantics
OpReturn
OpFunctionEnd
)";
CompileSuccessfully(text, SPV_ENV_UNIVERSAL_1_3);
EXPECT_EQ(SPV_ERROR_INVALID_DATA,
ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("AtomicLoad: MakeVisibleKHR Memory Semantics also requires "
"either Acquire or AcquireRelease Memory Semantics"));
}
// MakeAvailableKHR must name at least one storage class in the semantics.
// Semantics value 8196 = 0x2004 = MakeAvailableKHR (0x2000) | Release (0x4)
// — has the required release bit but no storage-class bit, so it must fail.
TEST_F(ValidateAtomics, MakeAvailableKHRRequiresStorageSemantics) {
const std::string text = R"(
OpCapability Shader
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
OpMemoryModel Logical VulkanKHR
OpEntryPoint Fragment %1 "func"
OpExecutionMode %1 OriginUpperLeft
%2 = OpTypeVoid
%3 = OpTypeInt 32 0
%semantics = OpConstant %3 8196
%5 = OpTypeFunction %2
%workgroup = OpConstant %3 2
%ptr = OpTypePointer Workgroup %3
%var = OpVariable %ptr Workgroup
%1 = OpFunction %2 None %5
%7 = OpLabel
OpAtomicStore %var %workgroup %semantics %workgroup
OpReturn
OpFunctionEnd
)";
CompileSuccessfully(text, SPV_ENV_UNIVERSAL_1_3);
EXPECT_EQ(SPV_ERROR_INVALID_DATA,
ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
EXPECT_THAT(
getDiagnosticString(),
HasSubstr(
"AtomicStore: expected Memory Semantics to include a storage class"));
}
// MakeVisibleKHR must name at least one storage class in the semantics.
// Semantics value 16386 = 0x4002 = MakeVisibleKHR (0x4000) | Acquire (0x2)
// — has the required acquire bit but no storage-class bit, so it must fail.
TEST_F(ValidateAtomics, MakeVisibleKHRRequiresStorageSemantics) {
const std::string text = R"(
OpCapability Shader
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
OpMemoryModel Logical VulkanKHR
OpEntryPoint Fragment %1 "func"
OpExecutionMode %1 OriginUpperLeft
%2 = OpTypeVoid
%3 = OpTypeInt 32 0
%semantics = OpConstant %3 16386
%5 = OpTypeFunction %2
%workgroup = OpConstant %3 2
%ptr = OpTypePointer Workgroup %3
%var = OpVariable %ptr Workgroup
%1 = OpFunction %2 None %5
%7 = OpLabel
%ld = OpAtomicLoad %3 %var %workgroup %semantics
OpReturn
OpFunctionEnd
)";
CompileSuccessfully(text, SPV_ENV_UNIVERSAL_1_3);
EXPECT_EQ(SPV_ERROR_INVALID_DATA,
ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
EXPECT_THAT(
getDiagnosticString(),
HasSubstr(
"AtomicLoad: expected Memory Semantics to include a storage class"));
}
// The QueueFamilyKHR memory scope is accepted when the module declares the
// VulkanMemoryModelKHR capability (positive case; contrast with the test
// below that omits the capability).
TEST_F(ValidateAtomics, VulkanMemoryModelAllowsQueueFamilyKHR) {
const std::string body = R"(
%val = OpAtomicAnd %u32 %u32_var %queuefamily %relaxed %u32_1
)";
const std::string extra = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
SPV_ENV_VULKAN_1_1);
EXPECT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_1));
}
// Without the VulkanMemoryModelKHR capability, the QueueFamilyKHR memory
// scope must be rejected; the diagnostic also echoes the disassembled
// offending instruction.
TEST_F(ValidateAtomics, NonVulkanMemoryModelDisallowsQueueFamilyKHR) {
const std::string body = R"(
%val = OpAtomicAnd %u32 %u32_var %queuefamily %relaxed %u32_1
)";
CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_1);
EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_1));
EXPECT_THAT(getDiagnosticString(),
HasSubstr("AtomicAnd: Memory Scope QueueFamilyKHR requires "
"capability VulkanMemoryModelKHR\n  %42 = OpAtomicAnd "
"%uint %29 %uint_5 %uint_0_1 %uint_1\n"));
}
// With the Shader capability, a Memory Semantics operand produced by
// OpSpecConstant (rather than OpConstant) must be rejected.
TEST_F(ValidateAtomics, SemanticsSpecConstantShader) {
const std::string spirv = R"(
OpCapability Shader
OpMemoryModel Logical GLSL450
OpEntryPoint Fragment %func "func"
OpExecutionMode %func OriginUpperLeft
%void = OpTypeVoid
%int = OpTypeInt 32 0
%spec_const = OpSpecConstant %int 0
%workgroup = OpConstant %int 2
%ptr_int_workgroup = OpTypePointer Workgroup %int
%var = OpVariable %ptr_int_workgroup Workgroup
%voidfn = OpTypeFunction %void
%func = OpFunction %void None %voidfn
%entry = OpLabel
%ld = OpAtomicLoad %int %var %workgroup %spec_const
OpReturn
OpFunctionEnd
)";
CompileSuccessfully(spirv);
EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(getDiagnosticString(),
HasSubstr("Memory Semantics ids must be OpConstant when Shader "
"capability is present"));
}
// With the Kernel capability (no Shader), an OpSpecConstant Memory Semantics
// operand is allowed — counterpart to SemanticsSpecConstantShader above.
TEST_F(ValidateAtomics, SemanticsSpecConstantKernel) {
const std::string spirv = R"(
OpCapability Kernel
OpCapability Linkage
OpMemoryModel Logical OpenCL
%void = OpTypeVoid
%int = OpTypeInt 32 0
%spec_const = OpSpecConstant %int 0
%workgroup = OpConstant %int 2
%ptr_int_workgroup = OpTypePointer Workgroup %int
%var = OpVariable %ptr_int_workgroup Workgroup
%voidfn = OpTypeFunction %void
%func = OpFunction %void None %voidfn
%entry = OpLabel
%ld = OpAtomicLoad %int %var %workgroup %spec_const
OpReturn
OpFunctionEnd
)";
CompileSuccessfully(spirv);
EXPECT_EQ(SPV_SUCCESS, ValidateInstructions());
}
// With the Shader capability, a memory Scope operand produced by
// OpSpecConstant (rather than OpConstant) must be rejected.
TEST_F(ValidateAtomics, ScopeSpecConstantShader) {
const std::string spirv = R"(
OpCapability Shader
OpMemoryModel Logical GLSL450
OpEntryPoint Fragment %func "func"
OpExecutionMode %func OriginUpperLeft
%void = OpTypeVoid
%int = OpTypeInt 32 0
%spec_const = OpSpecConstant %int 0
%relaxed = OpConstant %int 0
%ptr_int_workgroup = OpTypePointer Workgroup %int
%var = OpVariable %ptr_int_workgroup Workgroup
%voidfn = OpTypeFunction %void
%func = OpFunction %void None %voidfn
%entry = OpLabel
%ld = OpAtomicLoad %int %var %spec_const %relaxed
OpReturn
OpFunctionEnd
)";
CompileSuccessfully(spirv);
EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(
getDiagnosticString(),
HasSubstr(
"Scope ids must be OpConstant when Shader capability is present"));
}
// With the Kernel capability (no Shader), an OpSpecConstant Scope operand is
// allowed — counterpart to ScopeSpecConstantShader above.
TEST_F(ValidateAtomics, ScopeSpecConstantKernel) {
const std::string spirv = R"(
OpCapability Kernel
OpCapability Linkage
OpMemoryModel Logical OpenCL
%void = OpTypeVoid
%int = OpTypeInt 32 0
%spec_const = OpSpecConstant %int 0
%relaxed = OpConstant %int 0
%ptr_int_workgroup = OpTypePointer Workgroup %int
%var = OpVariable %ptr_int_workgroup Workgroup
%voidfn = OpTypeFunction %void
%func = OpFunction %void None %voidfn
%entry = OpLabel
%ld = OpAtomicLoad %int %var %spec_const %relaxed
OpReturn
OpFunctionEnd
)";
CompileSuccessfully(spirv);
EXPECT_EQ(SPV_SUCCESS, ValidateInstructions());
}
// Under the VulkanKHR memory model, Device scope additionally requires the
// VulkanMemoryModelDeviceScopeKHR capability; here it is omitted, so the
// atomic must be rejected.
TEST_F(ValidateAtomics, VulkanMemoryModelDeviceScopeBad) {
const std::string body = R"(
%val = OpAtomicAnd %u32 %u32_var %device %relaxed %u32_1
)";
const std::string extra = R"(OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
SPV_ENV_UNIVERSAL_1_3);
EXPECT_EQ(SPV_ERROR_INVALID_DATA,
ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("Use of device scope with VulkanKHR memory model requires the "
"VulkanMemoryModelDeviceScopeKHR capability"));
}
// Positive counterpart to VulkanMemoryModelDeviceScopeBad: declaring
// VulkanMemoryModelDeviceScopeKHR makes Device scope valid under VulkanKHR.
TEST_F(ValidateAtomics, VulkanMemoryModelDeviceScopeGood) {
const std::string body = R"(
%val = OpAtomicAnd %u32 %u32_var %device %relaxed %u32_1
)";
const std::string extra = R"(OpCapability VulkanMemoryModelKHR
OpCapability VulkanMemoryModelDeviceScopeKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
CompileSuccessfully(GenerateShaderCode(body, extra, "VulkanKHR"),
SPV_ENV_UNIVERSAL_1_3);
EXPECT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
}
// OpAtomicCompareExchangeWeak assembled as a SPIR-V 1.3 module is still
// accepted when validated in a 1.4 environment (the version gate keys off
// the module's own version, not the validation target).
TEST_F(ValidateAtomics, CompareExchangeWeakV13ValV14Good) {
const std::string body = R"(
%val1 = OpAtomicCompareExchangeWeak %u32 %u32_var %device %relaxed %relaxed %u32_0 %u32_0
)";
CompileSuccessfully(GenerateKernelCode(body), SPV_ENV_UNIVERSAL_1_3);
EXPECT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_UNIVERSAL_1_4));
}
// OpAtomicCompareExchangeWeak in a module assembled as SPIR-V 1.4 must be
// rejected: the opcode is only allowed in SPIR-V 1.3 and earlier.
TEST_F(ValidateAtomics, CompareExchangeWeakV14Bad) {
const std::string body = R"(
%val1 = OpAtomicCompareExchangeWeak %u32 %u32_var %device %relaxed %relaxed %u32_0 %u32_0
)";
CompileSuccessfully(GenerateKernelCode(body), SPV_ENV_UNIVERSAL_1_4);
EXPECT_EQ(SPV_ERROR_WRONG_VERSION,
ValidateInstructions(SPV_ENV_UNIVERSAL_1_4));
EXPECT_THAT(
getDiagnosticString(),
HasSubstr(
"AtomicCompareExchangeWeak requires SPIR-V version 1.3 or earlier"));
}
// OpAtomicCompareExchange with the Volatile bit (32768 = 0x8000) set in both
// the Equal and Unequal semantics operands passes: the masks match.
TEST_F(ValidateAtomics, CompareExchangeVolatileMatch) {
const std::string spirv = R"(
OpCapability Shader
OpCapability VulkanMemoryModelKHR
OpCapability Linkage
OpExtension "SPV_KHR_vulkan_memory_model"
OpMemoryModel Logical VulkanKHR
%void = OpTypeVoid
%int = OpTypeInt 32 0
%int_0 = OpConstant %int 0
%int_1 = OpConstant %int 1
%workgroup = OpConstant %int 2
%volatile = OpConstant %int 32768
%ptr_wg_int = OpTypePointer Workgroup %int
%wg_var = OpVariable %ptr_wg_int Workgroup
%void_fn = OpTypeFunction %void
%func = OpFunction %void None %void_fn
%entry = OpLabel
%cmp_ex = OpAtomicCompareExchange %int %wg_var %workgroup %volatile %volatile %int_0 %int_1
OpReturn
OpFunctionEnd
)";
CompileSuccessfully(spirv);
EXPECT_EQ(SPV_SUCCESS, ValidateInstructions());
}
// OpAtomicCompareExchange whose Equal semantics lack the Volatile bit
// (32768 = 0x8000) while the Unequal semantics have it must be rejected:
// the Volatile setting has to agree between the two operands.
TEST_F(ValidateAtomics, CompareExchangeVolatileMismatch) {
const std::string spirv = R"(
OpCapability Shader
OpCapability VulkanMemoryModelKHR
OpCapability Linkage
OpExtension "SPV_KHR_vulkan_memory_model"
OpMemoryModel Logical VulkanKHR
%void = OpTypeVoid
%int = OpTypeInt 32 0
%int_0 = OpConstant %int 0
%int_1 = OpConstant %int 1
%workgroup = OpConstant %int 2
%volatile = OpConstant %int 32768
%non_volatile = OpConstant %int 0
%ptr_wg_int = OpTypePointer Workgroup %int
%wg_var = OpVariable %ptr_wg_int Workgroup
%void_fn = OpTypeFunction %void
%func = OpFunction %void None %void_fn
%entry = OpLabel
%cmp_ex = OpAtomicCompareExchange %int %wg_var %workgroup %non_volatile %volatile %int_0 %int_1
OpReturn
OpFunctionEnd
)";
CompileSuccessfully(spirv);
EXPECT_EQ(SPV_ERROR_INVALID_ID, ValidateInstructions());
EXPECT_THAT(getDiagnosticString(),
HasSubstr("Volatile mask setting must match for Equal and "
"Unequal memory semantics"));
}
// With CooperativeMatrixNV, the semantics operands here are OpSpecConstant
// ids, so the validator cannot compare their (default) values; the
// potentially mismatching Volatile masks are therefore not diagnosed.
TEST_F(ValidateAtomics, CompareExchangeVolatileMismatchCooperativeMatrix) {
const std::string spirv = R"(
OpCapability Shader
OpCapability VulkanMemoryModelKHR
OpCapability Linkage
OpCapability CooperativeMatrixNV
OpExtension "SPV_KHR_vulkan_memory_model"
OpExtension "SPV_NV_cooperative_matrix"
OpMemoryModel Logical VulkanKHR
%void = OpTypeVoid
%int = OpTypeInt 32 0
%int_0 = OpConstant %int 0
%int_1 = OpConstant %int 1
%workgroup = OpConstant %int 2
%volatile = OpSpecConstant %int 32768
%non_volatile = OpSpecConstant %int 32768
%ptr_wg_int = OpTypePointer Workgroup %int
%wg_var = OpVariable %ptr_wg_int Workgroup
%void_fn = OpTypeFunction %void
%func = OpFunction %void None %void_fn
%entry = OpLabel
%cmp_ex = OpAtomicCompareExchange %int %wg_var %workgroup %volatile %non_volatile %int_0 %int_1
OpReturn
OpFunctionEnd
)";
// This is ok because we cannot evaluate the spec constant defaults.
CompileSuccessfully(spirv);
EXPECT_EQ(SPV_SUCCESS, ValidateInstructions());
}
// The Volatile memory-semantics bit (32768 = 0x8000) requires the
// VulkanMemoryModelKHR capability; under GLSL450 it must be rejected.
TEST_F(ValidateAtomics, VolatileRequiresVulkanMemoryModel) {
const std::string spirv = R"(
OpCapability Shader
OpCapability Linkage
OpMemoryModel Logical GLSL450
%void = OpTypeVoid
%int = OpTypeInt 32 0
%int_0 = OpConstant %int 0
%int_1 = OpConstant %int 1
%workgroup = OpConstant %int 2
%volatile = OpConstant %int 32768
%ptr_wg_int = OpTypePointer Workgroup %int
%wg_var = OpVariable %ptr_wg_int Workgroup
%void_fn = OpTypeFunction %void
%func = OpFunction %void None %void_fn
%entry = OpLabel
%ld = OpAtomicLoad %int %wg_var %workgroup %volatile
OpReturn
OpFunctionEnd
)";
CompileSuccessfully(spirv);
EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(getDiagnosticString(),
HasSubstr("Memory Semantics Volatile requires capability "
"VulkanMemoryModelKHR"));
}
// When CooperativeMatrixNV is declared, a Memory Semantics operand that is
// not a constant instruction (here an OpUndef) must be rejected.
TEST_F(ValidateAtomics, CooperativeMatrixSemanticsMustBeConstant) {
const std::string spirv = R"(
OpCapability Shader
OpCapability Linkage
OpCapability CooperativeMatrixNV
OpExtension "SPV_NV_cooperative_matrix"
OpMemoryModel Logical GLSL450
%void = OpTypeVoid
%int = OpTypeInt 32 0
%int_0 = OpConstant %int 0
%int_1 = OpConstant %int 1
%workgroup = OpConstant %int 2
%undef = OpUndef %int
%ptr_wg_int = OpTypePointer Workgroup %int
%wg_var = OpVariable %ptr_wg_int Workgroup
%void_fn = OpTypeFunction %void
%func = OpFunction %void None %void_fn
%entry = OpLabel
%ld = OpAtomicLoad %int %wg_var %workgroup %undef
OpReturn
OpFunctionEnd
)";
CompileSuccessfully(spirv);
EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(getDiagnosticString(),
HasSubstr("Memory Semantics must be a constant instruction when "
"CooperativeMatrixNV capability is present"));
}
  1874. } // namespace
  1875. } // namespace val
  1876. } // namespace spvtools