val_atomics_test.cpp 91 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129213021312132213321342135213621372138213921402141214221432144214521462147214821492150215121522153215421552156215721582159216021612162216321642165216621672168216921702171217221732174217521762177217821792180218121822183218421852186218721882189219021912192219321942195219621972198219922002201220222032204220522062207220822092210221122122213221422152216221722182219222022212222222322242225222622272228222922302231223222332234223522362237223822392240224122422243224422452246224722482249225022512252225322542255225622572258225922602261226222632264226522662267226822692270227122722273227422752276227
7227822792280228122822283228422852286228722882289229022912292229322942295229622972298229923002301230223032304230523062307230823092310231123122313231423152316231723182319232023212322232323242325232623272328232923302331233223332334233523362337233823392340234123422343234423452346234723482349235023512352235323542355235623572358235923602361236223632364236523662367236823692370237123722373237423752376237723782379238023812382238323842385238623872388238923902391239223932394239523962397239823992400240124022403240424052406240724082409241024112412241324142415241624172418241924202421242224232424242524262427242824292430243124322433243424352436243724382439244024412442244324442445244624472448244924502451245224532454245524562457245824592460246124622463246424652466246724682469247024712472247324742475247624772478247924802481248224832484248524862487248824892490249124922493249424952496249724982499250025012502250325042505250625072508250925102511251225132514251525162517251825192520252125222523252425252526252725282529253025312532253325342535253625372538253925402541254225432544254525462547254825492550255125522553255425552556255725582559256025612562256325642565256625672568256925702571257225732574257525762577257825792580258125822583258425852586258725882589259025912592259325942595259625972598259926002601260226032604260526062607260826092610261126122613261426152616261726182619262026212622262326242625262626272628262926302631263226332634263526362637263826392640264126422643264426452646264726482649265026512652265326542655265626572658265926602661266226632664266526662667266826692670267126722673267426752676267726782679268026812682268326842685268626872688268926902691269226932694269526962697269826992700270127022703270427052706270727082709271027112712271327142715271627172718
  1. // Copyright (c) 2017 Google Inc.
  2. //
  3. // Licensed under the Apache License, Version 2.0 (the "License");
  4. // you may not use this file except in compliance with the License.
  5. // You may obtain a copy of the License at
  6. //
  7. // http://www.apache.org/licenses/LICENSE-2.0
  8. //
  9. // Unless required by applicable law or agreed to in writing, software
  10. // distributed under the License is distributed on an "AS IS" BASIS,
  11. // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. // See the License for the specific language governing permissions and
  13. // limitations under the License.
  14. #include <sstream>
  15. #include <string>
  16. #include "gmock/gmock.h"
  17. #include "test/unit_spirv.h"
  18. #include "test/val/val_fixtures.h"
  19. namespace spvtools {
  20. namespace val {
  21. namespace {
  22. using ::testing::HasSubstr;
  23. using ::testing::Not;
  24. using ValidateAtomics = spvtest::ValidateBase<bool>;
  25. std::string GenerateShaderCodeImpl(
  26. const std::string& body, const std::string& capabilities_and_extensions,
  27. const std::string& definitions, const std::string& memory_model,
  28. const std::string& execution) {
  29. std::ostringstream ss;
  30. ss << R"(
  31. OpCapability Shader
  32. )";
  33. ss << capabilities_and_extensions;
  34. ss << "OpMemoryModel Logical " << memory_model << "\n";
  35. ss << execution;
  36. ss << R"(
  37. %void = OpTypeVoid
  38. %func = OpTypeFunction %void
  39. %bool = OpTypeBool
  40. %f32 = OpTypeFloat 32
  41. %u32 = OpTypeInt 32 0
  42. %f32vec4 = OpTypeVector %f32 4
  43. %f32_0 = OpConstant %f32 0
  44. %f32_1 = OpConstant %f32 1
  45. %u32_0 = OpConstant %u32 0
  46. %u32_1 = OpConstant %u32 1
  47. %f32vec4_0000 = OpConstantComposite %f32vec4 %f32_0 %f32_0 %f32_0 %f32_0
  48. %cross_device = OpConstant %u32 0
  49. %device = OpConstant %u32 1
  50. %workgroup = OpConstant %u32 2
  51. %subgroup = OpConstant %u32 3
  52. %invocation = OpConstant %u32 4
  53. %queuefamily = OpConstant %u32 5
  54. %relaxed = OpConstant %u32 0
  55. %acquire = OpConstant %u32 2
  56. %release = OpConstant %u32 4
  57. %acquire_release = OpConstant %u32 8
  58. %acquire_and_release = OpConstant %u32 6
  59. %sequentially_consistent = OpConstant %u32 16
  60. %acquire_release_uniform_workgroup = OpConstant %u32 328
  61. %f32_ptr = OpTypePointer Workgroup %f32
  62. %f32_var = OpVariable %f32_ptr Workgroup
  63. %u32_ptr = OpTypePointer Workgroup %u32
  64. %u32_var = OpVariable %u32_ptr Workgroup
  65. %f32vec4_ptr = OpTypePointer Workgroup %f32vec4
  66. %f32vec4_var = OpVariable %f32vec4_ptr Workgroup
  67. %f32_ptr_function = OpTypePointer Function %f32
  68. )";
  69. ss << definitions;
  70. ss << R"(
  71. %main = OpFunction %void None %func
  72. %main_entry = OpLabel
  73. )";
  74. ss << body;
  75. ss << R"(
  76. OpReturn
  77. OpFunctionEnd)";
  78. return ss.str();
  79. }
  80. std::string GenerateShaderCode(
  81. const std::string& body,
  82. const std::string& capabilities_and_extensions = "",
  83. const std::string& extra_defs = "",
  84. const std::string& memory_model = "GLSL450") {
  85. const std::string execution = R"(
  86. OpEntryPoint Fragment %main "main"
  87. OpExecutionMode %main OriginUpperLeft
  88. )";
  89. const std::string definitions = R"(
  90. %u64 = OpTypeInt 64 0
  91. %s64 = OpTypeInt 64 1
  92. %u64_1 = OpConstant %u64 1
  93. %s64_1 = OpConstant %s64 1
  94. %u64_ptr = OpTypePointer Workgroup %u64
  95. %s64_ptr = OpTypePointer Workgroup %s64
  96. %u64_var = OpVariable %u64_ptr Workgroup
  97. %s64_var = OpVariable %s64_ptr Workgroup
  98. )";
  99. return GenerateShaderCodeImpl(
  100. body, "OpCapability Int64\n" + capabilities_and_extensions,
  101. definitions + extra_defs, memory_model, execution);
  102. }
  103. std::string GenerateShaderComputeCode(
  104. const std::string& body,
  105. const std::string& capabilities_and_extensions = "",
  106. const std::string& extra_defs = "",
  107. const std::string& memory_model = "GLSL450") {
  108. const std::string execution = R"(
  109. OpEntryPoint GLCompute %main "main"
  110. OpExecutionMode %main LocalSize 32 1 1
  111. )";
  112. const std::string definitions = R"(
  113. %u64 = OpTypeInt 64 0
  114. %s64 = OpTypeInt 64 1
  115. %u64_1 = OpConstant %u64 1
  116. %s64_1 = OpConstant %s64 1
  117. %u64_ptr = OpTypePointer Workgroup %u64
  118. %s64_ptr = OpTypePointer Workgroup %s64
  119. %u64_var = OpVariable %u64_ptr Workgroup
  120. %s64_var = OpVariable %s64_ptr Workgroup
  121. )";
  122. return GenerateShaderCodeImpl(
  123. body, "OpCapability Int64\n" + capabilities_and_extensions,
  124. definitions + extra_defs, memory_model, execution);
  125. }
// Assembles a complete OpenCL-flavored (Kernel) SPIR-V module around |body|.
// Unlike the shader generators above this uses the Physical32/OpenCL memory
// model, declares Linkage (so no entry point is needed), and additionally
// provides UniformConstant and Image storage-class variables plus the
// %acquire_release_atomic_counter_workgroup semantics constant used by
// kernel-only tests.
std::string GenerateKernelCode(
    const std::string& body,
    const std::string& capabilities_and_extensions = "") {
  std::ostringstream ss;
  // Base capabilities for an OpenCL kernel module; Int64 is always on.
  ss << R"(
OpCapability Addresses
OpCapability Kernel
OpCapability Linkage
OpCapability Int64
)";
  ss << capabilities_and_extensions;
  // Fixed memory model plus all types, scope/semantics constants, and
  // variables (Workgroup, UniformConstant, and Image storage classes).
  ss << R"(
OpMemoryModel Physical32 OpenCL
%void = OpTypeVoid
%func = OpTypeFunction %void
%bool = OpTypeBool
%f32 = OpTypeFloat 32
%u32 = OpTypeInt 32 0
%u64 = OpTypeInt 64 0
%f32vec4 = OpTypeVector %f32 4
%f32_0 = OpConstant %f32 0
%f32_1 = OpConstant %f32 1
%u32_0 = OpConstant %u32 0
%u32_1 = OpConstant %u32 1
%u64_1 = OpConstant %u64 1
%f32vec4_0000 = OpConstantComposite %f32vec4 %f32_0 %f32_0 %f32_0 %f32_0
%cross_device = OpConstant %u32 0
%device = OpConstant %u32 1
%workgroup = OpConstant %u32 2
%subgroup = OpConstant %u32 3
%invocation = OpConstant %u32 4
%relaxed = OpConstant %u32 0
%acquire = OpConstant %u32 2
%release = OpConstant %u32 4
%acquire_release = OpConstant %u32 8
%acquire_and_release = OpConstant %u32 6
%sequentially_consistent = OpConstant %u32 16
%acquire_release_uniform_workgroup = OpConstant %u32 328
%acquire_release_atomic_counter_workgroup = OpConstant %u32 1288
%f32_ptr = OpTypePointer Workgroup %f32
%f32_var = OpVariable %f32_ptr Workgroup
%u32_ptr = OpTypePointer Workgroup %u32
%u32_var = OpVariable %u32_ptr Workgroup
%u64_ptr = OpTypePointer Workgroup %u64
%u64_var = OpVariable %u64_ptr Workgroup
%f32vec4_ptr = OpTypePointer Workgroup %f32vec4
%f32vec4_var = OpVariable %f32vec4_ptr Workgroup
%f32_ptr_function = OpTypePointer Function %f32
%f32_ptr_uniformconstant = OpTypePointer UniformConstant %f32
%f32_uc_var = OpVariable %f32_ptr_uniformconstant UniformConstant
%f32_ptr_image = OpTypePointer Image %f32
%f32_im_var = OpVariable %f32_ptr_image Image
%main = OpFunction %void None %func
%main_entry = OpLabel
)";
  ss << body;
  ss << R"(
OpReturn
OpFunctionEnd)";
  return ss.str();
}
// Valid OpAtomicLoad on 32-bit types with well-formed scope/semantics ids is
// accepted under the GLSL450 (shader) memory model.
TEST_F(ValidateAtomics, AtomicLoadShaderSuccess) {
  const std::string body = R"(
%val1 = OpAtomicLoad %u32 %u32_var %device %relaxed
%val2 = OpAtomicLoad %u32 %u32_var %workgroup %acquire
)";
  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
// Kernel (OpenCL) modules additionally permit float atomic loads and
// SequentiallyConsistent semantics.
TEST_F(ValidateAtomics, AtomicLoadKernelSuccess) {
  const std::string body = R"(
%val1 = OpAtomicLoad %f32 %f32_var %device %relaxed
%val2 = OpAtomicLoad %u32 %u32_var %workgroup %sequentially_consistent
)";
  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
// 64-bit integer atomic loads validate when Int64Atomics is declared (shader).
TEST_F(ValidateAtomics, AtomicLoadInt64ShaderSuccess) {
  const std::string body = R"(
%val1 = OpAtomicLoad %u64 %u64_var %subgroup %sequentially_consistent
)";
  CompileSuccessfully(GenerateShaderCode(body, "OpCapability Int64Atomics\n"));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
// 64-bit integer atomic loads validate when Int64Atomics is declared (kernel).
TEST_F(ValidateAtomics, AtomicLoadInt64KernelSuccess) {
  const std::string body = R"(
%val1 = OpAtomicLoad %u64 %u64_var %subgroup %acquire
)";
  CompileSuccessfully(GenerateKernelCode(body, "OpCapability Int64Atomics\n"));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
// 32-bit integer atomic loads with Device/Workgroup/Invocation scopes are
// accepted in a Vulkan 1.0 compute shader.
TEST_F(ValidateAtomics, AtomicLoadInt32VulkanSuccess) {
  const std::string body = R"(
%val1 = OpAtomicLoad %u32 %u32_var %device %relaxed
%val2 = OpAtomicLoad %u32 %u32_var %workgroup %acquire
%val3 = OpAtomicLoad %u32 %u32_var %invocation %relaxed
)";
  CompileSuccessfully(GenerateShaderComputeCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}
// The same atomic in a Fragment entry point must be rejected: under Vulkan,
// Workgroup storage class is restricted to compute-like execution models
// (VUID-StandaloneSpirv-None-04645). GenerateShaderCode emits Fragment.
TEST_F(ValidateAtomics, AtomicLoadVulkanWrongStorageClass) {
  const std::string body = R"(
%val1 = OpAtomicLoad %u32 %u32_var %device %relaxed
)";
  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_ID, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              AnyVUID("VUID-StandaloneSpirv-None-04645"));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("in Vulkan environment, Workgroup Storage Class is limited to "
                "MeshNV, TaskNV, and GLCompute execution model"));
}
// OpAtomicIAdd requires an integer scalar Result Type: a float scalar is
// rejected with SPV_ERROR_INVALID_DATA.
TEST_F(ValidateAtomics, AtomicAddIntVulkanWrongType1) {
  const std::string body = R"(
%val1 = OpAtomicIAdd %f32 %f32_var %device %relaxed %f32_1
)";
  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicIAdd: "
                        "expected Result Type to be integer scalar type"));
}
// ... and a float vector is rejected for the same reason.
TEST_F(ValidateAtomics, AtomicAddIntVulkanWrongType2) {
  const std::string body = R"(
%val1 = OpAtomicIAdd %f32vec4 %f32vec4_var %device %relaxed %f32_1
)";
  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicIAdd: "
                        "expected Result Type to be integer scalar type"));
}
// OpAtomicFAddEXT without any AtomicFloat*AddEXT capability declared fails
// the capability check (SPV_ERROR_INVALID_CAPABILITY).
TEST_F(ValidateAtomics, AtomicAddFloatVulkan) {
  const std::string body = R"(
%val1 = OpAtomicFAddEXT %f32 %f32_var %device %relaxed %f32_1
)";
  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_CAPABILITY, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("Opcode AtomicFAddEXT requires one of these capabilities: "
                "AtomicFloat32AddEXT AtomicFloat64AddEXT AtomicFloat16AddEXT"));
}
// Same capability check for OpAtomicFMinEXT.
TEST_F(ValidateAtomics, AtomicMinFloatVulkan) {
  const std::string body = R"(
%val1 = OpAtomicFMinEXT %f32 %f32_var %device %relaxed %f32_1
)";
  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_CAPABILITY, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("Opcode AtomicFMinEXT requires one of these capabilities: "
                "AtomicFloat32MinMaxEXT AtomicFloat64MinMaxEXT AtomicFloat16MinMaxEXT"));
}
// Same capability check for OpAtomicFMaxEXT.
TEST_F(ValidateAtomics, AtomicMaxFloatVulkan) {
  const std::string body = R"(
%val1 = OpAtomicFMaxEXT %f32 %f32_var %device %relaxed %f32_1
)";
  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_CAPABILITY, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("Opcode AtomicFMaxEXT requires one of these capabilities: "
                "AtomicFloat32MinMaxEXT AtomicFloat64MinMaxEXT AtomicFloat16MinMaxEXT"));
}
// WrongType1 trio: even with the matching capability declared, the float
// atomic opcodes require a float *scalar* Result Type — a float vector is
// rejected with SPV_ERROR_INVALID_DATA.
TEST_F(ValidateAtomics, AtomicAddFloatVulkanWrongType1) {
  const std::string body = R"(
%val1 = OpAtomicFAddEXT %f32vec4 %f32vec4_var %device %relaxed %f32_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat32AddEXT
OpExtension "SPV_EXT_shader_atomic_float_add"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFAddEXT: "
                        "expected Result Type to be float scalar type"));
}
TEST_F(ValidateAtomics, AtomicMinFloatVulkanWrongType1) {
  const std::string body = R"(
%val1 = OpAtomicFMinEXT %f32vec4 %f32vec4_var %device %relaxed %f32_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat32MinMaxEXT
OpExtension "SPV_EXT_shader_atomic_float_min_max"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFMinEXT: "
                        "expected Result Type to be float scalar type"));
}
TEST_F(ValidateAtomics, AtomicMaxFloatVulkanWrongType1) {
  const std::string body = R"(
%val1 = OpAtomicFMaxEXT %f32vec4 %f32vec4_var %device %relaxed %f32_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat32MinMaxEXT
OpExtension "SPV_EXT_shader_atomic_float_min_max"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFMaxEXT: "
                        "expected Result Type to be float scalar type"));
}
// WrongType2 trio: a 32-bit integer Result Type is likewise rejected.
TEST_F(ValidateAtomics, AtomicAddFloatVulkanWrongType2) {
  const std::string body = R"(
%val1 = OpAtomicFAddEXT %u32 %u32_var %device %relaxed %u32_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat32AddEXT
OpExtension "SPV_EXT_shader_atomic_float_add"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFAddEXT: "
                        "expected Result Type to be float scalar type"));
}
TEST_F(ValidateAtomics, AtomicMinFloatVulkanWrongType2) {
  const std::string body = R"(
%val1 = OpAtomicFMinEXT %u32 %u32_var %device %relaxed %u32_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat32MinMaxEXT
OpExtension "SPV_EXT_shader_atomic_float_min_max"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFMinEXT: "
                        "expected Result Type to be float scalar type"));
}
TEST_F(ValidateAtomics, AtomicMaxFloatVulkanWrongType2) {
  const std::string body = R"(
%val1 = OpAtomicFMaxEXT %u32 %u32_var %device %relaxed %u32_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat32MinMaxEXT
OpExtension "SPV_EXT_shader_atomic_float_min_max"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFMaxEXT: "
                        "expected Result Type to be float scalar type"));
}
// WrongType3 trio: a 64-bit integer Result Type is also rejected (the type
// check fires before any 32-vs-64-bit capability distinction matters).
TEST_F(ValidateAtomics, AtomicAddFloatVulkanWrongType3) {
  const std::string body = R"(
%val1 = OpAtomicFAddEXT %u64 %u64_var %device %relaxed %u64_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat32AddEXT
OpExtension "SPV_EXT_shader_atomic_float_add"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFAddEXT: "
                        "expected Result Type to be float scalar type"));
}
TEST_F(ValidateAtomics, AtomicMinFloatVulkanWrongType3) {
  const std::string body = R"(
%val1 = OpAtomicFMinEXT %u64 %u64_var %device %relaxed %u64_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat32MinMaxEXT
OpExtension "SPV_EXT_shader_atomic_float_min_max"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFMinEXT: "
                        "expected Result Type to be float scalar type"));
}
TEST_F(ValidateAtomics, AtomicMaxFloatVulkanWrongType3) {
  const std::string body = R"(
%val1 = OpAtomicFMaxEXT %u64 %u64_var %device %relaxed %u64_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat32MinMaxEXT
OpExtension "SPV_EXT_shader_atomic_float_min_max"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFMaxEXT: "
                        "expected Result Type to be float scalar type"));
}
// Declaring only the 64-bit float capability while operating on f32 must
// fail: each bit width needs its own AtomicFloat*EXT capability.
TEST_F(ValidateAtomics, AtomicAddFloatVulkanWrongCapability) {
  const std::string body = R"(
%val1 = OpAtomicFAddEXT %f32 %f32_var %device %relaxed %f32_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat64AddEXT
OpExtension "SPV_EXT_shader_atomic_float_add"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFAddEXT: float add atomics "
                        "require the AtomicFloat32AddEXT capability"));
}
// Same bit-width mismatch for FMin.
TEST_F(ValidateAtomics, AtomicMinFloatVulkanWrongCapability) {
  const std::string body = R"(
%val1 = OpAtomicFMinEXT %f32 %f32_var %device %relaxed %f32_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat64MinMaxEXT
OpExtension "SPV_EXT_shader_atomic_float_min_max"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFMinEXT: float min/max atomics "
                        "require the AtomicFloat32MinMaxEXT capability"));
}
// Same bit-width mismatch for FMax.
TEST_F(ValidateAtomics, AtomicMaxFloatVulkanWrongCapability) {
  const std::string body = R"(
%val1 = OpAtomicFMaxEXT %f32 %f32_var %device %relaxed %f32_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat64MinMaxEXT
OpExtension "SPV_EXT_shader_atomic_float_min_max"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFMaxEXT: float min/max atomics "
                        "require the AtomicFloat32MinMaxEXT capability"));
}
// 16-bit float add atomics validate when Float16 + AtomicFloat16AddEXT and
// the float16_add extension are declared (compute shader, Vulkan 1.0).
TEST_F(ValidateAtomics, AtomicAddFloat16VulkanSuccess) {
  const std::string defs = R"(
%f16 = OpTypeFloat 16
%f16_1 = OpConstant %f16 1
%f16_ptr = OpTypePointer Workgroup %f16
%f16_var = OpVariable %f16_ptr Workgroup
)";
  const std::string body = R"(
%val1 = OpAtomicFAddEXT %f16 %f16_var %device %relaxed %f16_1
)";
  const std::string extra = R"(
OpCapability Float16
OpCapability AtomicFloat16AddEXT
OpExtension "SPV_EXT_shader_atomic_float16_add"
)";
  CompileSuccessfully(GenerateShaderComputeCode(body, extra, defs),
                      SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}
// 32-bit float add atomics (Device and Invocation scopes) validate with
// AtomicFloat32AddEXT.
TEST_F(ValidateAtomics, AtomicAddFloatVulkanSuccess) {
  const std::string body = R"(
%val1 = OpAtomicFAddEXT %f32 %f32_var %device %relaxed %f32_1
%val2 = OpAtomicFAddEXT %f32 %f32_var %invocation %relaxed %f32_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat32AddEXT
OpExtension "SPV_EXT_shader_atomic_float_add"
)";
  CompileSuccessfully(GenerateShaderComputeCode(body, extra),
                      SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}
// 16-bit float min atomics validate with AtomicFloat16MinMaxEXT.
TEST_F(ValidateAtomics, AtomicMinFloat16VulkanSuccess) {
  const std::string defs = R"(
%f16 = OpTypeFloat 16
%f16_1 = OpConstant %f16 1
%f16_ptr = OpTypePointer Workgroup %f16
%f16_var = OpVariable %f16_ptr Workgroup
)";
  const std::string body = R"(
%val1 = OpAtomicFMinEXT %f16 %f16_var %device %relaxed %f16_1
)";
  const std::string extra = R"(
OpCapability Float16
OpCapability AtomicFloat16MinMaxEXT
OpExtension "SPV_EXT_shader_atomic_float_min_max"
)";
  CompileSuccessfully(GenerateShaderComputeCode(body, extra, defs),
                      SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}
// 16-bit float max atomics validate with AtomicFloat16MinMaxEXT.
TEST_F(ValidateAtomics, AtomicMaxFloat16VulkanSuccess) {
  const std::string defs = R"(
%f16 = OpTypeFloat 16
%f16_1 = OpConstant %f16 1
%f16_ptr = OpTypePointer Workgroup %f16
%f16_var = OpVariable %f16_ptr Workgroup
)";
  const std::string body = R"(
%val1 = OpAtomicFMaxEXT %f16 %f16_var %device %relaxed %f16_1
)";
  const std::string extra = R"(
OpCapability Float16
OpCapability AtomicFloat16MinMaxEXT
OpExtension "SPV_EXT_shader_atomic_float_min_max"
)";
  CompileSuccessfully(GenerateShaderComputeCode(body, extra, defs),
                      SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}
// Valid OpAtomicFMinEXT on a 32-bit float: needs AtomicFloat32MinMaxEXT and
// SPV_EXT_shader_atomic_float_min_max. The f32 type/variable come from the
// fixture's default module, so no extra `defs` are needed.
TEST_F(ValidateAtomics, AtomicMinFloat32VulkanSuccess) {
const std::string body = R"(
%val1 = OpAtomicFMinEXT %f32 %f32_var %device %relaxed %f32_1
)";
const std::string extra = R"(
OpCapability AtomicFloat32MinMaxEXT
OpExtension "SPV_EXT_shader_atomic_float_min_max"
)";
CompileSuccessfully(GenerateShaderComputeCode(body, extra),
SPV_ENV_VULKAN_1_0);
ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}
// Mirror of the test above for OpAtomicFMaxEXT.
TEST_F(ValidateAtomics, AtomicMaxFloat32VulkanSuccess) {
const std::string body = R"(
%val1 = OpAtomicFMaxEXT %f32 %f32_var %device %relaxed %f32_1
)";
const std::string extra = R"(
OpCapability AtomicFloat32MinMaxEXT
OpExtension "SPV_EXT_shader_atomic_float_min_max"
)";
CompileSuccessfully(GenerateShaderComputeCode(body, extra),
SPV_ENV_VULKAN_1_0);
ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}
// Valid OpAtomicFMinEXT on a 64-bit float: needs Float64 plus
// AtomicFloat64MinMaxEXT and SPV_EXT_shader_atomic_float_min_max. The f64
// type/constant/variable are supplied via `defs`.
TEST_F(ValidateAtomics, AtomicMinFloat64VulkanSuccess) {
const std::string defs = R"(
%f64 = OpTypeFloat 64
%f64_1 = OpConstant %f64 1
%f64_ptr = OpTypePointer Workgroup %f64
%f64_var = OpVariable %f64_ptr Workgroup
)";
const std::string body = R"(
%val1 = OpAtomicFMinEXT %f64 %f64_var %device %relaxed %f64_1
)";
const std::string extra = R"(
OpCapability Float64
OpCapability AtomicFloat64MinMaxEXT
OpExtension "SPV_EXT_shader_atomic_float_min_max"
)";
CompileSuccessfully(GenerateShaderComputeCode(body, extra, defs),
SPV_ENV_VULKAN_1_0);
ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}
// Mirror of the test above for OpAtomicFMaxEXT.
TEST_F(ValidateAtomics, AtomicMaxFloat64VulkanSuccess) {
const std::string defs = R"(
%f64 = OpTypeFloat 64
%f64_1 = OpConstant %f64 1
%f64_ptr = OpTypePointer Workgroup %f64
%f64_var = OpVariable %f64_ptr Workgroup
)";
const std::string body = R"(
%val1 = OpAtomicFMaxEXT %f64 %f64_var %device %relaxed %f64_1
)";
const std::string extra = R"(
OpCapability Float64
OpCapability AtomicFloat64MinMaxEXT
OpExtension "SPV_EXT_shader_atomic_float_min_max"
)";
CompileSuccessfully(GenerateShaderComputeCode(body, extra, defs),
SPV_ENV_VULKAN_1_0);
ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}
  592. TEST_F(ValidateAtomics, AtomicLoadFloatVulkan) {
  593. const std::string body = R"(
  594. %val1 = OpAtomicLoad %f32 %f32_var %device %relaxed
  595. %val2 = OpAtomicLoad %f32 %f32_var %workgroup %acquire
  596. )";
  597. CompileSuccessfully(GenerateShaderComputeCode(body), SPV_ENV_VULKAN_1_0);
  598. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  599. }
// Uses GenerateShaderCode (non-compute execution model), so the Workgroup
// variable used by the store is rejected: in Vulkan, Workgroup storage is
// restricted to MeshNV/TaskNV/GLCompute (VUID 04645).
TEST_F(ValidateAtomics, AtomicStoreVulkanWrongStorageClass) {
const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
)";
CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
ASSERT_EQ(SPV_ERROR_INVALID_ID, ValidateInstructions(SPV_ENV_VULKAN_1_0));
EXPECT_THAT(getDiagnosticString(),
AnyVUID("VUID-StandaloneSpirv-None-04645"));
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("in Vulkan environment, Workgroup Storage Class is limited to "
"MeshNV, TaskNV, and GLCompute execution model"));
}
// Float OpAtomicStore is valid in a Vulkan compute shader without any
// atomic-float capability.
TEST_F(ValidateAtomics, AtomicStoreFloatVulkan) {
const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
)";
CompileSuccessfully(GenerateShaderComputeCode(body), SPV_ENV_VULKAN_1_0);
ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}
// Likewise float OpAtomicExchange requires no extra capability.
TEST_F(ValidateAtomics, AtomicExchangeFloatVulkan) {
const std::string body = R"(
%val2 = OpAtomicExchange %f32 %f32_var %device %relaxed %f32_0
)";
CompileSuccessfully(GenerateShaderComputeCode(body), SPV_ENV_VULKAN_1_0);
ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}
// 64-bit integer atomics are valid once the Int64Atomics capability is
// declared; Device, Workgroup and Invocation scopes all pass.
TEST_F(ValidateAtomics, AtomicLoadInt64WithCapabilityVulkanSuccess) {
const std::string body = R"(
%val1 = OpAtomicLoad %u64 %u64_var %device %relaxed
%val2 = OpAtomicLoad %u64 %u64_var %workgroup %acquire
%val3 = OpAtomicLoad %u64 %u64_var %invocation %relaxed
)";
CompileSuccessfully(
GenerateShaderComputeCode(body, "OpCapability Int64Atomics\n"),
SPV_ENV_VULKAN_1_0);
ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}
// Same loads without Int64Atomics must be rejected.
TEST_F(ValidateAtomics, AtomicLoadInt64WithoutCapabilityVulkan) {
const std::string body = R"(
%val1 = OpAtomicLoad %u64 %u64_var %device %relaxed
%val2 = OpAtomicLoad %u64 %u64_var %workgroup %acquire
)";
CompileSuccessfully(GenerateShaderComputeCode(body), SPV_ENV_VULKAN_1_0);
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
EXPECT_THAT(getDiagnosticString(),
HasSubstr("64-bit atomics require the Int64Atomics capability"));
}
// Atomics on a Function-storage-class pointer: allowed in the OpenCL
// environment ...
TEST_F(ValidateAtomics, AtomicStoreOpenCLFunctionPointerStorageTypeSuccess) {
const std::string body = R"(
%f32_var_function = OpVariable %f32_ptr_function Function
OpAtomicStore %f32_var_function %device %relaxed %f32_1
)";
CompileSuccessfully(GenerateKernelCode(body), SPV_ENV_OPENCL_1_2);
ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_OPENCL_1_2));
}
// ... but rejected by Vulkan, which limits atomic storage classes to
// Uniform/Workgroup/Image/StorageBuffer/PhysicalStorageBuffer/
// TaskPayloadWorkgroupEXT (VUID 04686) ...
TEST_F(ValidateAtomics, AtomicStoreVulkanFunctionPointerStorageType) {
const std::string body = R"(
%f32_var_function = OpVariable %f32_ptr_function Function
OpAtomicStore %f32_var_function %device %relaxed %f32_1
)";
CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
EXPECT_THAT(getDiagnosticString(),
AnyVUID("VUID-StandaloneSpirv-None-04686"));
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("AtomicStore: Vulkan spec only allows storage classes for "
"atomic to be: Uniform, Workgroup, Image, StorageBuffer, "
"PhysicalStorageBuffer or TaskPayloadWorkgroupEXT."));
}
// ... and also rejected in the default (universal) environment whenever the
// Shader capability is declared.
TEST_F(ValidateAtomics, AtomicStoreFunctionPointerStorageType) {
const std::string body = R"(
%f32_var_function = OpVariable %f32_ptr_function Function
OpAtomicStore %f32_var_function %device %relaxed %f32_1
)";
CompileSuccessfully(GenerateShaderCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(getDiagnosticString(),
HasSubstr("AtomicStore: Function storage class forbidden when "
"the Shader capability is declared."));
}
  682. // TODO([email protected]): the corresponding check fails Vulkan CTS,
  683. // reenable once fixed.
  684. TEST_F(ValidateAtomics, DISABLED_AtomicLoadVulkanSubgroup) {
  685. const std::string body = R"(
  686. %val1 = OpAtomicLoad %u32 %u32_var %subgroup %acquire
  687. )";
  688. CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  689. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  690. EXPECT_THAT(getDiagnosticString(),
  691. HasSubstr("AtomicLoad: in Vulkan environment memory scope is "
  692. "limited to Device, Workgroup and Invocation"));
  693. }
  694. TEST_F(ValidateAtomics, AtomicLoadVulkanRelease) {
  695. const std::string body = R"(
  696. %val1 = OpAtomicLoad %u32 %u32_var %workgroup %release
  697. )";
  698. CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  699. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  700. EXPECT_THAT(getDiagnosticString(),
  701. AnyVUID("VUID-StandaloneSpirv-OpAtomicLoad-04731"));
  702. EXPECT_THAT(
  703. getDiagnosticString(),
  704. HasSubstr("Vulkan spec disallows OpAtomicLoad with Memory Semantics "
  705. "Release, AcquireRelease and SequentiallyConsistent"));
  706. }
// OpAtomicLoad may not use AcquireRelease semantics in Vulkan (VUID 04731).
TEST_F(ValidateAtomics, AtomicLoadVulkanAcquireRelease) {
const std::string body = R"(
%val1 = OpAtomicLoad %u32 %u32_var %workgroup %acquire_release
)";
CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
EXPECT_THAT(getDiagnosticString(),
AnyVUID("VUID-StandaloneSpirv-OpAtomicLoad-04731"));
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("Vulkan spec disallows OpAtomicLoad with Memory Semantics "
"Release, AcquireRelease and SequentiallyConsistent"));
}
// OpAtomicLoad may not use SequentiallyConsistent semantics in Vulkan
// (VUID 04731).
TEST_F(ValidateAtomics, AtomicLoadVulkanSequentiallyConsistent) {
const std::string body = R"(
%val1 = OpAtomicLoad %u32 %u32_var %workgroup %sequentially_consistent
)";
CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
EXPECT_THAT(getDiagnosticString(),
AnyVUID("VUID-StandaloneSpirv-OpAtomicLoad-04731"));
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("Vulkan spec disallows OpAtomicLoad with Memory Semantics "
"Release, AcquireRelease and SequentiallyConsistent"));
}
// With Invocation memory scope, Vulkan requires Memory Semantics None;
// Acquire here must be rejected (VUID 04641).
TEST_F(ValidateAtomics, AtomicLoadVulkanInvocationSemantics) {
const std::string body = R"(
%val1 = OpAtomicLoad %u32 %u32_var %invocation %acquire
)";
CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
EXPECT_THAT(getDiagnosticString(),
AnyVUID("VUID-StandaloneSpirv-None-04641"));
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("AtomicLoad: Vulkan specification requires Memory Semantics to "
"be None if used with Invocation Memory Scope"));
}
// Float OpAtomicLoad passes in the default (universal) shader environment.
TEST_F(ValidateAtomics, AtomicLoadShaderFloat) {
const std::string body = R"(
%val1 = OpAtomicLoad %f32 %f32_var %device %relaxed
)";
CompileSuccessfully(GenerateShaderCode(body));
ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
// 64-bit OpAtomicLoad without Int64Atomics is rejected in Vulkan ...
TEST_F(ValidateAtomics, AtomicLoadVulkanInt64) {
const std::string body = R"(
%val1 = OpAtomicLoad %u64 %u64_var %device %relaxed
)";
CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
EXPECT_THAT(
getDiagnosticString(),
HasSubstr(
"AtomicLoad: 64-bit atomics require the Int64Atomics capability"));
}
// ... and likewise for an OpenCL-style kernel module in the default
// environment.
TEST_F(ValidateAtomics, AtomicLoadKernelInt64) {
const std::string body = R"(
%val1 = OpAtomicLoad %u64 %u64_var %device %relaxed
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(
getDiagnosticString(),
HasSubstr(
"AtomicLoad: 64-bit atomics require the Int64Atomics capability"));
}
// 64-bit OpAtomicStore without Int64Atomics is rejected in Vulkan ...
TEST_F(ValidateAtomics, AtomicStoreVulkanInt64) {
const std::string body = R"(
OpAtomicStore %u64_var %device %relaxed %u64_1
)";
CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
EXPECT_THAT(
getDiagnosticString(),
HasSubstr(
"AtomicStore: 64-bit atomics require the Int64Atomics capability"));
}
// ... and likewise for a kernel module in the default environment.
TEST_F(ValidateAtomics, AtomicStoreKernelInt64) {
const std::string body = R"(
OpAtomicStore %u64_var %device %relaxed %u64_1
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(
getDiagnosticString(),
HasSubstr(
"AtomicStore: 64-bit atomics require the Int64Atomics capability"));
}
// With Int64Atomics declared, the full set of 64-bit atomic ops (min/max,
// bitwise, add, exchange, compare-exchange, load, store) is valid for both
// unsigned and signed 64-bit types.
TEST_F(ValidateAtomics, VK_KHR_shader_atomic_int64Success) {
const std::string body = R"(
%val1 = OpAtomicUMin %u64 %u64_var %device %relaxed %u64_1
%val2 = OpAtomicUMax %u64 %u64_var %device %relaxed %u64_1
%val3 = OpAtomicSMin %u64 %u64_var %device %relaxed %u64_1
%val4 = OpAtomicSMax %u64 %u64_var %device %relaxed %u64_1
%val5 = OpAtomicAnd %u64 %u64_var %device %relaxed %u64_1
%val6 = OpAtomicOr %u64 %u64_var %device %relaxed %u64_1
%val7 = OpAtomicXor %u64 %u64_var %device %relaxed %u64_1
%val8 = OpAtomicIAdd %u64 %u64_var %device %relaxed %u64_1
%val9 = OpAtomicExchange %u64 %u64_var %device %relaxed %u64_1
%val10 = OpAtomicCompareExchange %u64 %u64_var %device %relaxed %relaxed %u64_1 %u64_1
%val11 = OpAtomicUMin %s64 %s64_var %device %relaxed %s64_1
%val12 = OpAtomicUMax %s64 %s64_var %device %relaxed %s64_1
%val13 = OpAtomicSMin %s64 %s64_var %device %relaxed %s64_1
%val14 = OpAtomicSMax %s64 %s64_var %device %relaxed %s64_1
%val15 = OpAtomicAnd %s64 %s64_var %device %relaxed %s64_1
%val16 = OpAtomicOr %s64 %s64_var %device %relaxed %s64_1
%val17 = OpAtomicXor %s64 %s64_var %device %relaxed %s64_1
%val18 = OpAtomicIAdd %s64 %s64_var %device %relaxed %s64_1
%val19 = OpAtomicExchange %s64 %s64_var %device %relaxed %s64_1
%val20 = OpAtomicCompareExchange %s64 %s64_var %device %relaxed %relaxed %s64_1 %s64_1
%val21 = OpAtomicLoad %u64 %u64_var %device %relaxed
%val22 = OpAtomicLoad %s64 %s64_var %device %relaxed
OpAtomicStore %u64_var %device %relaxed %u64_1
OpAtomicStore %s64_var %device %relaxed %s64_1
)";
CompileSuccessfully(
GenerateShaderComputeCode(body, "OpCapability Int64Atomics\n"),
SPV_ENV_VULKAN_1_0);
ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}
// Without Int64Atomics even a single 64-bit atomic op is rejected.
TEST_F(ValidateAtomics, VK_KHR_shader_atomic_int64MissingCapability) {
const std::string body = R"(
%val1 = OpAtomicUMin %u64 %u64_var %device %relaxed %u64_1
)";
CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
EXPECT_THAT(
getDiagnosticString(),
HasSubstr(
"AtomicUMin: 64-bit atomics require the Int64Atomics capability"));
}
// Result Type of OpAtomicLoad must be an integer or float scalar — a vector
// type is rejected.
TEST_F(ValidateAtomics, AtomicLoadWrongResultType) {
const std::string body = R"(
%val1 = OpAtomicLoad %f32vec4 %f32vec4_var %device %relaxed
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("AtomicLoad: "
"expected Result Type to be integer or float scalar type"));
}
// Passing a type id (%f32_ptr) where a pointer value is expected fails
// id validation before the atomics checks run.
TEST_F(ValidateAtomics, AtomicLoadWrongPointerType) {
const std::string body = R"(
%val1 = OpAtomicLoad %f32 %f32_ptr %device %relaxed
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_ID, ValidateInstructions());
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("Operand '27[%_ptr_Workgroup_float]' cannot be a type"));
}
// The pointee type must match Result Type (u32 result vs f32 variable).
TEST_F(ValidateAtomics, AtomicLoadWrongPointerDataType) {
const std::string body = R"(
%val1 = OpAtomicLoad %u32 %f32_var %device %relaxed
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("AtomicLoad: "
"expected Pointer to point to a value of type Result Type"));
}
// The scope operand must be a 32-bit integer — a float constant is rejected.
TEST_F(ValidateAtomics, AtomicLoadWrongScopeType) {
const std::string body = R"(
%val1 = OpAtomicLoad %f32 %f32_var %f32_1 %relaxed
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(getDiagnosticString(),
HasSubstr("AtomicLoad: expected scope to be a 32-bit int"));
}
// Memory Semantics must also be a 32-bit integer — a 64-bit constant fails.
TEST_F(ValidateAtomics, AtomicLoadWrongMemorySemanticsType) {
const std::string body = R"(
%val1 = OpAtomicLoad %f32 %f32_var %device %u64_1
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("AtomicLoad: expected Memory Semantics to be a 32-bit int"));
}
// Valid OpAtomicStore combinations in a kernel module (float and int values,
// Device and Subgroup scopes).
TEST_F(ValidateAtomics, AtomicStoreKernelSuccess) {
const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
OpAtomicStore %u32_var %subgroup %release %u32_1
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
// Valid OpAtomicStore in the default shader environment, including
// SequentiallyConsistent (which only Vulkan forbids).
TEST_F(ValidateAtomics, AtomicStoreShaderSuccess) {
const std::string body = R"(
OpAtomicStore %u32_var %device %release %u32_1
OpAtomicStore %u32_var %subgroup %sequentially_consistent %u32_1
)";
CompileSuccessfully(GenerateShaderCode(body));
ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
// Valid OpAtomicStore under Vulkan: Device/Release and Invocation/Relaxed.
TEST_F(ValidateAtomics, AtomicStoreVulkanSuccess) {
const std::string body = R"(
OpAtomicStore %u32_var %device %release %u32_1
OpAtomicStore %u32_var %invocation %relaxed %u32_1
)";
CompileSuccessfully(GenerateShaderComputeCode(body), SPV_ENV_VULKAN_1_0);
ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}
// OpAtomicStore may not use Acquire semantics in Vulkan (VUID 04730).
TEST_F(ValidateAtomics, AtomicStoreVulkanAcquire) {
const std::string body = R"(
OpAtomicStore %u32_var %device %acquire %u32_1
)";
CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
EXPECT_THAT(getDiagnosticString(),
AnyVUID("VUID-StandaloneSpirv-OpAtomicStore-04730"));
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("Vulkan spec disallows OpAtomicStore with Memory Semantics "
"Acquire, AcquireRelease and SequentiallyConsistent"));
}
// Same check for AcquireRelease semantics.
TEST_F(ValidateAtomics, AtomicStoreVulkanAcquireRelease) {
const std::string body = R"(
OpAtomicStore %u32_var %device %acquire_release %u32_1
)";
CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
EXPECT_THAT(getDiagnosticString(),
AnyVUID("VUID-StandaloneSpirv-OpAtomicStore-04730"));
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("Vulkan spec disallows OpAtomicStore with Memory Semantics "
"Acquire, AcquireRelease and SequentiallyConsistent"));
}
// Same check for SequentiallyConsistent semantics.
TEST_F(ValidateAtomics, AtomicStoreVulkanSequentiallyConsistent) {
const std::string body = R"(
OpAtomicStore %u32_var %device %sequentially_consistent %u32_1
)";
CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
EXPECT_THAT(getDiagnosticString(),
AnyVUID("VUID-StandaloneSpirv-OpAtomicStore-04730"));
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("Vulkan spec disallows OpAtomicStore with Memory Semantics "
"Acquire, AcquireRelease and SequentiallyConsistent"));
}
// With Invocation memory scope, semantics other than None are rejected
// (VUID 04641).
TEST_F(ValidateAtomics, AtomicStoreVulkanInvocationSemantics) {
const std::string body = R"(
OpAtomicStore %u32_var %invocation %acquire %u32_1
)";
CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
EXPECT_THAT(getDiagnosticString(),
AnyVUID("VUID-StandaloneSpirv-None-04641"));
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("AtomicStore: Vulkan specification requires Memory Semantics "
"to be None if used with Invocation Memory Scope"));
}
// The Pointer operand must actually be a pointer — a float constant fails.
TEST_F(ValidateAtomics, AtomicStoreWrongPointerType) {
const std::string body = R"(
OpAtomicStore %f32_1 %device %relaxed %f32_1
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("AtomicStore: expected Pointer to be of type OpTypePointer"));
}
// The pointee must be an integer or float scalar — a vec4 pointee fails.
TEST_F(ValidateAtomics, AtomicStoreWrongPointerDataType) {
const std::string body = R"(
OpAtomicStore %f32vec4_var %device %relaxed %f32_1
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(
getDiagnosticString(),
HasSubstr(
"AtomicStore: "
"expected Pointer to be a pointer to integer or float scalar type"));
}
// OpenCL restricts atomic storage classes to Function/Workgroup/
// CrossWorkGroup/Generic; an image-storage pointer is rejected.
TEST_F(ValidateAtomics, AtomicStoreWrongPointerStorageTypeForOpenCL) {
const std::string body = R"(
OpAtomicStore %f32_im_var %device %relaxed %f32_1
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_OPENCL_1_2));
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("AtomicStore: storage class must be Function, Workgroup, "
"CrossWorkGroup or Generic in the OpenCL environment."));
}
// A UniformConstant-storage pointer is forbidden by the universal rules.
TEST_F(ValidateAtomics, AtomicStoreWrongPointerStorageType) {
const std::string body = R"(
OpAtomicStore %f32_uc_var %device %relaxed %f32_1
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(getDiagnosticString(),
HasSubstr("AtomicStore: storage class forbidden by universal "
"validation rules."));
}
// The scope operand must be a 32-bit integer; the diagnostic includes the
// disassembled instruction.
TEST_F(ValidateAtomics, AtomicStoreWrongScopeType) {
const std::string body = R"(
OpAtomicStore %f32_var %f32_1 %relaxed %f32_1
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(getDiagnosticString(),
HasSubstr("AtomicStore: expected scope to be a 32-bit int\n "
"OpAtomicStore %28 %float_1 %uint_0_1 %float_1\n"));
}
// Memory Semantics must be a 32-bit integer — a float constant fails.
TEST_F(ValidateAtomics, AtomicStoreWrongMemorySemanticsType) {
const std::string body = R"(
OpAtomicStore %f32_var %device %f32_1 %f32_1
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("AtomicStore: expected Memory Semantics to be a 32-bit int"));
}
// The stored Value's type must match the pointee type (u32 vs f32 here).
TEST_F(ValidateAtomics, AtomicStoreWrongValueType) {
const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %u32_1
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("AtomicStore: "
"expected Value type and the type pointed to by Pointer to "
"be the same"));
}
// Valid integer OpAtomicExchange in the default shader environment.
TEST_F(ValidateAtomics, AtomicExchangeShaderSuccess) {
const std::string body = R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val2 = OpAtomicExchange %u32 %u32_var %device %relaxed %u32_0
)";
CompileSuccessfully(GenerateShaderCode(body));
ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
// Valid float and integer OpAtomicExchange in a kernel module.
TEST_F(ValidateAtomics, AtomicExchangeKernelSuccess) {
const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicExchange %f32 %f32_var %device %relaxed %f32_0
OpAtomicStore %u32_var %device %relaxed %u32_1
%val4 = OpAtomicExchange %u32 %u32_var %device %relaxed %u32_0
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
// Float OpAtomicExchange is also valid in the default shader environment.
TEST_F(ValidateAtomics, AtomicExchangeShaderFloat) {
const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicExchange %f32 %f32_var %device %relaxed %f32_0
)";
CompileSuccessfully(GenerateShaderCode(body));
ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
// Result Type of OpAtomicExchange must be an integer or float scalar —
// a vec4 result is rejected.
TEST_F(ValidateAtomics, AtomicExchangeWrongResultType) {
const std::string body = R"(
OpStore %f32vec4_var %f32vec4_0000
%val2 = OpAtomicExchange %f32vec4 %f32vec4_var %device %relaxed %f32vec4_0000
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("AtomicExchange: "
"expected Result Type to be integer or float scalar type"));
}
// Passing a type id (%f32vec4_ptr) where a pointer value is expected fails
// id validation.
TEST_F(ValidateAtomics, AtomicExchangeWrongPointerType) {
const std::string body = R"(
%val2 = OpAtomicExchange %f32 %f32vec4_ptr %device %relaxed %f32vec4_0000
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_ID, ValidateInstructions());
EXPECT_THAT(getDiagnosticString(),
HasSubstr("Operand '33[%_ptr_Workgroup_v4float]' cannot be a "
"type"));
}
// The pointee must match Result Type (f32 result vs vec4 pointee).
TEST_F(ValidateAtomics, AtomicExchangeWrongPointerDataType) {
const std::string body = R"(
OpStore %f32vec4_var %f32vec4_0000
%val2 = OpAtomicExchange %f32 %f32vec4_var %device %relaxed %f32vec4_0000
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("AtomicExchange: "
"expected Pointer to point to a value of type Result Type"));
}
// The scope operand must be a 32-bit integer — a float constant fails.
TEST_F(ValidateAtomics, AtomicExchangeWrongScopeType) {
const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicExchange %f32 %f32_var %f32_1 %relaxed %f32_0
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(getDiagnosticString(),
HasSubstr("AtomicExchange: expected scope to be a 32-bit int"));
}
// Memory Semantics must be a 32-bit integer — a float constant fails.
TEST_F(ValidateAtomics, AtomicExchangeWrongMemorySemanticsType) {
const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicExchange %f32 %f32_var %device %f32_1 %f32_0
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(
getDiagnosticString(),
HasSubstr(
"AtomicExchange: expected Memory Semantics to be a 32-bit int"));
}
// The Value operand's type must equal Result Type (u32 value vs f32 result).
TEST_F(ValidateAtomics, AtomicExchangeWrongValueType) {
const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val2 = OpAtomicExchange %f32 %f32_var %device %relaxed %u32_0
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(getDiagnosticString(),
HasSubstr("AtomicExchange: "
"expected Value to be of type Result Type"));
}
// Invocation memory scope requires Memory Semantics None in Vulkan
// (VUID 04641); Acquire on the exchange must be rejected.
TEST_F(ValidateAtomics, AtomicExchangeVulkanInvocationSemantics) {
const std::string body = R"(
OpAtomicStore %u32_var %invocation %relaxed %u32_1
%val2 = OpAtomicExchange %u32 %u32_var %invocation %acquire %u32_0
)";
CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
EXPECT_THAT(getDiagnosticString(),
AnyVUID("VUID-StandaloneSpirv-None-04641"));
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("AtomicExchange: Vulkan specification requires Memory "
"Semantics to be None if used with Invocation Memory Scope"));
}
// Valid integer OpAtomicCompareExchange in the default shader environment.
TEST_F(ValidateAtomics, AtomicCompareExchangeShaderSuccess) {
const std::string body = R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val2 = OpAtomicCompareExchange %u32 %u32_var %device %relaxed %relaxed %u32_0 %u32_0
)";
CompileSuccessfully(GenerateShaderCode(body));
ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
// Same instruction is also valid in a kernel module.
TEST_F(ValidateAtomics, AtomicCompareExchangeKernelSuccess) {
const std::string body = R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val2 = OpAtomicCompareExchange %u32 %u32_var %device %relaxed %relaxed %u32_0 %u32_0
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}
// Unlike OpAtomicExchange, OpAtomicCompareExchange requires an INTEGER
// scalar Result Type, so a float compare-exchange is rejected.
TEST_F(ValidateAtomics, AtomicCompareExchangeShaderFloat) {
const std::string body = R"(
OpAtomicStore %f32_var %device %relaxed %f32_1
%val1 = OpAtomicCompareExchange %f32 %f32_var %device %relaxed %relaxed %f32_0 %f32_1
)";
CompileSuccessfully(GenerateShaderCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(getDiagnosticString(),
HasSubstr("AtomicCompareExchange: "
"expected Result Type to be integer scalar type"));
}
// Result Type must be an integer scalar — a vec4 result is rejected.
TEST_F(ValidateAtomics, AtomicCompareExchangeWrongResultType) {
const std::string body = R"(
OpStore %f32vec4_var %f32vec4_0000
%val2 = OpAtomicCompareExchange %f32vec4 %f32vec4_var %device %relaxed %relaxed %f32vec4_0000 %f32vec4_0000
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(getDiagnosticString(),
HasSubstr("AtomicCompareExchange: "
"expected Result Type to be integer scalar type"));
}
// Passing a type id (%f32vec4_ptr) where a pointer value is expected fails
// id validation.
TEST_F(ValidateAtomics, AtomicCompareExchangeWrongPointerType) {
const std::string body = R"(
%val2 = OpAtomicCompareExchange %f32 %f32vec4_ptr %device %relaxed %relaxed %f32vec4_0000 %f32vec4_0000
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_ID, ValidateInstructions());
EXPECT_THAT(getDiagnosticString(),
HasSubstr("Operand '33[%_ptr_Workgroup_v4float]' cannot be a "
"type"));
}
// The pointee must match Result Type (u32 result vs vec4 pointee).
TEST_F(ValidateAtomics, AtomicCompareExchangeWrongPointerDataType) {
const std::string body = R"(
OpStore %f32vec4_var %f32vec4_0000
%val2 = OpAtomicCompareExchange %u32 %f32vec4_var %device %relaxed %relaxed %u32_0 %u32_0
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(
getDiagnosticString(),
HasSubstr("AtomicCompareExchange: "
"expected Pointer to point to a value of type Result Type"));
}
// The scope operand must be a 32-bit integer — a 64-bit constant fails.
// Int64Atomics is enabled so the 64-bit store itself is legal.
TEST_F(ValidateAtomics, AtomicCompareExchangeWrongScopeType) {
const std::string body = R"(
OpAtomicStore %u64_var %device %relaxed %u64_1
%val2 = OpAtomicCompareExchange %u64 %u64_var %u64_1 %relaxed %relaxed %u32_0 %u32_0
)";
CompileSuccessfully(GenerateKernelCode(body, "OpCapability Int64Atomics\n"));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(getDiagnosticString(),
HasSubstr("AtomicCompareExchange: expected scope to be a 32-bit "
"int"));
}
// The Equal semantics operand must be a 32-bit integer.
TEST_F(ValidateAtomics, AtomicCompareExchangeWrongMemorySemanticsType1) {
const std::string body = R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val2 = OpAtomicCompareExchange %u32 %u32_var %device %f32_1 %relaxed %u32_0 %u32_0
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(getDiagnosticString(),
HasSubstr("AtomicCompareExchange: expected Memory Semantics to "
"be a 32-bit int"));
}
// The Unequal semantics operand must also be a 32-bit integer.
TEST_F(ValidateAtomics, AtomicCompareExchangeWrongMemorySemanticsType2) {
const std::string body = R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val2 = OpAtomicCompareExchange %u32 %u32_var %device %relaxed %f32_1 %u32_0 %u32_0
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(getDiagnosticString(),
HasSubstr("AtomicCompareExchange: expected Memory Semantics to "
"be a 32-bit int"));
}
// Release (and AcquireRelease) semantics are not allowed on the Unequal
// operand of OpAtomicCompareExchange.
TEST_F(ValidateAtomics, AtomicCompareExchangeUnequalRelease) {
const std::string body = R"(
OpAtomicStore %u32_var %device %relaxed %u32_1
%val2 = OpAtomicCompareExchange %u32 %u32_var %device %relaxed %release %u32_0 %u32_0
)";
CompileSuccessfully(GenerateKernelCode(body));
ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
EXPECT_THAT(getDiagnosticString(),
HasSubstr("AtomicCompareExchange: Memory Semantics Release and "
"AcquireRelease cannot be used for operand Unequal"));
}
  1253. TEST_F(ValidateAtomics, AtomicCompareExchangeWrongValueType) {
  1254. const std::string body = R"(
  1255. OpAtomicStore %u32_var %device %relaxed %u32_1
  1256. %val2 = OpAtomicCompareExchange %u32 %u32_var %device %relaxed %relaxed %f32_1 %u32_0
  1257. )";
  1258. CompileSuccessfully(GenerateKernelCode(body));
  1259. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1260. EXPECT_THAT(getDiagnosticString(),
  1261. HasSubstr("AtomicCompareExchange: "
  1262. "expected Value to be of type Result Type"));
  1263. }
  1264. TEST_F(ValidateAtomics, AtomicCompareExchangeWrongComparatorType) {
  1265. const std::string body = R"(
  1266. OpAtomicStore %u32_var %device %relaxed %u32_1
  1267. %val2 = OpAtomicCompareExchange %u32 %u32_var %device %relaxed %relaxed %u32_0 %f32_0
  1268. )";
  1269. CompileSuccessfully(GenerateKernelCode(body));
  1270. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1271. EXPECT_THAT(getDiagnosticString(),
  1272. HasSubstr("AtomicCompareExchange: "
  1273. "expected Comparator to be of type Result Type"));
  1274. }
  1275. TEST_F(ValidateAtomics, AtomicCompareExchangeWeakSuccess) {
  1276. const std::string body = R"(
  1277. OpAtomicStore %u32_var %device %relaxed %u32_1
  1278. %val4 = OpAtomicCompareExchangeWeak %u32 %u32_var %device %relaxed %relaxed %u32_0 %u32_0
  1279. )";
  1280. CompileSuccessfully(GenerateKernelCode(body));
  1281. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
  1282. }
  1283. TEST_F(ValidateAtomics, AtomicCompareExchangeWeakWrongResultType) {
  1284. const std::string body = R"(
  1285. OpAtomicStore %f32_var %device %relaxed %f32_1
  1286. %val2 = OpAtomicCompareExchangeWeak %f32 %f32_var %device %relaxed %relaxed %f32_0 %f32_1
  1287. )";
  1288. CompileSuccessfully(GenerateKernelCode(body));
  1289. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1290. EXPECT_THAT(getDiagnosticString(),
  1291. HasSubstr("AtomicCompareExchangeWeak: "
  1292. "expected Result Type to be integer scalar type"));
  1293. }
  1294. TEST_F(ValidateAtomics, AtomicCompareExchangeVulkanInvocationSemanticsEqual) {
  1295. const std::string body = R"(
  1296. OpAtomicStore %u32_var %device %relaxed %u32_1
  1297. %val2 = OpAtomicCompareExchange %u32 %u32_var %invocation %release %relaxed %u32_0 %u32_0
  1298. )";
  1299. CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  1300. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  1301. EXPECT_THAT(getDiagnosticString(),
  1302. AnyVUID("VUID-StandaloneSpirv-None-04641"));
  1303. EXPECT_THAT(
  1304. getDiagnosticString(),
  1305. HasSubstr("AtomicCompareExchange: Vulkan specification requires Memory "
  1306. "Semantics to be None if used with Invocation Memory Scope"));
  1307. }
  1308. TEST_F(ValidateAtomics, AtomicCompareExchangeVulkanInvocationSemanticsUnequal) {
  1309. const std::string body = R"(
  1310. OpAtomicStore %u32_var %device %relaxed %u32_1
  1311. %val2 = OpAtomicCompareExchange %u32 %u32_var %invocation %relaxed %acquire %u32_0 %u32_0
  1312. )";
  1313. CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  1314. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  1315. EXPECT_THAT(getDiagnosticString(),
  1316. AnyVUID("VUID-StandaloneSpirv-None-04641"));
  1317. EXPECT_THAT(
  1318. getDiagnosticString(),
  1319. HasSubstr("AtomicCompareExchange: Vulkan specification requires Memory "
  1320. "Semantics to be None if used with Invocation Memory Scope"));
  1321. }
  1322. TEST_F(ValidateAtomics, AtomicArithmeticsSuccess) {
  1323. const std::string body = R"(
  1324. OpAtomicStore %u32_var %device %relaxed %u32_1
  1325. %val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_release
  1326. %val2 = OpAtomicIDecrement %u32 %u32_var %device %acquire_release
  1327. %val3 = OpAtomicIAdd %u32 %u32_var %device %acquire_release %u32_1
  1328. %val4 = OpAtomicISub %u32 %u32_var %device %acquire_release %u32_1
  1329. %val5 = OpAtomicUMin %u32 %u32_var %device %acquire_release %u32_1
  1330. %val6 = OpAtomicUMax %u32 %u32_var %device %acquire_release %u32_1
  1331. %val7 = OpAtomicSMin %u32 %u32_var %device %sequentially_consistent %u32_1
  1332. %val8 = OpAtomicSMax %u32 %u32_var %device %sequentially_consistent %u32_1
  1333. %val9 = OpAtomicAnd %u32 %u32_var %device %sequentially_consistent %u32_1
  1334. %val10 = OpAtomicOr %u32 %u32_var %device %sequentially_consistent %u32_1
  1335. %val11 = OpAtomicXor %u32 %u32_var %device %sequentially_consistent %u32_1
  1336. )";
  1337. CompileSuccessfully(GenerateKernelCode(body));
  1338. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
  1339. }
  1340. TEST_F(ValidateAtomics, AtomicFlagsSuccess) {
  1341. const std::string body = R"(
  1342. OpAtomicFlagClear %u32_var %device %release
  1343. %val1 = OpAtomicFlagTestAndSet %bool %u32_var %device %relaxed
  1344. )";
  1345. CompileSuccessfully(GenerateKernelCode(body));
  1346. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
  1347. }
  1348. TEST_F(ValidateAtomics, AtomicFlagTestAndSetWrongResultType) {
  1349. const std::string body = R"(
  1350. %val1 = OpAtomicFlagTestAndSet %u32 %u32_var %device %relaxed
  1351. )";
  1352. CompileSuccessfully(GenerateKernelCode(body));
  1353. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1354. EXPECT_THAT(getDiagnosticString(),
  1355. HasSubstr("AtomicFlagTestAndSet: "
  1356. "expected Result Type to be bool scalar type"));
  1357. }
  1358. TEST_F(ValidateAtomics, AtomicFlagTestAndSetNotPointer) {
  1359. const std::string body = R"(
  1360. %val1 = OpAtomicFlagTestAndSet %bool %u32_1 %device %relaxed
  1361. )";
  1362. CompileSuccessfully(GenerateKernelCode(body));
  1363. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1364. EXPECT_THAT(getDiagnosticString(),
  1365. HasSubstr("AtomicFlagTestAndSet: "
  1366. "expected Pointer to be of type OpTypePointer"));
  1367. }
  1368. TEST_F(ValidateAtomics, AtomicFlagTestAndSetNotIntPointer) {
  1369. const std::string body = R"(
  1370. %val1 = OpAtomicFlagTestAndSet %bool %f32_var %device %relaxed
  1371. )";
  1372. CompileSuccessfully(GenerateKernelCode(body));
  1373. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1374. EXPECT_THAT(
  1375. getDiagnosticString(),
  1376. HasSubstr("AtomicFlagTestAndSet: "
  1377. "expected Pointer to point to a value of 32-bit integer type"));
  1378. }
  1379. TEST_F(ValidateAtomics, AtomicFlagTestAndSetNotInt32Pointer) {
  1380. const std::string body = R"(
  1381. %val1 = OpAtomicFlagTestAndSet %bool %u64_var %device %relaxed
  1382. )";
  1383. CompileSuccessfully(GenerateKernelCode(body, "OpCapability Int64Atomics\n"));
  1384. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1385. EXPECT_THAT(
  1386. getDiagnosticString(),
  1387. HasSubstr("AtomicFlagTestAndSet: "
  1388. "expected Pointer to point to a value of 32-bit integer type"));
  1389. }
  1390. TEST_F(ValidateAtomics, AtomicFlagTestAndSetWrongScopeType) {
  1391. const std::string body = R"(
  1392. %val1 = OpAtomicFlagTestAndSet %bool %u32_var %u64_1 %relaxed
  1393. )";
  1394. CompileSuccessfully(GenerateKernelCode(body));
  1395. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1396. EXPECT_THAT(
  1397. getDiagnosticString(),
  1398. HasSubstr("AtomicFlagTestAndSet: expected scope to be a 32-bit int"));
  1399. }
  1400. TEST_F(ValidateAtomics, AtomicFlagTestAndSetWrongMemorySemanticsType) {
  1401. const std::string body = R"(
  1402. %val1 = OpAtomicFlagTestAndSet %bool %u32_var %device %u64_1
  1403. )";
  1404. CompileSuccessfully(GenerateKernelCode(body));
  1405. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1406. EXPECT_THAT(getDiagnosticString(),
  1407. HasSubstr("AtomicFlagTestAndSet: "
  1408. "expected Memory Semantics to be a 32-bit int"));
  1409. }
  1410. TEST_F(ValidateAtomics, AtomicFlagClearAcquire) {
  1411. const std::string body = R"(
  1412. OpAtomicFlagClear %u32_var %device %acquire
  1413. )";
  1414. CompileSuccessfully(GenerateKernelCode(body));
  1415. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1416. EXPECT_THAT(getDiagnosticString(),
  1417. HasSubstr("Memory Semantics Acquire and AcquireRelease cannot be "
  1418. "used with AtomicFlagClear"));
  1419. }
  1420. TEST_F(ValidateAtomics, AtomicFlagClearNotPointer) {
  1421. const std::string body = R"(
  1422. OpAtomicFlagClear %u32_1 %device %relaxed
  1423. )";
  1424. CompileSuccessfully(GenerateKernelCode(body));
  1425. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1426. EXPECT_THAT(getDiagnosticString(),
  1427. HasSubstr("AtomicFlagClear: "
  1428. "expected Pointer to be of type OpTypePointer"));
  1429. }
  1430. TEST_F(ValidateAtomics, AtomicFlagClearNotIntPointer) {
  1431. const std::string body = R"(
  1432. OpAtomicFlagClear %f32_var %device %relaxed
  1433. )";
  1434. CompileSuccessfully(GenerateKernelCode(body));
  1435. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1436. EXPECT_THAT(
  1437. getDiagnosticString(),
  1438. HasSubstr("AtomicFlagClear: "
  1439. "expected Pointer to point to a value of 32-bit integer type"));
  1440. }
  1441. TEST_F(ValidateAtomics, AtomicFlagClearNotInt32Pointer) {
  1442. const std::string body = R"(
  1443. OpAtomicFlagClear %u64_var %device %relaxed
  1444. )";
  1445. CompileSuccessfully(GenerateKernelCode(body, "OpCapability Int64Atomics\n"));
  1446. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1447. EXPECT_THAT(
  1448. getDiagnosticString(),
  1449. HasSubstr("AtomicFlagClear: "
  1450. "expected Pointer to point to a value of 32-bit integer type"));
  1451. }
  1452. TEST_F(ValidateAtomics, AtomicFlagClearWrongScopeType) {
  1453. const std::string body = R"(
  1454. OpAtomicFlagClear %u32_var %u64_1 %relaxed
  1455. )";
  1456. CompileSuccessfully(GenerateKernelCode(body));
  1457. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1458. EXPECT_THAT(getDiagnosticString(),
  1459. HasSubstr("AtomicFlagClear: expected scope to be a 32-bit "
  1460. "int\n OpAtomicFlagClear %30 %ulong_1 %uint_0_1\n"));
  1461. }
  1462. TEST_F(ValidateAtomics, AtomicFlagClearWrongMemorySemanticsType) {
  1463. const std::string body = R"(
  1464. OpAtomicFlagClear %u32_var %device %u64_1
  1465. )";
  1466. CompileSuccessfully(GenerateKernelCode(body));
  1467. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1468. EXPECT_THAT(
  1469. getDiagnosticString(),
  1470. HasSubstr(
  1471. "AtomicFlagClear: expected Memory Semantics to be a 32-bit int"));
  1472. }
  1473. TEST_F(ValidateAtomics, AtomicIIncrementAcquireAndRelease) {
  1474. const std::string body = R"(
  1475. OpAtomicStore %u32_var %device %relaxed %u32_1
  1476. %val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_and_release
  1477. )";
  1478. CompileSuccessfully(GenerateKernelCode(body));
  1479. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1480. EXPECT_THAT(getDiagnosticString(),
  1481. HasSubstr("AtomicIIncrement: Memory Semantics can have at most "
  1482. "one of the following bits set: Acquire, Release, "
  1483. "AcquireRelease or SequentiallyConsistent"));
  1484. }
  1485. TEST_F(ValidateAtomics, AtomicUniformMemorySemanticsShader) {
  1486. const std::string body = R"(
  1487. OpAtomicStore %u32_var %device %relaxed %u32_1
  1488. %val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_release_uniform_workgroup
  1489. )";
  1490. CompileSuccessfully(GenerateShaderCode(body));
  1491. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
  1492. }
  1493. TEST_F(ValidateAtomics, AtomicUniformMemorySemanticsKernel) {
  1494. const std::string body = R"(
  1495. OpAtomicStore %u32_var %device %relaxed %u32_1
  1496. %val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_release_uniform_workgroup
  1497. )";
  1498. CompileSuccessfully(GenerateKernelCode(body));
  1499. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1500. EXPECT_THAT(getDiagnosticString(),
  1501. HasSubstr("AtomicIIncrement: Memory Semantics UniformMemory "
  1502. "requires capability Shader"));
  1503. }
  1504. // Lack of the AtomicStorage capability is intentionally ignored, see
  1505. // https://github.com/KhronosGroup/glslang/issues/1618 for the reasoning why.
  1506. TEST_F(ValidateAtomics, AtomicCounterMemorySemanticsNoCapability) {
  1507. const std::string body = R"(
  1508. OpAtomicStore %u32_var %device %relaxed %u32_1
  1509. %val1 = OpAtomicIIncrement %u32 %u32_var %device
  1510. %acquire_release_atomic_counter_workgroup
  1511. )";
  1512. CompileSuccessfully(GenerateKernelCode(body));
  1513. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
  1514. }
  1515. TEST_F(ValidateAtomics, AtomicCounterMemorySemanticsWithCapability) {
  1516. const std::string body = R"(
  1517. OpAtomicStore %u32_var %device %relaxed %u32_1
  1518. %val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_release_atomic_counter_workgroup
  1519. )";
  1520. CompileSuccessfully(GenerateKernelCode(body, "OpCapability AtomicStorage\n"));
  1521. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
  1522. }
  1523. TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicLoad) {
  1524. const std::string body = R"(
  1525. %ld = OpAtomicLoad %u32 %u32_var %workgroup %sequentially_consistent
  1526. )";
  1527. const std::string extra = R"(
  1528. OpCapability VulkanMemoryModelKHR
  1529. OpExtension "SPV_KHR_vulkan_memory_model"
  1530. )";
  1531. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1532. SPV_ENV_UNIVERSAL_1_3);
  1533. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1534. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1535. EXPECT_THAT(getDiagnosticString(),
  1536. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1537. "used with the VulkanKHR memory model."));
  1538. }
  1539. TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicStore) {
  1540. const std::string body = R"(
  1541. OpAtomicStore %u32_var %workgroup %sequentially_consistent %u32_0
  1542. )";
  1543. const std::string extra = R"(
  1544. OpCapability VulkanMemoryModelKHR
  1545. OpExtension "SPV_KHR_vulkan_memory_model"
  1546. )";
  1547. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1548. SPV_ENV_UNIVERSAL_1_3);
  1549. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1550. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1551. EXPECT_THAT(getDiagnosticString(),
  1552. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1553. "used with the VulkanKHR memory model."));
  1554. }
  1555. TEST_F(ValidateAtomics,
  1556. VulkanMemoryModelBanSequentiallyConsistentAtomicExchange) {
  1557. const std::string body = R"(
  1558. %ex = OpAtomicExchange %u32 %u32_var %workgroup %sequentially_consistent %u32_0
  1559. )";
  1560. const std::string extra = R"(
  1561. OpCapability VulkanMemoryModelKHR
  1562. OpExtension "SPV_KHR_vulkan_memory_model"
  1563. )";
  1564. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1565. SPV_ENV_UNIVERSAL_1_3);
  1566. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1567. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1568. EXPECT_THAT(getDiagnosticString(),
  1569. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1570. "used with the VulkanKHR memory model."));
  1571. }
  1572. TEST_F(ValidateAtomics,
  1573. VulkanMemoryModelBanSequentiallyConsistentAtomicCompareExchangeEqual) {
  1574. const std::string body = R"(
  1575. %ex = OpAtomicCompareExchange %u32 %u32_var %workgroup %sequentially_consistent %relaxed %u32_0 %u32_0
  1576. )";
  1577. const std::string extra = R"(
  1578. OpCapability VulkanMemoryModelKHR
  1579. OpExtension "SPV_KHR_vulkan_memory_model"
  1580. )";
  1581. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1582. SPV_ENV_UNIVERSAL_1_3);
  1583. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1584. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1585. EXPECT_THAT(getDiagnosticString(),
  1586. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1587. "used with the VulkanKHR memory model."));
  1588. }
  1589. TEST_F(ValidateAtomics,
  1590. VulkanMemoryModelBanSequentiallyConsistentAtomicCompareExchangeUnequal) {
  1591. const std::string body = R"(
  1592. %ex = OpAtomicCompareExchange %u32 %u32_var %workgroup %relaxed %sequentially_consistent %u32_0 %u32_0
  1593. )";
  1594. const std::string extra = R"(
  1595. OpCapability VulkanMemoryModelKHR
  1596. OpExtension "SPV_KHR_vulkan_memory_model"
  1597. )";
  1598. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1599. SPV_ENV_UNIVERSAL_1_3);
  1600. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1601. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1602. EXPECT_THAT(getDiagnosticString(),
  1603. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1604. "used with the VulkanKHR memory model."));
  1605. }
  1606. TEST_F(ValidateAtomics,
  1607. VulkanMemoryModelBanSequentiallyConsistentAtomicIIncrement) {
  1608. const std::string body = R"(
  1609. %inc = OpAtomicIIncrement %u32 %u32_var %workgroup %sequentially_consistent
  1610. )";
  1611. const std::string extra = R"(
  1612. OpCapability VulkanMemoryModelKHR
  1613. OpExtension "SPV_KHR_vulkan_memory_model"
  1614. )";
  1615. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1616. SPV_ENV_UNIVERSAL_1_3);
  1617. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1618. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1619. EXPECT_THAT(getDiagnosticString(),
  1620. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1621. "used with the VulkanKHR memory model."));
  1622. }
  1623. TEST_F(ValidateAtomics,
  1624. VulkanMemoryModelBanSequentiallyConsistentAtomicIDecrement) {
  1625. const std::string body = R"(
  1626. %dec = OpAtomicIDecrement %u32 %u32_var %workgroup %sequentially_consistent
  1627. )";
  1628. const std::string extra = R"(
  1629. OpCapability VulkanMemoryModelKHR
  1630. OpExtension "SPV_KHR_vulkan_memory_model"
  1631. )";
  1632. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1633. SPV_ENV_UNIVERSAL_1_3);
  1634. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1635. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1636. EXPECT_THAT(getDiagnosticString(),
  1637. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1638. "used with the VulkanKHR memory model."));
  1639. }
  1640. TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicIAdd) {
  1641. const std::string body = R"(
  1642. %add = OpAtomicIAdd %u32 %u32_var %workgroup %sequentially_consistent %u32_0
  1643. )";
  1644. const std::string extra = R"(
  1645. OpCapability VulkanMemoryModelKHR
  1646. OpExtension "SPV_KHR_vulkan_memory_model"
  1647. )";
  1648. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1649. SPV_ENV_UNIVERSAL_1_3);
  1650. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1651. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1652. EXPECT_THAT(getDiagnosticString(),
  1653. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1654. "used with the VulkanKHR memory model."));
  1655. }
  1656. TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicISub) {
  1657. const std::string body = R"(
  1658. %sub = OpAtomicISub %u32 %u32_var %workgroup %sequentially_consistent %u32_0
  1659. )";
  1660. const std::string extra = R"(
  1661. OpCapability VulkanMemoryModelKHR
  1662. OpExtension "SPV_KHR_vulkan_memory_model"
  1663. )";
  1664. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1665. SPV_ENV_UNIVERSAL_1_3);
  1666. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1667. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1668. EXPECT_THAT(getDiagnosticString(),
  1669. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1670. "used with the VulkanKHR memory model."));
  1671. }
  1672. TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicSMin) {
  1673. const std::string body = R"(
  1674. %min = OpAtomicSMin %u32 %u32_var %workgroup %sequentially_consistent %u32_0
  1675. )";
  1676. const std::string extra = R"(
  1677. OpCapability VulkanMemoryModelKHR
  1678. OpExtension "SPV_KHR_vulkan_memory_model"
  1679. )";
  1680. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1681. SPV_ENV_UNIVERSAL_1_3);
  1682. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1683. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1684. EXPECT_THAT(getDiagnosticString(),
  1685. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1686. "used with the VulkanKHR memory model."));
  1687. }
  1688. TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicUMin) {
  1689. const std::string body = R"(
  1690. %min = OpAtomicUMin %u32 %u32_var %workgroup %sequentially_consistent %u32_0
  1691. )";
  1692. const std::string extra = R"(
  1693. OpCapability VulkanMemoryModelKHR
  1694. OpExtension "SPV_KHR_vulkan_memory_model"
  1695. )";
  1696. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1697. SPV_ENV_UNIVERSAL_1_3);
  1698. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1699. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1700. EXPECT_THAT(getDiagnosticString(),
  1701. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1702. "used with the VulkanKHR memory model."));
  1703. }
  1704. TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicFMinEXT) {
  1705. const std::string body = R"(
  1706. %max = OpAtomicFMinEXT %f32 %f32_var %workgroup %sequentially_consistent %f32_0
  1707. )";
  1708. const std::string extra = R"(
  1709. OpCapability VulkanMemoryModelKHR
  1710. OpCapability AtomicFloat32MinMaxEXT
  1711. OpExtension "SPV_KHR_vulkan_memory_model"
  1712. OpExtension "SPV_EXT_shader_atomic_float_min_max"
  1713. )";
  1714. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1715. SPV_ENV_UNIVERSAL_1_3);
  1716. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1717. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1718. EXPECT_THAT(getDiagnosticString(),
  1719. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1720. "used with the VulkanKHR memory model."));
  1721. }
  1722. TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicSMax) {
  1723. const std::string body = R"(
  1724. %max = OpAtomicSMax %u32 %u32_var %workgroup %sequentially_consistent %u32_0
  1725. )";
  1726. const std::string extra = R"(
  1727. OpCapability VulkanMemoryModelKHR
  1728. OpExtension "SPV_KHR_vulkan_memory_model"
  1729. )";
  1730. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1731. SPV_ENV_UNIVERSAL_1_3);
  1732. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1733. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1734. EXPECT_THAT(getDiagnosticString(),
  1735. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1736. "used with the VulkanKHR memory model."));
  1737. }
  1738. TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicUMax) {
  1739. const std::string body = R"(
  1740. %max = OpAtomicUMax %u32 %u32_var %workgroup %sequentially_consistent %u32_0
  1741. )";
  1742. const std::string extra = R"(
  1743. OpCapability VulkanMemoryModelKHR
  1744. OpExtension "SPV_KHR_vulkan_memory_model"
  1745. )";
  1746. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1747. SPV_ENV_UNIVERSAL_1_3);
  1748. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1749. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1750. EXPECT_THAT(getDiagnosticString(),
  1751. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1752. "used with the VulkanKHR memory model."));
  1753. }
  1754. TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicFMaxEXT) {
  1755. const std::string body = R"(
  1756. %max = OpAtomicFMaxEXT %f32 %f32_var %workgroup %sequentially_consistent %f32_0
  1757. )";
  1758. const std::string extra = R"(
  1759. OpCapability VulkanMemoryModelKHR
  1760. OpCapability AtomicFloat32MinMaxEXT
  1761. OpExtension "SPV_KHR_vulkan_memory_model"
  1762. OpExtension "SPV_EXT_shader_atomic_float_min_max"
  1763. )";
  1764. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1765. SPV_ENV_UNIVERSAL_1_3);
  1766. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1767. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1768. EXPECT_THAT(getDiagnosticString(),
  1769. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1770. "used with the VulkanKHR memory model."));
  1771. }
  1772. TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicAnd) {
  1773. const std::string body = R"(
  1774. %and = OpAtomicAnd %u32 %u32_var %workgroup %sequentially_consistent %u32_0
  1775. )";
  1776. const std::string extra = R"(
  1777. OpCapability VulkanMemoryModelKHR
  1778. OpExtension "SPV_KHR_vulkan_memory_model"
  1779. )";
  1780. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1781. SPV_ENV_UNIVERSAL_1_3);
  1782. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1783. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1784. EXPECT_THAT(getDiagnosticString(),
  1785. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1786. "used with the VulkanKHR memory model."));
  1787. }
  1788. TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicOr) {
  1789. const std::string body = R"(
  1790. %or = OpAtomicOr %u32 %u32_var %workgroup %sequentially_consistent %u32_0
  1791. )";
  1792. const std::string extra = R"(
  1793. OpCapability VulkanMemoryModelKHR
  1794. OpExtension "SPV_KHR_vulkan_memory_model"
  1795. )";
  1796. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1797. SPV_ENV_UNIVERSAL_1_3);
  1798. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1799. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1800. EXPECT_THAT(getDiagnosticString(),
  1801. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1802. "used with the VulkanKHR memory model."));
  1803. }
  1804. TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicXor) {
  1805. const std::string body = R"(
  1806. %xor = OpAtomicXor %u32 %u32_var %workgroup %sequentially_consistent %u32_0
  1807. )";
  1808. const std::string extra = R"(
  1809. OpCapability VulkanMemoryModelKHR
  1810. OpExtension "SPV_KHR_vulkan_memory_model"
  1811. )";
  1812. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1813. SPV_ENV_UNIVERSAL_1_3);
  1814. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1815. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1816. EXPECT_THAT(getDiagnosticString(),
  1817. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1818. "used with the VulkanKHR memory model."));
  1819. }
  1820. TEST_F(ValidateAtomics, OutputMemoryKHRRequiresVulkanMemoryModelKHR) {
  1821. const std::string text = R"(
  1822. OpCapability Shader
  1823. OpMemoryModel Logical GLSL450
  1824. OpEntryPoint Fragment %1 "func"
  1825. OpExecutionMode %1 OriginUpperLeft
  1826. %2 = OpTypeVoid
  1827. %3 = OpTypeInt 32 0
  1828. %semantics = OpConstant %3 4100
  1829. %5 = OpTypeFunction %2
  1830. %workgroup = OpConstant %3 2
  1831. %ptr = OpTypePointer Workgroup %3
  1832. %var = OpVariable %ptr Workgroup
  1833. %1 = OpFunction %2 None %5
  1834. %7 = OpLabel
  1835. OpAtomicStore %var %workgroup %semantics %workgroup
  1836. OpReturn
  1837. OpFunctionEnd
  1838. )";
  1839. CompileSuccessfully(text);
  1840. EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1841. EXPECT_THAT(getDiagnosticString(),
  1842. HasSubstr("AtomicStore: Memory Semantics OutputMemoryKHR "
  1843. "requires capability VulkanMemoryModelKHR"));
  1844. }
  1845. TEST_F(ValidateAtomics, MakeAvailableKHRRequiresVulkanMemoryModelKHR) {
  1846. const std::string text = R"(
  1847. OpCapability Shader
  1848. OpMemoryModel Logical GLSL450
  1849. OpEntryPoint Fragment %1 "func"
  1850. OpExecutionMode %1 OriginUpperLeft
  1851. %2 = OpTypeVoid
  1852. %3 = OpTypeInt 32 0
  1853. %semantics = OpConstant %3 8196
  1854. %5 = OpTypeFunction %2
  1855. %workgroup = OpConstant %3 2
  1856. %ptr = OpTypePointer Workgroup %3
  1857. %var = OpVariable %ptr Workgroup
  1858. %1 = OpFunction %2 None %5
  1859. %7 = OpLabel
  1860. OpAtomicStore %var %workgroup %semantics %workgroup
  1861. OpReturn
  1862. OpFunctionEnd
  1863. )";
  1864. CompileSuccessfully(text);
  1865. EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1866. EXPECT_THAT(getDiagnosticString(),
  1867. HasSubstr("AtomicStore: Memory Semantics MakeAvailableKHR "
  1868. "requires capability VulkanMemoryModelKHR"));
  1869. }
  1870. TEST_F(ValidateAtomics, MakeVisibleKHRRequiresVulkanMemoryModelKHR) {
  1871. const std::string text = R"(
  1872. OpCapability Shader
  1873. OpMemoryModel Logical GLSL450
  1874. OpEntryPoint Fragment %1 "func"
  1875. OpExecutionMode %1 OriginUpperLeft
  1876. %2 = OpTypeVoid
  1877. %3 = OpTypeInt 32 0
  1878. %semantics = OpConstant %3 16386
  1879. %5 = OpTypeFunction %2
  1880. %workgroup = OpConstant %3 2
  1881. %ptr = OpTypePointer Workgroup %3
  1882. %var = OpVariable %ptr Workgroup
  1883. %1 = OpFunction %2 None %5
  1884. %7 = OpLabel
  1885. %ld = OpAtomicLoad %3 %var %workgroup %semantics
  1886. OpReturn
  1887. OpFunctionEnd
  1888. )";
  1889. CompileSuccessfully(text);
  1890. EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1891. EXPECT_THAT(getDiagnosticString(),
  1892. HasSubstr("AtomicLoad: Memory Semantics MakeVisibleKHR requires "
  1893. "capability VulkanMemoryModelKHR"));
  1894. }
  1895. TEST_F(ValidateAtomics, MakeAvailableKHRRequiresReleaseSemantics) {
  1896. const std::string text = R"(
  1897. OpCapability Shader
  1898. OpCapability VulkanMemoryModelKHR
  1899. OpExtension "SPV_KHR_vulkan_memory_model"
  1900. OpMemoryModel Logical VulkanKHR
  1901. OpEntryPoint Fragment %1 "func"
  1902. OpExecutionMode %1 OriginUpperLeft
  1903. %2 = OpTypeVoid
  1904. %3 = OpTypeInt 32 0
  1905. %semantics = OpConstant %3 8448
  1906. %5 = OpTypeFunction %2
  1907. %workgroup = OpConstant %3 2
  1908. %ptr = OpTypePointer Workgroup %3
  1909. %var = OpVariable %ptr Workgroup
  1910. %1 = OpFunction %2 None %5
  1911. %7 = OpLabel
  1912. OpAtomicStore %var %workgroup %semantics %workgroup
  1913. OpReturn
  1914. OpFunctionEnd
  1915. )";
  1916. CompileSuccessfully(text, SPV_ENV_UNIVERSAL_1_3);
  1917. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1918. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1919. EXPECT_THAT(
  1920. getDiagnosticString(),
  1921. HasSubstr("AtomicStore: MakeAvailableKHR Memory Semantics also requires "
  1922. "either Release or AcquireRelease Memory Semantics"));
  1923. }
  1924. TEST_F(ValidateAtomics, MakeVisibleKHRRequiresAcquireSemantics) {
  1925. const std::string text = R"(
  1926. OpCapability Shader
  1927. OpCapability VulkanMemoryModelKHR
  1928. OpExtension "SPV_KHR_vulkan_memory_model"
  1929. OpMemoryModel Logical VulkanKHR
  1930. OpEntryPoint Fragment %1 "func"
  1931. OpExecutionMode %1 OriginUpperLeft
  1932. %2 = OpTypeVoid
  1933. %3 = OpTypeInt 32 0
  1934. %semantics = OpConstant %3 16640
  1935. %5 = OpTypeFunction %2
  1936. %workgroup = OpConstant %3 2
  1937. %ptr = OpTypePointer Workgroup %3
  1938. %var = OpVariable %ptr Workgroup
  1939. %1 = OpFunction %2 None %5
  1940. %7 = OpLabel
  1941. %ld = OpAtomicLoad %3 %var %workgroup %semantics
  1942. OpReturn
  1943. OpFunctionEnd
  1944. )";
  1945. CompileSuccessfully(text, SPV_ENV_UNIVERSAL_1_3);
  1946. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1947. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1948. EXPECT_THAT(
  1949. getDiagnosticString(),
  1950. HasSubstr("AtomicLoad: MakeVisibleKHR Memory Semantics also requires "
  1951. "either Acquire or AcquireRelease Memory Semantics"));
  1952. }
  1953. TEST_F(ValidateAtomics, MakeAvailableKHRRequiresStorageSemantics) {
  1954. const std::string text = R"(
  1955. OpCapability Shader
  1956. OpCapability VulkanMemoryModelKHR
  1957. OpExtension "SPV_KHR_vulkan_memory_model"
  1958. OpMemoryModel Logical VulkanKHR
  1959. OpEntryPoint Fragment %1 "func"
  1960. OpExecutionMode %1 OriginUpperLeft
  1961. %2 = OpTypeVoid
  1962. %3 = OpTypeInt 32 0
  1963. %semantics = OpConstant %3 8196
  1964. %5 = OpTypeFunction %2
  1965. %workgroup = OpConstant %3 2
  1966. %ptr = OpTypePointer Workgroup %3
  1967. %var = OpVariable %ptr Workgroup
  1968. %1 = OpFunction %2 None %5
  1969. %7 = OpLabel
  1970. OpAtomicStore %var %workgroup %semantics %workgroup
  1971. OpReturn
  1972. OpFunctionEnd
  1973. )";
  1974. CompileSuccessfully(text, SPV_ENV_UNIVERSAL_1_3);
  1975. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1976. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1977. EXPECT_THAT(
  1978. getDiagnosticString(),
  1979. HasSubstr(
  1980. "AtomicStore: expected Memory Semantics to include a storage class"));
  1981. }
  1982. TEST_F(ValidateAtomics, MakeVisibleKHRRequiresStorageSemantics) {
  1983. const std::string text = R"(
  1984. OpCapability Shader
  1985. OpCapability VulkanMemoryModelKHR
  1986. OpExtension "SPV_KHR_vulkan_memory_model"
  1987. OpMemoryModel Logical VulkanKHR
  1988. OpEntryPoint Fragment %1 "func"
  1989. OpExecutionMode %1 OriginUpperLeft
  1990. %2 = OpTypeVoid
  1991. %3 = OpTypeInt 32 0
  1992. %semantics = OpConstant %3 16386
  1993. %5 = OpTypeFunction %2
  1994. %workgroup = OpConstant %3 2
  1995. %ptr = OpTypePointer Workgroup %3
  1996. %var = OpVariable %ptr Workgroup
  1997. %1 = OpFunction %2 None %5
  1998. %7 = OpLabel
  1999. %ld = OpAtomicLoad %3 %var %workgroup %semantics
  2000. OpReturn
  2001. OpFunctionEnd
  2002. )";
  2003. CompileSuccessfully(text, SPV_ENV_UNIVERSAL_1_3);
  2004. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  2005. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  2006. EXPECT_THAT(
  2007. getDiagnosticString(),
  2008. HasSubstr(
  2009. "AtomicLoad: expected Memory Semantics to include a storage class"));
  2010. }
  2011. TEST_F(ValidateAtomics, VulkanMemoryModelAllowsQueueFamilyKHR) {
  2012. const std::string body = R"(
  2013. %val = OpAtomicAnd %u32 %u32_var %queuefamily %relaxed %u32_1
  2014. )";
  2015. const std::string extra = R"(
  2016. OpCapability VulkanMemoryModelKHR
  2017. OpExtension "SPV_KHR_vulkan_memory_model"
  2018. )";
  2019. CompileSuccessfully(GenerateShaderComputeCode(body, extra, "", "VulkanKHR"),
  2020. SPV_ENV_VULKAN_1_1);
  2021. EXPECT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_1));
  2022. }
  2023. TEST_F(ValidateAtomics, NonVulkanMemoryModelDisallowsQueueFamilyKHR) {
  2024. const std::string body = R"(
  2025. %val = OpAtomicAnd %u32 %u32_var %queuefamily %relaxed %u32_1
  2026. )";
  2027. CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_1);
  2028. EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_1));
  2029. EXPECT_THAT(getDiagnosticString(),
  2030. HasSubstr("AtomicAnd: Memory Scope QueueFamilyKHR requires "
  2031. "capability VulkanMemoryModelKHR\n %42 = OpAtomicAnd "
  2032. "%uint %29 %uint_5 %uint_0_1 %uint_1\n"));
  2033. }
// With the Shader capability present, the Memory Semantics operand of an
// atomic must be an OpConstant; an OpSpecConstant (even one whose default,
// 0 = Relaxed, would be valid) is rejected.
TEST_F(ValidateAtomics, SemanticsSpecConstantShader) {
  const std::string spirv = R"(
OpCapability Shader
OpMemoryModel Logical GLSL450
OpEntryPoint Fragment %func "func"
OpExecutionMode %func OriginUpperLeft
%void = OpTypeVoid
%int = OpTypeInt 32 0
%spec_const = OpSpecConstant %int 0
%workgroup = OpConstant %int 2
%ptr_int_workgroup = OpTypePointer Workgroup %int
%var = OpVariable %ptr_int_workgroup Workgroup
%voidfn = OpTypeFunction %void
%func = OpFunction %void None %voidfn
%entry = OpLabel
%ld = OpAtomicLoad %int %var %workgroup %spec_const
OpReturn
OpFunctionEnd
)";
  CompileSuccessfully(spirv);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("Memory Semantics ids must be OpConstant when Shader "
                        "capability is present"));
}
  2059. TEST_F(ValidateAtomics, SemanticsSpecConstantKernel) {
  2060. const std::string spirv = R"(
  2061. OpCapability Kernel
  2062. OpCapability Linkage
  2063. OpMemoryModel Logical OpenCL
  2064. %void = OpTypeVoid
  2065. %int = OpTypeInt 32 0
  2066. %spec_const = OpSpecConstant %int 0
  2067. %workgroup = OpConstant %int 2
  2068. %ptr_int_workgroup = OpTypePointer Workgroup %int
  2069. %var = OpVariable %ptr_int_workgroup Workgroup
  2070. %voidfn = OpTypeFunction %void
  2071. %func = OpFunction %void None %voidfn
  2072. %entry = OpLabel
  2073. %ld = OpAtomicLoad %int %var %workgroup %spec_const
  2074. OpReturn
  2075. OpFunctionEnd
  2076. )";
  2077. CompileSuccessfully(spirv);
  2078. EXPECT_EQ(SPV_SUCCESS, ValidateInstructions());
  2079. }
// With the Shader capability present, the memory Scope operand of an atomic
// must be an OpConstant; an OpSpecConstant is rejected.
TEST_F(ValidateAtomics, ScopeSpecConstantShader) {
  const std::string spirv = R"(
OpCapability Shader
OpMemoryModel Logical GLSL450
OpEntryPoint Fragment %func "func"
OpExecutionMode %func OriginUpperLeft
%void = OpTypeVoid
%int = OpTypeInt 32 0
%spec_const = OpSpecConstant %int 0
%relaxed = OpConstant %int 0
%ptr_int_workgroup = OpTypePointer Workgroup %int
%var = OpVariable %ptr_int_workgroup Workgroup
%voidfn = OpTypeFunction %void
%func = OpFunction %void None %voidfn
%entry = OpLabel
%ld = OpAtomicLoad %int %var %spec_const %relaxed
OpReturn
OpFunctionEnd
)";
  CompileSuccessfully(spirv);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr(
          "Scope ids must be OpConstant when Shader capability is present"));
}
  2106. TEST_F(ValidateAtomics, ScopeSpecConstantKernel) {
  2107. const std::string spirv = R"(
  2108. OpCapability Kernel
  2109. OpCapability Linkage
  2110. OpMemoryModel Logical OpenCL
  2111. %void = OpTypeVoid
  2112. %int = OpTypeInt 32 0
  2113. %spec_const = OpSpecConstant %int 0
  2114. %relaxed = OpConstant %int 0
  2115. %ptr_int_workgroup = OpTypePointer Workgroup %int
  2116. %var = OpVariable %ptr_int_workgroup Workgroup
  2117. %voidfn = OpTypeFunction %void
  2118. %func = OpFunction %void None %voidfn
  2119. %entry = OpLabel
  2120. %ld = OpAtomicLoad %int %var %spec_const %relaxed
  2121. OpReturn
  2122. OpFunctionEnd
  2123. )";
  2124. CompileSuccessfully(spirv);
  2125. EXPECT_EQ(SPV_SUCCESS, ValidateInstructions());
  2126. }
  2127. TEST_F(ValidateAtomics, VulkanMemoryModelDeviceScopeBad) {
  2128. const std::string body = R"(
  2129. %val = OpAtomicAnd %u32 %u32_var %device %relaxed %u32_1
  2130. )";
  2131. const std::string extra = R"(OpCapability VulkanMemoryModelKHR
  2132. OpExtension "SPV_KHR_vulkan_memory_model"
  2133. )";
  2134. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  2135. SPV_ENV_UNIVERSAL_1_3);
  2136. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  2137. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  2138. EXPECT_THAT(
  2139. getDiagnosticString(),
  2140. HasSubstr("Use of device scope with VulkanKHR memory model requires the "
  2141. "VulkanMemoryModelDeviceScopeKHR capability"));
  2142. }
  2143. TEST_F(ValidateAtomics, VulkanMemoryModelDeviceScopeGood) {
  2144. const std::string body = R"(
  2145. %val = OpAtomicAnd %u32 %u32_var %device %relaxed %u32_1
  2146. )";
  2147. const std::string extra = R"(OpCapability VulkanMemoryModelKHR
  2148. OpCapability VulkanMemoryModelDeviceScopeKHR
  2149. OpExtension "SPV_KHR_vulkan_memory_model"
  2150. )";
  2151. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  2152. SPV_ENV_UNIVERSAL_1_3);
  2153. EXPECT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  2154. }
// OpAtomicCompareExchangeWeak is only allowed up to SPIR-V 1.3 (see the
// CompareExchangeWeakV14Bad test below).  A module assembled as 1.3 must
// still validate successfully even when the validator targets a 1.4
// environment.
TEST_F(ValidateAtomics, CompareExchangeWeakV13ValV14Good) {
  const std::string body = R"(
%val1 = OpAtomicCompareExchangeWeak %u32 %u32_var %device %relaxed %relaxed %u32_0 %u32_0
)";
  CompileSuccessfully(GenerateKernelCode(body), SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_UNIVERSAL_1_4));
}
  2162. TEST_F(ValidateAtomics, CompareExchangeWeakV14Bad) {
  2163. const std::string body = R"(
  2164. %val1 = OpAtomicCompareExchangeWeak %u32 %u32_var %device %relaxed %relaxed %u32_0 %u32_0
  2165. )";
  2166. CompileSuccessfully(GenerateKernelCode(body), SPV_ENV_UNIVERSAL_1_4);
  2167. EXPECT_EQ(SPV_ERROR_WRONG_VERSION,
  2168. ValidateInstructions(SPV_ENV_UNIVERSAL_1_4));
  2169. EXPECT_THAT(
  2170. getDiagnosticString(),
  2171. HasSubstr(
  2172. "AtomicCompareExchangeWeak requires SPIR-V version 1.3 or earlier"));
  2173. }
  2174. TEST_F(ValidateAtomics, CompareExchangeVolatileMatch) {
  2175. const std::string spirv = R"(
  2176. OpCapability Shader
  2177. OpCapability VulkanMemoryModelKHR
  2178. OpCapability Linkage
  2179. OpExtension "SPV_KHR_vulkan_memory_model"
  2180. OpMemoryModel Logical VulkanKHR
  2181. %void = OpTypeVoid
  2182. %int = OpTypeInt 32 0
  2183. %int_0 = OpConstant %int 0
  2184. %int_1 = OpConstant %int 1
  2185. %workgroup = OpConstant %int 2
  2186. %volatile = OpConstant %int 32768
  2187. %ptr_wg_int = OpTypePointer Workgroup %int
  2188. %wg_var = OpVariable %ptr_wg_int Workgroup
  2189. %void_fn = OpTypeFunction %void
  2190. %func = OpFunction %void None %void_fn
  2191. %entry = OpLabel
  2192. %cmp_ex = OpAtomicCompareExchange %int %wg_var %workgroup %volatile %volatile %int_0 %int_1
  2193. OpReturn
  2194. OpFunctionEnd
  2195. )";
  2196. CompileSuccessfully(spirv);
  2197. EXPECT_EQ(SPV_SUCCESS, ValidateInstructions());
  2198. }
  2199. TEST_F(ValidateAtomics, CompareExchangeVolatileMismatch) {
  2200. const std::string spirv = R"(
  2201. OpCapability Shader
  2202. OpCapability VulkanMemoryModelKHR
  2203. OpCapability Linkage
  2204. OpExtension "SPV_KHR_vulkan_memory_model"
  2205. OpMemoryModel Logical VulkanKHR
  2206. %void = OpTypeVoid
  2207. %int = OpTypeInt 32 0
  2208. %int_0 = OpConstant %int 0
  2209. %int_1 = OpConstant %int 1
  2210. %workgroup = OpConstant %int 2
  2211. %volatile = OpConstant %int 32768
  2212. %non_volatile = OpConstant %int 0
  2213. %ptr_wg_int = OpTypePointer Workgroup %int
  2214. %wg_var = OpVariable %ptr_wg_int Workgroup
  2215. %void_fn = OpTypeFunction %void
  2216. %func = OpFunction %void None %void_fn
  2217. %entry = OpLabel
  2218. %cmp_ex = OpAtomicCompareExchange %int %wg_var %workgroup %non_volatile %volatile %int_0 %int_1
  2219. OpReturn
  2220. OpFunctionEnd
  2221. )";
  2222. CompileSuccessfully(spirv);
  2223. EXPECT_EQ(SPV_ERROR_INVALID_ID, ValidateInstructions());
  2224. EXPECT_THAT(getDiagnosticString(),
  2225. HasSubstr("Volatile mask setting must match for Equal and "
  2226. "Unequal memory semantics"));
  2227. }
  2228. TEST_F(ValidateAtomics, CompareExchangeVolatileMismatchCooperativeMatrix) {
  2229. const std::string spirv = R"(
  2230. OpCapability Shader
  2231. OpCapability VulkanMemoryModelKHR
  2232. OpCapability Linkage
  2233. OpCapability CooperativeMatrixNV
  2234. OpExtension "SPV_KHR_vulkan_memory_model"
  2235. OpExtension "SPV_NV_cooperative_matrix"
  2236. OpMemoryModel Logical VulkanKHR
  2237. %void = OpTypeVoid
  2238. %int = OpTypeInt 32 0
  2239. %int_0 = OpConstant %int 0
  2240. %int_1 = OpConstant %int 1
  2241. %workgroup = OpConstant %int 2
  2242. %volatile = OpSpecConstant %int 32768
  2243. %non_volatile = OpSpecConstant %int 32768
  2244. %ptr_wg_int = OpTypePointer Workgroup %int
  2245. %wg_var = OpVariable %ptr_wg_int Workgroup
  2246. %void_fn = OpTypeFunction %void
  2247. %func = OpFunction %void None %void_fn
  2248. %entry = OpLabel
  2249. %cmp_ex = OpAtomicCompareExchange %int %wg_var %workgroup %volatile %non_volatile %int_0 %int_1
  2250. OpReturn
  2251. OpFunctionEnd
  2252. )";
  2253. // This is ok because we cannot evaluate the spec constant defaults.
  2254. CompileSuccessfully(spirv);
  2255. EXPECT_EQ(SPV_SUCCESS, ValidateInstructions());
  2256. }
// 32768 = 0x8000 = Volatile.  The Volatile memory-semantics bit requires the
// VulkanMemoryModelKHR capability; this module declares only Shader.
TEST_F(ValidateAtomics, VolatileRequiresVulkanMemoryModel) {
  const std::string spirv = R"(
OpCapability Shader
OpCapability Linkage
OpMemoryModel Logical GLSL450
%void = OpTypeVoid
%int = OpTypeInt 32 0
%int_0 = OpConstant %int 0
%int_1 = OpConstant %int 1
%workgroup = OpConstant %int 2
%volatile = OpConstant %int 32768
%ptr_wg_int = OpTypePointer Workgroup %int
%wg_var = OpVariable %ptr_wg_int Workgroup
%void_fn = OpTypeFunction %void
%func = OpFunction %void None %void_fn
%entry = OpLabel
%ld = OpAtomicLoad %int %wg_var %workgroup %volatile
OpReturn
OpFunctionEnd
)";
  CompileSuccessfully(spirv);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("Memory Semantics Volatile requires capability "
                        "VulkanMemoryModelKHR"));
}
  2283. TEST_F(ValidateAtomics, CooperativeMatrixSemanticsMustBeConstant) {
  2284. const std::string spirv = R"(
  2285. OpCapability Shader
  2286. OpCapability Linkage
  2287. OpCapability CooperativeMatrixNV
  2288. OpExtension "SPV_NV_cooperative_matrix"
  2289. OpMemoryModel Logical GLSL450
  2290. %void = OpTypeVoid
  2291. %int = OpTypeInt 32 0
  2292. %int_0 = OpConstant %int 0
  2293. %int_1 = OpConstant %int 1
  2294. %workgroup = OpConstant %int 2
  2295. %undef = OpUndef %int
  2296. %ptr_wg_int = OpTypePointer Workgroup %int
  2297. %wg_var = OpVariable %ptr_wg_int Workgroup
  2298. %void_fn = OpTypeFunction %void
  2299. %func = OpFunction %void None %void_fn
  2300. %entry = OpLabel
  2301. %ld = OpAtomicLoad %int %wg_var %workgroup %undef
  2302. OpReturn
  2303. OpFunctionEnd
  2304. )";
  2305. CompileSuccessfully(spirv);
  2306. EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  2307. EXPECT_THAT(getDiagnosticString(),
  2308. HasSubstr("Memory Semantics must be a constant instruction when "
  2309. "CooperativeMatrixNV capability is present"));
  2310. }
// Fuzzer-reduced module (note ids like %uint_538976288 — 0x20202020, four
// ASCII spaces).  The Pointer operand %7 points to %_struct_5, not to the
// Result Type %int, so AtomicIIncrement must be rejected.
TEST_F(ValidateAtomics, IIncrementBadPointerDataType) {
  const std::string spirv = R"(
OpCapability Shader
OpMemoryModel Logical GLSL450
%uint = OpTypeInt 32 0
%_ptr_Input_uint = OpTypePointer Input %uint
%v3uint = OpTypeVector %uint 3
%_ptr_Input_v3uint = OpTypePointer Input %v3uint
%void = OpTypeVoid
%16 = OpTypeFunction %void
%uint_538976288 = OpConstant %uint 538976288
%int = OpTypeInt 32 1
%_runtimearr_int = OpTypeRuntimeArray %int
%_struct_5 = OpTypeStruct %_runtimearr_int
%_ptr_Uniform__struct_5 = OpTypePointer Uniform %_struct_5
%3 = OpVariable %_ptr_Input_v3uint Input
%7 = OpVariable %_ptr_Uniform__struct_5 Uniform
%8224 = OpFunction %void None %16
%65312 = OpLabel
%25 = OpAccessChain %_ptr_Input_uint %3 %uint_538976288
%26 = OpLoad %uint %25
%2097184 = OpAtomicIIncrement %int %7 %uint_538976288 %26
OpUnreachable
OpFunctionEnd
)";
  CompileSuccessfully(spirv);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicIIncrement: expected Pointer to point to a "
                        "value of type Result Type"));
}
  2342. } // namespace
  2343. } // namespace val
  2344. } // namespace spvtools