// val_atomics_test.cpp — validator tests for SPIR-V atomic instructions.
  1. // Copyright (c) 2017 Google Inc.
  2. //
  3. // Licensed under the Apache License, Version 2.0 (the "License");
  4. // you may not use this file except in compliance with the License.
  5. // You may obtain a copy of the License at
  6. //
  7. // http://www.apache.org/licenses/LICENSE-2.0
  8. //
  9. // Unless required by applicable law or agreed to in writing, software
  10. // distributed under the License is distributed on an "AS IS" BASIS,
  11. // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  12. // See the License for the specific language governing permissions and
  13. // limitations under the License.
  14. #include <sstream>
  15. #include <string>
  16. #include "gmock/gmock.h"
  17. #include "test/unit_spirv.h"
  18. #include "test/val/val_fixtures.h"
  19. namespace spvtools {
  20. namespace val {
  21. namespace {
  22. using ::testing::HasSubstr;
  23. using ::testing::Not;
  24. using ValidateAtomics = spvtest::ValidateBase<bool>;
  25. std::string GenerateShaderCodeImpl(
  26. const std::string& body, const std::string& capabilities_and_extensions,
  27. const std::string& definitions, const std::string& memory_model,
  28. const std::string& execution) {
  29. std::ostringstream ss;
  30. ss << R"(
  31. OpCapability Shader
  32. )";
  33. ss << capabilities_and_extensions;
  34. ss << "OpMemoryModel Logical " << memory_model << "\n";
  35. ss << execution;
  36. ss << R"(
  37. %void = OpTypeVoid
  38. %func = OpTypeFunction %void
  39. %bool = OpTypeBool
  40. %f32 = OpTypeFloat 32
  41. %u32 = OpTypeInt 32 0
  42. %f32vec4 = OpTypeVector %f32 4
  43. %f32_0 = OpConstant %f32 0
  44. %f32_1 = OpConstant %f32 1
  45. %u32_0 = OpConstant %u32 0
  46. %u32_1 = OpConstant %u32 1
  47. %f32vec4_0000 = OpConstantComposite %f32vec4 %f32_0 %f32_0 %f32_0 %f32_0
  48. %cross_device = OpConstant %u32 0
  49. %device = OpConstant %u32 1
  50. %workgroup = OpConstant %u32 2
  51. %subgroup = OpConstant %u32 3
  52. %invocation = OpConstant %u32 4
  53. %queuefamily = OpConstant %u32 5
  54. %relaxed = OpConstant %u32 0
  55. %acquire = OpConstant %u32 2
  56. %release = OpConstant %u32 4
  57. %acquire_release = OpConstant %u32 8
  58. %acquire_and_release = OpConstant %u32 6
  59. %sequentially_consistent = OpConstant %u32 16
  60. %acquire_release_uniform_workgroup = OpConstant %u32 328
  61. %f32_ptr = OpTypePointer Workgroup %f32
  62. %f32_var = OpVariable %f32_ptr Workgroup
  63. %u32_ptr = OpTypePointer Workgroup %u32
  64. %u32_var = OpVariable %u32_ptr Workgroup
  65. %f32vec4_ptr = OpTypePointer Workgroup %f32vec4
  66. %f32vec4_var = OpVariable %f32vec4_ptr Workgroup
  67. %f32_ptr_function = OpTypePointer Function %f32
  68. )";
  69. ss << definitions;
  70. ss << R"(
  71. %main = OpFunction %void None %func
  72. %main_entry = OpLabel
  73. )";
  74. ss << body;
  75. ss << R"(
  76. OpReturn
  77. OpFunctionEnd)";
  78. return ss.str();
  79. }
  80. std::string GenerateShaderCode(
  81. const std::string& body,
  82. const std::string& capabilities_and_extensions = "",
  83. const std::string& extra_defs = "",
  84. const std::string& memory_model = "GLSL450") {
  85. const std::string execution = R"(
  86. OpEntryPoint Fragment %main "main"
  87. OpExecutionMode %main OriginUpperLeft
  88. )";
  89. const std::string definitions = R"(
  90. %u64 = OpTypeInt 64 0
  91. %s64 = OpTypeInt 64 1
  92. %u64_1 = OpConstant %u64 1
  93. %s64_1 = OpConstant %s64 1
  94. %u64_ptr = OpTypePointer Workgroup %u64
  95. %s64_ptr = OpTypePointer Workgroup %s64
  96. %u64_var = OpVariable %u64_ptr Workgroup
  97. %s64_var = OpVariable %s64_ptr Workgroup
  98. )";
  99. return GenerateShaderCodeImpl(
  100. body, "OpCapability Int64\n" + capabilities_and_extensions,
  101. definitions + extra_defs, memory_model, execution);
  102. }
  103. std::string GenerateShaderComputeCode(
  104. const std::string& body,
  105. const std::string& capabilities_and_extensions = "",
  106. const std::string& extra_defs = "",
  107. const std::string& memory_model = "GLSL450") {
  108. const std::string execution = R"(
  109. OpEntryPoint GLCompute %main "main"
  110. OpExecutionMode %main LocalSize 32 1 1
  111. )";
  112. const std::string definitions = R"(
  113. %u64 = OpTypeInt 64 0
  114. %s64 = OpTypeInt 64 1
  115. %u64_1 = OpConstant %u64 1
  116. %s64_1 = OpConstant %s64 1
  117. %u64_ptr = OpTypePointer Workgroup %u64
  118. %s64_ptr = OpTypePointer Workgroup %s64
  119. %u64_var = OpVariable %u64_ptr Workgroup
  120. %s64_var = OpVariable %s64_ptr Workgroup
  121. )";
  122. return GenerateShaderCodeImpl(
  123. body, "OpCapability Int64\n" + capabilities_and_extensions,
  124. definitions + extra_defs, memory_model, execution);
  125. }
  126. std::string GenerateKernelCode(
  127. const std::string& body,
  128. const std::string& capabilities_and_extensions = "") {
  129. std::ostringstream ss;
  130. ss << R"(
  131. OpCapability Addresses
  132. OpCapability Kernel
  133. OpCapability Linkage
  134. OpCapability Int64
  135. )";
  136. ss << capabilities_and_extensions;
  137. ss << R"(
  138. OpMemoryModel Physical32 OpenCL
  139. %void = OpTypeVoid
  140. %func = OpTypeFunction %void
  141. %bool = OpTypeBool
  142. %f32 = OpTypeFloat 32
  143. %u32 = OpTypeInt 32 0
  144. %u64 = OpTypeInt 64 0
  145. %f32vec4 = OpTypeVector %f32 4
  146. %f32_0 = OpConstant %f32 0
  147. %f32_1 = OpConstant %f32 1
  148. %u32_0 = OpConstant %u32 0
  149. %u32_1 = OpConstant %u32 1
  150. %u64_1 = OpConstant %u64 1
  151. %f32vec4_0000 = OpConstantComposite %f32vec4 %f32_0 %f32_0 %f32_0 %f32_0
  152. %cross_device = OpConstant %u32 0
  153. %device = OpConstant %u32 1
  154. %workgroup = OpConstant %u32 2
  155. %subgroup = OpConstant %u32 3
  156. %invocation = OpConstant %u32 4
  157. %relaxed = OpConstant %u32 0
  158. %acquire = OpConstant %u32 2
  159. %release = OpConstant %u32 4
  160. %acquire_release = OpConstant %u32 8
  161. %acquire_and_release = OpConstant %u32 6
  162. %sequentially_consistent = OpConstant %u32 16
  163. %acquire_release_uniform_workgroup = OpConstant %u32 328
  164. %acquire_release_atomic_counter_workgroup = OpConstant %u32 1288
  165. %f32_ptr = OpTypePointer Workgroup %f32
  166. %f32_var = OpVariable %f32_ptr Workgroup
  167. %u32_ptr = OpTypePointer Workgroup %u32
  168. %u32_var = OpVariable %u32_ptr Workgroup
  169. %u64_ptr = OpTypePointer Workgroup %u64
  170. %u64_var = OpVariable %u64_ptr Workgroup
  171. %f32vec4_ptr = OpTypePointer Workgroup %f32vec4
  172. %f32vec4_var = OpVariable %f32vec4_ptr Workgroup
  173. %f32_ptr_function = OpTypePointer Function %f32
  174. %f32_ptr_uniformconstant = OpTypePointer UniformConstant %f32
  175. %f32_uc_var = OpVariable %f32_ptr_uniformconstant UniformConstant
  176. %f32_ptr_image = OpTypePointer Image %f32
  177. %f32_im_var = OpVariable %f32_ptr_image Image
  178. %main = OpFunction %void None %func
  179. %main_entry = OpLabel
  180. )";
  181. ss << body;
  182. ss << R"(
  183. OpReturn
  184. OpFunctionEnd)";
  185. return ss.str();
  186. }
// Valid OpAtomicLoad uses in a Fragment shader: 32-bit integer result with
// Device/Workgroup scopes and Relaxed/Acquire semantics.
TEST_F(ValidateAtomics, AtomicLoadShaderSuccess) {
  const std::string body = R"(
%val1 = OpAtomicLoad %u32 %u32_var %device %relaxed
%val2 = OpAtomicLoad %u32 %u32_var %workgroup %acquire
)";
  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}

// Kernel (OpenCL) modules also accept a float-typed atomic load and
// SequentiallyConsistent semantics.
TEST_F(ValidateAtomics, AtomicLoadKernelSuccess) {
  const std::string body = R"(
%val1 = OpAtomicLoad %f32 %f32_var %device %relaxed
%val2 = OpAtomicLoad %u32 %u32_var %workgroup %sequentially_consistent
)";
  CompileSuccessfully(GenerateKernelCode(body));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}

// 64-bit atomic loads validate in a shader when Int64Atomics is declared.
TEST_F(ValidateAtomics, AtomicLoadInt64ShaderSuccess) {
  const std::string body = R"(
%val1 = OpAtomicLoad %u64 %u64_var %subgroup %sequentially_consistent
)";
  CompileSuccessfully(GenerateShaderCode(body, "OpCapability Int64Atomics\n"));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}

// Same for a kernel module with Int64Atomics declared.
TEST_F(ValidateAtomics, AtomicLoadInt64KernelSuccess) {
  const std::string body = R"(
%val1 = OpAtomicLoad %u64 %u64_var %subgroup %acquire
)";
  CompileSuccessfully(GenerateKernelCode(body, "OpCapability Int64Atomics\n"));
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
}

// Under the Vulkan 1.0 environment, 32-bit atomic loads — including
// Invocation scope — validate in a GLCompute entry point.
TEST_F(ValidateAtomics, AtomicLoadInt32VulkanSuccess) {
  const std::string body = R"(
%val1 = OpAtomicLoad %u32 %u32_var %device %relaxed
%val2 = OpAtomicLoad %u32 %u32_var %workgroup %acquire
%val3 = OpAtomicLoad %u32 %u32_var %invocation %relaxed
)";
  CompileSuccessfully(GenerateShaderComputeCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}
// Under Vulkan, a Workgroup-storage atomic in a Fragment shader is rejected:
// the Workgroup Storage Class is limited to compute-like execution models
// (VUID-StandaloneSpirv-None-04645).
TEST_F(ValidateAtomics, AtomicLoadVulkanWrongStorageClass) {
  const std::string body = R"(
%val1 = OpAtomicLoad %u32 %u32_var %device %relaxed
)";
  // GenerateShaderCode emits a Fragment entry point.
  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_ID, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              AnyVUID("VUID-StandaloneSpirv-None-04645"));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("in Vulkan environment, Workgroup Storage Class is limited to "
                "MeshNV, TaskNV, and GLCompute execution model"));
}
// OpAtomicIAdd requires an integer scalar Result Type; a float scalar is
// rejected.  NOTE(review): despite "Vulkan" in the name, these two tests
// validate under the default (non-Vulkan) environment.
TEST_F(ValidateAtomics, AtomicAddIntVulkanWrongType1) {
  const std::string body = R"(
%val1 = OpAtomicIAdd %f32 %f32_var %device %relaxed %f32_1
)";
  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicIAdd: "
                        "expected Result Type to be integer scalar type"));
}

// OpAtomicIAdd with a float *vector* Result Type is rejected as well.
TEST_F(ValidateAtomics, AtomicAddIntVulkanWrongType2) {
  const std::string body = R"(
%val1 = OpAtomicIAdd %f32vec4 %f32vec4_var %device %relaxed %f32_1
)";
  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicIAdd: "
                        "expected Result Type to be integer scalar type"));
}
// OpAtomicFAddEXT with no AtomicFloat*AddEXT capability declared fails at
// the capability-validation stage.
TEST_F(ValidateAtomics, AtomicAddFloatVulkan) {
  const std::string body = R"(
%val1 = OpAtomicFAddEXT %f32 %f32_var %device %relaxed %f32_1
)";
  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_CAPABILITY, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("Opcode AtomicFAddEXT requires one of these capabilities: "
                "AtomicFloat16VectorNV AtomicFloat32AddEXT AtomicFloat64AddEXT "
                "AtomicFloat16AddEXT"));
}

// OpAtomicFMinEXT with no AtomicFloat*MinMaxEXT capability declared fails
// at the capability-validation stage.
TEST_F(ValidateAtomics, AtomicMinFloatVulkan) {
  const std::string body = R"(
%val1 = OpAtomicFMinEXT %f32 %f32_var %device %relaxed %f32_1
)";
  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_CAPABILITY, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("Opcode AtomicFMinEXT requires one of these capabilities: "
                "AtomicFloat16VectorNV AtomicFloat32MinMaxEXT "
                "AtomicFloat64MinMaxEXT AtomicFloat16MinMaxEXT"));
}

// Same for OpAtomicFMaxEXT.
TEST_F(ValidateAtomics, AtomicMaxFloatVulkan) {
  const std::string body = R"(
%val1 = OpAtomicFMaxEXT %f32 %f32_var %device %relaxed %f32_1
)";
  CompileSuccessfully(GenerateShaderCode(body));
  ASSERT_EQ(SPV_ERROR_INVALID_CAPABILITY, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr(
          "Opcode AtomicFMaxEXT requires one of these capabilities: "
          "AtomicFloat16VectorNV AtomicFloat32MinMaxEXT AtomicFloat64MinMaxEXT "
          "AtomicFloat16MinMaxEXT"));
}
// Even with AtomicFloat32AddEXT declared, OpAtomicFAddEXT requires a float
// *scalar* Result Type; a float vector is rejected.
TEST_F(ValidateAtomics, AtomicAddFloatVulkanWrongType1) {
  const std::string body = R"(
%val1 = OpAtomicFAddEXT %f32vec4 %f32vec4_var %device %relaxed %f32_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat32AddEXT
OpExtension "SPV_EXT_shader_atomic_float_add"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFAddEXT: "
                        "expected Result Type to be float scalar type"));
}

// OpAtomicFMinEXT with a float vector Result Type is rejected.
TEST_F(ValidateAtomics, AtomicMinFloatVulkanWrongType1) {
  const std::string body = R"(
%val1 = OpAtomicFMinEXT %f32vec4 %f32vec4_var %device %relaxed %f32_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat32MinMaxEXT
OpExtension "SPV_EXT_shader_atomic_float_min_max"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFMinEXT: "
                        "expected Result Type to be float scalar type"));
}

// OpAtomicFMaxEXT with a float vector Result Type is rejected.
TEST_F(ValidateAtomics, AtomicMaxFloatVulkanWrongType1) {
  const std::string body = R"(
%val1 = OpAtomicFMaxEXT %f32vec4 %f32vec4_var %device %relaxed %f32_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat32MinMaxEXT
OpExtension "SPV_EXT_shader_atomic_float_min_max"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFMaxEXT: "
                        "expected Result Type to be float scalar type"));
}
// OpAtomicFAddEXT with a 32-bit *integer* Result Type is rejected: the
// result must be a float scalar.
TEST_F(ValidateAtomics, AtomicAddFloatVulkanWrongType2) {
  const std::string body = R"(
%val1 = OpAtomicFAddEXT %u32 %u32_var %device %relaxed %u32_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat32AddEXT
OpExtension "SPV_EXT_shader_atomic_float_add"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFAddEXT: "
                        "expected Result Type to be float scalar type"));
}

// OpAtomicFMinEXT with a 32-bit integer Result Type is rejected.
TEST_F(ValidateAtomics, AtomicMinFloatVulkanWrongType2) {
  const std::string body = R"(
%val1 = OpAtomicFMinEXT %u32 %u32_var %device %relaxed %u32_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat32MinMaxEXT
OpExtension "SPV_EXT_shader_atomic_float_min_max"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFMinEXT: "
                        "expected Result Type to be float scalar type"));
}

// OpAtomicFMaxEXT with a 32-bit integer Result Type is rejected.
TEST_F(ValidateAtomics, AtomicMaxFloatVulkanWrongType2) {
  const std::string body = R"(
%val1 = OpAtomicFMaxEXT %u32 %u32_var %device %relaxed %u32_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat32MinMaxEXT
OpExtension "SPV_EXT_shader_atomic_float_min_max"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFMaxEXT: "
                        "expected Result Type to be float scalar type"));
}
// OpAtomicFAddEXT with a 64-bit integer Result Type is rejected: the result
// must be a float scalar.
TEST_F(ValidateAtomics, AtomicAddFloatVulkanWrongType3) {
  const std::string body = R"(
%val1 = OpAtomicFAddEXT %u64 %u64_var %device %relaxed %u64_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat32AddEXT
OpExtension "SPV_EXT_shader_atomic_float_add"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFAddEXT: "
                        "expected Result Type to be float scalar type"));
}

// OpAtomicFMinEXT with a 64-bit integer Result Type is rejected.
TEST_F(ValidateAtomics, AtomicMinFloatVulkanWrongType3) {
  const std::string body = R"(
%val1 = OpAtomicFMinEXT %u64 %u64_var %device %relaxed %u64_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat32MinMaxEXT
OpExtension "SPV_EXT_shader_atomic_float_min_max"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFMinEXT: "
                        "expected Result Type to be float scalar type"));
}

// OpAtomicFMaxEXT with a 64-bit integer Result Type is rejected.
TEST_F(ValidateAtomics, AtomicMaxFloatVulkanWrongType3) {
  const std::string body = R"(
%val1 = OpAtomicFMaxEXT %u64 %u64_var %device %relaxed %u64_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat32MinMaxEXT
OpExtension "SPV_EXT_shader_atomic_float_min_max"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFMaxEXT: "
                        "expected Result Type to be float scalar type"));
}
// Declaring only the 64-bit capability (AtomicFloat64AddEXT) is not enough
// for a 32-bit float add: the width-matching AtomicFloat32AddEXT capability
// is required.
TEST_F(ValidateAtomics, AtomicAddFloatVulkanWrongCapability) {
  const std::string body = R"(
%val1 = OpAtomicFAddEXT %f32 %f32_var %device %relaxed %f32_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat64AddEXT
OpExtension "SPV_EXT_shader_atomic_float_add"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFAddEXT: float add atomics "
                        "require the AtomicFloat32AddEXT capability"));
}

// 32-bit float min with only AtomicFloat64MinMaxEXT declared is rejected.
TEST_F(ValidateAtomics, AtomicMinFloatVulkanWrongCapability) {
  const std::string body = R"(
%val1 = OpAtomicFMinEXT %f32 %f32_var %device %relaxed %f32_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat64MinMaxEXT
OpExtension "SPV_EXT_shader_atomic_float_min_max"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFMinEXT: float min/max atomics "
                        "require the AtomicFloat32MinMaxEXT capability"));
}

// 32-bit float max with only AtomicFloat64MinMaxEXT declared is rejected.
TEST_F(ValidateAtomics, AtomicMaxFloatVulkanWrongCapability) {
  const std::string body = R"(
%val1 = OpAtomicFMaxEXT %f32 %f32_var %device %relaxed %f32_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat64MinMaxEXT
OpExtension "SPV_EXT_shader_atomic_float_min_max"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra), SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicFMaxEXT: float min/max atomics "
                        "require the AtomicFloat32MinMaxEXT capability"));
}
// 16-bit float add validates when Float16 + AtomicFloat16AddEXT and the
// SPV_EXT_shader_atomic_float16_add extension are declared; the f16 type,
// constant, and Workgroup variable are supplied via |extra_defs|.
TEST_F(ValidateAtomics, AtomicAddFloat16VulkanSuccess) {
  const std::string defs = R"(
%f16 = OpTypeFloat 16
%f16_1 = OpConstant %f16 1
%f16_ptr = OpTypePointer Workgroup %f16
%f16_var = OpVariable %f16_ptr Workgroup
)";
  const std::string body = R"(
%val1 = OpAtomicFAddEXT %f16 %f16_var %device %relaxed %f16_1
)";
  const std::string extra = R"(
OpCapability Float16
OpCapability AtomicFloat16AddEXT
OpExtension "SPV_EXT_shader_atomic_float16_add"
)";
  CompileSuccessfully(GenerateShaderComputeCode(body, extra, defs),
                      SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}

// 32-bit float add (Device and Invocation scopes) validates with
// AtomicFloat32AddEXT declared.
TEST_F(ValidateAtomics, AtomicAddFloatVulkanSuccess) {
  const std::string body = R"(
%val1 = OpAtomicFAddEXT %f32 %f32_var %device %relaxed %f32_1
%val2 = OpAtomicFAddEXT %f32 %f32_var %invocation %relaxed %f32_1
)";
  const std::string extra = R"(
OpCapability AtomicFloat32AddEXT
OpExtension "SPV_EXT_shader_atomic_float_add"
)";
  CompileSuccessfully(GenerateShaderComputeCode(body, extra),
                      SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}
// 16-bit float min validates when Float16 + AtomicFloat16MinMaxEXT and the
// SPV_EXT_shader_atomic_float_min_max extension are declared.
TEST_F(ValidateAtomics, AtomicMinFloat16VulkanSuccess) {
  const std::string defs = R"(
%f16 = OpTypeFloat 16
%f16_1 = OpConstant %f16 1
%f16_ptr = OpTypePointer Workgroup %f16
%f16_var = OpVariable %f16_ptr Workgroup
)";
  const std::string body = R"(
%val1 = OpAtomicFMinEXT %f16 %f16_var %device %relaxed %f16_1
)";
  const std::string extra = R"(
OpCapability Float16
OpCapability AtomicFloat16MinMaxEXT
OpExtension "SPV_EXT_shader_atomic_float_min_max"
)";
  CompileSuccessfully(GenerateShaderComputeCode(body, extra, defs),
                      SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}

// Same setup for 16-bit float max.
TEST_F(ValidateAtomics, AtomicMaxFloat16VulkanSuccess) {
  const std::string defs = R"(
%f16 = OpTypeFloat 16
%f16_1 = OpConstant %f16 1
%f16_ptr = OpTypePointer Workgroup %f16
%f16_var = OpVariable %f16_ptr Workgroup
)";
  const std::string body = R"(
%val1 = OpAtomicFMaxEXT %f16 %f16_var %device %relaxed %f16_1
)";
  const std::string extra = R"(
OpCapability Float16
OpCapability AtomicFloat16MinMaxEXT
OpExtension "SPV_EXT_shader_atomic_float_min_max"
)";
  CompileSuccessfully(GenerateShaderComputeCode(body, extra, defs),
                      SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}
  534. TEST_F(ValidateAtomics, AtomicMinFloat32VulkanSuccess) {
  535. const std::string body = R"(
  536. %val1 = OpAtomicFMinEXT %f32 %f32_var %device %relaxed %f32_1
  537. )";
  538. const std::string extra = R"(
  539. OpCapability AtomicFloat32MinMaxEXT
  540. OpExtension "SPV_EXT_shader_atomic_float_min_max"
  541. )";
  542. CompileSuccessfully(GenerateShaderComputeCode(body, extra),
  543. SPV_ENV_VULKAN_1_0);
  544. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  545. }
  546. TEST_F(ValidateAtomics, AtomicMaxFloat32VulkanSuccess) {
  547. const std::string body = R"(
  548. %val1 = OpAtomicFMaxEXT %f32 %f32_var %device %relaxed %f32_1
  549. )";
  550. const std::string extra = R"(
  551. OpCapability AtomicFloat32MinMaxEXT
  552. OpExtension "SPV_EXT_shader_atomic_float_min_max"
  553. )";
  554. CompileSuccessfully(GenerateShaderComputeCode(body, extra),
  555. SPV_ENV_VULKAN_1_0);
  556. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  557. }
  558. TEST_F(ValidateAtomics, AtomicMinFloat64VulkanSuccess) {
  559. const std::string defs = R"(
  560. %f64 = OpTypeFloat 64
  561. %f64_1 = OpConstant %f64 1
  562. %f64_ptr = OpTypePointer Workgroup %f64
  563. %f64_var = OpVariable %f64_ptr Workgroup
  564. )";
  565. const std::string body = R"(
  566. %val1 = OpAtomicFMinEXT %f64 %f64_var %device %relaxed %f64_1
  567. )";
  568. const std::string extra = R"(
  569. OpCapability Float64
  570. OpCapability AtomicFloat64MinMaxEXT
  571. OpExtension "SPV_EXT_shader_atomic_float_min_max"
  572. )";
  573. CompileSuccessfully(GenerateShaderComputeCode(body, extra, defs),
  574. SPV_ENV_VULKAN_1_0);
  575. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  576. }
  577. TEST_F(ValidateAtomics, AtomicMaxFloat64VulkanSuccess) {
  578. const std::string defs = R"(
  579. %f64 = OpTypeFloat 64
  580. %f64_1 = OpConstant %f64 1
  581. %f64_ptr = OpTypePointer Workgroup %f64
  582. %f64_var = OpVariable %f64_ptr Workgroup
  583. )";
  584. const std::string body = R"(
  585. %val1 = OpAtomicFMaxEXT %f64 %f64_var %device %relaxed %f64_1
  586. )";
  587. const std::string extra = R"(
  588. OpCapability Float64
  589. OpCapability AtomicFloat64MinMaxEXT
  590. OpExtension "SPV_EXT_shader_atomic_float_min_max"
  591. )";
  592. CompileSuccessfully(GenerateShaderComputeCode(body, extra, defs),
  593. SPV_ENV_VULKAN_1_0);
  594. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  595. }
  596. TEST_F(ValidateAtomics, AtomicLoadFloatVulkan) {
  597. const std::string body = R"(
  598. %val1 = OpAtomicLoad %f32 %f32_var %device %relaxed
  599. %val2 = OpAtomicLoad %f32 %f32_var %workgroup %acquire
  600. )";
  601. CompileSuccessfully(GenerateShaderComputeCode(body), SPV_ENV_VULKAN_1_0);
  602. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  603. }
  604. TEST_F(ValidateAtomics, AtomicStoreVulkanWrongStorageClass) {
  605. const std::string body = R"(
  606. OpAtomicStore %f32_var %device %relaxed %f32_1
  607. )";
  608. CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  609. ASSERT_EQ(SPV_ERROR_INVALID_ID, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  610. EXPECT_THAT(getDiagnosticString(),
  611. AnyVUID("VUID-StandaloneSpirv-None-04645"));
  612. EXPECT_THAT(
  613. getDiagnosticString(),
  614. HasSubstr("in Vulkan environment, Workgroup Storage Class is limited to "
  615. "MeshNV, TaskNV, and GLCompute execution model"));
  616. }
  617. TEST_F(ValidateAtomics, AtomicStoreFloatVulkan) {
  618. const std::string body = R"(
  619. OpAtomicStore %f32_var %device %relaxed %f32_1
  620. )";
  621. CompileSuccessfully(GenerateShaderComputeCode(body), SPV_ENV_VULKAN_1_0);
  622. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  623. }
  624. TEST_F(ValidateAtomics, AtomicExchangeFloatVulkan) {
  625. const std::string body = R"(
  626. %val2 = OpAtomicExchange %f32 %f32_var %device %relaxed %f32_0
  627. )";
  628. CompileSuccessfully(GenerateShaderComputeCode(body), SPV_ENV_VULKAN_1_0);
  629. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  630. }
  631. TEST_F(ValidateAtomics, AtomicLoadInt64WithCapabilityVulkanSuccess) {
  632. const std::string body = R"(
  633. %val1 = OpAtomicLoad %u64 %u64_var %device %relaxed
  634. %val2 = OpAtomicLoad %u64 %u64_var %workgroup %acquire
  635. %val3 = OpAtomicLoad %u64 %u64_var %invocation %relaxed
  636. )";
  637. CompileSuccessfully(
  638. GenerateShaderComputeCode(body, "OpCapability Int64Atomics\n"),
  639. SPV_ENV_VULKAN_1_0);
  640. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  641. }
  642. TEST_F(ValidateAtomics, AtomicLoadInt64WithoutCapabilityVulkan) {
  643. const std::string body = R"(
  644. %val1 = OpAtomicLoad %u64 %u64_var %device %relaxed
  645. %val2 = OpAtomicLoad %u64 %u64_var %workgroup %acquire
  646. )";
  647. CompileSuccessfully(GenerateShaderComputeCode(body), SPV_ENV_VULKAN_1_0);
  648. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  649. EXPECT_THAT(getDiagnosticString(),
  650. HasSubstr("64-bit atomics require the Int64Atomics capability"));
  651. }
  652. TEST_F(ValidateAtomics, AtomicStoreOpenCLFunctionPointerStorageTypeSuccess) {
  653. const std::string body = R"(
  654. %f32_var_function = OpVariable %f32_ptr_function Function
  655. OpAtomicStore %f32_var_function %device %relaxed %f32_1
  656. )";
  657. CompileSuccessfully(GenerateKernelCode(body), SPV_ENV_OPENCL_1_2);
  658. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_OPENCL_1_2));
  659. }
  660. TEST_F(ValidateAtomics, AtomicStoreVulkanFunctionPointerStorageType) {
  661. const std::string body = R"(
  662. %f32_var_function = OpVariable %f32_ptr_function Function
  663. OpAtomicStore %f32_var_function %device %relaxed %f32_1
  664. )";
  665. CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  666. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  667. EXPECT_THAT(getDiagnosticString(),
  668. AnyVUID("VUID-StandaloneSpirv-None-04686"));
  669. EXPECT_THAT(
  670. getDiagnosticString(),
  671. HasSubstr("AtomicStore: Vulkan spec only allows storage classes for "
  672. "atomic to be: Uniform, Workgroup, Image, StorageBuffer, "
  673. "PhysicalStorageBuffer or TaskPayloadWorkgroupEXT."));
  674. }
  675. TEST_F(ValidateAtomics, AtomicStoreFunctionPointerStorageType) {
  676. const std::string body = R"(
  677. %f32_var_function = OpVariable %f32_ptr_function Function
  678. OpAtomicStore %f32_var_function %device %relaxed %f32_1
  679. )";
  680. CompileSuccessfully(GenerateShaderCode(body));
  681. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  682. EXPECT_THAT(getDiagnosticString(),
  683. HasSubstr("AtomicStore: Function storage class forbidden when "
  684. "the Shader capability is declared."));
  685. }
  686. // TODO([email protected]): the corresponding check fails Vulkan CTS,
  687. // reenable once fixed.
  688. TEST_F(ValidateAtomics, DISABLED_AtomicLoadVulkanSubgroup) {
  689. const std::string body = R"(
  690. %val1 = OpAtomicLoad %u32 %u32_var %subgroup %acquire
  691. )";
  692. CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  693. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  694. EXPECT_THAT(getDiagnosticString(),
  695. HasSubstr("AtomicLoad: in Vulkan environment memory scope is "
  696. "limited to Device, Workgroup and Invocation"));
  697. }
  698. TEST_F(ValidateAtomics, AtomicLoadVulkanRelease) {
  699. const std::string body = R"(
  700. %val1 = OpAtomicLoad %u32 %u32_var %workgroup %release
  701. )";
  702. CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  703. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  704. EXPECT_THAT(getDiagnosticString(),
  705. AnyVUID("VUID-StandaloneSpirv-OpAtomicLoad-04731"));
  706. EXPECT_THAT(
  707. getDiagnosticString(),
  708. HasSubstr("Vulkan spec disallows OpAtomicLoad with Memory Semantics "
  709. "Release, AcquireRelease and SequentiallyConsistent"));
  710. }
  711. TEST_F(ValidateAtomics, AtomicLoadVulkanAcquireRelease) {
  712. const std::string body = R"(
  713. %val1 = OpAtomicLoad %u32 %u32_var %workgroup %acquire_release
  714. )";
  715. CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  716. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  717. EXPECT_THAT(getDiagnosticString(),
  718. AnyVUID("VUID-StandaloneSpirv-OpAtomicLoad-04731"));
  719. EXPECT_THAT(
  720. getDiagnosticString(),
  721. HasSubstr("Vulkan spec disallows OpAtomicLoad with Memory Semantics "
  722. "Release, AcquireRelease and SequentiallyConsistent"));
  723. }
  724. TEST_F(ValidateAtomics, AtomicLoadVulkanSequentiallyConsistent) {
  725. const std::string body = R"(
  726. %val1 = OpAtomicLoad %u32 %u32_var %workgroup %sequentially_consistent
  727. )";
  728. CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  729. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  730. EXPECT_THAT(getDiagnosticString(),
  731. AnyVUID("VUID-StandaloneSpirv-OpAtomicLoad-04731"));
  732. EXPECT_THAT(
  733. getDiagnosticString(),
  734. HasSubstr("Vulkan spec disallows OpAtomicLoad with Memory Semantics "
  735. "Release, AcquireRelease and SequentiallyConsistent"));
  736. }
  737. TEST_F(ValidateAtomics, AtomicLoadVulkanInvocationSemantics) {
  738. const std::string body = R"(
  739. %val1 = OpAtomicLoad %u32 %u32_var %invocation %acquire
  740. )";
  741. CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  742. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  743. EXPECT_THAT(getDiagnosticString(),
  744. AnyVUID("VUID-StandaloneSpirv-None-04641"));
  745. EXPECT_THAT(
  746. getDiagnosticString(),
  747. HasSubstr("AtomicLoad: Vulkan specification requires Memory Semantics to "
  748. "be None if used with Invocation Memory Scope"));
  749. }
  750. TEST_F(ValidateAtomics, AtomicLoadShaderFloat) {
  751. const std::string body = R"(
  752. %val1 = OpAtomicLoad %f32 %f32_var %device %relaxed
  753. )";
  754. CompileSuccessfully(GenerateShaderCode(body));
  755. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
  756. }
  757. TEST_F(ValidateAtomics, AtomicLoadVulkanInt64) {
  758. const std::string body = R"(
  759. %val1 = OpAtomicLoad %u64 %u64_var %device %relaxed
  760. )";
  761. CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  762. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  763. EXPECT_THAT(
  764. getDiagnosticString(),
  765. HasSubstr(
  766. "AtomicLoad: 64-bit atomics require the Int64Atomics capability"));
  767. }
  768. TEST_F(ValidateAtomics, AtomicLoadKernelInt64) {
  769. const std::string body = R"(
  770. %val1 = OpAtomicLoad %u64 %u64_var %device %relaxed
  771. )";
  772. CompileSuccessfully(GenerateKernelCode(body));
  773. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  774. EXPECT_THAT(
  775. getDiagnosticString(),
  776. HasSubstr(
  777. "AtomicLoad: 64-bit atomics require the Int64Atomics capability"));
  778. }
  779. TEST_F(ValidateAtomics, AtomicStoreVulkanInt64) {
  780. const std::string body = R"(
  781. OpAtomicStore %u64_var %device %relaxed %u64_1
  782. )";
  783. CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  784. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  785. EXPECT_THAT(
  786. getDiagnosticString(),
  787. HasSubstr(
  788. "AtomicStore: 64-bit atomics require the Int64Atomics capability"));
  789. }
  790. TEST_F(ValidateAtomics, AtomicStoreKernelInt64) {
  791. const std::string body = R"(
  792. OpAtomicStore %u64_var %device %relaxed %u64_1
  793. )";
  794. CompileSuccessfully(GenerateKernelCode(body));
  795. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  796. EXPECT_THAT(
  797. getDiagnosticString(),
  798. HasSubstr(
  799. "AtomicStore: 64-bit atomics require the Int64Atomics capability"));
  800. }
  801. TEST_F(ValidateAtomics, VK_KHR_shader_atomic_int64Success) {
  802. const std::string body = R"(
  803. %val1 = OpAtomicUMin %u64 %u64_var %device %relaxed %u64_1
  804. %val2 = OpAtomicUMax %u64 %u64_var %device %relaxed %u64_1
  805. %val3 = OpAtomicSMin %u64 %u64_var %device %relaxed %u64_1
  806. %val4 = OpAtomicSMax %u64 %u64_var %device %relaxed %u64_1
  807. %val5 = OpAtomicAnd %u64 %u64_var %device %relaxed %u64_1
  808. %val6 = OpAtomicOr %u64 %u64_var %device %relaxed %u64_1
  809. %val7 = OpAtomicXor %u64 %u64_var %device %relaxed %u64_1
  810. %val8 = OpAtomicIAdd %u64 %u64_var %device %relaxed %u64_1
  811. %val9 = OpAtomicExchange %u64 %u64_var %device %relaxed %u64_1
  812. %val10 = OpAtomicCompareExchange %u64 %u64_var %device %relaxed %relaxed %u64_1 %u64_1
  813. %val11 = OpAtomicUMin %s64 %s64_var %device %relaxed %s64_1
  814. %val12 = OpAtomicUMax %s64 %s64_var %device %relaxed %s64_1
  815. %val13 = OpAtomicSMin %s64 %s64_var %device %relaxed %s64_1
  816. %val14 = OpAtomicSMax %s64 %s64_var %device %relaxed %s64_1
  817. %val15 = OpAtomicAnd %s64 %s64_var %device %relaxed %s64_1
  818. %val16 = OpAtomicOr %s64 %s64_var %device %relaxed %s64_1
  819. %val17 = OpAtomicXor %s64 %s64_var %device %relaxed %s64_1
  820. %val18 = OpAtomicIAdd %s64 %s64_var %device %relaxed %s64_1
  821. %val19 = OpAtomicExchange %s64 %s64_var %device %relaxed %s64_1
  822. %val20 = OpAtomicCompareExchange %s64 %s64_var %device %relaxed %relaxed %s64_1 %s64_1
  823. %val21 = OpAtomicLoad %u64 %u64_var %device %relaxed
  824. %val22 = OpAtomicLoad %s64 %s64_var %device %relaxed
  825. OpAtomicStore %u64_var %device %relaxed %u64_1
  826. OpAtomicStore %s64_var %device %relaxed %s64_1
  827. )";
  828. CompileSuccessfully(
  829. GenerateShaderComputeCode(body, "OpCapability Int64Atomics\n"),
  830. SPV_ENV_VULKAN_1_0);
  831. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  832. }
  833. TEST_F(ValidateAtomics, VK_KHR_shader_atomic_int64MissingCapability) {
  834. const std::string body = R"(
  835. %val1 = OpAtomicUMin %u64 %u64_var %device %relaxed %u64_1
  836. )";
  837. CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  838. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  839. EXPECT_THAT(
  840. getDiagnosticString(),
  841. HasSubstr(
  842. "AtomicUMin: 64-bit atomics require the Int64Atomics capability"));
  843. }
  844. TEST_F(ValidateAtomics, AtomicLoadWrongResultType) {
  845. const std::string body = R"(
  846. %val1 = OpAtomicLoad %f32vec4 %f32vec4_var %device %relaxed
  847. )";
  848. CompileSuccessfully(GenerateKernelCode(body));
  849. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  850. EXPECT_THAT(
  851. getDiagnosticString(),
  852. HasSubstr("AtomicLoad: "
  853. "expected Result Type to be integer or float scalar type"));
  854. }
  855. TEST_F(ValidateAtomics, AtomicLoadWrongPointerType) {
  856. const std::string body = R"(
  857. %val1 = OpAtomicLoad %f32 %f32_ptr %device %relaxed
  858. )";
  859. CompileSuccessfully(GenerateKernelCode(body));
  860. ASSERT_EQ(SPV_ERROR_INVALID_ID, ValidateInstructions());
  861. EXPECT_THAT(
  862. getDiagnosticString(),
  863. HasSubstr("Operand '27[%_ptr_Workgroup_float]' cannot be a type"));
  864. }
  865. TEST_F(ValidateAtomics, AtomicLoadWrongPointerDataType) {
  866. const std::string body = R"(
  867. %val1 = OpAtomicLoad %u32 %f32_var %device %relaxed
  868. )";
  869. CompileSuccessfully(GenerateKernelCode(body));
  870. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  871. EXPECT_THAT(
  872. getDiagnosticString(),
  873. HasSubstr("AtomicLoad: "
  874. "expected Pointer to point to a value of type Result Type"));
  875. }
  876. TEST_F(ValidateAtomics, AtomicLoadWrongScopeType) {
  877. const std::string body = R"(
  878. %val1 = OpAtomicLoad %f32 %f32_var %f32_1 %relaxed
  879. )";
  880. CompileSuccessfully(GenerateKernelCode(body));
  881. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  882. EXPECT_THAT(getDiagnosticString(),
  883. HasSubstr("AtomicLoad: expected scope to be a 32-bit int"));
  884. }
  885. TEST_F(ValidateAtomics, AtomicLoadWrongMemorySemanticsType) {
  886. const std::string body = R"(
  887. %val1 = OpAtomicLoad %f32 %f32_var %device %u64_1
  888. )";
  889. CompileSuccessfully(GenerateKernelCode(body));
  890. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  891. EXPECT_THAT(
  892. getDiagnosticString(),
  893. HasSubstr("AtomicLoad: expected Memory Semantics to be a 32-bit int"));
  894. }
  895. TEST_F(ValidateAtomics, AtomicStoreKernelSuccess) {
  896. const std::string body = R"(
  897. OpAtomicStore %f32_var %device %relaxed %f32_1
  898. OpAtomicStore %u32_var %subgroup %release %u32_1
  899. )";
  900. CompileSuccessfully(GenerateKernelCode(body));
  901. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
  902. }
  903. TEST_F(ValidateAtomics, AtomicStoreShaderSuccess) {
  904. const std::string body = R"(
  905. OpAtomicStore %u32_var %device %release %u32_1
  906. OpAtomicStore %u32_var %subgroup %sequentially_consistent %u32_1
  907. )";
  908. CompileSuccessfully(GenerateShaderCode(body));
  909. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
  910. }
  911. TEST_F(ValidateAtomics, AtomicStoreVulkanSuccess) {
  912. const std::string body = R"(
  913. OpAtomicStore %u32_var %device %release %u32_1
  914. OpAtomicStore %u32_var %invocation %relaxed %u32_1
  915. )";
  916. CompileSuccessfully(GenerateShaderComputeCode(body), SPV_ENV_VULKAN_1_0);
  917. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  918. }
  919. TEST_F(ValidateAtomics, AtomicStoreVulkanAcquire) {
  920. const std::string body = R"(
  921. OpAtomicStore %u32_var %device %acquire %u32_1
  922. )";
  923. CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  924. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  925. EXPECT_THAT(getDiagnosticString(),
  926. AnyVUID("VUID-StandaloneSpirv-OpAtomicStore-04730"));
  927. EXPECT_THAT(
  928. getDiagnosticString(),
  929. HasSubstr("Vulkan spec disallows OpAtomicStore with Memory Semantics "
  930. "Acquire, AcquireRelease and SequentiallyConsistent"));
  931. }
  932. TEST_F(ValidateAtomics, AtomicStoreVulkanAcquireRelease) {
  933. const std::string body = R"(
  934. OpAtomicStore %u32_var %device %acquire_release %u32_1
  935. )";
  936. CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  937. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  938. EXPECT_THAT(getDiagnosticString(),
  939. AnyVUID("VUID-StandaloneSpirv-OpAtomicStore-04730"));
  940. EXPECT_THAT(
  941. getDiagnosticString(),
  942. HasSubstr("Vulkan spec disallows OpAtomicStore with Memory Semantics "
  943. "Acquire, AcquireRelease and SequentiallyConsistent"));
  944. }
  945. TEST_F(ValidateAtomics, AtomicStoreVulkanSequentiallyConsistent) {
  946. const std::string body = R"(
  947. OpAtomicStore %u32_var %device %sequentially_consistent %u32_1
  948. )";
  949. CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  950. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  951. EXPECT_THAT(getDiagnosticString(),
  952. AnyVUID("VUID-StandaloneSpirv-OpAtomicStore-04730"));
  953. EXPECT_THAT(
  954. getDiagnosticString(),
  955. HasSubstr("Vulkan spec disallows OpAtomicStore with Memory Semantics "
  956. "Acquire, AcquireRelease and SequentiallyConsistent"));
  957. }
  958. TEST_F(ValidateAtomics, AtomicStoreVulkanInvocationSemantics) {
  959. const std::string body = R"(
  960. OpAtomicStore %u32_var %invocation %acquire %u32_1
  961. )";
  962. CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  963. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  964. EXPECT_THAT(getDiagnosticString(),
  965. AnyVUID("VUID-StandaloneSpirv-None-04641"));
  966. EXPECT_THAT(
  967. getDiagnosticString(),
  968. HasSubstr("AtomicStore: Vulkan specification requires Memory Semantics "
  969. "to be None if used with Invocation Memory Scope"));
  970. }
  971. TEST_F(ValidateAtomics, AtomicStoreWrongPointerType) {
  972. const std::string body = R"(
  973. OpAtomicStore %f32_1 %device %relaxed %f32_1
  974. )";
  975. CompileSuccessfully(GenerateKernelCode(body));
  976. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  977. EXPECT_THAT(getDiagnosticString(),
  978. HasSubstr("AtomicStore: expected Pointer to be a pointer type"));
  979. }
  980. TEST_F(ValidateAtomics, AtomicStoreWrongPointerDataType) {
  981. const std::string body = R"(
  982. OpAtomicStore %f32vec4_var %device %relaxed %f32_1
  983. )";
  984. CompileSuccessfully(GenerateKernelCode(body));
  985. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  986. EXPECT_THAT(
  987. getDiagnosticString(),
  988. HasSubstr(
  989. "AtomicStore: "
  990. "expected Pointer to be a pointer to integer or float scalar type"));
  991. }
  992. TEST_F(ValidateAtomics, AtomicStoreWrongPointerStorageTypeForOpenCL) {
  993. const std::string body = R"(
  994. OpAtomicStore %f32_im_var %device %relaxed %f32_1
  995. )";
  996. CompileSuccessfully(GenerateKernelCode(body));
  997. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_OPENCL_1_2));
  998. EXPECT_THAT(
  999. getDiagnosticString(),
  1000. HasSubstr("AtomicStore: storage class must be Function, Workgroup, "
  1001. "CrossWorkGroup or Generic in the OpenCL environment."));
  1002. }
  1003. TEST_F(ValidateAtomics, AtomicStoreWrongPointerStorageType) {
  1004. const std::string body = R"(
  1005. OpAtomicStore %f32_uc_var %device %relaxed %f32_1
  1006. )";
  1007. CompileSuccessfully(GenerateKernelCode(body));
  1008. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1009. EXPECT_THAT(getDiagnosticString(),
  1010. HasSubstr("AtomicStore: storage class forbidden by universal "
  1011. "validation rules."));
  1012. }
  1013. TEST_F(ValidateAtomics, AtomicStoreWrongScopeType) {
  1014. const std::string body = R"(
  1015. OpAtomicStore %f32_var %f32_1 %relaxed %f32_1
  1016. )";
  1017. CompileSuccessfully(GenerateKernelCode(body));
  1018. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1019. EXPECT_THAT(getDiagnosticString(),
  1020. HasSubstr("AtomicStore: expected scope to be a 32-bit int\n "
  1021. "OpAtomicStore %28 %float_1 %uint_0_1 %float_1\n"));
  1022. }
  1023. TEST_F(ValidateAtomics, AtomicStoreWrongMemorySemanticsType) {
  1024. const std::string body = R"(
  1025. OpAtomicStore %f32_var %device %f32_1 %f32_1
  1026. )";
  1027. CompileSuccessfully(GenerateKernelCode(body));
  1028. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1029. EXPECT_THAT(
  1030. getDiagnosticString(),
  1031. HasSubstr("AtomicStore: expected Memory Semantics to be a 32-bit int"));
  1032. }
  1033. TEST_F(ValidateAtomics, AtomicStoreWrongValueType) {
  1034. const std::string body = R"(
  1035. OpAtomicStore %f32_var %device %relaxed %u32_1
  1036. )";
  1037. CompileSuccessfully(GenerateKernelCode(body));
  1038. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1039. EXPECT_THAT(
  1040. getDiagnosticString(),
  1041. HasSubstr("AtomicStore: "
  1042. "expected Value type and the type pointed to by Pointer to "
  1043. "be the same"));
  1044. }
  1045. TEST_F(ValidateAtomics, AtomicExchangeShaderSuccess) {
  1046. const std::string body = R"(
  1047. OpAtomicStore %u32_var %device %relaxed %u32_1
  1048. %val2 = OpAtomicExchange %u32 %u32_var %device %relaxed %u32_0
  1049. )";
  1050. CompileSuccessfully(GenerateShaderCode(body));
  1051. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
  1052. }
  1053. TEST_F(ValidateAtomics, AtomicExchangeKernelSuccess) {
  1054. const std::string body = R"(
  1055. OpAtomicStore %f32_var %device %relaxed %f32_1
  1056. %val2 = OpAtomicExchange %f32 %f32_var %device %relaxed %f32_0
  1057. OpAtomicStore %u32_var %device %relaxed %u32_1
  1058. %val4 = OpAtomicExchange %u32 %u32_var %device %relaxed %u32_0
  1059. )";
  1060. CompileSuccessfully(GenerateKernelCode(body));
  1061. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
  1062. }
  1063. TEST_F(ValidateAtomics, AtomicExchangeShaderFloat) {
  1064. const std::string body = R"(
  1065. OpAtomicStore %f32_var %device %relaxed %f32_1
  1066. %val2 = OpAtomicExchange %f32 %f32_var %device %relaxed %f32_0
  1067. )";
  1068. CompileSuccessfully(GenerateShaderCode(body));
  1069. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
  1070. }
  1071. TEST_F(ValidateAtomics, AtomicExchangeWrongResultType) {
  1072. const std::string body = R"(
  1073. OpStore %f32vec4_var %f32vec4_0000
  1074. %val2 = OpAtomicExchange %f32vec4 %f32vec4_var %device %relaxed %f32vec4_0000
  1075. )";
  1076. CompileSuccessfully(GenerateKernelCode(body));
  1077. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1078. EXPECT_THAT(
  1079. getDiagnosticString(),
  1080. HasSubstr("AtomicExchange: "
  1081. "expected Result Type to be integer or float scalar type"));
  1082. }
  1083. TEST_F(ValidateAtomics, AtomicExchangeWrongPointerType) {
  1084. const std::string body = R"(
  1085. %val2 = OpAtomicExchange %f32 %f32vec4_ptr %device %relaxed %f32vec4_0000
  1086. )";
  1087. CompileSuccessfully(GenerateKernelCode(body));
  1088. ASSERT_EQ(SPV_ERROR_INVALID_ID, ValidateInstructions());
  1089. EXPECT_THAT(getDiagnosticString(),
  1090. HasSubstr("Operand '33[%_ptr_Workgroup_v4float]' cannot be a "
  1091. "type"));
  1092. }
  1093. TEST_F(ValidateAtomics, AtomicExchangeWrongPointerDataType) {
  1094. const std::string body = R"(
  1095. OpStore %f32vec4_var %f32vec4_0000
  1096. %val2 = OpAtomicExchange %f32 %f32vec4_var %device %relaxed %f32vec4_0000
  1097. )";
  1098. CompileSuccessfully(GenerateKernelCode(body));
  1099. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1100. EXPECT_THAT(
  1101. getDiagnosticString(),
  1102. HasSubstr("AtomicExchange: "
  1103. "expected Pointer to point to a value of type Result Type"));
  1104. }
  1105. TEST_F(ValidateAtomics, AtomicExchangeWrongScopeType) {
  1106. const std::string body = R"(
  1107. OpAtomicStore %f32_var %device %relaxed %f32_1
  1108. %val2 = OpAtomicExchange %f32 %f32_var %f32_1 %relaxed %f32_0
  1109. )";
  1110. CompileSuccessfully(GenerateKernelCode(body));
  1111. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1112. EXPECT_THAT(getDiagnosticString(),
  1113. HasSubstr("AtomicExchange: expected scope to be a 32-bit int"));
  1114. }
  1115. TEST_F(ValidateAtomics, AtomicExchangeWrongMemorySemanticsType) {
  1116. const std::string body = R"(
  1117. OpAtomicStore %f32_var %device %relaxed %f32_1
  1118. %val2 = OpAtomicExchange %f32 %f32_var %device %f32_1 %f32_0
  1119. )";
  1120. CompileSuccessfully(GenerateKernelCode(body));
  1121. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1122. EXPECT_THAT(
  1123. getDiagnosticString(),
  1124. HasSubstr(
  1125. "AtomicExchange: expected Memory Semantics to be a 32-bit int"));
  1126. }
  1127. TEST_F(ValidateAtomics, AtomicExchangeWrongValueType) {
  1128. const std::string body = R"(
  1129. OpAtomicStore %f32_var %device %relaxed %f32_1
  1130. %val2 = OpAtomicExchange %f32 %f32_var %device %relaxed %u32_0
  1131. )";
  1132. CompileSuccessfully(GenerateKernelCode(body));
  1133. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1134. EXPECT_THAT(getDiagnosticString(),
  1135. HasSubstr("AtomicExchange: "
  1136. "expected Value to be of type Result Type"));
  1137. }
  1138. TEST_F(ValidateAtomics, AtomicExchangeVulkanInvocationSemantics) {
  1139. const std::string body = R"(
  1140. OpAtomicStore %u32_var %invocation %relaxed %u32_1
  1141. %val2 = OpAtomicExchange %u32 %u32_var %invocation %acquire %u32_0
  1142. )";
  1143. CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  1144. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  1145. EXPECT_THAT(getDiagnosticString(),
  1146. AnyVUID("VUID-StandaloneSpirv-None-04641"));
  1147. EXPECT_THAT(
  1148. getDiagnosticString(),
  1149. HasSubstr("AtomicExchange: Vulkan specification requires Memory "
  1150. "Semantics to be None if used with Invocation Memory Scope"));
  1151. }
  1152. TEST_F(ValidateAtomics, AtomicCompareExchangeShaderSuccess) {
  1153. const std::string body = R"(
  1154. OpAtomicStore %u32_var %device %relaxed %u32_1
  1155. %val2 = OpAtomicCompareExchange %u32 %u32_var %device %relaxed %relaxed %u32_0 %u32_0
  1156. )";
  1157. CompileSuccessfully(GenerateShaderCode(body));
  1158. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
  1159. }
  1160. TEST_F(ValidateAtomics, AtomicCompareExchangeKernelSuccess) {
  1161. const std::string body = R"(
  1162. OpAtomicStore %u32_var %device %relaxed %u32_1
  1163. %val2 = OpAtomicCompareExchange %u32 %u32_var %device %relaxed %relaxed %u32_0 %u32_0
  1164. )";
  1165. CompileSuccessfully(GenerateKernelCode(body));
  1166. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
  1167. }
  1168. TEST_F(ValidateAtomics, AtomicCompareExchangeShaderFloat) {
  1169. const std::string body = R"(
  1170. OpAtomicStore %f32_var %device %relaxed %f32_1
  1171. %val1 = OpAtomicCompareExchange %f32 %f32_var %device %relaxed %relaxed %f32_0 %f32_1
  1172. )";
  1173. CompileSuccessfully(GenerateShaderCode(body));
  1174. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1175. EXPECT_THAT(getDiagnosticString(),
  1176. HasSubstr("AtomicCompareExchange: "
  1177. "expected Result Type to be integer scalar type"));
  1178. }
  1179. TEST_F(ValidateAtomics, AtomicCompareExchangeWrongResultType) {
  1180. const std::string body = R"(
  1181. OpStore %f32vec4_var %f32vec4_0000
  1182. %val2 = OpAtomicCompareExchange %f32vec4 %f32vec4_var %device %relaxed %relaxed %f32vec4_0000 %f32vec4_0000
  1183. )";
  1184. CompileSuccessfully(GenerateKernelCode(body));
  1185. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1186. EXPECT_THAT(getDiagnosticString(),
  1187. HasSubstr("AtomicCompareExchange: "
  1188. "expected Result Type to be integer scalar type"));
  1189. }
  1190. TEST_F(ValidateAtomics, AtomicCompareExchangeWrongPointerType) {
  1191. const std::string body = R"(
  1192. %val2 = OpAtomicCompareExchange %f32 %f32vec4_ptr %device %relaxed %relaxed %f32vec4_0000 %f32vec4_0000
  1193. )";
  1194. CompileSuccessfully(GenerateKernelCode(body));
  1195. ASSERT_EQ(SPV_ERROR_INVALID_ID, ValidateInstructions());
  1196. EXPECT_THAT(getDiagnosticString(),
  1197. HasSubstr("Operand '33[%_ptr_Workgroup_v4float]' cannot be a "
  1198. "type"));
  1199. }
  1200. TEST_F(ValidateAtomics, AtomicCompareExchangeWrongPointerDataType) {
  1201. const std::string body = R"(
  1202. OpStore %f32vec4_var %f32vec4_0000
  1203. %val2 = OpAtomicCompareExchange %u32 %f32vec4_var %device %relaxed %relaxed %u32_0 %u32_0
  1204. )";
  1205. CompileSuccessfully(GenerateKernelCode(body));
  1206. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1207. EXPECT_THAT(
  1208. getDiagnosticString(),
  1209. HasSubstr("AtomicCompareExchange: "
  1210. "expected Pointer to point to a value of type Result Type"));
  1211. }
  1212. TEST_F(ValidateAtomics, AtomicCompareExchangeWrongScopeType) {
  1213. const std::string body = R"(
  1214. OpAtomicStore %u64_var %device %relaxed %u64_1
  1215. %val2 = OpAtomicCompareExchange %u64 %u64_var %u64_1 %relaxed %relaxed %u32_0 %u32_0
  1216. )";
  1217. CompileSuccessfully(GenerateKernelCode(body, "OpCapability Int64Atomics\n"));
  1218. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1219. EXPECT_THAT(getDiagnosticString(),
  1220. HasSubstr("AtomicCompareExchange: expected scope to be a 32-bit "
  1221. "int"));
  1222. }
  1223. TEST_F(ValidateAtomics, AtomicCompareExchangeWrongMemorySemanticsType1) {
  1224. const std::string body = R"(
  1225. OpAtomicStore %u32_var %device %relaxed %u32_1
  1226. %val2 = OpAtomicCompareExchange %u32 %u32_var %device %f32_1 %relaxed %u32_0 %u32_0
  1227. )";
  1228. CompileSuccessfully(GenerateKernelCode(body));
  1229. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1230. EXPECT_THAT(getDiagnosticString(),
  1231. HasSubstr("AtomicCompareExchange: expected Memory Semantics to "
  1232. "be a 32-bit int"));
  1233. }
  1234. TEST_F(ValidateAtomics, AtomicCompareExchangeWrongMemorySemanticsType2) {
  1235. const std::string body = R"(
  1236. OpAtomicStore %u32_var %device %relaxed %u32_1
  1237. %val2 = OpAtomicCompareExchange %u32 %u32_var %device %relaxed %f32_1 %u32_0 %u32_0
  1238. )";
  1239. CompileSuccessfully(GenerateKernelCode(body));
  1240. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1241. EXPECT_THAT(getDiagnosticString(),
  1242. HasSubstr("AtomicCompareExchange: expected Memory Semantics to "
  1243. "be a 32-bit int"));
  1244. }
  1245. TEST_F(ValidateAtomics, AtomicCompareExchangeUnequalRelease) {
  1246. const std::string body = R"(
  1247. OpAtomicStore %u32_var %device %relaxed %u32_1
  1248. %val2 = OpAtomicCompareExchange %u32 %u32_var %device %relaxed %release %u32_0 %u32_0
  1249. )";
  1250. CompileSuccessfully(GenerateKernelCode(body));
  1251. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1252. EXPECT_THAT(getDiagnosticString(),
  1253. HasSubstr("AtomicCompareExchange: Memory Semantics Release and "
  1254. "AcquireRelease cannot be used for operand Unequal"));
  1255. }
  1256. TEST_F(ValidateAtomics, AtomicCompareExchangeWrongValueType) {
  1257. const std::string body = R"(
  1258. OpAtomicStore %u32_var %device %relaxed %u32_1
  1259. %val2 = OpAtomicCompareExchange %u32 %u32_var %device %relaxed %relaxed %f32_1 %u32_0
  1260. )";
  1261. CompileSuccessfully(GenerateKernelCode(body));
  1262. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1263. EXPECT_THAT(getDiagnosticString(),
  1264. HasSubstr("AtomicCompareExchange: "
  1265. "expected Value to be of type Result Type"));
  1266. }
  1267. TEST_F(ValidateAtomics, AtomicCompareExchangeWrongComparatorType) {
  1268. const std::string body = R"(
  1269. OpAtomicStore %u32_var %device %relaxed %u32_1
  1270. %val2 = OpAtomicCompareExchange %u32 %u32_var %device %relaxed %relaxed %u32_0 %f32_0
  1271. )";
  1272. CompileSuccessfully(GenerateKernelCode(body));
  1273. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1274. EXPECT_THAT(getDiagnosticString(),
  1275. HasSubstr("AtomicCompareExchange: "
  1276. "expected Comparator to be of type Result Type"));
  1277. }
  1278. TEST_F(ValidateAtomics, AtomicCompareExchangeWeakSuccess) {
  1279. const std::string body = R"(
  1280. OpAtomicStore %u32_var %device %relaxed %u32_1
  1281. %val4 = OpAtomicCompareExchangeWeak %u32 %u32_var %device %relaxed %relaxed %u32_0 %u32_0
  1282. )";
  1283. CompileSuccessfully(GenerateKernelCode(body));
  1284. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
  1285. }
  1286. TEST_F(ValidateAtomics, AtomicCompareExchangeWeakWrongResultType) {
  1287. const std::string body = R"(
  1288. OpAtomicStore %f32_var %device %relaxed %f32_1
  1289. %val2 = OpAtomicCompareExchangeWeak %f32 %f32_var %device %relaxed %relaxed %f32_0 %f32_1
  1290. )";
  1291. CompileSuccessfully(GenerateKernelCode(body));
  1292. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1293. EXPECT_THAT(getDiagnosticString(),
  1294. HasSubstr("AtomicCompareExchangeWeak: "
  1295. "expected Result Type to be integer scalar type"));
  1296. }
  1297. TEST_F(ValidateAtomics, AtomicCompareExchangeVulkanInvocationSemanticsEqual) {
  1298. const std::string body = R"(
  1299. OpAtomicStore %u32_var %device %relaxed %u32_1
  1300. %val2 = OpAtomicCompareExchange %u32 %u32_var %invocation %release %relaxed %u32_0 %u32_0
  1301. )";
  1302. CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  1303. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  1304. EXPECT_THAT(getDiagnosticString(),
  1305. AnyVUID("VUID-StandaloneSpirv-None-04641"));
  1306. EXPECT_THAT(
  1307. getDiagnosticString(),
  1308. HasSubstr("AtomicCompareExchange: Vulkan specification requires Memory "
  1309. "Semantics to be None if used with Invocation Memory Scope"));
  1310. }
  1311. TEST_F(ValidateAtomics, AtomicCompareExchangeVulkanInvocationSemanticsUnequal) {
  1312. const std::string body = R"(
  1313. OpAtomicStore %u32_var %device %relaxed %u32_1
  1314. %val2 = OpAtomicCompareExchange %u32 %u32_var %invocation %relaxed %acquire %u32_0 %u32_0
  1315. )";
  1316. CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_0);
  1317. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_0));
  1318. EXPECT_THAT(getDiagnosticString(),
  1319. AnyVUID("VUID-StandaloneSpirv-None-04641"));
  1320. EXPECT_THAT(
  1321. getDiagnosticString(),
  1322. HasSubstr("AtomicCompareExchange: Vulkan specification requires Memory "
  1323. "Semantics to be None if used with Invocation Memory Scope"));
  1324. }
  1325. TEST_F(ValidateAtomics, AtomicArithmeticsSuccess) {
  1326. const std::string body = R"(
  1327. OpAtomicStore %u32_var %device %relaxed %u32_1
  1328. %val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_release
  1329. %val2 = OpAtomicIDecrement %u32 %u32_var %device %acquire_release
  1330. %val3 = OpAtomicIAdd %u32 %u32_var %device %acquire_release %u32_1
  1331. %val4 = OpAtomicISub %u32 %u32_var %device %acquire_release %u32_1
  1332. %val5 = OpAtomicUMin %u32 %u32_var %device %acquire_release %u32_1
  1333. %val6 = OpAtomicUMax %u32 %u32_var %device %acquire_release %u32_1
  1334. %val7 = OpAtomicSMin %u32 %u32_var %device %sequentially_consistent %u32_1
  1335. %val8 = OpAtomicSMax %u32 %u32_var %device %sequentially_consistent %u32_1
  1336. %val9 = OpAtomicAnd %u32 %u32_var %device %sequentially_consistent %u32_1
  1337. %val10 = OpAtomicOr %u32 %u32_var %device %sequentially_consistent %u32_1
  1338. %val11 = OpAtomicXor %u32 %u32_var %device %sequentially_consistent %u32_1
  1339. )";
  1340. CompileSuccessfully(GenerateKernelCode(body));
  1341. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
  1342. }
  1343. TEST_F(ValidateAtomics, AtomicFlagsSuccess) {
  1344. const std::string body = R"(
  1345. OpAtomicFlagClear %u32_var %device %release
  1346. %val1 = OpAtomicFlagTestAndSet %bool %u32_var %device %relaxed
  1347. )";
  1348. CompileSuccessfully(GenerateKernelCode(body));
  1349. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
  1350. }
  1351. TEST_F(ValidateAtomics, AtomicFlagTestAndSetWrongResultType) {
  1352. const std::string body = R"(
  1353. %val1 = OpAtomicFlagTestAndSet %u32 %u32_var %device %relaxed
  1354. )";
  1355. CompileSuccessfully(GenerateKernelCode(body));
  1356. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1357. EXPECT_THAT(getDiagnosticString(),
  1358. HasSubstr("AtomicFlagTestAndSet: "
  1359. "expected Result Type to be bool scalar type"));
  1360. }
  1361. TEST_F(ValidateAtomics, AtomicFlagTestAndSetNotPointer) {
  1362. const std::string body = R"(
  1363. %val1 = OpAtomicFlagTestAndSet %bool %u32_1 %device %relaxed
  1364. )";
  1365. CompileSuccessfully(GenerateKernelCode(body));
  1366. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1367. EXPECT_THAT(getDiagnosticString(),
  1368. HasSubstr("AtomicFlagTestAndSet: "
  1369. "expected Pointer to be a pointer type"));
  1370. }
  1371. TEST_F(ValidateAtomics, AtomicFlagTestAndSetNotIntPointer) {
  1372. const std::string body = R"(
  1373. %val1 = OpAtomicFlagTestAndSet %bool %f32_var %device %relaxed
  1374. )";
  1375. CompileSuccessfully(GenerateKernelCode(body));
  1376. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1377. EXPECT_THAT(
  1378. getDiagnosticString(),
  1379. HasSubstr("AtomicFlagTestAndSet: "
  1380. "expected Pointer to point to a value of 32-bit integer type"));
  1381. }
  1382. TEST_F(ValidateAtomics, AtomicFlagTestAndSetNotInt32Pointer) {
  1383. const std::string body = R"(
  1384. %val1 = OpAtomicFlagTestAndSet %bool %u64_var %device %relaxed
  1385. )";
  1386. CompileSuccessfully(GenerateKernelCode(body, "OpCapability Int64Atomics\n"));
  1387. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1388. EXPECT_THAT(
  1389. getDiagnosticString(),
  1390. HasSubstr("AtomicFlagTestAndSet: "
  1391. "expected Pointer to point to a value of 32-bit integer type"));
  1392. }
  1393. TEST_F(ValidateAtomics, AtomicFlagTestAndSetWrongScopeType) {
  1394. const std::string body = R"(
  1395. %val1 = OpAtomicFlagTestAndSet %bool %u32_var %u64_1 %relaxed
  1396. )";
  1397. CompileSuccessfully(GenerateKernelCode(body));
  1398. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1399. EXPECT_THAT(
  1400. getDiagnosticString(),
  1401. HasSubstr("AtomicFlagTestAndSet: expected scope to be a 32-bit int"));
  1402. }
  1403. TEST_F(ValidateAtomics, AtomicFlagTestAndSetWrongMemorySemanticsType) {
  1404. const std::string body = R"(
  1405. %val1 = OpAtomicFlagTestAndSet %bool %u32_var %device %u64_1
  1406. )";
  1407. CompileSuccessfully(GenerateKernelCode(body));
  1408. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1409. EXPECT_THAT(getDiagnosticString(),
  1410. HasSubstr("AtomicFlagTestAndSet: "
  1411. "expected Memory Semantics to be a 32-bit int"));
  1412. }
  1413. TEST_F(ValidateAtomics, AtomicFlagClearAcquire) {
  1414. const std::string body = R"(
  1415. OpAtomicFlagClear %u32_var %device %acquire
  1416. )";
  1417. CompileSuccessfully(GenerateKernelCode(body));
  1418. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1419. EXPECT_THAT(getDiagnosticString(),
  1420. HasSubstr("Memory Semantics Acquire and AcquireRelease cannot be "
  1421. "used with AtomicFlagClear"));
  1422. }
  1423. TEST_F(ValidateAtomics, AtomicFlagClearNotPointer) {
  1424. const std::string body = R"(
  1425. OpAtomicFlagClear %u32_1 %device %relaxed
  1426. )";
  1427. CompileSuccessfully(GenerateKernelCode(body));
  1428. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1429. EXPECT_THAT(getDiagnosticString(),
  1430. HasSubstr("AtomicFlagClear: "
  1431. "expected Pointer to be a pointer type"));
  1432. }
  1433. TEST_F(ValidateAtomics, AtomicFlagClearNotIntPointer) {
  1434. const std::string body = R"(
  1435. OpAtomicFlagClear %f32_var %device %relaxed
  1436. )";
  1437. CompileSuccessfully(GenerateKernelCode(body));
  1438. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1439. EXPECT_THAT(
  1440. getDiagnosticString(),
  1441. HasSubstr("AtomicFlagClear: "
  1442. "expected Pointer to point to a value of 32-bit integer type"));
  1443. }
  1444. TEST_F(ValidateAtomics, AtomicFlagClearNotInt32Pointer) {
  1445. const std::string body = R"(
  1446. OpAtomicFlagClear %u64_var %device %relaxed
  1447. )";
  1448. CompileSuccessfully(GenerateKernelCode(body, "OpCapability Int64Atomics\n"));
  1449. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1450. EXPECT_THAT(
  1451. getDiagnosticString(),
  1452. HasSubstr("AtomicFlagClear: "
  1453. "expected Pointer to point to a value of 32-bit integer type"));
  1454. }
  1455. TEST_F(ValidateAtomics, AtomicFlagClearWrongScopeType) {
  1456. const std::string body = R"(
  1457. OpAtomicFlagClear %u32_var %u64_1 %relaxed
  1458. )";
  1459. CompileSuccessfully(GenerateKernelCode(body));
  1460. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1461. EXPECT_THAT(getDiagnosticString(),
  1462. HasSubstr("AtomicFlagClear: expected scope to be a 32-bit "
  1463. "int\n OpAtomicFlagClear %30 %ulong_1 %uint_0_1\n"));
  1464. }
  1465. TEST_F(ValidateAtomics, AtomicFlagClearWrongMemorySemanticsType) {
  1466. const std::string body = R"(
  1467. OpAtomicFlagClear %u32_var %device %u64_1
  1468. )";
  1469. CompileSuccessfully(GenerateKernelCode(body));
  1470. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1471. EXPECT_THAT(
  1472. getDiagnosticString(),
  1473. HasSubstr(
  1474. "AtomicFlagClear: expected Memory Semantics to be a 32-bit int"));
  1475. }
  1476. TEST_F(ValidateAtomics, AtomicIIncrementAcquireAndRelease) {
  1477. const std::string body = R"(
  1478. OpAtomicStore %u32_var %device %relaxed %u32_1
  1479. %val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_and_release
  1480. )";
  1481. CompileSuccessfully(GenerateKernelCode(body));
  1482. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1483. EXPECT_THAT(getDiagnosticString(),
  1484. HasSubstr("AtomicIIncrement: Memory Semantics can have at most "
  1485. "one of the following bits set: Acquire, Release, "
  1486. "AcquireRelease or SequentiallyConsistent"));
  1487. }
  1488. TEST_F(ValidateAtomics, AtomicUniformMemorySemanticsShader) {
  1489. const std::string body = R"(
  1490. OpAtomicStore %u32_var %device %relaxed %u32_1
  1491. %val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_release_uniform_workgroup
  1492. )";
  1493. CompileSuccessfully(GenerateShaderCode(body));
  1494. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
  1495. }
  1496. TEST_F(ValidateAtomics, AtomicUniformMemorySemanticsKernel) {
  1497. const std::string body = R"(
  1498. OpAtomicStore %u32_var %device %relaxed %u32_1
  1499. %val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_release_uniform_workgroup
  1500. )";
  1501. CompileSuccessfully(GenerateKernelCode(body));
  1502. ASSERT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1503. EXPECT_THAT(getDiagnosticString(),
  1504. HasSubstr("AtomicIIncrement: Memory Semantics UniformMemory "
  1505. "requires capability Shader"));
  1506. }
  1507. // Lack of the AtomicStorage capability is intentionally ignored, see
  1508. // https://github.com/KhronosGroup/glslang/issues/1618 for the reasoning why.
  1509. TEST_F(ValidateAtomics, AtomicCounterMemorySemanticsNoCapability) {
  1510. const std::string body = R"(
  1511. OpAtomicStore %u32_var %device %relaxed %u32_1
  1512. %val1 = OpAtomicIIncrement %u32 %u32_var %device
  1513. %acquire_release_atomic_counter_workgroup
  1514. )";
  1515. CompileSuccessfully(GenerateKernelCode(body));
  1516. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
  1517. }
  1518. TEST_F(ValidateAtomics, AtomicCounterMemorySemanticsWithCapability) {
  1519. const std::string body = R"(
  1520. OpAtomicStore %u32_var %device %relaxed %u32_1
  1521. %val1 = OpAtomicIIncrement %u32 %u32_var %device %acquire_release_atomic_counter_workgroup
  1522. )";
  1523. CompileSuccessfully(GenerateKernelCode(body, "OpCapability AtomicStorage\n"));
  1524. ASSERT_EQ(SPV_SUCCESS, ValidateInstructions());
  1525. }
  1526. TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicLoad) {
  1527. const std::string body = R"(
  1528. %ld = OpAtomicLoad %u32 %u32_var %workgroup %sequentially_consistent
  1529. )";
  1530. const std::string extra = R"(
  1531. OpCapability VulkanMemoryModelKHR
  1532. OpExtension "SPV_KHR_vulkan_memory_model"
  1533. )";
  1534. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1535. SPV_ENV_UNIVERSAL_1_3);
  1536. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1537. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1538. EXPECT_THAT(getDiagnosticString(),
  1539. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1540. "used with the VulkanKHR memory model."));
  1541. }
  1542. TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicStore) {
  1543. const std::string body = R"(
  1544. OpAtomicStore %u32_var %workgroup %sequentially_consistent %u32_0
  1545. )";
  1546. const std::string extra = R"(
  1547. OpCapability VulkanMemoryModelKHR
  1548. OpExtension "SPV_KHR_vulkan_memory_model"
  1549. )";
  1550. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1551. SPV_ENV_UNIVERSAL_1_3);
  1552. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1553. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1554. EXPECT_THAT(getDiagnosticString(),
  1555. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1556. "used with the VulkanKHR memory model."));
  1557. }
  1558. TEST_F(ValidateAtomics,
  1559. VulkanMemoryModelBanSequentiallyConsistentAtomicExchange) {
  1560. const std::string body = R"(
  1561. %ex = OpAtomicExchange %u32 %u32_var %workgroup %sequentially_consistent %u32_0
  1562. )";
  1563. const std::string extra = R"(
  1564. OpCapability VulkanMemoryModelKHR
  1565. OpExtension "SPV_KHR_vulkan_memory_model"
  1566. )";
  1567. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1568. SPV_ENV_UNIVERSAL_1_3);
  1569. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1570. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1571. EXPECT_THAT(getDiagnosticString(),
  1572. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1573. "used with the VulkanKHR memory model."));
  1574. }
  1575. TEST_F(ValidateAtomics,
  1576. VulkanMemoryModelBanSequentiallyConsistentAtomicCompareExchangeEqual) {
  1577. const std::string body = R"(
  1578. %ex = OpAtomicCompareExchange %u32 %u32_var %workgroup %sequentially_consistent %relaxed %u32_0 %u32_0
  1579. )";
  1580. const std::string extra = R"(
  1581. OpCapability VulkanMemoryModelKHR
  1582. OpExtension "SPV_KHR_vulkan_memory_model"
  1583. )";
  1584. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1585. SPV_ENV_UNIVERSAL_1_3);
  1586. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1587. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1588. EXPECT_THAT(getDiagnosticString(),
  1589. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1590. "used with the VulkanKHR memory model."));
  1591. }
  1592. TEST_F(ValidateAtomics,
  1593. VulkanMemoryModelBanSequentiallyConsistentAtomicCompareExchangeUnequal) {
  1594. const std::string body = R"(
  1595. %ex = OpAtomicCompareExchange %u32 %u32_var %workgroup %relaxed %sequentially_consistent %u32_0 %u32_0
  1596. )";
  1597. const std::string extra = R"(
  1598. OpCapability VulkanMemoryModelKHR
  1599. OpExtension "SPV_KHR_vulkan_memory_model"
  1600. )";
  1601. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1602. SPV_ENV_UNIVERSAL_1_3);
  1603. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1604. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1605. EXPECT_THAT(getDiagnosticString(),
  1606. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1607. "used with the VulkanKHR memory model."));
  1608. }
  1609. TEST_F(ValidateAtomics,
  1610. VulkanMemoryModelBanSequentiallyConsistentAtomicIIncrement) {
  1611. const std::string body = R"(
  1612. %inc = OpAtomicIIncrement %u32 %u32_var %workgroup %sequentially_consistent
  1613. )";
  1614. const std::string extra = R"(
  1615. OpCapability VulkanMemoryModelKHR
  1616. OpExtension "SPV_KHR_vulkan_memory_model"
  1617. )";
  1618. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1619. SPV_ENV_UNIVERSAL_1_3);
  1620. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1621. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1622. EXPECT_THAT(getDiagnosticString(),
  1623. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1624. "used with the VulkanKHR memory model."));
  1625. }
  1626. TEST_F(ValidateAtomics,
  1627. VulkanMemoryModelBanSequentiallyConsistentAtomicIDecrement) {
  1628. const std::string body = R"(
  1629. %dec = OpAtomicIDecrement %u32 %u32_var %workgroup %sequentially_consistent
  1630. )";
  1631. const std::string extra = R"(
  1632. OpCapability VulkanMemoryModelKHR
  1633. OpExtension "SPV_KHR_vulkan_memory_model"
  1634. )";
  1635. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1636. SPV_ENV_UNIVERSAL_1_3);
  1637. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1638. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1639. EXPECT_THAT(getDiagnosticString(),
  1640. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1641. "used with the VulkanKHR memory model."));
  1642. }
  1643. TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicIAdd) {
  1644. const std::string body = R"(
  1645. %add = OpAtomicIAdd %u32 %u32_var %workgroup %sequentially_consistent %u32_0
  1646. )";
  1647. const std::string extra = R"(
  1648. OpCapability VulkanMemoryModelKHR
  1649. OpExtension "SPV_KHR_vulkan_memory_model"
  1650. )";
  1651. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1652. SPV_ENV_UNIVERSAL_1_3);
  1653. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1654. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1655. EXPECT_THAT(getDiagnosticString(),
  1656. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1657. "used with the VulkanKHR memory model."));
  1658. }
  1659. TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicISub) {
  1660. const std::string body = R"(
  1661. %sub = OpAtomicISub %u32 %u32_var %workgroup %sequentially_consistent %u32_0
  1662. )";
  1663. const std::string extra = R"(
  1664. OpCapability VulkanMemoryModelKHR
  1665. OpExtension "SPV_KHR_vulkan_memory_model"
  1666. )";
  1667. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1668. SPV_ENV_UNIVERSAL_1_3);
  1669. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1670. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1671. EXPECT_THAT(getDiagnosticString(),
  1672. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1673. "used with the VulkanKHR memory model."));
  1674. }
  1675. TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicSMin) {
  1676. const std::string body = R"(
  1677. %min = OpAtomicSMin %u32 %u32_var %workgroup %sequentially_consistent %u32_0
  1678. )";
  1679. const std::string extra = R"(
  1680. OpCapability VulkanMemoryModelKHR
  1681. OpExtension "SPV_KHR_vulkan_memory_model"
  1682. )";
  1683. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1684. SPV_ENV_UNIVERSAL_1_3);
  1685. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1686. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1687. EXPECT_THAT(getDiagnosticString(),
  1688. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1689. "used with the VulkanKHR memory model."));
  1690. }
  1691. TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicUMin) {
  1692. const std::string body = R"(
  1693. %min = OpAtomicUMin %u32 %u32_var %workgroup %sequentially_consistent %u32_0
  1694. )";
  1695. const std::string extra = R"(
  1696. OpCapability VulkanMemoryModelKHR
  1697. OpExtension "SPV_KHR_vulkan_memory_model"
  1698. )";
  1699. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1700. SPV_ENV_UNIVERSAL_1_3);
  1701. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1702. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1703. EXPECT_THAT(getDiagnosticString(),
  1704. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1705. "used with the VulkanKHR memory model."));
  1706. }
  1707. TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicFMinEXT) {
  1708. const std::string body = R"(
  1709. %max = OpAtomicFMinEXT %f32 %f32_var %workgroup %sequentially_consistent %f32_0
  1710. )";
  1711. const std::string extra = R"(
  1712. OpCapability VulkanMemoryModelKHR
  1713. OpCapability AtomicFloat32MinMaxEXT
  1714. OpExtension "SPV_KHR_vulkan_memory_model"
  1715. OpExtension "SPV_EXT_shader_atomic_float_min_max"
  1716. )";
  1717. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1718. SPV_ENV_UNIVERSAL_1_3);
  1719. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1720. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1721. EXPECT_THAT(getDiagnosticString(),
  1722. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1723. "used with the VulkanKHR memory model."));
  1724. }
  1725. TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicSMax) {
  1726. const std::string body = R"(
  1727. %max = OpAtomicSMax %u32 %u32_var %workgroup %sequentially_consistent %u32_0
  1728. )";
  1729. const std::string extra = R"(
  1730. OpCapability VulkanMemoryModelKHR
  1731. OpExtension "SPV_KHR_vulkan_memory_model"
  1732. )";
  1733. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1734. SPV_ENV_UNIVERSAL_1_3);
  1735. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1736. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1737. EXPECT_THAT(getDiagnosticString(),
  1738. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1739. "used with the VulkanKHR memory model."));
  1740. }
  1741. TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicUMax) {
  1742. const std::string body = R"(
  1743. %max = OpAtomicUMax %u32 %u32_var %workgroup %sequentially_consistent %u32_0
  1744. )";
  1745. const std::string extra = R"(
  1746. OpCapability VulkanMemoryModelKHR
  1747. OpExtension "SPV_KHR_vulkan_memory_model"
  1748. )";
  1749. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1750. SPV_ENV_UNIVERSAL_1_3);
  1751. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1752. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1753. EXPECT_THAT(getDiagnosticString(),
  1754. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1755. "used with the VulkanKHR memory model."));
  1756. }
  1757. TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicFMaxEXT) {
  1758. const std::string body = R"(
  1759. %max = OpAtomicFMaxEXT %f32 %f32_var %workgroup %sequentially_consistent %f32_0
  1760. )";
  1761. const std::string extra = R"(
  1762. OpCapability VulkanMemoryModelKHR
  1763. OpCapability AtomicFloat32MinMaxEXT
  1764. OpExtension "SPV_KHR_vulkan_memory_model"
  1765. OpExtension "SPV_EXT_shader_atomic_float_min_max"
  1766. )";
  1767. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1768. SPV_ENV_UNIVERSAL_1_3);
  1769. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1770. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1771. EXPECT_THAT(getDiagnosticString(),
  1772. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1773. "used with the VulkanKHR memory model."));
  1774. }
  1775. TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicAnd) {
  1776. const std::string body = R"(
  1777. %and = OpAtomicAnd %u32 %u32_var %workgroup %sequentially_consistent %u32_0
  1778. )";
  1779. const std::string extra = R"(
  1780. OpCapability VulkanMemoryModelKHR
  1781. OpExtension "SPV_KHR_vulkan_memory_model"
  1782. )";
  1783. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1784. SPV_ENV_UNIVERSAL_1_3);
  1785. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1786. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1787. EXPECT_THAT(getDiagnosticString(),
  1788. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1789. "used with the VulkanKHR memory model."));
  1790. }
  1791. TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicOr) {
  1792. const std::string body = R"(
  1793. %or = OpAtomicOr %u32 %u32_var %workgroup %sequentially_consistent %u32_0
  1794. )";
  1795. const std::string extra = R"(
  1796. OpCapability VulkanMemoryModelKHR
  1797. OpExtension "SPV_KHR_vulkan_memory_model"
  1798. )";
  1799. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1800. SPV_ENV_UNIVERSAL_1_3);
  1801. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1802. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1803. EXPECT_THAT(getDiagnosticString(),
  1804. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1805. "used with the VulkanKHR memory model."));
  1806. }
  1807. TEST_F(ValidateAtomics, VulkanMemoryModelBanSequentiallyConsistentAtomicXor) {
  1808. const std::string body = R"(
  1809. %xor = OpAtomicXor %u32 %u32_var %workgroup %sequentially_consistent %u32_0
  1810. )";
  1811. const std::string extra = R"(
  1812. OpCapability VulkanMemoryModelKHR
  1813. OpExtension "SPV_KHR_vulkan_memory_model"
  1814. )";
  1815. CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
  1816. SPV_ENV_UNIVERSAL_1_3);
  1817. EXPECT_EQ(SPV_ERROR_INVALID_DATA,
  1818. ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  1819. EXPECT_THAT(getDiagnosticString(),
  1820. HasSubstr("SequentiallyConsistent memory semantics cannot be "
  1821. "used with the VulkanKHR memory model."));
  1822. }
  1823. TEST_F(ValidateAtomics, OutputMemoryKHRRequiresVulkanMemoryModelKHR) {
  1824. const std::string text = R"(
  1825. OpCapability Shader
  1826. OpMemoryModel Logical GLSL450
  1827. OpEntryPoint Fragment %1 "func"
  1828. OpExecutionMode %1 OriginUpperLeft
  1829. %2 = OpTypeVoid
  1830. %3 = OpTypeInt 32 0
  1831. %semantics = OpConstant %3 4100
  1832. %5 = OpTypeFunction %2
  1833. %workgroup = OpConstant %3 2
  1834. %ptr = OpTypePointer Workgroup %3
  1835. %var = OpVariable %ptr Workgroup
  1836. %1 = OpFunction %2 None %5
  1837. %7 = OpLabel
  1838. OpAtomicStore %var %workgroup %semantics %workgroup
  1839. OpReturn
  1840. OpFunctionEnd
  1841. )";
  1842. CompileSuccessfully(text);
  1843. EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1844. EXPECT_THAT(getDiagnosticString(),
  1845. HasSubstr("AtomicStore: Memory Semantics OutputMemoryKHR "
  1846. "requires capability VulkanMemoryModelKHR"));
  1847. }
  1848. TEST_F(ValidateAtomics, MakeAvailableKHRRequiresVulkanMemoryModelKHR) {
  1849. const std::string text = R"(
  1850. OpCapability Shader
  1851. OpMemoryModel Logical GLSL450
  1852. OpEntryPoint Fragment %1 "func"
  1853. OpExecutionMode %1 OriginUpperLeft
  1854. %2 = OpTypeVoid
  1855. %3 = OpTypeInt 32 0
  1856. %semantics = OpConstant %3 8196
  1857. %5 = OpTypeFunction %2
  1858. %workgroup = OpConstant %3 2
  1859. %ptr = OpTypePointer Workgroup %3
  1860. %var = OpVariable %ptr Workgroup
  1861. %1 = OpFunction %2 None %5
  1862. %7 = OpLabel
  1863. OpAtomicStore %var %workgroup %semantics %workgroup
  1864. OpReturn
  1865. OpFunctionEnd
  1866. )";
  1867. CompileSuccessfully(text);
  1868. EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  1869. EXPECT_THAT(getDiagnosticString(),
  1870. HasSubstr("AtomicStore: Memory Semantics MakeAvailableKHR "
  1871. "requires capability VulkanMemoryModelKHR"));
  1872. }
// MakeVisibleKHR Memory Semantics must be rejected when the module does not
// declare the VulkanMemoryModelKHR capability.
TEST_F(ValidateAtomics, MakeVisibleKHRRequiresVulkanMemoryModelKHR) {
  // %semantics = 16386 = MakeVisibleKHR (0x4000) | Acquire (0x2).
  const std::string text = R"(
OpCapability Shader
OpMemoryModel Logical GLSL450
OpEntryPoint Fragment %1 "func"
OpExecutionMode %1 OriginUpperLeft
%2 = OpTypeVoid
%3 = OpTypeInt 32 0
%semantics = OpConstant %3 16386
%5 = OpTypeFunction %2
%workgroup = OpConstant %3 2
%ptr = OpTypePointer Workgroup %3
%var = OpVariable %ptr Workgroup
%1 = OpFunction %2 None %5
%ld = OpAtomicLoad %3 %var %workgroup %semantics
OpReturn
OpFunctionEnd
)";
  CompileSuccessfully(text);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicLoad: Memory Semantics MakeVisibleKHR requires "
                        "capability VulkanMemoryModelKHR"));
}
// With the Vulkan memory model enabled, MakeAvailableKHR must be accompanied
// by Release or AcquireRelease; here only a storage-class bit is set.
TEST_F(ValidateAtomics, MakeAvailableKHRRequiresReleaseSemantics) {
  // %semantics = 8448 = MakeAvailableKHR (0x2000) | WorkgroupMemory (0x100);
  // no Release/AcquireRelease bit, which triggers the error.
  const std::string text = R"(
OpCapability Shader
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
OpMemoryModel Logical VulkanKHR
OpEntryPoint Fragment %1 "func"
OpExecutionMode %1 OriginUpperLeft
%2 = OpTypeVoid
%3 = OpTypeInt 32 0
%semantics = OpConstant %3 8448
%5 = OpTypeFunction %2
%workgroup = OpConstant %3 2
%ptr = OpTypePointer Workgroup %3
%var = OpVariable %ptr Workgroup
%1 = OpFunction %2 None %5
%7 = OpLabel
OpAtomicStore %var %workgroup %semantics %workgroup
OpReturn
OpFunctionEnd
)";
  CompileSuccessfully(text, SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicStore: MakeAvailableKHR Memory Semantics also requires "
                "either Release or AcquireRelease Memory Semantics"));
}
// With the Vulkan memory model enabled, MakeVisibleKHR must be accompanied by
// Acquire or AcquireRelease; here only a storage-class bit is set.
TEST_F(ValidateAtomics, MakeVisibleKHRRequiresAcquireSemantics) {
  // %semantics = 16640 = MakeVisibleKHR (0x4000) | WorkgroupMemory (0x100);
  // no Acquire/AcquireRelease bit, which triggers the error.
  const std::string text = R"(
OpCapability Shader
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
OpMemoryModel Logical VulkanKHR
OpEntryPoint Fragment %1 "func"
OpExecutionMode %1 OriginUpperLeft
%2 = OpTypeVoid
%3 = OpTypeInt 32 0
%semantics = OpConstant %3 16640
%5 = OpTypeFunction %2
%workgroup = OpConstant %3 2
%ptr = OpTypePointer Workgroup %3
%var = OpVariable %ptr Workgroup
%1 = OpFunction %2 None %5
%7 = OpLabel
%ld = OpAtomicLoad %3 %var %workgroup %semantics
OpReturn
OpFunctionEnd
)";
  CompileSuccessfully(text, SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicLoad: MakeVisibleKHR Memory Semantics also requires "
                "either Acquire or AcquireRelease Memory Semantics"));
}
// MakeAvailableKHR requires at least one storage-class semantics bit to say
// which memory is being made available; here none is set.
TEST_F(ValidateAtomics, MakeAvailableKHRRequiresStorageSemantics) {
  // %semantics = 8196 = MakeAvailableKHR (0x2000) | Release (0x4); no
  // storage-class bits (e.g. WorkgroupMemory), which triggers the error.
  const std::string text = R"(
OpCapability Shader
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
OpMemoryModel Logical VulkanKHR
OpEntryPoint Fragment %1 "func"
OpExecutionMode %1 OriginUpperLeft
%2 = OpTypeVoid
%3 = OpTypeInt 32 0
%semantics = OpConstant %3 8196
%5 = OpTypeFunction %2
%workgroup = OpConstant %3 2
%ptr = OpTypePointer Workgroup %3
%var = OpVariable %ptr Workgroup
%1 = OpFunction %2 None %5
%7 = OpLabel
OpAtomicStore %var %workgroup %semantics %workgroup
OpReturn
OpFunctionEnd
)";
  CompileSuccessfully(text, SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr(
          "AtomicStore: expected Memory Semantics to include a storage class"));
}
// MakeVisibleKHR requires at least one storage-class semantics bit to say
// which memory is being made visible; here none is set.
TEST_F(ValidateAtomics, MakeVisibleKHRRequiresStorageSemantics) {
  // %semantics = 16386 = MakeVisibleKHR (0x4000) | Acquire (0x2); no
  // storage-class bits, which triggers the error.
  const std::string text = R"(
OpCapability Shader
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
OpMemoryModel Logical VulkanKHR
OpEntryPoint Fragment %1 "func"
OpExecutionMode %1 OriginUpperLeft
%2 = OpTypeVoid
%3 = OpTypeInt 32 0
%semantics = OpConstant %3 16386
%5 = OpTypeFunction %2
%workgroup = OpConstant %3 2
%ptr = OpTypePointer Workgroup %3
%var = OpVariable %ptr Workgroup
%1 = OpFunction %2 None %5
%7 = OpLabel
%ld = OpAtomicLoad %3 %var %workgroup %semantics
OpReturn
OpFunctionEnd
)";
  CompileSuccessfully(text, SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr(
          "AtomicLoad: expected Memory Semantics to include a storage class"));
}
// QueueFamilyKHR memory scope is accepted when the module declares the
// VulkanMemoryModelKHR capability and uses the VulkanKHR memory model.
TEST_F(ValidateAtomics, VulkanMemoryModelAllowsQueueFamilyKHR) {
  const std::string body = R"(
%val = OpAtomicAnd %u32 %u32_var %queuefamily %relaxed %u32_1
)";
  const std::string extra = R"(
OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
  // GenerateShaderComputeCode supplies the boilerplate; "VulkanKHR" selects
  // the memory model operand.
  CompileSuccessfully(GenerateShaderComputeCode(body, extra, "", "VulkanKHR"),
                      SPV_ENV_VULKAN_1_1);
  EXPECT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_1));
}
// QueueFamilyKHR memory scope must be rejected when VulkanMemoryModelKHR is
// not declared (GenerateShaderCode defaults to the GLSL450 memory model).
TEST_F(ValidateAtomics, NonVulkanMemoryModelDisallowsQueueFamilyKHR) {
  const std::string body = R"(
%val = OpAtomicAnd %u32 %u32_var %queuefamily %relaxed %u32_1
)";
  CompileSuccessfully(GenerateShaderCode(body), SPV_ENV_VULKAN_1_1);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions(SPV_ENV_VULKAN_1_1));
  // The expected substring includes the disassembled instruction text.
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicAnd: Memory Scope QueueFamilyKHR requires "
                        "capability VulkanMemoryModelKHR\n  %42 = OpAtomicAnd "
                        "%uint %29 %uint_5 %uint_0_1 %uint_1\n"));
}
// With the Shader capability, Memory Semantics operands must be OpConstant;
// an OpSpecConstant semantics id is invalid.
TEST_F(ValidateAtomics, SemanticsSpecConstantShader) {
  const std::string spirv = R"(
OpCapability Shader
OpMemoryModel Logical GLSL450
OpEntryPoint Fragment %func "func"
OpExecutionMode %func OriginUpperLeft
%void = OpTypeVoid
%int = OpTypeInt 32 0
%spec_const = OpSpecConstant %int 0
%workgroup = OpConstant %int 2
%ptr_int_workgroup = OpTypePointer Workgroup %int
%var = OpVariable %ptr_int_workgroup Workgroup
%voidfn = OpTypeFunction %void
%func = OpFunction %void None %voidfn
%entry = OpLabel
%ld = OpAtomicLoad %int %var %workgroup %spec_const
OpReturn
OpFunctionEnd
)";
  CompileSuccessfully(spirv);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("Memory Semantics ids must be OpConstant when Shader "
                        "capability is present"));
}
// Counterpart to SemanticsSpecConstantShader: with the Kernel capability
// (no Shader), an OpSpecConstant Memory Semantics id is allowed.
TEST_F(ValidateAtomics, SemanticsSpecConstantKernel) {
  const std::string spirv = R"(
OpCapability Kernel
OpCapability Linkage
OpMemoryModel Logical OpenCL
%void = OpTypeVoid
%int = OpTypeInt 32 0
%spec_const = OpSpecConstant %int 0
%workgroup = OpConstant %int 2
%ptr_int_workgroup = OpTypePointer Workgroup %int
%var = OpVariable %ptr_int_workgroup Workgroup
%voidfn = OpTypeFunction %void
%func = OpFunction %void None %voidfn
%entry = OpLabel
%ld = OpAtomicLoad %int %var %workgroup %spec_const
OpReturn
OpFunctionEnd
)";
  CompileSuccessfully(spirv);
  EXPECT_EQ(SPV_SUCCESS, ValidateInstructions());
}
// With the Shader capability, Scope operands must be OpConstant; here the
// memory scope of OpAtomicLoad is an OpSpecConstant, which is invalid.
TEST_F(ValidateAtomics, ScopeSpecConstantShader) {
  const std::string spirv = R"(
OpCapability Shader
OpMemoryModel Logical GLSL450
OpEntryPoint Fragment %func "func"
OpExecutionMode %func OriginUpperLeft
%void = OpTypeVoid
%int = OpTypeInt 32 0
%spec_const = OpSpecConstant %int 0
%relaxed = OpConstant %int 0
%ptr_int_workgroup = OpTypePointer Workgroup %int
%var = OpVariable %ptr_int_workgroup Workgroup
%voidfn = OpTypeFunction %void
%func = OpFunction %void None %voidfn
%entry = OpLabel
%ld = OpAtomicLoad %int %var %spec_const %relaxed
OpReturn
OpFunctionEnd
)";
  CompileSuccessfully(spirv);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr(
          "Scope ids must be OpConstant when Shader capability is present"));
}
// Counterpart to ScopeSpecConstantShader: with the Kernel capability
// (no Shader), an OpSpecConstant Scope id is allowed.
TEST_F(ValidateAtomics, ScopeSpecConstantKernel) {
  const std::string spirv = R"(
OpCapability Kernel
OpCapability Linkage
OpMemoryModel Logical OpenCL
%void = OpTypeVoid
%int = OpTypeInt 32 0
%spec_const = OpSpecConstant %int 0
%relaxed = OpConstant %int 0
%ptr_int_workgroup = OpTypePointer Workgroup %int
%var = OpVariable %ptr_int_workgroup Workgroup
%voidfn = OpTypeFunction %void
%func = OpFunction %void None %voidfn
%entry = OpLabel
%ld = OpAtomicLoad %int %var %spec_const %relaxed
OpReturn
OpFunctionEnd
)";
  CompileSuccessfully(spirv);
  EXPECT_EQ(SPV_SUCCESS, ValidateInstructions());
}
// Under the VulkanKHR memory model, Device scope additionally requires the
// VulkanMemoryModelDeviceScopeKHR capability, which this module omits.
TEST_F(ValidateAtomics, VulkanMemoryModelDeviceScopeBad) {
  const std::string body = R"(
%val = OpAtomicAnd %u32 %u32_var %device %relaxed %u32_1
)";
  const std::string extra = R"(OpCapability VulkanMemoryModelKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
                      SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("Use of device scope with VulkanKHR memory model requires the "
                "VulkanMemoryModelDeviceScopeKHR capability"));
}
// Positive counterpart to VulkanMemoryModelDeviceScopeBad: declaring
// VulkanMemoryModelDeviceScopeKHR makes Device scope valid.
TEST_F(ValidateAtomics, VulkanMemoryModelDeviceScopeGood) {
  const std::string body = R"(
%val = OpAtomicAnd %u32 %u32_var %device %relaxed %u32_1
)";
  const std::string extra = R"(OpCapability VulkanMemoryModelKHR
OpCapability VulkanMemoryModelDeviceScopeKHR
OpExtension "SPV_KHR_vulkan_memory_model"
)";
  CompileSuccessfully(GenerateShaderCode(body, extra, "", "VulkanKHR"),
                      SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_UNIVERSAL_1_3));
}
// OpAtomicCompareExchangeWeak was removed in SPIR-V 1.4. A binary assembled
// for 1.3 remains valid even when validated under a 1.4 environment, because
// the check is against the module's own version.
TEST_F(ValidateAtomics, CompareExchangeWeakV13ValV14Good) {
  const std::string body = R"(
%val1 = OpAtomicCompareExchangeWeak %u32 %u32_var %device %relaxed %relaxed %u32_0 %u32_0
)";
  CompileSuccessfully(GenerateKernelCode(body), SPV_ENV_UNIVERSAL_1_3);
  EXPECT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_UNIVERSAL_1_4));
}
// OpAtomicCompareExchangeWeak in a module assembled as SPIR-V 1.4 must be
// rejected with a wrong-version error.
TEST_F(ValidateAtomics, CompareExchangeWeakV14Bad) {
  const std::string body = R"(
%val1 = OpAtomicCompareExchangeWeak %u32 %u32_var %device %relaxed %relaxed %u32_0 %u32_0
)";
  CompileSuccessfully(GenerateKernelCode(body), SPV_ENV_UNIVERSAL_1_4);
  EXPECT_EQ(SPV_ERROR_WRONG_VERSION,
            ValidateInstructions(SPV_ENV_UNIVERSAL_1_4));
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr(
          "AtomicCompareExchangeWeak requires SPIR-V version 1.3 or earlier"));
}
// OpAtomicCompareExchange may set the Volatile bit (0x8000 = 32768) as long
// as the Equal and Unequal semantics agree on it; here both use it.
TEST_F(ValidateAtomics, CompareExchangeVolatileMatch) {
  const std::string spirv = R"(
OpCapability Shader
OpCapability VulkanMemoryModelKHR
OpCapability Linkage
OpExtension "SPV_KHR_vulkan_memory_model"
OpMemoryModel Logical VulkanKHR
%void = OpTypeVoid
%int = OpTypeInt 32 0
%int_0 = OpConstant %int 0
%int_1 = OpConstant %int 1
%workgroup = OpConstant %int 2
%volatile = OpConstant %int 32768
%ptr_wg_int = OpTypePointer Workgroup %int
%wg_var = OpVariable %ptr_wg_int Workgroup
%void_fn = OpTypeFunction %void
%func = OpFunction %void None %void_fn
%entry = OpLabel
%cmp_ex = OpAtomicCompareExchange %int %wg_var %workgroup %volatile %volatile %int_0 %int_1
OpReturn
OpFunctionEnd
)";
  CompileSuccessfully(spirv);
  EXPECT_EQ(SPV_SUCCESS, ValidateInstructions());
}
// OpAtomicCompareExchange with the Volatile bit set on only one of the
// Equal/Unequal Memory Semantics operands must be rejected.
TEST_F(ValidateAtomics, CompareExchangeVolatileMismatch) {
  // %volatile = 32768 (0x8000, Volatile bit); %non_volatile = 0 (Relaxed).
  const std::string spirv = R"(
OpCapability Shader
OpCapability VulkanMemoryModelKHR
OpCapability Linkage
OpExtension "SPV_KHR_vulkan_memory_model"
OpMemoryModel Logical VulkanKHR
%void = OpTypeVoid
%int = OpTypeInt 32 0
%int_0 = OpConstant %int 0
%int_1 = OpConstant %int 1
%workgroup = OpConstant %int 2
%volatile = OpConstant %int 32768
%non_volatile = OpConstant %int 0
%ptr_wg_int = OpTypePointer Workgroup %int
%wg_var = OpVariable %ptr_wg_int Workgroup
%void_fn = OpTypeFunction %void
%func = OpFunction %void None %void_fn
%entry = OpLabel
%cmp_ex = OpAtomicCompareExchange %int %wg_var %workgroup %non_volatile %volatile %int_0 %int_1
OpReturn
OpFunctionEnd
)";
  CompileSuccessfully(spirv);
  EXPECT_EQ(SPV_ERROR_INVALID_ID, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("Volatile mask setting must match for Equal and "
                        "Unequal memory semantics"));
}
// When the semantics operands are spec constants (allowed alongside
// CooperativeMatrixNV), the validator cannot evaluate their final values, so
// a potential Volatile mismatch cannot be diagnosed and validation succeeds.
TEST_F(ValidateAtomics, CompareExchangeVolatileMismatchCooperativeMatrix) {
  const std::string spirv = R"(
OpCapability Shader
OpCapability VulkanMemoryModelKHR
OpCapability Linkage
OpCapability CooperativeMatrixNV
OpExtension "SPV_KHR_vulkan_memory_model"
OpExtension "SPV_NV_cooperative_matrix"
OpMemoryModel Logical VulkanKHR
%void = OpTypeVoid
%int = OpTypeInt 32 0
%int_0 = OpConstant %int 0
%int_1 = OpConstant %int 1
%workgroup = OpConstant %int 2
%volatile = OpSpecConstant %int 32768
%non_volatile = OpSpecConstant %int 32768
%ptr_wg_int = OpTypePointer Workgroup %int
%wg_var = OpVariable %ptr_wg_int Workgroup
%void_fn = OpTypeFunction %void
%func = OpFunction %void None %void_fn
%entry = OpLabel
%cmp_ex = OpAtomicCompareExchange %int %wg_var %workgroup %volatile %non_volatile %int_0 %int_1
OpReturn
OpFunctionEnd
)";
  // This is ok because we cannot evaluate the spec constant defaults.
  CompileSuccessfully(spirv);
  EXPECT_EQ(SPV_SUCCESS, ValidateInstructions());
}
// The Volatile Memory Semantics bit (0x8000 = 32768) requires the
// VulkanMemoryModelKHR capability; this module uses GLSL450 without it.
TEST_F(ValidateAtomics, VolatileRequiresVulkanMemoryModel) {
  const std::string spirv = R"(
OpCapability Shader
OpCapability Linkage
OpMemoryModel Logical GLSL450
%void = OpTypeVoid
%int = OpTypeInt 32 0
%int_0 = OpConstant %int 0
%int_1 = OpConstant %int 1
%workgroup = OpConstant %int 2
%volatile = OpConstant %int 32768
%ptr_wg_int = OpTypePointer Workgroup %int
%wg_var = OpVariable %ptr_wg_int Workgroup
%void_fn = OpTypeFunction %void
%func = OpFunction %void None %void_fn
%entry = OpLabel
%ld = OpAtomicLoad %int %wg_var %workgroup %volatile
OpReturn
OpFunctionEnd
)";
  CompileSuccessfully(spirv);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("Memory Semantics Volatile requires capability "
                        "VulkanMemoryModelKHR"));
}
// With CooperativeMatrixNV present, Memory Semantics must be a constant
// instruction; an OpUndef semantics operand must be rejected.
TEST_F(ValidateAtomics, CooperativeMatrixSemanticsMustBeConstant) {
  const std::string spirv = R"(
OpCapability Shader
OpCapability Linkage
OpCapability CooperativeMatrixNV
OpExtension "SPV_NV_cooperative_matrix"
OpMemoryModel Logical GLSL450
%void = OpTypeVoid
%int = OpTypeInt 32 0
%int_0 = OpConstant %int 0
%int_1 = OpConstant %int 1
%workgroup = OpConstant %int 2
%undef = OpUndef %int
%ptr_wg_int = OpTypePointer Workgroup %int
%wg_var = OpVariable %ptr_wg_int Workgroup
%void_fn = OpTypeFunction %void
%func = OpFunction %void None %void_fn
%entry = OpLabel
%ld = OpAtomicLoad %int %wg_var %workgroup %undef
OpReturn
OpFunctionEnd
)";
  CompileSuccessfully(spirv);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("Memory Semantics must be a constant instruction when "
                        "CooperativeMatrixNV capability is present"));
}
// Regression-style case (fuzzer-like ids): OpAtomicIIncrement whose Pointer
// operand (%7, a pointer to a struct) does not point to a value of the
// declared Result Type (%int) must be rejected.
TEST_F(ValidateAtomics, IIncrementBadPointerDataType) {
  const std::string spirv = R"(
OpCapability Shader
OpMemoryModel Logical GLSL450
%uint = OpTypeInt 32 0
%_ptr_Input_uint = OpTypePointer Input %uint
%v3uint = OpTypeVector %uint 3
%_ptr_Input_v3uint = OpTypePointer Input %v3uint
%void = OpTypeVoid
%16 = OpTypeFunction %void
%uint_538976288 = OpConstant %uint 538976288
%int = OpTypeInt 32 1
%_runtimearr_int = OpTypeRuntimeArray %int
%_struct_5 = OpTypeStruct %_runtimearr_int
%_ptr_Uniform__struct_5 = OpTypePointer Uniform %_struct_5
%3 = OpVariable %_ptr_Input_v3uint Input
%7 = OpVariable %_ptr_Uniform__struct_5 Uniform
%8224 = OpFunction %void None %16
%65312 = OpLabel
%25 = OpAccessChain %_ptr_Input_uint %3 %uint_538976288
%26 = OpLoad %uint %25
%2097184 = OpAtomicIIncrement %int %7 %uint_538976288 %26
OpUnreachable
OpFunctionEnd
)";
  CompileSuccessfully(spirv);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicIIncrement: expected Pointer to point to a "
                        "value of type Result Type"));
}
// SPV_NV_shader_atomic_fp16_vector permits 2- and 4-component f16 vectors as
// operands of FMin/FMax/FAdd/Exchange atomics; all of these should validate.
TEST_F(ValidateAtomics, AtomicFloat16VectorSuccess) {
  // Types, constants and Workgroup variables for f16vec2 / f16vec4.
  const std::string definitions = R"(
%f16 = OpTypeFloat 16
%f16vec2 = OpTypeVector %f16 2
%f16vec4 = OpTypeVector %f16 4
%f16_1 = OpConstant %f16 1
%f16vec2_1 = OpConstantComposite %f16vec2 %f16_1 %f16_1
%f16vec4_1 = OpConstantComposite %f16vec4 %f16_1 %f16_1 %f16_1 %f16_1
%f16vec2_ptr = OpTypePointer Workgroup %f16vec2
%f16vec4_ptr = OpTypePointer Workgroup %f16vec4
%f16vec2_var = OpVariable %f16vec2_ptr Workgroup
%f16vec4_var = OpVariable %f16vec4_ptr Workgroup
)";
  const std::string body = R"(
%val3 = OpAtomicFMinEXT %f16vec2 %f16vec2_var %device %relaxed %f16vec2_1
%val4 = OpAtomicFMaxEXT %f16vec2 %f16vec2_var %device %relaxed %f16vec2_1
%val8 = OpAtomicFAddEXT %f16vec2 %f16vec2_var %device %relaxed %f16vec2_1
%val9 = OpAtomicExchange %f16vec2 %f16vec2_var %device %relaxed %f16vec2_1
%val11 = OpAtomicFMinEXT %f16vec4 %f16vec4_var %device %relaxed %f16vec4_1
%val12 = OpAtomicFMaxEXT %f16vec4 %f16vec4_var %device %relaxed %f16vec4_1
%val18 = OpAtomicFAddEXT %f16vec4 %f16vec4_var %device %relaxed %f16vec4_1
%val19 = OpAtomicExchange %f16vec4 %f16vec4_var %device %relaxed %f16vec4_1
)";
  CompileSuccessfully(GenerateShaderComputeCode(
                          body,
                          "OpCapability Float16\n"
                          "OpCapability AtomicFloat16VectorNV\n"
                          "OpExtension \"SPV_NV_shader_atomic_fp16_vector\"\n",
                          definitions),
                      SPV_ENV_VULKAN_1_0);
  ASSERT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_VULKAN_1_0));
}
// Shared f16vec3 definitions for the AtomicFloat16Vector3* failure tests
// below: 3-component f16 vectors are NOT supported by
// SPV_NV_shader_atomic_fp16_vector (only 2 and 4 components are).
static constexpr char Float16Vector3Defs[] = R"(
%f16 = OpTypeFloat 16
%f16vec3 = OpTypeVector %f16 3
%f16_1 = OpConstant %f16 1
%f16vec3_1 = OpConstantComposite %f16vec3 %f16_1 %f16_1 %f16_1
%f16vec3_ptr = OpTypePointer Workgroup %f16vec3
%f16vec3_var = OpVariable %f16vec3_ptr Workgroup
)";
// OpAtomicFMinEXT on a 3-component f16 vector must be rejected even with
// AtomicFloat16VectorNV (only 2- and 4-component vectors are allowed).
TEST_F(ValidateAtomics, AtomicFloat16Vector3MinFail) {
  const std::string definitions = Float16Vector3Defs;
  const std::string body = R"(
%val11 = OpAtomicFMinEXT %f16vec3 %f16vec3_var %device %relaxed %f16vec3_1
)";
  CompileSuccessfully(GenerateShaderComputeCode(
                          body,
                          "OpCapability Float16\n"
                          "OpCapability AtomicFloat16VectorNV\n"
                          "OpExtension \"SPV_NV_shader_atomic_fp16_vector\"\n",
                          definitions),
                      SPV_ENV_VULKAN_1_0);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicFMinEXT: expected Result Type to be float scalar type"));
}
// OpAtomicFMaxEXT on a 3-component f16 vector must be rejected even with
// AtomicFloat16VectorNV (only 2- and 4-component vectors are allowed).
TEST_F(ValidateAtomics, AtomicFloat16Vector3MaxFail) {
  const std::string definitions = Float16Vector3Defs;
  const std::string body = R"(
%val12 = OpAtomicFMaxEXT %f16vec3 %f16vec3_var %device %relaxed %f16vec3_1
)";
  CompileSuccessfully(GenerateShaderComputeCode(
                          body,
                          "OpCapability Float16\n"
                          "OpCapability AtomicFloat16VectorNV\n"
                          "OpExtension \"SPV_NV_shader_atomic_fp16_vector\"\n",
                          definitions),
                      SPV_ENV_VULKAN_1_0);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicFMaxEXT: expected Result Type to be float scalar type"));
}
// OpAtomicFAddEXT on a 3-component f16 vector must be rejected even with
// AtomicFloat16VectorNV (only 2- and 4-component vectors are allowed).
TEST_F(ValidateAtomics, AtomicFloat16Vector3AddFail) {
  const std::string definitions = Float16Vector3Defs;
  const std::string body = R"(
%val18 = OpAtomicFAddEXT %f16vec3 %f16vec3_var %device %relaxed %f16vec3_1
)";
  CompileSuccessfully(GenerateShaderComputeCode(
                          body,
                          "OpCapability Float16\n"
                          "OpCapability AtomicFloat16VectorNV\n"
                          "OpExtension \"SPV_NV_shader_atomic_fp16_vector\"\n",
                          definitions),
                      SPV_ENV_VULKAN_1_0);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr("AtomicFAddEXT: expected Result Type to be float scalar type"));
}
// OpAtomicExchange on a 3-component f16 vector must be rejected even with
// AtomicFloat16VectorNV (only 2- and 4-component vectors are allowed).
TEST_F(ValidateAtomics, AtomicFloat16Vector3ExchangeFail) {
  const std::string definitions = Float16Vector3Defs;
  const std::string body = R"(
%val19 = OpAtomicExchange %f16vec3 %f16vec3_var %device %relaxed %f16vec3_1
)";
  CompileSuccessfully(GenerateShaderComputeCode(
                          body,
                          "OpCapability Float16\n"
                          "OpCapability AtomicFloat16VectorNV\n"
                          "OpExtension \"SPV_NV_shader_atomic_fp16_vector\"\n",
                          definitions),
                      SPV_ENV_VULKAN_1_0);
  EXPECT_EQ(SPV_ERROR_INVALID_DATA, ValidateInstructions());
  EXPECT_THAT(getDiagnosticString(),
              HasSubstr("AtomicExchange: expected Result Type to be integer or "
                        "float scalar type"));
}
// OpAtomicLoad through an untyped pointer (SPV_KHR_untyped_pointers) is
// valid; the loaded type comes from the Result Type instead of the pointer.
TEST_F(ValidateAtomics, AtomicLoadUntypedPointer) {
  const std::string spirv = R"(
OpCapability Shader
OpCapability UntypedPointersKHR
OpCapability WorkgroupMemoryExplicitLayoutKHR
OpExtension "SPV_KHR_workgroup_memory_explicit_layout"
OpExtension "SPV_KHR_untyped_pointers"
OpMemoryModel Logical GLSL450
OpEntryPoint GLCompute %main "main" %var
OpDecorate %struct Block
OpMemberDecorate %struct 0 Offset 0
%void = OpTypeVoid
%int = OpTypeInt 32 0
%int_0 = OpConstant %int 0
%int_1 = OpConstant %int 1
%struct = OpTypeStruct %int
%ptr = OpTypeUntypedPointerKHR Workgroup
%var = OpUntypedVariableKHR %ptr Workgroup %struct
%void_fn = OpTypeFunction %void
%main = OpFunction %void None %void_fn
%entry = OpLabel
%load = OpAtomicLoad %int %var %int_1 %int_0
OpReturn
OpFunctionEnd
)";
  CompileSuccessfully(spirv, SPV_ENV_UNIVERSAL_1_4);
  EXPECT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_UNIVERSAL_1_4));
}
// OpAtomicStore through an untyped pointer (SPV_KHR_untyped_pointers) is
// valid; the stored type comes from the Value operand instead of the pointer.
TEST_F(ValidateAtomics, AtomicStoreUntypedPointer) {
  const std::string spirv = R"(
OpCapability Shader
OpCapability UntypedPointersKHR
OpCapability WorkgroupMemoryExplicitLayoutKHR
OpExtension "SPV_KHR_workgroup_memory_explicit_layout"
OpExtension "SPV_KHR_untyped_pointers"
OpMemoryModel Logical GLSL450
OpEntryPoint GLCompute %main "main" %var
OpDecorate %struct Block
OpMemberDecorate %struct 0 Offset 0
%void = OpTypeVoid
%int = OpTypeInt 32 0
%int_0 = OpConstant %int 0
%int_1 = OpConstant %int 1
%struct = OpTypeStruct %int
%ptr = OpTypeUntypedPointerKHR Workgroup
%var = OpUntypedVariableKHR %ptr Workgroup %struct
%void_fn = OpTypeFunction %void
%main = OpFunction %void None %void_fn
%entry = OpLabel
OpAtomicStore %var %int_1 %int_0 %int_0
OpReturn
OpFunctionEnd
)";
  CompileSuccessfully(spirv, SPV_ENV_UNIVERSAL_1_4);
  EXPECT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_UNIVERSAL_1_4));
}
// OpAtomicExchange through an untyped pointer (SPV_KHR_untyped_pointers)
// is valid; the exchanged type comes from the Result Type.
TEST_F(ValidateAtomics, AtomicExchangeUntypedPointer) {
  const std::string spirv = R"(
OpCapability Shader
OpCapability UntypedPointersKHR
OpCapability WorkgroupMemoryExplicitLayoutKHR
OpExtension "SPV_KHR_workgroup_memory_explicit_layout"
OpExtension "SPV_KHR_untyped_pointers"
OpMemoryModel Logical GLSL450
OpEntryPoint GLCompute %main "main" %var
OpDecorate %struct Block
OpMemberDecorate %struct 0 Offset 0
%void = OpTypeVoid
%int = OpTypeInt 32 0
%int_0 = OpConstant %int 0
%int_1 = OpConstant %int 1
%struct = OpTypeStruct %int
%ptr = OpTypeUntypedPointerKHR Workgroup
%var = OpUntypedVariableKHR %ptr Workgroup %struct
%void_fn = OpTypeFunction %void
%main = OpFunction %void None %void_fn
%entry = OpLabel
%ex = OpAtomicExchange %int %var %int_1 %int_0 %int_0
OpReturn
OpFunctionEnd
)";
  CompileSuccessfully(spirv, SPV_ENV_UNIVERSAL_1_4);
  EXPECT_EQ(SPV_SUCCESS, ValidateInstructions(SPV_ENV_UNIVERSAL_1_4));
}
// Atomic flag instructions require a typed pointer; OpAtomicFlagClear on an
// untyped pointer must be rejected.
TEST_F(ValidateAtomics, AtomicFlagClearUntypedPointer) {
  const std::string spirv = R"(
OpCapability Kernel
OpCapability Linkage
OpCapability UntypedPointersKHR
OpExtension "SPV_KHR_untyped_pointers"
OpMemoryModel Logical OpenCL
%void = OpTypeVoid
%int = OpTypeInt 32 0
%int_0 = OpConstant %int 0
%int_1 = OpConstant %int 1
%ptr = OpTypeUntypedPointerKHR Workgroup
%var = OpUntypedVariableKHR %ptr Workgroup %int
%void_fn = OpTypeFunction %void
%main = OpFunction %void None %void_fn
%entry = OpLabel
OpAtomicFlagClear %var %int_1 %int_0
OpReturn
OpFunctionEnd
)";
  CompileSuccessfully(spirv);
  EXPECT_EQ(SPV_ERROR_INVALID_ID, ValidateInstructions());
  EXPECT_THAT(
      getDiagnosticString(),
      HasSubstr(
          "Untyped pointers are not supported by atomic flag instructions"));
}
  2564. } // namespace
  2565. } // namespace val
  2566. } // namespace spvtools