inst_bindless_check_pass.cpp

// Copyright (c) 2018 The Khronos Group Inc.
// Copyright (c) 2018 Valve Corporation
// Copyright (c) 2018 LunarG Inc.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
//     http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

#include "inst_bindless_check_pass.h"

namespace {

// Input Operand Indices
static const int kSpvImageSampleImageIdInIdx = 0;
static const int kSpvSampledImageImageIdInIdx = 0;
static const int kSpvSampledImageSamplerIdInIdx = 1;
static const int kSpvImageSampledImageIdInIdx = 0;
static const int kSpvCopyObjectOperandIdInIdx = 0;
static const int kSpvLoadPtrIdInIdx = 0;
static const int kSpvAccessChainBaseIdInIdx = 0;
static const int kSpvAccessChainIndex0IdInIdx = 1;
static const int kSpvTypeArrayTypeIdInIdx = 0;
static const int kSpvTypeArrayLengthIdInIdx = 1;
static const int kSpvConstantValueInIdx = 0;
static const int kSpvVariableStorageClassInIdx = 0;
static const int kSpvTypePtrTypeIdInIdx = 1;
static const int kSpvTypeImageDim = 1;
static const int kSpvTypeImageDepth = 2;
static const int kSpvTypeImageArrayed = 3;
static const int kSpvTypeImageMS = 4;
static const int kSpvTypeImageSampled = 5;

}  // anonymous namespace

namespace spvtools {
namespace opt {

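// Generate code to read the length of the descriptor binding for |var_id|
// from the debug input buffer, indexed by the variable's descriptor set and
// binding number.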
uint32_t InstBindlessCheckPass::GenDebugReadLength(
    uint32_t var_id, InstructionBuilder* builder) {
  uint32_t desc_set_idx =
      var2desc_set_[var_id] + kDebugInputBindlessOffsetLengths;
  uint32_t desc_set_idx_id = builder->GetUintConstantId(desc_set_idx);
  uint32_t binding_idx_id = builder->GetUintConstantId(var2binding_[var_id]);
  return GenDebugDirectRead({desc_set_idx_id, binding_idx_id}, builder);
}

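// Generate code to read the initialization/length word recorded in the debug
// input buffer for descriptor |desc_idx_id| of |var_id|'s binding.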
uint32_t InstBindlessCheckPass::GenDebugReadInit(uint32_t var_id,
                                                 uint32_t desc_idx_id,
                                                 InstructionBuilder* builder) {
  uint32_t binding_idx_id = builder->GetUintConstantId(var2binding_[var_id]);
  uint32_t u_desc_idx_id = GenUintCastCode(desc_idx_id, builder);
  // If desc index checking is not enabled, we know the offset of
  // initialization entries is 1, so we can avoid loading this value and just
  // add 1 to the descriptor set.
  if (!desc_idx_enabled_) {
    uint32_t desc_set_idx_id =
        builder->GetUintConstantId(var2desc_set_[var_id] + 1);
    return GenDebugDirectRead({desc_set_idx_id, binding_idx_id, u_desc_idx_id},
                              builder);
  } else {
    uint32_t desc_set_base_id =
        builder->GetUintConstantId(kDebugInputBindlessInitOffset);
    uint32_t desc_set_idx_id =
        builder->GetUintConstantId(var2desc_set_[var_id]);
    return GenDebugDirectRead(
        {desc_set_base_id, desc_set_idx_id, binding_idx_id, u_desc_idx_id},
        builder);
  }
}

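// Recursively clone the chain of instructions (OpLoad, OpSampledImage,
// OpImage, OpCopyObject) that produced |old_image_id| so the valid branch
// can reload the descriptor. Decorations and the debug offset are copied to
// the clones.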
uint32_t InstBindlessCheckPass::CloneOriginalImage(
    uint32_t old_image_id, InstructionBuilder* builder) {
  Instruction* new_image_inst;
  Instruction* old_image_inst = get_def_use_mgr()->GetDef(old_image_id);
  if (old_image_inst->opcode() == SpvOpLoad) {
    new_image_inst = builder->AddLoad(
        old_image_inst->type_id(),
        old_image_inst->GetSingleWordInOperand(kSpvLoadPtrIdInIdx));
  } else if (old_image_inst->opcode() == SpvOp::SpvOpSampledImage) {
    uint32_t clone_id = CloneOriginalImage(
        old_image_inst->GetSingleWordInOperand(kSpvSampledImageImageIdInIdx),
        builder);
    new_image_inst = builder->AddBinaryOp(
        old_image_inst->type_id(), SpvOpSampledImage, clone_id,
        old_image_inst->GetSingleWordInOperand(kSpvSampledImageSamplerIdInIdx));
  } else if (old_image_inst->opcode() == SpvOp::SpvOpImage) {
    uint32_t clone_id = CloneOriginalImage(
        old_image_inst->GetSingleWordInOperand(kSpvImageSampledImageIdInIdx),
        builder);
    new_image_inst =
        builder->AddUnaryOp(old_image_inst->type_id(), SpvOpImage, clone_id);
  } else {
    assert(old_image_inst->opcode() == SpvOp::SpvOpCopyObject &&
           "expecting OpCopyObject");
    uint32_t clone_id = CloneOriginalImage(
        old_image_inst->GetSingleWordInOperand(kSpvCopyObjectOperandIdInIdx),
        builder);
    // Since we are cloning, no need to create new copy
    new_image_inst = get_def_use_mgr()->GetDef(clone_id);
  }
  uid2offset_[new_image_inst->unique_id()] =
      uid2offset_[old_image_inst->unique_id()];
  uint32_t new_image_id = new_image_inst->result_id();
  get_decoration_mgr()->CloneDecorations(old_image_id, new_image_id);
  return new_image_id;
}

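// Clone the instruction being instrumented into the valid branch,
// substituting the newly cloned image if the reference is image-based.
// Returns the new result id, or 0 if the reference has no result.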
uint32_t InstBindlessCheckPass::CloneOriginalReference(
    RefAnalysis* ref, InstructionBuilder* builder) {
  // If original is image based, start by cloning descriptor load
  uint32_t new_image_id = 0;
  if (ref->desc_load_id != 0) {
    uint32_t old_image_id =
        ref->ref_inst->GetSingleWordInOperand(kSpvImageSampleImageIdInIdx);
    new_image_id = CloneOriginalImage(old_image_id, builder);
  }
  // Clone original reference
  std::unique_ptr<Instruction> new_ref_inst(ref->ref_inst->Clone(context()));
  uint32_t ref_result_id = ref->ref_inst->result_id();
  uint32_t new_ref_id = 0;
  if (ref_result_id != 0) {
    new_ref_id = TakeNextId();
    new_ref_inst->SetResultId(new_ref_id);
  }
  // Update new ref with new image if created
  if (new_image_id != 0)
    new_ref_inst->SetInOperand(kSpvImageSampleImageIdInIdx, {new_image_id});
  // Register new reference and add to new block
  Instruction* added_inst = builder->AddInstruction(std::move(new_ref_inst));
  uid2offset_[added_inst->unique_id()] =
      uid2offset_[ref->ref_inst->unique_id()];
  if (new_ref_id != 0)
    get_decoration_mgr()->CloneDecorations(ref_result_id, new_ref_id);
  return new_ref_id;
}

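// Return the image operand id if |inst| is an image access or query
// instruction; otherwise return 0.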
uint32_t InstBindlessCheckPass::GetImageId(Instruction* inst) {
  switch (inst->opcode()) {
    case SpvOp::SpvOpImageSampleImplicitLod:
    case SpvOp::SpvOpImageSampleExplicitLod:
    case SpvOp::SpvOpImageSampleDrefImplicitLod:
    case SpvOp::SpvOpImageSampleDrefExplicitLod:
    case SpvOp::SpvOpImageSampleProjImplicitLod:
    case SpvOp::SpvOpImageSampleProjExplicitLod:
    case SpvOp::SpvOpImageSampleProjDrefImplicitLod:
    case SpvOp::SpvOpImageSampleProjDrefExplicitLod:
    case SpvOp::SpvOpImageGather:
    case SpvOp::SpvOpImageDrefGather:
    case SpvOp::SpvOpImageQueryLod:
    case SpvOp::SpvOpImageSparseSampleImplicitLod:
    case SpvOp::SpvOpImageSparseSampleExplicitLod:
    case SpvOp::SpvOpImageSparseSampleDrefImplicitLod:
    case SpvOp::SpvOpImageSparseSampleDrefExplicitLod:
    case SpvOp::SpvOpImageSparseSampleProjImplicitLod:
    case SpvOp::SpvOpImageSparseSampleProjExplicitLod:
    case SpvOp::SpvOpImageSparseSampleProjDrefImplicitLod:
    case SpvOp::SpvOpImageSparseSampleProjDrefExplicitLod:
    case SpvOp::SpvOpImageSparseGather:
    case SpvOp::SpvOpImageSparseDrefGather:
    case SpvOp::SpvOpImageFetch:
    case SpvOp::SpvOpImageRead:
    case SpvOp::SpvOpImageQueryFormat:
    case SpvOp::SpvOpImageQueryOrder:
    case SpvOp::SpvOpImageQuerySizeLod:
    case SpvOp::SpvOpImageQuerySize:
    case SpvOp::SpvOpImageQueryLevels:
    case SpvOp::SpvOpImageQuerySamples:
    case SpvOp::SpvOpImageSparseFetch:
    case SpvOp::SpvOpImageSparseRead:
    case SpvOp::SpvOpImageWrite:
      return inst->GetSingleWordInOperand(kSpvImageSampleImageIdInIdx);
    default:
      break;
  }
  return 0;
}

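// Return the instruction defining the type pointed to by |ptr_inst|'s
// pointer type.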
Instruction* InstBindlessCheckPass::GetPointeeTypeInst(Instruction* ptr_inst) {
  uint32_t pte_ty_id = GetPointeeTypeId(ptr_inst);
  return get_def_use_mgr()->GetDef(pte_ty_id);
}

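// Analyze descriptor reference |ref_inst| and fill |ref| with its components:
// pointer, variable, descriptor index, storage class and, for image-based
// references, the descriptor load. Returns true if |ref_inst| is a reference
// through a descriptor that this pass knows how to instrument.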
bool InstBindlessCheckPass::AnalyzeDescriptorReference(Instruction* ref_inst,
                                                       RefAnalysis* ref) {
  ref->ref_inst = ref_inst;
  if (ref_inst->opcode() == SpvOpLoad || ref_inst->opcode() == SpvOpStore) {
    ref->desc_load_id = 0;
    ref->ptr_id = ref_inst->GetSingleWordInOperand(kSpvLoadPtrIdInIdx);
    Instruction* ptr_inst = get_def_use_mgr()->GetDef(ref->ptr_id);
    if (ptr_inst->opcode() != SpvOp::SpvOpAccessChain) return false;
    ref->var_id = ptr_inst->GetSingleWordInOperand(kSpvAccessChainBaseIdInIdx);
    Instruction* var_inst = get_def_use_mgr()->GetDef(ref->var_id);
    if (var_inst->opcode() != SpvOp::SpvOpVariable) return false;
    uint32_t storage_class =
        var_inst->GetSingleWordInOperand(kSpvVariableStorageClassInIdx);
    switch (storage_class) {
      case SpvStorageClassUniform:
      case SpvStorageClassStorageBuffer:
        break;
      default:
        return false;
        break;
    }
    // Check for deprecated storage block form
    if (storage_class == SpvStorageClassUniform) {
      uint32_t var_ty_id = var_inst->type_id();
      Instruction* var_ty_inst = get_def_use_mgr()->GetDef(var_ty_id);
      uint32_t ptr_ty_id =
          var_ty_inst->GetSingleWordInOperand(kSpvTypePtrTypeIdInIdx);
      Instruction* ptr_ty_inst = get_def_use_mgr()->GetDef(ptr_ty_id);
      SpvOp ptr_ty_op = ptr_ty_inst->opcode();
      uint32_t block_ty_id =
          (ptr_ty_op == SpvOpTypeArray || ptr_ty_op == SpvOpTypeRuntimeArray)
              ? ptr_ty_inst->GetSingleWordInOperand(kSpvTypeArrayTypeIdInIdx)
              : ptr_ty_id;
      assert(get_def_use_mgr()->GetDef(block_ty_id)->opcode() ==
                 SpvOpTypeStruct &&
             "unexpected block type");
      bool block_found = get_decoration_mgr()->FindDecoration(
          block_ty_id, SpvDecorationBlock,
          [](const Instruction&) { return true; });
      if (!block_found) {
        // If block decoration not found, verify deprecated form of SSBO
        bool buffer_block_found = get_decoration_mgr()->FindDecoration(
            block_ty_id, SpvDecorationBufferBlock,
            [](const Instruction&) { return true; });
        USE_ASSERT(buffer_block_found && "block decoration not found");
        storage_class = SpvStorageClassStorageBuffer;
      }
    }
    ref->strg_class = storage_class;
    Instruction* desc_type_inst = GetPointeeTypeInst(var_inst);
    switch (desc_type_inst->opcode()) {
      case SpvOpTypeArray:
      case SpvOpTypeRuntimeArray:
        // A load through a descriptor array will have at least 3 operands. We
        // do not want to instrument loads of descriptors here which are part
        // of an image-based reference.
        if (ptr_inst->NumInOperands() < 3) return false;
        ref->desc_idx_id =
            ptr_inst->GetSingleWordInOperand(kSpvAccessChainIndex0IdInIdx);
        break;
      default:
        ref->desc_idx_id = 0;
        break;
    }
    return true;
  }
  // Reference is not load or store. If not an image-based reference, return.
  ref->image_id = GetImageId(ref_inst);
  if (ref->image_id == 0) return false;
  // Search for descriptor load
  uint32_t desc_load_id = ref->image_id;
  Instruction* desc_load_inst;
  for (;;) {
    desc_load_inst = get_def_use_mgr()->GetDef(desc_load_id);
    if (desc_load_inst->opcode() == SpvOp::SpvOpSampledImage)
      desc_load_id =
          desc_load_inst->GetSingleWordInOperand(kSpvSampledImageImageIdInIdx);
    else if (desc_load_inst->opcode() == SpvOp::SpvOpImage)
      desc_load_id =
          desc_load_inst->GetSingleWordInOperand(kSpvImageSampledImageIdInIdx);
    else if (desc_load_inst->opcode() == SpvOp::SpvOpCopyObject)
      desc_load_id =
          desc_load_inst->GetSingleWordInOperand(kSpvCopyObjectOperandIdInIdx);
    else
      break;
  }
  if (desc_load_inst->opcode() != SpvOp::SpvOpLoad) {
    // TODO(greg-lunarg): Handle additional possibilities?
    return false;
  }
  ref->desc_load_id = desc_load_id;
  ref->ptr_id = desc_load_inst->GetSingleWordInOperand(kSpvLoadPtrIdInIdx);
  Instruction* ptr_inst = get_def_use_mgr()->GetDef(ref->ptr_id);
  if (ptr_inst->opcode() == SpvOp::SpvOpVariable) {
    ref->desc_idx_id = 0;
    ref->var_id = ref->ptr_id;
  } else if (ptr_inst->opcode() == SpvOp::SpvOpAccessChain) {
    if (ptr_inst->NumInOperands() != 2) {
      assert(false && "unexpected bindless index number");
      return false;
    }
    ref->desc_idx_id =
        ptr_inst->GetSingleWordInOperand(kSpvAccessChainIndex0IdInIdx);
    ref->var_id = ptr_inst->GetSingleWordInOperand(kSpvAccessChainBaseIdInIdx);
    Instruction* var_inst = get_def_use_mgr()->GetDef(ref->var_id);
    if (var_inst->opcode() != SpvOpVariable) {
      assert(false && "unexpected bindless base");
      return false;
    }
  } else {
    // TODO(greg-lunarg): Handle additional possibilities?
    return false;
  }
  return true;
}

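// Return the stride recorded by decoration |stride_deco| (e.g. ArrayStride)
// on type |ty_id|.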
uint32_t InstBindlessCheckPass::FindStride(uint32_t ty_id,
                                           uint32_t stride_deco) {
  uint32_t stride = 0xdeadbeef;
  bool found = get_decoration_mgr()->FindDecoration(
      ty_id, stride_deco, [&stride](const Instruction& deco_inst) {
        stride = deco_inst.GetSingleWordInOperand(2u);
        return true;
      });
  USE_ASSERT(found && "stride not found");
  return stride;
}

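// Return the size in bytes of a value of type |ty_id|, honoring the enclosing
// matrix stride and majorness for matrix and vector types.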
uint32_t InstBindlessCheckPass::ByteSize(uint32_t ty_id, uint32_t matrix_stride,
                                         bool col_major, bool in_matrix) {
  analysis::TypeManager* type_mgr = context()->get_type_mgr();
  const analysis::Type* sz_ty = type_mgr->GetType(ty_id);
  if (sz_ty->kind() == analysis::Type::kPointer) {
    // Assuming PhysicalStorageBuffer pointer
    return 8;
  }
  if (sz_ty->kind() == analysis::Type::kMatrix) {
    assert(matrix_stride != 0 && "missing matrix stride");
    const analysis::Matrix* m_ty = sz_ty->AsMatrix();
    if (col_major) {
      return m_ty->element_count() * matrix_stride;
    } else {
      const analysis::Vector* v_ty = m_ty->element_type()->AsVector();
      return v_ty->element_count() * matrix_stride;
    }
  }
  uint32_t size = 1;
  if (sz_ty->kind() == analysis::Type::kVector) {
    const analysis::Vector* v_ty = sz_ty->AsVector();
    size = v_ty->element_count();
    const analysis::Type* comp_ty = v_ty->element_type();
    // If the vector is in a row major matrix, the vector is strided, so
    // return the number of bytes spanned by the vector.
    if (in_matrix && !col_major && matrix_stride > 0) {
      uint32_t comp_ty_id = type_mgr->GetId(comp_ty);
      return (size - 1) * matrix_stride + ByteSize(comp_ty_id, 0, false, false);
    }
    sz_ty = comp_ty;
  }
  switch (sz_ty->kind()) {
    case analysis::Type::kFloat: {
      const analysis::Float* f_ty = sz_ty->AsFloat();
      size *= f_ty->width();
    } break;
    case analysis::Type::kInteger: {
      const analysis::Integer* i_ty = sz_ty->AsInteger();
      size *= i_ty->width();
    } break;
    default: { assert(false && "unexpected type"); } break;
  }
  size /= 8;
  return size;
}

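// Generate code computing the offset of the last byte referenced by |ref|'s
// access chain, walking the buffer type hierarchy and applying array strides,
// matrix strides and struct member offsets.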
uint32_t InstBindlessCheckPass::GenLastByteIdx(RefAnalysis* ref,
                                               InstructionBuilder* builder) {
  // Find outermost buffer type and its access chain index
  Instruction* var_inst = get_def_use_mgr()->GetDef(ref->var_id);
  Instruction* desc_ty_inst = GetPointeeTypeInst(var_inst);
  uint32_t buff_ty_id;
  uint32_t ac_in_idx = 1;
  switch (desc_ty_inst->opcode()) {
    case SpvOpTypeArray:
    case SpvOpTypeRuntimeArray:
      buff_ty_id = desc_ty_inst->GetSingleWordInOperand(0);
      ++ac_in_idx;
      break;
    default:
      assert(desc_ty_inst->opcode() == SpvOpTypeStruct &&
             "unexpected descriptor type");
      buff_ty_id = desc_ty_inst->result_id();
      break;
  }
  // Process remaining access chain indices
  Instruction* ac_inst = get_def_use_mgr()->GetDef(ref->ptr_id);
  uint32_t curr_ty_id = buff_ty_id;
  uint32_t sum_id = 0u;
  uint32_t matrix_stride = 0u;
  bool col_major = false;
  uint32_t matrix_stride_id = 0u;
  bool in_matrix = false;
  while (ac_in_idx < ac_inst->NumInOperands()) {
    uint32_t curr_idx_id = ac_inst->GetSingleWordInOperand(ac_in_idx);
    Instruction* curr_ty_inst = get_def_use_mgr()->GetDef(curr_ty_id);
    uint32_t curr_offset_id = 0;
    switch (curr_ty_inst->opcode()) {
      case SpvOpTypeArray:
      case SpvOpTypeRuntimeArray: {
        // Get array stride and multiply by current index
        uint32_t arr_stride = FindStride(curr_ty_id, SpvDecorationArrayStride);
        uint32_t arr_stride_id = builder->GetUintConstantId(arr_stride);
        uint32_t curr_idx_32b_id = Gen32BitCvtCode(curr_idx_id, builder);
        Instruction* curr_offset_inst = builder->AddBinaryOp(
            GetUintId(), SpvOpIMul, arr_stride_id, curr_idx_32b_id);
        curr_offset_id = curr_offset_inst->result_id();
        // Get element type for next step
        curr_ty_id = curr_ty_inst->GetSingleWordInOperand(0);
      } break;
      case SpvOpTypeMatrix: {
        assert(matrix_stride != 0 && "missing matrix stride");
        matrix_stride_id = builder->GetUintConstantId(matrix_stride);
        uint32_t vec_ty_id = curr_ty_inst->GetSingleWordInOperand(0);
        // If column major, multiply column index by matrix stride, otherwise
        // by vector component size and save matrix stride for vector (row)
        // index
        uint32_t col_stride_id;
        if (col_major) {
          col_stride_id = matrix_stride_id;
        } else {
          Instruction* vec_ty_inst = get_def_use_mgr()->GetDef(vec_ty_id);
          uint32_t comp_ty_id = vec_ty_inst->GetSingleWordInOperand(0u);
          uint32_t col_stride = ByteSize(comp_ty_id, 0u, false, false);
          col_stride_id = builder->GetUintConstantId(col_stride);
        }
        uint32_t curr_idx_32b_id = Gen32BitCvtCode(curr_idx_id, builder);
        Instruction* curr_offset_inst = builder->AddBinaryOp(
            GetUintId(), SpvOpIMul, col_stride_id, curr_idx_32b_id);
        curr_offset_id = curr_offset_inst->result_id();
        // Get element type for next step
        curr_ty_id = vec_ty_id;
        in_matrix = true;
      } break;
      case SpvOpTypeVector: {
        // If inside a row major matrix type, multiply index by matrix stride,
        // else multiply by component size
        uint32_t comp_ty_id = curr_ty_inst->GetSingleWordInOperand(0u);
        uint32_t curr_idx_32b_id = Gen32BitCvtCode(curr_idx_id, builder);
        if (in_matrix && !col_major) {
          Instruction* curr_offset_inst = builder->AddBinaryOp(
              GetUintId(), SpvOpIMul, matrix_stride_id, curr_idx_32b_id);
          curr_offset_id = curr_offset_inst->result_id();
        } else {
          uint32_t comp_ty_sz = ByteSize(comp_ty_id, 0u, false, false);
          uint32_t comp_ty_sz_id = builder->GetUintConstantId(comp_ty_sz);
          Instruction* curr_offset_inst = builder->AddBinaryOp(
              GetUintId(), SpvOpIMul, comp_ty_sz_id, curr_idx_32b_id);
          curr_offset_id = curr_offset_inst->result_id();
        }
        // Get element type for next step
        curr_ty_id = comp_ty_id;
      } break;
      case SpvOpTypeStruct: {
        // Get buffer byte offset for the referenced member
        Instruction* curr_idx_inst = get_def_use_mgr()->GetDef(curr_idx_id);
        assert(curr_idx_inst->opcode() == SpvOpConstant &&
               "unexpected struct index");
        uint32_t member_idx = curr_idx_inst->GetSingleWordInOperand(0);
        uint32_t member_offset = 0xdeadbeef;
        bool found = get_decoration_mgr()->FindDecoration(
            curr_ty_id, SpvDecorationOffset,
            [&member_idx, &member_offset](const Instruction& deco_inst) {
              if (deco_inst.GetSingleWordInOperand(1u) != member_idx)
                return false;
              member_offset = deco_inst.GetSingleWordInOperand(3u);
              return true;
            });
        USE_ASSERT(found && "member offset not found");
        curr_offset_id = builder->GetUintConstantId(member_offset);
        // Look for matrix stride for this member if there is one. The matrix
        // stride is not on the matrix type, but in an OpMemberDecorate on the
        // enclosing struct type at the member index. If none found, reset
        // stride to 0.
        found = get_decoration_mgr()->FindDecoration(
            curr_ty_id, SpvDecorationMatrixStride,
            [&member_idx, &matrix_stride](const Instruction& deco_inst) {
              if (deco_inst.GetSingleWordInOperand(1u) != member_idx)
                return false;
              matrix_stride = deco_inst.GetSingleWordInOperand(3u);
              return true;
            });
        if (!found) matrix_stride = 0;
        // Look for column major decoration
        found = get_decoration_mgr()->FindDecoration(
            curr_ty_id, SpvDecorationColMajor,
            [&member_idx, &col_major](const Instruction& deco_inst) {
              if (deco_inst.GetSingleWordInOperand(1u) != member_idx)
                return false;
              col_major = true;
              return true;
            });
        if (!found) col_major = false;
        // Get element type for next step
        curr_ty_id = curr_ty_inst->GetSingleWordInOperand(member_idx);
      } break;
      default: { assert(false && "unexpected non-composite type"); } break;
    }
    if (sum_id == 0)
      sum_id = curr_offset_id;
    else {
      Instruction* sum_inst =
          builder->AddBinaryOp(GetUintId(), SpvOpIAdd, sum_id, curr_offset_id);
      sum_id = sum_inst->result_id();
    }
    ++ac_in_idx;
  }
  // Add in offset of last byte of referenced object
  uint32_t bsize = ByteSize(curr_ty_id, matrix_stride, col_major, in_matrix);
  uint32_t last = bsize - 1;
  uint32_t last_id = builder->GetUintConstantId(last);
  Instruction* sum_inst =
      builder->AddBinaryOp(GetUintId(), SpvOpIAdd, sum_id, last_id);
  return sum_inst->result_id();
}

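// Generate the guarded form of the reference: branch on |check_id|, execute
// the cloned reference in the valid block, write a debug record in the
// invalid block, and merge with a phi selecting the real result or null.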
void InstBindlessCheckPass::GenCheckCode(
    uint32_t check_id, uint32_t error_id, uint32_t offset_id,
    uint32_t length_id, uint32_t stage_idx, RefAnalysis* ref,
    std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  BasicBlock* back_blk_ptr = &*new_blocks->back();
  InstructionBuilder builder(
      context(), back_blk_ptr,
      IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
  // Gen conditional branch on check_id. Valid branch generates original
  // reference. Invalid generates debug output and zero result (if needed).
  uint32_t merge_blk_id = TakeNextId();
  uint32_t valid_blk_id = TakeNextId();
  uint32_t invalid_blk_id = TakeNextId();
  std::unique_ptr<Instruction> merge_label(NewLabel(merge_blk_id));
  std::unique_ptr<Instruction> valid_label(NewLabel(valid_blk_id));
  std::unique_ptr<Instruction> invalid_label(NewLabel(invalid_blk_id));
  (void)builder.AddConditionalBranch(check_id, valid_blk_id, invalid_blk_id,
                                     merge_blk_id, SpvSelectionControlMaskNone);
  // Gen valid bounds branch
  std::unique_ptr<BasicBlock> new_blk_ptr(
      new BasicBlock(std::move(valid_label)));
  builder.SetInsertPoint(&*new_blk_ptr);
  uint32_t new_ref_id = CloneOriginalReference(ref, &builder);
  (void)builder.AddBranch(merge_blk_id);
  new_blocks->push_back(std::move(new_blk_ptr));
  // Gen invalid block
  new_blk_ptr.reset(new BasicBlock(std::move(invalid_label)));
  builder.SetInsertPoint(&*new_blk_ptr);
  uint32_t u_index_id = GenUintCastCode(ref->desc_idx_id, &builder);
  if (offset_id != 0) {
    // Buffer OOB
    uint32_t u_offset_id = GenUintCastCode(offset_id, &builder);
    uint32_t u_length_id = GenUintCastCode(length_id, &builder);
    GenDebugStreamWrite(uid2offset_[ref->ref_inst->unique_id()], stage_idx,
                        {error_id, u_index_id, u_offset_id, u_length_id},
                        &builder);
  } else if (buffer_bounds_enabled_ || texel_buffer_enabled_) {
    // Uninitialized Descriptor - Return additional unused zero so all error
    // modes will use same debug stream write function
    uint32_t u_length_id = GenUintCastCode(length_id, &builder);
    GenDebugStreamWrite(
        uid2offset_[ref->ref_inst->unique_id()], stage_idx,
        {error_id, u_index_id, u_length_id, builder.GetUintConstantId(0)},
        &builder);
  } else {
    // Uninitialized Descriptor - Normal error return
    uint32_t u_length_id = GenUintCastCode(length_id, &builder);
    GenDebugStreamWrite(uid2offset_[ref->ref_inst->unique_id()], stage_idx,
                        {error_id, u_index_id, u_length_id}, &builder);
  }
  // Remember last invalid block id
  uint32_t last_invalid_blk_id = new_blk_ptr->GetLabelInst()->result_id();
  // Gen zero for invalid reference
  uint32_t ref_type_id = ref->ref_inst->type_id();
  (void)builder.AddBranch(merge_blk_id);
  new_blocks->push_back(std::move(new_blk_ptr));
  // Gen merge block
  new_blk_ptr.reset(new BasicBlock(std::move(merge_label)));
  builder.SetInsertPoint(&*new_blk_ptr);
  // Gen phi of new reference and zero, if necessary, and replace the
  // result id of the original reference with that of the Phi. Kill original
  // reference.
  if (new_ref_id != 0) {
    Instruction* phi_inst = builder.AddPhi(
        ref_type_id, {new_ref_id, valid_blk_id, GetNullId(ref_type_id),
                      last_invalid_blk_id});
    context()->ReplaceAllUsesWith(ref->ref_inst->result_id(),
                                  phi_inst->result_id());
  }
  new_blocks->push_back(std::move(new_blk_ptr));
  context()->KillInst(ref->ref_inst);
}

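// Instrument a reference through an indexed descriptor binding with a runtime
// bounds check of the descriptor index against the binding's length.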
void InstBindlessCheckPass::GenDescIdxCheckCode(
    BasicBlock::iterator ref_inst_itr,
    UptrVectorIterator<BasicBlock> ref_block_itr, uint32_t stage_idx,
    std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  // Look for reference through indexed descriptor. If found, analyze and
  // save components. If not, return.
  RefAnalysis ref;
  if (!AnalyzeDescriptorReference(&*ref_inst_itr, &ref)) return;
  Instruction* ptr_inst = get_def_use_mgr()->GetDef(ref.ptr_id);
  if (ptr_inst->opcode() != SpvOp::SpvOpAccessChain) return;
  // If index and bound are both compile-time constants and index < bound,
  // return without changing anything.
  Instruction* var_inst = get_def_use_mgr()->GetDef(ref.var_id);
  Instruction* desc_type_inst = GetPointeeTypeInst(var_inst);
  uint32_t length_id = 0;
  if (desc_type_inst->opcode() == SpvOpTypeArray) {
    length_id =
        desc_type_inst->GetSingleWordInOperand(kSpvTypeArrayLengthIdInIdx);
    Instruction* index_inst = get_def_use_mgr()->GetDef(ref.desc_idx_id);
    Instruction* length_inst = get_def_use_mgr()->GetDef(length_id);
    if (index_inst->opcode() == SpvOpConstant &&
        length_inst->opcode() == SpvOpConstant &&
        index_inst->GetSingleWordInOperand(kSpvConstantValueInIdx) <
            length_inst->GetSingleWordInOperand(kSpvConstantValueInIdx))
      return;
  } else if (!desc_idx_enabled_ ||
             desc_type_inst->opcode() != SpvOpTypeRuntimeArray) {
    return;
  }
  // Move original block's preceding instructions into first new block
  std::unique_ptr<BasicBlock> new_blk_ptr;
  MovePreludeCode(ref_inst_itr, ref_block_itr, &new_blk_ptr);
  InstructionBuilder builder(
      context(), &*new_blk_ptr,
      IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
  new_blocks->push_back(std::move(new_blk_ptr));
  uint32_t error_id = builder.GetUintConstantId(kInstErrorBindlessBounds);
  // If length id not yet set, the descriptor array is runtime sized, so
  // generate a load of the length from the stage's debug input buffer.
  if (length_id == 0) {
    assert(desc_type_inst->opcode() == SpvOpTypeRuntimeArray &&
           "unexpected bindless type");
    length_id = GenDebugReadLength(ref.var_id, &builder);
  }
  // Generate full runtime bounds test code with true branch
  // being full reference and false branch being debug output and zero
  // for the referenced value.
  uint32_t desc_idx_32b_id = Gen32BitCvtCode(ref.desc_idx_id, &builder);
  uint32_t length_32b_id = Gen32BitCvtCode(length_id, &builder);
  Instruction* ult_inst = builder.AddBinaryOp(GetBoolId(), SpvOpULessThan,
                                              desc_idx_32b_id, length_32b_id);
  ref.desc_idx_id = desc_idx_32b_id;
  GenCheckCode(ult_inst->result_id(), error_id, 0u, length_id, stage_idx, &ref,
               new_blocks);
  // Move original block's remaining code into remainder/merge block and add
  // to new blocks
  BasicBlock* back_blk_ptr = &*new_blocks->back();
  MovePostludeCode(ref_block_itr, back_blk_ptr);
}

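// Instrument a descriptor reference with an initialization check and, where
// enabled and possible, a buffer bounds check against the length recorded in
// the debug input buffer.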
void InstBindlessCheckPass::GenDescInitCheckCode(
    BasicBlock::iterator ref_inst_itr,
    UptrVectorIterator<BasicBlock> ref_block_itr, uint32_t stage_idx,
    std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  // Look for a reference through a descriptor. If none found, return.
  RefAnalysis ref;
  if (!AnalyzeDescriptorReference(&*ref_inst_itr, &ref)) return;
  // Determine if we can only do initialization check
  bool init_check = false;
  if (ref.desc_load_id != 0 || !buffer_bounds_enabled_) {
    init_check = true;
  } else {
    // For now, only do bounds check for non-aggregate types. Otherwise
    // just do descriptor initialization check.
    // TODO(greg-lunarg): Do bounds check for aggregate loads and stores
    Instruction* ref_ptr_inst = get_def_use_mgr()->GetDef(ref.ptr_id);
    Instruction* pte_type_inst = GetPointeeTypeInst(ref_ptr_inst);
    uint32_t pte_type_op = pte_type_inst->opcode();
    if (pte_type_op == SpvOpTypeArray || pte_type_op == SpvOpTypeRuntimeArray ||
        pte_type_op == SpvOpTypeStruct)
      init_check = true;
  }
  // If only doing the initialization check and it is not enabled, return
  if (init_check && !desc_init_enabled_) return;
  // Move original block's preceding instructions into first new block
  std::unique_ptr<BasicBlock> new_blk_ptr;
  MovePreludeCode(ref_inst_itr, ref_block_itr, &new_blk_ptr);
  InstructionBuilder builder(
      context(), &*new_blk_ptr,
      IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
  new_blocks->push_back(std::move(new_blk_ptr));
  // If initialization check, use reference value of zero.
  // Else use the index of the last byte referenced.
  uint32_t ref_id = init_check ? builder.GetUintConstantId(0u)
                               : GenLastByteIdx(&ref, &builder);
  // Read initialization/bounds from debug input buffer. If index id not yet
  // set, binding is a single descriptor, so set index to constant 0.
  if (ref.desc_idx_id == 0) ref.desc_idx_id = builder.GetUintConstantId(0u);
  uint32_t init_id = GenDebugReadInit(ref.var_id, ref.desc_idx_id, &builder);
  // Generate runtime initialization/bounds test code with true branch
  // being full reference and false branch being debug output and zero
  // for the referenced value.
  Instruction* ult_inst =
      builder.AddBinaryOp(GetBoolId(), SpvOpULessThan, ref_id, init_id);
  uint32_t error = init_check ? kInstErrorBindlessUninit
                              : (ref.strg_class == SpvStorageClassUniform
                                     ? kInstErrorBuffOOBUniform
                                     : kInstErrorBuffOOBStorage);
  uint32_t error_id = builder.GetUintConstantId(error);
  GenCheckCode(ult_inst->result_id(), error_id, init_check ? 0 : ref_id,
               init_check ? builder.GetUintConstantId(0u) : init_id, stage_idx,
               &ref, new_blocks);
  // Move original block's remaining code into remainder/merge block and add
  // to new blocks
  BasicBlock* back_blk_ptr = &*new_blocks->back();
  MovePostludeCode(ref_block_itr, back_blk_ptr);
}

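// Instrument texel buffer reads, fetches and writes with a bounds check of
// the coordinate against the result of OpImageQuerySize.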
void InstBindlessCheckPass::GenTexBuffCheckCode(
    BasicBlock::iterator ref_inst_itr,
    UptrVectorIterator<BasicBlock> ref_block_itr, uint32_t stage_idx,
    std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  // Only process OpImageRead, OpImageFetch and OpImageWrite with no optional
  // operands
  Instruction* ref_inst = &*ref_inst_itr;
  SpvOp op = ref_inst->opcode();
  uint32_t num_in_oprnds = ref_inst->NumInOperands();
  if (!((op == SpvOpImageRead && num_in_oprnds == 2) ||
        (op == SpvOpImageFetch && num_in_oprnds == 2) ||
        (op == SpvOpImageWrite && num_in_oprnds == 3)))
    return;
  // Pull components from descriptor reference
  RefAnalysis ref;
  if (!AnalyzeDescriptorReference(ref_inst, &ref)) return;
  // Only process if image is a texel buffer
  Instruction* image_inst = get_def_use_mgr()->GetDef(ref.image_id);
  uint32_t image_ty_id = image_inst->type_id();
  Instruction* image_ty_inst = get_def_use_mgr()->GetDef(image_ty_id);
  if (image_ty_inst->GetSingleWordInOperand(kSpvTypeImageDim) != SpvDimBuffer)
    return;
  if (image_ty_inst->GetSingleWordInOperand(kSpvTypeImageDepth) != 0) return;
  if (image_ty_inst->GetSingleWordInOperand(kSpvTypeImageArrayed) != 0) return;
  if (image_ty_inst->GetSingleWordInOperand(kSpvTypeImageMS) != 0) return;
  // Enable ImageQuery Capability if not yet enabled
  if (!get_feature_mgr()->HasCapability(SpvCapabilityImageQuery)) {
    std::unique_ptr<Instruction> cap_image_query_inst(new Instruction(
        context(), SpvOpCapability, 0, 0,
        std::initializer_list<Operand>{
            {SPV_OPERAND_TYPE_CAPABILITY, {SpvCapabilityImageQuery}}}));
    get_def_use_mgr()->AnalyzeInstDefUse(&*cap_image_query_inst);
    context()->AddCapability(std::move(cap_image_query_inst));
  }
  // Move original block's preceding instructions into first new block
  std::unique_ptr<BasicBlock> new_blk_ptr;
  MovePreludeCode(ref_inst_itr, ref_block_itr, &new_blk_ptr);
  InstructionBuilder builder(
      context(), &*new_blk_ptr,
      IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
  new_blocks->push_back(std::move(new_blk_ptr));
  // Get texel coordinate
  uint32_t coord_id =
      GenUintCastCode(ref_inst->GetSingleWordInOperand(1), &builder);
  // If index id not yet set, binding is a single descriptor, so set index to
  // constant 0.
  if (ref.desc_idx_id == 0) ref.desc_idx_id = builder.GetUintConstantId(0u);
  // Get texel buffer size.
  Instruction* size_inst =
      builder.AddUnaryOp(GetUintId(), SpvOpImageQuerySize, ref.image_id);
  uint32_t size_id = size_inst->result_id();
  // Generate runtime bounds test code with true branch
  // being full reference and false branch being debug output and zero
  // for the referenced value.
  Instruction* ult_inst =
      builder.AddBinaryOp(GetBoolId(), SpvOpULessThan, coord_id, size_id);
  uint32_t error =
      (image_ty_inst->GetSingleWordInOperand(kSpvTypeImageSampled) == 2)
          ? kInstErrorBuffOOBStorageTexel
          : kInstErrorBuffOOBUniformTexel;
  uint32_t error_id = builder.GetUintConstantId(error);
  GenCheckCode(ult_inst->result_id(), error_id, coord_id, size_id, stage_idx,
               &ref, new_blocks);
  // Move original block's remaining code into remainder/merge block and add
  // to new blocks
  BasicBlock* back_blk_ptr = &*new_blocks->back();
  MovePostludeCode(ref_block_itr, back_blk_ptr);
}

void InstBindlessCheckPass::InitializeInstBindlessCheck() {
  // Initialize base class
  InitializeInstrument();
  // If runtime array length support, buffer bounds checking, or texel buffer
  // checking is enabled, create variable mappings. Length support is always
  // enabled if descriptor init check is enabled.
  if (desc_idx_enabled_ || buffer_bounds_enabled_ || texel_buffer_enabled_)
    for (auto& anno : get_module()->annotations())
      if (anno.opcode() == SpvOpDecorate) {
        if (anno.GetSingleWordInOperand(1u) == SpvDecorationDescriptorSet)
          var2desc_set_[anno.GetSingleWordInOperand(0u)] =
              anno.GetSingleWordInOperand(2u);
        else if (anno.GetSingleWordInOperand(1u) == SpvDecorationBinding)
          var2binding_[anno.GetSingleWordInOperand(0u)] =
              anno.GetSingleWordInOperand(2u);
      }
}

Pass::Status InstBindlessCheckPass::ProcessImpl() {
  // Perform bindless bounds check on each entry point function in module
  InstProcessFunction pfn =
      [this](BasicBlock::iterator ref_inst_itr,
             UptrVectorIterator<BasicBlock> ref_block_itr, uint32_t stage_idx,
             std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
        return GenDescIdxCheckCode(ref_inst_itr, ref_block_itr, stage_idx,
                                   new_blocks);
      };
  bool modified = InstProcessEntryPointCallTree(pfn);
  if (desc_init_enabled_ || buffer_bounds_enabled_) {
    // Perform descriptor initialization and/or buffer bounds check on each
    // entry point function in module
    pfn = [this](BasicBlock::iterator ref_inst_itr,
                 UptrVectorIterator<BasicBlock> ref_block_itr,
                 uint32_t stage_idx,
                 std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
      return GenDescInitCheckCode(ref_inst_itr, ref_block_itr, stage_idx,
                                  new_blocks);
    };
    modified |= InstProcessEntryPointCallTree(pfn);
  }
  if (texel_buffer_enabled_) {
    // Perform texel buffer bounds check on each entry point function in
    // module. Generate after descriptor bounds and initialization checks.
    pfn = [this](BasicBlock::iterator ref_inst_itr,
                 UptrVectorIterator<BasicBlock> ref_block_itr,
                 uint32_t stage_idx,
                 std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
      return GenTexBuffCheckCode(ref_inst_itr, ref_block_itr, stage_idx,
                                 new_blocks);
    };
    modified |= InstProcessEntryPointCallTree(pfn);
  }
  return modified ? Status::SuccessWithChange : Status::SuccessWithoutChange;
}

Pass::Status InstBindlessCheckPass::Process() {
  InitializeInstBindlessCheck();
  return ProcessImpl();
}

}  // namespace opt
}  // namespace spvtools