inst_bindless_check_pass.cpp 37 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841
  1. // Copyright (c) 2018 The Khronos Group Inc.
  2. // Copyright (c) 2018 Valve Corporation
  3. // Copyright (c) 2018 LunarG Inc.
  4. //
  5. // Licensed under the Apache License, Version 2.0 (the "License");
  6. // you may not use this file except in compliance with the License.
  7. // You may obtain a copy of the License at
  8. //
  9. // http://www.apache.org/licenses/LICENSE-2.0
  10. //
  11. // Unless required by applicable law or agreed to in writing, software
  12. // distributed under the License is distributed on an "AS IS" BASIS,
  13. // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14. // See the License for the specific language governing permissions and
  15. // limitations under the License.
  16. #include "inst_bindless_check_pass.h"
  17. namespace {
  18. // Input Operand Indices
  19. static const int kSpvImageSampleImageIdInIdx = 0;
  20. static const int kSpvSampledImageImageIdInIdx = 0;
  21. static const int kSpvSampledImageSamplerIdInIdx = 1;
  22. static const int kSpvImageSampledImageIdInIdx = 0;
  23. static const int kSpvCopyObjectOperandIdInIdx = 0;
  24. static const int kSpvLoadPtrIdInIdx = 0;
  25. static const int kSpvAccessChainBaseIdInIdx = 0;
  26. static const int kSpvAccessChainIndex0IdInIdx = 1;
  27. static const int kSpvTypeArrayTypeIdInIdx = 0;
  28. static const int kSpvTypeArrayLengthIdInIdx = 1;
  29. static const int kSpvConstantValueInIdx = 0;
  30. static const int kSpvVariableStorageClassInIdx = 0;
  31. static const int kSpvTypePtrTypeIdInIdx = 1;
  32. static const int kSpvTypeImageDim = 1;
  33. static const int kSpvTypeImageDepth = 2;
  34. static const int kSpvTypeImageArrayed = 3;
  35. static const int kSpvTypeImageMS = 4;
  36. static const int kSpvTypeImageSampled = 5;
  37. } // anonymous namespace
  38. // Avoid unused variable warning/error on Linux
  39. #ifndef NDEBUG
  40. #define USE_ASSERT(x) assert(x)
  41. #else
  42. #define USE_ASSERT(x) ((void)(x))
  43. #endif
  44. namespace spvtools {
  45. namespace opt {
  46. uint32_t InstBindlessCheckPass::GenDebugReadLength(
  47. uint32_t var_id, InstructionBuilder* builder) {
  48. uint32_t desc_set_idx =
  49. var2desc_set_[var_id] + kDebugInputBindlessOffsetLengths;
  50. uint32_t desc_set_idx_id = builder->GetUintConstantId(desc_set_idx);
  51. uint32_t binding_idx_id = builder->GetUintConstantId(var2binding_[var_id]);
  52. return GenDebugDirectRead({desc_set_idx_id, binding_idx_id}, builder);
  53. }
  54. uint32_t InstBindlessCheckPass::GenDebugReadInit(uint32_t var_id,
  55. uint32_t desc_idx_id,
  56. InstructionBuilder* builder) {
  57. uint32_t binding_idx_id = builder->GetUintConstantId(var2binding_[var_id]);
  58. uint32_t u_desc_idx_id = GenUintCastCode(desc_idx_id, builder);
  59. // If desc index checking is not enabled, we know the offset of initialization
  60. // entries is 1, so we can avoid loading this value and just add 1 to the
  61. // descriptor set.
  62. if (!desc_idx_enabled_) {
  63. uint32_t desc_set_idx_id =
  64. builder->GetUintConstantId(var2desc_set_[var_id] + 1);
  65. return GenDebugDirectRead({desc_set_idx_id, binding_idx_id, u_desc_idx_id},
  66. builder);
  67. } else {
  68. uint32_t desc_set_base_id =
  69. builder->GetUintConstantId(kDebugInputBindlessInitOffset);
  70. uint32_t desc_set_idx_id =
  71. builder->GetUintConstantId(var2desc_set_[var_id]);
  72. return GenDebugDirectRead(
  73. {desc_set_base_id, desc_set_idx_id, binding_idx_id, u_desc_idx_id},
  74. builder);
  75. }
  76. }
  77. uint32_t InstBindlessCheckPass::CloneOriginalImage(
  78. uint32_t old_image_id, InstructionBuilder* builder) {
  79. Instruction* new_image_inst;
  80. Instruction* old_image_inst = get_def_use_mgr()->GetDef(old_image_id);
  81. if (old_image_inst->opcode() == SpvOpLoad) {
  82. new_image_inst = builder->AddLoad(
  83. old_image_inst->type_id(),
  84. old_image_inst->GetSingleWordInOperand(kSpvLoadPtrIdInIdx));
  85. } else if (old_image_inst->opcode() == SpvOp::SpvOpSampledImage) {
  86. uint32_t clone_id = CloneOriginalImage(
  87. old_image_inst->GetSingleWordInOperand(kSpvSampledImageImageIdInIdx),
  88. builder);
  89. new_image_inst = builder->AddBinaryOp(
  90. old_image_inst->type_id(), SpvOpSampledImage, clone_id,
  91. old_image_inst->GetSingleWordInOperand(kSpvSampledImageSamplerIdInIdx));
  92. } else if (old_image_inst->opcode() == SpvOp::SpvOpImage) {
  93. uint32_t clone_id = CloneOriginalImage(
  94. old_image_inst->GetSingleWordInOperand(kSpvImageSampledImageIdInIdx),
  95. builder);
  96. new_image_inst =
  97. builder->AddUnaryOp(old_image_inst->type_id(), SpvOpImage, clone_id);
  98. } else {
  99. assert(old_image_inst->opcode() == SpvOp::SpvOpCopyObject &&
  100. "expecting OpCopyObject");
  101. uint32_t clone_id = CloneOriginalImage(
  102. old_image_inst->GetSingleWordInOperand(kSpvCopyObjectOperandIdInIdx),
  103. builder);
  104. // Since we are cloning, no need to create new copy
  105. new_image_inst = get_def_use_mgr()->GetDef(clone_id);
  106. }
  107. uid2offset_[new_image_inst->unique_id()] =
  108. uid2offset_[old_image_inst->unique_id()];
  109. uint32_t new_image_id = new_image_inst->result_id();
  110. get_decoration_mgr()->CloneDecorations(old_image_id, new_image_id);
  111. return new_image_id;
  112. }
  113. uint32_t InstBindlessCheckPass::CloneOriginalReference(
  114. RefAnalysis* ref, InstructionBuilder* builder) {
  115. // If original is image based, start by cloning descriptor load
  116. uint32_t new_image_id = 0;
  117. if (ref->desc_load_id != 0) {
  118. uint32_t old_image_id =
  119. ref->ref_inst->GetSingleWordInOperand(kSpvImageSampleImageIdInIdx);
  120. new_image_id = CloneOriginalImage(old_image_id, builder);
  121. }
  122. // Clone original reference
  123. std::unique_ptr<Instruction> new_ref_inst(ref->ref_inst->Clone(context()));
  124. uint32_t ref_result_id = ref->ref_inst->result_id();
  125. uint32_t new_ref_id = 0;
  126. if (ref_result_id != 0) {
  127. new_ref_id = TakeNextId();
  128. new_ref_inst->SetResultId(new_ref_id);
  129. }
  130. // Update new ref with new image if created
  131. if (new_image_id != 0)
  132. new_ref_inst->SetInOperand(kSpvImageSampleImageIdInIdx, {new_image_id});
  133. // Register new reference and add to new block
  134. Instruction* added_inst = builder->AddInstruction(std::move(new_ref_inst));
  135. uid2offset_[added_inst->unique_id()] =
  136. uid2offset_[ref->ref_inst->unique_id()];
  137. if (new_ref_id != 0)
  138. get_decoration_mgr()->CloneDecorations(ref_result_id, new_ref_id);
  139. return new_ref_id;
  140. }
  141. uint32_t InstBindlessCheckPass::GetImageId(Instruction* inst) {
  142. switch (inst->opcode()) {
  143. case SpvOp::SpvOpImageSampleImplicitLod:
  144. case SpvOp::SpvOpImageSampleExplicitLod:
  145. case SpvOp::SpvOpImageSampleDrefImplicitLod:
  146. case SpvOp::SpvOpImageSampleDrefExplicitLod:
  147. case SpvOp::SpvOpImageSampleProjImplicitLod:
  148. case SpvOp::SpvOpImageSampleProjExplicitLod:
  149. case SpvOp::SpvOpImageSampleProjDrefImplicitLod:
  150. case SpvOp::SpvOpImageSampleProjDrefExplicitLod:
  151. case SpvOp::SpvOpImageGather:
  152. case SpvOp::SpvOpImageDrefGather:
  153. case SpvOp::SpvOpImageQueryLod:
  154. case SpvOp::SpvOpImageSparseSampleImplicitLod:
  155. case SpvOp::SpvOpImageSparseSampleExplicitLod:
  156. case SpvOp::SpvOpImageSparseSampleDrefImplicitLod:
  157. case SpvOp::SpvOpImageSparseSampleDrefExplicitLod:
  158. case SpvOp::SpvOpImageSparseSampleProjImplicitLod:
  159. case SpvOp::SpvOpImageSparseSampleProjExplicitLod:
  160. case SpvOp::SpvOpImageSparseSampleProjDrefImplicitLod:
  161. case SpvOp::SpvOpImageSparseSampleProjDrefExplicitLod:
  162. case SpvOp::SpvOpImageSparseGather:
  163. case SpvOp::SpvOpImageSparseDrefGather:
  164. case SpvOp::SpvOpImageFetch:
  165. case SpvOp::SpvOpImageRead:
  166. case SpvOp::SpvOpImageQueryFormat:
  167. case SpvOp::SpvOpImageQueryOrder:
  168. case SpvOp::SpvOpImageQuerySizeLod:
  169. case SpvOp::SpvOpImageQuerySize:
  170. case SpvOp::SpvOpImageQueryLevels:
  171. case SpvOp::SpvOpImageQuerySamples:
  172. case SpvOp::SpvOpImageSparseFetch:
  173. case SpvOp::SpvOpImageSparseRead:
  174. case SpvOp::SpvOpImageWrite:
  175. return inst->GetSingleWordInOperand(kSpvImageSampleImageIdInIdx);
  176. default:
  177. break;
  178. }
  179. return 0;
  180. }
  181. Instruction* InstBindlessCheckPass::GetPointeeTypeInst(Instruction* ptr_inst) {
  182. uint32_t pte_ty_id = GetPointeeTypeId(ptr_inst);
  183. return get_def_use_mgr()->GetDef(pte_ty_id);
  184. }
// Analyzes |ref_inst| as a potential descriptor reference. On success, fills
// |ref| with the reference's components (variable id, descriptor index id,
// descriptor load id, pointer id, storage class) and returns true; returns
// false when |ref_inst| is not a reference this pass instruments.
//
// Two forms are handled:
//   1. OpLoad/OpStore through an access chain into a Uniform/StorageBuffer
//      variable (buffer reference).
//   2. An image instruction whose image operand chains back through
//      OpSampledImage/OpImage/OpCopyObject to an OpLoad of a descriptor
//      (image-based reference).
bool InstBindlessCheckPass::AnalyzeDescriptorReference(Instruction* ref_inst,
                                                       RefAnalysis* ref) {
  ref->ref_inst = ref_inst;
  if (ref_inst->opcode() == SpvOpLoad || ref_inst->opcode() == SpvOpStore) {
    // Buffer reference: no descriptor load is involved.
    // Note: OpLoad and OpStore both carry the pointer at input operand 0,
    // so kSpvLoadPtrIdInIdx is valid for either opcode.
    ref->desc_load_id = 0;
    ref->ptr_id = ref_inst->GetSingleWordInOperand(kSpvLoadPtrIdInIdx);
    Instruction* ptr_inst = get_def_use_mgr()->GetDef(ref->ptr_id);
    if (ptr_inst->opcode() != SpvOp::SpvOpAccessChain) return false;
    ref->var_id = ptr_inst->GetSingleWordInOperand(kSpvAccessChainBaseIdInIdx);
    Instruction* var_inst = get_def_use_mgr()->GetDef(ref->var_id);
    if (var_inst->opcode() != SpvOp::SpvOpVariable) return false;
    // Only Uniform and StorageBuffer variables are descriptor-backed buffers.
    uint32_t storage_class =
        var_inst->GetSingleWordInOperand(kSpvVariableStorageClassInIdx);
    switch (storage_class) {
      case SpvStorageClassUniform:
      case SpvStorageClassStorageBuffer:
        break;
      default:
        return false;
        break;
    }
    // Check for deprecated storage block form: a BufferBlock-decorated
    // struct in Uniform storage is really an SSBO (pre-SPIR-V 1.3 form).
    if (storage_class == SpvStorageClassUniform) {
      uint32_t var_ty_id = var_inst->type_id();
      Instruction* var_ty_inst = get_def_use_mgr()->GetDef(var_ty_id);
      uint32_t ptr_ty_id =
          var_ty_inst->GetSingleWordInOperand(kSpvTypePtrTypeIdInIdx);
      Instruction* ptr_ty_inst = get_def_use_mgr()->GetDef(ptr_ty_id);
      SpvOp ptr_ty_op = ptr_ty_inst->opcode();
      // The pointee may be a (runtime) array of blocks when the variable is
      // a descriptor array; peel one level to reach the block struct.
      uint32_t block_ty_id =
          (ptr_ty_op == SpvOpTypeArray || ptr_ty_op == SpvOpTypeRuntimeArray)
              ? ptr_ty_inst->GetSingleWordInOperand(kSpvTypeArrayTypeIdInIdx)
              : ptr_ty_id;
      assert(get_def_use_mgr()->GetDef(block_ty_id)->opcode() ==
                 SpvOpTypeStruct &&
             "unexpected block type");
      bool block_found = get_decoration_mgr()->FindDecoration(
          block_ty_id, SpvDecorationBlock,
          [](const Instruction&) { return true; });
      if (!block_found) {
        // If block decoration not found, verify deprecated form of SSBO
        bool buffer_block_found = get_decoration_mgr()->FindDecoration(
            block_ty_id, SpvDecorationBufferBlock,
            [](const Instruction&) { return true; });
        USE_ASSERT(buffer_block_found && "block decoration not found");
        storage_class = SpvStorageClassStorageBuffer;
      }
    }
    ref->strg_class = storage_class;
    Instruction* desc_type_inst = GetPointeeTypeInst(var_inst);
    switch (desc_type_inst->opcode()) {
      case SpvOpTypeArray:
      case SpvOpTypeRuntimeArray:
        // A load through a descriptor array will have at least 3 operands. We
        // do not want to instrument loads of descriptors here which are part of
        // an image-based reference.
        if (ptr_inst->NumInOperands() < 3) return false;
        ref->desc_idx_id =
            ptr_inst->GetSingleWordInOperand(kSpvAccessChainIndex0IdInIdx);
        break;
      default:
        // Single descriptor (not an array): index 0 is implied.
        ref->desc_idx_id = 0;
        break;
    }
    return true;
  }
  // Reference is not load or store. If not an image-based reference, return.
  ref->image_id = GetImageId(ref_inst);
  if (ref->image_id == 0) return false;
  // Search for descriptor load: walk back through the wrapper instructions
  // (OpSampledImage / OpImage / OpCopyObject) until something else is hit.
  uint32_t desc_load_id = ref->image_id;
  Instruction* desc_load_inst;
  for (;;) {
    desc_load_inst = get_def_use_mgr()->GetDef(desc_load_id);
    if (desc_load_inst->opcode() == SpvOp::SpvOpSampledImage)
      desc_load_id =
          desc_load_inst->GetSingleWordInOperand(kSpvSampledImageImageIdInIdx);
    else if (desc_load_inst->opcode() == SpvOp::SpvOpImage)
      desc_load_id =
          desc_load_inst->GetSingleWordInOperand(kSpvImageSampledImageIdInIdx);
    else if (desc_load_inst->opcode() == SpvOp::SpvOpCopyObject)
      desc_load_id =
          desc_load_inst->GetSingleWordInOperand(kSpvCopyObjectOperandIdInIdx);
    else
      break;
  }
  if (desc_load_inst->opcode() != SpvOp::SpvOpLoad) {
    // TODO(greg-lunarg): Handle additional possibilities?
    return false;
  }
  ref->desc_load_id = desc_load_id;
  ref->ptr_id = desc_load_inst->GetSingleWordInOperand(kSpvLoadPtrIdInIdx);
  Instruction* ptr_inst = get_def_use_mgr()->GetDef(ref->ptr_id);
  if (ptr_inst->opcode() == SpvOp::SpvOpVariable) {
    // Direct load of a single descriptor variable.
    ref->desc_idx_id = 0;
    ref->var_id = ref->ptr_id;
  } else if (ptr_inst->opcode() == SpvOp::SpvOpAccessChain) {
    // Load through a descriptor array: expect exactly base + one index.
    if (ptr_inst->NumInOperands() != 2) {
      assert(false && "unexpected bindless index number");
      return false;
    }
    ref->desc_idx_id =
        ptr_inst->GetSingleWordInOperand(kSpvAccessChainIndex0IdInIdx);
    ref->var_id = ptr_inst->GetSingleWordInOperand(kSpvAccessChainBaseIdInIdx);
    Instruction* var_inst = get_def_use_mgr()->GetDef(ref->var_id);
    if (var_inst->opcode() != SpvOpVariable) {
      assert(false && "unexpected bindless base");
      return false;
    }
  } else {
    // TODO(greg-lunarg): Handle additional possibilities?
    return false;
  }
  return true;
}
  300. uint32_t InstBindlessCheckPass::FindStride(uint32_t ty_id,
  301. uint32_t stride_deco) {
  302. uint32_t stride = 0xdeadbeef;
  303. bool found = get_decoration_mgr()->FindDecoration(
  304. ty_id, stride_deco, [&stride](const Instruction& deco_inst) {
  305. stride = deco_inst.GetSingleWordInOperand(2u);
  306. return true;
  307. });
  308. USE_ASSERT(found && "stride not found");
  309. return stride;
  310. }
// Returns the size in bytes that type |ty_id| spans in a buffer.
// |matrix_stride|, |col_major| and |in_matrix| describe the matrix layout of
// the enclosing struct member; pass 0/false/false when not inside a matrix.
uint32_t InstBindlessCheckPass::ByteSize(uint32_t ty_id, uint32_t matrix_stride,
                                         bool col_major, bool in_matrix) {
  analysis::TypeManager* type_mgr = context()->get_type_mgr();
  const analysis::Type* sz_ty = type_mgr->GetType(ty_id);
  if (sz_ty->kind() == analysis::Type::kPointer) {
    // Assuming PhysicalStorageBuffer pointer (8 bytes).
    return 8;
  }
  if (sz_ty->kind() == analysis::Type::kMatrix) {
    assert(matrix_stride != 0 && "missing matrix stride");
    const analysis::Matrix* m_ty = sz_ty->AsMatrix();
    if (col_major) {
      // Column-major: stride applies per column (matrix element).
      return m_ty->element_count() * matrix_stride;
    } else {
      // Row-major: stride applies per row, i.e. per column-vector component.
      const analysis::Vector* v_ty = m_ty->element_type()->AsVector();
      return v_ty->element_count() * matrix_stride;
    }
  }
  uint32_t size = 1;
  if (sz_ty->kind() == analysis::Type::kVector) {
    const analysis::Vector* v_ty = sz_ty->AsVector();
    size = v_ty->element_count();
    const analysis::Type* comp_ty = v_ty->element_type();
    // if vector in row major matrix, the vector is strided so return the
    // number of bytes spanned by the vector
    if (in_matrix && !col_major && matrix_stride > 0) {
      uint32_t comp_ty_id = type_mgr->GetId(comp_ty);
      return (size - 1) * matrix_stride + ByteSize(comp_ty_id, 0, false, false);
    }
    sz_ty = comp_ty;
  }
  // Scalar (or vector component): size = component count * bit width / 8.
  switch (sz_ty->kind()) {
    case analysis::Type::kFloat: {
      const analysis::Float* f_ty = sz_ty->AsFloat();
      size *= f_ty->width();
    } break;
    case analysis::Type::kInteger: {
      const analysis::Integer* i_ty = sz_ty->AsInteger();
      size *= i_ty->width();
    } break;
    default: { assert(false && "unexpected type"); } break;
  }
  size /= 8;
  return size;
}
// Generates code computing the buffer-relative byte offset of the LAST byte
// referenced by |ref| (an access chain into a buffer), and returns the id of
// that offset value. Walks the access chain's indices in parallel with the
// buffer's type tree, accumulating a byte offset from the layout decorations
// (ArrayStride, MatrixStride, Offset, ColMajor).
uint32_t InstBindlessCheckPass::GenLastByteIdx(RefAnalysis* ref,
                                               InstructionBuilder* builder) {
  // Find outermost buffer type and its access chain index
  Instruction* var_inst = get_def_use_mgr()->GetDef(ref->var_id);
  Instruction* desc_ty_inst = GetPointeeTypeInst(var_inst);
  uint32_t buff_ty_id;
  uint32_t ac_in_idx = 1;  // input operand 0 is the access chain's base
  switch (desc_ty_inst->opcode()) {
    case SpvOpTypeArray:
    case SpvOpTypeRuntimeArray:
      // Descriptor array: index 1 selects the descriptor, so offsets start
      // at the array's element (block) type and the next chain index.
      buff_ty_id = desc_ty_inst->GetSingleWordInOperand(0);
      ++ac_in_idx;
      break;
    default:
      assert(desc_ty_inst->opcode() == SpvOpTypeStruct &&
             "unexpected descriptor type");
      buff_ty_id = desc_ty_inst->result_id();
      break;
  }
  // Process remaining access chain indices
  Instruction* ac_inst = get_def_use_mgr()->GetDef(ref->ptr_id);
  uint32_t curr_ty_id = buff_ty_id;
  uint32_t sum_id = 0u;            // id of the running byte-offset sum
  uint32_t matrix_stride = 0u;     // stride of the enclosing matrix member
  bool col_major = false;          // layout of the enclosing matrix member
  uint32_t matrix_stride_id = 0u;  // constant id for matrix_stride
  bool in_matrix = false;          // true once a matrix type has been entered
  while (ac_in_idx < ac_inst->NumInOperands()) {
    uint32_t curr_idx_id = ac_inst->GetSingleWordInOperand(ac_in_idx);
    Instruction* curr_ty_inst = get_def_use_mgr()->GetDef(curr_ty_id);
    uint32_t curr_offset_id = 0;
    switch (curr_ty_inst->opcode()) {
      case SpvOpTypeArray:
      case SpvOpTypeRuntimeArray: {
        // Get array stride and multiply by current index
        uint32_t arr_stride = FindStride(curr_ty_id, SpvDecorationArrayStride);
        uint32_t arr_stride_id = builder->GetUintConstantId(arr_stride);
        uint32_t curr_idx_32b_id = Gen32BitCvtCode(curr_idx_id, builder);
        Instruction* curr_offset_inst = builder->AddBinaryOp(
            GetUintId(), SpvOpIMul, arr_stride_id, curr_idx_32b_id);
        curr_offset_id = curr_offset_inst->result_id();
        // Get element type for next step
        curr_ty_id = curr_ty_inst->GetSingleWordInOperand(0);
      } break;
      case SpvOpTypeMatrix: {
        // matrix_stride was set while processing the enclosing struct member.
        assert(matrix_stride != 0 && "missing matrix stride");
        matrix_stride_id = builder->GetUintConstantId(matrix_stride);
        uint32_t vec_ty_id = curr_ty_inst->GetSingleWordInOperand(0);
        // If column major, multiply column index by matrix stride, otherwise
        // by vector component size and save matrix stride for vector (row)
        // index
        uint32_t col_stride_id;
        if (col_major) {
          col_stride_id = matrix_stride_id;
        } else {
          Instruction* vec_ty_inst = get_def_use_mgr()->GetDef(vec_ty_id);
          uint32_t comp_ty_id = vec_ty_inst->GetSingleWordInOperand(0u);
          uint32_t col_stride = ByteSize(comp_ty_id, 0u, false, false);
          col_stride_id = builder->GetUintConstantId(col_stride);
        }
        uint32_t curr_idx_32b_id = Gen32BitCvtCode(curr_idx_id, builder);
        Instruction* curr_offset_inst = builder->AddBinaryOp(
            GetUintId(), SpvOpIMul, col_stride_id, curr_idx_32b_id);
        curr_offset_id = curr_offset_inst->result_id();
        // Get element type for next step
        curr_ty_id = vec_ty_id;
        in_matrix = true;
      } break;
      case SpvOpTypeVector: {
        // If inside a row major matrix type, multiply index by matrix stride,
        // else multiply by component size
        uint32_t comp_ty_id = curr_ty_inst->GetSingleWordInOperand(0u);
        uint32_t curr_idx_32b_id = Gen32BitCvtCode(curr_idx_id, builder);
        if (in_matrix && !col_major) {
          Instruction* curr_offset_inst = builder->AddBinaryOp(
              GetUintId(), SpvOpIMul, matrix_stride_id, curr_idx_32b_id);
          curr_offset_id = curr_offset_inst->result_id();
        } else {
          uint32_t comp_ty_sz = ByteSize(comp_ty_id, 0u, false, false);
          uint32_t comp_ty_sz_id = builder->GetUintConstantId(comp_ty_sz);
          Instruction* curr_offset_inst = builder->AddBinaryOp(
              GetUintId(), SpvOpIMul, comp_ty_sz_id, curr_idx_32b_id);
          curr_offset_id = curr_offset_inst->result_id();
        }
        // Get element type for next step
        curr_ty_id = comp_ty_id;
      } break;
      case SpvOpTypeStruct: {
        // Get buffer byte offset for the referenced member. Struct indices
        // must be compile-time constants, so the offset is a constant too.
        Instruction* curr_idx_inst = get_def_use_mgr()->GetDef(curr_idx_id);
        assert(curr_idx_inst->opcode() == SpvOpConstant &&
               "unexpected struct index");
        uint32_t member_idx = curr_idx_inst->GetSingleWordInOperand(0);
        uint32_t member_offset = 0xdeadbeef;  // sentinel, see USE_ASSERT below
        bool found = get_decoration_mgr()->FindDecoration(
            curr_ty_id, SpvDecorationOffset,
            [&member_idx, &member_offset](const Instruction& deco_inst) {
              if (deco_inst.GetSingleWordInOperand(1u) != member_idx)
                return false;
              member_offset = deco_inst.GetSingleWordInOperand(3u);
              return true;
            });
        USE_ASSERT(found && "member offset not found");
        curr_offset_id = builder->GetUintConstantId(member_offset);
        // Look for matrix stride for this member if there is one. The matrix
        // stride is not on the matrix type, but in a OpMemberDecorate on the
        // enclosing struct type at the member index. If none found, reset
        // stride to 0.
        found = get_decoration_mgr()->FindDecoration(
            curr_ty_id, SpvDecorationMatrixStride,
            [&member_idx, &matrix_stride](const Instruction& deco_inst) {
              if (deco_inst.GetSingleWordInOperand(1u) != member_idx)
                return false;
              matrix_stride = deco_inst.GetSingleWordInOperand(3u);
              return true;
            });
        if (!found) matrix_stride = 0;
        // Look for column major decoration
        found = get_decoration_mgr()->FindDecoration(
            curr_ty_id, SpvDecorationColMajor,
            [&member_idx, &col_major](const Instruction& deco_inst) {
              if (deco_inst.GetSingleWordInOperand(1u) != member_idx)
                return false;
              col_major = true;
              return true;
            });
        if (!found) col_major = false;
        // Get element type for next step
        curr_ty_id = curr_ty_inst->GetSingleWordInOperand(member_idx);
      } break;
      default: { assert(false && "unexpected non-composite type"); } break;
    }
    // Accumulate this level's offset into the running sum.
    if (sum_id == 0)
      sum_id = curr_offset_id;
    else {
      Instruction* sum_inst =
          builder->AddBinaryOp(GetUintId(), SpvOpIAdd, sum_id, curr_offset_id);
      sum_id = sum_inst->result_id();
    }
    ++ac_in_idx;
  }
  // Add in offset of last byte of referenced object
  uint32_t bsize = ByteSize(curr_ty_id, matrix_stride, col_major, in_matrix);
  uint32_t last = bsize - 1;
  uint32_t last_id = builder->GetUintConstantId(last);
  Instruction* sum_inst =
      builder->AddBinaryOp(GetUintId(), SpvOpIAdd, sum_id, last_id);
  return sum_inst->result_id();
}
// Generates the runtime check control flow: a conditional branch on
// |check_id| into a valid block (clone of the original reference), an
// invalid block (debug stream write of |error_id| plus context values), and
// a merge block with a phi selecting the clone's result or null. The original
// reference instruction is killed and its uses rerouted to the phi.
// |offset_id| != 0 selects the buffer-OOB error message layout; otherwise the
// uninitialized-descriptor layout is used.
void InstBindlessCheckPass::GenCheckCode(
    uint32_t check_id, uint32_t error_id, uint32_t offset_id,
    uint32_t length_id, uint32_t stage_idx, RefAnalysis* ref,
    std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  BasicBlock* back_blk_ptr = &*new_blocks->back();
  InstructionBuilder builder(
      context(), back_blk_ptr,
      IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
  // Gen conditional branch on check_id. Valid branch generates original
  // reference. Invalid generates debug output and zero result (if needed).
  uint32_t merge_blk_id = TakeNextId();
  uint32_t valid_blk_id = TakeNextId();
  uint32_t invalid_blk_id = TakeNextId();
  std::unique_ptr<Instruction> merge_label(NewLabel(merge_blk_id));
  std::unique_ptr<Instruction> valid_label(NewLabel(valid_blk_id));
  std::unique_ptr<Instruction> invalid_label(NewLabel(invalid_blk_id));
  (void)builder.AddConditionalBranch(check_id, valid_blk_id, invalid_blk_id,
                                     merge_blk_id, SpvSelectionControlMaskNone);
  // Gen valid bounds branch
  std::unique_ptr<BasicBlock> new_blk_ptr(
      new BasicBlock(std::move(valid_label)));
  builder.SetInsertPoint(&*new_blk_ptr);
  uint32_t new_ref_id = CloneOriginalReference(ref, &builder);
  (void)builder.AddBranch(merge_blk_id);
  new_blocks->push_back(std::move(new_blk_ptr));
  // Gen invalid block
  new_blk_ptr.reset(new BasicBlock(std::move(invalid_label)));
  builder.SetInsertPoint(&*new_blk_ptr);
  uint32_t u_index_id = GenUintCastCode(ref->desc_idx_id, &builder);
  if (offset_id != 0) {
    // Buffer OOB
    uint32_t u_offset_id = GenUintCastCode(offset_id, &builder);
    uint32_t u_length_id = GenUintCastCode(length_id, &builder);
    GenDebugStreamWrite(uid2offset_[ref->ref_inst->unique_id()], stage_idx,
                        {error_id, u_index_id, u_offset_id, u_length_id},
                        &builder);
  } else if (buffer_bounds_enabled_ || texel_buffer_enabled_) {
    // Uninitialized Descriptor - Return additional unused zero so all error
    // modes will use same debug stream write function
    uint32_t u_length_id = GenUintCastCode(length_id, &builder);
    GenDebugStreamWrite(
        uid2offset_[ref->ref_inst->unique_id()], stage_idx,
        {error_id, u_index_id, u_length_id, builder.GetUintConstantId(0)},
        &builder);
  } else {
    // Uninitialized Descriptor - Normal error return
    uint32_t u_length_id = GenUintCastCode(length_id, &builder);
    GenDebugStreamWrite(uid2offset_[ref->ref_inst->unique_id()], stage_idx,
                        {error_id, u_index_id, u_length_id}, &builder);
  }
  // Remember last invalid block id (GenDebugStreamWrite may have ended in a
  // different block than the one started above).
  uint32_t last_invalid_blk_id = new_blk_ptr->GetLabelInst()->result_id();
  // Gen zero for invalid reference
  uint32_t ref_type_id = ref->ref_inst->type_id();
  (void)builder.AddBranch(merge_blk_id);
  new_blocks->push_back(std::move(new_blk_ptr));
  // Gen merge block
  new_blk_ptr.reset(new BasicBlock(std::move(merge_label)));
  builder.SetInsertPoint(&*new_blk_ptr);
  // Gen phi of new reference and zero, if necessary, and replace the
  // result id of the original reference with that of the Phi. Kill original
  // reference.
  if (new_ref_id != 0) {
    Instruction* phi_inst = builder.AddPhi(
        ref_type_id, {new_ref_id, valid_blk_id, GetNullId(ref_type_id),
                      last_invalid_blk_id});
    context()->ReplaceAllUsesWith(ref->ref_inst->result_id(),
                                  phi_inst->result_id());
  }
  new_blocks->push_back(std::move(new_blk_ptr));
  context()->KillInst(ref->ref_inst);
}
// Instruments the reference at |ref_inst_itr| with a descriptor-index bounds
// check: splits the block at the reference, emits an index < length test,
// and routes through GenCheckCode so an out-of-bounds index produces a debug
// stream write and a null result instead of the reference. Does nothing when
// the reference is not through an indexed descriptor, or when both index and
// bound are compile-time constants already known to be in bounds.
void InstBindlessCheckPass::GenDescIdxCheckCode(
    BasicBlock::iterator ref_inst_itr,
    UptrVectorIterator<BasicBlock> ref_block_itr, uint32_t stage_idx,
    std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  // Look for reference through indexed descriptor. If found, analyze and
  // save components. If not, return.
  RefAnalysis ref;
  if (!AnalyzeDescriptorReference(&*ref_inst_itr, &ref)) return;
  Instruction* ptr_inst = get_def_use_mgr()->GetDef(ref.ptr_id);
  if (ptr_inst->opcode() != SpvOp::SpvOpAccessChain) return;
  // If index and bound both compile-time constants and index < bound,
  // return without changing
  Instruction* var_inst = get_def_use_mgr()->GetDef(ref.var_id);
  Instruction* desc_type_inst = GetPointeeTypeInst(var_inst);
  uint32_t length_id = 0;
  if (desc_type_inst->opcode() == SpvOpTypeArray) {
    length_id =
        desc_type_inst->GetSingleWordInOperand(kSpvTypeArrayLengthIdInIdx);
    Instruction* index_inst = get_def_use_mgr()->GetDef(ref.desc_idx_id);
    Instruction* length_inst = get_def_use_mgr()->GetDef(length_id);
    if (index_inst->opcode() == SpvOpConstant &&
        length_inst->opcode() == SpvOpConstant &&
        index_inst->GetSingleWordInOperand(kSpvConstantValueInIdx) <
            length_inst->GetSingleWordInOperand(kSpvConstantValueInIdx))
      return;
  } else if (!desc_idx_enabled_ ||
             desc_type_inst->opcode() != SpvOpTypeRuntimeArray) {
    // Runtime arrays are only checked when descriptor indexing is enabled;
    // any other type is not an indexed descriptor.
    return;
  }
  // Move original block's preceding instructions into first new block
  std::unique_ptr<BasicBlock> new_blk_ptr;
  MovePreludeCode(ref_inst_itr, ref_block_itr, &new_blk_ptr);
  InstructionBuilder builder(
      context(), &*new_blk_ptr,
      IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
  new_blocks->push_back(std::move(new_blk_ptr));
  uint32_t error_id = builder.GetUintConstantId(kInstErrorBindlessBounds);
  // If length id not yet set, descriptor array is runtime size so
  // generate load of length from stage's debug input buffer.
  if (length_id == 0) {
    assert(desc_type_inst->opcode() == SpvOpTypeRuntimeArray &&
           "unexpected bindless type");
    length_id = GenDebugReadLength(ref.var_id, &builder);
  }
  // Generate full runtime bounds test code with true branch
  // being full reference and false branch being debug output and zero
  // for the referenced value.
  uint32_t desc_idx_32b_id = Gen32BitCvtCode(ref.desc_idx_id, &builder);
  uint32_t length_32b_id = Gen32BitCvtCode(length_id, &builder);
  Instruction* ult_inst = builder.AddBinaryOp(GetBoolId(), SpvOpULessThan,
                                              desc_idx_32b_id, length_32b_id);
  ref.desc_idx_id = desc_idx_32b_id;
  GenCheckCode(ult_inst->result_id(), error_id, 0u, length_id, stage_idx, &ref,
               new_blocks);
  // Move original block's remaining code into remainder/merge block and add
  // to new blocks
  BasicBlock* back_blk_ptr = &*new_blocks->back();
  MovePostludeCode(ref_block_itr, back_blk_ptr);
}
  636. void InstBindlessCheckPass::GenDescInitCheckCode(
  637. BasicBlock::iterator ref_inst_itr,
  638. UptrVectorIterator<BasicBlock> ref_block_itr, uint32_t stage_idx,
  639. std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  640. // Look for reference through descriptor. If not, return.
  641. RefAnalysis ref;
  642. if (!AnalyzeDescriptorReference(&*ref_inst_itr, &ref)) return;
  643. // Determine if we can only do initialization check
  644. bool init_check = false;
  645. if (ref.desc_load_id != 0 || !buffer_bounds_enabled_) {
  646. init_check = true;
  647. } else {
  648. // For now, only do bounds check for non-aggregate types. Otherwise
  649. // just do descriptor initialization check.
  650. // TODO(greg-lunarg): Do bounds check for aggregate loads and stores
  651. Instruction* ref_ptr_inst = get_def_use_mgr()->GetDef(ref.ptr_id);
  652. Instruction* pte_type_inst = GetPointeeTypeInst(ref_ptr_inst);
  653. uint32_t pte_type_op = pte_type_inst->opcode();
  654. if (pte_type_op == SpvOpTypeArray || pte_type_op == SpvOpTypeRuntimeArray ||
  655. pte_type_op == SpvOpTypeStruct)
  656. init_check = true;
  657. }
  658. // If initialization check and not enabled, return
  659. if (init_check && !desc_init_enabled_) return;
  660. // Move original block's preceding instructions into first new block
  661. std::unique_ptr<BasicBlock> new_blk_ptr;
  662. MovePreludeCode(ref_inst_itr, ref_block_itr, &new_blk_ptr);
  663. InstructionBuilder builder(
  664. context(), &*new_blk_ptr,
  665. IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
  666. new_blocks->push_back(std::move(new_blk_ptr));
  667. // If initialization check, use reference value of zero.
  668. // Else use the index of the last byte referenced.
  669. uint32_t ref_id = init_check ? builder.GetUintConstantId(0u)
  670. : GenLastByteIdx(&ref, &builder);
  671. // Read initialization/bounds from debug input buffer. If index id not yet
  672. // set, binding is single descriptor, so set index to constant 0.
  673. if (ref.desc_idx_id == 0) ref.desc_idx_id = builder.GetUintConstantId(0u);
  674. uint32_t init_id = GenDebugReadInit(ref.var_id, ref.desc_idx_id, &builder);
  675. // Generate runtime initialization/bounds test code with true branch
  676. // being full reference and false branch being debug output and zero
  677. // for the referenced value.
  678. Instruction* ult_inst =
  679. builder.AddBinaryOp(GetBoolId(), SpvOpULessThan, ref_id, init_id);
  680. uint32_t error = init_check ? kInstErrorBindlessUninit
  681. : (ref.strg_class == SpvStorageClassUniform
  682. ? kInstErrorBuffOOBUniform
  683. : kInstErrorBuffOOBStorage);
  684. uint32_t error_id = builder.GetUintConstantId(error);
  685. GenCheckCode(ult_inst->result_id(), error_id, init_check ? 0 : ref_id,
  686. init_check ? builder.GetUintConstantId(0u) : init_id, stage_idx,
  687. &ref, new_blocks);
  688. // Move original block's remaining code into remainder/merge block and add
  689. // to new blocks
  690. BasicBlock* back_blk_ptr = &*new_blocks->back();
  691. MovePostludeCode(ref_block_itr, back_blk_ptr);
  692. }
// Instruments a texel-buffer access (OpImageRead/OpImageFetch/OpImageWrite)
// with a runtime bounds check against OpImageQuerySize. On out-of-bounds
// access the generated code writes a debug record and substitutes zero for
// the referenced value.
void InstBindlessCheckPass::GenTexBuffCheckCode(
    BasicBlock::iterator ref_inst_itr,
    UptrVectorIterator<BasicBlock> ref_block_itr, uint32_t stage_idx,
    std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  // Only process OpImageRead, OpImageFetch and OpImageWrite that carry no
  // optional image operands (2 in-operands for read/fetch, 3 for write).
  Instruction* ref_inst = &*ref_inst_itr;
  SpvOp op = ref_inst->opcode();
  uint32_t num_in_oprnds = ref_inst->NumInOperands();
  if (!((op == SpvOpImageRead && num_in_oprnds == 2) ||
        (op == SpvOpImageFetch && num_in_oprnds == 2) ||
        (op == SpvOpImageWrite && num_in_oprnds == 3)))
    return;
  // Pull components from descriptor reference
  RefAnalysis ref;
  if (!AnalyzeDescriptorReference(ref_inst, &ref)) return;
  // Only process if image is a texel buffer: Dim must be Buffer, and the
  // image must be non-depth, non-arrayed and single-sampled.
  Instruction* image_inst = get_def_use_mgr()->GetDef(ref.image_id);
  uint32_t image_ty_id = image_inst->type_id();
  Instruction* image_ty_inst = get_def_use_mgr()->GetDef(image_ty_id);
  if (image_ty_inst->GetSingleWordInOperand(kSpvTypeImageDim) != SpvDimBuffer)
    return;
  if (image_ty_inst->GetSingleWordInOperand(kSpvTypeImageDepth) != 0) return;
  if (image_ty_inst->GetSingleWordInOperand(kSpvTypeImageArrayed) != 0) return;
  if (image_ty_inst->GetSingleWordInOperand(kSpvTypeImageMS) != 0) return;
  // Enable ImageQuery Capability if not yet enabled; the generated check
  // below emits OpImageQuerySize, which requires it.
  if (!get_feature_mgr()->HasCapability(SpvCapabilityImageQuery)) {
    std::unique_ptr<Instruction> cap_image_query_inst(new Instruction(
        context(), SpvOpCapability, 0, 0,
        std::initializer_list<Operand>{
            {SPV_OPERAND_TYPE_CAPABILITY, {SpvCapabilityImageQuery}}}));
    get_def_use_mgr()->AnalyzeInstDefUse(&*cap_image_query_inst);
    context()->AddCapability(std::move(cap_image_query_inst));
  }
  // Move original block's preceding instructions into first new block
  std::unique_ptr<BasicBlock> new_blk_ptr;
  MovePreludeCode(ref_inst_itr, ref_block_itr, &new_blk_ptr);
  InstructionBuilder builder(
      context(), &*new_blk_ptr,
      IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
  new_blocks->push_back(std::move(new_blk_ptr));
  // Get texel coordinate (in-operand 1 for all three accepted opcodes),
  // cast to unsigned for the comparison below.
  uint32_t coord_id =
      GenUintCastCode(ref_inst->GetSingleWordInOperand(1), &builder);
  // If index id not yet set, binding is single descriptor, so set index to
  // constant 0.
  if (ref.desc_idx_id == 0) ref.desc_idx_id = builder.GetUintConstantId(0u);
  // Get texel buffer size.
  Instruction* size_inst =
      builder.AddUnaryOp(GetUintId(), SpvOpImageQuerySize, ref.image_id);
  uint32_t size_id = size_inst->result_id();
  // Generate runtime bounds test code with true branch
  // being full reference and false branch being debug output and zero
  // for the referenced value.
  Instruction* ult_inst =
      builder.AddBinaryOp(GetBoolId(), SpvOpULessThan, coord_id, size_id);
  // Sampled == 2 marks a storage (read/write) texel buffer; otherwise it is
  // a uniform texel buffer.
  uint32_t error =
      (image_ty_inst->GetSingleWordInOperand(kSpvTypeImageSampled) == 2)
          ? kInstErrorBuffOOBStorageTexel
          : kInstErrorBuffOOBUniformTexel;
  uint32_t error_id = builder.GetUintConstantId(error);
  GenCheckCode(ult_inst->result_id(), error_id, coord_id, size_id, stage_idx,
               &ref, new_blocks);
  // Move original block's remaining code into remainder/merge block and add
  // to new blocks
  BasicBlock* back_blk_ptr = &*new_blocks->back();
  MovePostludeCode(ref_block_itr, back_blk_ptr);
}
  760. void InstBindlessCheckPass::InitializeInstBindlessCheck() {
  761. // Initialize base class
  762. InitializeInstrument();
  763. // If runtime array length support or buffer bounds checking are enabled,
  764. // create variable mappings. Length support is always enabled if descriptor
  765. // init check is enabled.
  766. if (desc_idx_enabled_ || buffer_bounds_enabled_ || texel_buffer_enabled_)
  767. for (auto& anno : get_module()->annotations())
  768. if (anno.opcode() == SpvOpDecorate) {
  769. if (anno.GetSingleWordInOperand(1u) == SpvDecorationDescriptorSet)
  770. var2desc_set_[anno.GetSingleWordInOperand(0u)] =
  771. anno.GetSingleWordInOperand(2u);
  772. else if (anno.GetSingleWordInOperand(1u) == SpvDecorationBinding)
  773. var2binding_[anno.GetSingleWordInOperand(0u)] =
  774. anno.GetSingleWordInOperand(2u);
  775. }
  776. }
  777. Pass::Status InstBindlessCheckPass::ProcessImpl() {
  778. // Perform bindless bounds check on each entry point function in module
  779. InstProcessFunction pfn =
  780. [this](BasicBlock::iterator ref_inst_itr,
  781. UptrVectorIterator<BasicBlock> ref_block_itr, uint32_t stage_idx,
  782. std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  783. return GenDescIdxCheckCode(ref_inst_itr, ref_block_itr, stage_idx,
  784. new_blocks);
  785. };
  786. bool modified = InstProcessEntryPointCallTree(pfn);
  787. if (desc_init_enabled_ || buffer_bounds_enabled_) {
  788. // Perform descriptor initialization and/or buffer bounds check on each
  789. // entry point function in module
  790. pfn = [this](BasicBlock::iterator ref_inst_itr,
  791. UptrVectorIterator<BasicBlock> ref_block_itr,
  792. uint32_t stage_idx,
  793. std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  794. return GenDescInitCheckCode(ref_inst_itr, ref_block_itr, stage_idx,
  795. new_blocks);
  796. };
  797. modified |= InstProcessEntryPointCallTree(pfn);
  798. }
  799. if (texel_buffer_enabled_) {
  800. // Perform texel buffer bounds check on each entry point function in
  801. // module. Generate after descriptor bounds and initialization checks.
  802. pfn = [this](BasicBlock::iterator ref_inst_itr,
  803. UptrVectorIterator<BasicBlock> ref_block_itr,
  804. uint32_t stage_idx,
  805. std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  806. return GenTexBuffCheckCode(ref_inst_itr, ref_block_itr, stage_idx,
  807. new_blocks);
  808. };
  809. modified |= InstProcessEntryPointCallTree(pfn);
  810. }
  811. return modified ? Status::SuccessWithChange : Status::SuccessWithoutChange;
  812. }
  813. Pass::Status InstBindlessCheckPass::Process() {
  814. InitializeInstBindlessCheck();
  815. return ProcessImpl();
  816. }
  817. } // namespace opt
  818. } // namespace spvtools