inst_bindless_check_pass.cpp 37 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845
  1. // Copyright (c) 2018 The Khronos Group Inc.
  2. // Copyright (c) 2018 Valve Corporation
  3. // Copyright (c) 2018 LunarG Inc.
  4. //
  5. // Licensed under the Apache License, Version 2.0 (the "License");
  6. // you may not use this file except in compliance with the License.
  7. // You may obtain a copy of the License at
  8. //
  9. // http://www.apache.org/licenses/LICENSE-2.0
  10. //
  11. // Unless required by applicable law or agreed to in writing, software
  12. // distributed under the License is distributed on an "AS IS" BASIS,
  13. // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14. // See the License for the specific language governing permissions and
  15. // limitations under the License.
  16. #include "inst_bindless_check_pass.h"
  17. namespace spvtools {
  18. namespace opt {
namespace {
// Input Operand Indices
//
// Positions of specific in-operands on the SPIR-V instructions this pass
// inspects, named here to avoid magic numbers in the analysis code below.
constexpr int kSpvImageSampleImageIdInIdx = 0;
constexpr int kSpvSampledImageImageIdInIdx = 0;
constexpr int kSpvSampledImageSamplerIdInIdx = 1;
constexpr int kSpvImageSampledImageIdInIdx = 0;
constexpr int kSpvCopyObjectOperandIdInIdx = 0;
constexpr int kSpvLoadPtrIdInIdx = 0;
constexpr int kSpvAccessChainBaseIdInIdx = 0;
constexpr int kSpvAccessChainIndex0IdInIdx = 1;
constexpr int kSpvTypeArrayTypeIdInIdx = 0;
constexpr int kSpvTypeArrayLengthIdInIdx = 1;
constexpr int kSpvConstantValueInIdx = 0;
constexpr int kSpvVariableStorageClassInIdx = 0;
constexpr int kSpvTypePtrTypeIdInIdx = 1;
// In-operand indices of the OpTypeImage fields (presumably Dim, Depth,
// Arrayed, MS, Sampled per their names — only used as operand positions here).
constexpr int kSpvTypeImageDim = 1;
constexpr int kSpvTypeImageDepth = 2;
constexpr int kSpvTypeImageArrayed = 3;
constexpr int kSpvTypeImageMS = 4;
constexpr int kSpvTypeImageSampled = 5;
}  // namespace
  40. uint32_t InstBindlessCheckPass::GenDebugReadLength(
  41. uint32_t var_id, InstructionBuilder* builder) {
  42. uint32_t desc_set_idx =
  43. var2desc_set_[var_id] + kDebugInputBindlessOffsetLengths;
  44. uint32_t desc_set_idx_id = builder->GetUintConstantId(desc_set_idx);
  45. uint32_t binding_idx_id = builder->GetUintConstantId(var2binding_[var_id]);
  46. return GenDebugDirectRead({desc_set_idx_id, binding_idx_id}, builder);
  47. }
  48. uint32_t InstBindlessCheckPass::GenDebugReadInit(uint32_t var_id,
  49. uint32_t desc_idx_id,
  50. InstructionBuilder* builder) {
  51. uint32_t binding_idx_id = builder->GetUintConstantId(var2binding_[var_id]);
  52. uint32_t u_desc_idx_id = GenUintCastCode(desc_idx_id, builder);
  53. // If desc index checking is not enabled, we know the offset of initialization
  54. // entries is 1, so we can avoid loading this value and just add 1 to the
  55. // descriptor set.
  56. if (!desc_idx_enabled_) {
  57. uint32_t desc_set_idx_id =
  58. builder->GetUintConstantId(var2desc_set_[var_id] + 1);
  59. return GenDebugDirectRead({desc_set_idx_id, binding_idx_id, u_desc_idx_id},
  60. builder);
  61. } else {
  62. uint32_t desc_set_base_id =
  63. builder->GetUintConstantId(kDebugInputBindlessInitOffset);
  64. uint32_t desc_set_idx_id =
  65. builder->GetUintConstantId(var2desc_set_[var_id]);
  66. return GenDebugDirectRead(
  67. {desc_set_base_id, desc_set_idx_id, binding_idx_id, u_desc_idx_id},
  68. builder);
  69. }
  70. }
  71. uint32_t InstBindlessCheckPass::CloneOriginalImage(
  72. uint32_t old_image_id, InstructionBuilder* builder) {
  73. Instruction* new_image_inst;
  74. Instruction* old_image_inst = get_def_use_mgr()->GetDef(old_image_id);
  75. if (old_image_inst->opcode() == spv::Op::OpLoad) {
  76. new_image_inst = builder->AddLoad(
  77. old_image_inst->type_id(),
  78. old_image_inst->GetSingleWordInOperand(kSpvLoadPtrIdInIdx));
  79. } else if (old_image_inst->opcode() == spv::Op::OpSampledImage) {
  80. uint32_t clone_id = CloneOriginalImage(
  81. old_image_inst->GetSingleWordInOperand(kSpvSampledImageImageIdInIdx),
  82. builder);
  83. new_image_inst = builder->AddBinaryOp(
  84. old_image_inst->type_id(), spv::Op::OpSampledImage, clone_id,
  85. old_image_inst->GetSingleWordInOperand(kSpvSampledImageSamplerIdInIdx));
  86. } else if (old_image_inst->opcode() == spv::Op::OpImage) {
  87. uint32_t clone_id = CloneOriginalImage(
  88. old_image_inst->GetSingleWordInOperand(kSpvImageSampledImageIdInIdx),
  89. builder);
  90. new_image_inst = builder->AddUnaryOp(old_image_inst->type_id(),
  91. spv::Op::OpImage, clone_id);
  92. } else {
  93. assert(old_image_inst->opcode() == spv::Op::OpCopyObject &&
  94. "expecting OpCopyObject");
  95. uint32_t clone_id = CloneOriginalImage(
  96. old_image_inst->GetSingleWordInOperand(kSpvCopyObjectOperandIdInIdx),
  97. builder);
  98. // Since we are cloning, no need to create new copy
  99. new_image_inst = get_def_use_mgr()->GetDef(clone_id);
  100. }
  101. uid2offset_[new_image_inst->unique_id()] =
  102. uid2offset_[old_image_inst->unique_id()];
  103. uint32_t new_image_id = new_image_inst->result_id();
  104. get_decoration_mgr()->CloneDecorations(old_image_id, new_image_id);
  105. return new_image_id;
  106. }
  107. uint32_t InstBindlessCheckPass::CloneOriginalReference(
  108. RefAnalysis* ref, InstructionBuilder* builder) {
  109. // If original is image based, start by cloning descriptor load
  110. uint32_t new_image_id = 0;
  111. if (ref->desc_load_id != 0) {
  112. uint32_t old_image_id =
  113. ref->ref_inst->GetSingleWordInOperand(kSpvImageSampleImageIdInIdx);
  114. new_image_id = CloneOriginalImage(old_image_id, builder);
  115. }
  116. // Clone original reference
  117. std::unique_ptr<Instruction> new_ref_inst(ref->ref_inst->Clone(context()));
  118. uint32_t ref_result_id = ref->ref_inst->result_id();
  119. uint32_t new_ref_id = 0;
  120. if (ref_result_id != 0) {
  121. new_ref_id = TakeNextId();
  122. new_ref_inst->SetResultId(new_ref_id);
  123. }
  124. // Update new ref with new image if created
  125. if (new_image_id != 0)
  126. new_ref_inst->SetInOperand(kSpvImageSampleImageIdInIdx, {new_image_id});
  127. // Register new reference and add to new block
  128. Instruction* added_inst = builder->AddInstruction(std::move(new_ref_inst));
  129. uid2offset_[added_inst->unique_id()] =
  130. uid2offset_[ref->ref_inst->unique_id()];
  131. if (new_ref_id != 0)
  132. get_decoration_mgr()->CloneDecorations(ref_result_id, new_ref_id);
  133. return new_ref_id;
  134. }
  135. uint32_t InstBindlessCheckPass::GetImageId(Instruction* inst) {
  136. switch (inst->opcode()) {
  137. case spv::Op::OpImageSampleImplicitLod:
  138. case spv::Op::OpImageSampleExplicitLod:
  139. case spv::Op::OpImageSampleDrefImplicitLod:
  140. case spv::Op::OpImageSampleDrefExplicitLod:
  141. case spv::Op::OpImageSampleProjImplicitLod:
  142. case spv::Op::OpImageSampleProjExplicitLod:
  143. case spv::Op::OpImageSampleProjDrefImplicitLod:
  144. case spv::Op::OpImageSampleProjDrefExplicitLod:
  145. case spv::Op::OpImageGather:
  146. case spv::Op::OpImageDrefGather:
  147. case spv::Op::OpImageQueryLod:
  148. case spv::Op::OpImageSparseSampleImplicitLod:
  149. case spv::Op::OpImageSparseSampleExplicitLod:
  150. case spv::Op::OpImageSparseSampleDrefImplicitLod:
  151. case spv::Op::OpImageSparseSampleDrefExplicitLod:
  152. case spv::Op::OpImageSparseSampleProjImplicitLod:
  153. case spv::Op::OpImageSparseSampleProjExplicitLod:
  154. case spv::Op::OpImageSparseSampleProjDrefImplicitLod:
  155. case spv::Op::OpImageSparseSampleProjDrefExplicitLod:
  156. case spv::Op::OpImageSparseGather:
  157. case spv::Op::OpImageSparseDrefGather:
  158. case spv::Op::OpImageFetch:
  159. case spv::Op::OpImageRead:
  160. case spv::Op::OpImageQueryFormat:
  161. case spv::Op::OpImageQueryOrder:
  162. case spv::Op::OpImageQuerySizeLod:
  163. case spv::Op::OpImageQuerySize:
  164. case spv::Op::OpImageQueryLevels:
  165. case spv::Op::OpImageQuerySamples:
  166. case spv::Op::OpImageSparseFetch:
  167. case spv::Op::OpImageSparseRead:
  168. case spv::Op::OpImageWrite:
  169. return inst->GetSingleWordInOperand(kSpvImageSampleImageIdInIdx);
  170. default:
  171. break;
  172. }
  173. return 0;
  174. }
  175. Instruction* InstBindlessCheckPass::GetPointeeTypeInst(Instruction* ptr_inst) {
  176. uint32_t pte_ty_id = GetPointeeTypeId(ptr_inst);
  177. return get_def_use_mgr()->GetDef(pte_ty_id);
  178. }
// Analyze |ref_inst| and, if it is a descriptor reference this pass can
// instrument, fill in |ref| (pointer, variable, descriptor index, storage
// class, descriptor load) and return true. Returns false for references the
// pass does not handle. Two forms are recognized: direct buffer loads/stores
// through an access chain, and image accesses whose image traces back to a
// descriptor load.
bool InstBindlessCheckPass::AnalyzeDescriptorReference(Instruction* ref_inst,
                                                       RefAnalysis* ref) {
  ref->ref_inst = ref_inst;
  if (ref_inst->opcode() == spv::Op::OpLoad ||
      ref_inst->opcode() == spv::Op::OpStore) {
    // Buffer path: expect OpLoad/OpStore -> OpAccessChain -> OpVariable.
    ref->desc_load_id = 0;
    ref->ptr_id = ref_inst->GetSingleWordInOperand(kSpvLoadPtrIdInIdx);
    Instruction* ptr_inst = get_def_use_mgr()->GetDef(ref->ptr_id);
    if (ptr_inst->opcode() != spv::Op::OpAccessChain) return false;
    ref->var_id = ptr_inst->GetSingleWordInOperand(kSpvAccessChainBaseIdInIdx);
    Instruction* var_inst = get_def_use_mgr()->GetDef(ref->var_id);
    if (var_inst->opcode() != spv::Op::OpVariable) return false;
    spv::StorageClass storage_class = spv::StorageClass(
        var_inst->GetSingleWordInOperand(kSpvVariableStorageClassInIdx));
    // Only Uniform (UBO) and StorageBuffer (SSBO) references are handled.
    switch (storage_class) {
      case spv::StorageClass::Uniform:
      case spv::StorageClass::StorageBuffer:
        break;
      default:
        return false;
        break;
    }
    // Check for deprecated storage block form
    if (storage_class == spv::StorageClass::Uniform) {
      uint32_t var_ty_id = var_inst->type_id();
      Instruction* var_ty_inst = get_def_use_mgr()->GetDef(var_ty_id);
      uint32_t ptr_ty_id =
          var_ty_inst->GetSingleWordInOperand(kSpvTypePtrTypeIdInIdx);
      Instruction* ptr_ty_inst = get_def_use_mgr()->GetDef(ptr_ty_id);
      spv::Op ptr_ty_op = ptr_ty_inst->opcode();
      // If the variable is an array of blocks, look through to the element
      // type; otherwise the pointee itself is the block type.
      uint32_t block_ty_id =
          (ptr_ty_op == spv::Op::OpTypeArray ||
           ptr_ty_op == spv::Op::OpTypeRuntimeArray)
              ? ptr_ty_inst->GetSingleWordInOperand(kSpvTypeArrayTypeIdInIdx)
              : ptr_ty_id;
      assert(get_def_use_mgr()->GetDef(block_ty_id)->opcode() ==
                 spv::Op::OpTypeStruct &&
             "unexpected block type");
      bool block_found = get_decoration_mgr()->FindDecoration(
          block_ty_id, uint32_t(spv::Decoration::Block),
          [](const Instruction&) { return true; });
      if (!block_found) {
        // If block decoration not found, verify deprecated form of SSBO
        bool buffer_block_found = get_decoration_mgr()->FindDecoration(
            block_ty_id, uint32_t(spv::Decoration::BufferBlock),
            [](const Instruction&) { return true; });
        USE_ASSERT(buffer_block_found && "block decoration not found");
        // BufferBlock-decorated Uniform is semantically a storage buffer.
        storage_class = spv::StorageClass::StorageBuffer;
      }
    }
    ref->strg_class = uint32_t(storage_class);
    Instruction* desc_type_inst = GetPointeeTypeInst(var_inst);
    switch (desc_type_inst->opcode()) {
      case spv::Op::OpTypeArray:
      case spv::Op::OpTypeRuntimeArray:
        // A load through a descriptor array will have at least 3 operands. We
        // do not want to instrument loads of descriptors here which are part of
        // an image-based reference.
        if (ptr_inst->NumInOperands() < 3) return false;
        ref->desc_idx_id =
            ptr_inst->GetSingleWordInOperand(kSpvAccessChainIndex0IdInIdx);
        break;
      default:
        // Not a descriptor array: no runtime descriptor index.
        ref->desc_idx_id = 0;
        break;
    }
    return true;
  }
  // Reference is not load or store. If not an image-based reference, return.
  ref->image_id = GetImageId(ref_inst);
  if (ref->image_id == 0) return false;
  // Search for descriptor load: peel SampledImage/Image/CopyObject wrappers
  // off the image operand until the underlying instruction is reached.
  uint32_t desc_load_id = ref->image_id;
  Instruction* desc_load_inst;
  for (;;) {
    desc_load_inst = get_def_use_mgr()->GetDef(desc_load_id);
    if (desc_load_inst->opcode() == spv::Op::OpSampledImage)
      desc_load_id =
          desc_load_inst->GetSingleWordInOperand(kSpvSampledImageImageIdInIdx);
    else if (desc_load_inst->opcode() == spv::Op::OpImage)
      desc_load_id =
          desc_load_inst->GetSingleWordInOperand(kSpvImageSampledImageIdInIdx);
    else if (desc_load_inst->opcode() == spv::Op::OpCopyObject)
      desc_load_id =
          desc_load_inst->GetSingleWordInOperand(kSpvCopyObjectOperandIdInIdx);
    else
      break;
  }
  if (desc_load_inst->opcode() != spv::Op::OpLoad) {
    // TODO(greg-lunarg): Handle additional possibilities?
    return false;
  }
  ref->desc_load_id = desc_load_id;
  ref->ptr_id = desc_load_inst->GetSingleWordInOperand(kSpvLoadPtrIdInIdx);
  Instruction* ptr_inst = get_def_use_mgr()->GetDef(ref->ptr_id);
  if (ptr_inst->opcode() == spv::Op::OpVariable) {
    // Load directly from the variable: no descriptor index.
    ref->desc_idx_id = 0;
    ref->var_id = ref->ptr_id;
  } else if (ptr_inst->opcode() == spv::Op::OpAccessChain) {
    // Indexed descriptor: expect exactly base + one index.
    if (ptr_inst->NumInOperands() != 2) {
      assert(false && "unexpected bindless index number");
      return false;
    }
    ref->desc_idx_id =
        ptr_inst->GetSingleWordInOperand(kSpvAccessChainIndex0IdInIdx);
    ref->var_id = ptr_inst->GetSingleWordInOperand(kSpvAccessChainBaseIdInIdx);
    Instruction* var_inst = get_def_use_mgr()->GetDef(ref->var_id);
    if (var_inst->opcode() != spv::Op::OpVariable) {
      assert(false && "unexpected bindless base");
      return false;
    }
  } else {
    // TODO(greg-lunarg): Handle additional possibilities?
    return false;
  }
  return true;
}
  296. uint32_t InstBindlessCheckPass::FindStride(uint32_t ty_id,
  297. uint32_t stride_deco) {
  298. uint32_t stride = 0xdeadbeef;
  299. bool found = get_decoration_mgr()->FindDecoration(
  300. ty_id, stride_deco, [&stride](const Instruction& deco_inst) {
  301. stride = deco_inst.GetSingleWordInOperand(2u);
  302. return true;
  303. });
  304. USE_ASSERT(found && "stride not found");
  305. return stride;
  306. }
  307. uint32_t InstBindlessCheckPass::ByteSize(uint32_t ty_id, uint32_t matrix_stride,
  308. bool col_major, bool in_matrix) {
  309. analysis::TypeManager* type_mgr = context()->get_type_mgr();
  310. const analysis::Type* sz_ty = type_mgr->GetType(ty_id);
  311. if (sz_ty->kind() == analysis::Type::kPointer) {
  312. // Assuming PhysicalStorageBuffer pointer
  313. return 8;
  314. }
  315. if (sz_ty->kind() == analysis::Type::kMatrix) {
  316. assert(matrix_stride != 0 && "missing matrix stride");
  317. const analysis::Matrix* m_ty = sz_ty->AsMatrix();
  318. if (col_major) {
  319. return m_ty->element_count() * matrix_stride;
  320. } else {
  321. const analysis::Vector* v_ty = m_ty->element_type()->AsVector();
  322. return v_ty->element_count() * matrix_stride;
  323. }
  324. }
  325. uint32_t size = 1;
  326. if (sz_ty->kind() == analysis::Type::kVector) {
  327. const analysis::Vector* v_ty = sz_ty->AsVector();
  328. size = v_ty->element_count();
  329. const analysis::Type* comp_ty = v_ty->element_type();
  330. // if vector in row major matrix, the vector is strided so return the
  331. // number of bytes spanned by the vector
  332. if (in_matrix && !col_major && matrix_stride > 0) {
  333. uint32_t comp_ty_id = type_mgr->GetId(comp_ty);
  334. return (size - 1) * matrix_stride + ByteSize(comp_ty_id, 0, false, false);
  335. }
  336. sz_ty = comp_ty;
  337. }
  338. switch (sz_ty->kind()) {
  339. case analysis::Type::kFloat: {
  340. const analysis::Float* f_ty = sz_ty->AsFloat();
  341. size *= f_ty->width();
  342. } break;
  343. case analysis::Type::kInteger: {
  344. const analysis::Integer* i_ty = sz_ty->AsInteger();
  345. size *= i_ty->width();
  346. } break;
  347. default: { assert(false && "unexpected type"); } break;
  348. }
  349. size /= 8;
  350. return size;
  351. }
// Generate code computing the buffer byte offset of the LAST byte touched by
// the reference in |ref|, by walking its access chain indices against the
// layout decorations (Offset, ArrayStride, MatrixStride, ColMajor) of the
// types traversed. Returns the id of the computed 32-bit offset value.
uint32_t InstBindlessCheckPass::GenLastByteIdx(RefAnalysis* ref,
                                               InstructionBuilder* builder) {
  // Find outermost buffer type and its access chain index
  Instruction* var_inst = get_def_use_mgr()->GetDef(ref->var_id);
  Instruction* desc_ty_inst = GetPointeeTypeInst(var_inst);
  uint32_t buff_ty_id;
  uint32_t ac_in_idx = 1;
  switch (desc_ty_inst->opcode()) {
    case spv::Op::OpTypeArray:
    case spv::Op::OpTypeRuntimeArray:
      // Descriptor array: index 0 selects the descriptor, so the buffer walk
      // starts at the element type and at the second chain index.
      buff_ty_id = desc_ty_inst->GetSingleWordInOperand(0);
      ++ac_in_idx;
      break;
    default:
      assert(desc_ty_inst->opcode() == spv::Op::OpTypeStruct &&
             "unexpected descriptor type");
      buff_ty_id = desc_ty_inst->result_id();
      break;
  }
  // Process remaining access chain indices
  Instruction* ac_inst = get_def_use_mgr()->GetDef(ref->ptr_id);
  uint32_t curr_ty_id = buff_ty_id;
  uint32_t sum_id = 0u;            // running byte offset (0 = none yet)
  uint32_t matrix_stride = 0u;     // stride from enclosing struct member
  bool col_major = false;          // majorness from enclosing struct member
  uint32_t matrix_stride_id = 0u;
  bool in_matrix = false;          // true once a matrix has been traversed
  while (ac_in_idx < ac_inst->NumInOperands()) {
    uint32_t curr_idx_id = ac_inst->GetSingleWordInOperand(ac_in_idx);
    Instruction* curr_ty_inst = get_def_use_mgr()->GetDef(curr_ty_id);
    uint32_t curr_offset_id = 0;
    switch (curr_ty_inst->opcode()) {
      case spv::Op::OpTypeArray:
      case spv::Op::OpTypeRuntimeArray: {
        // Get array stride and multiply by current index
        uint32_t arr_stride =
            FindStride(curr_ty_id, uint32_t(spv::Decoration::ArrayStride));
        uint32_t arr_stride_id = builder->GetUintConstantId(arr_stride);
        uint32_t curr_idx_32b_id = Gen32BitCvtCode(curr_idx_id, builder);
        Instruction* curr_offset_inst = builder->AddBinaryOp(
            GetUintId(), spv::Op::OpIMul, arr_stride_id, curr_idx_32b_id);
        curr_offset_id = curr_offset_inst->result_id();
        // Get element type for next step
        curr_ty_id = curr_ty_inst->GetSingleWordInOperand(0);
      } break;
      case spv::Op::OpTypeMatrix: {
        assert(matrix_stride != 0 && "missing matrix stride");
        matrix_stride_id = builder->GetUintConstantId(matrix_stride);
        uint32_t vec_ty_id = curr_ty_inst->GetSingleWordInOperand(0);
        // If column major, multiply column index by matrix stride, otherwise
        // by vector component size and save matrix stride for vector (row)
        // index
        uint32_t col_stride_id;
        if (col_major) {
          col_stride_id = matrix_stride_id;
        } else {
          Instruction* vec_ty_inst = get_def_use_mgr()->GetDef(vec_ty_id);
          uint32_t comp_ty_id = vec_ty_inst->GetSingleWordInOperand(0u);
          uint32_t col_stride = ByteSize(comp_ty_id, 0u, false, false);
          col_stride_id = builder->GetUintConstantId(col_stride);
        }
        uint32_t curr_idx_32b_id = Gen32BitCvtCode(curr_idx_id, builder);
        Instruction* curr_offset_inst = builder->AddBinaryOp(
            GetUintId(), spv::Op::OpIMul, col_stride_id, curr_idx_32b_id);
        curr_offset_id = curr_offset_inst->result_id();
        // Get element type for next step
        curr_ty_id = vec_ty_id;
        in_matrix = true;
      } break;
      case spv::Op::OpTypeVector: {
        // If inside a row major matrix type, multiply index by matrix stride,
        // else multiply by component size
        uint32_t comp_ty_id = curr_ty_inst->GetSingleWordInOperand(0u);
        uint32_t curr_idx_32b_id = Gen32BitCvtCode(curr_idx_id, builder);
        if (in_matrix && !col_major) {
          Instruction* curr_offset_inst = builder->AddBinaryOp(
              GetUintId(), spv::Op::OpIMul, matrix_stride_id, curr_idx_32b_id);
          curr_offset_id = curr_offset_inst->result_id();
        } else {
          uint32_t comp_ty_sz = ByteSize(comp_ty_id, 0u, false, false);
          uint32_t comp_ty_sz_id = builder->GetUintConstantId(comp_ty_sz);
          Instruction* curr_offset_inst = builder->AddBinaryOp(
              GetUintId(), spv::Op::OpIMul, comp_ty_sz_id, curr_idx_32b_id);
          curr_offset_id = curr_offset_inst->result_id();
        }
        // Get element type for next step
        curr_ty_id = comp_ty_id;
      } break;
      case spv::Op::OpTypeStruct: {
        // Get buffer byte offset for the referenced member
        Instruction* curr_idx_inst = get_def_use_mgr()->GetDef(curr_idx_id);
        assert(curr_idx_inst->opcode() == spv::Op::OpConstant &&
               "unexpected struct index");
        uint32_t member_idx = curr_idx_inst->GetSingleWordInOperand(0);
        uint32_t member_offset = 0xdeadbeef;
        bool found = get_decoration_mgr()->FindDecoration(
            curr_ty_id, uint32_t(spv::Decoration::Offset),
            [&member_idx, &member_offset](const Instruction& deco_inst) {
              if (deco_inst.GetSingleWordInOperand(1u) != member_idx)
                return false;
              member_offset = deco_inst.GetSingleWordInOperand(3u);
              return true;
            });
        USE_ASSERT(found && "member offset not found");
        curr_offset_id = builder->GetUintConstantId(member_offset);
        // Look for matrix stride for this member if there is one. The matrix
        // stride is not on the matrix type, but in a OpMemberDecorate on the
        // enclosing struct type at the member index. If none found, reset
        // stride to 0.
        found = get_decoration_mgr()->FindDecoration(
            curr_ty_id, uint32_t(spv::Decoration::MatrixStride),
            [&member_idx, &matrix_stride](const Instruction& deco_inst) {
              if (deco_inst.GetSingleWordInOperand(1u) != member_idx)
                return false;
              matrix_stride = deco_inst.GetSingleWordInOperand(3u);
              return true;
            });
        if (!found) matrix_stride = 0;
        // Look for column major decoration
        found = get_decoration_mgr()->FindDecoration(
            curr_ty_id, uint32_t(spv::Decoration::ColMajor),
            [&member_idx, &col_major](const Instruction& deco_inst) {
              if (deco_inst.GetSingleWordInOperand(1u) != member_idx)
                return false;
              col_major = true;
              return true;
            });
        if (!found) col_major = false;
        // Get element type for next step
        curr_ty_id = curr_ty_inst->GetSingleWordInOperand(member_idx);
      } break;
      default: { assert(false && "unexpected non-composite type"); } break;
    }
    // Accumulate this level's byte offset into the running sum.
    if (sum_id == 0)
      sum_id = curr_offset_id;
    else {
      Instruction* sum_inst = builder->AddBinaryOp(GetUintId(), spv::Op::OpIAdd,
                                                   sum_id, curr_offset_id);
      sum_id = sum_inst->result_id();
    }
    ++ac_in_idx;
  }
  // Add in offset of last byte of referenced object
  uint32_t bsize = ByteSize(curr_ty_id, matrix_stride, col_major, in_matrix);
  uint32_t last = bsize - 1;
  uint32_t last_id = builder->GetUintConstantId(last);
  Instruction* sum_inst =
      builder->AddBinaryOp(GetUintId(), spv::Op::OpIAdd, sum_id, last_id);
  return sum_inst->result_id();
}
// Generate the runtime-check control flow for a descriptor reference:
// branch on |check_id|; the valid block re-executes a clone of the original
// reference, the invalid block writes an error record (|error_id| with
// |offset_id|/|length_id| details) to the debug output stream, and a merge
// block Phis the clone's result against zero. The original reference
// instruction is killed. New blocks are appended to |new_blocks|.
void InstBindlessCheckPass::GenCheckCode(
    uint32_t check_id, uint32_t error_id, uint32_t offset_id,
    uint32_t length_id, uint32_t stage_idx, RefAnalysis* ref,
    std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  BasicBlock* back_blk_ptr = &*new_blocks->back();
  InstructionBuilder builder(
      context(), back_blk_ptr,
      IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
  // Gen conditional branch on check_id. Valid branch generates original
  // reference. Invalid generates debug output and zero result (if needed).
  uint32_t merge_blk_id = TakeNextId();
  uint32_t valid_blk_id = TakeNextId();
  uint32_t invalid_blk_id = TakeNextId();
  std::unique_ptr<Instruction> merge_label(NewLabel(merge_blk_id));
  std::unique_ptr<Instruction> valid_label(NewLabel(valid_blk_id));
  std::unique_ptr<Instruction> invalid_label(NewLabel(invalid_blk_id));
  (void)builder.AddConditionalBranch(
      check_id, valid_blk_id, invalid_blk_id, merge_blk_id,
      uint32_t(spv::SelectionControlMask::MaskNone));
  // Gen valid bounds branch
  std::unique_ptr<BasicBlock> new_blk_ptr(
      new BasicBlock(std::move(valid_label)));
  builder.SetInsertPoint(&*new_blk_ptr);
  uint32_t new_ref_id = CloneOriginalReference(ref, &builder);
  (void)builder.AddBranch(merge_blk_id);
  new_blocks->push_back(std::move(new_blk_ptr));
  // Gen invalid block
  new_blk_ptr.reset(new BasicBlock(std::move(invalid_label)));
  builder.SetInsertPoint(&*new_blk_ptr);
  uint32_t u_index_id = GenUintCastCode(ref->desc_idx_id, &builder);
  // The error-record payload shape depends on the error mode; all writes go
  // through the same stream-write helper.
  if (offset_id != 0) {
    // Buffer OOB
    uint32_t u_offset_id = GenUintCastCode(offset_id, &builder);
    uint32_t u_length_id = GenUintCastCode(length_id, &builder);
    GenDebugStreamWrite(uid2offset_[ref->ref_inst->unique_id()], stage_idx,
                        {error_id, u_index_id, u_offset_id, u_length_id},
                        &builder);
  } else if (buffer_bounds_enabled_ || texel_buffer_enabled_) {
    // Uninitialized Descriptor - Return additional unused zero so all error
    // modes will use same debug stream write function
    uint32_t u_length_id = GenUintCastCode(length_id, &builder);
    GenDebugStreamWrite(
        uid2offset_[ref->ref_inst->unique_id()], stage_idx,
        {error_id, u_index_id, u_length_id, builder.GetUintConstantId(0)},
        &builder);
  } else {
    // Uninitialized Descriptor - Normal error return
    uint32_t u_length_id = GenUintCastCode(length_id, &builder);
    GenDebugStreamWrite(uid2offset_[ref->ref_inst->unique_id()], stage_idx,
                        {error_id, u_index_id, u_length_id}, &builder);
  }
  // Remember last invalid block id
  uint32_t last_invalid_blk_id = new_blk_ptr->GetLabelInst()->result_id();
  // Gen zero for invalid reference
  uint32_t ref_type_id = ref->ref_inst->type_id();
  (void)builder.AddBranch(merge_blk_id);
  new_blocks->push_back(std::move(new_blk_ptr));
  // Gen merge block
  new_blk_ptr.reset(new BasicBlock(std::move(merge_label)));
  builder.SetInsertPoint(&*new_blk_ptr);
  // Gen phi of new reference and zero, if necessary, and replace the
  // result id of the original reference with that of the Phi. Kill original
  // reference.
  if (new_ref_id != 0) {
    Instruction* phi_inst = builder.AddPhi(
        ref_type_id, {new_ref_id, valid_blk_id, GetNullId(ref_type_id),
                      last_invalid_blk_id});
    context()->ReplaceAllUsesWith(ref->ref_inst->result_id(),
                                  phi_inst->result_id());
  }
  new_blocks->push_back(std::move(new_blk_ptr));
  context()->KillInst(ref->ref_inst);
}
// Instrument a reference through an indexed descriptor array with a runtime
// bounds check on the descriptor index. If the index and array length are
// both compile-time constants and in bounds, the reference is left alone.
// For runtime-sized arrays the length is read from the debug input buffer.
void InstBindlessCheckPass::GenDescIdxCheckCode(
    BasicBlock::iterator ref_inst_itr,
    UptrVectorIterator<BasicBlock> ref_block_itr, uint32_t stage_idx,
    std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  // Look for reference through indexed descriptor. If found, analyze and
  // save components. If not, return.
  RefAnalysis ref;
  if (!AnalyzeDescriptorReference(&*ref_inst_itr, &ref)) return;
  Instruction* ptr_inst = get_def_use_mgr()->GetDef(ref.ptr_id);
  if (ptr_inst->opcode() != spv::Op::OpAccessChain) return;
  // If index and bound both compile-time constants and index < bound,
  // return without changing
  Instruction* var_inst = get_def_use_mgr()->GetDef(ref.var_id);
  Instruction* desc_type_inst = GetPointeeTypeInst(var_inst);
  uint32_t length_id = 0;
  if (desc_type_inst->opcode() == spv::Op::OpTypeArray) {
    length_id =
        desc_type_inst->GetSingleWordInOperand(kSpvTypeArrayLengthIdInIdx);
    Instruction* index_inst = get_def_use_mgr()->GetDef(ref.desc_idx_id);
    Instruction* length_inst = get_def_use_mgr()->GetDef(length_id);
    if (index_inst->opcode() == spv::Op::OpConstant &&
        length_inst->opcode() == spv::Op::OpConstant &&
        index_inst->GetSingleWordInOperand(kSpvConstantValueInIdx) <
            length_inst->GetSingleWordInOperand(kSpvConstantValueInIdx))
      return;
  } else if (!desc_idx_enabled_ ||
             desc_type_inst->opcode() != spv::Op::OpTypeRuntimeArray) {
    // Runtime arrays are only checked when desc-index checking is enabled.
    return;
  }
  // Move original block's preceding instructions into first new block
  std::unique_ptr<BasicBlock> new_blk_ptr;
  MovePreludeCode(ref_inst_itr, ref_block_itr, &new_blk_ptr);
  InstructionBuilder builder(
      context(), &*new_blk_ptr,
      IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
  new_blocks->push_back(std::move(new_blk_ptr));
  uint32_t error_id = builder.GetUintConstantId(kInstErrorBindlessBounds);
  // If length id not yet set, descriptor array is runtime size so
  // generate load of length from stage's debug input buffer.
  if (length_id == 0) {
    assert(desc_type_inst->opcode() == spv::Op::OpTypeRuntimeArray &&
           "unexpected bindless type");
    length_id = GenDebugReadLength(ref.var_id, &builder);
  }
  // Generate full runtime bounds test code with true branch
  // being full reference and false branch being debug output and zero
  // for the referenced value.
  uint32_t desc_idx_32b_id = Gen32BitCvtCode(ref.desc_idx_id, &builder);
  uint32_t length_32b_id = Gen32BitCvtCode(length_id, &builder);
  Instruction* ult_inst = builder.AddBinaryOp(GetBoolId(), spv::Op::OpULessThan,
                                              desc_idx_32b_id, length_32b_id);
  ref.desc_idx_id = desc_idx_32b_id;
  GenCheckCode(ult_inst->result_id(), error_id, 0u, length_id, stage_idx, &ref,
               new_blocks);
  // Move original block's remaining code into remainder/merge block and add
  // to new blocks
  BasicBlock* back_blk_ptr = &*new_blocks->back();
  MovePostludeCode(ref_block_itr, back_blk_ptr);
}
// Instruments the descriptor reference at |ref_inst_itr| (in the block at
// |ref_block_itr|) with a runtime check that the descriptor has been
// initialized and, when possible, that a buffer access is within bounds.
// The original block is rewritten into the sequence appended to
// |new_blocks|: prelude, the runtime test, a conditional branch whose true
// path performs the original reference and whose false path emits a debug
// record (tagged with |stage_idx|) and produces zero, then the postlude.
void InstBindlessCheckPass::GenDescInitCheckCode(
    BasicBlock::iterator ref_inst_itr,
    UptrVectorIterator<BasicBlock> ref_block_itr, uint32_t stage_idx,
    std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  // Look for reference through descriptor. If not, return.
  RefAnalysis ref;
  if (!AnalyzeDescriptorReference(&*ref_inst_itr, &ref)) return;
  // Determine if we can only do initialization check
  bool init_check = false;
  if (ref.desc_load_id != 0 || !buffer_bounds_enabled_) {
    init_check = true;
  } else {
    // For now, only do bounds check for non-aggregate types. Otherwise
    // just do descriptor initialization check.
    // TODO(greg-lunarg): Do bounds check for aggregate loads and stores
    Instruction* ref_ptr_inst = get_def_use_mgr()->GetDef(ref.ptr_id);
    Instruction* pte_type_inst = GetPointeeTypeInst(ref_ptr_inst);
    spv::Op pte_type_op = pte_type_inst->opcode();
    if (pte_type_op == spv::Op::OpTypeArray ||
        pte_type_op == spv::Op::OpTypeRuntimeArray ||
        pte_type_op == spv::Op::OpTypeStruct)
      init_check = true;
  }
  // If initialization check and not enabled, return
  if (init_check && !desc_init_enabled_) return;
  // Move original block's preceding instructions into first new block
  std::unique_ptr<BasicBlock> new_blk_ptr;
  MovePreludeCode(ref_inst_itr, ref_block_itr, &new_blk_ptr);
  InstructionBuilder builder(
      context(), &*new_blk_ptr,
      IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
  new_blocks->push_back(std::move(new_blk_ptr));
  // If initialization check, use reference value of zero.
  // Else use the index of the last byte referenced.
  uint32_t ref_id = init_check ? builder.GetUintConstantId(0u)
                               : GenLastByteIdx(&ref, &builder);
  // Read initialization/bounds from debug input buffer. If index id not yet
  // set, binding is single descriptor, so set index to constant 0.
  if (ref.desc_idx_id == 0) ref.desc_idx_id = builder.GetUintConstantId(0u);
  uint32_t init_id = GenDebugReadInit(ref.var_id, ref.desc_idx_id, &builder);
  // Generate runtime initialization/bounds test code with true branch
  // being full reference and false branch being debug output and zero
  // for the referenced value. The reference passes when ref_id < init_id
  // (for an init check: 0 < length, i.e. descriptor is written; for a
  // bounds check: last byte index < buffer length).
  Instruction* ult_inst =
      builder.AddBinaryOp(GetBoolId(), spv::Op::OpULessThan, ref_id, init_id);
  // Select the error code recorded on failure: uninitialized descriptor,
  // or out-of-bounds access on a uniform vs. storage buffer.
  uint32_t error =
      init_check
          ? kInstErrorBindlessUninit
          : (spv::StorageClass(ref.strg_class) == spv::StorageClass::Uniform
                 ? kInstErrorBuffOOBUniform
                 : kInstErrorBuffOOBStorage);
  uint32_t error_id = builder.GetUintConstantId(error);
  GenCheckCode(ult_inst->result_id(), error_id, init_check ? 0 : ref_id,
               init_check ? builder.GetUintConstantId(0u) : init_id, stage_idx,
               &ref, new_blocks);
  // Move original block's remaining code into remainder/merge block and add
  // to new blocks
  BasicBlock* back_blk_ptr = &*new_blocks->back();
  MovePostludeCode(ref_block_itr, back_blk_ptr);
}
// Instruments a texel-buffer image read/write/fetch at |ref_inst_itr| with a
// runtime coordinate bounds check against OpImageQuerySize. The original
// block (at |ref_block_itr|) is rewritten into the sequence appended to
// |new_blocks|: prelude, the bounds test, a conditional branch whose true
// path performs the original access and whose false path emits a debug
// record (tagged with |stage_idx|) and produces zero, then the postlude.
void InstBindlessCheckPass::GenTexBuffCheckCode(
    BasicBlock::iterator ref_inst_itr,
    UptrVectorIterator<BasicBlock> ref_block_itr, uint32_t stage_idx,
    std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  // Only process OpImageRead and OpImageWrite with no optional operands
  Instruction* ref_inst = &*ref_inst_itr;
  spv::Op op = ref_inst->opcode();
  uint32_t num_in_oprnds = ref_inst->NumInOperands();
  if (!((op == spv::Op::OpImageRead && num_in_oprnds == 2) ||
        (op == spv::Op::OpImageFetch && num_in_oprnds == 2) ||
        (op == spv::Op::OpImageWrite && num_in_oprnds == 3)))
    return;
  // Pull components from descriptor reference
  RefAnalysis ref;
  if (!AnalyzeDescriptorReference(ref_inst, &ref)) return;
  // Only process if image is texel buffer
  Instruction* image_inst = get_def_use_mgr()->GetDef(ref.image_id);
  uint32_t image_ty_id = image_inst->type_id();
  Instruction* image_ty_inst = get_def_use_mgr()->GetDef(image_ty_id);
  if (spv::Dim(image_ty_inst->GetSingleWordInOperand(kSpvTypeImageDim)) !=
      spv::Dim::Buffer) {
    return;
  }
  // Skip depth, arrayed, and multisampled images; only a plain buffer
  // image has the single-uint size/coordinate this check assumes.
  if (image_ty_inst->GetSingleWordInOperand(kSpvTypeImageDepth) != 0) return;
  if (image_ty_inst->GetSingleWordInOperand(kSpvTypeImageArrayed) != 0) return;
  if (image_ty_inst->GetSingleWordInOperand(kSpvTypeImageMS) != 0) return;
  // Enable ImageQuery Capability if not yet enabled, since the generated
  // code uses OpImageQuerySize. OpCapability has no type or result id,
  // hence the two zero operands to the Instruction constructor.
  if (!get_feature_mgr()->HasCapability(spv::Capability::ImageQuery)) {
    std::unique_ptr<Instruction> cap_image_query_inst(
        new Instruction(context(), spv::Op::OpCapability, 0, 0,
                        std::initializer_list<Operand>{
                            {SPV_OPERAND_TYPE_CAPABILITY,
                             {uint32_t(spv::Capability::ImageQuery)}}}));
    get_def_use_mgr()->AnalyzeInstDefUse(&*cap_image_query_inst);
    context()->AddCapability(std::move(cap_image_query_inst));
  }
  // Move original block's preceding instructions into first new block
  std::unique_ptr<BasicBlock> new_blk_ptr;
  MovePreludeCode(ref_inst_itr, ref_block_itr, &new_blk_ptr);
  InstructionBuilder builder(
      context(), &*new_blk_ptr,
      IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
  new_blocks->push_back(std::move(new_blk_ptr));
  // Get texel coordinate (in-operand 1 of the image instruction), cast to
  // unsigned for the comparison below.
  uint32_t coord_id =
      GenUintCastCode(ref_inst->GetSingleWordInOperand(1), &builder);
  // If index id not yet set, binding is single descriptor, so set index to
  // constant 0.
  if (ref.desc_idx_id == 0) ref.desc_idx_id = builder.GetUintConstantId(0u);
  // Get texel buffer size.
  Instruction* size_inst =
      builder.AddUnaryOp(GetUintId(), spv::Op::OpImageQuerySize, ref.image_id);
  uint32_t size_id = size_inst->result_id();
  // Generate runtime initialization/bounds test code with true branch
  // being full reference and false branch being debug output and zero
  // for the referenced value.
  Instruction* ult_inst =
      builder.AddBinaryOp(GetBoolId(), spv::Op::OpULessThan, coord_id, size_id);
  // Sampled == 2 marks a storage image per the SPIR-V spec; report the
  // matching storage vs. uniform texel-buffer OOB error code.
  uint32_t error =
      (image_ty_inst->GetSingleWordInOperand(kSpvTypeImageSampled) == 2)
          ? kInstErrorBuffOOBStorageTexel
          : kInstErrorBuffOOBUniformTexel;
  uint32_t error_id = builder.GetUintConstantId(error);
  GenCheckCode(ult_inst->result_id(), error_id, coord_id, size_id, stage_idx,
               &ref, new_blocks);
  // Move original block's remaining code into remainder/merge block and add
  // to new blocks
  BasicBlock* back_blk_ptr = &*new_blocks->back();
  MovePostludeCode(ref_block_itr, back_blk_ptr);
}
  764. void InstBindlessCheckPass::InitializeInstBindlessCheck() {
  765. // Initialize base class
  766. InitializeInstrument();
  767. // If runtime array length support or buffer bounds checking are enabled,
  768. // create variable mappings. Length support is always enabled if descriptor
  769. // init check is enabled.
  770. if (desc_idx_enabled_ || buffer_bounds_enabled_ || texel_buffer_enabled_)
  771. for (auto& anno : get_module()->annotations())
  772. if (anno.opcode() == spv::Op::OpDecorate) {
  773. if (spv::Decoration(anno.GetSingleWordInOperand(1u)) ==
  774. spv::Decoration::DescriptorSet) {
  775. var2desc_set_[anno.GetSingleWordInOperand(0u)] =
  776. anno.GetSingleWordInOperand(2u);
  777. } else if (spv::Decoration(anno.GetSingleWordInOperand(1u)) ==
  778. spv::Decoration::Binding) {
  779. var2binding_[anno.GetSingleWordInOperand(0u)] =
  780. anno.GetSingleWordInOperand(2u);
  781. }
  782. }
  783. }
  784. Pass::Status InstBindlessCheckPass::ProcessImpl() {
  785. // Perform bindless bounds check on each entry point function in module
  786. InstProcessFunction pfn =
  787. [this](BasicBlock::iterator ref_inst_itr,
  788. UptrVectorIterator<BasicBlock> ref_block_itr, uint32_t stage_idx,
  789. std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  790. return GenDescIdxCheckCode(ref_inst_itr, ref_block_itr, stage_idx,
  791. new_blocks);
  792. };
  793. bool modified = InstProcessEntryPointCallTree(pfn);
  794. if (desc_init_enabled_ || buffer_bounds_enabled_) {
  795. // Perform descriptor initialization and/or buffer bounds check on each
  796. // entry point function in module
  797. pfn = [this](BasicBlock::iterator ref_inst_itr,
  798. UptrVectorIterator<BasicBlock> ref_block_itr,
  799. uint32_t stage_idx,
  800. std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  801. return GenDescInitCheckCode(ref_inst_itr, ref_block_itr, stage_idx,
  802. new_blocks);
  803. };
  804. modified |= InstProcessEntryPointCallTree(pfn);
  805. }
  806. if (texel_buffer_enabled_) {
  807. // Perform texel buffer bounds check on each entry point function in
  808. // module. Generate after descriptor bounds and initialization checks.
  809. pfn = [this](BasicBlock::iterator ref_inst_itr,
  810. UptrVectorIterator<BasicBlock> ref_block_itr,
  811. uint32_t stage_idx,
  812. std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  813. return GenTexBuffCheckCode(ref_inst_itr, ref_block_itr, stage_idx,
  814. new_blocks);
  815. };
  816. modified |= InstProcessEntryPointCallTree(pfn);
  817. }
  818. return modified ? Status::SuccessWithChange : Status::SuccessWithoutChange;
  819. }
  820. Pass::Status InstBindlessCheckPass::Process() {
  821. InitializeInstBindlessCheck();
  822. return ProcessImpl();
  823. }
  824. } // namespace opt
  825. } // namespace spvtools