inst_bindless_check_pass.cpp 38 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850
  1. // Copyright (c) 2018 The Khronos Group Inc.
  2. // Copyright (c) 2018 Valve Corporation
  3. // Copyright (c) 2018 LunarG Inc.
  4. //
  5. // Licensed under the Apache License, Version 2.0 (the "License");
  6. // you may not use this file except in compliance with the License.
  7. // You may obtain a copy of the License at
  8. //
  9. // http://www.apache.org/licenses/LICENSE-2.0
  10. //
  11. // Unless required by applicable law or agreed to in writing, software
  12. // distributed under the License is distributed on an "AS IS" BASIS,
  13. // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14. // See the License for the specific language governing permissions and
  15. // limitations under the License.
  16. #include "inst_bindless_check_pass.h"
  17. namespace spvtools {
  18. namespace opt {
namespace {
// Input Operand Indices
//
// Word indices of the in-operands of interest on various SPIR-V
// instructions; used with Instruction::GetSingleWordInOperand(). Grouped by
// the instruction they index into.
constexpr int kSpvImageSampleImageIdInIdx = 0;
constexpr int kSpvSampledImageImageIdInIdx = 0;
constexpr int kSpvSampledImageSamplerIdInIdx = 1;
constexpr int kSpvImageSampledImageIdInIdx = 0;
constexpr int kSpvCopyObjectOperandIdInIdx = 0;
constexpr int kSpvLoadPtrIdInIdx = 0;
constexpr int kSpvAccessChainBaseIdInIdx = 0;
constexpr int kSpvAccessChainIndex0IdInIdx = 1;
constexpr int kSpvTypeArrayTypeIdInIdx = 0;
constexpr int kSpvTypeArrayLengthIdInIdx = 1;
constexpr int kSpvConstantValueInIdx = 0;
constexpr int kSpvVariableStorageClassInIdx = 0;
constexpr int kSpvTypePtrTypeIdInIdx = 1;
// OpTypeImage operand indices.
constexpr int kSpvTypeImageDim = 1;
constexpr int kSpvTypeImageDepth = 2;
constexpr int kSpvTypeImageArrayed = 3;
constexpr int kSpvTypeImageMS = 4;
constexpr int kSpvTypeImageSampled = 5;
}  // namespace
  40. uint32_t InstBindlessCheckPass::GenDebugReadLength(
  41. uint32_t var_id, InstructionBuilder* builder) {
  42. uint32_t desc_set_idx =
  43. var2desc_set_[var_id] + kDebugInputBindlessOffsetLengths;
  44. uint32_t desc_set_idx_id = builder->GetUintConstantId(desc_set_idx);
  45. uint32_t binding_idx_id = builder->GetUintConstantId(var2binding_[var_id]);
  46. return GenDebugDirectRead({desc_set_idx_id, binding_idx_id}, builder);
  47. }
  48. uint32_t InstBindlessCheckPass::GenDebugReadInit(uint32_t var_id,
  49. uint32_t desc_idx_id,
  50. InstructionBuilder* builder) {
  51. uint32_t binding_idx_id = builder->GetUintConstantId(var2binding_[var_id]);
  52. uint32_t u_desc_idx_id = GenUintCastCode(desc_idx_id, builder);
  53. // If desc index checking is not enabled, we know the offset of initialization
  54. // entries is 1, so we can avoid loading this value and just add 1 to the
  55. // descriptor set.
  56. if (!desc_idx_enabled_) {
  57. uint32_t desc_set_idx_id =
  58. builder->GetUintConstantId(var2desc_set_[var_id] + 1);
  59. return GenDebugDirectRead({desc_set_idx_id, binding_idx_id, u_desc_idx_id},
  60. builder);
  61. } else {
  62. uint32_t desc_set_base_id =
  63. builder->GetUintConstantId(kDebugInputBindlessInitOffset);
  64. uint32_t desc_set_idx_id =
  65. builder->GetUintConstantId(var2desc_set_[var_id]);
  66. return GenDebugDirectRead(
  67. {desc_set_base_id, desc_set_idx_id, binding_idx_id, u_desc_idx_id},
  68. builder);
  69. }
  70. }
  71. uint32_t InstBindlessCheckPass::CloneOriginalImage(
  72. uint32_t old_image_id, InstructionBuilder* builder) {
  73. Instruction* new_image_inst;
  74. Instruction* old_image_inst = get_def_use_mgr()->GetDef(old_image_id);
  75. if (old_image_inst->opcode() == spv::Op::OpLoad) {
  76. new_image_inst = builder->AddLoad(
  77. old_image_inst->type_id(),
  78. old_image_inst->GetSingleWordInOperand(kSpvLoadPtrIdInIdx));
  79. } else if (old_image_inst->opcode() == spv::Op::OpSampledImage) {
  80. uint32_t clone_id = CloneOriginalImage(
  81. old_image_inst->GetSingleWordInOperand(kSpvSampledImageImageIdInIdx),
  82. builder);
  83. new_image_inst = builder->AddBinaryOp(
  84. old_image_inst->type_id(), spv::Op::OpSampledImage, clone_id,
  85. old_image_inst->GetSingleWordInOperand(kSpvSampledImageSamplerIdInIdx));
  86. } else if (old_image_inst->opcode() == spv::Op::OpImage) {
  87. uint32_t clone_id = CloneOriginalImage(
  88. old_image_inst->GetSingleWordInOperand(kSpvImageSampledImageIdInIdx),
  89. builder);
  90. new_image_inst = builder->AddUnaryOp(old_image_inst->type_id(),
  91. spv::Op::OpImage, clone_id);
  92. } else {
  93. assert(old_image_inst->opcode() == spv::Op::OpCopyObject &&
  94. "expecting OpCopyObject");
  95. uint32_t clone_id = CloneOriginalImage(
  96. old_image_inst->GetSingleWordInOperand(kSpvCopyObjectOperandIdInIdx),
  97. builder);
  98. // Since we are cloning, no need to create new copy
  99. new_image_inst = get_def_use_mgr()->GetDef(clone_id);
  100. }
  101. uid2offset_[new_image_inst->unique_id()] =
  102. uid2offset_[old_image_inst->unique_id()];
  103. uint32_t new_image_id = new_image_inst->result_id();
  104. get_decoration_mgr()->CloneDecorations(old_image_id, new_image_id);
  105. return new_image_id;
  106. }
// Clone the original descriptor reference in |ref| into |builder| (the valid
// branch of the check). If the reference is image-based, the descriptor load
// chain is cloned first and spliced into the cloned reference. Returns the
// result id of the cloned reference, or 0 if the original produced no result
// (e.g. OpStore / OpImageWrite).
uint32_t InstBindlessCheckPass::CloneOriginalReference(
    RefAnalysis* ref, InstructionBuilder* builder) {
  // If original is image based, start by cloning descriptor load
  uint32_t new_image_id = 0;
  if (ref->desc_load_id != 0) {
    uint32_t old_image_id =
        ref->ref_inst->GetSingleWordInOperand(kSpvImageSampleImageIdInIdx);
    new_image_id = CloneOriginalImage(old_image_id, builder);
  }
  // Clone original reference
  std::unique_ptr<Instruction> new_ref_inst(ref->ref_inst->Clone(context()));
  uint32_t ref_result_id = ref->ref_inst->result_id();
  uint32_t new_ref_id = 0;
  if (ref_result_id != 0) {
    // The clone needs its own result id; the original's id will later be
    // replaced by the merge-block phi.
    new_ref_id = TakeNextId();
    new_ref_inst->SetResultId(new_ref_id);
  }
  // Update new ref with new image if created
  if (new_image_id != 0)
    new_ref_inst->SetInOperand(kSpvImageSampleImageIdInIdx, {new_image_id});
  // Register new reference and add to new block
  Instruction* added_inst = builder->AddInstruction(std::move(new_ref_inst));
  uid2offset_[added_inst->unique_id()] =
      uid2offset_[ref->ref_inst->unique_id()];
  if (new_ref_id != 0)
    get_decoration_mgr()->CloneDecorations(ref_result_id, new_ref_id);
  return new_ref_id;
}
  135. uint32_t InstBindlessCheckPass::GetImageId(Instruction* inst) {
  136. switch (inst->opcode()) {
  137. case spv::Op::OpImageSampleImplicitLod:
  138. case spv::Op::OpImageSampleExplicitLod:
  139. case spv::Op::OpImageSampleDrefImplicitLod:
  140. case spv::Op::OpImageSampleDrefExplicitLod:
  141. case spv::Op::OpImageSampleProjImplicitLod:
  142. case spv::Op::OpImageSampleProjExplicitLod:
  143. case spv::Op::OpImageSampleProjDrefImplicitLod:
  144. case spv::Op::OpImageSampleProjDrefExplicitLod:
  145. case spv::Op::OpImageGather:
  146. case spv::Op::OpImageDrefGather:
  147. case spv::Op::OpImageQueryLod:
  148. case spv::Op::OpImageSparseSampleImplicitLod:
  149. case spv::Op::OpImageSparseSampleExplicitLod:
  150. case spv::Op::OpImageSparseSampleDrefImplicitLod:
  151. case spv::Op::OpImageSparseSampleDrefExplicitLod:
  152. case spv::Op::OpImageSparseSampleProjImplicitLod:
  153. case spv::Op::OpImageSparseSampleProjExplicitLod:
  154. case spv::Op::OpImageSparseSampleProjDrefImplicitLod:
  155. case spv::Op::OpImageSparseSampleProjDrefExplicitLod:
  156. case spv::Op::OpImageSparseGather:
  157. case spv::Op::OpImageSparseDrefGather:
  158. case spv::Op::OpImageFetch:
  159. case spv::Op::OpImageRead:
  160. case spv::Op::OpImageQueryFormat:
  161. case spv::Op::OpImageQueryOrder:
  162. case spv::Op::OpImageQuerySizeLod:
  163. case spv::Op::OpImageQuerySize:
  164. case spv::Op::OpImageQueryLevels:
  165. case spv::Op::OpImageQuerySamples:
  166. case spv::Op::OpImageSparseFetch:
  167. case spv::Op::OpImageSparseRead:
  168. case spv::Op::OpImageWrite:
  169. return inst->GetSingleWordInOperand(kSpvImageSampleImageIdInIdx);
  170. default:
  171. break;
  172. }
  173. return 0;
  174. }
  175. Instruction* InstBindlessCheckPass::GetPointeeTypeInst(Instruction* ptr_inst) {
  176. uint32_t pte_ty_id = GetPointeeTypeId(ptr_inst);
  177. return get_def_use_mgr()->GetDef(pte_ty_id);
  178. }
  179. bool InstBindlessCheckPass::AnalyzeDescriptorReference(Instruction* ref_inst,
  180. RefAnalysis* ref) {
  181. ref->ref_inst = ref_inst;
  182. if (ref_inst->opcode() == spv::Op::OpLoad ||
  183. ref_inst->opcode() == spv::Op::OpStore) {
  184. ref->desc_load_id = 0;
  185. ref->ptr_id = ref_inst->GetSingleWordInOperand(kSpvLoadPtrIdInIdx);
  186. Instruction* ptr_inst = get_def_use_mgr()->GetDef(ref->ptr_id);
  187. if (ptr_inst->opcode() != spv::Op::OpAccessChain) return false;
  188. ref->var_id = ptr_inst->GetSingleWordInOperand(kSpvAccessChainBaseIdInIdx);
  189. Instruction* var_inst = get_def_use_mgr()->GetDef(ref->var_id);
  190. if (var_inst->opcode() != spv::Op::OpVariable) return false;
  191. spv::StorageClass storage_class = spv::StorageClass(
  192. var_inst->GetSingleWordInOperand(kSpvVariableStorageClassInIdx));
  193. switch (storage_class) {
  194. case spv::StorageClass::Uniform:
  195. case spv::StorageClass::StorageBuffer:
  196. break;
  197. default:
  198. return false;
  199. break;
  200. }
  201. // Check for deprecated storage block form
  202. if (storage_class == spv::StorageClass::Uniform) {
  203. uint32_t var_ty_id = var_inst->type_id();
  204. Instruction* var_ty_inst = get_def_use_mgr()->GetDef(var_ty_id);
  205. uint32_t ptr_ty_id =
  206. var_ty_inst->GetSingleWordInOperand(kSpvTypePtrTypeIdInIdx);
  207. Instruction* ptr_ty_inst = get_def_use_mgr()->GetDef(ptr_ty_id);
  208. spv::Op ptr_ty_op = ptr_ty_inst->opcode();
  209. uint32_t block_ty_id =
  210. (ptr_ty_op == spv::Op::OpTypeArray ||
  211. ptr_ty_op == spv::Op::OpTypeRuntimeArray)
  212. ? ptr_ty_inst->GetSingleWordInOperand(kSpvTypeArrayTypeIdInIdx)
  213. : ptr_ty_id;
  214. assert(get_def_use_mgr()->GetDef(block_ty_id)->opcode() ==
  215. spv::Op::OpTypeStruct &&
  216. "unexpected block type");
  217. bool block_found = get_decoration_mgr()->FindDecoration(
  218. block_ty_id, uint32_t(spv::Decoration::Block),
  219. [](const Instruction&) { return true; });
  220. if (!block_found) {
  221. // If block decoration not found, verify deprecated form of SSBO
  222. bool buffer_block_found = get_decoration_mgr()->FindDecoration(
  223. block_ty_id, uint32_t(spv::Decoration::BufferBlock),
  224. [](const Instruction&) { return true; });
  225. USE_ASSERT(buffer_block_found && "block decoration not found");
  226. storage_class = spv::StorageClass::StorageBuffer;
  227. }
  228. }
  229. ref->strg_class = uint32_t(storage_class);
  230. Instruction* desc_type_inst = GetPointeeTypeInst(var_inst);
  231. switch (desc_type_inst->opcode()) {
  232. case spv::Op::OpTypeArray:
  233. case spv::Op::OpTypeRuntimeArray:
  234. // A load through a descriptor array will have at least 3 operands. We
  235. // do not want to instrument loads of descriptors here which are part of
  236. // an image-based reference.
  237. if (ptr_inst->NumInOperands() < 3) return false;
  238. ref->desc_idx_id =
  239. ptr_inst->GetSingleWordInOperand(kSpvAccessChainIndex0IdInIdx);
  240. break;
  241. default:
  242. ref->desc_idx_id = 0;
  243. break;
  244. }
  245. return true;
  246. }
  247. // Reference is not load or store. If not an image-based reference, return.
  248. ref->image_id = GetImageId(ref_inst);
  249. if (ref->image_id == 0) return false;
  250. // Search for descriptor load
  251. uint32_t desc_load_id = ref->image_id;
  252. Instruction* desc_load_inst;
  253. for (;;) {
  254. desc_load_inst = get_def_use_mgr()->GetDef(desc_load_id);
  255. if (desc_load_inst->opcode() == spv::Op::OpSampledImage)
  256. desc_load_id =
  257. desc_load_inst->GetSingleWordInOperand(kSpvSampledImageImageIdInIdx);
  258. else if (desc_load_inst->opcode() == spv::Op::OpImage)
  259. desc_load_id =
  260. desc_load_inst->GetSingleWordInOperand(kSpvImageSampledImageIdInIdx);
  261. else if (desc_load_inst->opcode() == spv::Op::OpCopyObject)
  262. desc_load_id =
  263. desc_load_inst->GetSingleWordInOperand(kSpvCopyObjectOperandIdInIdx);
  264. else
  265. break;
  266. }
  267. if (desc_load_inst->opcode() != spv::Op::OpLoad) {
  268. // TODO(greg-lunarg): Handle additional possibilities?
  269. return false;
  270. }
  271. ref->desc_load_id = desc_load_id;
  272. ref->ptr_id = desc_load_inst->GetSingleWordInOperand(kSpvLoadPtrIdInIdx);
  273. Instruction* ptr_inst = get_def_use_mgr()->GetDef(ref->ptr_id);
  274. if (ptr_inst->opcode() == spv::Op::OpVariable) {
  275. ref->desc_idx_id = 0;
  276. ref->var_id = ref->ptr_id;
  277. } else if (ptr_inst->opcode() == spv::Op::OpAccessChain) {
  278. if (ptr_inst->NumInOperands() != 2) {
  279. assert(false && "unexpected bindless index number");
  280. return false;
  281. }
  282. ref->desc_idx_id =
  283. ptr_inst->GetSingleWordInOperand(kSpvAccessChainIndex0IdInIdx);
  284. ref->var_id = ptr_inst->GetSingleWordInOperand(kSpvAccessChainBaseIdInIdx);
  285. Instruction* var_inst = get_def_use_mgr()->GetDef(ref->var_id);
  286. if (var_inst->opcode() != spv::Op::OpVariable) {
  287. assert(false && "unexpected bindless base");
  288. return false;
  289. }
  290. } else {
  291. // TODO(greg-lunarg): Handle additional possibilities?
  292. return false;
  293. }
  294. return true;
  295. }
  296. uint32_t InstBindlessCheckPass::FindStride(uint32_t ty_id,
  297. uint32_t stride_deco) {
  298. uint32_t stride = 0xdeadbeef;
  299. bool found = get_decoration_mgr()->FindDecoration(
  300. ty_id, stride_deco, [&stride](const Instruction& deco_inst) {
  301. stride = deco_inst.GetSingleWordInOperand(2u);
  302. return true;
  303. });
  304. USE_ASSERT(found && "stride not found");
  305. return stride;
  306. }
  307. uint32_t InstBindlessCheckPass::ByteSize(uint32_t ty_id, uint32_t matrix_stride,
  308. bool col_major, bool in_matrix) {
  309. analysis::TypeManager* type_mgr = context()->get_type_mgr();
  310. const analysis::Type* sz_ty = type_mgr->GetType(ty_id);
  311. if (sz_ty->kind() == analysis::Type::kPointer) {
  312. // Assuming PhysicalStorageBuffer pointer
  313. return 8;
  314. }
  315. if (sz_ty->kind() == analysis::Type::kMatrix) {
  316. assert(matrix_stride != 0 && "missing matrix stride");
  317. const analysis::Matrix* m_ty = sz_ty->AsMatrix();
  318. if (col_major) {
  319. return m_ty->element_count() * matrix_stride;
  320. } else {
  321. const analysis::Vector* v_ty = m_ty->element_type()->AsVector();
  322. return v_ty->element_count() * matrix_stride;
  323. }
  324. }
  325. uint32_t size = 1;
  326. if (sz_ty->kind() == analysis::Type::kVector) {
  327. const analysis::Vector* v_ty = sz_ty->AsVector();
  328. size = v_ty->element_count();
  329. const analysis::Type* comp_ty = v_ty->element_type();
  330. // if vector in row major matrix, the vector is strided so return the
  331. // number of bytes spanned by the vector
  332. if (in_matrix && !col_major && matrix_stride > 0) {
  333. uint32_t comp_ty_id = type_mgr->GetId(comp_ty);
  334. return (size - 1) * matrix_stride + ByteSize(comp_ty_id, 0, false, false);
  335. }
  336. sz_ty = comp_ty;
  337. }
  338. switch (sz_ty->kind()) {
  339. case analysis::Type::kFloat: {
  340. const analysis::Float* f_ty = sz_ty->AsFloat();
  341. size *= f_ty->width();
  342. } break;
  343. case analysis::Type::kInteger: {
  344. const analysis::Integer* i_ty = sz_ty->AsInteger();
  345. size *= i_ty->width();
  346. } break;
  347. default: { assert(false && "unexpected type"); } break;
  348. }
  349. size /= 8;
  350. return size;
  351. }
// Generate code into |builder| computing the byte offset of the LAST byte
// referenced by the access chain in |ref|, relative to the start of the
// buffer. Walks the access chain indices in parallel with the pointee types,
// accumulating a runtime sum of per-level offsets (array stride * index,
// matrix column/row stride * index, struct member Offset), then adds
// (byte size of final type - 1). Returns the id of the resulting uint value.
uint32_t InstBindlessCheckPass::GenLastByteIdx(RefAnalysis* ref,
                                               InstructionBuilder* builder) {
  // Find outermost buffer type and its access chain index
  Instruction* var_inst = get_def_use_mgr()->GetDef(ref->var_id);
  Instruction* desc_ty_inst = GetPointeeTypeInst(var_inst);
  uint32_t buff_ty_id;
  uint32_t ac_in_idx = 1;
  switch (desc_ty_inst->opcode()) {
    case spv::Op::OpTypeArray:
    case spv::Op::OpTypeRuntimeArray:
      // Descriptor array: in-operand 1 of the access chain selects the
      // descriptor, so offset accumulation starts at the next index.
      buff_ty_id = desc_ty_inst->GetSingleWordInOperand(0);
      ++ac_in_idx;
      break;
    default:
      assert(desc_ty_inst->opcode() == spv::Op::OpTypeStruct &&
             "unexpected descriptor type");
      buff_ty_id = desc_ty_inst->result_id();
      break;
  }
  // Process remaining access chain indices
  Instruction* ac_inst = get_def_use_mgr()->GetDef(ref->ptr_id);
  uint32_t curr_ty_id = buff_ty_id;
  uint32_t sum_id = 0u;
  // Matrix context carried between levels: stride and majorness come from
  // OpMemberDecorate on the enclosing struct, not from the matrix type.
  uint32_t matrix_stride = 0u;
  bool col_major = false;
  uint32_t matrix_stride_id = 0u;
  bool in_matrix = false;
  while (ac_in_idx < ac_inst->NumInOperands()) {
    uint32_t curr_idx_id = ac_inst->GetSingleWordInOperand(ac_in_idx);
    Instruction* curr_ty_inst = get_def_use_mgr()->GetDef(curr_ty_id);
    uint32_t curr_offset_id = 0;
    switch (curr_ty_inst->opcode()) {
      case spv::Op::OpTypeArray:
      case spv::Op::OpTypeRuntimeArray: {
        // Get array stride and multiply by current index
        uint32_t arr_stride =
            FindStride(curr_ty_id, uint32_t(spv::Decoration::ArrayStride));
        uint32_t arr_stride_id = builder->GetUintConstantId(arr_stride);
        uint32_t curr_idx_32b_id = Gen32BitCvtCode(curr_idx_id, builder);
        Instruction* curr_offset_inst = builder->AddBinaryOp(
            GetUintId(), spv::Op::OpIMul, arr_stride_id, curr_idx_32b_id);
        curr_offset_id = curr_offset_inst->result_id();
        // Get element type for next step
        curr_ty_id = curr_ty_inst->GetSingleWordInOperand(0);
      } break;
      case spv::Op::OpTypeMatrix: {
        assert(matrix_stride != 0 && "missing matrix stride");
        matrix_stride_id = builder->GetUintConstantId(matrix_stride);
        uint32_t vec_ty_id = curr_ty_inst->GetSingleWordInOperand(0);
        // If column major, multiply column index by matrix stride, otherwise
        // by vector component size and save matrix stride for vector (row)
        // index
        uint32_t col_stride_id;
        if (col_major) {
          col_stride_id = matrix_stride_id;
        } else {
          Instruction* vec_ty_inst = get_def_use_mgr()->GetDef(vec_ty_id);
          uint32_t comp_ty_id = vec_ty_inst->GetSingleWordInOperand(0u);
          uint32_t col_stride = ByteSize(comp_ty_id, 0u, false, false);
          col_stride_id = builder->GetUintConstantId(col_stride);
        }
        uint32_t curr_idx_32b_id = Gen32BitCvtCode(curr_idx_id, builder);
        Instruction* curr_offset_inst = builder->AddBinaryOp(
            GetUintId(), spv::Op::OpIMul, col_stride_id, curr_idx_32b_id);
        curr_offset_id = curr_offset_inst->result_id();
        // Get element type for next step
        curr_ty_id = vec_ty_id;
        in_matrix = true;
      } break;
      case spv::Op::OpTypeVector: {
        // If inside a row major matrix type, multiply index by matrix stride,
        // else multiply by component size
        uint32_t comp_ty_id = curr_ty_inst->GetSingleWordInOperand(0u);
        uint32_t curr_idx_32b_id = Gen32BitCvtCode(curr_idx_id, builder);
        if (in_matrix && !col_major) {
          Instruction* curr_offset_inst = builder->AddBinaryOp(
              GetUintId(), spv::Op::OpIMul, matrix_stride_id, curr_idx_32b_id);
          curr_offset_id = curr_offset_inst->result_id();
        } else {
          uint32_t comp_ty_sz = ByteSize(comp_ty_id, 0u, false, false);
          uint32_t comp_ty_sz_id = builder->GetUintConstantId(comp_ty_sz);
          Instruction* curr_offset_inst = builder->AddBinaryOp(
              GetUintId(), spv::Op::OpIMul, comp_ty_sz_id, curr_idx_32b_id);
          curr_offset_id = curr_offset_inst->result_id();
        }
        // Get element type for next step
        curr_ty_id = comp_ty_id;
      } break;
      case spv::Op::OpTypeStruct: {
        // Get buffer byte offset for the referenced member. Struct indices
        // must be compile-time constants in SPIR-V.
        Instruction* curr_idx_inst = get_def_use_mgr()->GetDef(curr_idx_id);
        assert(curr_idx_inst->opcode() == spv::Op::OpConstant &&
               "unexpected struct index");
        uint32_t member_idx = curr_idx_inst->GetSingleWordInOperand(0);
        uint32_t member_offset = 0xdeadbeef;
        bool found = get_decoration_mgr()->FindDecoration(
            curr_ty_id, uint32_t(spv::Decoration::Offset),
            [&member_idx, &member_offset](const Instruction& deco_inst) {
              if (deco_inst.GetSingleWordInOperand(1u) != member_idx)
                return false;
              member_offset = deco_inst.GetSingleWordInOperand(3u);
              return true;
            });
        USE_ASSERT(found && "member offset not found");
        curr_offset_id = builder->GetUintConstantId(member_offset);
        // Look for matrix stride for this member if there is one. The matrix
        // stride is not on the matrix type, but in a OpMemberDecorate on the
        // enclosing struct type at the member index. If none found, reset
        // stride to 0.
        found = get_decoration_mgr()->FindDecoration(
            curr_ty_id, uint32_t(spv::Decoration::MatrixStride),
            [&member_idx, &matrix_stride](const Instruction& deco_inst) {
              if (deco_inst.GetSingleWordInOperand(1u) != member_idx)
                return false;
              matrix_stride = deco_inst.GetSingleWordInOperand(3u);
              return true;
            });
        if (!found) matrix_stride = 0;
        // Look for column major decoration
        found = get_decoration_mgr()->FindDecoration(
            curr_ty_id, uint32_t(spv::Decoration::ColMajor),
            [&member_idx, &col_major](const Instruction& deco_inst) {
              if (deco_inst.GetSingleWordInOperand(1u) != member_idx)
                return false;
              col_major = true;
              return true;
            });
        if (!found) col_major = false;
        // Get element type for next step
        curr_ty_id = curr_ty_inst->GetSingleWordInOperand(member_idx);
      } break;
      default: { assert(false && "unexpected non-composite type"); } break;
    }
    // Accumulate this level's offset into the running sum.
    if (sum_id == 0)
      sum_id = curr_offset_id;
    else {
      Instruction* sum_inst =
          builder->AddIAdd(GetUintId(), sum_id, curr_offset_id);
      sum_id = sum_inst->result_id();
    }
    ++ac_in_idx;
  }
  // Add in offset of last byte of referenced object
  uint32_t bsize = ByteSize(curr_ty_id, matrix_stride, col_major, in_matrix);
  uint32_t last = bsize - 1;
  uint32_t last_id = builder->GetUintConstantId(last);
  Instruction* sum_inst = builder->AddIAdd(GetUintId(), sum_id, last_id);
  return sum_inst->result_id();
}
// Append the runtime check for |check_id| to |new_blocks|: a conditional
// branch whose valid block re-executes the original reference (via clone),
// and whose invalid block writes an error record to the debug output stream
// and yields a null/zero value. A merge block phis the two results and
// replaces all uses of the original reference, which is then killed.
// |offset_id| != 0 selects the buffer-OOB error record layout; |error_id|,
// |length_id| and |stage_idx| are forwarded into the error record.
void InstBindlessCheckPass::GenCheckCode(
    uint32_t check_id, uint32_t error_id, uint32_t offset_id,
    uint32_t length_id, uint32_t stage_idx, RefAnalysis* ref,
    std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  BasicBlock* back_blk_ptr = &*new_blocks->back();
  InstructionBuilder builder(
      context(), back_blk_ptr,
      IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
  // Gen conditional branch on check_id. Valid branch generates original
  // reference. Invalid generates debug output and zero result (if needed).
  uint32_t merge_blk_id = TakeNextId();
  uint32_t valid_blk_id = TakeNextId();
  uint32_t invalid_blk_id = TakeNextId();
  std::unique_ptr<Instruction> merge_label(NewLabel(merge_blk_id));
  std::unique_ptr<Instruction> valid_label(NewLabel(valid_blk_id));
  std::unique_ptr<Instruction> invalid_label(NewLabel(invalid_blk_id));
  (void)builder.AddConditionalBranch(
      check_id, valid_blk_id, invalid_blk_id, merge_blk_id,
      uint32_t(spv::SelectionControlMask::MaskNone));
  // Gen valid bounds branch
  std::unique_ptr<BasicBlock> new_blk_ptr(
      new BasicBlock(std::move(valid_label)));
  builder.SetInsertPoint(&*new_blk_ptr);
  uint32_t new_ref_id = CloneOriginalReference(ref, &builder);
  uint32_t null_id = 0;
  // Capture the type id before the original instruction is killed below.
  uint32_t ref_type_id = ref->ref_inst->type_id();
  (void)builder.AddBranch(merge_blk_id);
  new_blocks->push_back(std::move(new_blk_ptr));
  // Gen invalid block
  new_blk_ptr.reset(new BasicBlock(std::move(invalid_label)));
  builder.SetInsertPoint(&*new_blk_ptr);
  uint32_t u_index_id = GenUintCastCode(ref->desc_idx_id, &builder);
  if (offset_id != 0) {
    // Buffer OOB
    uint32_t u_offset_id = GenUintCastCode(offset_id, &builder);
    uint32_t u_length_id = GenUintCastCode(length_id, &builder);
    GenDebugStreamWrite(uid2offset_[ref->ref_inst->unique_id()], stage_idx,
                        {error_id, u_index_id, u_offset_id, u_length_id},
                        &builder);
  } else if (buffer_bounds_enabled_ || texel_buffer_enabled_) {
    // Uninitialized Descriptor - Return additional unused zero so all error
    // modes will use same debug stream write function
    uint32_t u_length_id = GenUintCastCode(length_id, &builder);
    GenDebugStreamWrite(
        uid2offset_[ref->ref_inst->unique_id()], stage_idx,
        {error_id, u_index_id, u_length_id, builder.GetUintConstantId(0)},
        &builder);
  } else {
    // Uninitialized Descriptor - Normal error return
    uint32_t u_length_id = GenUintCastCode(length_id, &builder);
    GenDebugStreamWrite(uid2offset_[ref->ref_inst->unique_id()], stage_idx,
                        {error_id, u_index_id, u_length_id}, &builder);
  }
  // Generate a ConstantNull, converting to uint64 if the type cannot be a null.
  if (new_ref_id != 0) {
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    analysis::Type* ref_type = type_mgr->GetType(ref_type_id);
    if (ref_type->AsPointer() != nullptr) {
      // Pointers (e.g. PhysicalStorageBuffer) cannot be OpConstantNull;
      // synthesize one by converting a null uint64 instead.
      context()->AddCapability(spv::Capability::Int64);
      uint32_t null_u64_id = GetNullId(GetUint64Id());
      Instruction* null_ptr_inst = builder.AddUnaryOp(
          ref_type_id, spv::Op::OpConvertUToPtr, null_u64_id);
      null_id = null_ptr_inst->result_id();
    } else {
      null_id = GetNullId(ref_type_id);
    }
  }
  // Remember last invalid block id
  uint32_t last_invalid_blk_id = new_blk_ptr->GetLabelInst()->result_id();
  // Gen zero for invalid reference
  (void)builder.AddBranch(merge_blk_id);
  new_blocks->push_back(std::move(new_blk_ptr));
  // Gen merge block
  new_blk_ptr.reset(new BasicBlock(std::move(merge_label)));
  builder.SetInsertPoint(&*new_blk_ptr);
  // Gen phi of new reference and zero, if necessary, and replace the
  // result id of the original reference with that of the Phi. Kill original
  // reference.
  if (new_ref_id != 0) {
    Instruction* phi_inst = builder.AddPhi(
        ref_type_id, {new_ref_id, valid_blk_id, null_id, last_invalid_blk_id});
    context()->ReplaceAllUsesWith(ref->ref_inst->result_id(),
                                  phi_inst->result_id());
  }
  new_blocks->push_back(std::move(new_blk_ptr));
  context()->KillInst(ref->ref_inst);
}
// Instrument the instruction at |ref_inst_itr| with a descriptor-index
// bounds check if it references a descriptor through an indexed descriptor
// array. Compile-time in-bounds accesses are left untouched. New blocks
// replacing the original block are appended to |new_blocks|; |stage_idx| is
// recorded in any generated error output.
void InstBindlessCheckPass::GenDescIdxCheckCode(
    BasicBlock::iterator ref_inst_itr,
    UptrVectorIterator<BasicBlock> ref_block_itr, uint32_t stage_idx,
    std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  // Look for reference through indexed descriptor. If found, analyze and
  // save components. If not, return.
  RefAnalysis ref;
  if (!AnalyzeDescriptorReference(&*ref_inst_itr, &ref)) return;
  Instruction* ptr_inst = get_def_use_mgr()->GetDef(ref.ptr_id);
  if (ptr_inst->opcode() != spv::Op::OpAccessChain) return;
  // If index and bound both compile-time constants and index < bound,
  // return without changing
  Instruction* var_inst = get_def_use_mgr()->GetDef(ref.var_id);
  Instruction* desc_type_inst = GetPointeeTypeInst(var_inst);
  uint32_t length_id = 0;
  if (desc_type_inst->opcode() == spv::Op::OpTypeArray) {
    length_id =
        desc_type_inst->GetSingleWordInOperand(kSpvTypeArrayLengthIdInIdx);
    Instruction* index_inst = get_def_use_mgr()->GetDef(ref.desc_idx_id);
    Instruction* length_inst = get_def_use_mgr()->GetDef(length_id);
    if (index_inst->opcode() == spv::Op::OpConstant &&
        length_inst->opcode() == spv::Op::OpConstant &&
        index_inst->GetSingleWordInOperand(kSpvConstantValueInIdx) <
            length_inst->GetSingleWordInOperand(kSpvConstantValueInIdx))
      return;
  } else if (!desc_idx_enabled_ ||
             desc_type_inst->opcode() != spv::Op::OpTypeRuntimeArray) {
    // Runtime-sized arrays are only checked when index checking is enabled.
    return;
  }
  // Move original block's preceding instructions into first new block
  std::unique_ptr<BasicBlock> new_blk_ptr;
  MovePreludeCode(ref_inst_itr, ref_block_itr, &new_blk_ptr);
  InstructionBuilder builder(
      context(), &*new_blk_ptr,
      IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
  new_blocks->push_back(std::move(new_blk_ptr));
  uint32_t error_id = builder.GetUintConstantId(kInstErrorBindlessBounds);
  // If length id not yet set, descriptor array is runtime size so
  // generate load of length from stage's debug input buffer.
  if (length_id == 0) {
    assert(desc_type_inst->opcode() == spv::Op::OpTypeRuntimeArray &&
           "unexpected bindless type");
    length_id = GenDebugReadLength(ref.var_id, &builder);
  }
  // Generate full runtime bounds test code with true branch
  // being full reference and false branch being debug output and zero
  // for the referenced value.
  uint32_t desc_idx_32b_id = Gen32BitCvtCode(ref.desc_idx_id, &builder);
  uint32_t length_32b_id = Gen32BitCvtCode(length_id, &builder);
  Instruction* ult_inst = builder.AddBinaryOp(GetBoolId(), spv::Op::OpULessThan,
                                              desc_idx_32b_id, length_32b_id);
  // Use the 32-bit index in the error record generated by GenCheckCode.
  ref.desc_idx_id = desc_idx_32b_id;
  GenCheckCode(ult_inst->result_id(), error_id, 0u, length_id, stage_idx, &ref,
               new_blocks);
  // Move original block's remaining code into remainder/merge block and add
  // to new blocks
  BasicBlock* back_blk_ptr = &*new_blocks->back();
  MovePostludeCode(ref_block_itr, back_blk_ptr);
}
// Instrument the descriptor reference at |ref_inst_itr| with a runtime
// descriptor-initialization check and, when possible, a byte-level buffer
// bounds check. The reference's block (|ref_block_itr|) is split: its
// prelude, the generated conditional check, and its postlude are appended
// to |new_blocks|. |stage_idx| identifies the shader stage for error
// reporting via GenCheckCode.
void InstBindlessCheckPass::GenDescInitCheckCode(
    BasicBlock::iterator ref_inst_itr,
    UptrVectorIterator<BasicBlock> ref_block_itr, uint32_t stage_idx,
    std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  // Look for reference through descriptor. If not, return.
  RefAnalysis ref;
  if (!AnalyzeDescriptorReference(&*ref_inst_itr, &ref)) return;
  // Determine if we can only do initialization check.
  // A direct descriptor load (desc_load_id != 0) carries no byte offset to
  // bounds-check; the same restriction applies when bounds checking is
  // disabled by option.
  bool init_check = false;
  if (ref.desc_load_id != 0 || !buffer_bounds_enabled_) {
    init_check = true;
  } else {
    // For now, only do bounds check for non-aggregate types. Otherwise
    // just do descriptor initialization check.
    // TODO(greg-lunarg): Do bounds check for aggregate loads and stores
    Instruction* ref_ptr_inst = get_def_use_mgr()->GetDef(ref.ptr_id);
    Instruction* pte_type_inst = GetPointeeTypeInst(ref_ptr_inst);
    spv::Op pte_type_op = pte_type_inst->opcode();
    if (pte_type_op == spv::Op::OpTypeArray ||
        pte_type_op == spv::Op::OpTypeRuntimeArray ||
        pte_type_op == spv::Op::OpTypeStruct)
      init_check = true;
  }
  // If initialization check and not enabled, return
  if (init_check && !desc_init_enabled_) return;
  // Move original block's preceding instructions into first new block
  std::unique_ptr<BasicBlock> new_blk_ptr;
  MovePreludeCode(ref_inst_itr, ref_block_itr, &new_blk_ptr);
  InstructionBuilder builder(
      context(), &*new_blk_ptr,
      IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
  new_blocks->push_back(std::move(new_blk_ptr));
  // If initialization check, use reference value of zero.
  // Else use the index of the last byte referenced.
  uint32_t ref_id = init_check ? builder.GetUintConstantId(0u)
                               : GenLastByteIdx(&ref, &builder);
  // Read initialization/bounds from debug input buffer. If index id not yet
  // set, binding is single descriptor, so set index to constant 0.
  if (ref.desc_idx_id == 0) ref.desc_idx_id = builder.GetUintConstantId(0u);
  uint32_t init_id = GenDebugReadInit(ref.var_id, ref.desc_idx_id, &builder);
  // Generate runtime initialization/bounds test code with true branch
  // being full reference and false branch being debug output and zero
  // for the referenced value.
  // NOTE(review): the shared ULessThan works for both modes only if the
  // debug buffer reports 0 for an uninitialized descriptor (0 < 0 fails)
  // and the buffer length otherwise — confirm against GenDebugReadInit.
  Instruction* ult_inst =
      builder.AddBinaryOp(GetBoolId(), spv::Op::OpULessThan, ref_id, init_id);
  uint32_t error =
      init_check
          ? kInstErrorBindlessUninit
          : (spv::StorageClass(ref.strg_class) == spv::StorageClass::Uniform
                 ? kInstErrorBuffOOBUniform
                 : kInstErrorBuffOOBStorage);
  uint32_t error_id = builder.GetUintConstantId(error);
  GenCheckCode(ult_inst->result_id(), error_id, init_check ? 0 : ref_id,
               init_check ? builder.GetUintConstantId(0u) : init_id, stage_idx,
               &ref, new_blocks);
  // Move original block's remaining code into remainder/merge block and add
  // to new blocks
  BasicBlock* back_blk_ptr = &*new_blocks->back();
  MovePostludeCode(ref_block_itr, back_blk_ptr);
}
// Instrument texel-buffer image accesses at |ref_inst_itr| with a runtime
// coordinate-versus-size bounds check. Applies only to plain (non-depth,
// non-arrayed, non-multisampled) images of Dim Buffer. The reference's
// block (|ref_block_itr|) is split and the generated conditional blocks
// are appended to |new_blocks|; |stage_idx| identifies the shader stage
// for error reporting.
void InstBindlessCheckPass::GenTexBuffCheckCode(
    BasicBlock::iterator ref_inst_itr,
    UptrVectorIterator<BasicBlock> ref_block_itr, uint32_t stage_idx,
    std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  // Only process OpImageRead and OpImageWrite with no optional operands
  // (2 in-operands for read/fetch: image + coordinate; 3 for write:
  // image + coordinate + texel).
  Instruction* ref_inst = &*ref_inst_itr;
  spv::Op op = ref_inst->opcode();
  uint32_t num_in_oprnds = ref_inst->NumInOperands();
  if (!((op == spv::Op::OpImageRead && num_in_oprnds == 2) ||
        (op == spv::Op::OpImageFetch && num_in_oprnds == 2) ||
        (op == spv::Op::OpImageWrite && num_in_oprnds == 3)))
    return;
  // Pull components from descriptor reference
  RefAnalysis ref;
  if (!AnalyzeDescriptorReference(ref_inst, &ref)) return;
  // Only process if image is texel buffer
  Instruction* image_inst = get_def_use_mgr()->GetDef(ref.image_id);
  uint32_t image_ty_id = image_inst->type_id();
  Instruction* image_ty_inst = get_def_use_mgr()->GetDef(image_ty_id);
  if (spv::Dim(image_ty_inst->GetSingleWordInOperand(kSpvTypeImageDim)) !=
      spv::Dim::Buffer) {
    return;
  }
  // Skip depth, arrayed, and multisampled images.
  if (image_ty_inst->GetSingleWordInOperand(kSpvTypeImageDepth) != 0) return;
  if (image_ty_inst->GetSingleWordInOperand(kSpvTypeImageArrayed) != 0) return;
  if (image_ty_inst->GetSingleWordInOperand(kSpvTypeImageMS) != 0) return;
  // Enable ImageQuery Capability if not yet enabled; required by the
  // OpImageQuerySize generated below.
  context()->AddCapability(spv::Capability::ImageQuery);
  // Move original block's preceding instructions into first new block
  std::unique_ptr<BasicBlock> new_blk_ptr;
  MovePreludeCode(ref_inst_itr, ref_block_itr, &new_blk_ptr);
  InstructionBuilder builder(
      context(), &*new_blk_ptr,
      IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
  new_blocks->push_back(std::move(new_blk_ptr));
  // Get texel coordinate (in-operand 1), cast to unsigned for the
  // comparison against the unsigned buffer size.
  uint32_t coord_id =
      GenUintCastCode(ref_inst->GetSingleWordInOperand(1), &builder);
  // If index id not yet set, binding is single descriptor, so set index to
  // constant 0.
  if (ref.desc_idx_id == 0) ref.desc_idx_id = builder.GetUintConstantId(0u);
  // Get texel buffer size.
  Instruction* size_inst =
      builder.AddUnaryOp(GetUintId(), spv::Op::OpImageQuerySize, ref.image_id);
  uint32_t size_id = size_inst->result_id();
  // Generate runtime initialization/bounds test code with true branch
  // being full reference and false branch being debug output and zero
  // for the referenced value.
  Instruction* ult_inst =
      builder.AddBinaryOp(GetBoolId(), spv::Op::OpULessThan, coord_id, size_id);
  // Sampled == 2 indicates a storage texel buffer; otherwise report as a
  // uniform (sampled) texel buffer overrun.
  uint32_t error =
      (image_ty_inst->GetSingleWordInOperand(kSpvTypeImageSampled) == 2)
          ? kInstErrorBuffOOBStorageTexel
          : kInstErrorBuffOOBUniformTexel;
  uint32_t error_id = builder.GetUintConstantId(error);
  GenCheckCode(ult_inst->result_id(), error_id, coord_id, size_id, stage_idx,
               &ref, new_blocks);
  // Move original block's remaining code into remainder/merge block and add
  // to new blocks
  BasicBlock* back_blk_ptr = &*new_blocks->back();
  MovePostludeCode(ref_block_itr, back_blk_ptr);
}
  769. void InstBindlessCheckPass::InitializeInstBindlessCheck() {
  770. // Initialize base class
  771. InitializeInstrument();
  772. // If runtime array length support or buffer bounds checking are enabled,
  773. // create variable mappings. Length support is always enabled if descriptor
  774. // init check is enabled.
  775. if (desc_idx_enabled_ || buffer_bounds_enabled_ || texel_buffer_enabled_)
  776. for (auto& anno : get_module()->annotations())
  777. if (anno.opcode() == spv::Op::OpDecorate) {
  778. if (spv::Decoration(anno.GetSingleWordInOperand(1u)) ==
  779. spv::Decoration::DescriptorSet) {
  780. var2desc_set_[anno.GetSingleWordInOperand(0u)] =
  781. anno.GetSingleWordInOperand(2u);
  782. } else if (spv::Decoration(anno.GetSingleWordInOperand(1u)) ==
  783. spv::Decoration::Binding) {
  784. var2binding_[anno.GetSingleWordInOperand(0u)] =
  785. anno.GetSingleWordInOperand(2u);
  786. }
  787. }
  788. }
  789. Pass::Status InstBindlessCheckPass::ProcessImpl() {
  790. // Perform bindless bounds check on each entry point function in module
  791. InstProcessFunction pfn =
  792. [this](BasicBlock::iterator ref_inst_itr,
  793. UptrVectorIterator<BasicBlock> ref_block_itr, uint32_t stage_idx,
  794. std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  795. return GenDescIdxCheckCode(ref_inst_itr, ref_block_itr, stage_idx,
  796. new_blocks);
  797. };
  798. bool modified = InstProcessEntryPointCallTree(pfn);
  799. if (desc_init_enabled_ || buffer_bounds_enabled_) {
  800. // Perform descriptor initialization and/or buffer bounds check on each
  801. // entry point function in module
  802. pfn = [this](BasicBlock::iterator ref_inst_itr,
  803. UptrVectorIterator<BasicBlock> ref_block_itr,
  804. uint32_t stage_idx,
  805. std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  806. return GenDescInitCheckCode(ref_inst_itr, ref_block_itr, stage_idx,
  807. new_blocks);
  808. };
  809. modified |= InstProcessEntryPointCallTree(pfn);
  810. }
  811. if (texel_buffer_enabled_) {
  812. // Perform texel buffer bounds check on each entry point function in
  813. // module. Generate after descriptor bounds and initialization checks.
  814. pfn = [this](BasicBlock::iterator ref_inst_itr,
  815. UptrVectorIterator<BasicBlock> ref_block_itr,
  816. uint32_t stage_idx,
  817. std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  818. return GenTexBuffCheckCode(ref_inst_itr, ref_block_itr, stage_idx,
  819. new_blocks);
  820. };
  821. modified |= InstProcessEntryPointCallTree(pfn);
  822. }
  823. return modified ? Status::SuccessWithChange : Status::SuccessWithoutChange;
  824. }
// Pass entry point: build the per-variable descriptor-set/binding state,
// then instrument the module and report whether it changed.
Pass::Status InstBindlessCheckPass::Process() {
  InitializeInstBindlessCheck();
  return ProcessImpl();
}
  829. } // namespace opt
  830. } // namespace spvtools