// inst_bindless_check_pass.cpp
  1. // Copyright (c) 2018 The Khronos Group Inc.
  2. // Copyright (c) 2018 Valve Corporation
  3. // Copyright (c) 2018 LunarG Inc.
  4. //
  5. // Licensed under the Apache License, Version 2.0 (the "License");
  6. // you may not use this file except in compliance with the License.
  7. // You may obtain a copy of the License at
  8. //
  9. // http://www.apache.org/licenses/LICENSE-2.0
  10. //
  11. // Unless required by applicable law or agreed to in writing, software
  12. // distributed under the License is distributed on an "AS IS" BASIS,
  13. // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14. // See the License for the specific language governing permissions and
  15. // limitations under the License.
  16. #include "inst_bindless_check_pass.h"
  17. namespace {
  18. // Input Operand Indices
  19. static const int kSpvImageSampleImageIdInIdx = 0;
  20. static const int kSpvSampledImageImageIdInIdx = 0;
  21. static const int kSpvSampledImageSamplerIdInIdx = 1;
  22. static const int kSpvImageSampledImageIdInIdx = 0;
  23. static const int kSpvCopyObjectOperandIdInIdx = 0;
  24. static const int kSpvLoadPtrIdInIdx = 0;
  25. static const int kSpvAccessChainBaseIdInIdx = 0;
  26. static const int kSpvAccessChainIndex0IdInIdx = 1;
  27. static const int kSpvTypeArrayLengthIdInIdx = 1;
  28. static const int kSpvConstantValueInIdx = 0;
  29. static const int kSpvVariableStorageClassInIdx = 0;
  30. static const int kSpvTypeImageDim = 1;
  31. static const int kSpvTypeImageDepth = 2;
  32. static const int kSpvTypeImageArrayed = 3;
  33. static const int kSpvTypeImageMS = 4;
  34. } // anonymous namespace
// Avoid unused variable warning/error on Linux
//
// USE_ASSERT(x) asserts |x| in debug builds. In NDEBUG builds the argument is
// evaluated and cast to void so that variables referenced only inside the
// assertion do not trigger unused-variable diagnostics.
#ifndef NDEBUG
#define USE_ASSERT(x) assert(x)
#else
#define USE_ASSERT(x) ((void)(x))
#endif
  41. namespace spvtools {
  42. namespace opt {
  43. uint32_t InstBindlessCheckPass::GenDebugReadLength(
  44. uint32_t var_id, InstructionBuilder* builder) {
  45. uint32_t desc_set_idx =
  46. var2desc_set_[var_id] + kDebugInputBindlessOffsetLengths;
  47. uint32_t desc_set_idx_id = builder->GetUintConstantId(desc_set_idx);
  48. uint32_t binding_idx_id = builder->GetUintConstantId(var2binding_[var_id]);
  49. return GenDebugDirectRead({desc_set_idx_id, binding_idx_id}, builder);
  50. }
  51. uint32_t InstBindlessCheckPass::GenDebugReadInit(uint32_t var_id,
  52. uint32_t desc_idx_id,
  53. InstructionBuilder* builder) {
  54. uint32_t binding_idx_id = builder->GetUintConstantId(var2binding_[var_id]);
  55. uint32_t u_desc_idx_id = GenUintCastCode(desc_idx_id, builder);
  56. // If desc index checking is not enabled, we know the offset of initialization
  57. // entries is 1, so we can avoid loading this value and just add 1 to the
  58. // descriptor set.
  59. if (!desc_idx_enabled_) {
  60. uint32_t desc_set_idx_id =
  61. builder->GetUintConstantId(var2desc_set_[var_id] + 1);
  62. return GenDebugDirectRead({desc_set_idx_id, binding_idx_id, u_desc_idx_id},
  63. builder);
  64. } else {
  65. uint32_t desc_set_base_id =
  66. builder->GetUintConstantId(kDebugInputBindlessInitOffset);
  67. uint32_t desc_set_idx_id =
  68. builder->GetUintConstantId(var2desc_set_[var_id]);
  69. return GenDebugDirectRead(
  70. {desc_set_base_id, desc_set_idx_id, binding_idx_id, u_desc_idx_id},
  71. builder);
  72. }
  73. }
// Clone the instruction chain producing |old_image_id| (an OpLoad possibly
// wrapped by OpSampledImage / OpImage / OpCopyObject) into |builder| so the
// valid branch can re-issue the descriptor load, and return the id of the
// cloned image value.
uint32_t InstBindlessCheckPass::CloneOriginalImage(
    uint32_t old_image_id, InstructionBuilder* builder) {
  Instruction* new_image_inst;
  Instruction* old_image_inst = get_def_use_mgr()->GetDef(old_image_id);
  if (old_image_inst->opcode() == SpvOpLoad) {
    // Base case: re-load through the original pointer.
    new_image_inst = builder->AddLoad(
        old_image_inst->type_id(),
        old_image_inst->GetSingleWordInOperand(kSpvLoadPtrIdInIdx));
  } else if (old_image_inst->opcode() == SpvOp::SpvOpSampledImage) {
    // Recursively clone the image operand, then rebuild the sampled image
    // with the original sampler operand.
    uint32_t clone_id = CloneOriginalImage(
        old_image_inst->GetSingleWordInOperand(kSpvSampledImageImageIdInIdx),
        builder);
    new_image_inst = builder->AddBinaryOp(
        old_image_inst->type_id(), SpvOpSampledImage, clone_id,
        old_image_inst->GetSingleWordInOperand(kSpvSampledImageSamplerIdInIdx));
  } else if (old_image_inst->opcode() == SpvOp::SpvOpImage) {
    // Recursively clone the sampled-image operand and re-extract the image.
    uint32_t clone_id = CloneOriginalImage(
        old_image_inst->GetSingleWordInOperand(kSpvImageSampledImageIdInIdx),
        builder);
    new_image_inst =
        builder->AddUnaryOp(old_image_inst->type_id(), SpvOpImage, clone_id);
  } else {
    assert(old_image_inst->opcode() == SpvOp::SpvOpCopyObject &&
           "expecting OpCopyObject");
    uint32_t clone_id = CloneOriginalImage(
        old_image_inst->GetSingleWordInOperand(kSpvCopyObjectOperandIdInIdx),
        builder);
    // Since we are cloning, no need to create new copy
    new_image_inst = get_def_use_mgr()->GetDef(clone_id);
  }
  // Carry the original instruction's recorded offset over to the clone so
  // debug reporting for the clone matches the original.
  uid2offset_[new_image_inst->unique_id()] =
      uid2offset_[old_image_inst->unique_id()];
  uint32_t new_image_id = new_image_inst->result_id();
  get_decoration_mgr()->CloneDecorations(old_image_id, new_image_id);
  return new_image_id;
}
// Clone the referencing instruction in |ref| into |builder| (for the valid
// branch of the generated check), re-cloning its descriptor load first when
// the reference is image based. Returns the clone's result id, or 0 if the
// original produced no result (e.g. OpStore / OpImageWrite).
uint32_t InstBindlessCheckPass::CloneOriginalReference(
    RefAnalysis* ref, InstructionBuilder* builder) {
  // If original is image based, start by cloning descriptor load
  uint32_t new_image_id = 0;
  if (ref->desc_load_id != 0) {
    uint32_t old_image_id =
        ref->ref_inst->GetSingleWordInOperand(kSpvImageSampleImageIdInIdx);
    new_image_id = CloneOriginalImage(old_image_id, builder);
  }
  // Clone original reference
  std::unique_ptr<Instruction> new_ref_inst(ref->ref_inst->Clone(context()));
  uint32_t ref_result_id = ref->ref_inst->result_id();
  uint32_t new_ref_id = 0;
  if (ref_result_id != 0) {
    // Give the clone a fresh result id; the original's id will later be
    // replaced by the merge-block phi in GenCheckCode.
    new_ref_id = TakeNextId();
    new_ref_inst->SetResultId(new_ref_id);
  }
  // Update new ref with new image if created
  if (new_image_id != 0)
    new_ref_inst->SetInOperand(kSpvImageSampleImageIdInIdx, {new_image_id});
  // Register new reference and add to new block
  Instruction* added_inst = builder->AddInstruction(std::move(new_ref_inst));
  // Preserve the original's recorded offset for debug reporting.
  uid2offset_[added_inst->unique_id()] =
      uid2offset_[ref->ref_inst->unique_id()];
  if (new_ref_id != 0)
    get_decoration_mgr()->CloneDecorations(ref_result_id, new_ref_id);
  return new_ref_id;
}
  138. uint32_t InstBindlessCheckPass::GetImageId(Instruction* inst) {
  139. switch (inst->opcode()) {
  140. case SpvOp::SpvOpImageSampleImplicitLod:
  141. case SpvOp::SpvOpImageSampleExplicitLod:
  142. case SpvOp::SpvOpImageSampleDrefImplicitLod:
  143. case SpvOp::SpvOpImageSampleDrefExplicitLod:
  144. case SpvOp::SpvOpImageSampleProjImplicitLod:
  145. case SpvOp::SpvOpImageSampleProjExplicitLod:
  146. case SpvOp::SpvOpImageSampleProjDrefImplicitLod:
  147. case SpvOp::SpvOpImageSampleProjDrefExplicitLod:
  148. case SpvOp::SpvOpImageGather:
  149. case SpvOp::SpvOpImageDrefGather:
  150. case SpvOp::SpvOpImageQueryLod:
  151. case SpvOp::SpvOpImageSparseSampleImplicitLod:
  152. case SpvOp::SpvOpImageSparseSampleExplicitLod:
  153. case SpvOp::SpvOpImageSparseSampleDrefImplicitLod:
  154. case SpvOp::SpvOpImageSparseSampleDrefExplicitLod:
  155. case SpvOp::SpvOpImageSparseSampleProjImplicitLod:
  156. case SpvOp::SpvOpImageSparseSampleProjExplicitLod:
  157. case SpvOp::SpvOpImageSparseSampleProjDrefImplicitLod:
  158. case SpvOp::SpvOpImageSparseSampleProjDrefExplicitLod:
  159. case SpvOp::SpvOpImageSparseGather:
  160. case SpvOp::SpvOpImageSparseDrefGather:
  161. case SpvOp::SpvOpImageFetch:
  162. case SpvOp::SpvOpImageRead:
  163. case SpvOp::SpvOpImageQueryFormat:
  164. case SpvOp::SpvOpImageQueryOrder:
  165. case SpvOp::SpvOpImageQuerySizeLod:
  166. case SpvOp::SpvOpImageQuerySize:
  167. case SpvOp::SpvOpImageQueryLevels:
  168. case SpvOp::SpvOpImageQuerySamples:
  169. case SpvOp::SpvOpImageSparseFetch:
  170. case SpvOp::SpvOpImageSparseRead:
  171. case SpvOp::SpvOpImageWrite:
  172. return inst->GetSingleWordInOperand(kSpvImageSampleImageIdInIdx);
  173. default:
  174. break;
  175. }
  176. return 0;
  177. }
  178. Instruction* InstBindlessCheckPass::GetPointeeTypeInst(Instruction* ptr_inst) {
  179. uint32_t pte_ty_id = GetPointeeTypeId(ptr_inst);
  180. return get_def_use_mgr()->GetDef(pte_ty_id);
  181. }
  182. bool InstBindlessCheckPass::AnalyzeDescriptorReference(Instruction* ref_inst,
  183. RefAnalysis* ref) {
  184. ref->ref_inst = ref_inst;
  185. if (ref_inst->opcode() == SpvOpLoad || ref_inst->opcode() == SpvOpStore) {
  186. ref->desc_load_id = 0;
  187. ref->ptr_id = ref_inst->GetSingleWordInOperand(kSpvLoadPtrIdInIdx);
  188. Instruction* ptr_inst = get_def_use_mgr()->GetDef(ref->ptr_id);
  189. if (ptr_inst->opcode() != SpvOp::SpvOpAccessChain) return false;
  190. ref->var_id = ptr_inst->GetSingleWordInOperand(kSpvAccessChainBaseIdInIdx);
  191. Instruction* var_inst = get_def_use_mgr()->GetDef(ref->var_id);
  192. if (var_inst->opcode() != SpvOp::SpvOpVariable) return false;
  193. uint32_t storage_class =
  194. var_inst->GetSingleWordInOperand(kSpvVariableStorageClassInIdx);
  195. switch (storage_class) {
  196. case SpvStorageClassUniform:
  197. case SpvStorageClassUniformConstant:
  198. case SpvStorageClassStorageBuffer:
  199. break;
  200. default:
  201. return false;
  202. break;
  203. }
  204. Instruction* desc_type_inst = GetPointeeTypeInst(var_inst);
  205. switch (desc_type_inst->opcode()) {
  206. case SpvOpTypeArray:
  207. case SpvOpTypeRuntimeArray:
  208. // A load through a descriptor array will have at least 3 operands. We
  209. // do not want to instrument loads of descriptors here which are part of
  210. // an image-based reference.
  211. if (ptr_inst->NumInOperands() < 3) return false;
  212. ref->desc_idx_id =
  213. ptr_inst->GetSingleWordInOperand(kSpvAccessChainIndex0IdInIdx);
  214. break;
  215. default:
  216. ref->desc_idx_id = 0;
  217. break;
  218. }
  219. return true;
  220. }
  221. // Reference is not load or store. If not an image-based reference, return.
  222. ref->image_id = GetImageId(ref_inst);
  223. if (ref->image_id == 0) return false;
  224. // Search for descriptor load
  225. uint32_t desc_load_id = ref->image_id;
  226. Instruction* desc_load_inst;
  227. for (;;) {
  228. desc_load_inst = get_def_use_mgr()->GetDef(desc_load_id);
  229. if (desc_load_inst->opcode() == SpvOp::SpvOpSampledImage)
  230. desc_load_id =
  231. desc_load_inst->GetSingleWordInOperand(kSpvSampledImageImageIdInIdx);
  232. else if (desc_load_inst->opcode() == SpvOp::SpvOpImage)
  233. desc_load_id =
  234. desc_load_inst->GetSingleWordInOperand(kSpvImageSampledImageIdInIdx);
  235. else if (desc_load_inst->opcode() == SpvOp::SpvOpCopyObject)
  236. desc_load_id =
  237. desc_load_inst->GetSingleWordInOperand(kSpvCopyObjectOperandIdInIdx);
  238. else
  239. break;
  240. }
  241. if (desc_load_inst->opcode() != SpvOp::SpvOpLoad) {
  242. // TODO(greg-lunarg): Handle additional possibilities?
  243. return false;
  244. }
  245. ref->desc_load_id = desc_load_id;
  246. ref->ptr_id = desc_load_inst->GetSingleWordInOperand(kSpvLoadPtrIdInIdx);
  247. Instruction* ptr_inst = get_def_use_mgr()->GetDef(ref->ptr_id);
  248. if (ptr_inst->opcode() == SpvOp::SpvOpVariable) {
  249. ref->desc_idx_id = 0;
  250. ref->var_id = ref->ptr_id;
  251. } else if (ptr_inst->opcode() == SpvOp::SpvOpAccessChain) {
  252. if (ptr_inst->NumInOperands() != 2) {
  253. assert(false && "unexpected bindless index number");
  254. return false;
  255. }
  256. ref->desc_idx_id =
  257. ptr_inst->GetSingleWordInOperand(kSpvAccessChainIndex0IdInIdx);
  258. ref->var_id = ptr_inst->GetSingleWordInOperand(kSpvAccessChainBaseIdInIdx);
  259. Instruction* var_inst = get_def_use_mgr()->GetDef(ref->var_id);
  260. if (var_inst->opcode() != SpvOpVariable) {
  261. assert(false && "unexpected bindless base");
  262. return false;
  263. }
  264. } else {
  265. // TODO(greg-lunarg): Handle additional possibilities?
  266. return false;
  267. }
  268. return true;
  269. }
  270. uint32_t InstBindlessCheckPass::FindStride(uint32_t ty_id,
  271. uint32_t stride_deco) {
  272. uint32_t stride = 0xdeadbeef;
  273. bool found = get_decoration_mgr()->FindDecoration(
  274. ty_id, stride_deco, [&stride](const Instruction& deco_inst) {
  275. stride = deco_inst.GetSingleWordInOperand(2u);
  276. return true;
  277. });
  278. USE_ASSERT(found && "stride not found");
  279. return stride;
  280. }
// Return the size in bytes spanned by type |ty_id|. |matrix_stride|,
// |col_major| and |in_matrix| describe the enclosing matrix layout, if any,
// since stride decorations live on the enclosing struct rather than on the
// matrix/vector type itself.
uint32_t InstBindlessCheckPass::ByteSize(uint32_t ty_id, uint32_t matrix_stride,
                                         bool col_major, bool in_matrix) {
  analysis::TypeManager* type_mgr = context()->get_type_mgr();
  const analysis::Type* sz_ty = type_mgr->GetType(ty_id);
  if (sz_ty->kind() == analysis::Type::kPointer) {
    // Assuming PhysicalStorageBuffer pointer
    return 8;
  }
  if (sz_ty->kind() == analysis::Type::kMatrix) {
    assert(matrix_stride != 0 && "missing matrix stride");
    const analysis::Matrix* m_ty = sz_ty->AsMatrix();
    if (col_major) {
      // Column-major: stride applies per column (the matrix's elements).
      return m_ty->element_count() * matrix_stride;
    } else {
      // Row-major: stride applies per row, i.e. per component of a column
      // vector, so span is rows * stride.
      const analysis::Vector* v_ty = m_ty->element_type()->AsVector();
      return v_ty->element_count() * matrix_stride;
    }
  }
  uint32_t size = 1;
  if (sz_ty->kind() == analysis::Type::kVector) {
    const analysis::Vector* v_ty = sz_ty->AsVector();
    size = v_ty->element_count();
    const analysis::Type* comp_ty = v_ty->element_type();
    // if vector in row major matrix, the vector is strided so return the
    // number of bytes spanned by the vector
    if (in_matrix && !col_major && matrix_stride > 0) {
      uint32_t comp_ty_id = type_mgr->GetId(comp_ty);
      return (size - 1) * matrix_stride + ByteSize(comp_ty_id, 0, false, false);
    }
    sz_ty = comp_ty;
  }
  // Scalar (or vector-of-scalar) case: accumulate width in bits, then
  // convert to bytes at the end.
  switch (sz_ty->kind()) {
    case analysis::Type::kFloat: {
      const analysis::Float* f_ty = sz_ty->AsFloat();
      size *= f_ty->width();
    } break;
    case analysis::Type::kInteger: {
      const analysis::Integer* i_ty = sz_ty->AsInteger();
      size *= i_ty->width();
    } break;
    default: { assert(false && "unexpected type"); } break;
  }
  size /= 8;  // bits -> bytes
  return size;
}
// Generate code into |builder| that computes the byte offset, within the
// bound buffer, of the last byte touched by the access chain in |ref|, and
// return the id of that computed value. Walks the access-chain indices in
// parallel with the pointee type, accumulating per-level offsets (array
// stride * index, matrix column stride * index, vector component offset,
// struct member Offset decoration), then adds ByteSize(last type) - 1.
uint32_t InstBindlessCheckPass::GenLastByteIdx(RefAnalysis* ref,
                                               InstructionBuilder* builder) {
  // Find outermost buffer type and its access chain index
  Instruction* var_inst = get_def_use_mgr()->GetDef(ref->var_id);
  Instruction* desc_ty_inst = GetPointeeTypeInst(var_inst);
  uint32_t buff_ty_id;
  uint32_t ac_in_idx = 1;
  switch (desc_ty_inst->opcode()) {
    case SpvOpTypeArray:
    case SpvOpTypeRuntimeArray:
      // Descriptor array: the first access-chain index selects the
      // descriptor, so the buffer offsets start at the next index.
      buff_ty_id = desc_ty_inst->GetSingleWordInOperand(0);
      ++ac_in_idx;
      break;
    default:
      assert(desc_ty_inst->opcode() == SpvOpTypeStruct &&
             "unexpected descriptor type");
      buff_ty_id = desc_ty_inst->result_id();
      break;
  }
  // Process remaining access chain indices
  Instruction* ac_inst = get_def_use_mgr()->GetDef(ref->ptr_id);
  uint32_t curr_ty_id = buff_ty_id;
  uint32_t sum_id = 0u;
  // Matrix layout state carried from the enclosing struct's member
  // decorations into nested matrix/vector levels.
  uint32_t matrix_stride = 0u;
  bool col_major = false;
  uint32_t matrix_stride_id = 0u;
  bool in_matrix = false;
  while (ac_in_idx < ac_inst->NumInOperands()) {
    uint32_t curr_idx_id = ac_inst->GetSingleWordInOperand(ac_in_idx);
    Instruction* curr_ty_inst = get_def_use_mgr()->GetDef(curr_ty_id);
    uint32_t curr_offset_id = 0;
    switch (curr_ty_inst->opcode()) {
      case SpvOpTypeArray:
      case SpvOpTypeRuntimeArray: {
        // Get array stride and multiply by current index
        uint32_t arr_stride = FindStride(curr_ty_id, SpvDecorationArrayStride);
        uint32_t arr_stride_id = builder->GetUintConstantId(arr_stride);
        uint32_t curr_idx_32b_id = Gen32BitCvtCode(curr_idx_id, builder);
        Instruction* curr_offset_inst = builder->AddBinaryOp(
            GetUintId(), SpvOpIMul, arr_stride_id, curr_idx_32b_id);
        curr_offset_id = curr_offset_inst->result_id();
        // Get element type for next step
        curr_ty_id = curr_ty_inst->GetSingleWordInOperand(0);
      } break;
      case SpvOpTypeMatrix: {
        assert(matrix_stride != 0 && "missing matrix stride");
        matrix_stride_id = builder->GetUintConstantId(matrix_stride);
        uint32_t vec_ty_id = curr_ty_inst->GetSingleWordInOperand(0);
        // If column major, multiply column index by matrix stride, otherwise
        // by vector component size and save matrix stride for vector (row)
        // index
        uint32_t col_stride_id;
        if (col_major) {
          col_stride_id = matrix_stride_id;
        } else {
          Instruction* vec_ty_inst = get_def_use_mgr()->GetDef(vec_ty_id);
          uint32_t comp_ty_id = vec_ty_inst->GetSingleWordInOperand(0u);
          uint32_t col_stride = ByteSize(comp_ty_id, 0u, false, false);
          col_stride_id = builder->GetUintConstantId(col_stride);
        }
        uint32_t curr_idx_32b_id = Gen32BitCvtCode(curr_idx_id, builder);
        Instruction* curr_offset_inst = builder->AddBinaryOp(
            GetUintId(), SpvOpIMul, col_stride_id, curr_idx_32b_id);
        curr_offset_id = curr_offset_inst->result_id();
        // Get element type for next step
        curr_ty_id = vec_ty_id;
        in_matrix = true;
      } break;
      case SpvOpTypeVector: {
        // If inside a row major matrix type, multiply index by matrix stride,
        // else multiply by component size
        uint32_t comp_ty_id = curr_ty_inst->GetSingleWordInOperand(0u);
        uint32_t curr_idx_32b_id = Gen32BitCvtCode(curr_idx_id, builder);
        if (in_matrix && !col_major) {
          Instruction* curr_offset_inst = builder->AddBinaryOp(
              GetUintId(), SpvOpIMul, matrix_stride_id, curr_idx_32b_id);
          curr_offset_id = curr_offset_inst->result_id();
        } else {
          uint32_t comp_ty_sz = ByteSize(comp_ty_id, 0u, false, false);
          uint32_t comp_ty_sz_id = builder->GetUintConstantId(comp_ty_sz);
          Instruction* curr_offset_inst = builder->AddBinaryOp(
              GetUintId(), SpvOpIMul, comp_ty_sz_id, curr_idx_32b_id);
          curr_offset_id = curr_offset_inst->result_id();
        }
        // Get element type for next step
        curr_ty_id = comp_ty_id;
      } break;
      case SpvOpTypeStruct: {
        // Get buffer byte offset for the referenced member
        Instruction* curr_idx_inst = get_def_use_mgr()->GetDef(curr_idx_id);
        assert(curr_idx_inst->opcode() == SpvOpConstant &&
               "unexpected struct index");
        uint32_t member_idx = curr_idx_inst->GetSingleWordInOperand(0);
        uint32_t member_offset = 0xdeadbeef;
        bool found = get_decoration_mgr()->FindDecoration(
            curr_ty_id, SpvDecorationOffset,
            [&member_idx, &member_offset](const Instruction& deco_inst) {
              if (deco_inst.GetSingleWordInOperand(1u) != member_idx)
                return false;
              member_offset = deco_inst.GetSingleWordInOperand(3u);
              return true;
            });
        USE_ASSERT(found && "member offset not found");
        curr_offset_id = builder->GetUintConstantId(member_offset);
        // Look for matrix stride for this member if there is one. The matrix
        // stride is not on the matrix type, but in a OpMemberDecorate on the
        // enclosing struct type at the member index. If none found, reset
        // stride to 0.
        found = get_decoration_mgr()->FindDecoration(
            curr_ty_id, SpvDecorationMatrixStride,
            [&member_idx, &matrix_stride](const Instruction& deco_inst) {
              if (deco_inst.GetSingleWordInOperand(1u) != member_idx)
                return false;
              matrix_stride = deco_inst.GetSingleWordInOperand(3u);
              return true;
            });
        if (!found) matrix_stride = 0;
        // Look for column major decoration
        found = get_decoration_mgr()->FindDecoration(
            curr_ty_id, SpvDecorationColMajor,
            [&member_idx, &col_major](const Instruction& deco_inst) {
              if (deco_inst.GetSingleWordInOperand(1u) != member_idx)
                return false;
              col_major = true;
              return true;
            });
        if (!found) col_major = false;
        // Get element type for next step
        curr_ty_id = curr_ty_inst->GetSingleWordInOperand(member_idx);
      } break;
      default: { assert(false && "unexpected non-composite type"); } break;
    }
    // Accumulate this level's offset into the running sum.
    if (sum_id == 0)
      sum_id = curr_offset_id;
    else {
      Instruction* sum_inst =
          builder->AddBinaryOp(GetUintId(), SpvOpIAdd, sum_id, curr_offset_id);
      sum_id = sum_inst->result_id();
    }
    ++ac_in_idx;
  }
  // Add in offset of last byte of referenced object
  uint32_t bsize = ByteSize(curr_ty_id, matrix_stride, col_major, in_matrix);
  uint32_t last = bsize - 1;
  uint32_t last_id = builder->GetUintConstantId(last);
  Instruction* sum_inst =
      builder->AddBinaryOp(GetUintId(), SpvOpIAdd, sum_id, last_id);
  return sum_inst->result_id();
}
// Generate the runtime check around the reference in |ref|: branch on
// |check_id| to a valid block that re-executes the original reference, or to
// an invalid block that writes an error record (|error_id|, and |offset_id|/
// |length_id| as applicable) to the debug output stream. The two paths merge,
// and any original result is replaced by a phi of the clone's result and a
// null value. Appends the new blocks to |new_blocks| and kills the original
// reference.
void InstBindlessCheckPass::GenCheckCode(
    uint32_t check_id, uint32_t error_id, uint32_t offset_id,
    uint32_t length_id, uint32_t stage_idx, RefAnalysis* ref,
    std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  BasicBlock* back_blk_ptr = &*new_blocks->back();
  InstructionBuilder builder(
      context(), back_blk_ptr,
      IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
  // Gen conditional branch on check_id. Valid branch generates original
  // reference. Invalid generates debug output and zero result (if needed).
  uint32_t merge_blk_id = TakeNextId();
  uint32_t valid_blk_id = TakeNextId();
  uint32_t invalid_blk_id = TakeNextId();
  std::unique_ptr<Instruction> merge_label(NewLabel(merge_blk_id));
  std::unique_ptr<Instruction> valid_label(NewLabel(valid_blk_id));
  std::unique_ptr<Instruction> invalid_label(NewLabel(invalid_blk_id));
  (void)builder.AddConditionalBranch(check_id, valid_blk_id, invalid_blk_id,
                                     merge_blk_id, SpvSelectionControlMaskNone);
  // Gen valid bounds branch
  std::unique_ptr<BasicBlock> new_blk_ptr(
      new BasicBlock(std::move(valid_label)));
  builder.SetInsertPoint(&*new_blk_ptr);
  uint32_t new_ref_id = CloneOriginalReference(ref, &builder);
  (void)builder.AddBranch(merge_blk_id);
  new_blocks->push_back(std::move(new_blk_ptr));
  // Gen invalid block
  new_blk_ptr.reset(new BasicBlock(std::move(invalid_label)));
  builder.SetInsertPoint(&*new_blk_ptr);
  uint32_t u_index_id = GenUintCastCode(ref->desc_idx_id, &builder);
  if (offset_id != 0) {
    // Buffer OOB
    uint32_t u_offset_id = GenUintCastCode(offset_id, &builder);
    uint32_t u_length_id = GenUintCastCode(length_id, &builder);
    GenDebugStreamWrite(uid2offset_[ref->ref_inst->unique_id()], stage_idx,
                        {error_id, u_index_id, u_offset_id, u_length_id},
                        &builder);
  } else if (buffer_bounds_enabled_ || texel_buffer_enabled_) {
    // Uninitialized Descriptor - Return additional unused zero so all error
    // modes will use same debug stream write function
    uint32_t u_length_id = GenUintCastCode(length_id, &builder);
    GenDebugStreamWrite(
        uid2offset_[ref->ref_inst->unique_id()], stage_idx,
        {error_id, u_index_id, u_length_id, builder.GetUintConstantId(0)},
        &builder);
  } else {
    // Uninitialized Descriptor - Normal error return
    uint32_t u_length_id = GenUintCastCode(length_id, &builder);
    GenDebugStreamWrite(uid2offset_[ref->ref_inst->unique_id()], stage_idx,
                        {error_id, u_index_id, u_length_id}, &builder);
  }
  // Remember last invalid block id
  uint32_t last_invalid_blk_id = new_blk_ptr->GetLabelInst()->result_id();
  // Gen zero for invalid reference
  uint32_t ref_type_id = ref->ref_inst->type_id();
  (void)builder.AddBranch(merge_blk_id);
  new_blocks->push_back(std::move(new_blk_ptr));
  // Gen merge block
  new_blk_ptr.reset(new BasicBlock(std::move(merge_label)));
  builder.SetInsertPoint(&*new_blk_ptr);
  // Gen phi of new reference and zero, if necessary, and replace the
  // result id of the original reference with that of the Phi. Kill original
  // reference.
  if (new_ref_id != 0) {
    Instruction* phi_inst = builder.AddPhi(
        ref_type_id, {new_ref_id, valid_blk_id, GetNullId(ref_type_id),
                      last_invalid_blk_id});
    context()->ReplaceAllUsesWith(ref->ref_inst->result_id(),
                                  phi_inst->result_id());
  }
  new_blocks->push_back(std::move(new_blk_ptr));
  context()->KillInst(ref->ref_inst);
}
// Instrument the instruction at |ref_inst_itr| (in block |ref_block_itr|)
// with a descriptor-index bounds check if it references a descriptor through
// an indexed descriptor array. Replacement blocks are appended to
// |new_blocks|; nothing is emitted when the reference is provably in bounds
// or not of interest.
void InstBindlessCheckPass::GenDescIdxCheckCode(
    BasicBlock::iterator ref_inst_itr,
    UptrVectorIterator<BasicBlock> ref_block_itr, uint32_t stage_idx,
    std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  // Look for reference through indexed descriptor. If found, analyze and
  // save components. If not, return.
  RefAnalysis ref;
  if (!AnalyzeDescriptorReference(&*ref_inst_itr, &ref)) return;
  Instruction* ptr_inst = get_def_use_mgr()->GetDef(ref.ptr_id);
  if (ptr_inst->opcode() != SpvOp::SpvOpAccessChain) return;
  // If index and bound both compile-time constants and index < bound,
  // return without changing
  Instruction* var_inst = get_def_use_mgr()->GetDef(ref.var_id);
  Instruction* desc_type_inst = GetPointeeTypeInst(var_inst);
  uint32_t length_id = 0;
  if (desc_type_inst->opcode() == SpvOpTypeArray) {
    length_id =
        desc_type_inst->GetSingleWordInOperand(kSpvTypeArrayLengthIdInIdx);
    Instruction* index_inst = get_def_use_mgr()->GetDef(ref.desc_idx_id);
    Instruction* length_inst = get_def_use_mgr()->GetDef(length_id);
    if (index_inst->opcode() == SpvOpConstant &&
        length_inst->opcode() == SpvOpConstant &&
        index_inst->GetSingleWordInOperand(kSpvConstantValueInIdx) <
            length_inst->GetSingleWordInOperand(kSpvConstantValueInIdx))
      return;
  } else if (!desc_idx_enabled_ ||
             desc_type_inst->opcode() != SpvOpTypeRuntimeArray) {
    // Runtime arrays require descriptor-index checking to be enabled; any
    // other descriptor type is not an indexed array, so skip.
    return;
  }
  // Move original block's preceding instructions into first new block
  std::unique_ptr<BasicBlock> new_blk_ptr;
  MovePreludeCode(ref_inst_itr, ref_block_itr, &new_blk_ptr);
  InstructionBuilder builder(
      context(), &*new_blk_ptr,
      IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
  new_blocks->push_back(std::move(new_blk_ptr));
  uint32_t error_id = builder.GetUintConstantId(kInstErrorBindlessBounds);
  // If length id not yet set, descriptor array is runtime size so
  // generate load of length from stage's debug input buffer.
  if (length_id == 0) {
    assert(desc_type_inst->opcode() == SpvOpTypeRuntimeArray &&
           "unexpected bindless type");
    length_id = GenDebugReadLength(ref.var_id, &builder);
  }
  // Generate full runtime bounds test code with true branch
  // being full reference and false branch being debug output and zero
  // for the referenced value.
  uint32_t desc_idx_32b_id = Gen32BitCvtCode(ref.desc_idx_id, &builder);
  uint32_t length_32b_id = Gen32BitCvtCode(length_id, &builder);
  Instruction* ult_inst = builder.AddBinaryOp(GetBoolId(), SpvOpULessThan,
                                              desc_idx_32b_id, length_32b_id);
  ref.desc_idx_id = desc_idx_32b_id;
  GenCheckCode(ult_inst->result_id(), error_id, 0u, length_id, stage_idx, &ref,
               new_blocks);
  // Move original block's remaining code into remainder/merge block and add
  // to new blocks
  BasicBlock* back_blk_ptr = &*new_blocks->back();
  MovePostludeCode(ref_block_itr, back_blk_ptr);
}
// Instrument the instruction at |ref_inst_itr| with a descriptor
// initialization check, or — when bounds checking is enabled and the
// reference is a non-aggregate buffer load/store — a buffer out-of-bounds
// check against the length read from the debug input buffer. Replacement
// blocks are appended to |new_blocks|.
void InstBindlessCheckPass::GenDescInitCheckCode(
    BasicBlock::iterator ref_inst_itr,
    UptrVectorIterator<BasicBlock> ref_block_itr, uint32_t stage_idx,
    std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  // Look for reference through descriptor. If not, return.
  RefAnalysis ref;
  if (!AnalyzeDescriptorReference(&*ref_inst_itr, &ref)) return;
  // Determine if we can only do initialization check
  bool init_check = false;
  if (ref.desc_load_id != 0 || !buffer_bounds_enabled_) {
    // Image-based reference, or bounds checking disabled: init check only.
    init_check = true;
  } else {
    // For now, only do bounds check for non-aggregate types. Otherwise
    // just do descriptor initialization check.
    // TODO(greg-lunarg): Do bounds check for aggregate loads and stores
    Instruction* ref_ptr_inst = get_def_use_mgr()->GetDef(ref.ptr_id);
    Instruction* pte_type_inst = GetPointeeTypeInst(ref_ptr_inst);
    uint32_t pte_type_op = pte_type_inst->opcode();
    if (pte_type_op == SpvOpTypeArray || pte_type_op == SpvOpTypeRuntimeArray ||
        pte_type_op == SpvOpTypeStruct)
      init_check = true;
  }
  // If initialization check and not enabled, return
  if (init_check && !desc_init_enabled_) return;
  // Move original block's preceding instructions into first new block
  std::unique_ptr<BasicBlock> new_blk_ptr;
  MovePreludeCode(ref_inst_itr, ref_block_itr, &new_blk_ptr);
  InstructionBuilder builder(
      context(), &*new_blk_ptr,
      IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
  new_blocks->push_back(std::move(new_blk_ptr));
  // If initialization check, use reference value of zero.
  // Else use the index of the last byte referenced.
  uint32_t ref_id = init_check ? builder.GetUintConstantId(0u)
                               : GenLastByteIdx(&ref, &builder);
  // Read initialization/bounds from debug input buffer. If index id not yet
  // set, binding is single descriptor, so set index to constant 0.
  if (ref.desc_idx_id == 0) ref.desc_idx_id = builder.GetUintConstantId(0u);
  uint32_t init_id = GenDebugReadInit(ref.var_id, ref.desc_idx_id, &builder);
  // Generate runtime initialization/bounds test code with true branch
  // being full reference and false branch being debug output and zero
  // for the referenced value. The same ULessThan works for both modes:
  // init check is 0 < init_id, bounds check is last_byte < length.
  Instruction* ult_inst =
      builder.AddBinaryOp(GetBoolId(), SpvOpULessThan, ref_id, init_id);
  uint32_t error =
      init_check ? kInstErrorBindlessUninit : kInstErrorBindlessBuffOOB;
  uint32_t error_id = builder.GetUintConstantId(error);
  GenCheckCode(ult_inst->result_id(), error_id, init_check ? 0 : ref_id,
               init_check ? builder.GetUintConstantId(0u) : init_id, stage_idx,
               &ref, new_blocks);
  // Move original block's remaining code into remainder/merge block and add
  // to new blocks
  BasicBlock* back_blk_ptr = &*new_blocks->back();
  MovePostludeCode(ref_block_itr, back_blk_ptr);
}
  661. void InstBindlessCheckPass::GenTexBuffCheckCode(
  662. BasicBlock::iterator ref_inst_itr,
  663. UptrVectorIterator<BasicBlock> ref_block_itr, uint32_t stage_idx,
  664. std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  665. // Only process OpImageRead and OpImageWrite with no optional operands
  666. Instruction* ref_inst = &*ref_inst_itr;
  667. SpvOp op = ref_inst->opcode();
  668. uint32_t num_in_oprnds = ref_inst->NumInOperands();
  669. if (!((op == SpvOpImageRead && num_in_oprnds == 2) ||
  670. (op == SpvOpImageFetch && num_in_oprnds == 2) ||
  671. (op == SpvOpImageWrite && num_in_oprnds == 3)))
  672. return;
  673. // Pull components from descriptor reference
  674. RefAnalysis ref;
  675. if (!AnalyzeDescriptorReference(ref_inst, &ref)) return;
  676. // Only process if image is texel buffer
  677. Instruction* image_inst = get_def_use_mgr()->GetDef(ref.image_id);
  678. uint32_t image_ty_id = image_inst->type_id();
  679. Instruction* image_ty_inst = get_def_use_mgr()->GetDef(image_ty_id);
  680. if (image_ty_inst->GetSingleWordInOperand(kSpvTypeImageDim) != SpvDimBuffer)
  681. return;
  682. if (image_ty_inst->GetSingleWordInOperand(kSpvTypeImageDepth) != 0) return;
  683. if (image_ty_inst->GetSingleWordInOperand(kSpvTypeImageArrayed) != 0) return;
  684. if (image_ty_inst->GetSingleWordInOperand(kSpvTypeImageMS) != 0) return;
  685. // Enable ImageQuery Capability if not yet enabled
  686. if (!get_feature_mgr()->HasCapability(SpvCapabilityImageQuery)) {
  687. std::unique_ptr<Instruction> cap_image_query_inst(new Instruction(
  688. context(), SpvOpCapability, 0, 0,
  689. std::initializer_list<Operand>{
  690. {SPV_OPERAND_TYPE_CAPABILITY, {SpvCapabilityImageQuery}}}));
  691. get_def_use_mgr()->AnalyzeInstDefUse(&*cap_image_query_inst);
  692. context()->AddCapability(std::move(cap_image_query_inst));
  693. }
  694. // Move original block's preceding instructions into first new block
  695. std::unique_ptr<BasicBlock> new_blk_ptr;
  696. MovePreludeCode(ref_inst_itr, ref_block_itr, &new_blk_ptr);
  697. InstructionBuilder builder(
  698. context(), &*new_blk_ptr,
  699. IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
  700. new_blocks->push_back(std::move(new_blk_ptr));
  701. // Get texel coordinate
  702. uint32_t coord_id =
  703. GenUintCastCode(ref_inst->GetSingleWordInOperand(1), &builder);
  704. // If index id not yet set, binding is single descriptor, so set index to
  705. // constant 0.
  706. if (ref.desc_idx_id == 0) ref.desc_idx_id = builder.GetUintConstantId(0u);
  707. // Get texel buffer size.
  708. Instruction* size_inst =
  709. builder.AddUnaryOp(GetUintId(), SpvOpImageQuerySize, ref.image_id);
  710. uint32_t size_id = size_inst->result_id();
  711. // Generate runtime initialization/bounds test code with true branch
  712. // being full reference and false branch being debug output and zero
  713. // for the referenced value.
  714. Instruction* ult_inst =
  715. builder.AddBinaryOp(GetBoolId(), SpvOpULessThan, coord_id, size_id);
  716. uint32_t error_id = builder.GetUintConstantId(kInstErrorBindlessBuffOOB);
  717. GenCheckCode(ult_inst->result_id(), error_id, coord_id, size_id, stage_idx,
  718. &ref, new_blocks);
  719. // Move original block's remaining code into remainder/merge block and add
  720. // to new blocks
  721. BasicBlock* back_blk_ptr = &*new_blocks->back();
  722. MovePostludeCode(ref_block_itr, back_blk_ptr);
  723. }
  724. void InstBindlessCheckPass::InitializeInstBindlessCheck() {
  725. // Initialize base class
  726. InitializeInstrument();
  727. // If runtime array length support or buffer bounds checking are enabled,
  728. // create variable mappings. Length support is always enabled if descriptor
  729. // init check is enabled.
  730. if (desc_idx_enabled_ || buffer_bounds_enabled_ || texel_buffer_enabled_)
  731. for (auto& anno : get_module()->annotations())
  732. if (anno.opcode() == SpvOpDecorate) {
  733. if (anno.GetSingleWordInOperand(1u) == SpvDecorationDescriptorSet)
  734. var2desc_set_[anno.GetSingleWordInOperand(0u)] =
  735. anno.GetSingleWordInOperand(2u);
  736. else if (anno.GetSingleWordInOperand(1u) == SpvDecorationBinding)
  737. var2binding_[anno.GetSingleWordInOperand(0u)] =
  738. anno.GetSingleWordInOperand(2u);
  739. }
  740. }
  741. Pass::Status InstBindlessCheckPass::ProcessImpl() {
  742. // Perform bindless bounds check on each entry point function in module
  743. InstProcessFunction pfn =
  744. [this](BasicBlock::iterator ref_inst_itr,
  745. UptrVectorIterator<BasicBlock> ref_block_itr, uint32_t stage_idx,
  746. std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  747. return GenDescIdxCheckCode(ref_inst_itr, ref_block_itr, stage_idx,
  748. new_blocks);
  749. };
  750. bool modified = InstProcessEntryPointCallTree(pfn);
  751. if (desc_init_enabled_ || buffer_bounds_enabled_) {
  752. // Perform descriptor initialization and/or buffer bounds check on each
  753. // entry point function in module
  754. pfn = [this](BasicBlock::iterator ref_inst_itr,
  755. UptrVectorIterator<BasicBlock> ref_block_itr,
  756. uint32_t stage_idx,
  757. std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  758. return GenDescInitCheckCode(ref_inst_itr, ref_block_itr, stage_idx,
  759. new_blocks);
  760. };
  761. modified |= InstProcessEntryPointCallTree(pfn);
  762. }
  763. if (texel_buffer_enabled_) {
  764. // Perform texel buffer bounds check on each entry point function in
  765. // module. Generate after descriptor bounds and initialization checks.
  766. pfn = [this](BasicBlock::iterator ref_inst_itr,
  767. UptrVectorIterator<BasicBlock> ref_block_itr,
  768. uint32_t stage_idx,
  769. std::vector<std::unique_ptr<BasicBlock>>* new_blocks) {
  770. return GenTexBuffCheckCode(ref_inst_itr, ref_block_itr, stage_idx,
  771. new_blocks);
  772. };
  773. modified |= InstProcessEntryPointCallTree(pfn);
  774. }
  775. return modified ? Status::SuccessWithChange : Status::SuccessWithoutChange;
  776. }
Pass::Status InstBindlessCheckPass::Process() {
  // Build per-run state (base instrumentation setup plus the
  // variable -> descriptor-set / binding maps) before instrumenting.
  InitializeInstBindlessCheck();
  // Instrument all entry point call trees; reports whether the module changed.
  return ProcessImpl();
}
  781. } // namespace opt
  782. } // namespace spvtools