instrument_pass.cpp 42 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851852853854855856857858859860861862863864865866867868869870871872873874875876877878879880881882883884885886887888889890891892893894895896897898899900901902903904905906907908909910911912913914915916917918919920921922923924925926927928929930931932933934935936937938939940941942943944945946947948949950951952953954955956957958959960961962963964965966967968969970971972973974975976977978979980981982983984985986987988
  1. // Copyright (c) 2018 The Khronos Group Inc.
  2. // Copyright (c) 2018 Valve Corporation
  3. // Copyright (c) 2018 LunarG Inc.
  4. //
  5. // Licensed under the Apache License, Version 2.0 (the "License");
  6. // you may not use this file except in compliance with the License.
  7. // You may obtain a copy of the License at
  8. //
  9. // http://www.apache.org/licenses/LICENSE-2.0
  10. //
  11. // Unless required by applicable law or agreed to in writing, software
  12. // distributed under the License is distributed on an "AS IS" BASIS,
  13. // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  14. // See the License for the specific language governing permissions and
  15. // limitations under the License.
  16. #include "instrument_pass.h"
  17. #include "source/cfa.h"
  18. #include "source/spirv_constant.h"
  19. namespace {
  20. // Common Parameter Positions
  21. static const int kInstCommonParamInstIdx = 0;
  22. static const int kInstCommonParamCnt = 1;
  23. // Indices of operands in SPIR-V instructions
  24. static const int kEntryPointExecutionModelInIdx = 0;
  25. static const int kEntryPointFunctionIdInIdx = 1;
  26. } // anonymous namespace
  27. namespace spvtools {
  28. namespace opt {
// Move all instructions of the block referenced by |ref_block_itr| that
// precede |ref_inst_itr| into a newly allocated block stored in
// |new_blk_ptr|. The new block reuses the original block's label. Also
// resets the same-block maps used later by MovePostludeCode /
// CloneSameBlockOps to regenerate OpSampledImage/OpImage results.
void InstrumentPass::MovePreludeCode(
    BasicBlock::iterator ref_inst_itr,
    UptrVectorIterator<BasicBlock> ref_block_itr,
    std::unique_ptr<BasicBlock>* new_blk_ptr) {
  same_block_pre_.clear();
  same_block_post_.clear();
  // Initialize new block. Reuse label from original block.
  new_blk_ptr->reset(new BasicBlock(std::move(ref_block_itr->GetLabel())));
  // Move contents of original ref block up to ref instruction. Each
  // iteration detaches the block's current first instruction, so begin()
  // is re-fetched every pass instead of incrementing the iterator.
  for (auto cii = ref_block_itr->begin(); cii != ref_inst_itr;
       cii = ref_block_itr->begin()) {
    Instruction* inst = &*cii;
    inst->RemoveFromList();
    // Take ownership of the detached instruction.
    std::unique_ptr<Instruction> mv_ptr(inst);
    // Remember same-block ops for possible regeneration.
    if (IsSameBlockOp(&*mv_ptr)) {
      auto* sb_inst_ptr = mv_ptr.get();
      same_block_pre_[mv_ptr->result_id()] = sb_inst_ptr;
    }
    (*new_blk_ptr)->AddInstruction(std::move(mv_ptr));
  }
}
// Move all remaining instructions of the block referenced by
// |ref_block_itr| into |new_blk_ptr|, regenerating any same-block ops
// (OpSampledImage/OpImage) recorded by MovePreludeCode that are referenced
// but not yet defined in the new block.
void InstrumentPass::MovePostludeCode(
    UptrVectorIterator<BasicBlock> ref_block_itr, BasicBlock* new_blk_ptr) {
  // new_blk_ptr->reset(new BasicBlock(NewLabel(ref_block_itr->id())));
  // Move contents of original ref block. As in MovePreludeCode, each
  // iteration detaches the first instruction, so begin() is re-fetched.
  for (auto cii = ref_block_itr->begin(); cii != ref_block_itr->end();
       cii = ref_block_itr->begin()) {
    Instruction* inst = &*cii;
    inst->RemoveFromList();
    std::unique_ptr<Instruction> mv_inst(inst);
    // Regenerate any same-block instruction that has not been seen in the
    // current block.
    if (same_block_pre_.size() > 0) {
      CloneSameBlockOps(&mv_inst, &same_block_post_, &same_block_pre_,
                        new_blk_ptr);
      // Remember same-block ops in this block. Mapping the result id to
      // itself marks it as already defined here (no clone needed).
      if (IsSameBlockOp(&*mv_inst)) {
        const uint32_t rid = mv_inst->result_id();
        same_block_post_[rid] = rid;
      }
    }
    new_blk_ptr->AddInstruction(std::move(mv_inst));
  }
}
  74. std::unique_ptr<Instruction> InstrumentPass::NewLabel(uint32_t label_id) {
  75. std::unique_ptr<Instruction> newLabel(
  76. new Instruction(context(), SpvOpLabel, 0, label_id, {}));
  77. get_def_use_mgr()->AnalyzeInstDefUse(&*newLabel);
  78. return newLabel;
  79. }
  80. uint32_t InstrumentPass::GenUintCastCode(uint32_t val_id,
  81. InstructionBuilder* builder) {
  82. // Cast value to 32-bit unsigned if necessary
  83. if (get_def_use_mgr()->GetDef(val_id)->type_id() == GetUintId())
  84. return val_id;
  85. return builder->AddUnaryOp(GetUintId(), SpvOpBitcast, val_id)->result_id();
  86. }
  87. void InstrumentPass::GenDebugOutputFieldCode(uint32_t base_offset_id,
  88. uint32_t field_offset,
  89. uint32_t field_value_id,
  90. InstructionBuilder* builder) {
  91. // Cast value to 32-bit unsigned if necessary
  92. uint32_t val_id = GenUintCastCode(field_value_id, builder);
  93. // Store value
  94. Instruction* data_idx_inst =
  95. builder->AddBinaryOp(GetUintId(), SpvOpIAdd, base_offset_id,
  96. builder->GetUintConstantId(field_offset));
  97. uint32_t buf_id = GetOutputBufferId();
  98. uint32_t buf_uint_ptr_id = GetBufferUintPtrId();
  99. Instruction* achain_inst =
  100. builder->AddTernaryOp(buf_uint_ptr_id, SpvOpAccessChain, buf_id,
  101. builder->GetUintConstantId(kDebugOutputDataOffset),
  102. data_idx_inst->result_id());
  103. (void)builder->AddBinaryOp(0, SpvOpStore, achain_inst->result_id(), val_id);
  104. }
// Write the common (stage-independent) header fields of a debug output
// record: total record size, shader id, instruction index, and stage index.
// All field offsets are relative to |base_offset_id|, the record's base
// within the output buffer's data array.
void InstrumentPass::GenCommonStreamWriteCode(uint32_t record_sz,
                                              uint32_t inst_id,
                                              uint32_t stage_idx,
                                              uint32_t base_offset_id,
                                              InstructionBuilder* builder) {
  // Store record size
  GenDebugOutputFieldCode(base_offset_id, kInstCommonOutSize,
                          builder->GetUintConstantId(record_sz), builder);
  // Store Shader Id
  GenDebugOutputFieldCode(base_offset_id, kInstCommonOutShaderId,
                          builder->GetUintConstantId(shader_id_), builder);
  // Store Instruction Idx
  GenDebugOutputFieldCode(base_offset_id, kInstCommonOutInstructionIdx, inst_id,
                          builder);
  // Store Stage Idx
  GenDebugOutputFieldCode(base_offset_id, kInstCommonOutStageIdx,
                          builder->GetUintConstantId(stage_idx), builder);
}
  123. void InstrumentPass::GenFragCoordEltDebugOutputCode(
  124. uint32_t base_offset_id, uint32_t uint_frag_coord_id, uint32_t element,
  125. InstructionBuilder* builder) {
  126. Instruction* element_val_inst = builder->AddIdLiteralOp(
  127. GetUintId(), SpvOpCompositeExtract, uint_frag_coord_id, element);
  128. GenDebugOutputFieldCode(base_offset_id, kInstFragOutFragCoordX + element,
  129. element_val_inst->result_id(), builder);
  130. }
  131. uint32_t InstrumentPass::GenVarLoad(uint32_t var_id,
  132. InstructionBuilder* builder) {
  133. Instruction* var_inst = get_def_use_mgr()->GetDef(var_id);
  134. uint32_t type_id = GetPointeeTypeId(var_inst);
  135. Instruction* load_inst = builder->AddUnaryOp(type_id, SpvOpLoad, var_id);
  136. return load_inst->result_id();
  137. }
  138. void InstrumentPass::GenBuiltinOutputCode(uint32_t builtin_id,
  139. uint32_t builtin_off,
  140. uint32_t base_offset_id,
  141. InstructionBuilder* builder) {
  142. // Load and store builtin
  143. uint32_t load_id = GenVarLoad(builtin_id, builder);
  144. GenDebugOutputFieldCode(base_offset_id, builtin_off, load_id, builder);
  145. }
// Write the stage-specific fields of a debug output record: the builtin
// values identifying the reporting invocation for execution model
// |stage_idx|. Fields are written at stage-specific offsets from
// |base_offset_id|.
void InstrumentPass::GenStageStreamWriteCode(uint32_t stage_idx,
                                             uint32_t base_offset_id,
                                             InstructionBuilder* builder) {
  // TODO(greg-lunarg): Add support for all stages
  switch (stage_idx) {
    case SpvExecutionModelVertex: {
      // Load and store VertexIndex and InstanceIndex.
      GenBuiltinOutputCode(
          context()->GetBuiltinInputVarId(SpvBuiltInVertexIndex),
          kInstVertOutVertexIndex, base_offset_id, builder);
      GenBuiltinOutputCode(
          context()->GetBuiltinInputVarId(SpvBuiltInInstanceIndex),
          kInstVertOutInstanceIndex, base_offset_id, builder);
    } break;
    case SpvExecutionModelGLCompute: {
      // Load and store GlobalInvocationId.
      uint32_t load_id = GenVarLoad(
          context()->GetBuiltinInputVarId(SpvBuiltInGlobalInvocationId),
          builder);
      Instruction* x_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, load_id, 0);
      Instruction* y_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, load_id, 1);
      Instruction* z_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, load_id, 2);
      if (version_ == 1) {
        // For version 1 format, as a stopgap, pack uvec3 into first word:
        // x << 21 | y << 10 | z. Second word is unused. (DEPRECATED)
        Instruction* x_shft_inst = builder->AddBinaryOp(
            GetUintId(), SpvOpShiftLeftLogical, x_inst->result_id(),
            builder->GetUintConstantId(21));
        Instruction* y_shft_inst = builder->AddBinaryOp(
            GetUintId(), SpvOpShiftLeftLogical, y_inst->result_id(),
            builder->GetUintConstantId(10));
        Instruction* x_or_y_inst = builder->AddBinaryOp(
            GetUintId(), SpvOpBitwiseOr, x_shft_inst->result_id(),
            y_shft_inst->result_id());
        Instruction* x_or_y_or_z_inst =
            builder->AddBinaryOp(GetUintId(), SpvOpBitwiseOr,
                                 x_or_y_inst->result_id(), z_inst->result_id());
        GenDebugOutputFieldCode(base_offset_id, kInstCompOutGlobalInvocationId,
                                x_or_y_or_z_inst->result_id(), builder);
      } else {
        // For version 2 format, write all three words
        GenDebugOutputFieldCode(base_offset_id, kInstCompOutGlobalInvocationIdX,
                                x_inst->result_id(), builder);
        GenDebugOutputFieldCode(base_offset_id, kInstCompOutGlobalInvocationIdY,
                                y_inst->result_id(), builder);
        GenDebugOutputFieldCode(base_offset_id, kInstCompOutGlobalInvocationIdZ,
                                z_inst->result_id(), builder);
      }
    } break;
    case SpvExecutionModelGeometry: {
      // Load and store PrimitiveId and InvocationId.
      GenBuiltinOutputCode(
          context()->GetBuiltinInputVarId(SpvBuiltInPrimitiveId),
          kInstGeomOutPrimitiveId, base_offset_id, builder);
      GenBuiltinOutputCode(
          context()->GetBuiltinInputVarId(SpvBuiltInInvocationId),
          kInstGeomOutInvocationId, base_offset_id, builder);
    } break;
    case SpvExecutionModelTessellationControl: {
      // Load and store InvocationId and PrimitiveId
      GenBuiltinOutputCode(
          context()->GetBuiltinInputVarId(SpvBuiltInInvocationId),
          kInstTessCtlOutInvocationId, base_offset_id, builder);
      GenBuiltinOutputCode(
          context()->GetBuiltinInputVarId(SpvBuiltInPrimitiveId),
          kInstTessCtlOutPrimitiveId, base_offset_id, builder);
    } break;
    case SpvExecutionModelTessellationEvaluation: {
      if (version_ == 1) {
        // For format version 1, load and store InvocationId.
        GenBuiltinOutputCode(
            context()->GetBuiltinInputVarId(SpvBuiltInInvocationId),
            kInstTessOutInvocationId, base_offset_id, builder);
      } else {
        // For format version 2, load and store PrimitiveId and TessCoord.uv
        GenBuiltinOutputCode(
            context()->GetBuiltinInputVarId(SpvBuiltInPrimitiveId),
            kInstTessEvalOutPrimitiveId, base_offset_id, builder);
        uint32_t load_id = GenVarLoad(
            context()->GetBuiltinInputVarId(SpvBuiltInTessCoord), builder);
        // Bitcast the float TessCoord to a uvec3 and write u and v only.
        Instruction* uvec3_cast_inst =
            builder->AddUnaryOp(GetVec3UintId(), SpvOpBitcast, load_id);
        uint32_t uvec3_cast_id = uvec3_cast_inst->result_id();
        Instruction* u_inst = builder->AddIdLiteralOp(
            GetUintId(), SpvOpCompositeExtract, uvec3_cast_id, 0);
        Instruction* v_inst = builder->AddIdLiteralOp(
            GetUintId(), SpvOpCompositeExtract, uvec3_cast_id, 1);
        GenDebugOutputFieldCode(base_offset_id, kInstTessEvalOutTessCoordU,
                                u_inst->result_id(), builder);
        GenDebugOutputFieldCode(base_offset_id, kInstTessEvalOutTessCoordV,
                                v_inst->result_id(), builder);
      }
    } break;
    case SpvExecutionModelFragment: {
      // Load FragCoord and convert to Uint
      Instruction* frag_coord_inst = builder->AddUnaryOp(
          GetVec4FloatId(), SpvOpLoad,
          context()->GetBuiltinInputVarId(SpvBuiltInFragCoord));
      Instruction* uint_frag_coord_inst = builder->AddUnaryOp(
          GetVec4UintId(), SpvOpBitcast, frag_coord_inst->result_id());
      // Only FragCoord.x and FragCoord.y are written to the record.
      for (uint32_t u = 0; u < 2u; ++u)
        GenFragCoordEltDebugOutputCode(
            base_offset_id, uint_frag_coord_inst->result_id(), u, builder);
    } break;
    case SpvExecutionModelRayGenerationNV:
    case SpvExecutionModelIntersectionNV:
    case SpvExecutionModelAnyHitNV:
    case SpvExecutionModelClosestHitNV:
    case SpvExecutionModelMissNV:
    case SpvExecutionModelCallableNV: {
      // Load and store LaunchIdNV.
      uint32_t launch_id = GenVarLoad(
          context()->GetBuiltinInputVarId(SpvBuiltInLaunchIdNV), builder);
      Instruction* x_launch_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, launch_id, 0);
      Instruction* y_launch_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, launch_id, 1);
      Instruction* z_launch_inst = builder->AddIdLiteralOp(
          GetUintId(), SpvOpCompositeExtract, launch_id, 2);
      GenDebugOutputFieldCode(base_offset_id, kInstRayTracingOutLaunchIdX,
                              x_launch_inst->result_id(), builder);
      GenDebugOutputFieldCode(base_offset_id, kInstRayTracingOutLaunchIdY,
                              y_launch_inst->result_id(), builder);
      GenDebugOutputFieldCode(base_offset_id, kInstRayTracingOutLaunchIdZ,
                              z_launch_inst->result_id(), builder);
    } break;
    default: { assert(false && "unsupported stage"); } break;
  }
}
  278. void InstrumentPass::GenDebugStreamWrite(
  279. uint32_t instruction_idx, uint32_t stage_idx,
  280. const std::vector<uint32_t>& validation_ids, InstructionBuilder* builder) {
  281. // Call debug output function. Pass func_idx, instruction_idx and
  282. // validation ids as args.
  283. uint32_t val_id_cnt = static_cast<uint32_t>(validation_ids.size());
  284. uint32_t output_func_id = GetStreamWriteFunctionId(stage_idx, val_id_cnt);
  285. std::vector<uint32_t> args = {output_func_id,
  286. builder->GetUintConstantId(instruction_idx)};
  287. (void)args.insert(args.end(), validation_ids.begin(), validation_ids.end());
  288. (void)builder->AddNaryOp(GetVoidId(), SpvOpFunctionCall, args);
  289. }
  290. uint32_t InstrumentPass::GenDebugDirectRead(
  291. const std::vector<uint32_t>& offset_ids, InstructionBuilder* builder) {
  292. // Call debug input function. Pass func_idx and offset ids as args.
  293. uint32_t off_id_cnt = static_cast<uint32_t>(offset_ids.size());
  294. uint32_t input_func_id = GetDirectReadFunctionId(off_id_cnt);
  295. std::vector<uint32_t> args = {input_func_id};
  296. (void)args.insert(args.end(), offset_ids.begin(), offset_ids.end());
  297. return builder->AddNaryOp(GetUintId(), SpvOpFunctionCall, args)->result_id();
  298. }
  299. bool InstrumentPass::IsSameBlockOp(const Instruction* inst) const {
  300. return inst->opcode() == SpvOpSampledImage || inst->opcode() == SpvOpImage;
  301. }
// Rewrite the in-operands of |inst| so that every reference to a same-block
// instruction from the original block is valid in |block_ptr|: references
// already regenerated in this block are remapped via |same_blk_post|;
// otherwise the pre-call instruction recorded in |same_blk_pre| is cloned
// (recursively) into |block_ptr| with a fresh result id.
void InstrumentPass::CloneSameBlockOps(
    std::unique_ptr<Instruction>* inst,
    std::unordered_map<uint32_t, uint32_t>* same_blk_post,
    std::unordered_map<uint32_t, Instruction*>* same_blk_pre,
    BasicBlock* block_ptr) {
  (*inst)->ForEachInId(
      [&same_blk_post, &same_blk_pre, &block_ptr, this](uint32_t* iid) {
        const auto map_itr = (*same_blk_post).find(*iid);
        if (map_itr == (*same_blk_post).end()) {
          const auto map_itr2 = (*same_blk_pre).find(*iid);
          if (map_itr2 != (*same_blk_pre).end()) {
            // Clone pre-call same-block ops, map result id.
            const Instruction* in_inst = map_itr2->second;
            std::unique_ptr<Instruction> sb_inst(in_inst->Clone(context()));
            // Recurse: the clone may itself reference other same-block ops.
            CloneSameBlockOps(&sb_inst, same_blk_post, same_blk_pre, block_ptr);
            const uint32_t rid = sb_inst->result_id();
            const uint32_t nid = this->TakeNextId();
            // Carry decorations over to the new result id.
            get_decoration_mgr()->CloneDecorations(rid, nid);
            sb_inst->SetResultId(nid);
            (*same_blk_post)[rid] = nid;
            *iid = nid;
            block_ptr->AddInstruction(std::move(sb_inst));
          }
        } else {
          // Reset same-block op operand.
          *iid = map_itr->second;
        }
      });
}
// Update phi instructions in the successors of the last block of
// |new_blocks|: any phi operand that names the original (first) block id is
// rewritten to name the last block id, since control now reaches those
// successors from the last new block.
void InstrumentPass::UpdateSucceedingPhis(
    std::vector<std::unique_ptr<BasicBlock>>& new_blocks) {
  const auto first_blk = new_blocks.begin();
  const auto last_blk = new_blocks.end() - 1;
  const uint32_t first_id = (*first_blk)->id();
  const uint32_t last_id = (*last_blk)->id();
  const BasicBlock& const_last_block = *last_blk->get();
  const_last_block.ForEachSuccessorLabel(
      [&first_id, &last_id, this](const uint32_t succ) {
        BasicBlock* sbp = this->id2block_[succ];
        sbp->ForEachPhiInst([&first_id, &last_id, this](Instruction* phi) {
          bool changed = false;
          phi->ForEachInId([&first_id, &last_id, &changed](uint32_t* id) {
            if (*id == first_id) {
              *id = last_id;
              changed = true;
            }
          });
          // Only re-analyze uses for phis that were actually modified.
          if (changed) get_def_use_mgr()->AnalyzeInstUse(phi);
        });
      });
}
  353. // Return id for output buffer uint ptr type
  354. uint32_t InstrumentPass::GetBufferUintPtrId() {
  355. if (buffer_uint_ptr_id_ == 0) {
  356. buffer_uint_ptr_id_ = context()->get_type_mgr()->FindPointerToType(
  357. GetUintId(), SpvStorageClassStorageBuffer);
  358. }
  359. return buffer_uint_ptr_id_;
  360. }
  361. uint32_t InstrumentPass::GetOutputBufferBinding() {
  362. switch (validation_id_) {
  363. case kInstValidationIdBindless:
  364. return kDebugOutputBindingStream;
  365. default:
  366. assert(false && "unexpected validation id");
  367. }
  368. return 0;
  369. }
  370. uint32_t InstrumentPass::GetInputBufferBinding() {
  371. switch (validation_id_) {
  372. case kInstValidationIdBindless:
  373. return kDebugInputBindingBindless;
  374. default:
  375. assert(false && "unexpected validation id");
  376. }
  377. return 0;
  378. }
  379. analysis::Type* InstrumentPass::GetUintRuntimeArrayType(
  380. analysis::DecorationManager* deco_mgr, analysis::TypeManager* type_mgr) {
  381. if (uint_rarr_ty_ == nullptr) {
  382. analysis::Integer uint_ty(32, false);
  383. analysis::Type* reg_uint_ty = type_mgr->GetRegisteredType(&uint_ty);
  384. analysis::RuntimeArray uint_rarr_ty_tmp(reg_uint_ty);
  385. uint_rarr_ty_ = type_mgr->GetRegisteredType(&uint_rarr_ty_tmp);
  386. uint32_t uint_arr_ty_id = type_mgr->GetTypeInstruction(uint_rarr_ty_);
  387. // By the Vulkan spec, a pre-existing RuntimeArray of uint must be part of
  388. // a block, and will therefore be decorated with an ArrayStride. Therefore
  389. // the undecorated type returned here will not be pre-existing and can
  390. // safely be decorated. Since this type is now decorated, it is out of
  391. // sync with the TypeManager and therefore the TypeManager must be
  392. // invalidated after this pass.
  393. assert(context()->get_def_use_mgr()->NumUses(uint_arr_ty_id) == 0 &&
  394. "used RuntimeArray type returned");
  395. deco_mgr->AddDecorationVal(uint_arr_ty_id, SpvDecorationArrayStride, 4u);
  396. }
  397. return uint_rarr_ty_;
  398. }
  399. void InstrumentPass::AddStorageBufferExt() {
  400. if (storage_buffer_ext_defined_) return;
  401. if (!get_feature_mgr()->HasExtension(kSPV_KHR_storage_buffer_storage_class)) {
  402. const std::string ext_name("SPV_KHR_storage_buffer_storage_class");
  403. const auto num_chars = ext_name.size();
  404. // Compute num words, accommodate the terminating null character.
  405. const auto num_words = (num_chars + 1 + 3) / 4;
  406. std::vector<uint32_t> ext_words(num_words, 0u);
  407. std::memcpy(ext_words.data(), ext_name.data(), num_chars);
  408. context()->AddExtension(std::unique_ptr<Instruction>(
  409. new Instruction(context(), SpvOpExtension, 0u, 0u,
  410. {{SPV_OPERAND_TYPE_LITERAL_STRING, ext_words}})));
  411. }
  412. storage_buffer_ext_defined_ = true;
  413. }
// Return id for output buffer, creating the buffer variable (a storage
// buffer struct of { uint written_size; uint data[]; }) on first use.
uint32_t InstrumentPass::GetOutputBufferId() {
  if (output_buffer_id_ == 0) {
    // If not created yet, create one
    analysis::DecorationManager* deco_mgr = get_decoration_mgr();
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    analysis::Type* reg_uint_rarr_ty =
        GetUintRuntimeArrayType(deco_mgr, type_mgr);
    analysis::Integer uint_ty(32, false);
    analysis::Type* reg_uint_ty = type_mgr->GetRegisteredType(&uint_ty);
    // Struct member 0: written size counter; member 1: data words.
    analysis::Struct buf_ty({reg_uint_ty, reg_uint_rarr_ty});
    analysis::Type* reg_buf_ty = type_mgr->GetRegisteredType(&buf_ty);
    uint32_t obufTyId = type_mgr->GetTypeInstruction(reg_buf_ty);
    // By the Vulkan spec, a pre-existing struct containing a RuntimeArray
    // must be a block, and will therefore be decorated with Block. Therefore
    // the undecorated type returned here will not be pre-existing and can
    // safely be decorated. Since this type is now decorated, it is out of
    // sync with the TypeManager and therefore the TypeManager must be
    // invalidated after this pass.
    assert(context()->get_def_use_mgr()->NumUses(obufTyId) == 0 &&
           "used struct type returned");
    deco_mgr->AddDecoration(obufTyId, SpvDecorationBlock);
    deco_mgr->AddMemberDecoration(obufTyId, kDebugOutputSizeOffset,
                                  SpvDecorationOffset, 0);
    deco_mgr->AddMemberDecoration(obufTyId, kDebugOutputDataOffset,
                                  SpvDecorationOffset, 4);
    uint32_t obufTyPtrId_ =
        type_mgr->FindPointerToType(obufTyId, SpvStorageClassStorageBuffer);
    output_buffer_id_ = TakeNextId();
    std::unique_ptr<Instruction> newVarOp(new Instruction(
        context(), SpvOpVariable, obufTyPtrId_, output_buffer_id_,
        {{spv_operand_type_t::SPV_OPERAND_TYPE_LITERAL_INTEGER,
          {SpvStorageClassStorageBuffer}}}));
    context()->AddGlobalValue(std::move(newVarOp));
    deco_mgr->AddDecorationVal(output_buffer_id_, SpvDecorationDescriptorSet,
                               desc_set_);
    deco_mgr->AddDecorationVal(output_buffer_id_, SpvDecorationBinding,
                               GetOutputBufferBinding());
    AddStorageBufferExt();
    if (get_module()->version() >= SPV_SPIRV_VERSION_WORD(1, 4)) {
      // Add the new buffer to all entry points.
      for (auto& entry : get_module()->entry_points()) {
        entry.AddOperand({SPV_OPERAND_TYPE_ID, {output_buffer_id_}});
        context()->AnalyzeUses(&entry);
      }
    }
  }
  return output_buffer_id_;
}
// Return id for input buffer, creating the buffer variable (a storage
// buffer struct containing a single runtime array of uint) on first use.
uint32_t InstrumentPass::GetInputBufferId() {
  if (input_buffer_id_ == 0) {
    // If not created yet, create one
    analysis::DecorationManager* deco_mgr = get_decoration_mgr();
    analysis::TypeManager* type_mgr = context()->get_type_mgr();
    analysis::Type* reg_uint_rarr_ty =
        GetUintRuntimeArrayType(deco_mgr, type_mgr);
    analysis::Struct buf_ty({reg_uint_rarr_ty});
    analysis::Type* reg_buf_ty = type_mgr->GetRegisteredType(&buf_ty);
    uint32_t ibufTyId = type_mgr->GetTypeInstruction(reg_buf_ty);
    // By the Vulkan spec, a pre-existing struct containing a RuntimeArray
    // must be a block, and will therefore be decorated with Block. Therefore
    // the undecorated type returned here will not be pre-existing and can
    // safely be decorated. Since this type is now decorated, it is out of
    // sync with the TypeManager and therefore the TypeManager must be
    // invalidated after this pass.
    assert(context()->get_def_use_mgr()->NumUses(ibufTyId) == 0 &&
           "used struct type returned");
    deco_mgr->AddDecoration(ibufTyId, SpvDecorationBlock);
    deco_mgr->AddMemberDecoration(ibufTyId, 0, SpvDecorationOffset, 0);
    uint32_t ibufTyPtrId_ =
        type_mgr->FindPointerToType(ibufTyId, SpvStorageClassStorageBuffer);
    input_buffer_id_ = TakeNextId();
    std::unique_ptr<Instruction> newVarOp(new Instruction(
        context(), SpvOpVariable, ibufTyPtrId_, input_buffer_id_,
        {{spv_operand_type_t::SPV_OPERAND_TYPE_LITERAL_INTEGER,
          {SpvStorageClassStorageBuffer}}}));
    context()->AddGlobalValue(std::move(newVarOp));
    deco_mgr->AddDecorationVal(input_buffer_id_, SpvDecorationDescriptorSet,
                               desc_set_);
    deco_mgr->AddDecorationVal(input_buffer_id_, SpvDecorationBinding,
                               GetInputBufferBinding());
    AddStorageBufferExt();
    if (get_module()->version() >= SPV_SPIRV_VERSION_WORD(1, 4)) {
      // Add the new buffer to all entry points.
      for (auto& entry : get_module()->entry_points()) {
        entry.AddOperand({SPV_OPERAND_TYPE_ID, {input_buffer_id_}});
        context()->AnalyzeUses(&entry);
      }
    }
  }
  return input_buffer_id_;
}
  506. uint32_t InstrumentPass::GetVec4FloatId() {
  507. if (v4float_id_ == 0) {
  508. analysis::TypeManager* type_mgr = context()->get_type_mgr();
  509. analysis::Float float_ty(32);
  510. analysis::Type* reg_float_ty = type_mgr->GetRegisteredType(&float_ty);
  511. analysis::Vector v4float_ty(reg_float_ty, 4);
  512. analysis::Type* reg_v4float_ty = type_mgr->GetRegisteredType(&v4float_ty);
  513. v4float_id_ = type_mgr->GetTypeInstruction(reg_v4float_ty);
  514. }
  515. return v4float_id_;
  516. }
  517. uint32_t InstrumentPass::GetUintId() {
  518. if (uint_id_ == 0) {
  519. analysis::TypeManager* type_mgr = context()->get_type_mgr();
  520. analysis::Integer uint_ty(32, false);
  521. analysis::Type* reg_uint_ty = type_mgr->GetRegisteredType(&uint_ty);
  522. uint_id_ = type_mgr->GetTypeInstruction(reg_uint_ty);
  523. }
  524. return uint_id_;
  525. }
  526. uint32_t InstrumentPass::GetVecUintId(uint32_t len) {
  527. analysis::TypeManager* type_mgr = context()->get_type_mgr();
  528. analysis::Integer uint_ty(32, false);
  529. analysis::Type* reg_uint_ty = type_mgr->GetRegisteredType(&uint_ty);
  530. analysis::Vector v_uint_ty(reg_uint_ty, len);
  531. analysis::Type* reg_v_uint_ty = type_mgr->GetRegisteredType(&v_uint_ty);
  532. uint32_t v_uint_id = type_mgr->GetTypeInstruction(reg_v_uint_ty);
  533. return v_uint_id;
  534. }
  535. uint32_t InstrumentPass::GetVec4UintId() {
  536. if (v4uint_id_ == 0) v4uint_id_ = GetVecUintId(4u);
  537. return v4uint_id_;
  538. }
  539. uint32_t InstrumentPass::GetVec3UintId() {
  540. if (v3uint_id_ == 0) v3uint_id_ = GetVecUintId(3u);
  541. return v3uint_id_;
  542. }
  543. uint32_t InstrumentPass::GetBoolId() {
  544. if (bool_id_ == 0) {
  545. analysis::TypeManager* type_mgr = context()->get_type_mgr();
  546. analysis::Bool bool_ty;
  547. analysis::Type* reg_bool_ty = type_mgr->GetRegisteredType(&bool_ty);
  548. bool_id_ = type_mgr->GetTypeInstruction(reg_bool_ty);
  549. }
  550. return bool_id_;
  551. }
  552. uint32_t InstrumentPass::GetVoidId() {
  553. if (void_id_ == 0) {
  554. analysis::TypeManager* type_mgr = context()->get_type_mgr();
  555. analysis::Void void_ty;
  556. analysis::Type* reg_void_ty = type_mgr->GetRegisteredType(&void_ty);
  557. void_id_ = type_mgr->GetTypeInstruction(reg_void_ty);
  558. }
  559. return void_id_;
  560. }
  561. uint32_t InstrumentPass::GetStreamWriteFunctionId(uint32_t stage_idx,
  562. uint32_t val_spec_param_cnt) {
  563. // Total param count is common params plus validation-specific
  564. // params
  565. uint32_t param_cnt = kInstCommonParamCnt + val_spec_param_cnt;
  566. if (output_func_id_ == 0) {
  567. // Create function
  568. output_func_id_ = TakeNextId();
  569. analysis::TypeManager* type_mgr = context()->get_type_mgr();
  570. std::vector<const analysis::Type*> param_types;
  571. for (uint32_t c = 0; c < param_cnt; ++c)
  572. param_types.push_back(type_mgr->GetType(GetUintId()));
  573. analysis::Function func_ty(type_mgr->GetType(GetVoidId()), param_types);
  574. analysis::Type* reg_func_ty = type_mgr->GetRegisteredType(&func_ty);
  575. std::unique_ptr<Instruction> func_inst(new Instruction(
  576. get_module()->context(), SpvOpFunction, GetVoidId(), output_func_id_,
  577. {{spv_operand_type_t::SPV_OPERAND_TYPE_LITERAL_INTEGER,
  578. {SpvFunctionControlMaskNone}},
  579. {spv_operand_type_t::SPV_OPERAND_TYPE_ID,
  580. {type_mgr->GetTypeInstruction(reg_func_ty)}}}));
  581. get_def_use_mgr()->AnalyzeInstDefUse(&*func_inst);
  582. std::unique_ptr<Function> output_func =
  583. MakeUnique<Function>(std::move(func_inst));
  584. // Add parameters
  585. std::vector<uint32_t> param_vec;
  586. for (uint32_t c = 0; c < param_cnt; ++c) {
  587. uint32_t pid = TakeNextId();
  588. param_vec.push_back(pid);
  589. std::unique_ptr<Instruction> param_inst(
  590. new Instruction(get_module()->context(), SpvOpFunctionParameter,
  591. GetUintId(), pid, {}));
  592. get_def_use_mgr()->AnalyzeInstDefUse(&*param_inst);
  593. output_func->AddParameter(std::move(param_inst));
  594. }
  595. // Create first block
  596. uint32_t test_blk_id = TakeNextId();
  597. std::unique_ptr<Instruction> test_label(NewLabel(test_blk_id));
  598. std::unique_ptr<BasicBlock> new_blk_ptr =
  599. MakeUnique<BasicBlock>(std::move(test_label));
  600. InstructionBuilder builder(
  601. context(), &*new_blk_ptr,
  602. IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
  603. // Gen test if debug output buffer size will not be exceeded.
  604. uint32_t val_spec_offset =
  605. (version_ == 1) ? kInstStageOutCnt : kInst2StageOutCnt;
  606. uint32_t obuf_record_sz = val_spec_offset + val_spec_param_cnt;
  607. uint32_t buf_id = GetOutputBufferId();
  608. uint32_t buf_uint_ptr_id = GetBufferUintPtrId();
  609. Instruction* obuf_curr_sz_ac_inst =
  610. builder.AddBinaryOp(buf_uint_ptr_id, SpvOpAccessChain, buf_id,
  611. builder.GetUintConstantId(kDebugOutputSizeOffset));
  612. // Fetch the current debug buffer written size atomically, adding the
  613. // size of the record to be written.
  614. uint32_t obuf_record_sz_id = builder.GetUintConstantId(obuf_record_sz);
  615. uint32_t mask_none_id = builder.GetUintConstantId(SpvMemoryAccessMaskNone);
  616. uint32_t scope_invok_id = builder.GetUintConstantId(SpvScopeInvocation);
  617. Instruction* obuf_curr_sz_inst = builder.AddQuadOp(
  618. GetUintId(), SpvOpAtomicIAdd, obuf_curr_sz_ac_inst->result_id(),
  619. scope_invok_id, mask_none_id, obuf_record_sz_id);
  620. uint32_t obuf_curr_sz_id = obuf_curr_sz_inst->result_id();
  621. // Compute new written size
  622. Instruction* obuf_new_sz_inst =
  623. builder.AddBinaryOp(GetUintId(), SpvOpIAdd, obuf_curr_sz_id,
  624. builder.GetUintConstantId(obuf_record_sz));
  625. // Fetch the data bound
  626. Instruction* obuf_bnd_inst =
  627. builder.AddIdLiteralOp(GetUintId(), SpvOpArrayLength,
  628. GetOutputBufferId(), kDebugOutputDataOffset);
  629. // Test that new written size is less than or equal to debug output
  630. // data bound
  631. Instruction* obuf_safe_inst = builder.AddBinaryOp(
  632. GetBoolId(), SpvOpULessThanEqual, obuf_new_sz_inst->result_id(),
  633. obuf_bnd_inst->result_id());
  634. uint32_t merge_blk_id = TakeNextId();
  635. uint32_t write_blk_id = TakeNextId();
  636. std::unique_ptr<Instruction> merge_label(NewLabel(merge_blk_id));
  637. std::unique_ptr<Instruction> write_label(NewLabel(write_blk_id));
  638. (void)builder.AddConditionalBranch(obuf_safe_inst->result_id(),
  639. write_blk_id, merge_blk_id, merge_blk_id,
  640. SpvSelectionControlMaskNone);
  641. // Close safety test block and gen write block
  642. new_blk_ptr->SetParent(&*output_func);
  643. output_func->AddBasicBlock(std::move(new_blk_ptr));
  644. new_blk_ptr = MakeUnique<BasicBlock>(std::move(write_label));
  645. builder.SetInsertPoint(&*new_blk_ptr);
  646. // Generate common and stage-specific debug record members
  647. GenCommonStreamWriteCode(obuf_record_sz, param_vec[kInstCommonParamInstIdx],
  648. stage_idx, obuf_curr_sz_id, &builder);
  649. GenStageStreamWriteCode(stage_idx, obuf_curr_sz_id, &builder);
  650. // Gen writes of validation specific data
  651. for (uint32_t i = 0; i < val_spec_param_cnt; ++i) {
  652. GenDebugOutputFieldCode(obuf_curr_sz_id, val_spec_offset + i,
  653. param_vec[kInstCommonParamCnt + i], &builder);
  654. }
  655. // Close write block and gen merge block
  656. (void)builder.AddBranch(merge_blk_id);
  657. new_blk_ptr->SetParent(&*output_func);
  658. output_func->AddBasicBlock(std::move(new_blk_ptr));
  659. new_blk_ptr = MakeUnique<BasicBlock>(std::move(merge_label));
  660. builder.SetInsertPoint(&*new_blk_ptr);
  661. // Close merge block and function and add function to module
  662. (void)builder.AddNullaryOp(0, SpvOpReturn);
  663. new_blk_ptr->SetParent(&*output_func);
  664. output_func->AddBasicBlock(std::move(new_blk_ptr));
  665. std::unique_ptr<Instruction> func_end_inst(
  666. new Instruction(get_module()->context(), SpvOpFunctionEnd, 0, 0, {}));
  667. get_def_use_mgr()->AnalyzeInstDefUse(&*func_end_inst);
  668. output_func->SetFunctionEnd(std::move(func_end_inst));
  669. context()->AddFunction(std::move(output_func));
  670. output_func_param_cnt_ = param_cnt;
  671. }
  672. assert(param_cnt == output_func_param_cnt_ && "bad arg count");
  673. return output_func_id_;
  674. }
  675. uint32_t InstrumentPass::GetDirectReadFunctionId(uint32_t param_cnt) {
  676. uint32_t func_id = param2input_func_id_[param_cnt];
  677. if (func_id != 0) return func_id;
  678. // Create input function for param_cnt
  679. func_id = TakeNextId();
  680. analysis::TypeManager* type_mgr = context()->get_type_mgr();
  681. std::vector<const analysis::Type*> param_types;
  682. for (uint32_t c = 0; c < param_cnt; ++c)
  683. param_types.push_back(type_mgr->GetType(GetUintId()));
  684. analysis::Function func_ty(type_mgr->GetType(GetUintId()), param_types);
  685. analysis::Type* reg_func_ty = type_mgr->GetRegisteredType(&func_ty);
  686. std::unique_ptr<Instruction> func_inst(new Instruction(
  687. get_module()->context(), SpvOpFunction, GetUintId(), func_id,
  688. {{spv_operand_type_t::SPV_OPERAND_TYPE_LITERAL_INTEGER,
  689. {SpvFunctionControlMaskNone}},
  690. {spv_operand_type_t::SPV_OPERAND_TYPE_ID,
  691. {type_mgr->GetTypeInstruction(reg_func_ty)}}}));
  692. get_def_use_mgr()->AnalyzeInstDefUse(&*func_inst);
  693. std::unique_ptr<Function> input_func =
  694. MakeUnique<Function>(std::move(func_inst));
  695. // Add parameters
  696. std::vector<uint32_t> param_vec;
  697. for (uint32_t c = 0; c < param_cnt; ++c) {
  698. uint32_t pid = TakeNextId();
  699. param_vec.push_back(pid);
  700. std::unique_ptr<Instruction> param_inst(new Instruction(
  701. get_module()->context(), SpvOpFunctionParameter, GetUintId(), pid, {}));
  702. get_def_use_mgr()->AnalyzeInstDefUse(&*param_inst);
  703. input_func->AddParameter(std::move(param_inst));
  704. }
  705. // Create block
  706. uint32_t blk_id = TakeNextId();
  707. std::unique_ptr<Instruction> blk_label(NewLabel(blk_id));
  708. std::unique_ptr<BasicBlock> new_blk_ptr =
  709. MakeUnique<BasicBlock>(std::move(blk_label));
  710. InstructionBuilder builder(
  711. context(), &*new_blk_ptr,
  712. IRContext::kAnalysisDefUse | IRContext::kAnalysisInstrToBlockMapping);
  713. // For each offset parameter, generate new offset with parameter, adding last
  714. // loaded value if it exists, and load value from input buffer at new offset.
  715. // Return last loaded value.
  716. uint32_t buf_id = GetInputBufferId();
  717. uint32_t buf_uint_ptr_id = GetBufferUintPtrId();
  718. uint32_t last_value_id = 0;
  719. for (uint32_t p = 0; p < param_cnt; ++p) {
  720. uint32_t offset_id;
  721. if (p == 0) {
  722. offset_id = param_vec[0];
  723. } else {
  724. Instruction* offset_inst = builder.AddBinaryOp(
  725. GetUintId(), SpvOpIAdd, last_value_id, param_vec[p]);
  726. offset_id = offset_inst->result_id();
  727. }
  728. Instruction* ac_inst = builder.AddTernaryOp(
  729. buf_uint_ptr_id, SpvOpAccessChain, buf_id,
  730. builder.GetUintConstantId(kDebugInputDataOffset), offset_id);
  731. Instruction* load_inst =
  732. builder.AddUnaryOp(GetUintId(), SpvOpLoad, ac_inst->result_id());
  733. last_value_id = load_inst->result_id();
  734. }
  735. (void)builder.AddInstruction(MakeUnique<Instruction>(
  736. context(), SpvOpReturnValue, 0, 0,
  737. std::initializer_list<Operand>{{SPV_OPERAND_TYPE_ID, {last_value_id}}}));
  738. // Close block and function and add function to module
  739. new_blk_ptr->SetParent(&*input_func);
  740. input_func->AddBasicBlock(std::move(new_blk_ptr));
  741. std::unique_ptr<Instruction> func_end_inst(
  742. new Instruction(get_module()->context(), SpvOpFunctionEnd, 0, 0, {}));
  743. get_def_use_mgr()->AnalyzeInstDefUse(&*func_end_inst);
  744. input_func->SetFunctionEnd(std::move(func_end_inst));
  745. context()->AddFunction(std::move(input_func));
  746. param2input_func_id_[param_cnt] = func_id;
  747. return func_id;
  748. }
  749. bool InstrumentPass::InstrumentFunction(Function* func, uint32_t stage_idx,
  750. InstProcessFunction& pfn) {
  751. bool modified = false;
  752. // Compute function index
  753. uint32_t function_idx = 0;
  754. for (auto fii = get_module()->begin(); fii != get_module()->end(); ++fii) {
  755. if (&*fii == func) break;
  756. ++function_idx;
  757. }
  758. std::vector<std::unique_ptr<BasicBlock>> new_blks;
  759. // Using block iterators here because of block erasures and insertions.
  760. for (auto bi = func->begin(); bi != func->end(); ++bi) {
  761. for (auto ii = bi->begin(); ii != bi->end();) {
  762. // Generate instrumentation if warranted
  763. pfn(ii, bi, stage_idx, &new_blks);
  764. if (new_blks.size() == 0) {
  765. ++ii;
  766. continue;
  767. }
  768. // Add new blocks to label id map
  769. for (auto& blk : new_blks) id2block_[blk->id()] = &*blk;
  770. // If there are new blocks we know there will always be two or
  771. // more, so update succeeding phis with label of new last block.
  772. size_t newBlocksSize = new_blks.size();
  773. assert(newBlocksSize > 1);
  774. UpdateSucceedingPhis(new_blks);
  775. // Replace original block with new block(s)
  776. bi = bi.Erase();
  777. for (auto& bb : new_blks) {
  778. bb->SetParent(func);
  779. }
  780. bi = bi.InsertBefore(&new_blks);
  781. // Reset block iterator to last new block
  782. for (size_t i = 0; i < newBlocksSize - 1; i++) ++bi;
  783. modified = true;
  784. // Restart instrumenting at beginning of last new block,
  785. // but skip over any new phi or copy instruction.
  786. ii = bi->begin();
  787. if (ii->opcode() == SpvOpPhi || ii->opcode() == SpvOpCopyObject) ++ii;
  788. new_blks.clear();
  789. }
  790. }
  791. return modified;
  792. }
  793. bool InstrumentPass::InstProcessCallTreeFromRoots(InstProcessFunction& pfn,
  794. std::queue<uint32_t>* roots,
  795. uint32_t stage_idx) {
  796. bool modified = false;
  797. std::unordered_set<uint32_t> done;
  798. // Don't process input and output functions
  799. for (auto& ifn : param2input_func_id_) done.insert(ifn.second);
  800. if (output_func_id_ != 0) done.insert(output_func_id_);
  801. // Process all functions from roots
  802. while (!roots->empty()) {
  803. const uint32_t fi = roots->front();
  804. roots->pop();
  805. if (done.insert(fi).second) {
  806. Function* fn = id2function_.at(fi);
  807. // Add calls first so we don't add new output function
  808. context()->AddCalls(fn, roots);
  809. modified = InstrumentFunction(fn, stage_idx, pfn) || modified;
  810. }
  811. }
  812. return modified;
  813. }
  814. bool InstrumentPass::InstProcessEntryPointCallTree(InstProcessFunction& pfn) {
  815. // Make sure all entry points have the same execution model. Do not
  816. // instrument if they do not.
  817. // TODO(greg-lunarg): Handle mixed stages. Technically, a shader module
  818. // can contain entry points with different execution models, although
  819. // such modules will likely be rare as GLSL and HLSL are geared toward
  820. // one model per module. In such cases we will need
  821. // to clone any functions which are in the call trees of entrypoints
  822. // with differing execution models.
  823. uint32_t ecnt = 0;
  824. uint32_t stage = SpvExecutionModelMax;
  825. for (auto& e : get_module()->entry_points()) {
  826. if (ecnt == 0)
  827. stage = e.GetSingleWordInOperand(kEntryPointExecutionModelInIdx);
  828. else if (e.GetSingleWordInOperand(kEntryPointExecutionModelInIdx) != stage)
  829. return false;
  830. ++ecnt;
  831. }
  832. // Only supporting vertex, fragment and compute shaders at the moment.
  833. // TODO(greg-lunarg): Handle all stages.
  834. if (stage != SpvExecutionModelVertex && stage != SpvExecutionModelFragment &&
  835. stage != SpvExecutionModelGeometry &&
  836. stage != SpvExecutionModelGLCompute &&
  837. stage != SpvExecutionModelTessellationControl &&
  838. stage != SpvExecutionModelTessellationEvaluation &&
  839. stage != SpvExecutionModelRayGenerationNV &&
  840. stage != SpvExecutionModelIntersectionNV &&
  841. stage != SpvExecutionModelAnyHitNV &&
  842. stage != SpvExecutionModelClosestHitNV &&
  843. stage != SpvExecutionModelMissNV && stage != SpvExecutionModelCallableNV)
  844. return false;
  845. // Add together the roots of all entry points
  846. std::queue<uint32_t> roots;
  847. for (auto& e : get_module()->entry_points()) {
  848. roots.push(e.GetSingleWordInOperand(kEntryPointFunctionIdInIdx));
  849. }
  850. bool modified = InstProcessCallTreeFromRoots(pfn, &roots, stage);
  851. return modified;
  852. }
  853. void InstrumentPass::InitializeInstrument() {
  854. output_buffer_id_ = 0;
  855. buffer_uint_ptr_id_ = 0;
  856. output_func_id_ = 0;
  857. output_func_param_cnt_ = 0;
  858. input_buffer_id_ = 0;
  859. v4float_id_ = 0;
  860. uint_id_ = 0;
  861. v4uint_id_ = 0;
  862. v3uint_id_ = 0;
  863. bool_id_ = 0;
  864. void_id_ = 0;
  865. storage_buffer_ext_defined_ = false;
  866. uint_rarr_ty_ = nullptr;
  867. // clear collections
  868. id2function_.clear();
  869. id2block_.clear();
  870. // Initialize function and block maps.
  871. for (auto& fn : *get_module()) {
  872. id2function_[fn.result_id()] = &fn;
  873. for (auto& blk : fn) {
  874. id2block_[blk.id()] = &blk;
  875. }
  876. }
  877. // Remember original instruction offsets
  878. uint32_t module_offset = 0;
  879. Module* module = get_module();
  880. for (auto& i : context()->capabilities()) {
  881. (void)i;
  882. ++module_offset;
  883. }
  884. for (auto& i : module->extensions()) {
  885. (void)i;
  886. ++module_offset;
  887. }
  888. for (auto& i : module->ext_inst_imports()) {
  889. (void)i;
  890. ++module_offset;
  891. }
  892. ++module_offset; // memory_model
  893. for (auto& i : module->entry_points()) {
  894. (void)i;
  895. ++module_offset;
  896. }
  897. for (auto& i : module->execution_modes()) {
  898. (void)i;
  899. ++module_offset;
  900. }
  901. for (auto& i : module->debugs1()) {
  902. (void)i;
  903. ++module_offset;
  904. }
  905. for (auto& i : module->debugs2()) {
  906. (void)i;
  907. ++module_offset;
  908. }
  909. for (auto& i : module->debugs3()) {
  910. (void)i;
  911. ++module_offset;
  912. }
  913. for (auto& i : module->annotations()) {
  914. (void)i;
  915. ++module_offset;
  916. }
  917. for (auto& i : module->types_values()) {
  918. module_offset += 1;
  919. module_offset += static_cast<uint32_t>(i.dbg_line_insts().size());
  920. }
  921. auto curr_fn = get_module()->begin();
  922. for (; curr_fn != get_module()->end(); ++curr_fn) {
  923. // Count function instruction
  924. module_offset += 1;
  925. curr_fn->ForEachParam(
  926. [&module_offset](const Instruction*) { module_offset += 1; }, true);
  927. for (auto& blk : *curr_fn) {
  928. // Count label
  929. module_offset += 1;
  930. for (auto& inst : blk) {
  931. module_offset += static_cast<uint32_t>(inst.dbg_line_insts().size());
  932. uid2offset_[inst.unique_id()] = module_offset;
  933. module_offset += 1;
  934. }
  935. }
  936. // Count function end instruction
  937. module_offset += 1;
  938. }
  939. }
  940. } // namespace opt
  941. } // namespace spvtools