// local_single_block_elim_pass.cpp
// Copyright (c) 2017 The Khronos Group Inc.
// Copyright (c) 2017 Valve Corporation
// Copyright (c) 2017 LunarG Inc.
// Modifications Copyright (C) 2024 Advanced Micro Devices, Inc. All rights
// reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
  18. #include "source/opt/local_single_block_elim_pass.h"
  19. #include <vector>
  20. #include "source/util/string_utils.h"
  21. namespace spvtools {
  22. namespace opt {
namespace {
// Input-operand index of the value being written by an OpStore
// (operand 0 is the pointer, operand 1 is the stored value).
constexpr uint32_t kStoreValIdInIdx = 1;
}  // namespace
  26. bool LocalSingleBlockLoadStoreElimPass::HasOnlySupportedRefs(uint32_t ptrId) {
  27. if (supported_ref_ptrs_.find(ptrId) != supported_ref_ptrs_.end()) return true;
  28. if (get_def_use_mgr()->WhileEachUser(ptrId, [this](Instruction* user) {
  29. auto dbg_op = user->GetCommonDebugOpcode();
  30. if (dbg_op == CommonDebugInfoDebugDeclare ||
  31. dbg_op == CommonDebugInfoDebugValue) {
  32. return true;
  33. }
  34. spv::Op op = user->opcode();
  35. if (IsNonPtrAccessChain(op) || op == spv::Op::OpCopyObject) {
  36. if (!HasOnlySupportedRefs(user->result_id())) {
  37. return false;
  38. }
  39. } else if (op != spv::Op::OpStore && op != spv::Op::OpLoad &&
  40. op != spv::Op::OpName && !IsNonTypeDecorate(op)) {
  41. return false;
  42. }
  43. return true;
  44. })) {
  45. supported_ref_ptrs_.insert(ptrId);
  46. return true;
  47. }
  48. return false;
  49. }
// Performs store/load, load/load and store/store elimination within each
// basic block of |func|.  Returns true if the function was modified.
//
// Per block, two maps track the last whole-variable access still in effect:
//   var2store_ : varId -> most recent full OpStore to that variable
//   var2load_  : varId -> most recent full OpLoad from that variable
// Loads are forwarded from these maps; dead stores are queued in
// |instructions_to_kill| and removed only after the scan completes.
bool LocalSingleBlockLoadStoreElimPass::LocalSingleBlockLoadStoreElim(
    Function* func) {
  // Perform local store/load, load/load and store/store elimination
  // on each block
  bool modified = false;
  std::vector<Instruction*> instructions_to_kill;
  // Stores that must survive because a later partial (access-chain) load
  // reads from the variable they wrote.
  std::unordered_set<Instruction*> instructions_to_save;
  for (auto bi = func->begin(); bi != func->end(); ++bi) {
    // State is strictly per-block; nothing carries across block boundaries.
    var2store_.clear();
    var2load_.clear();
    auto next = bi->begin();
    for (auto ii = next; ii != bi->end(); ii = next) {
      // Advance |next| before processing so the iteration position is
      // fixed independently of what happens to |ii|.
      ++next;
      switch (ii->opcode()) {
        case spv::Op::OpStore: {
          // Verify store variable is target type
          uint32_t varId;
          Instruction* ptrInst = GetPtr(&*ii, &varId);
          if (!IsTargetVar(varId)) continue;
          if (!HasOnlySupportedRefs(varId)) continue;
          // If a store to the whole variable, remember it for succeeding
          // loads and stores. Otherwise forget any previous store to that
          // variable.
          if (ptrInst->opcode() == spv::Op::OpVariable) {
            // If a previous store to same variable, mark the store
            // for deletion if not still used. Don't delete store
            // if debugging; let ssa-rewrite and DCE handle it
            auto prev_store = var2store_.find(varId);
            if (prev_store != var2store_.end() &&
                instructions_to_save.count(prev_store->second) == 0 &&
                !context()->get_debug_info_mgr()->IsVariableDebugDeclared(
                    varId)) {
              instructions_to_kill.push_back(prev_store->second);
              modified = true;
            }
            bool kill_store = false;
            auto li = var2load_.find(varId);
            if (li != var2load_.end()) {
              if (ii->GetSingleWordInOperand(kStoreValIdInIdx) ==
                  li->second->result_id()) {
                // We are storing the same value that already exists in the
                // memory location. The store does nothing.
                kill_store = true;
              }
            }
            if (!kill_store) {
              // Record this store as the live one; any prior load result
              // no longer describes the variable's content for forwarding.
              var2store_[varId] = &*ii;
              var2load_.erase(varId);
            } else {
              instructions_to_kill.push_back(&*ii);
              modified = true;
            }
          } else {
            assert(IsNonPtrAccessChain(ptrInst->opcode()));
            // A partial store invalidates everything known about the
            // variable.
            var2store_.erase(varId);
            var2load_.erase(varId);
          }
        } break;
        case spv::Op::OpLoad: {
          // Verify load variable is target type
          uint32_t varId;
          Instruction* ptrInst = GetPtr(&*ii, &varId);
          if (!IsTargetVar(varId)) continue;
          if (!HasOnlySupportedRefs(varId)) continue;
          uint32_t replId = 0;
          if (ptrInst->opcode() == spv::Op::OpVariable) {
            // If a load from a variable, look for a previous store or
            // load from that variable and use its value.
            auto si = var2store_.find(varId);
            if (si != var2store_.end()) {
              replId = si->second->GetSingleWordInOperand(kStoreValIdInIdx);
            } else {
              auto li = var2load_.find(varId);
              if (li != var2load_.end()) {
                replId = li->second->result_id();
              }
            }
          } else {
            // If a partial load of a previously seen store, remember
            // not to delete the store.
            auto si = var2store_.find(varId);
            if (si != var2store_.end()) instructions_to_save.insert(si->second);
          }
          if (replId != 0) {
            // replace load's result id and delete load
            context()->KillNamesAndDecorates(&*ii);
            context()->ReplaceAllUsesWith(ii->result_id(), replId);
            instructions_to_kill.push_back(&*ii);
            modified = true;
          } else {
            if (ptrInst->opcode() == spv::Op::OpVariable)
              var2load_[varId] = &*ii;  // register load
          }
        } break;
        case spv::Op::OpFunctionCall: {
          // Conservatively assume all locals are redefined for now.
          // TODO(): Handle more optimally
          var2store_.clear();
          var2load_.clear();
        } break;
        default:
          break;
      }
    }
  }
  // Deletion is deferred until here so the block iterators above remain
  // valid throughout the scan.
  for (Instruction* inst : instructions_to_kill) {
    context()->KillInst(inst);
  }
  return modified;
}
  160. void LocalSingleBlockLoadStoreElimPass::Initialize() {
  161. // Initialize Target Type Caches
  162. seen_target_vars_.clear();
  163. seen_non_target_vars_.clear();
  164. // Clear collections
  165. supported_ref_ptrs_.clear();
  166. // Initialize extensions allowlist
  167. InitExtensions();
  168. }
  169. bool LocalSingleBlockLoadStoreElimPass::AllExtensionsSupported() const {
  170. // If any extension not in allowlist, return false
  171. for (auto& ei : get_module()->extensions()) {
  172. const std::string extName = ei.GetInOperand(0).AsString();
  173. if (extensions_allowlist_.find(extName) == extensions_allowlist_.end())
  174. return false;
  175. }
  176. // only allow NonSemantic.Shader.DebugInfo.100, we cannot safely optimise
  177. // around unknown extended
  178. // instruction sets even if they are non-semantic
  179. for (auto& inst : context()->module()->ext_inst_imports()) {
  180. assert(inst.opcode() == spv::Op::OpExtInstImport &&
  181. "Expecting an import of an extension's instruction set.");
  182. const std::string extension_name = inst.GetInOperand(0).AsString();
  183. if (spvtools::utils::starts_with(extension_name, "NonSemantic.") &&
  184. extension_name != "NonSemantic.Shader.DebugInfo.100") {
  185. return false;
  186. }
  187. }
  188. return true;
  189. }
  190. Pass::Status LocalSingleBlockLoadStoreElimPass::ProcessImpl() {
  191. // Assumes relaxed logical addressing only (see instruction.h).
  192. if (context()->get_feature_mgr()->HasCapability(spv::Capability::Addresses))
  193. return Status::SuccessWithoutChange;
  194. // Do not process if module contains OpGroupDecorate. Additional
  195. // support required in KillNamesAndDecorates().
  196. // TODO(greg-lunarg): Add support for OpGroupDecorate
  197. for (auto& ai : get_module()->annotations())
  198. if (ai.opcode() == spv::Op::OpGroupDecorate)
  199. return Status::SuccessWithoutChange;
  200. // If any extensions in the module are not explicitly supported,
  201. // return unmodified.
  202. if (!AllExtensionsSupported()) return Status::SuccessWithoutChange;
  203. // Process all entry point functions
  204. ProcessFunction pfn = [this](Function* fp) {
  205. return LocalSingleBlockLoadStoreElim(fp);
  206. };
  207. bool modified = context()->ProcessReachableCallTree(pfn);
  208. return modified ? Status::SuccessWithChange : Status::SuccessWithoutChange;
  209. }
// Defaulted constructor; per-run state is established in Initialize().
LocalSingleBlockLoadStoreElimPass::LocalSingleBlockLoadStoreElimPass() =
    default;
  212. Pass::Status LocalSingleBlockLoadStoreElimPass::Process() {
  213. Initialize();
  214. return ProcessImpl();
  215. }
  216. void LocalSingleBlockLoadStoreElimPass::InitExtensions() {
  217. extensions_allowlist_.clear();
  218. extensions_allowlist_.insert({"SPV_AMD_shader_explicit_vertex_parameter",
  219. "SPV_AMD_shader_trinary_minmax",
  220. "SPV_AMD_gcn_shader",
  221. "SPV_KHR_shader_ballot",
  222. "SPV_AMD_shader_ballot",
  223. "SPV_AMDX_shader_enqueue",
  224. "SPV_AMD_gpu_shader_half_float",
  225. "SPV_KHR_shader_draw_parameters",
  226. "SPV_KHR_subgroup_vote",
  227. "SPV_KHR_8bit_storage",
  228. "SPV_KHR_16bit_storage",
  229. "SPV_KHR_device_group",
  230. "SPV_KHR_multiview",
  231. "SPV_NVX_multiview_per_view_attributes",
  232. "SPV_NV_viewport_array2",
  233. "SPV_NV_stereo_view_rendering",
  234. "SPV_NV_sample_mask_override_coverage",
  235. "SPV_NV_geometry_shader_passthrough",
  236. "SPV_AMD_texture_gather_bias_lod",
  237. "SPV_KHR_storage_buffer_storage_class",
  238. "SPV_KHR_variable_pointers",
  239. "SPV_AMD_gpu_shader_int16",
  240. "SPV_KHR_post_depth_coverage",
  241. "SPV_KHR_shader_atomic_counter_ops",
  242. "SPV_EXT_shader_stencil_export",
  243. "SPV_EXT_shader_viewport_index_layer",
  244. "SPV_AMD_shader_image_load_store_lod",
  245. "SPV_AMD_shader_fragment_mask",
  246. "SPV_EXT_fragment_fully_covered",
  247. "SPV_AMD_gpu_shader_half_float_fetch",
  248. "SPV_GOOGLE_decorate_string",
  249. "SPV_GOOGLE_hlsl_functionality1",
  250. "SPV_GOOGLE_user_type",
  251. "SPV_NV_shader_subgroup_partitioned",
  252. "SPV_EXT_demote_to_helper_invocation",
  253. "SPV_EXT_descriptor_indexing",
  254. "SPV_NV_fragment_shader_barycentric",
  255. "SPV_NV_compute_shader_derivatives",
  256. "SPV_NV_shader_image_footprint",
  257. "SPV_NV_shading_rate",
  258. "SPV_NV_mesh_shader",
  259. "SPV_EXT_mesh_shader",
  260. "SPV_NV_ray_tracing",
  261. "SPV_KHR_ray_tracing",
  262. "SPV_KHR_ray_query",
  263. "SPV_EXT_fragment_invocation_density",
  264. "SPV_EXT_physical_storage_buffer",
  265. "SPV_KHR_physical_storage_buffer",
  266. "SPV_KHR_terminate_invocation",
  267. "SPV_KHR_subgroup_uniform_control_flow",
  268. "SPV_KHR_integer_dot_product",
  269. "SPV_EXT_shader_image_int64",
  270. "SPV_KHR_non_semantic_info",
  271. "SPV_KHR_uniform_group_instructions",
  272. "SPV_KHR_fragment_shader_barycentric",
  273. "SPV_KHR_vulkan_memory_model",
  274. "SPV_NV_bindless_texture",
  275. "SPV_EXT_shader_atomic_float_add",
  276. "SPV_EXT_fragment_shader_interlock",
  277. "SPV_KHR_compute_shader_derivatives",
  278. "SPV_NV_cooperative_matrix",
  279. "SPV_KHR_cooperative_matrix",
  280. "SPV_KHR_ray_tracing_position_fetch",
  281. "SPV_KHR_fragment_shading_rate"});
  282. }
  283. } // namespace opt
  284. } // namespace spvtools