rendering_shader_container.cpp 38 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834
  1. /**************************************************************************/
  2. /* rendering_shader_container.cpp */
  3. /**************************************************************************/
  4. /* This file is part of: */
  5. /* GODOT ENGINE */
  6. /* https://godotengine.org */
  7. /**************************************************************************/
  8. /* Copyright (c) 2014-present Godot Engine contributors (see AUTHORS.md). */
  9. /* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur. */
  10. /* */
  11. /* Permission is hereby granted, free of charge, to any person obtaining */
  12. /* a copy of this software and associated documentation files (the */
  13. /* "Software"), to deal in the Software without restriction, including */
  14. /* without limitation the rights to use, copy, modify, merge, publish, */
  15. /* distribute, sublicense, and/or sell copies of the Software, and to */
  16. /* permit persons to whom the Software is furnished to do so, subject to */
  17. /* the following conditions: */
  18. /* */
  19. /* The above copyright notice and this permission notice shall be */
  20. /* included in all copies or substantial portions of the Software. */
  21. /* */
  22. /* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
  23. /* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
  24. /* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */
  25. /* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
  26. /* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
  27. /* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
  28. /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
  29. /**************************************************************************/
  30. #include "rendering_shader_container.h"
  31. #include "core/io/compression.h"
  32. #include "thirdparty/spirv-reflect/spirv_reflect.h"
  33. static inline uint32_t aligned_to(uint32_t p_size, uint32_t p_alignment) {
  34. if (p_size % p_alignment) {
  35. return p_size + (p_alignment - (p_size % p_alignment));
  36. } else {
  37. return p_size;
  38. }
  39. }
// Allocates the SPIRV-Reflect module that backs this stage's reflection data;
// it is populated later by reflect_spirv() and freed by the destructor.
RenderingShaderContainer::ReflectedShaderStage::ReflectedShaderStage() :
		_module(memnew(SpvReflectShaderModule)) {
}
// Releases the SPIRV-Reflect module's internal allocations first, then frees
// the module struct itself.
RenderingShaderContainer::ReflectedShaderStage::~ReflectedShaderStage() {
	spvReflectDestroyShaderModule(_module);
	memdelete(_module);
}
// Read-only access to the parsed SPIRV-Reflect module for this stage.
const SpvReflectShaderModule &RenderingShaderContainer::ReflectedShaderStage::module() const {
	return *_module;
}
// Returns the stage's SPIR-V code as 32-bit words, viewing the stored byte
// buffer without copying it.
const Span<uint32_t> RenderingShaderContainer::ReflectedShaderStage::spirv() const {
	return _spirv_data.span().reinterpret<uint32_t>();
}
// Extra-data hooks for container serialization. Derived containers override
// these to read (_from_bytes_*) or write (_to_bytes_*) format-specific
// payloads at fixed points of the byte layout. Each returns the number of
// extra bytes consumed or produced; the base class stores no extra data, so
// every default implementation returns 0.
uint32_t RenderingShaderContainer::_from_bytes_header_extra_data(const uint8_t *p_bytes) {
	return 0;
}
uint32_t RenderingShaderContainer::_from_bytes_reflection_extra_data(const uint8_t *p_bytes) {
	return 0;
}
uint32_t RenderingShaderContainer::_from_bytes_reflection_binding_uniform_extra_data_start(const uint8_t *p_bytes) {
	return 0;
}
uint32_t RenderingShaderContainer::_from_bytes_reflection_binding_uniform_extra_data(const uint8_t *p_bytes, uint32_t p_index) {
	return 0;
}
uint32_t RenderingShaderContainer::_from_bytes_reflection_specialization_extra_data_start(const uint8_t *p_bytes) {
	return 0;
}
uint32_t RenderingShaderContainer::_from_bytes_reflection_specialization_extra_data(const uint8_t *p_bytes, uint32_t p_index) {
	return 0;
}
uint32_t RenderingShaderContainer::_from_bytes_shader_extra_data_start(const uint8_t *p_bytes) {
	return 0;
}
uint32_t RenderingShaderContainer::_from_bytes_shader_extra_data(const uint8_t *p_bytes, uint32_t p_index) {
	return 0;
}
uint32_t RenderingShaderContainer::_from_bytes_footer_extra_data(const uint8_t *p_bytes) {
	return 0;
}
uint32_t RenderingShaderContainer::_to_bytes_header_extra_data(uint8_t *) const {
	return 0;
}
uint32_t RenderingShaderContainer::_to_bytes_reflection_extra_data(uint8_t *) const {
	return 0;
}
uint32_t RenderingShaderContainer::_to_bytes_reflection_binding_uniform_extra_data(uint8_t *, uint32_t) const {
	return 0;
}
uint32_t RenderingShaderContainer::_to_bytes_reflection_specialization_extra_data(uint8_t *, uint32_t) const {
	return 0;
}
uint32_t RenderingShaderContainer::_to_bytes_shader_extra_data(uint8_t *, uint32_t) const {
	return 0;
}
uint32_t RenderingShaderContainer::_to_bytes_footer_extra_data(uint8_t *) const {
	return 0;
}
// Hook invoked at the end of set_from_shader_reflection(); derived containers
// may override it to derive additional state from the reflection.
void RenderingShaderContainer::_set_from_shader_reflection_post(const RenderingDeviceCommons::ShaderReflection &p_reflection) {
	// Do nothing.
}
  101. Error RenderingShaderContainer::reflect_spirv(const String &p_shader_name, Span<RenderingDeviceCommons::ShaderStageSPIRVData> p_spirv, LocalVector<ReflectedShaderStage> &r_refl) {
  102. using RDC = RenderingDeviceCommons;
  103. RDC::ShaderReflection reflection;
  104. shader_name = p_shader_name.utf8();
  105. const uint32_t spirv_size = p_spirv.size() + 0;
  106. r_refl.resize(spirv_size);
  107. for (uint32_t i = 0; i < spirv_size; i++) {
  108. RDC::ShaderStage stage = p_spirv[i].shader_stage;
  109. RDC::ShaderStage stage_flag = (RDC::ShaderStage)(1 << p_spirv[i].shader_stage);
  110. r_refl[i].shader_stage = p_spirv[i].shader_stage;
  111. r_refl[i]._spirv_data = p_spirv[i].spirv;
  112. if (p_spirv[i].shader_stage == RDC::SHADER_STAGE_COMPUTE) {
  113. reflection.is_compute = true;
  114. ERR_FAIL_COND_V_MSG(spirv_size != 1, FAILED,
  115. "Compute shaders can only receive one stage, dedicated to compute.");
  116. }
  117. ERR_FAIL_COND_V_MSG(reflection.stages_bits.has_flag(stage_flag), FAILED,
  118. "Stage " + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + " submitted more than once.");
  119. {
  120. SpvReflectShaderModule &module = *r_refl.ptr()[i]._module;
  121. const uint8_t *spirv = p_spirv[i].spirv.ptr();
  122. SpvReflectResult result = spvReflectCreateShaderModule2(SPV_REFLECT_MODULE_FLAG_NO_COPY, p_spirv[i].spirv.size(), spirv, &module);
  123. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  124. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed parsing shader.");
  125. for (uint32_t j = 0; j < module.capability_count; j++) {
  126. if (module.capabilities[j].value == SpvCapabilityMultiView) {
  127. reflection.has_multiview = true;
  128. break;
  129. }
  130. }
  131. if (reflection.is_compute) {
  132. reflection.compute_local_size[0] = module.entry_points->local_size.x;
  133. reflection.compute_local_size[1] = module.entry_points->local_size.y;
  134. reflection.compute_local_size[2] = module.entry_points->local_size.z;
  135. }
  136. uint32_t binding_count = 0;
  137. result = spvReflectEnumerateDescriptorBindings(&module, &binding_count, nullptr);
  138. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  139. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed enumerating descriptor bindings.");
  140. if (binding_count > 0) {
  141. // Parse bindings.
  142. Vector<SpvReflectDescriptorBinding *> bindings;
  143. bindings.resize(binding_count);
  144. result = spvReflectEnumerateDescriptorBindings(&module, &binding_count, bindings.ptrw());
  145. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  146. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed getting descriptor bindings.");
  147. for (uint32_t j = 0; j < binding_count; j++) {
  148. const SpvReflectDescriptorBinding &binding = *bindings[j];
  149. RDC::ShaderUniform uniform;
  150. bool need_array_dimensions = false;
  151. bool need_block_size = false;
  152. bool may_be_writable = false;
  153. switch (binding.descriptor_type) {
  154. case SPV_REFLECT_DESCRIPTOR_TYPE_SAMPLER: {
  155. uniform.type = RDC::UNIFORM_TYPE_SAMPLER;
  156. need_array_dimensions = true;
  157. } break;
  158. case SPV_REFLECT_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
  159. uniform.type = RDC::UNIFORM_TYPE_SAMPLER_WITH_TEXTURE;
  160. need_array_dimensions = true;
  161. } break;
  162. case SPV_REFLECT_DESCRIPTOR_TYPE_SAMPLED_IMAGE: {
  163. uniform.type = RDC::UNIFORM_TYPE_TEXTURE;
  164. need_array_dimensions = true;
  165. } break;
  166. case SPV_REFLECT_DESCRIPTOR_TYPE_STORAGE_IMAGE: {
  167. uniform.type = RDC::UNIFORM_TYPE_IMAGE;
  168. need_array_dimensions = true;
  169. may_be_writable = true;
  170. } break;
  171. case SPV_REFLECT_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: {
  172. uniform.type = RDC::UNIFORM_TYPE_TEXTURE_BUFFER;
  173. need_array_dimensions = true;
  174. } break;
  175. case SPV_REFLECT_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
  176. uniform.type = RDC::UNIFORM_TYPE_IMAGE_BUFFER;
  177. need_array_dimensions = true;
  178. may_be_writable = true;
  179. } break;
  180. case SPV_REFLECT_DESCRIPTOR_TYPE_UNIFORM_BUFFER: {
  181. uniform.type = RDC::UNIFORM_TYPE_UNIFORM_BUFFER;
  182. need_block_size = true;
  183. } break;
  184. case SPV_REFLECT_DESCRIPTOR_TYPE_STORAGE_BUFFER: {
  185. uniform.type = RDC::UNIFORM_TYPE_STORAGE_BUFFER;
  186. need_block_size = true;
  187. may_be_writable = true;
  188. } break;
  189. case SPV_REFLECT_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: {
  190. ERR_PRINT("Dynamic uniform buffer not supported.");
  191. continue;
  192. } break;
  193. case SPV_REFLECT_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
  194. ERR_PRINT("Dynamic storage buffer not supported.");
  195. continue;
  196. } break;
  197. case SPV_REFLECT_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
  198. uniform.type = RDC::UNIFORM_TYPE_INPUT_ATTACHMENT;
  199. need_array_dimensions = true;
  200. } break;
  201. case SPV_REFLECT_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: {
  202. ERR_PRINT("Acceleration structure not supported.");
  203. continue;
  204. } break;
  205. }
  206. if (need_array_dimensions) {
  207. if (binding.array.dims_count == 0) {
  208. uniform.length = 1;
  209. } else {
  210. for (uint32_t k = 0; k < binding.array.dims_count; k++) {
  211. if (k == 0) {
  212. uniform.length = binding.array.dims[0];
  213. } else {
  214. uniform.length *= binding.array.dims[k];
  215. }
  216. }
  217. }
  218. } else if (need_block_size) {
  219. uniform.length = binding.block.size;
  220. } else {
  221. uniform.length = 0;
  222. }
  223. if (may_be_writable) {
  224. if (binding.descriptor_type == SPV_REFLECT_DESCRIPTOR_TYPE_STORAGE_IMAGE) {
  225. uniform.writable = !(binding.decoration_flags & SPV_REFLECT_DECORATION_NON_WRITABLE);
  226. } else {
  227. uniform.writable = !(binding.decoration_flags & SPV_REFLECT_DECORATION_NON_WRITABLE) && !(binding.block.decoration_flags & SPV_REFLECT_DECORATION_NON_WRITABLE);
  228. }
  229. } else {
  230. uniform.writable = false;
  231. }
  232. uniform.binding = binding.binding;
  233. uint32_t set = binding.set;
  234. ERR_FAIL_COND_V_MSG(set >= RDC::MAX_UNIFORM_SETS, FAILED,
  235. "On shader stage '" + String(RDC::SHADER_STAGE_NAMES[stage]) + "', uniform '" + binding.name + "' uses a set (" + itos(set) + ") index larger than what is supported (" + itos(RDC::MAX_UNIFORM_SETS) + ").");
  236. if (set < (uint32_t)reflection.uniform_sets.size()) {
  237. // Check if this already exists.
  238. bool exists = false;
  239. for (int k = 0; k < reflection.uniform_sets[set].size(); k++) {
  240. if (reflection.uniform_sets[set][k].binding == uniform.binding) {
  241. // Already exists, verify that it's the same type.
  242. ERR_FAIL_COND_V_MSG(reflection.uniform_sets[set][k].type != uniform.type, FAILED,
  243. "On shader stage '" + String(RDC::SHADER_STAGE_NAMES[stage]) + "', uniform '" + binding.name + "' trying to reuse location for set=" + itos(set) + ", binding=" + itos(uniform.binding) + " with different uniform type.");
  244. // Also, verify that it's the same size.
  245. ERR_FAIL_COND_V_MSG(reflection.uniform_sets[set][k].length != uniform.length, FAILED,
  246. "On shader stage '" + String(RDC::SHADER_STAGE_NAMES[stage]) + "', uniform '" + binding.name + "' trying to reuse location for set=" + itos(set) + ", binding=" + itos(uniform.binding) + " with different uniform size.");
  247. // Also, verify that it has the same writability.
  248. ERR_FAIL_COND_V_MSG(reflection.uniform_sets[set][k].writable != uniform.writable, FAILED,
  249. "On shader stage '" + String(RDC::SHADER_STAGE_NAMES[stage]) + "', uniform '" + binding.name + "' trying to reuse location for set=" + itos(set) + ", binding=" + itos(uniform.binding) + " with different writability.");
  250. // Just append stage mask and return.
  251. reflection.uniform_sets.write[set].write[k].stages.set_flag(stage_flag);
  252. exists = true;
  253. break;
  254. }
  255. }
  256. if (exists) {
  257. continue; // Merged.
  258. }
  259. }
  260. uniform.stages.set_flag(stage_flag);
  261. if (set >= (uint32_t)reflection.uniform_sets.size()) {
  262. reflection.uniform_sets.resize(set + 1);
  263. }
  264. reflection.uniform_sets.write[set].push_back(uniform);
  265. }
  266. }
  267. {
  268. // Specialization constants.
  269. uint32_t sc_count = 0;
  270. result = spvReflectEnumerateSpecializationConstants(&module, &sc_count, nullptr);
  271. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  272. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed enumerating specialization constants.");
  273. if (sc_count) {
  274. Vector<SpvReflectSpecializationConstant *> spec_constants;
  275. spec_constants.resize(sc_count);
  276. result = spvReflectEnumerateSpecializationConstants(&module, &sc_count, spec_constants.ptrw());
  277. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  278. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed obtaining specialization constants.");
  279. for (uint32_t j = 0; j < sc_count; j++) {
  280. int32_t existing = -1;
  281. RDC::ShaderSpecializationConstant sconst;
  282. SpvReflectSpecializationConstant *spc = spec_constants[j];
  283. sconst.constant_id = spc->constant_id;
  284. sconst.int_value = 0; // Clear previous value JIC.
  285. switch (spc->constant_type) {
  286. case SPV_REFLECT_SPECIALIZATION_CONSTANT_BOOL: {
  287. sconst.type = RDC::PIPELINE_SPECIALIZATION_CONSTANT_TYPE_BOOL;
  288. sconst.bool_value = spc->default_value.int_bool_value != 0;
  289. } break;
  290. case SPV_REFLECT_SPECIALIZATION_CONSTANT_INT: {
  291. sconst.type = RDC::PIPELINE_SPECIALIZATION_CONSTANT_TYPE_INT;
  292. sconst.int_value = spc->default_value.int_bool_value;
  293. } break;
  294. case SPV_REFLECT_SPECIALIZATION_CONSTANT_FLOAT: {
  295. sconst.type = RDC::PIPELINE_SPECIALIZATION_CONSTANT_TYPE_FLOAT;
  296. sconst.float_value = spc->default_value.float_value;
  297. } break;
  298. }
  299. sconst.stages.set_flag(stage_flag);
  300. for (int k = 0; k < reflection.specialization_constants.size(); k++) {
  301. if (reflection.specialization_constants[k].constant_id == sconst.constant_id) {
  302. ERR_FAIL_COND_V_MSG(reflection.specialization_constants[k].type != sconst.type, FAILED, "More than one specialization constant used for id (" + itos(sconst.constant_id) + "), but their types differ.");
  303. ERR_FAIL_COND_V_MSG(reflection.specialization_constants[k].int_value != sconst.int_value, FAILED, "More than one specialization constant used for id (" + itos(sconst.constant_id) + "), but their default values differ.");
  304. existing = k;
  305. break;
  306. }
  307. }
  308. if (existing >= 0) {
  309. reflection.specialization_constants.write[existing].stages.set_flag(stage_flag);
  310. } else {
  311. reflection.specialization_constants.push_back(sconst);
  312. }
  313. }
  314. reflection.specialization_constants.sort();
  315. }
  316. }
  317. if (stage == RDC::SHADER_STAGE_VERTEX || stage == RDC::SHADER_STAGE_FRAGMENT) {
  318. uint32_t iv_count = 0;
  319. result = spvReflectEnumerateInputVariables(&module, &iv_count, nullptr);
  320. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  321. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed enumerating input variables.");
  322. if (iv_count) {
  323. Vector<SpvReflectInterfaceVariable *> input_vars;
  324. input_vars.resize(iv_count);
  325. result = spvReflectEnumerateInputVariables(&module, &iv_count, input_vars.ptrw());
  326. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  327. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed obtaining input variables.");
  328. for (const SpvReflectInterfaceVariable *v : input_vars) {
  329. if (!v) {
  330. continue;
  331. }
  332. if (stage == RDC::SHADER_STAGE_VERTEX) {
  333. if (v->decoration_flags == 0) { // Regular input.
  334. reflection.vertex_input_mask |= (((uint64_t)1) << v->location);
  335. }
  336. }
  337. if (v->built_in == SpvBuiltInViewIndex) {
  338. reflection.has_multiview = true;
  339. }
  340. }
  341. }
  342. }
  343. if (stage == RDC::SHADER_STAGE_FRAGMENT) {
  344. uint32_t ov_count = 0;
  345. result = spvReflectEnumerateOutputVariables(&module, &ov_count, nullptr);
  346. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  347. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed enumerating output variables.");
  348. if (ov_count) {
  349. Vector<SpvReflectInterfaceVariable *> output_vars;
  350. output_vars.resize(ov_count);
  351. result = spvReflectEnumerateOutputVariables(&module, &ov_count, output_vars.ptrw());
  352. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  353. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed obtaining output variables.");
  354. for (const SpvReflectInterfaceVariable *refvar : output_vars) {
  355. if (!refvar) {
  356. continue;
  357. }
  358. if (refvar->built_in != SpvBuiltInFragDepth) {
  359. reflection.fragment_output_mask |= 1 << refvar->location;
  360. }
  361. }
  362. }
  363. }
  364. uint32_t pc_count = 0;
  365. result = spvReflectEnumeratePushConstantBlocks(&module, &pc_count, nullptr);
  366. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  367. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed enumerating push constants.");
  368. if (pc_count) {
  369. ERR_FAIL_COND_V_MSG(pc_count > 1, FAILED,
  370. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "': Only one push constant is supported, which should be the same across shader stages.");
  371. Vector<SpvReflectBlockVariable *> pconstants;
  372. pconstants.resize(pc_count);
  373. result = spvReflectEnumeratePushConstantBlocks(&module, &pc_count, pconstants.ptrw());
  374. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  375. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed obtaining push constants.");
  376. #if 0
  377. if (pconstants[0] == nullptr) {
  378. Ref<FileAccess> f = FileAccess::open("res://popo.spv", FileAccess::WRITE);
  379. f->store_buffer((const uint8_t *)&SpirV[0], SpirV.size() * sizeof(uint32_t));
  380. }
  381. #endif
  382. ERR_FAIL_COND_V_MSG(reflection.push_constant_size && reflection.push_constant_size != pconstants[0]->size, FAILED,
  383. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "': Push constant block must be the same across shader stages.");
  384. reflection.push_constant_size = pconstants[0]->size;
  385. reflection.push_constant_stages.set_flag(stage_flag);
  386. //print_line("Stage: " + String(RDC::SHADER_STAGE_NAMES[stage]) + " push constant of size=" + itos(push_constant.push_constant_size));
  387. }
  388. }
  389. reflection.stages_bits.set_flag(stage_flag);
  390. }
  391. // Sort all uniform_sets by binding.
  392. for (uint32_t i = 0; i < reflection.uniform_sets.size(); i++) {
  393. reflection.uniform_sets.write[i].sort();
  394. }
  395. set_from_shader_reflection(reflection);
  396. return OK;
  397. }
// Flattens p_reflection into the serializable members of this container:
// the fixed-size reflection_data header, the packed per-set uniform arrays,
// the specialization-constant array and the ordered stage list.
// NOTE(review): shader_name_len is taken from the member shader_name, which
// must already hold the name (reflect_spirv sets it before calling this).
void RenderingShaderContainer::set_from_shader_reflection(const RenderingDeviceCommons::ShaderReflection &p_reflection) {
	// Discard any previously stored reflection arrays.
	reflection_binding_set_uniforms_count.clear();
	reflection_binding_set_uniforms_data.clear();
	reflection_specialization_data.clear();
	reflection_shader_stages.clear();

	// Copy the scalar header fields.
	reflection_data.vertex_input_mask = p_reflection.vertex_input_mask;
	reflection_data.fragment_output_mask = p_reflection.fragment_output_mask;
	reflection_data.specialization_constants_count = p_reflection.specialization_constants.size();
	reflection_data.is_compute = p_reflection.is_compute;
	reflection_data.has_multiview = p_reflection.has_multiview;
	reflection_data.compute_local_size[0] = p_reflection.compute_local_size[0];
	reflection_data.compute_local_size[1] = p_reflection.compute_local_size[1];
	reflection_data.compute_local_size[2] = p_reflection.compute_local_size[2];
	reflection_data.set_count = p_reflection.uniform_sets.size();
	reflection_data.push_constant_size = p_reflection.push_constant_size;
	reflection_data.push_constant_stages_mask = uint32_t(p_reflection.push_constant_stages);
	reflection_data.shader_name_len = shader_name.length();

	// Pack every uniform of every set into one flat array, recording the
	// per-set uniform counts so the sets can be reconstructed later.
	ReflectionBindingData binding_data;
	for (const Vector<RenderingDeviceCommons::ShaderUniform> &uniform_set : p_reflection.uniform_sets) {
		for (const RenderingDeviceCommons::ShaderUniform &uniform : uniform_set) {
			binding_data.type = uint32_t(uniform.type);
			binding_data.binding = uniform.binding;
			binding_data.stages = uint32_t(uniform.stages);
			binding_data.length = uniform.length;
			binding_data.writable = uint32_t(uniform.writable);
			reflection_binding_set_uniforms_data.push_back(binding_data);
		}

		reflection_binding_set_uniforms_count.push_back(uniform_set.size());
	}

	// Pack the specialization constants.
	ReflectionSpecializationData specialization_data;
	for (const RenderingDeviceCommons::ShaderSpecializationConstant &spec : p_reflection.specialization_constants) {
		specialization_data.type = uint32_t(spec.type);
		specialization_data.constant_id = spec.constant_id;
		specialization_data.int_value = spec.int_value;
		specialization_data.stage_flags = uint32_t(spec.stages);
		reflection_specialization_data.push_back(specialization_data);
	}

	// Expand the stage bitmask into an ordered list of stage enums.
	for (uint32_t i = 0; i < RenderingDeviceCommons::SHADER_STAGE_MAX; i++) {
		if (p_reflection.stages_bits.has_flag(RenderingDeviceCommons::ShaderStage(1U << i))) {
			reflection_shader_stages.push_back(RenderingDeviceCommons::ShaderStage(i));
		}
	}

	reflection_data.stage_count = reflection_shader_stages.size();

	_set_from_shader_reflection_post(p_reflection);
}
// Reflects the given SPIR-V stages and forwards the result to the
// format-specific _set_code_from_spirv() for encoding into the container.
// Returns false when reflection fails.
bool RenderingShaderContainer::set_code_from_spirv(const String &p_shader_name, Span<RenderingDeviceCommons::ShaderStageSPIRVData> p_spirv) {
	LocalVector<ReflectedShaderStage> spirv;
	ERR_FAIL_COND_V(reflect_spirv(p_shader_name, p_spirv, spirv) != OK, false);
	return _set_code_from_spirv(spirv.span());
}
  448. RenderingDeviceCommons::ShaderReflection RenderingShaderContainer::get_shader_reflection() const {
  449. RenderingDeviceCommons::ShaderReflection shader_refl;
  450. shader_refl.push_constant_size = reflection_data.push_constant_size;
  451. shader_refl.push_constant_stages = reflection_data.push_constant_stages_mask;
  452. shader_refl.vertex_input_mask = reflection_data.vertex_input_mask;
  453. shader_refl.fragment_output_mask = reflection_data.fragment_output_mask;
  454. shader_refl.is_compute = reflection_data.is_compute;
  455. shader_refl.has_multiview = reflection_data.has_multiview;
  456. shader_refl.compute_local_size[0] = reflection_data.compute_local_size[0];
  457. shader_refl.compute_local_size[1] = reflection_data.compute_local_size[1];
  458. shader_refl.compute_local_size[2] = reflection_data.compute_local_size[2];
  459. shader_refl.uniform_sets.resize(reflection_data.set_count);
  460. shader_refl.specialization_constants.resize(reflection_data.specialization_constants_count);
  461. shader_refl.stages_vector.resize(reflection_data.stage_count);
  462. DEV_ASSERT(reflection_binding_set_uniforms_count.size() == reflection_data.set_count && "The amount of elements in the reflection and the shader container can't be different.");
  463. uint32_t uniform_index = 0;
  464. for (uint32_t i = 0; i < reflection_data.set_count; i++) {
  465. Vector<RenderingDeviceCommons::ShaderUniform> &uniform_set = shader_refl.uniform_sets.ptrw()[i];
  466. uint32_t uniforms_count = reflection_binding_set_uniforms_count[i];
  467. uniform_set.resize(uniforms_count);
  468. for (uint32_t j = 0; j < uniforms_count; j++) {
  469. const ReflectionBindingData &binding = reflection_binding_set_uniforms_data[uniform_index++];
  470. RenderingDeviceCommons::ShaderUniform &uniform = uniform_set.ptrw()[j];
  471. uniform.type = RenderingDeviceCommons::UniformType(binding.type);
  472. uniform.writable = binding.writable;
  473. uniform.length = binding.length;
  474. uniform.binding = binding.binding;
  475. uniform.stages = binding.stages;
  476. }
  477. }
  478. shader_refl.specialization_constants.resize(reflection_data.specialization_constants_count);
  479. for (uint32_t i = 0; i < reflection_data.specialization_constants_count; i++) {
  480. const ReflectionSpecializationData &spec = reflection_specialization_data[i];
  481. RenderingDeviceCommons::ShaderSpecializationConstant &sc = shader_refl.specialization_constants.ptrw()[i];
  482. sc.type = RenderingDeviceCommons::PipelineSpecializationConstantType(spec.type);
  483. sc.constant_id = spec.constant_id;
  484. sc.int_value = spec.int_value;
  485. sc.stages = spec.stage_flags;
  486. }
  487. shader_refl.stages_vector.resize(reflection_data.stage_count);
  488. for (uint32_t i = 0; i < reflection_data.stage_count; i++) {
  489. shader_refl.stages_vector.set(i, reflection_shader_stages[i]);
  490. shader_refl.stages_bits.set_flag(RenderingDeviceCommons::ShaderStage(1U << reflection_shader_stages[i]));
  491. }
  492. return shader_refl;
  493. }
  494. bool RenderingShaderContainer::from_bytes(const PackedByteArray &p_bytes) {
  495. const uint64_t alignment = sizeof(uint32_t);
  496. const uint8_t *bytes_ptr = p_bytes.ptr();
  497. uint64_t bytes_offset = 0;
  498. // Read container header.
  499. ERR_FAIL_COND_V_MSG(int64_t(bytes_offset + sizeof(ContainerHeader)) > p_bytes.size(), false, "Not enough bytes for a container header in shader container.");
  500. const ContainerHeader &container_header = *(const ContainerHeader *)(&bytes_ptr[bytes_offset]);
  501. bytes_offset += sizeof(ContainerHeader);
  502. bytes_offset += _from_bytes_header_extra_data(&bytes_ptr[bytes_offset]);
  503. ERR_FAIL_COND_V_MSG(container_header.magic_number != CONTAINER_MAGIC_NUMBER, false, "Incorrect magic number in shader container.");
  504. ERR_FAIL_COND_V_MSG(container_header.version > CONTAINER_VERSION, false, "Unsupported version in shader container.");
  505. ERR_FAIL_COND_V_MSG(container_header.format != _format(), false, "Incorrect format in shader container.");
  506. ERR_FAIL_COND_V_MSG(container_header.format_version > _format_version(), false, "Unsupported format version in shader container.");
  507. // Adjust shaders to the size indicated by the container header.
  508. shaders.resize(container_header.shader_count);
  509. // Read reflection data.
  510. ERR_FAIL_COND_V_MSG(int64_t(bytes_offset + sizeof(ReflectionData)) > p_bytes.size(), false, "Not enough bytes for reflection data in shader container.");
  511. reflection_data = *(const ReflectionData *)(&bytes_ptr[bytes_offset]);
  512. bytes_offset += sizeof(ReflectionData);
  513. bytes_offset += _from_bytes_reflection_extra_data(&bytes_ptr[bytes_offset]);
  514. // Read shader name.
  515. ERR_FAIL_COND_V_MSG(int64_t(bytes_offset + reflection_data.shader_name_len) > p_bytes.size(), false, "Not enough bytes for shader name in shader container.");
  516. if (reflection_data.shader_name_len > 0) {
  517. String shader_name_str;
  518. shader_name_str.append_utf8((const char *)(&bytes_ptr[bytes_offset]), reflection_data.shader_name_len);
  519. shader_name = shader_name_str.utf8();
  520. bytes_offset = aligned_to(bytes_offset + reflection_data.shader_name_len, alignment);
  521. } else {
  522. shader_name = CharString();
  523. }
  524. reflection_binding_set_uniforms_count.resize(reflection_data.set_count);
  525. reflection_binding_set_uniforms_data.clear();
  526. uint32_t uniform_index = 0;
  527. for (uint32_t i = 0; i < reflection_data.set_count; i++) {
  528. ERR_FAIL_COND_V_MSG(int64_t(bytes_offset + sizeof(uint32_t)) > p_bytes.size(), false, "Not enough bytes for uniform set count in shader container.");
  529. uint32_t uniforms_count = *(uint32_t *)(&bytes_ptr[bytes_offset]);
  530. reflection_binding_set_uniforms_count.ptrw()[i] = uniforms_count;
  531. bytes_offset += sizeof(uint32_t);
  532. reflection_binding_set_uniforms_data.resize(reflection_binding_set_uniforms_data.size() + uniforms_count);
  533. bytes_offset += _from_bytes_reflection_binding_uniform_extra_data_start(&bytes_ptr[bytes_offset]);
  534. for (uint32_t j = 0; j < uniforms_count; j++) {
  535. ERR_FAIL_COND_V_MSG(int64_t(bytes_offset + sizeof(ReflectionBindingData)) > p_bytes.size(), false, "Not enough bytes for uniform in shader container.");
  536. memcpy(&reflection_binding_set_uniforms_data.ptrw()[uniform_index], &bytes_ptr[bytes_offset], sizeof(ReflectionBindingData));
  537. bytes_offset += sizeof(ReflectionBindingData);
  538. bytes_offset += _from_bytes_reflection_binding_uniform_extra_data(&bytes_ptr[bytes_offset], uniform_index);
  539. uniform_index++;
  540. }
  541. }
  542. reflection_specialization_data.resize(reflection_data.specialization_constants_count);
  543. bytes_offset += _from_bytes_reflection_specialization_extra_data_start(&bytes_ptr[bytes_offset]);
  544. for (uint32_t i = 0; i < reflection_data.specialization_constants_count; i++) {
  545. ERR_FAIL_COND_V_MSG(int64_t(bytes_offset + sizeof(ReflectionSpecializationData)) > p_bytes.size(), false, "Not enough bytes for specialization in shader container.");
  546. memcpy(&reflection_specialization_data.ptrw()[i], &bytes_ptr[bytes_offset], sizeof(ReflectionSpecializationData));
  547. bytes_offset += sizeof(ReflectionSpecializationData);
  548. bytes_offset += _from_bytes_reflection_specialization_extra_data(&bytes_ptr[bytes_offset], i);
  549. }
  550. const uint32_t stage_count = reflection_data.stage_count;
  551. if (stage_count > 0) {
  552. ERR_FAIL_COND_V_MSG(int64_t(bytes_offset + stage_count * sizeof(RenderingDeviceCommons::ShaderStage)) > p_bytes.size(), false, "Not enough bytes for stages in shader container.");
  553. reflection_shader_stages.resize(stage_count);
  554. bytes_offset += _from_bytes_shader_extra_data_start(&bytes_ptr[bytes_offset]);
  555. memcpy(reflection_shader_stages.ptrw(), &bytes_ptr[bytes_offset], stage_count * sizeof(RenderingDeviceCommons::ShaderStage));
  556. bytes_offset += stage_count * sizeof(RenderingDeviceCommons::ShaderStage);
  557. }
  558. // Read shaders.
  559. for (int64_t i = 0; i < shaders.size(); i++) {
  560. ERR_FAIL_COND_V_MSG(int64_t(bytes_offset + sizeof(ShaderHeader)) > p_bytes.size(), false, "Not enough bytes for shader header in shader container.");
  561. const ShaderHeader &header = *(const ShaderHeader *)(&bytes_ptr[bytes_offset]);
  562. bytes_offset += sizeof(ShaderHeader);
  563. ERR_FAIL_COND_V_MSG(int64_t(bytes_offset + header.code_compressed_size) > p_bytes.size(), false, "Not enough bytes for a shader in shader container.");
  564. Shader &shader = shaders.ptrw()[i];
  565. shader.shader_stage = RenderingDeviceCommons::ShaderStage(header.shader_stage);
  566. shader.code_compression_flags = header.code_compression_flags;
  567. shader.code_decompressed_size = header.code_decompressed_size;
  568. shader.code_compressed_bytes.resize(header.code_compressed_size);
  569. memcpy(shader.code_compressed_bytes.ptrw(), &bytes_ptr[bytes_offset], header.code_compressed_size);
  570. bytes_offset = aligned_to(bytes_offset + header.code_compressed_size, alignment);
  571. bytes_offset += _from_bytes_shader_extra_data(&bytes_ptr[bytes_offset], i);
  572. }
  573. bytes_offset += _from_bytes_footer_extra_data(&bytes_ptr[bytes_offset]);
  574. ERR_FAIL_COND_V_MSG(bytes_offset != (uint64_t)p_bytes.size(), false, "Amount of bytes in the container does not match the amount of bytes read.");
  575. return true;
  576. }
  577. PackedByteArray RenderingShaderContainer::to_bytes() const {
  578. // Compute the exact size the container will require for writing everything out.
  579. const uint64_t alignment = sizeof(uint32_t);
  580. uint64_t total_size = 0;
  581. total_size += sizeof(ContainerHeader) + _to_bytes_header_extra_data(nullptr);
  582. total_size += sizeof(ReflectionData) + _to_bytes_reflection_extra_data(nullptr);
  583. total_size += aligned_to(reflection_data.shader_name_len, alignment);
  584. total_size += reflection_binding_set_uniforms_count.size() * sizeof(uint32_t);
  585. total_size += reflection_binding_set_uniforms_data.size() * sizeof(ReflectionBindingData);
  586. total_size += reflection_specialization_data.size() * sizeof(ReflectionSpecializationData);
  587. total_size += reflection_shader_stages.size() * sizeof(RenderingDeviceCommons::ShaderStage);
  588. for (uint32_t i = 0; i < reflection_binding_set_uniforms_data.size(); i++) {
  589. total_size += _to_bytes_reflection_binding_uniform_extra_data(nullptr, i);
  590. }
  591. for (uint32_t i = 0; i < reflection_specialization_data.size(); i++) {
  592. total_size += _to_bytes_reflection_specialization_extra_data(nullptr, i);
  593. }
  594. for (uint32_t i = 0; i < shaders.size(); i++) {
  595. total_size += sizeof(ShaderHeader);
  596. total_size += shaders[i].code_compressed_bytes.size();
  597. total_size = aligned_to(total_size, alignment);
  598. total_size += _to_bytes_shader_extra_data(nullptr, i);
  599. }
  600. total_size += _to_bytes_footer_extra_data(nullptr);
  601. // Create the array that will hold all of the data.
  602. PackedByteArray bytes;
  603. bytes.resize_initialized(total_size);
  604. // Write out the data to the array.
  605. uint64_t bytes_offset = 0;
  606. uint8_t *bytes_ptr = bytes.ptrw();
  607. ContainerHeader &container_header = *(ContainerHeader *)(&bytes_ptr[bytes_offset]);
  608. container_header.magic_number = CONTAINER_MAGIC_NUMBER;
  609. container_header.version = CONTAINER_VERSION;
  610. container_header.format = _format();
  611. container_header.format_version = _format_version();
  612. container_header.shader_count = shaders.size();
  613. bytes_offset += sizeof(ContainerHeader);
  614. bytes_offset += _to_bytes_header_extra_data(&bytes_ptr[bytes_offset]);
  615. memcpy(&bytes_ptr[bytes_offset], &reflection_data, sizeof(ReflectionData));
  616. bytes_offset += sizeof(ReflectionData);
  617. bytes_offset += _to_bytes_reflection_extra_data(&bytes_ptr[bytes_offset]);
  618. if (shader_name.size() > 0) {
  619. memcpy(&bytes_ptr[bytes_offset], shader_name.ptr(), reflection_data.shader_name_len);
  620. bytes_offset = aligned_to(bytes_offset + reflection_data.shader_name_len, alignment);
  621. }
  622. uint32_t uniform_index = 0;
  623. for (uint32_t uniform_count : reflection_binding_set_uniforms_count) {
  624. memcpy(&bytes_ptr[bytes_offset], &uniform_count, sizeof(uniform_count));
  625. bytes_offset += sizeof(uint32_t);
  626. for (uint32_t i = 0; i < uniform_count; i++) {
  627. memcpy(&bytes_ptr[bytes_offset], &reflection_binding_set_uniforms_data[uniform_index], sizeof(ReflectionBindingData));
  628. bytes_offset += sizeof(ReflectionBindingData);
  629. bytes_offset += _to_bytes_reflection_binding_uniform_extra_data(&bytes_ptr[bytes_offset], uniform_index);
  630. uniform_index++;
  631. }
  632. }
  633. for (uint32_t i = 0; i < reflection_specialization_data.size(); i++) {
  634. memcpy(&bytes_ptr[bytes_offset], &reflection_specialization_data.ptr()[i], sizeof(ReflectionSpecializationData));
  635. bytes_offset += sizeof(ReflectionSpecializationData);
  636. bytes_offset += _to_bytes_reflection_specialization_extra_data(&bytes_ptr[bytes_offset], i);
  637. }
  638. if (!reflection_shader_stages.is_empty()) {
  639. uint32_t stage_count = reflection_shader_stages.size();
  640. memcpy(&bytes_ptr[bytes_offset], reflection_shader_stages.ptr(), stage_count * sizeof(RenderingDeviceCommons::ShaderStage));
  641. bytes_offset += stage_count * sizeof(RenderingDeviceCommons::ShaderStage);
  642. }
  643. for (uint32_t i = 0; i < shaders.size(); i++) {
  644. const Shader &shader = shaders[i];
  645. ShaderHeader &header = *(ShaderHeader *)(&bytes.ptr()[bytes_offset]);
  646. header.shader_stage = shader.shader_stage;
  647. header.code_compressed_size = uint32_t(shader.code_compressed_bytes.size());
  648. header.code_compression_flags = shader.code_compression_flags;
  649. header.code_decompressed_size = shader.code_decompressed_size;
  650. bytes_offset += sizeof(ShaderHeader);
  651. memcpy(&bytes.ptrw()[bytes_offset], shader.code_compressed_bytes.ptr(), shader.code_compressed_bytes.size());
  652. bytes_offset = aligned_to(bytes_offset + shader.code_compressed_bytes.size(), alignment);
  653. bytes_offset += _to_bytes_shader_extra_data(&bytes_ptr[bytes_offset], i);
  654. }
  655. bytes_offset += _to_bytes_footer_extra_data(&bytes_ptr[bytes_offset]);
  656. ERR_FAIL_COND_V_MSG(bytes_offset != total_size, PackedByteArray(), "Amount of bytes written does not match the amount of bytes reserved for the container.");
  657. return bytes;
  658. }
  659. bool RenderingShaderContainer::compress_code(const uint8_t *p_decompressed_bytes, uint32_t p_decompressed_size, uint8_t *p_compressed_bytes, uint32_t *r_compressed_size, uint32_t *r_compressed_flags) const {
  660. DEV_ASSERT(p_decompressed_bytes != nullptr);
  661. DEV_ASSERT(p_decompressed_size > 0);
  662. DEV_ASSERT(p_compressed_bytes != nullptr);
  663. DEV_ASSERT(r_compressed_size != nullptr);
  664. DEV_ASSERT(r_compressed_flags != nullptr);
  665. *r_compressed_flags = 0;
  666. PackedByteArray zstd_bytes;
  667. const int64_t zstd_max_bytes = Compression::get_max_compressed_buffer_size(p_decompressed_size, Compression::MODE_ZSTD);
  668. zstd_bytes.resize(zstd_max_bytes);
  669. const int64_t zstd_size = Compression::compress(zstd_bytes.ptrw(), p_decompressed_bytes, p_decompressed_size, Compression::MODE_ZSTD);
  670. if (zstd_size > 0 && (uint32_t)(zstd_size) < p_decompressed_size) {
  671. // Only choose Zstd if it results in actual compression.
  672. memcpy(p_compressed_bytes, zstd_bytes.ptr(), zstd_size);
  673. *r_compressed_size = zstd_size;
  674. *r_compressed_flags |= COMPRESSION_FLAG_ZSTD;
  675. } else {
  676. // Just copy the input to the output directly.
  677. memcpy(p_compressed_bytes, p_decompressed_bytes, p_decompressed_size);
  678. *r_compressed_size = p_decompressed_size;
  679. }
  680. return true;
  681. }
  682. bool RenderingShaderContainer::decompress_code(const uint8_t *p_compressed_bytes, uint32_t p_compressed_size, uint32_t p_compressed_flags, uint8_t *p_decompressed_bytes, uint32_t p_decompressed_size) const {
  683. DEV_ASSERT(p_compressed_bytes != nullptr);
  684. DEV_ASSERT(p_compressed_size > 0);
  685. DEV_ASSERT(p_decompressed_bytes != nullptr);
  686. DEV_ASSERT(p_decompressed_size > 0);
  687. bool uses_zstd = p_compressed_flags & COMPRESSION_FLAG_ZSTD;
  688. if (uses_zstd) {
  689. if (!Compression::decompress(p_decompressed_bytes, p_decompressed_size, p_compressed_bytes, p_compressed_size, Compression::MODE_ZSTD)) {
  690. ERR_FAIL_V_MSG(false, "Malformed zstd input for decompressing shader code.");
  691. }
  692. } else {
  693. memcpy(p_decompressed_bytes, p_compressed_bytes, MIN(p_compressed_size, p_decompressed_size));
  694. }
  695. return true;
  696. }
  697. RenderingShaderContainer::RenderingShaderContainer() {}
  698. RenderingShaderContainer::~RenderingShaderContainer() {}