  1. /**************************************************************************/
  2. /* rendering_shader_container.cpp */
  3. /**************************************************************************/
  4. /* This file is part of: */
  5. /* GODOT ENGINE */
  6. /* https://godotengine.org */
  7. /**************************************************************************/
  8. /* Copyright (c) 2014-present Godot Engine contributors (see AUTHORS.md). */
  9. /* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur. */
  10. /* */
  11. /* Permission is hereby granted, free of charge, to any person obtaining */
  12. /* a copy of this software and associated documentation files (the */
  13. /* "Software"), to deal in the Software without restriction, including */
  14. /* without limitation the rights to use, copy, modify, merge, publish, */
  15. /* distribute, sublicense, and/or sell copies of the Software, and to */
  16. /* permit persons to whom the Software is furnished to do so, subject to */
  17. /* the following conditions: */
  18. /* */
  19. /* The above copyright notice and this permission notice shall be */
  20. /* included in all copies or substantial portions of the Software. */
  21. /* */
  22. /* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
  23. /* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
  24. /* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */
  25. /* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
  26. /* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
  27. /* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
  28. /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
  29. /**************************************************************************/
  30. #include "rendering_shader_container.h"
  31. #include "core/io/compression.h"
  32. #include "thirdparty/spirv-reflect/spirv_reflect.h"
  33. static inline uint32_t aligned_to(uint32_t p_size, uint32_t p_alignment) {
  34. if (p_size % p_alignment) {
  35. return p_size + (p_alignment - (p_size % p_alignment));
  36. } else {
  37. return p_size;
  38. }
  39. }
  40. RenderingShaderContainer::ReflectedShaderStage::ReflectedShaderStage() :
  41. _module(memnew(SpvReflectShaderModule)) {
  42. }
// Tears down the reflection data first (releases everything the SPIRV-Reflect
// library allocated inside the module), then frees the module storage that was
// allocated in the constructor. The order of these two calls must not change.
RenderingShaderContainer::ReflectedShaderStage::~ReflectedShaderStage() {
	spvReflectDestroyShaderModule(_module);
	memdelete(_module);
}
// Read-only accessor for the SPIRV-Reflect module owned by this stage.
const SpvReflectShaderModule &RenderingShaderContainer::ReflectedShaderStage::module() const {
	return *_module;
}
// Returns the stage's SPIR-V as a span of 32-bit words, reinterpreting the
// byte buffer stored in _spirv_data (SPIR-V is defined as a word stream).
const Span<uint32_t> RenderingShaderContainer::ReflectedShaderStage::spirv() const {
	return _spirv_data.span().reinterpret<uint32_t>();
}
// Deserialization hooks for backend-specific "extra data" interleaved in the
// container byte stream (see from_bytes()). Each hook receives a pointer to
// the current read position and returns the number of extra bytes it consumed.
// These base implementations consume nothing; per-backend subclasses override
// the ones relevant to their format.

// Extra data immediately after the container header.
uint32_t RenderingShaderContainer::_from_bytes_header_extra_data(const uint8_t *p_bytes) {
	return 0;
}

// Extra data immediately after the reflection data block.
uint32_t RenderingShaderContainer::_from_bytes_reflection_extra_data(const uint8_t *p_bytes) {
	return 0;
}

// Extra data before the uniforms of a binding set.
uint32_t RenderingShaderContainer::_from_bytes_reflection_binding_uniform_extra_data_start(const uint8_t *p_bytes) {
	return 0;
}

// Extra data after the uniform with the given flat index.
uint32_t RenderingShaderContainer::_from_bytes_reflection_binding_uniform_extra_data(const uint8_t *p_bytes, uint32_t p_index) {
	return 0;
}

// Extra data before the specialization constant array.
uint32_t RenderingShaderContainer::_from_bytes_reflection_specialization_extra_data_start(const uint8_t *p_bytes) {
	return 0;
}

// Extra data after the specialization constant with the given index.
uint32_t RenderingShaderContainer::_from_bytes_reflection_specialization_extra_data(const uint8_t *p_bytes, uint32_t p_index) {
	return 0;
}

// Extra data before the shader stage array.
uint32_t RenderingShaderContainer::_from_bytes_shader_extra_data_start(const uint8_t *p_bytes) {
	return 0;
}

// Extra data after the shader with the given index.
uint32_t RenderingShaderContainer::_from_bytes_shader_extra_data(const uint8_t *p_bytes, uint32_t p_index) {
	return 0;
}

// Extra data at the end of the container.
uint32_t RenderingShaderContainer::_from_bytes_footer_extra_data(const uint8_t *p_bytes) {
	return 0;
}
// Serialization counterparts of the _from_bytes_*_extra_data() hooks. Each
// writes backend-specific extra data at the corresponding position in the
// output stream (or, when called with nullptr, only reports the size) and
// returns the number of extra bytes written. Base implementations write
// nothing; per-backend subclasses override as needed.

uint32_t RenderingShaderContainer::_to_bytes_header_extra_data(uint8_t *) const {
	return 0;
}

uint32_t RenderingShaderContainer::_to_bytes_reflection_extra_data(uint8_t *) const {
	return 0;
}

uint32_t RenderingShaderContainer::_to_bytes_reflection_binding_uniform_extra_data(uint8_t *, uint32_t) const {
	return 0;
}

uint32_t RenderingShaderContainer::_to_bytes_reflection_specialization_extra_data(uint8_t *, uint32_t) const {
	return 0;
}

uint32_t RenderingShaderContainer::_to_bytes_shader_extra_data(uint8_t *, uint32_t) const {
	return 0;
}

uint32_t RenderingShaderContainer::_to_bytes_footer_extra_data(uint8_t *) const {
	return 0;
}

// Hook invoked at the end of set_from_shader_reflection() so subclasses can
// derive additional state from the reflection; the base class does nothing.
void RenderingShaderContainer::_set_from_shader_reflection_post(const RenderingDeviceCommons::ShaderReflection &p_reflection) {
	// Do nothing.
}
  101. Error RenderingShaderContainer::reflect_spirv(const String &p_shader_name, Span<RenderingDeviceCommons::ShaderStageSPIRVData> p_spirv, LocalVector<ReflectedShaderStage> &r_refl) {
  102. using RDC = RenderingDeviceCommons;
  103. RDC::ShaderReflection reflection;
  104. const uint32_t spirv_size = p_spirv.size() + 0;
  105. r_refl.resize(spirv_size);
  106. for (uint32_t i = 0; i < spirv_size; i++) {
  107. RDC::ShaderStage stage = p_spirv[i].shader_stage;
  108. RDC::ShaderStage stage_flag = (RDC::ShaderStage)(1 << p_spirv[i].shader_stage);
  109. r_refl[i].shader_stage = p_spirv[i].shader_stage;
  110. r_refl[i]._spirv_data = p_spirv[i].spirv;
  111. if (p_spirv[i].shader_stage == RDC::SHADER_STAGE_COMPUTE) {
  112. reflection.is_compute = true;
  113. ERR_FAIL_COND_V_MSG(spirv_size != 1, FAILED,
  114. "Compute shaders can only receive one stage, dedicated to compute.");
  115. }
  116. ERR_FAIL_COND_V_MSG(reflection.stages_bits.has_flag(stage_flag), FAILED,
  117. "Stage " + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + " submitted more than once.");
  118. {
  119. SpvReflectShaderModule &module = *r_refl.ptr()[i]._module;
  120. const uint8_t *spirv = p_spirv[i].spirv.ptr();
  121. SpvReflectResult result = spvReflectCreateShaderModule2(SPV_REFLECT_MODULE_FLAG_NO_COPY, p_spirv[i].spirv.size(), spirv, &module);
  122. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  123. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed parsing shader.");
  124. for (uint32_t j = 0; j < module.capability_count; j++) {
  125. if (module.capabilities[j].value == SpvCapabilityMultiView) {
  126. reflection.has_multiview = true;
  127. break;
  128. }
  129. }
  130. if (reflection.is_compute) {
  131. reflection.compute_local_size[0] = module.entry_points->local_size.x;
  132. reflection.compute_local_size[1] = module.entry_points->local_size.y;
  133. reflection.compute_local_size[2] = module.entry_points->local_size.z;
  134. }
  135. uint32_t binding_count = 0;
  136. result = spvReflectEnumerateDescriptorBindings(&module, &binding_count, nullptr);
  137. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  138. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed enumerating descriptor bindings.");
  139. if (binding_count > 0) {
  140. // Parse bindings.
  141. Vector<SpvReflectDescriptorBinding *> bindings;
  142. bindings.resize(binding_count);
  143. result = spvReflectEnumerateDescriptorBindings(&module, &binding_count, bindings.ptrw());
  144. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  145. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed getting descriptor bindings.");
  146. for (uint32_t j = 0; j < binding_count; j++) {
  147. const SpvReflectDescriptorBinding &binding = *bindings[j];
  148. RDC::ShaderUniform uniform;
  149. bool need_array_dimensions = false;
  150. bool need_block_size = false;
  151. bool may_be_writable = false;
  152. switch (binding.descriptor_type) {
  153. case SPV_REFLECT_DESCRIPTOR_TYPE_SAMPLER: {
  154. uniform.type = RDC::UNIFORM_TYPE_SAMPLER;
  155. need_array_dimensions = true;
  156. } break;
  157. case SPV_REFLECT_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
  158. uniform.type = RDC::UNIFORM_TYPE_SAMPLER_WITH_TEXTURE;
  159. need_array_dimensions = true;
  160. } break;
  161. case SPV_REFLECT_DESCRIPTOR_TYPE_SAMPLED_IMAGE: {
  162. uniform.type = RDC::UNIFORM_TYPE_TEXTURE;
  163. need_array_dimensions = true;
  164. } break;
  165. case SPV_REFLECT_DESCRIPTOR_TYPE_STORAGE_IMAGE: {
  166. uniform.type = RDC::UNIFORM_TYPE_IMAGE;
  167. need_array_dimensions = true;
  168. may_be_writable = true;
  169. } break;
  170. case SPV_REFLECT_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: {
  171. uniform.type = RDC::UNIFORM_TYPE_TEXTURE_BUFFER;
  172. need_array_dimensions = true;
  173. } break;
  174. case SPV_REFLECT_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
  175. uniform.type = RDC::UNIFORM_TYPE_IMAGE_BUFFER;
  176. need_array_dimensions = true;
  177. may_be_writable = true;
  178. } break;
  179. case SPV_REFLECT_DESCRIPTOR_TYPE_UNIFORM_BUFFER: {
  180. uniform.type = RDC::UNIFORM_TYPE_UNIFORM_BUFFER;
  181. need_block_size = true;
  182. } break;
  183. case SPV_REFLECT_DESCRIPTOR_TYPE_STORAGE_BUFFER: {
  184. uniform.type = RDC::UNIFORM_TYPE_STORAGE_BUFFER;
  185. need_block_size = true;
  186. may_be_writable = true;
  187. } break;
  188. case SPV_REFLECT_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: {
  189. ERR_PRINT("Dynamic uniform buffer not supported.");
  190. continue;
  191. } break;
  192. case SPV_REFLECT_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
  193. ERR_PRINT("Dynamic storage buffer not supported.");
  194. continue;
  195. } break;
  196. case SPV_REFLECT_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
  197. uniform.type = RDC::UNIFORM_TYPE_INPUT_ATTACHMENT;
  198. need_array_dimensions = true;
  199. } break;
  200. case SPV_REFLECT_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: {
  201. ERR_PRINT("Acceleration structure not supported.");
  202. continue;
  203. } break;
  204. }
  205. if (need_array_dimensions) {
  206. if (binding.array.dims_count == 0) {
  207. uniform.length = 1;
  208. } else {
  209. for (uint32_t k = 0; k < binding.array.dims_count; k++) {
  210. if (k == 0) {
  211. uniform.length = binding.array.dims[0];
  212. } else {
  213. uniform.length *= binding.array.dims[k];
  214. }
  215. }
  216. }
  217. } else if (need_block_size) {
  218. uniform.length = binding.block.size;
  219. } else {
  220. uniform.length = 0;
  221. }
  222. if (may_be_writable) {
  223. if (binding.descriptor_type == SPV_REFLECT_DESCRIPTOR_TYPE_STORAGE_IMAGE) {
  224. uniform.writable = !(binding.decoration_flags & SPV_REFLECT_DECORATION_NON_WRITABLE);
  225. } else {
  226. uniform.writable = !(binding.decoration_flags & SPV_REFLECT_DECORATION_NON_WRITABLE) && !(binding.block.decoration_flags & SPV_REFLECT_DECORATION_NON_WRITABLE);
  227. }
  228. } else {
  229. uniform.writable = false;
  230. }
  231. uniform.binding = binding.binding;
  232. uint32_t set = binding.set;
  233. ERR_FAIL_COND_V_MSG(set >= RDC::MAX_UNIFORM_SETS, FAILED,
  234. "On shader stage '" + String(RDC::SHADER_STAGE_NAMES[stage]) + "', uniform '" + binding.name + "' uses a set (" + itos(set) + ") index larger than what is supported (" + itos(RDC::MAX_UNIFORM_SETS) + ").");
  235. if (set < (uint32_t)reflection.uniform_sets.size()) {
  236. // Check if this already exists.
  237. bool exists = false;
  238. for (int k = 0; k < reflection.uniform_sets[set].size(); k++) {
  239. if (reflection.uniform_sets[set][k].binding == uniform.binding) {
  240. // Already exists, verify that it's the same type.
  241. ERR_FAIL_COND_V_MSG(reflection.uniform_sets[set][k].type != uniform.type, FAILED,
  242. "On shader stage '" + String(RDC::SHADER_STAGE_NAMES[stage]) + "', uniform '" + binding.name + "' trying to reuse location for set=" + itos(set) + ", binding=" + itos(uniform.binding) + " with different uniform type.");
  243. // Also, verify that it's the same size.
  244. ERR_FAIL_COND_V_MSG(reflection.uniform_sets[set][k].length != uniform.length, FAILED,
  245. "On shader stage '" + String(RDC::SHADER_STAGE_NAMES[stage]) + "', uniform '" + binding.name + "' trying to reuse location for set=" + itos(set) + ", binding=" + itos(uniform.binding) + " with different uniform size.");
  246. // Also, verify that it has the same writability.
  247. ERR_FAIL_COND_V_MSG(reflection.uniform_sets[set][k].writable != uniform.writable, FAILED,
  248. "On shader stage '" + String(RDC::SHADER_STAGE_NAMES[stage]) + "', uniform '" + binding.name + "' trying to reuse location for set=" + itos(set) + ", binding=" + itos(uniform.binding) + " with different writability.");
  249. // Just append stage mask and return.
  250. reflection.uniform_sets.write[set].write[k].stages.set_flag(stage_flag);
  251. exists = true;
  252. break;
  253. }
  254. }
  255. if (exists) {
  256. continue; // Merged.
  257. }
  258. }
  259. uniform.stages.set_flag(stage_flag);
  260. if (set >= (uint32_t)reflection.uniform_sets.size()) {
  261. reflection.uniform_sets.resize(set + 1);
  262. }
  263. reflection.uniform_sets.write[set].push_back(uniform);
  264. }
  265. }
  266. {
  267. // Specialization constants.
  268. uint32_t sc_count = 0;
  269. result = spvReflectEnumerateSpecializationConstants(&module, &sc_count, nullptr);
  270. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  271. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed enumerating specialization constants.");
  272. if (sc_count) {
  273. Vector<SpvReflectSpecializationConstant *> spec_constants;
  274. spec_constants.resize(sc_count);
  275. result = spvReflectEnumerateSpecializationConstants(&module, &sc_count, spec_constants.ptrw());
  276. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  277. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed obtaining specialization constants.");
  278. for (uint32_t j = 0; j < sc_count; j++) {
  279. int32_t existing = -1;
  280. RDC::ShaderSpecializationConstant sconst;
  281. SpvReflectSpecializationConstant *spc = spec_constants[j];
  282. sconst.constant_id = spc->constant_id;
  283. sconst.int_value = 0; // Clear previous value JIC.
  284. switch (spc->constant_type) {
  285. case SPV_REFLECT_SPECIALIZATION_CONSTANT_BOOL: {
  286. sconst.type = RDC::PIPELINE_SPECIALIZATION_CONSTANT_TYPE_BOOL;
  287. sconst.bool_value = spc->default_value.int_bool_value != 0;
  288. } break;
  289. case SPV_REFLECT_SPECIALIZATION_CONSTANT_INT: {
  290. sconst.type = RDC::PIPELINE_SPECIALIZATION_CONSTANT_TYPE_INT;
  291. sconst.int_value = spc->default_value.int_bool_value;
  292. } break;
  293. case SPV_REFLECT_SPECIALIZATION_CONSTANT_FLOAT: {
  294. sconst.type = RDC::PIPELINE_SPECIALIZATION_CONSTANT_TYPE_FLOAT;
  295. sconst.float_value = spc->default_value.float_value;
  296. } break;
  297. }
  298. sconst.stages.set_flag(stage_flag);
  299. for (int k = 0; k < reflection.specialization_constants.size(); k++) {
  300. if (reflection.specialization_constants[k].constant_id == sconst.constant_id) {
  301. ERR_FAIL_COND_V_MSG(reflection.specialization_constants[k].type != sconst.type, FAILED, "More than one specialization constant used for id (" + itos(sconst.constant_id) + "), but their types differ.");
  302. ERR_FAIL_COND_V_MSG(reflection.specialization_constants[k].int_value != sconst.int_value, FAILED, "More than one specialization constant used for id (" + itos(sconst.constant_id) + "), but their default values differ.");
  303. existing = k;
  304. break;
  305. }
  306. }
  307. if (existing >= 0) {
  308. reflection.specialization_constants.write[existing].stages.set_flag(stage_flag);
  309. } else {
  310. reflection.specialization_constants.push_back(sconst);
  311. }
  312. }
  313. reflection.specialization_constants.sort();
  314. }
  315. }
  316. if (stage == RDC::SHADER_STAGE_VERTEX || stage == RDC::SHADER_STAGE_FRAGMENT) {
  317. uint32_t iv_count = 0;
  318. result = spvReflectEnumerateInputVariables(&module, &iv_count, nullptr);
  319. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  320. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed enumerating input variables.");
  321. if (iv_count) {
  322. Vector<SpvReflectInterfaceVariable *> input_vars;
  323. input_vars.resize(iv_count);
  324. result = spvReflectEnumerateInputVariables(&module, &iv_count, input_vars.ptrw());
  325. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  326. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed obtaining input variables.");
  327. for (const SpvReflectInterfaceVariable *v : input_vars) {
  328. if (!v) {
  329. continue;
  330. }
  331. if (stage == RDC::SHADER_STAGE_VERTEX) {
  332. if (v->decoration_flags == 0) { // Regular input.
  333. reflection.vertex_input_mask |= (((uint64_t)1) << v->location);
  334. }
  335. }
  336. if (v->built_in == SpvBuiltInViewIndex) {
  337. reflection.has_multiview = true;
  338. }
  339. }
  340. }
  341. }
  342. if (stage == RDC::SHADER_STAGE_FRAGMENT) {
  343. uint32_t ov_count = 0;
  344. result = spvReflectEnumerateOutputVariables(&module, &ov_count, nullptr);
  345. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  346. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed enumerating output variables.");
  347. if (ov_count) {
  348. Vector<SpvReflectInterfaceVariable *> output_vars;
  349. output_vars.resize(ov_count);
  350. result = spvReflectEnumerateOutputVariables(&module, &ov_count, output_vars.ptrw());
  351. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  352. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed obtaining output variables.");
  353. for (const SpvReflectInterfaceVariable *refvar : output_vars) {
  354. if (!refvar) {
  355. continue;
  356. }
  357. if (refvar->built_in != SpvBuiltInFragDepth) {
  358. reflection.fragment_output_mask |= 1 << refvar->location;
  359. }
  360. }
  361. }
  362. }
  363. uint32_t pc_count = 0;
  364. result = spvReflectEnumeratePushConstantBlocks(&module, &pc_count, nullptr);
  365. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  366. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed enumerating push constants.");
  367. if (pc_count) {
  368. ERR_FAIL_COND_V_MSG(pc_count > 1, FAILED,
  369. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "': Only one push constant is supported, which should be the same across shader stages.");
  370. Vector<SpvReflectBlockVariable *> pconstants;
  371. pconstants.resize(pc_count);
  372. result = spvReflectEnumeratePushConstantBlocks(&module, &pc_count, pconstants.ptrw());
  373. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  374. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed obtaining push constants.");
  375. #if 0
  376. if (pconstants[0] == nullptr) {
  377. Ref<FileAccess> f = FileAccess::open("res://popo.spv", FileAccess::WRITE);
  378. f->store_buffer((const uint8_t *)&SpirV[0], SpirV.size() * sizeof(uint32_t));
  379. }
  380. #endif
  381. ERR_FAIL_COND_V_MSG(reflection.push_constant_size && reflection.push_constant_size != pconstants[0]->size, FAILED,
  382. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "': Push constant block must be the same across shader stages.");
  383. reflection.push_constant_size = pconstants[0]->size;
  384. reflection.push_constant_stages.set_flag(stage_flag);
  385. //print_line("Stage: " + String(RDC::SHADER_STAGE_NAMES[stage]) + " push constant of size=" + itos(push_constant.push_constant_size));
  386. }
  387. }
  388. reflection.stages_bits.set_flag(stage_flag);
  389. }
  390. // Sort all uniform_sets by binding.
  391. for (uint32_t i = 0; i < reflection.uniform_sets.size(); i++) {
  392. reflection.uniform_sets.write[i].sort();
  393. }
  394. set_from_shader_reflection(reflection);
  395. return OK;
  396. }
  397. void RenderingShaderContainer::set_from_shader_reflection(const RenderingDeviceCommons::ShaderReflection &p_reflection) {
  398. reflection_binding_set_uniforms_count.clear();
  399. reflection_binding_set_uniforms_data.clear();
  400. reflection_specialization_data.clear();
  401. reflection_shader_stages.clear();
  402. reflection_data.vertex_input_mask = p_reflection.vertex_input_mask;
  403. reflection_data.fragment_output_mask = p_reflection.fragment_output_mask;
  404. reflection_data.specialization_constants_count = p_reflection.specialization_constants.size();
  405. reflection_data.is_compute = p_reflection.is_compute;
  406. reflection_data.has_multiview = p_reflection.has_multiview;
  407. reflection_data.compute_local_size[0] = p_reflection.compute_local_size[0];
  408. reflection_data.compute_local_size[1] = p_reflection.compute_local_size[1];
  409. reflection_data.compute_local_size[2] = p_reflection.compute_local_size[2];
  410. reflection_data.set_count = p_reflection.uniform_sets.size();
  411. reflection_data.push_constant_size = p_reflection.push_constant_size;
  412. reflection_data.push_constant_stages_mask = uint32_t(p_reflection.push_constant_stages);
  413. reflection_data.shader_name_len = shader_name.length();
  414. ReflectionBindingData binding_data;
  415. for (const Vector<RenderingDeviceCommons::ShaderUniform> &uniform_set : p_reflection.uniform_sets) {
  416. for (const RenderingDeviceCommons::ShaderUniform &uniform : uniform_set) {
  417. binding_data.type = uint32_t(uniform.type);
  418. binding_data.binding = uniform.binding;
  419. binding_data.stages = uint32_t(uniform.stages);
  420. binding_data.length = uniform.length;
  421. binding_data.writable = uint32_t(uniform.writable);
  422. reflection_binding_set_uniforms_data.push_back(binding_data);
  423. }
  424. reflection_binding_set_uniforms_count.push_back(uniform_set.size());
  425. }
  426. ReflectionSpecializationData specialization_data;
  427. for (const RenderingDeviceCommons::ShaderSpecializationConstant &spec : p_reflection.specialization_constants) {
  428. specialization_data.type = uint32_t(spec.type);
  429. specialization_data.constant_id = spec.constant_id;
  430. specialization_data.int_value = spec.int_value;
  431. specialization_data.stage_flags = uint32_t(spec.stages);
  432. reflection_specialization_data.push_back(specialization_data);
  433. }
  434. for (uint32_t i = 0; i < RenderingDeviceCommons::SHADER_STAGE_MAX; i++) {
  435. if (p_reflection.stages_bits.has_flag(RenderingDeviceCommons::ShaderStage(1U << i))) {
  436. reflection_shader_stages.push_back(RenderingDeviceCommons::ShaderStage(i));
  437. }
  438. }
  439. reflection_data.stage_count = reflection_shader_stages.size();
  440. _set_from_shader_reflection_post(p_reflection);
  441. }
  442. bool RenderingShaderContainer::set_code_from_spirv(const String &p_shader_name, Span<RenderingDeviceCommons::ShaderStageSPIRVData> p_spirv) {
  443. LocalVector<ReflectedShaderStage> spirv;
  444. ERR_FAIL_COND_V(reflect_spirv(p_shader_name, p_spirv, spirv) != OK, false);
  445. return _set_code_from_spirv(spirv.span());
  446. }
  447. RenderingDeviceCommons::ShaderReflection RenderingShaderContainer::get_shader_reflection() const {
  448. RenderingDeviceCommons::ShaderReflection shader_refl;
  449. shader_refl.push_constant_size = reflection_data.push_constant_size;
  450. shader_refl.push_constant_stages = reflection_data.push_constant_stages_mask;
  451. shader_refl.vertex_input_mask = reflection_data.vertex_input_mask;
  452. shader_refl.fragment_output_mask = reflection_data.fragment_output_mask;
  453. shader_refl.is_compute = reflection_data.is_compute;
  454. shader_refl.has_multiview = reflection_data.has_multiview;
  455. shader_refl.compute_local_size[0] = reflection_data.compute_local_size[0];
  456. shader_refl.compute_local_size[1] = reflection_data.compute_local_size[1];
  457. shader_refl.compute_local_size[2] = reflection_data.compute_local_size[2];
  458. shader_refl.uniform_sets.resize(reflection_data.set_count);
  459. shader_refl.specialization_constants.resize(reflection_data.specialization_constants_count);
  460. shader_refl.stages_vector.resize(reflection_data.stage_count);
  461. DEV_ASSERT(reflection_binding_set_uniforms_count.size() == reflection_data.set_count && "The amount of elements in the reflection and the shader container can't be different.");
  462. uint32_t uniform_index = 0;
  463. for (uint32_t i = 0; i < reflection_data.set_count; i++) {
  464. Vector<RenderingDeviceCommons::ShaderUniform> &uniform_set = shader_refl.uniform_sets.ptrw()[i];
  465. uint32_t uniforms_count = reflection_binding_set_uniforms_count[i];
  466. uniform_set.resize(uniforms_count);
  467. for (uint32_t j = 0; j < uniforms_count; j++) {
  468. const ReflectionBindingData &binding = reflection_binding_set_uniforms_data[uniform_index++];
  469. RenderingDeviceCommons::ShaderUniform &uniform = uniform_set.ptrw()[j];
  470. uniform.type = RenderingDeviceCommons::UniformType(binding.type);
  471. uniform.writable = binding.writable;
  472. uniform.length = binding.length;
  473. uniform.binding = binding.binding;
  474. uniform.stages = binding.stages;
  475. }
  476. }
  477. shader_refl.specialization_constants.resize(reflection_data.specialization_constants_count);
  478. for (uint32_t i = 0; i < reflection_data.specialization_constants_count; i++) {
  479. const ReflectionSpecializationData &spec = reflection_specialization_data[i];
  480. RenderingDeviceCommons::ShaderSpecializationConstant &sc = shader_refl.specialization_constants.ptrw()[i];
  481. sc.type = RenderingDeviceCommons::PipelineSpecializationConstantType(spec.type);
  482. sc.constant_id = spec.constant_id;
  483. sc.int_value = spec.int_value;
  484. sc.stages = spec.stage_flags;
  485. }
  486. shader_refl.stages_vector.resize(reflection_data.stage_count);
  487. for (uint32_t i = 0; i < reflection_data.stage_count; i++) {
  488. shader_refl.stages_vector.set(i, reflection_shader_stages[i]);
  489. shader_refl.stages_bits.set_flag(RenderingDeviceCommons::ShaderStage(1U << reflection_shader_stages[i]));
  490. }
  491. return shader_refl;
  492. }
  493. bool RenderingShaderContainer::from_bytes(const PackedByteArray &p_bytes) {
  494. const uint64_t alignment = sizeof(uint32_t);
  495. const uint8_t *bytes_ptr = p_bytes.ptr();
  496. uint64_t bytes_offset = 0;
  497. // Read container header.
  498. ERR_FAIL_COND_V_MSG(int64_t(bytes_offset + sizeof(ContainerHeader)) > p_bytes.size(), false, "Not enough bytes for a container header in shader container.");
  499. const ContainerHeader &container_header = *(const ContainerHeader *)(&bytes_ptr[bytes_offset]);
  500. bytes_offset += sizeof(ContainerHeader);
  501. bytes_offset += _from_bytes_header_extra_data(&bytes_ptr[bytes_offset]);
  502. ERR_FAIL_COND_V_MSG(container_header.magic_number != CONTAINER_MAGIC_NUMBER, false, "Incorrect magic number in shader container.");
  503. ERR_FAIL_COND_V_MSG(container_header.version > CONTAINER_VERSION, false, "Unsupported version in shader container.");
  504. ERR_FAIL_COND_V_MSG(container_header.format != _format(), false, "Incorrect format in shader container.");
  505. ERR_FAIL_COND_V_MSG(container_header.format_version > _format_version(), false, "Unsupported format version in shader container.");
  506. // Adjust shaders to the size indicated by the container header.
  507. shaders.resize(container_header.shader_count);
  508. // Read reflection data.
  509. ERR_FAIL_COND_V_MSG(int64_t(bytes_offset + sizeof(ReflectionData)) > p_bytes.size(), false, "Not enough bytes for reflection data in shader container.");
  510. reflection_data = *(const ReflectionData *)(&bytes_ptr[bytes_offset]);
  511. bytes_offset += sizeof(ReflectionData);
  512. bytes_offset += _from_bytes_reflection_extra_data(&bytes_ptr[bytes_offset]);
  513. // Read shader name.
  514. ERR_FAIL_COND_V_MSG(int64_t(bytes_offset + reflection_data.shader_name_len) > p_bytes.size(), false, "Not enough bytes for shader name in shader container.");
  515. if (reflection_data.shader_name_len > 0) {
  516. String shader_name_str;
  517. shader_name_str.append_utf8((const char *)(&bytes_ptr[bytes_offset]), reflection_data.shader_name_len);
  518. shader_name = shader_name_str.utf8();
  519. bytes_offset = aligned_to(bytes_offset + reflection_data.shader_name_len, alignment);
  520. } else {
  521. shader_name = CharString();
  522. }
  523. reflection_binding_set_uniforms_count.resize(reflection_data.set_count);
  524. reflection_binding_set_uniforms_data.clear();
  525. uint32_t uniform_index = 0;
  526. for (uint32_t i = 0; i < reflection_data.set_count; i++) {
  527. ERR_FAIL_COND_V_MSG(int64_t(bytes_offset + sizeof(uint32_t)) > p_bytes.size(), false, "Not enough bytes for uniform set count in shader container.");
  528. uint32_t uniforms_count = *(uint32_t *)(&bytes_ptr[bytes_offset]);
  529. reflection_binding_set_uniforms_count.ptrw()[i] = uniforms_count;
  530. bytes_offset += sizeof(uint32_t);
  531. reflection_binding_set_uniforms_data.resize(reflection_binding_set_uniforms_data.size() + uniforms_count);
  532. bytes_offset += _from_bytes_reflection_binding_uniform_extra_data_start(&bytes_ptr[bytes_offset]);
  533. for (uint32_t j = 0; j < uniforms_count; j++) {
  534. ERR_FAIL_COND_V_MSG(int64_t(bytes_offset + sizeof(ReflectionBindingData)) > p_bytes.size(), false, "Not enough bytes for uniform in shader container.");
  535. memcpy(&reflection_binding_set_uniforms_data.ptrw()[uniform_index], &bytes_ptr[bytes_offset], sizeof(ReflectionBindingData));
  536. bytes_offset += sizeof(ReflectionBindingData);
  537. bytes_offset += _from_bytes_reflection_binding_uniform_extra_data(&bytes_ptr[bytes_offset], uniform_index);
  538. uniform_index++;
  539. }
  540. }
  541. reflection_specialization_data.resize(reflection_data.specialization_constants_count);
  542. bytes_offset += _from_bytes_reflection_specialization_extra_data_start(&bytes_ptr[bytes_offset]);
  543. for (uint32_t i = 0; i < reflection_data.specialization_constants_count; i++) {
  544. ERR_FAIL_COND_V_MSG(int64_t(bytes_offset + sizeof(ReflectionSpecializationData)) > p_bytes.size(), false, "Not enough bytes for specialization in shader container.");
  545. memcpy(&reflection_specialization_data.ptrw()[i], &bytes_ptr[bytes_offset], sizeof(ReflectionSpecializationData));
  546. bytes_offset += sizeof(ReflectionSpecializationData);
  547. bytes_offset += _from_bytes_reflection_specialization_extra_data(&bytes_ptr[bytes_offset], i);
  548. }
  549. const uint32_t stage_count = reflection_data.stage_count;
  550. if (stage_count > 0) {
  551. ERR_FAIL_COND_V_MSG(int64_t(bytes_offset + stage_count * sizeof(RenderingDeviceCommons::ShaderStage)) > p_bytes.size(), false, "Not enough bytes for stages in shader container.");
  552. reflection_shader_stages.resize(stage_count);
  553. bytes_offset += _from_bytes_shader_extra_data_start(&bytes_ptr[bytes_offset]);
  554. memcpy(reflection_shader_stages.ptrw(), &bytes_ptr[bytes_offset], stage_count * sizeof(RenderingDeviceCommons::ShaderStage));
  555. bytes_offset += stage_count * sizeof(RenderingDeviceCommons::ShaderStage);
  556. }
  557. // Read shaders.
  558. for (int64_t i = 0; i < shaders.size(); i++) {
  559. ERR_FAIL_COND_V_MSG(int64_t(bytes_offset + sizeof(ShaderHeader)) > p_bytes.size(), false, "Not enough bytes for shader header in shader container.");
  560. const ShaderHeader &header = *(const ShaderHeader *)(&bytes_ptr[bytes_offset]);
  561. bytes_offset += sizeof(ShaderHeader);
  562. ERR_FAIL_COND_V_MSG(int64_t(bytes_offset + header.code_compressed_size) > p_bytes.size(), false, "Not enough bytes for a shader in shader container.");
  563. Shader &shader = shaders.ptrw()[i];
  564. shader.shader_stage = RenderingDeviceCommons::ShaderStage(header.shader_stage);
  565. shader.code_compression_flags = header.code_compression_flags;
  566. shader.code_decompressed_size = header.code_decompressed_size;
  567. shader.code_compressed_bytes.resize(header.code_compressed_size);
  568. memcpy(shader.code_compressed_bytes.ptrw(), &bytes_ptr[bytes_offset], header.code_compressed_size);
  569. bytes_offset = aligned_to(bytes_offset + header.code_compressed_size, alignment);
  570. bytes_offset += _from_bytes_shader_extra_data(&bytes_ptr[bytes_offset], i);
  571. }
  572. bytes_offset += _from_bytes_footer_extra_data(&bytes_ptr[bytes_offset]);
  573. ERR_FAIL_COND_V_MSG(bytes_offset != (uint64_t)p_bytes.size(), false, "Amount of bytes in the container does not match the amount of bytes read.");
  574. return true;
  575. }
  576. PackedByteArray RenderingShaderContainer::to_bytes() const {
  577. // Compute the exact size the container will require for writing everything out.
  578. const uint64_t alignment = sizeof(uint32_t);
  579. uint64_t total_size = 0;
  580. total_size += sizeof(ContainerHeader) + _to_bytes_header_extra_data(nullptr);
  581. total_size += sizeof(ReflectionData) + _to_bytes_reflection_extra_data(nullptr);
  582. total_size += aligned_to(reflection_data.shader_name_len, alignment);
  583. total_size += reflection_binding_set_uniforms_count.size() * sizeof(uint32_t);
  584. total_size += reflection_binding_set_uniforms_data.size() * sizeof(ReflectionBindingData);
  585. total_size += reflection_specialization_data.size() * sizeof(ReflectionSpecializationData);
  586. total_size += reflection_shader_stages.size() * sizeof(RenderingDeviceCommons::ShaderStage);
  587. for (uint32_t i = 0; i < reflection_binding_set_uniforms_data.size(); i++) {
  588. total_size += _to_bytes_reflection_binding_uniform_extra_data(nullptr, i);
  589. }
  590. for (uint32_t i = 0; i < reflection_specialization_data.size(); i++) {
  591. total_size += _to_bytes_reflection_specialization_extra_data(nullptr, i);
  592. }
  593. for (uint32_t i = 0; i < shaders.size(); i++) {
  594. total_size += sizeof(ShaderHeader);
  595. total_size += shaders[i].code_compressed_bytes.size();
  596. total_size = aligned_to(total_size, alignment);
  597. total_size += _to_bytes_shader_extra_data(nullptr, i);
  598. }
  599. total_size += _to_bytes_footer_extra_data(nullptr);
  600. // Create the array that will hold all of the data.
  601. PackedByteArray bytes;
  602. bytes.resize_initialized(total_size);
  603. // Write out the data to the array.
  604. uint64_t bytes_offset = 0;
  605. uint8_t *bytes_ptr = bytes.ptrw();
  606. ContainerHeader &container_header = *(ContainerHeader *)(&bytes_ptr[bytes_offset]);
  607. container_header.magic_number = CONTAINER_MAGIC_NUMBER;
  608. container_header.version = CONTAINER_VERSION;
  609. container_header.format = _format();
  610. container_header.format_version = _format_version();
  611. container_header.shader_count = shaders.size();
  612. bytes_offset += sizeof(ContainerHeader);
  613. bytes_offset += _to_bytes_header_extra_data(&bytes_ptr[bytes_offset]);
  614. memcpy(&bytes_ptr[bytes_offset], &reflection_data, sizeof(ReflectionData));
  615. bytes_offset += sizeof(ReflectionData);
  616. bytes_offset += _to_bytes_reflection_extra_data(&bytes_ptr[bytes_offset]);
  617. if (shader_name.size() > 0) {
  618. memcpy(&bytes_ptr[bytes_offset], shader_name.ptr(), reflection_data.shader_name_len);
  619. bytes_offset = aligned_to(bytes_offset + reflection_data.shader_name_len, alignment);
  620. }
  621. uint32_t uniform_index = 0;
  622. for (uint32_t uniform_count : reflection_binding_set_uniforms_count) {
  623. memcpy(&bytes_ptr[bytes_offset], &uniform_count, sizeof(uniform_count));
  624. bytes_offset += sizeof(uint32_t);
  625. for (uint32_t i = 0; i < uniform_count; i++) {
  626. memcpy(&bytes_ptr[bytes_offset], &reflection_binding_set_uniforms_data[uniform_index], sizeof(ReflectionBindingData));
  627. bytes_offset += sizeof(ReflectionBindingData);
  628. bytes_offset += _to_bytes_reflection_binding_uniform_extra_data(&bytes_ptr[bytes_offset], uniform_index);
  629. uniform_index++;
  630. }
  631. }
  632. for (uint32_t i = 0; i < reflection_specialization_data.size(); i++) {
  633. memcpy(&bytes_ptr[bytes_offset], &reflection_specialization_data.ptr()[i], sizeof(ReflectionSpecializationData));
  634. bytes_offset += sizeof(ReflectionSpecializationData);
  635. bytes_offset += _to_bytes_reflection_specialization_extra_data(&bytes_ptr[bytes_offset], i);
  636. }
  637. if (!reflection_shader_stages.is_empty()) {
  638. uint32_t stage_count = reflection_shader_stages.size();
  639. memcpy(&bytes_ptr[bytes_offset], reflection_shader_stages.ptr(), stage_count * sizeof(RenderingDeviceCommons::ShaderStage));
  640. bytes_offset += stage_count * sizeof(RenderingDeviceCommons::ShaderStage);
  641. }
  642. for (uint32_t i = 0; i < shaders.size(); i++) {
  643. const Shader &shader = shaders[i];
  644. ShaderHeader &header = *(ShaderHeader *)(&bytes.ptr()[bytes_offset]);
  645. header.shader_stage = shader.shader_stage;
  646. header.code_compressed_size = uint32_t(shader.code_compressed_bytes.size());
  647. header.code_compression_flags = shader.code_compression_flags;
  648. header.code_decompressed_size = shader.code_decompressed_size;
  649. bytes_offset += sizeof(ShaderHeader);
  650. memcpy(&bytes.ptrw()[bytes_offset], shader.code_compressed_bytes.ptr(), shader.code_compressed_bytes.size());
  651. bytes_offset = aligned_to(bytes_offset + shader.code_compressed_bytes.size(), alignment);
  652. bytes_offset += _to_bytes_shader_extra_data(&bytes_ptr[bytes_offset], i);
  653. }
  654. bytes_offset += _to_bytes_footer_extra_data(&bytes_ptr[bytes_offset]);
  655. ERR_FAIL_COND_V_MSG(bytes_offset != total_size, PackedByteArray(), "Amount of bytes written does not match the amount of bytes reserved for the container.");
  656. return bytes;
  657. }
  658. bool RenderingShaderContainer::compress_code(const uint8_t *p_decompressed_bytes, uint32_t p_decompressed_size, uint8_t *p_compressed_bytes, uint32_t *r_compressed_size, uint32_t *r_compressed_flags) const {
  659. DEV_ASSERT(p_decompressed_bytes != nullptr);
  660. DEV_ASSERT(p_decompressed_size > 0);
  661. DEV_ASSERT(p_compressed_bytes != nullptr);
  662. DEV_ASSERT(r_compressed_size != nullptr);
  663. DEV_ASSERT(r_compressed_flags != nullptr);
  664. *r_compressed_flags = 0;
  665. PackedByteArray zstd_bytes;
  666. const int64_t zstd_max_bytes = Compression::get_max_compressed_buffer_size(p_decompressed_size, Compression::MODE_ZSTD);
  667. zstd_bytes.resize(zstd_max_bytes);
  668. const int64_t zstd_size = Compression::compress(zstd_bytes.ptrw(), p_decompressed_bytes, p_decompressed_size, Compression::MODE_ZSTD);
  669. if (zstd_size > 0 && (uint32_t)(zstd_size) < p_decompressed_size) {
  670. // Only choose Zstd if it results in actual compression.
  671. memcpy(p_compressed_bytes, zstd_bytes.ptr(), zstd_size);
  672. *r_compressed_size = zstd_size;
  673. *r_compressed_flags |= COMPRESSION_FLAG_ZSTD;
  674. } else {
  675. // Just copy the input to the output directly.
  676. memcpy(p_compressed_bytes, p_decompressed_bytes, p_decompressed_size);
  677. *r_compressed_size = p_decompressed_size;
  678. }
  679. return true;
  680. }
  681. bool RenderingShaderContainer::decompress_code(const uint8_t *p_compressed_bytes, uint32_t p_compressed_size, uint32_t p_compressed_flags, uint8_t *p_decompressed_bytes, uint32_t p_decompressed_size) const {
  682. DEV_ASSERT(p_compressed_bytes != nullptr);
  683. DEV_ASSERT(p_compressed_size > 0);
  684. DEV_ASSERT(p_decompressed_bytes != nullptr);
  685. DEV_ASSERT(p_decompressed_size > 0);
  686. bool uses_zstd = p_compressed_flags & COMPRESSION_FLAG_ZSTD;
  687. if (uses_zstd) {
  688. if (!Compression::decompress(p_decompressed_bytes, p_decompressed_size, p_compressed_bytes, p_compressed_size, Compression::MODE_ZSTD)) {
  689. ERR_FAIL_V_MSG(false, "Malformed zstd input for decompressing shader code.");
  690. }
  691. } else {
  692. memcpy(p_decompressed_bytes, p_compressed_bytes, MIN(p_compressed_size, p_decompressed_size));
  693. }
  694. return true;
  695. }
  696. RenderingShaderContainer::RenderingShaderContainer() {}
  697. RenderingShaderContainer::~RenderingShaderContainer() {}