rendering_shader_container.cpp 39 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
3704705706707708709710711712713714715716717718719720721722723724725726727728729730731732733734735736737738739740741742743744745746747748749750751752753754755756757758759760761762763764765766767768769770771772773774775776777778779780781782783784785786787788789790791792793794795796797798799800801802803804805806807808809810811812813814815816817818819820821822823824825826827828829830831832833834835836837838839840841842843844845846847848849850851
  1. /**************************************************************************/
  2. /* rendering_shader_container.cpp */
  3. /**************************************************************************/
  4. /* This file is part of: */
  5. /* GODOT ENGINE */
  6. /* https://godotengine.org */
  7. /**************************************************************************/
  8. /* Copyright (c) 2014-present Godot Engine contributors (see AUTHORS.md). */
  9. /* Copyright (c) 2007-2014 Juan Linietsky, Ariel Manzur. */
  10. /* */
  11. /* Permission is hereby granted, free of charge, to any person obtaining */
  12. /* a copy of this software and associated documentation files (the */
  13. /* "Software"), to deal in the Software without restriction, including */
  14. /* without limitation the rights to use, copy, modify, merge, publish, */
  15. /* distribute, sublicense, and/or sell copies of the Software, and to */
  16. /* permit persons to whom the Software is furnished to do so, subject to */
  17. /* the following conditions: */
  18. /* */
  19. /* The above copyright notice and this permission notice shall be */
  20. /* included in all copies or substantial portions of the Software. */
  21. /* */
  22. /* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
  23. /* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
  24. /* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. */
  25. /* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
  26. /* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
  27. /* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
  28. /* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
  29. /**************************************************************************/
  30. #include "rendering_shader_container.h"
  31. #include "core/io/compression.h"
  32. #include "servers/rendering/renderer_rd/shader_rd.h"
  33. #include "thirdparty/spirv-reflect/spirv_reflect.h"
  34. static inline uint32_t aligned_to(uint32_t p_size, uint32_t p_alignment) {
  35. if (p_size % p_alignment) {
  36. return p_size + (p_alignment - (p_size % p_alignment));
  37. } else {
  38. return p_size;
  39. }
  40. }
// Heap-allocates the SPIRV-Reflect module so the header only needs a forward
// declaration of SpvReflectShaderModule; released in the destructor.
RenderingShaderContainer::ReflectedShaderStage::ReflectedShaderStage() :
		_module(memnew(SpvReflectShaderModule)) {
}
// Destroys the reflection data owned by the SPIRV-Reflect module first, then
// frees the module storage allocated in the constructor.
RenderingShaderContainer::ReflectedShaderStage::~ReflectedShaderStage() {
	spvReflectDestroyShaderModule(_module);
	memdelete(_module);
}
// Read-only access to the parsed SPIRV-Reflect module for this stage.
const SpvReflectShaderModule &RenderingShaderContainer::ReflectedShaderStage::module() const {
	return *_module;
}
// Returns the stage's SPIR-V as a view of 32-bit words, reinterpreting the
// byte buffer stored in _spirv_data (no copy is made).
const Span<uint32_t> RenderingShaderContainer::ReflectedShaderStage::spirv() const {
	return _spirv_data.span().reinterpret<uint32_t>();
}
// Default implementations of the optional deserialization hooks invoked by
// from_bytes() at each section boundary. Backend-specific subclasses override
// these to consume extra data embedded in their container format; each hook
// returns the number of bytes it consumed. The base container stores no extra
// data, so every default consumes zero bytes.
uint32_t RenderingShaderContainer::_from_bytes_header_extra_data(const uint8_t *p_bytes) {
	return 0;
}

uint32_t RenderingShaderContainer::_from_bytes_reflection_extra_data(const uint8_t *p_bytes) {
	return 0;
}

uint32_t RenderingShaderContainer::_from_bytes_reflection_binding_uniform_extra_data_start(const uint8_t *p_bytes) {
	return 0;
}

uint32_t RenderingShaderContainer::_from_bytes_reflection_binding_uniform_extra_data(const uint8_t *p_bytes, uint32_t p_index) {
	return 0;
}

uint32_t RenderingShaderContainer::_from_bytes_reflection_specialization_extra_data_start(const uint8_t *p_bytes) {
	return 0;
}

uint32_t RenderingShaderContainer::_from_bytes_reflection_specialization_extra_data(const uint8_t *p_bytes, uint32_t p_index) {
	return 0;
}

uint32_t RenderingShaderContainer::_from_bytes_shader_extra_data_start(const uint8_t *p_bytes) {
	return 0;
}

uint32_t RenderingShaderContainer::_from_bytes_shader_extra_data(const uint8_t *p_bytes, uint32_t p_index) {
	return 0;
}

uint32_t RenderingShaderContainer::_from_bytes_footer_extra_data(const uint8_t *p_bytes) {
	return 0;
}
// Default implementations of the optional serialization hooks, the mirror of
// the _from_bytes_*_extra_data family. Each hook may write format-specific
// bytes at its section boundary and returns how many bytes it wrote. The base
// container emits no extra data, so every default writes zero bytes.
uint32_t RenderingShaderContainer::_to_bytes_header_extra_data(uint8_t *) const {
	return 0;
}

uint32_t RenderingShaderContainer::_to_bytes_reflection_extra_data(uint8_t *) const {
	return 0;
}

uint32_t RenderingShaderContainer::_to_bytes_reflection_binding_uniform_extra_data(uint8_t *, uint32_t) const {
	return 0;
}

uint32_t RenderingShaderContainer::_to_bytes_reflection_specialization_extra_data(uint8_t *, uint32_t) const {
	return 0;
}

uint32_t RenderingShaderContainer::_to_bytes_shader_extra_data(uint8_t *, uint32_t) const {
	return 0;
}

uint32_t RenderingShaderContainer::_to_bytes_footer_extra_data(uint8_t *) const {
	return 0;
}
// Hook invoked at the end of set_from_shader_reflection() so subclasses can
// derive backend-specific state from the reflection; the base class has none.
void RenderingShaderContainer::_set_from_shader_reflection_post(const RenderingDeviceCommons::ShaderReflection &p_reflection) {
	// Do nothing.
}
  102. Error RenderingShaderContainer::reflect_spirv(const String &p_shader_name, Span<RenderingDeviceCommons::ShaderStageSPIRVData> p_spirv, LocalVector<ReflectedShaderStage> &r_refl) {
  103. using RDC = RenderingDeviceCommons;
  104. RDC::ShaderReflection reflection;
  105. shader_name = p_shader_name.utf8();
  106. const uint32_t spirv_size = p_spirv.size() + 0;
  107. r_refl.resize(spirv_size);
  108. for (uint32_t i = 0; i < spirv_size; i++) {
  109. RDC::ShaderStage stage = p_spirv[i].shader_stage;
  110. RDC::ShaderStage stage_flag = (RDC::ShaderStage)(1 << p_spirv[i].shader_stage);
  111. r_refl[i].shader_stage = p_spirv[i].shader_stage;
  112. r_refl[i]._spirv_data = p_spirv[i].spirv;
  113. const Vector<uint64_t> &dynamic_buffers = p_spirv[i].dynamic_buffers;
  114. if (p_spirv[i].shader_stage == RDC::SHADER_STAGE_COMPUTE) {
  115. reflection.is_compute = true;
  116. ERR_FAIL_COND_V_MSG(spirv_size != 1, FAILED,
  117. "Compute shaders can only receive one stage, dedicated to compute.");
  118. }
  119. ERR_FAIL_COND_V_MSG(reflection.stages_bits.has_flag(stage_flag), FAILED,
  120. "Stage " + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + " submitted more than once.");
  121. {
  122. SpvReflectShaderModule &module = *r_refl.ptr()[i]._module;
  123. const uint8_t *spirv = p_spirv[i].spirv.ptr();
  124. SpvReflectResult result = spvReflectCreateShaderModule2(SPV_REFLECT_MODULE_FLAG_NO_COPY, p_spirv[i].spirv.size(), spirv, &module);
  125. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  126. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed parsing shader.");
  127. for (uint32_t j = 0; j < module.capability_count; j++) {
  128. if (module.capabilities[j].value == SpvCapabilityMultiView) {
  129. reflection.has_multiview = true;
  130. break;
  131. }
  132. }
  133. if (reflection.is_compute) {
  134. reflection.compute_local_size[0] = module.entry_points->local_size.x;
  135. reflection.compute_local_size[1] = module.entry_points->local_size.y;
  136. reflection.compute_local_size[2] = module.entry_points->local_size.z;
  137. }
  138. uint32_t binding_count = 0;
  139. result = spvReflectEnumerateDescriptorBindings(&module, &binding_count, nullptr);
  140. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  141. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed enumerating descriptor bindings.");
  142. if (binding_count > 0) {
  143. // Parse bindings.
  144. Vector<SpvReflectDescriptorBinding *> bindings;
  145. bindings.resize(binding_count);
  146. result = spvReflectEnumerateDescriptorBindings(&module, &binding_count, bindings.ptrw());
  147. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  148. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed getting descriptor bindings.");
  149. for (uint32_t j = 0; j < binding_count; j++) {
  150. const SpvReflectDescriptorBinding &binding = *bindings[j];
  151. RDC::ShaderUniform uniform;
  152. bool need_array_dimensions = false;
  153. bool need_block_size = false;
  154. bool may_be_writable = false;
  155. switch (binding.descriptor_type) {
  156. case SPV_REFLECT_DESCRIPTOR_TYPE_SAMPLER: {
  157. uniform.type = RDC::UNIFORM_TYPE_SAMPLER;
  158. need_array_dimensions = true;
  159. } break;
  160. case SPV_REFLECT_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER: {
  161. uniform.type = RDC::UNIFORM_TYPE_SAMPLER_WITH_TEXTURE;
  162. need_array_dimensions = true;
  163. } break;
  164. case SPV_REFLECT_DESCRIPTOR_TYPE_SAMPLED_IMAGE: {
  165. uniform.type = RDC::UNIFORM_TYPE_TEXTURE;
  166. need_array_dimensions = true;
  167. } break;
  168. case SPV_REFLECT_DESCRIPTOR_TYPE_STORAGE_IMAGE: {
  169. uniform.type = RDC::UNIFORM_TYPE_IMAGE;
  170. need_array_dimensions = true;
  171. may_be_writable = true;
  172. } break;
  173. case SPV_REFLECT_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER: {
  174. uniform.type = RDC::UNIFORM_TYPE_TEXTURE_BUFFER;
  175. need_array_dimensions = true;
  176. } break;
  177. case SPV_REFLECT_DESCRIPTOR_TYPE_STORAGE_TEXEL_BUFFER: {
  178. uniform.type = RDC::UNIFORM_TYPE_IMAGE_BUFFER;
  179. need_array_dimensions = true;
  180. may_be_writable = true;
  181. } break;
  182. case SPV_REFLECT_DESCRIPTOR_TYPE_UNIFORM_BUFFER: {
  183. const uint64_t key = ShaderRD::DynamicBuffer::encode(binding.set, binding.binding);
  184. if (dynamic_buffers.has(key)) {
  185. uniform.type = RDC::UNIFORM_TYPE_UNIFORM_BUFFER_DYNAMIC;
  186. reflection.has_dynamic_buffers = true;
  187. } else {
  188. uniform.type = RDC::UNIFORM_TYPE_UNIFORM_BUFFER;
  189. }
  190. need_block_size = true;
  191. } break;
  192. case SPV_REFLECT_DESCRIPTOR_TYPE_STORAGE_BUFFER: {
  193. const uint64_t key = ShaderRD::DynamicBuffer::encode(binding.set, binding.binding);
  194. if (dynamic_buffers.has(key)) {
  195. uniform.type = RDC::UNIFORM_TYPE_STORAGE_BUFFER_DYNAMIC;
  196. reflection.has_dynamic_buffers = true;
  197. } else {
  198. uniform.type = RDC::UNIFORM_TYPE_STORAGE_BUFFER;
  199. }
  200. need_block_size = true;
  201. may_be_writable = true;
  202. } break;
  203. case SPV_REFLECT_DESCRIPTOR_TYPE_UNIFORM_BUFFER_DYNAMIC: {
  204. ERR_PRINT("Dynamic uniform buffer not supported.");
  205. continue;
  206. } break;
  207. case SPV_REFLECT_DESCRIPTOR_TYPE_STORAGE_BUFFER_DYNAMIC: {
  208. ERR_PRINT("Dynamic storage buffer not supported.");
  209. continue;
  210. } break;
  211. case SPV_REFLECT_DESCRIPTOR_TYPE_INPUT_ATTACHMENT: {
  212. uniform.type = RDC::UNIFORM_TYPE_INPUT_ATTACHMENT;
  213. need_array_dimensions = true;
  214. } break;
  215. case SPV_REFLECT_DESCRIPTOR_TYPE_ACCELERATION_STRUCTURE_KHR: {
  216. ERR_PRINT("Acceleration structure not supported.");
  217. continue;
  218. } break;
  219. }
  220. if (need_array_dimensions) {
  221. if (binding.array.dims_count == 0) {
  222. uniform.length = 1;
  223. } else {
  224. for (uint32_t k = 0; k < binding.array.dims_count; k++) {
  225. if (k == 0) {
  226. uniform.length = binding.array.dims[0];
  227. } else {
  228. uniform.length *= binding.array.dims[k];
  229. }
  230. }
  231. }
  232. } else if (need_block_size) {
  233. uniform.length = binding.block.size;
  234. } else {
  235. uniform.length = 0;
  236. }
  237. if (may_be_writable) {
  238. if (binding.descriptor_type == SPV_REFLECT_DESCRIPTOR_TYPE_STORAGE_IMAGE) {
  239. uniform.writable = !(binding.decoration_flags & SPV_REFLECT_DECORATION_NON_WRITABLE);
  240. } else {
  241. uniform.writable = !(binding.decoration_flags & SPV_REFLECT_DECORATION_NON_WRITABLE) && !(binding.block.decoration_flags & SPV_REFLECT_DECORATION_NON_WRITABLE);
  242. }
  243. } else {
  244. uniform.writable = false;
  245. }
  246. uniform.binding = binding.binding;
  247. uint32_t set = binding.set;
  248. ERR_FAIL_COND_V_MSG(set >= RDC::MAX_UNIFORM_SETS, FAILED,
  249. "On shader stage '" + String(RDC::SHADER_STAGE_NAMES[stage]) + "', uniform '" + binding.name + "' uses a set (" + itos(set) + ") index larger than what is supported (" + itos(RDC::MAX_UNIFORM_SETS) + ").");
  250. if (set < (uint32_t)reflection.uniform_sets.size()) {
  251. // Check if this already exists.
  252. bool exists = false;
  253. for (int k = 0; k < reflection.uniform_sets[set].size(); k++) {
  254. if (reflection.uniform_sets[set][k].binding == uniform.binding) {
  255. // Already exists, verify that it's the same type.
  256. ERR_FAIL_COND_V_MSG(reflection.uniform_sets[set][k].type != uniform.type, FAILED,
  257. "On shader stage '" + String(RDC::SHADER_STAGE_NAMES[stage]) + "', uniform '" + binding.name + "' trying to reuse location for set=" + itos(set) + ", binding=" + itos(uniform.binding) + " with different uniform type.");
  258. // Also, verify that it's the same size.
  259. ERR_FAIL_COND_V_MSG(reflection.uniform_sets[set][k].length != uniform.length, FAILED,
  260. "On shader stage '" + String(RDC::SHADER_STAGE_NAMES[stage]) + "', uniform '" + binding.name + "' trying to reuse location for set=" + itos(set) + ", binding=" + itos(uniform.binding) + " with different uniform size.");
  261. // Also, verify that it has the same writability.
  262. ERR_FAIL_COND_V_MSG(reflection.uniform_sets[set][k].writable != uniform.writable, FAILED,
  263. "On shader stage '" + String(RDC::SHADER_STAGE_NAMES[stage]) + "', uniform '" + binding.name + "' trying to reuse location for set=" + itos(set) + ", binding=" + itos(uniform.binding) + " with different writability.");
  264. // Just append stage mask and return.
  265. reflection.uniform_sets.write[set].write[k].stages.set_flag(stage_flag);
  266. exists = true;
  267. break;
  268. }
  269. }
  270. if (exists) {
  271. continue; // Merged.
  272. }
  273. }
  274. uniform.stages.set_flag(stage_flag);
  275. if (set >= (uint32_t)reflection.uniform_sets.size()) {
  276. reflection.uniform_sets.resize(set + 1);
  277. }
  278. reflection.uniform_sets.write[set].push_back(uniform);
  279. }
  280. }
  281. {
  282. // Specialization constants.
  283. uint32_t sc_count = 0;
  284. result = spvReflectEnumerateSpecializationConstants(&module, &sc_count, nullptr);
  285. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  286. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed enumerating specialization constants.");
  287. if (sc_count) {
  288. Vector<SpvReflectSpecializationConstant *> spec_constants;
  289. spec_constants.resize(sc_count);
  290. result = spvReflectEnumerateSpecializationConstants(&module, &sc_count, spec_constants.ptrw());
  291. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  292. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed obtaining specialization constants.");
  293. for (uint32_t j = 0; j < sc_count; j++) {
  294. int32_t existing = -1;
  295. RDC::ShaderSpecializationConstant sconst;
  296. SpvReflectSpecializationConstant *spc = spec_constants[j];
  297. sconst.constant_id = spc->constant_id;
  298. sconst.int_value = 0; // Clear previous value JIC.
  299. switch (spc->constant_type) {
  300. case SPV_REFLECT_SPECIALIZATION_CONSTANT_BOOL: {
  301. sconst.type = RDC::PIPELINE_SPECIALIZATION_CONSTANT_TYPE_BOOL;
  302. sconst.bool_value = spc->default_value.int_bool_value != 0;
  303. } break;
  304. case SPV_REFLECT_SPECIALIZATION_CONSTANT_INT: {
  305. sconst.type = RDC::PIPELINE_SPECIALIZATION_CONSTANT_TYPE_INT;
  306. sconst.int_value = spc->default_value.int_bool_value;
  307. } break;
  308. case SPV_REFLECT_SPECIALIZATION_CONSTANT_FLOAT: {
  309. sconst.type = RDC::PIPELINE_SPECIALIZATION_CONSTANT_TYPE_FLOAT;
  310. sconst.float_value = spc->default_value.float_value;
  311. } break;
  312. }
  313. sconst.stages.set_flag(stage_flag);
  314. for (int k = 0; k < reflection.specialization_constants.size(); k++) {
  315. if (reflection.specialization_constants[k].constant_id == sconst.constant_id) {
  316. ERR_FAIL_COND_V_MSG(reflection.specialization_constants[k].type != sconst.type, FAILED, "More than one specialization constant used for id (" + itos(sconst.constant_id) + "), but their types differ.");
  317. ERR_FAIL_COND_V_MSG(reflection.specialization_constants[k].int_value != sconst.int_value, FAILED, "More than one specialization constant used for id (" + itos(sconst.constant_id) + "), but their default values differ.");
  318. existing = k;
  319. break;
  320. }
  321. }
  322. if (existing >= 0) {
  323. reflection.specialization_constants.write[existing].stages.set_flag(stage_flag);
  324. } else {
  325. reflection.specialization_constants.push_back(sconst);
  326. }
  327. }
  328. reflection.specialization_constants.sort();
  329. }
  330. }
  331. if (stage == RDC::SHADER_STAGE_VERTEX || stage == RDC::SHADER_STAGE_FRAGMENT) {
  332. uint32_t iv_count = 0;
  333. result = spvReflectEnumerateInputVariables(&module, &iv_count, nullptr);
  334. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  335. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed enumerating input variables.");
  336. if (iv_count) {
  337. Vector<SpvReflectInterfaceVariable *> input_vars;
  338. input_vars.resize(iv_count);
  339. result = spvReflectEnumerateInputVariables(&module, &iv_count, input_vars.ptrw());
  340. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  341. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed obtaining input variables.");
  342. for (const SpvReflectInterfaceVariable *v : input_vars) {
  343. if (!v) {
  344. continue;
  345. }
  346. if (stage == RDC::SHADER_STAGE_VERTEX) {
  347. if (v->decoration_flags == 0) { // Regular input.
  348. reflection.vertex_input_mask |= (((uint64_t)1) << v->location);
  349. }
  350. }
  351. if (v->built_in == SpvBuiltInViewIndex) {
  352. reflection.has_multiview = true;
  353. }
  354. }
  355. }
  356. }
  357. if (stage == RDC::SHADER_STAGE_FRAGMENT) {
  358. uint32_t ov_count = 0;
  359. result = spvReflectEnumerateOutputVariables(&module, &ov_count, nullptr);
  360. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  361. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed enumerating output variables.");
  362. if (ov_count) {
  363. Vector<SpvReflectInterfaceVariable *> output_vars;
  364. output_vars.resize(ov_count);
  365. result = spvReflectEnumerateOutputVariables(&module, &ov_count, output_vars.ptrw());
  366. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  367. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed obtaining output variables.");
  368. for (const SpvReflectInterfaceVariable *refvar : output_vars) {
  369. if (!refvar) {
  370. continue;
  371. }
  372. if (refvar->built_in != SpvBuiltInFragDepth) {
  373. reflection.fragment_output_mask |= 1 << refvar->location;
  374. }
  375. }
  376. }
  377. }
  378. uint32_t pc_count = 0;
  379. result = spvReflectEnumeratePushConstantBlocks(&module, &pc_count, nullptr);
  380. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  381. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed enumerating push constants.");
  382. if (pc_count) {
  383. ERR_FAIL_COND_V_MSG(pc_count > 1, FAILED,
  384. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "': Only one push constant is supported, which should be the same across shader stages.");
  385. Vector<SpvReflectBlockVariable *> pconstants;
  386. pconstants.resize(pc_count);
  387. result = spvReflectEnumeratePushConstantBlocks(&module, &pc_count, pconstants.ptrw());
  388. ERR_FAIL_COND_V_MSG(result != SPV_REFLECT_RESULT_SUCCESS, FAILED,
  389. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "' failed obtaining push constants.");
  390. #if 0
  391. if (pconstants[0] == nullptr) {
  392. Ref<FileAccess> f = FileAccess::open("res://popo.spv", FileAccess::WRITE);
  393. f->store_buffer((const uint8_t *)&SpirV[0], SpirV.size() * sizeof(uint32_t));
  394. }
  395. #endif
  396. ERR_FAIL_COND_V_MSG(reflection.push_constant_size && reflection.push_constant_size != pconstants[0]->size, FAILED,
  397. "Reflection of SPIR-V shader stage '" + String(RDC::SHADER_STAGE_NAMES[p_spirv[i].shader_stage]) + "': Push constant block must be the same across shader stages.");
  398. reflection.push_constant_size = pconstants[0]->size;
  399. reflection.push_constant_stages.set_flag(stage_flag);
  400. //print_line("Stage: " + String(RDC::SHADER_STAGE_NAMES[stage]) + " push constant of size=" + itos(push_constant.push_constant_size));
  401. }
  402. }
  403. reflection.stages_bits.set_flag(stage_flag);
  404. }
  405. // Sort all uniform_sets by binding.
  406. for (uint32_t i = 0; i < reflection.uniform_sets.size(); i++) {
  407. reflection.uniform_sets.write[i].sort();
  408. }
  409. set_from_shader_reflection(reflection);
  410. return OK;
  411. }
// Flattens p_reflection into the container's serializable members:
// reflection_data (scalar header fields) plus the parallel arrays for
// per-set uniform counts, uniform bindings, specialization constants and
// shader stages. Ends by calling the subclass hook.
void RenderingShaderContainer::set_from_shader_reflection(const RenderingDeviceCommons::ShaderReflection &p_reflection) {
	// Discard any previously stored reflection arrays.
	reflection_binding_set_uniforms_count.clear();
	reflection_binding_set_uniforms_data.clear();
	reflection_specialization_data.clear();
	reflection_shader_stages.clear();

	reflection_data.vertex_input_mask = p_reflection.vertex_input_mask;
	reflection_data.fragment_output_mask = p_reflection.fragment_output_mask;
	reflection_data.specialization_constants_count = p_reflection.specialization_constants.size();
	reflection_data.is_compute = p_reflection.is_compute;
	reflection_data.has_multiview = p_reflection.has_multiview;
	reflection_data.has_dynamic_buffers = p_reflection.has_dynamic_buffers;
	reflection_data.compute_local_size[0] = p_reflection.compute_local_size[0];
	reflection_data.compute_local_size[1] = p_reflection.compute_local_size[1];
	reflection_data.compute_local_size[2] = p_reflection.compute_local_size[2];
	reflection_data.set_count = p_reflection.uniform_sets.size();
	reflection_data.push_constant_size = p_reflection.push_constant_size;
	reflection_data.push_constant_stages_mask = uint32_t(p_reflection.push_constant_stages);
	// Uses the shader_name member set earlier (e.g. by reflect_spirv()).
	reflection_data.shader_name_len = shader_name.length();

	// Uniforms are stored as one flat array; the per-set element counts allow
	// reconstructing the set boundaries later.
	ReflectionBindingData binding_data;
	for (const Vector<RenderingDeviceCommons::ShaderUniform> &uniform_set : p_reflection.uniform_sets) {
		for (const RenderingDeviceCommons::ShaderUniform &uniform : uniform_set) {
			binding_data.type = uint32_t(uniform.type);
			binding_data.binding = uniform.binding;
			binding_data.stages = uint32_t(uniform.stages);
			binding_data.length = uniform.length;
			binding_data.writable = uint32_t(uniform.writable);
			reflection_binding_set_uniforms_data.push_back(binding_data);
		}

		reflection_binding_set_uniforms_count.push_back(uniform_set.size());
	}

	ReflectionSpecializationData specialization_data;
	for (const RenderingDeviceCommons::ShaderSpecializationConstant &spec : p_reflection.specialization_constants) {
		specialization_data.type = uint32_t(spec.type);
		specialization_data.constant_id = spec.constant_id;
		specialization_data.int_value = spec.int_value;
		specialization_data.stage_flags = uint32_t(spec.stages);
		reflection_specialization_data.push_back(specialization_data);
	}

	// Convert the stage bitmask into an ordered list of stage indices.
	for (uint32_t i = 0; i < RenderingDeviceCommons::SHADER_STAGE_MAX; i++) {
		if (p_reflection.stages_bits.has_flag(RenderingDeviceCommons::ShaderStage(1U << i))) {
			reflection_shader_stages.push_back(RenderingDeviceCommons::ShaderStage(i));
		}
	}

	reflection_data.stage_count = reflection_shader_stages.size();

	// Let subclasses derive backend-specific state.
	_set_from_shader_reflection_post(p_reflection);
}
  458. bool RenderingShaderContainer::set_code_from_spirv(const String &p_shader_name, Span<RenderingDeviceCommons::ShaderStageSPIRVData> p_spirv) {
  459. LocalVector<ReflectedShaderStage> spirv;
  460. ERR_FAIL_COND_V(reflect_spirv(p_shader_name, p_spirv, spirv) != OK, false);
  461. return _set_code_from_spirv(spirv.span());
  462. }
  463. RenderingDeviceCommons::ShaderReflection RenderingShaderContainer::get_shader_reflection() const {
  464. RenderingDeviceCommons::ShaderReflection shader_refl;
  465. shader_refl.push_constant_size = reflection_data.push_constant_size;
  466. shader_refl.push_constant_stages = reflection_data.push_constant_stages_mask;
  467. shader_refl.vertex_input_mask = reflection_data.vertex_input_mask;
  468. shader_refl.fragment_output_mask = reflection_data.fragment_output_mask;
  469. shader_refl.is_compute = reflection_data.is_compute;
  470. shader_refl.has_multiview = reflection_data.has_multiview;
  471. shader_refl.has_dynamic_buffers = reflection_data.has_dynamic_buffers;
  472. shader_refl.compute_local_size[0] = reflection_data.compute_local_size[0];
  473. shader_refl.compute_local_size[1] = reflection_data.compute_local_size[1];
  474. shader_refl.compute_local_size[2] = reflection_data.compute_local_size[2];
  475. shader_refl.uniform_sets.resize(reflection_data.set_count);
  476. shader_refl.specialization_constants.resize(reflection_data.specialization_constants_count);
  477. shader_refl.stages_vector.resize(reflection_data.stage_count);
  478. DEV_ASSERT(reflection_binding_set_uniforms_count.size() == reflection_data.set_count && "The amount of elements in the reflection and the shader container can't be different.");
  479. uint32_t uniform_index = 0;
  480. for (uint32_t i = 0; i < reflection_data.set_count; i++) {
  481. Vector<RenderingDeviceCommons::ShaderUniform> &uniform_set = shader_refl.uniform_sets.ptrw()[i];
  482. uint32_t uniforms_count = reflection_binding_set_uniforms_count[i];
  483. uniform_set.resize(uniforms_count);
  484. for (uint32_t j = 0; j < uniforms_count; j++) {
  485. const ReflectionBindingData &binding = reflection_binding_set_uniforms_data[uniform_index++];
  486. RenderingDeviceCommons::ShaderUniform &uniform = uniform_set.ptrw()[j];
  487. uniform.type = RenderingDeviceCommons::UniformType(binding.type);
  488. uniform.writable = binding.writable;
  489. uniform.length = binding.length;
  490. uniform.binding = binding.binding;
  491. uniform.stages = binding.stages;
  492. }
  493. }
  494. shader_refl.specialization_constants.resize(reflection_data.specialization_constants_count);
  495. for (uint32_t i = 0; i < reflection_data.specialization_constants_count; i++) {
  496. const ReflectionSpecializationData &spec = reflection_specialization_data[i];
  497. RenderingDeviceCommons::ShaderSpecializationConstant &sc = shader_refl.specialization_constants.ptrw()[i];
  498. sc.type = RenderingDeviceCommons::PipelineSpecializationConstantType(spec.type);
  499. sc.constant_id = spec.constant_id;
  500. sc.int_value = spec.int_value;
  501. sc.stages = spec.stage_flags;
  502. }
  503. shader_refl.stages_vector.resize(reflection_data.stage_count);
  504. for (uint32_t i = 0; i < reflection_data.stage_count; i++) {
  505. shader_refl.stages_vector.set(i, reflection_shader_stages[i]);
  506. shader_refl.stages_bits.set_flag(RenderingDeviceCommons::ShaderStage(1U << reflection_shader_stages[i]));
  507. }
  508. return shader_refl;
  509. }
// Deserializes a shader container previously produced by to_bytes().
// Wire layout: ContainerHeader, ReflectionData, shader name (aligned),
// per-set uniform counts + uniform records, specialization constants,
// shader stages, then one ShaderHeader + compressed code blob per shader.
// Format subclasses may interleave extra data via the _from_bytes_*_extra_data()
// hooks; each hook returns the number of bytes it consumed.
// Returns false (with an error message) on any validation or size failure.
bool RenderingShaderContainer::from_bytes(const PackedByteArray &p_bytes) {
	// Variable-sized sections are padded to 4-byte boundaries when written.
	const uint64_t alignment = sizeof(uint32_t);
	const uint8_t *bytes_ptr = p_bytes.ptr();
	uint64_t bytes_offset = 0;

	// Read container header.
	ERR_FAIL_COND_V_MSG(int64_t(bytes_offset + sizeof(ContainerHeader)) > p_bytes.size(), false, "Not enough bytes for a container header in shader container.");
	const ContainerHeader &container_header = *(const ContainerHeader *)(&bytes_ptr[bytes_offset]);
	bytes_offset += sizeof(ContainerHeader);
	// NOTE(review): the subclass hook runs before the magic/version checks
	// below, so extra-data readers see arbitrary input — confirm they are
	// defensive against malformed data.
	bytes_offset += _from_bytes_header_extra_data(&bytes_ptr[bytes_offset]);
	ERR_FAIL_COND_V_MSG(container_header.magic_number != CONTAINER_MAGIC_NUMBER, false, "Incorrect magic number in shader container.");
	ERR_FAIL_COND_V_MSG(container_header.version > CONTAINER_VERSION, false, "Unsupported version in shader container.");
	ERR_FAIL_COND_V_MSG(container_header.format != _format(), false, "Incorrect format in shader container.");
	ERR_FAIL_COND_V_MSG(container_header.format_version > _format_version(), false, "Unsupported format version in shader container.");

	// Adjust shaders to the size indicated by the container header.
	shaders.resize(container_header.shader_count);

	// Read reflection data.
	ERR_FAIL_COND_V_MSG(int64_t(bytes_offset + sizeof(ReflectionData)) > p_bytes.size(), false, "Not enough bytes for reflection data in shader container.");
	reflection_data = *(const ReflectionData *)(&bytes_ptr[bytes_offset]);
	bytes_offset += sizeof(ReflectionData);
	bytes_offset += _from_bytes_reflection_extra_data(&bytes_ptr[bytes_offset]);

	// Read shader name.
	ERR_FAIL_COND_V_MSG(int64_t(bytes_offset + reflection_data.shader_name_len) > p_bytes.size(), false, "Not enough bytes for shader name in shader container.");
	if (reflection_data.shader_name_len > 0) {
		// Round-trip through String to validate/normalize the UTF-8 payload.
		String shader_name_str;
		shader_name_str.append_utf8((const char *)(&bytes_ptr[bytes_offset]), reflection_data.shader_name_len);
		shader_name = shader_name_str.utf8();
		// The name was written padded; skip the padding as well.
		bytes_offset = aligned_to(bytes_offset + reflection_data.shader_name_len, alignment);
	} else {
		shader_name = CharString();
	}

	// Read per-set uniform counts followed by the flattened uniform records.
	reflection_binding_set_uniforms_count.resize(reflection_data.set_count);
	reflection_binding_set_uniforms_data.clear();
	uint32_t uniform_index = 0;
	for (uint32_t i = 0; i < reflection_data.set_count; i++) {
		ERR_FAIL_COND_V_MSG(int64_t(bytes_offset + sizeof(uint32_t)) > p_bytes.size(), false, "Not enough bytes for uniform set count in shader container.");
		uint32_t uniforms_count = *(uint32_t *)(&bytes_ptr[bytes_offset]);
		reflection_binding_set_uniforms_count.ptrw()[i] = uniforms_count;
		bytes_offset += sizeof(uint32_t);
		// All sets share one flat uniform array; grow it per set.
		reflection_binding_set_uniforms_data.resize(reflection_binding_set_uniforms_data.size() + uniforms_count);
		bytes_offset += _from_bytes_reflection_binding_uniform_extra_data_start(&bytes_ptr[bytes_offset]);
		for (uint32_t j = 0; j < uniforms_count; j++) {
			ERR_FAIL_COND_V_MSG(int64_t(bytes_offset + sizeof(ReflectionBindingData)) > p_bytes.size(), false, "Not enough bytes for uniform in shader container.");
			memcpy(&reflection_binding_set_uniforms_data.ptrw()[uniform_index], &bytes_ptr[bytes_offset], sizeof(ReflectionBindingData));
			bytes_offset += sizeof(ReflectionBindingData);
			bytes_offset += _from_bytes_reflection_binding_uniform_extra_data(&bytes_ptr[bytes_offset], uniform_index);
			uniform_index++;
		}
	}

	// Read specialization constants.
	reflection_specialization_data.resize(reflection_data.specialization_constants_count);
	bytes_offset += _from_bytes_reflection_specialization_extra_data_start(&bytes_ptr[bytes_offset]);
	for (uint32_t i = 0; i < reflection_data.specialization_constants_count; i++) {
		ERR_FAIL_COND_V_MSG(int64_t(bytes_offset + sizeof(ReflectionSpecializationData)) > p_bytes.size(), false, "Not enough bytes for specialization in shader container.");
		memcpy(&reflection_specialization_data.ptrw()[i], &bytes_ptr[bytes_offset], sizeof(ReflectionSpecializationData));
		bytes_offset += sizeof(ReflectionSpecializationData);
		bytes_offset += _from_bytes_reflection_specialization_extra_data(&bytes_ptr[bytes_offset], i);
	}

	// Read the shader stage list.
	const uint32_t stage_count = reflection_data.stage_count;
	if (stage_count > 0) {
		// NOTE(review): this bounds check is computed before the extra-data
		// hook below advances bytes_offset, so it does not cover hook bytes —
		// confirm formats that override the hook account for this.
		ERR_FAIL_COND_V_MSG(int64_t(bytes_offset + stage_count * sizeof(RenderingDeviceCommons::ShaderStage)) > p_bytes.size(), false, "Not enough bytes for stages in shader container.");
		reflection_shader_stages.resize(stage_count);
		bytes_offset += _from_bytes_shader_extra_data_start(&bytes_ptr[bytes_offset]);
		memcpy(reflection_shader_stages.ptrw(), &bytes_ptr[bytes_offset], stage_count * sizeof(RenderingDeviceCommons::ShaderStage));
		bytes_offset += stage_count * sizeof(RenderingDeviceCommons::ShaderStage);
	}

	// Read shaders.
	for (int64_t i = 0; i < shaders.size(); i++) {
		ERR_FAIL_COND_V_MSG(int64_t(bytes_offset + sizeof(ShaderHeader)) > p_bytes.size(), false, "Not enough bytes for shader header in shader container.");
		const ShaderHeader &header = *(const ShaderHeader *)(&bytes_ptr[bytes_offset]);
		bytes_offset += sizeof(ShaderHeader);
		ERR_FAIL_COND_V_MSG(int64_t(bytes_offset + header.code_compressed_size) > p_bytes.size(), false, "Not enough bytes for a shader in shader container.");
		Shader &shader = shaders.ptrw()[i];
		shader.shader_stage = RenderingDeviceCommons::ShaderStage(header.shader_stage);
		shader.code_compression_flags = header.code_compression_flags;
		shader.code_decompressed_size = header.code_decompressed_size;
		shader.code_compressed_bytes.resize(header.code_compressed_size);
		memcpy(shader.code_compressed_bytes.ptrw(), &bytes_ptr[bytes_offset], header.code_compressed_size);
		// Compressed blobs are written padded; skip the padding too.
		bytes_offset = aligned_to(bytes_offset + header.code_compressed_size, alignment);
		bytes_offset += _from_bytes_shader_extra_data(&bytes_ptr[bytes_offset], i);
	}

	bytes_offset += _from_bytes_footer_extra_data(&bytes_ptr[bytes_offset]);
	// Trailing-byte check: the whole buffer must have been consumed.
	ERR_FAIL_COND_V_MSG(bytes_offset != (uint64_t)p_bytes.size(), false, "Amount of bytes in the container does not match the amount of bytes read.");
	return true;
}
  593. PackedByteArray RenderingShaderContainer::to_bytes() const {
  594. // Compute the exact size the container will require for writing everything out.
  595. const uint64_t alignment = sizeof(uint32_t);
  596. uint64_t total_size = 0;
  597. total_size += sizeof(ContainerHeader) + _to_bytes_header_extra_data(nullptr);
  598. total_size += sizeof(ReflectionData) + _to_bytes_reflection_extra_data(nullptr);
  599. total_size += aligned_to(reflection_data.shader_name_len, alignment);
  600. total_size += reflection_binding_set_uniforms_count.size() * sizeof(uint32_t);
  601. total_size += reflection_binding_set_uniforms_data.size() * sizeof(ReflectionBindingData);
  602. total_size += reflection_specialization_data.size() * sizeof(ReflectionSpecializationData);
  603. total_size += reflection_shader_stages.size() * sizeof(RenderingDeviceCommons::ShaderStage);
  604. for (uint32_t i = 0; i < reflection_binding_set_uniforms_data.size(); i++) {
  605. total_size += _to_bytes_reflection_binding_uniform_extra_data(nullptr, i);
  606. }
  607. for (uint32_t i = 0; i < reflection_specialization_data.size(); i++) {
  608. total_size += _to_bytes_reflection_specialization_extra_data(nullptr, i);
  609. }
  610. for (uint32_t i = 0; i < shaders.size(); i++) {
  611. total_size += sizeof(ShaderHeader);
  612. total_size += shaders[i].code_compressed_bytes.size();
  613. total_size = aligned_to(total_size, alignment);
  614. total_size += _to_bytes_shader_extra_data(nullptr, i);
  615. }
  616. total_size += _to_bytes_footer_extra_data(nullptr);
  617. // Create the array that will hold all of the data.
  618. PackedByteArray bytes;
  619. bytes.resize_initialized(total_size);
  620. // Write out the data to the array.
  621. uint64_t bytes_offset = 0;
  622. uint8_t *bytes_ptr = bytes.ptrw();
  623. ContainerHeader &container_header = *(ContainerHeader *)(&bytes_ptr[bytes_offset]);
  624. container_header.magic_number = CONTAINER_MAGIC_NUMBER;
  625. container_header.version = CONTAINER_VERSION;
  626. container_header.format = _format();
  627. container_header.format_version = _format_version();
  628. container_header.shader_count = shaders.size();
  629. bytes_offset += sizeof(ContainerHeader);
  630. bytes_offset += _to_bytes_header_extra_data(&bytes_ptr[bytes_offset]);
  631. memcpy(&bytes_ptr[bytes_offset], &reflection_data, sizeof(ReflectionData));
  632. bytes_offset += sizeof(ReflectionData);
  633. bytes_offset += _to_bytes_reflection_extra_data(&bytes_ptr[bytes_offset]);
  634. if (shader_name.size() > 0) {
  635. memcpy(&bytes_ptr[bytes_offset], shader_name.ptr(), reflection_data.shader_name_len);
  636. bytes_offset = aligned_to(bytes_offset + reflection_data.shader_name_len, alignment);
  637. }
  638. uint32_t uniform_index = 0;
  639. for (uint32_t uniform_count : reflection_binding_set_uniforms_count) {
  640. memcpy(&bytes_ptr[bytes_offset], &uniform_count, sizeof(uniform_count));
  641. bytes_offset += sizeof(uint32_t);
  642. for (uint32_t i = 0; i < uniform_count; i++) {
  643. memcpy(&bytes_ptr[bytes_offset], &reflection_binding_set_uniforms_data[uniform_index], sizeof(ReflectionBindingData));
  644. bytes_offset += sizeof(ReflectionBindingData);
  645. bytes_offset += _to_bytes_reflection_binding_uniform_extra_data(&bytes_ptr[bytes_offset], uniform_index);
  646. uniform_index++;
  647. }
  648. }
  649. for (uint32_t i = 0; i < reflection_specialization_data.size(); i++) {
  650. memcpy(&bytes_ptr[bytes_offset], &reflection_specialization_data.ptr()[i], sizeof(ReflectionSpecializationData));
  651. bytes_offset += sizeof(ReflectionSpecializationData);
  652. bytes_offset += _to_bytes_reflection_specialization_extra_data(&bytes_ptr[bytes_offset], i);
  653. }
  654. if (!reflection_shader_stages.is_empty()) {
  655. uint32_t stage_count = reflection_shader_stages.size();
  656. memcpy(&bytes_ptr[bytes_offset], reflection_shader_stages.ptr(), stage_count * sizeof(RenderingDeviceCommons::ShaderStage));
  657. bytes_offset += stage_count * sizeof(RenderingDeviceCommons::ShaderStage);
  658. }
  659. for (uint32_t i = 0; i < shaders.size(); i++) {
  660. const Shader &shader = shaders[i];
  661. ShaderHeader &header = *(ShaderHeader *)(&bytes.ptr()[bytes_offset]);
  662. header.shader_stage = shader.shader_stage;
  663. header.code_compressed_size = uint32_t(shader.code_compressed_bytes.size());
  664. header.code_compression_flags = shader.code_compression_flags;
  665. header.code_decompressed_size = shader.code_decompressed_size;
  666. bytes_offset += sizeof(ShaderHeader);
  667. memcpy(&bytes.ptrw()[bytes_offset], shader.code_compressed_bytes.ptr(), shader.code_compressed_bytes.size());
  668. bytes_offset = aligned_to(bytes_offset + shader.code_compressed_bytes.size(), alignment);
  669. bytes_offset += _to_bytes_shader_extra_data(&bytes_ptr[bytes_offset], i);
  670. }
  671. bytes_offset += _to_bytes_footer_extra_data(&bytes_ptr[bytes_offset]);
  672. ERR_FAIL_COND_V_MSG(bytes_offset != total_size, PackedByteArray(), "Amount of bytes written does not match the amount of bytes reserved for the container.");
  673. return bytes;
  674. }
  675. bool RenderingShaderContainer::compress_code(const uint8_t *p_decompressed_bytes, uint32_t p_decompressed_size, uint8_t *p_compressed_bytes, uint32_t *r_compressed_size, uint32_t *r_compressed_flags) const {
  676. DEV_ASSERT(p_decompressed_bytes != nullptr);
  677. DEV_ASSERT(p_decompressed_size > 0);
  678. DEV_ASSERT(p_compressed_bytes != nullptr);
  679. DEV_ASSERT(r_compressed_size != nullptr);
  680. DEV_ASSERT(r_compressed_flags != nullptr);
  681. *r_compressed_flags = 0;
  682. PackedByteArray zstd_bytes;
  683. const int64_t zstd_max_bytes = Compression::get_max_compressed_buffer_size(p_decompressed_size, Compression::MODE_ZSTD);
  684. zstd_bytes.resize(zstd_max_bytes);
  685. const int64_t zstd_size = Compression::compress(zstd_bytes.ptrw(), p_decompressed_bytes, p_decompressed_size, Compression::MODE_ZSTD);
  686. if (zstd_size > 0 && (uint32_t)(zstd_size) < p_decompressed_size) {
  687. // Only choose Zstd if it results in actual compression.
  688. memcpy(p_compressed_bytes, zstd_bytes.ptr(), zstd_size);
  689. *r_compressed_size = zstd_size;
  690. *r_compressed_flags |= COMPRESSION_FLAG_ZSTD;
  691. } else {
  692. // Just copy the input to the output directly.
  693. memcpy(p_compressed_bytes, p_decompressed_bytes, p_decompressed_size);
  694. *r_compressed_size = p_decompressed_size;
  695. }
  696. return true;
  697. }
  698. bool RenderingShaderContainer::decompress_code(const uint8_t *p_compressed_bytes, uint32_t p_compressed_size, uint32_t p_compressed_flags, uint8_t *p_decompressed_bytes, uint32_t p_decompressed_size) const {
  699. DEV_ASSERT(p_compressed_bytes != nullptr);
  700. DEV_ASSERT(p_compressed_size > 0);
  701. DEV_ASSERT(p_decompressed_bytes != nullptr);
  702. DEV_ASSERT(p_decompressed_size > 0);
  703. bool uses_zstd = p_compressed_flags & COMPRESSION_FLAG_ZSTD;
  704. if (uses_zstd) {
  705. if (!Compression::decompress(p_decompressed_bytes, p_decompressed_size, p_compressed_bytes, p_compressed_size, Compression::MODE_ZSTD)) {
  706. ERR_FAIL_V_MSG(false, "Malformed zstd input for decompressing shader code.");
  707. }
  708. } else {
  709. memcpy(p_decompressed_bytes, p_compressed_bytes, MIN(p_compressed_size, p_decompressed_size));
  710. }
  711. return true;
  712. }
  713. RenderingShaderContainer::RenderingShaderContainer() {}
  714. RenderingShaderContainer::~RenderingShaderContainer() {}