/*************************************************************************/
/* effects_rd.cpp */
/*************************************************************************/
/* This file is part of: */
/* GODOT ENGINE */
/* https://godotengine.org */
/*************************************************************************/
/* Copyright (c) 2007-2021 Juan Linietsky, Ariel Manzur. */
/* Copyright (c) 2014-2021 Godot Engine contributors (cf. AUTHORS.md). */
/* */
/* Permission is hereby granted, free of charge, to any person obtaining */
/* a copy of this software and associated documentation files (the */
/* "Software"), to deal in the Software without restriction, including */
/* without limitation the rights to use, copy, modify, merge, publish, */
/* distribute, sublicense, and/or sell copies of the Software, and to */
/* permit persons to whom the Software is furnished to do so, subject to */
/* the following conditions: */
/* */
/* The above copyright notice and this permission notice shall be */
/* included in all copies or substantial portions of the Software. */
/* */
/* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, */
/* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF */
/* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.*/
/* IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY */
/* CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, */
/* TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE */
/* SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. */
/*************************************************************************/

#include "effects_rd.h"

#include "core/config/project_settings.h"
#include "core/math/math_defs.h"
#include "core/os/os.h"
#include "servers/rendering/renderer_rd/renderer_compositor_rd.h"

#include "thirdparty/misc/cubemap_coeffs.h"

bool EffectsRD::get_prefer_raster_effects() {
	return prefer_raster_effects;
}

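// Flattens a CameraMatrix into 16 floats, row after row, matching the
// in-memory layout of CameraMatrix::matrix, so it can be copied straight
// into a push constant.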
static _FORCE_INLINE_ void store_camera(const CameraMatrix &p_mtx, float *p_array) {
	for (int i = 0; i < 4; i++) {
		for (int j = 0; j < 4; j++) {
			p_array[i * 4 + j] = p_mtx.matrix[i][j];
		}
	}
}

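// The _get_*_uniform_set helpers below share the same caching scheme: uniform
// sets are cached per source RID (or RID pair), revalidated on every lookup in
// case the underlying resource was freed, and lazily rebuilt against a shader
// whose set layout matches the requested bindings.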
RID EffectsRD::_get_uniform_set_from_image(RID p_image) {
	if (image_to_uniform_set_cache.has(p_image)) {
		RID uniform_set = image_to_uniform_set_cache[p_image];
		if (RD::get_singleton()->uniform_set_is_valid(uniform_set)) {
			return uniform_set;
		}
	}
	Vector<RD::Uniform> uniforms;
	RD::Uniform u;
	u.uniform_type = RD::UNIFORM_TYPE_IMAGE;
	u.binding = 0;
	u.ids.push_back(p_image);
	uniforms.push_back(u);
	//anything with the same configuration (one image in binding 0 for set 1) is good
	RID uniform_set = RD::get_singleton()->uniform_set_create(uniforms, luminance_reduce.shader.version_get_shader(luminance_reduce.shader_version, 0), 1);
	image_to_uniform_set_cache[p_image] = uniform_set;
	return uniform_set;
}

RID EffectsRD::_get_uniform_set_for_input(RID p_texture) {
	if (input_to_uniform_set_cache.has(p_texture)) {
		RID uniform_set = input_to_uniform_set_cache[p_texture];
		if (RD::get_singleton()->uniform_set_is_valid(uniform_set)) {
			return uniform_set;
		}
	}
	Vector<RD::Uniform> uniforms;
	RD::Uniform u;
	u.uniform_type = RD::UNIFORM_TYPE_INPUT_ATTACHMENT;
	u.binding = 0;
	u.ids.push_back(p_texture);
	uniforms.push_back(u);
	// This is specific to our subpass shader
	RID uniform_set = RD::get_singleton()->uniform_set_create(uniforms, tonemap.shader.version_get_shader(tonemap.shader_version, TONEMAP_MODE_SUBPASS), 0);
	input_to_uniform_set_cache[p_texture] = uniform_set;
	return uniform_set;
}

RID EffectsRD::_get_uniform_set_from_texture(RID p_texture, bool p_use_mipmaps) {
	if (texture_to_uniform_set_cache.has(p_texture)) {
		RID uniform_set = texture_to_uniform_set_cache[p_texture];
		if (RD::get_singleton()->uniform_set_is_valid(uniform_set)) {
			return uniform_set;
		}
	}
	Vector<RD::Uniform> uniforms;
	RD::Uniform u;
	u.uniform_type = RD::UNIFORM_TYPE_SAMPLER_WITH_TEXTURE;
	u.binding = 0;
	u.ids.push_back(p_use_mipmaps ? default_mipmap_sampler : default_sampler);
	u.ids.push_back(p_texture);
	uniforms.push_back(u);
	// anything with the same configuration (one texture in binding 0 for set 0) is good
	RID uniform_set = RD::get_singleton()->uniform_set_create(uniforms, tonemap.shader.version_get_shader(tonemap.shader_version, 0), 0);
	texture_to_uniform_set_cache[p_texture] = uniform_set;
	return uniform_set;
}

RID EffectsRD::_get_compute_uniform_set_from_texture(RID p_texture, bool p_use_mipmaps) {
	if (texture_to_compute_uniform_set_cache.has(p_texture)) {
		RID uniform_set = texture_to_compute_uniform_set_cache[p_texture];
		if (RD::get_singleton()->uniform_set_is_valid(uniform_set)) {
			return uniform_set;
		}
	}
	Vector<RD::Uniform> uniforms;
	RD::Uniform u;
	u.uniform_type = RD::UNIFORM_TYPE_SAMPLER_WITH_TEXTURE;
	u.binding = 0;
	u.ids.push_back(p_use_mipmaps ? default_mipmap_sampler : default_sampler);
	u.ids.push_back(p_texture);
	uniforms.push_back(u);
	//anything with the same configuration (one texture in binding 0 for set 0) is good
	RID uniform_set = RD::get_singleton()->uniform_set_create(uniforms, luminance_reduce.shader.version_get_shader(luminance_reduce.shader_version, 0), 0);
	texture_to_compute_uniform_set_cache[p_texture] = uniform_set;
	return uniform_set;
}

RID EffectsRD::_get_compute_uniform_set_from_texture_and_sampler(RID p_texture, RID p_sampler) {
	TextureSamplerPair tsp;
	tsp.texture = p_texture;
	tsp.sampler = p_sampler;
	if (texture_sampler_to_compute_uniform_set_cache.has(tsp)) {
		RID uniform_set = texture_sampler_to_compute_uniform_set_cache[tsp];
		if (RD::get_singleton()->uniform_set_is_valid(uniform_set)) {
			return uniform_set;
		}
	}
	Vector<RD::Uniform> uniforms;
	RD::Uniform u;
	u.uniform_type = RD::UNIFORM_TYPE_SAMPLER_WITH_TEXTURE;
	u.binding = 0;
	u.ids.push_back(p_sampler);
	u.ids.push_back(p_texture);
	uniforms.push_back(u);
	//anything with the same configuration (one texture in binding 0 for set 0) is good
	RID uniform_set = RD::get_singleton()->uniform_set_create(uniforms, ssao.blur_shader.version_get_shader(ssao.blur_shader_version, 0), 0);
	texture_sampler_to_compute_uniform_set_cache[tsp] = uniform_set;
	return uniform_set;
}

RID EffectsRD::_get_compute_uniform_set_from_texture_pair(RID p_texture1, RID p_texture2, bool p_use_mipmaps) {
	TexturePair tp;
	tp.texture1 = p_texture1;
	tp.texture2 = p_texture2;
	if (texture_pair_to_compute_uniform_set_cache.has(tp)) {
		RID uniform_set = texture_pair_to_compute_uniform_set_cache[tp];
		if (RD::get_singleton()->uniform_set_is_valid(uniform_set)) {
			return uniform_set;
		}
	}
	Vector<RD::Uniform> uniforms;
	{
		RD::Uniform u;
		u.uniform_type = RD::UNIFORM_TYPE_SAMPLER_WITH_TEXTURE;
		u.binding = 0;
		u.ids.push_back(p_use_mipmaps ? default_mipmap_sampler : default_sampler);
		u.ids.push_back(p_texture1);
		uniforms.push_back(u);
	}
	{
		RD::Uniform u;
		u.uniform_type = RD::UNIFORM_TYPE_SAMPLER_WITH_TEXTURE;
		u.binding = 1;
		u.ids.push_back(p_use_mipmaps ? default_mipmap_sampler : default_sampler);
		u.ids.push_back(p_texture2);
		uniforms.push_back(u);
	}
	//anything with the same configuration (two textures in bindings 0 and 1 for set 1) is good
	RID uniform_set = RD::get_singleton()->uniform_set_create(uniforms, ssr_scale.shader.version_get_shader(ssr_scale.shader_version, 0), 1);
	texture_pair_to_compute_uniform_set_cache[tp] = uniform_set;
	return uniform_set;
}

RID EffectsRD::_get_compute_uniform_set_from_image_pair(RID p_texture1, RID p_texture2) {
	TexturePair tp;
	tp.texture1 = p_texture1;
	tp.texture2 = p_texture2;
	if (image_pair_to_compute_uniform_set_cache.has(tp)) {
		RID uniform_set = image_pair_to_compute_uniform_set_cache[tp];
		if (RD::get_singleton()->uniform_set_is_valid(uniform_set)) {
			return uniform_set;
		}
	}
	Vector<RD::Uniform> uniforms;
	{
		RD::Uniform u;
		u.uniform_type = RD::UNIFORM_TYPE_IMAGE;
		u.binding = 0;
		u.ids.push_back(p_texture1);
		uniforms.push_back(u);
	}
	{
		RD::Uniform u;
		u.uniform_type = RD::UNIFORM_TYPE_IMAGE;
		u.binding = 1;
		u.ids.push_back(p_texture2);
		uniforms.push_back(u);
	}
	//anything with the same configuration (two images in bindings 0 and 1 for set 3) is good
	RID uniform_set = RD::get_singleton()->uniform_set_create(uniforms, ssr_scale.shader.version_get_shader(ssr_scale.shader_version, 0), 3);
	image_pair_to_compute_uniform_set_cache[tp] = uniform_set;
	return uniform_set;
}

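// Raster copy of a texture into a sub-rectangle of an atlas framebuffer. The
// caller owns the draw list; this only binds the pipeline, the uniforms and
// the push constant, then issues the draw.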
void EffectsRD::copy_to_atlas_fb(RID p_source_rd_texture, RID p_dest_framebuffer, const Rect2 &p_uv_rect, RD::DrawListID p_draw_list, bool p_flip_y, bool p_panorama) {
	memset(&copy_to_fb.push_constant, 0, sizeof(CopyToFbPushConstant));
	copy_to_fb.push_constant.use_section = true;
	copy_to_fb.push_constant.section[0] = p_uv_rect.position.x;
	copy_to_fb.push_constant.section[1] = p_uv_rect.position.y;
	copy_to_fb.push_constant.section[2] = p_uv_rect.size.x;
	copy_to_fb.push_constant.section[3] = p_uv_rect.size.y;
	if (p_flip_y) {
		copy_to_fb.push_constant.flip_y = true;
	}
	RD::DrawListID draw_list = p_draw_list;
	RD::get_singleton()->draw_list_bind_render_pipeline(draw_list, copy_to_fb.pipelines[p_panorama ? COPY_TO_FB_COPY_PANORAMA_TO_DP : COPY_TO_FB_COPY].get_render_pipeline(RD::INVALID_ID, RD::get_singleton()->framebuffer_get_format(p_dest_framebuffer)));
	RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_source_rd_texture), 0);
	RD::get_singleton()->draw_list_bind_index_array(draw_list, index_array);
	RD::get_singleton()->draw_list_set_push_constant(draw_list, &copy_to_fb.push_constant, sizeof(CopyToFbPushConstant));
	RD::get_singleton()->draw_list_draw(draw_list, true);
}

void EffectsRD::copy_to_fb_rect(RID p_source_rd_texture, RID p_dest_framebuffer, const Rect2i &p_rect, bool p_flip_y, bool p_force_luminance, bool p_alpha_to_zero, bool p_srgb, RID p_secondary) {
	memset(&copy_to_fb.push_constant, 0, sizeof(CopyToFbPushConstant));
	if (p_flip_y) {
		copy_to_fb.push_constant.flip_y = true;
	}
	if (p_force_luminance) {
		copy_to_fb.push_constant.force_luminance = true;
	}
	if (p_alpha_to_zero) {
		copy_to_fb.push_constant.alpha_to_zero = true;
	}
	if (p_srgb) {
		copy_to_fb.push_constant.srgb = true;
	}
	RD::DrawListID draw_list = RD::get_singleton()->draw_list_begin(p_dest_framebuffer, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_READ, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_DISCARD, Vector<Color>(), 1.0, 0, p_rect);
	RD::get_singleton()->draw_list_bind_render_pipeline(draw_list, copy_to_fb.pipelines[p_secondary.is_valid() ? COPY_TO_FB_COPY2 : COPY_TO_FB_COPY].get_render_pipeline(RD::INVALID_ID, RD::get_singleton()->framebuffer_get_format(p_dest_framebuffer)));
	RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_source_rd_texture), 0);
	if (p_secondary.is_valid()) {
		RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_secondary), 1);
	}
	RD::get_singleton()->draw_list_bind_index_array(draw_list, index_array);
	RD::get_singleton()->draw_list_set_push_constant(draw_list, &copy_to_fb.push_constant, sizeof(CopyToFbPushConstant));
	RD::get_singleton()->draw_list_draw(draw_list, true);
	RD::get_singleton()->draw_list_end();
}

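// Compute-shader copy into a region of the destination image. The *_8BIT
// pipeline variants target 8-bit destination image formats, as the naming
// suggests; the shader variants themselves live outside this file.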
void EffectsRD::copy_to_rect(RID p_source_rd_texture, RID p_dest_texture, const Rect2i &p_rect, bool p_flip_y, bool p_force_luminance, bool p_all_source, bool p_8_bit_dst, bool p_alpha_to_one) {
	memset(&copy.push_constant, 0, sizeof(CopyPushConstant));
	if (p_flip_y) {
		copy.push_constant.flags |= COPY_FLAG_FLIP_Y;
	}
	if (p_force_luminance) {
		copy.push_constant.flags |= COPY_FLAG_FORCE_LUMINANCE;
	}
	if (p_all_source) {
		copy.push_constant.flags |= COPY_FLAG_ALL_SOURCE;
	}
	if (p_alpha_to_one) {
		copy.push_constant.flags |= COPY_FLAG_ALPHA_TO_ONE;
	}
	copy.push_constant.section[0] = 0;
	copy.push_constant.section[1] = 0;
	copy.push_constant.section[2] = p_rect.size.width;
	copy.push_constant.section[3] = p_rect.size.height;
	copy.push_constant.target[0] = p_rect.position.x;
	copy.push_constant.target[1] = p_rect.position.y;
	RD::ComputeListID compute_list = RD::get_singleton()->compute_list_begin();
	RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, copy.pipelines[p_8_bit_dst ? COPY_MODE_SIMPLY_COPY_8BIT : COPY_MODE_SIMPLY_COPY]);
	RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_source_rd_texture), 0);
	RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_dest_texture), 3);
	RD::get_singleton()->compute_list_set_push_constant(compute_list, &copy.push_constant, sizeof(CopyPushConstant));
	RD::get_singleton()->compute_list_dispatch_threads(compute_list, p_rect.size.width, p_rect.size.height, 1);
	RD::get_singleton()->compute_list_end();
}

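// Converts a cubemap (or cubemap array) into an equirectangular panorama. The
// requested LOD is passed through the camera_z_far field of the shared copy
// push constant rather than a dedicated member.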
void EffectsRD::copy_cubemap_to_panorama(RID p_source_cube, RID p_dest_panorama, const Size2i &p_panorama_size, float p_lod, bool p_is_array) {
	memset(&copy.push_constant, 0, sizeof(CopyPushConstant));
	copy.push_constant.section[0] = 0;
	copy.push_constant.section[1] = 0;
	copy.push_constant.section[2] = p_panorama_size.width;
	copy.push_constant.section[3] = p_panorama_size.height;
	copy.push_constant.target[0] = 0;
	copy.push_constant.target[1] = 0;
	copy.push_constant.camera_z_far = p_lod;
	RD::ComputeListID compute_list = RD::get_singleton()->compute_list_begin();
	RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, copy.pipelines[p_is_array ? COPY_MODE_CUBE_ARRAY_TO_PANORAMA : COPY_MODE_CUBE_TO_PANORAMA]);
	RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_source_cube), 0);
	RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_dest_panorama), 3);
	RD::get_singleton()->compute_list_set_push_constant(compute_list, &copy.push_constant, sizeof(CopyPushConstant));
	RD::get_singleton()->compute_list_dispatch_threads(compute_list, p_panorama_size.width, p_panorama_size.height, 1);
	RD::get_singleton()->compute_list_end();
}

void EffectsRD::copy_depth_to_rect_and_linearize(RID p_source_rd_texture, RID p_dest_texture, const Rect2i &p_rect, bool p_flip_y, float p_z_near, float p_z_far) {
	memset(&copy.push_constant, 0, sizeof(CopyPushConstant));
	if (p_flip_y) {
		copy.push_constant.flags |= COPY_FLAG_FLIP_Y;
	}
	copy.push_constant.section[0] = 0;
	copy.push_constant.section[1] = 0;
	copy.push_constant.section[2] = p_rect.size.width;
	copy.push_constant.section[3] = p_rect.size.height;
	copy.push_constant.target[0] = p_rect.position.x;
	copy.push_constant.target[1] = p_rect.position.y;
	copy.push_constant.camera_z_far = p_z_far;
	copy.push_constant.camera_z_near = p_z_near;
	RD::ComputeListID compute_list = RD::get_singleton()->compute_list_begin();
	RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, copy.pipelines[COPY_MODE_LINEARIZE_DEPTH]);
	RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_source_rd_texture), 0);
	RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_dest_texture), 3);
	RD::get_singleton()->compute_list_set_push_constant(compute_list, &copy.push_constant, sizeof(CopyPushConstant));
	RD::get_singleton()->compute_list_dispatch_threads(compute_list, p_rect.size.width, p_rect.size.height, 1);
	RD::get_singleton()->compute_list_end();
}

void EffectsRD::copy_depth_to_rect(RID p_source_rd_texture, RID p_dest_texture, const Rect2i &p_rect, bool p_flip_y) {
	memset(&copy.push_constant, 0, sizeof(CopyPushConstant));
	if (p_flip_y) {
		copy.push_constant.flags |= COPY_FLAG_FLIP_Y;
	}
	copy.push_constant.section[0] = 0;
	copy.push_constant.section[1] = 0;
	copy.push_constant.section[2] = p_rect.size.width;
	copy.push_constant.section[3] = p_rect.size.height;
	copy.push_constant.target[0] = p_rect.position.x;
	copy.push_constant.target[1] = p_rect.position.y;
	RD::ComputeListID compute_list = RD::get_singleton()->compute_list_begin();
	RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, copy.pipelines[COPY_MODE_SIMPLY_COPY_DEPTH]);
	RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_source_rd_texture), 0);
	RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_dest_texture), 3);
	RD::get_singleton()->compute_list_set_push_constant(compute_list, &copy.push_constant, sizeof(CopyPushConstant));
	RD::get_singleton()->compute_list_dispatch_threads(compute_list, p_rect.size.width, p_rect.size.height, 1);
	RD::get_singleton()->compute_list_end();
}

void EffectsRD::set_color(RID p_dest_texture, const Color &p_color, const Rect2i &p_region, bool p_8bit_dst) {
	memset(&copy.push_constant, 0, sizeof(CopyPushConstant));
	copy.push_constant.section[0] = 0;
	copy.push_constant.section[1] = 0;
	copy.push_constant.section[2] = p_region.size.width;
	copy.push_constant.section[3] = p_region.size.height;
	copy.push_constant.target[0] = p_region.position.x;
	copy.push_constant.target[1] = p_region.position.y;
	copy.push_constant.set_color[0] = p_color.r;
	copy.push_constant.set_color[1] = p_color.g;
	copy.push_constant.set_color[2] = p_color.b;
	copy.push_constant.set_color[3] = p_color.a;
	RD::ComputeListID compute_list = RD::get_singleton()->compute_list_begin();
	RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, copy.pipelines[p_8bit_dst ? COPY_MODE_SET_COLOR_8BIT : COPY_MODE_SET_COLOR]);
	RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_dest_texture), 3);
	RD::get_singleton()->compute_list_set_push_constant(compute_list, &copy.push_constant, sizeof(CopyPushConstant));
	RD::get_singleton()->compute_list_dispatch_threads(compute_list, p_region.size.width, p_region.size.height, 1);
	RD::get_singleton()->compute_list_end();
}

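// Separable Gaussian blur in compute: a horizontal pass writes into
// p_back_texture, a barrier is inserted, then a vertical pass writes the
// result back into p_texture.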
void EffectsRD::gaussian_blur(RID p_source_rd_texture, RID p_texture, RID p_back_texture, const Rect2i &p_region, bool p_8bit_dst) {
	ERR_FAIL_COND_MSG(prefer_raster_effects, "Can't use the compute version of the gaussian blur with the mobile renderer.");
	memset(&copy.push_constant, 0, sizeof(CopyPushConstant));
	uint32_t base_flags = 0;
	copy.push_constant.section[0] = p_region.position.x;
	copy.push_constant.section[1] = p_region.position.y;
	copy.push_constant.section[2] = p_region.size.width;
	copy.push_constant.section[3] = p_region.size.height;
	//HORIZONTAL
	RD::ComputeListID compute_list = RD::get_singleton()->compute_list_begin();
	RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, copy.pipelines[p_8bit_dst ? COPY_MODE_GAUSSIAN_COPY_8BIT : COPY_MODE_GAUSSIAN_COPY]);
	RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_source_rd_texture), 0);
	RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_back_texture), 3);
	copy.push_constant.flags = base_flags | COPY_FLAG_HORIZONTAL;
	RD::get_singleton()->compute_list_set_push_constant(compute_list, &copy.push_constant, sizeof(CopyPushConstant));
	RD::get_singleton()->compute_list_dispatch_threads(compute_list, p_region.size.width, p_region.size.height, 1);
	RD::get_singleton()->compute_list_add_barrier(compute_list);
	//VERTICAL
	RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_back_texture), 0);
	RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_texture), 3);
	copy.push_constant.flags = base_flags;
	RD::get_singleton()->compute_list_set_push_constant(compute_list, &copy.push_constant, sizeof(CopyPushConstant));
	RD::get_singleton()->compute_list_dispatch_threads(compute_list, p_region.size.width, p_region.size.height, 1);
	RD::get_singleton()->compute_list_end();
}

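// Runs one pass of the compute-based gaussian glow. When p_first_pass is set
// and an auto-exposure texture is supplied, that texture is bound and the
// GLOW_FIRST_PASS flag is raised so the shader can apply exposure on input.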
void EffectsRD::gaussian_glow(RID p_source_rd_texture, RID p_back_texture, const Size2i &p_size, float p_strength, bool p_high_quality, bool p_first_pass, float p_luminance_cap, float p_exposure, float p_bloom, float p_hdr_bleed_treshold, float p_hdr_bleed_scale, RID p_auto_exposure, float p_auto_exposure_grey) {
	ERR_FAIL_COND_MSG(prefer_raster_effects, "Can't use the compute version of the gaussian glow with the mobile renderer.");
	memset(&copy.push_constant, 0, sizeof(CopyPushConstant));
	CopyMode copy_mode = p_first_pass && p_auto_exposure.is_valid() ? COPY_MODE_GAUSSIAN_GLOW_AUTO_EXPOSURE : COPY_MODE_GAUSSIAN_GLOW;
	uint32_t base_flags = 0;
	copy.push_constant.section[2] = p_size.x;
	copy.push_constant.section[3] = p_size.y;
	copy.push_constant.glow_strength = p_strength;
	copy.push_constant.glow_bloom = p_bloom;
	copy.push_constant.glow_hdr_threshold = p_hdr_bleed_treshold;
	copy.push_constant.glow_hdr_scale = p_hdr_bleed_scale;
	copy.push_constant.glow_exposure = p_exposure;
	copy.push_constant.glow_white = 0; //actually unused
	copy.push_constant.glow_luminance_cap = p_luminance_cap;
	copy.push_constant.glow_auto_exposure_grey = p_auto_exposure_grey; //also unused
	RD::ComputeListID compute_list = RD::get_singleton()->compute_list_begin();
	RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, copy.pipelines[copy_mode]);
	RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_source_rd_texture), 0);
	RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_back_texture), 3);
	if (p_auto_exposure.is_valid() && p_first_pass) {
		RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_auto_exposure), 1);
	}
	copy.push_constant.flags = base_flags | (p_first_pass ? COPY_FLAG_GLOW_FIRST_PASS : 0) | (p_high_quality ? COPY_FLAG_HIGH_QUALITY_GLOW : 0);
	RD::get_singleton()->compute_list_set_push_constant(compute_list, &copy.push_constant, sizeof(CopyPushConstant));
	RD::get_singleton()->compute_list_dispatch_threads(compute_list, p_size.width, p_size.height, 1);
	RD::get_singleton()->compute_list_end();
}

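// Raster (mobile renderer) version of the glow pass: a horizontal blur into
// the half-resolution framebuffer, then a vertical blur from that
// half-resolution texture into the destination framebuffer.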
void EffectsRD::gaussian_glow_raster(RID p_source_rd_texture, RID p_framebuffer_half, RID p_rd_texture_half, RID p_dest_framebuffer, const Vector2 &p_pixel_size, float p_strength, bool p_high_quality, bool p_first_pass, float p_luminance_cap, float p_exposure, float p_bloom, float p_hdr_bleed_treshold, float p_hdr_bleed_scale, RID p_auto_exposure, float p_auto_exposure_grey) {
	ERR_FAIL_COND_MSG(!prefer_raster_effects, "Can't use the raster version of the gaussian glow with the clustered renderer.");
	memset(&blur_raster.push_constant, 0, sizeof(BlurRasterPushConstant));
	BlurRasterMode blur_mode = p_first_pass && p_auto_exposure.is_valid() ? BLUR_MODE_GAUSSIAN_GLOW_AUTO_EXPOSURE : BLUR_MODE_GAUSSIAN_GLOW;
	uint32_t base_flags = 0;
	blur_raster.push_constant.pixel_size[0] = p_pixel_size.x;
	blur_raster.push_constant.pixel_size[1] = p_pixel_size.y;
	blur_raster.push_constant.glow_strength = p_strength;
	blur_raster.push_constant.glow_bloom = p_bloom;
	blur_raster.push_constant.glow_hdr_threshold = p_hdr_bleed_treshold;
	blur_raster.push_constant.glow_hdr_scale = p_hdr_bleed_scale;
	blur_raster.push_constant.glow_exposure = p_exposure;
	blur_raster.push_constant.glow_white = 0; //actually unused
	blur_raster.push_constant.glow_luminance_cap = p_luminance_cap;
	blur_raster.push_constant.glow_auto_exposure_grey = p_auto_exposure_grey; //also unused
	//HORIZONTAL
	RD::DrawListID draw_list = RD::get_singleton()->draw_list_begin(p_framebuffer_half, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_READ, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_DISCARD);
	RD::get_singleton()->draw_list_bind_render_pipeline(draw_list, blur_raster.pipelines[blur_mode].get_render_pipeline(RD::INVALID_ID, RD::get_singleton()->framebuffer_get_format(p_framebuffer_half)));
	RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_source_rd_texture), 0);
	if (p_auto_exposure.is_valid() && p_first_pass) {
		RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_auto_exposure), 1);
	}
	RD::get_singleton()->draw_list_bind_index_array(draw_list, index_array);
	blur_raster.push_constant.flags = base_flags | BLUR_FLAG_HORIZONTAL | (p_first_pass ? BLUR_FLAG_GLOW_FIRST_PASS : 0);
	RD::get_singleton()->draw_list_set_push_constant(draw_list, &blur_raster.push_constant, sizeof(BlurRasterPushConstant));
	RD::get_singleton()->draw_list_draw(draw_list, true);
	RD::get_singleton()->draw_list_end();
	blur_mode = BLUR_MODE_GAUSSIAN_GLOW;
	//VERTICAL
	draw_list = RD::get_singleton()->draw_list_begin(p_dest_framebuffer, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_READ, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_DISCARD);
	RD::get_singleton()->draw_list_bind_render_pipeline(draw_list, blur_raster.pipelines[blur_mode].get_render_pipeline(RD::INVALID_ID, RD::get_singleton()->framebuffer_get_format(p_dest_framebuffer)));
	RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_rd_texture_half), 0);
	RD::get_singleton()->draw_list_bind_index_array(draw_list, index_array);
	blur_raster.push_constant.flags = base_flags;
	RD::get_singleton()->draw_list_set_push_constant(draw_list, &blur_raster.push_constant, sizeof(BlurRasterPushConstant));
	RD::get_singleton()->draw_list_draw(draw_list, true);
	RD::get_singleton()->draw_list_end();
}

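// Screen-space reflections. The effect runs inside a single compute list:
// 1. scale color, depth and normal/roughness down to half resolution,
// 2. trace the reflection rays,
// 3. if roughness support is enabled, blur the result in two separable passes.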
void EffectsRD::screen_space_reflection(RID p_diffuse, RID p_normal_roughness, RenderingServer::EnvironmentSSRRoughnessQuality p_roughness_quality, RID p_blur_radius, RID p_blur_radius2, RID p_metallic, const Color &p_metallic_mask, RID p_depth, RID p_scale_depth, RID p_scale_normal, RID p_output, RID p_output_blur, const Size2i &p_screen_size, int p_max_steps, float p_fade_in, float p_fade_out, float p_tolerance, const CameraMatrix &p_camera) {
	RD::ComputeListID compute_list = RD::get_singleton()->compute_list_begin();
	{ //scale color and depth to half
		ssr_scale.push_constant.camera_z_far = p_camera.get_z_far();
		ssr_scale.push_constant.camera_z_near = p_camera.get_z_near();
		ssr_scale.push_constant.orthogonal = p_camera.is_orthogonal();
		ssr_scale.push_constant.filter = false; //enabling causes artifacts
		ssr_scale.push_constant.screen_size[0] = p_screen_size.x;
		ssr_scale.push_constant.screen_size[1] = p_screen_size.y;
		RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, ssr_scale.pipeline);
		RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_diffuse), 0);
		RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture_pair(p_depth, p_normal_roughness), 1);
		RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_output_blur), 2);
		RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_image_pair(p_scale_depth, p_scale_normal), 3);
		RD::get_singleton()->compute_list_set_push_constant(compute_list, &ssr_scale.push_constant, sizeof(ScreenSpaceReflectionScalePushConstant));
		RD::get_singleton()->compute_list_dispatch_threads(compute_list, p_screen_size.width, p_screen_size.height, 1);
		RD::get_singleton()->compute_list_add_barrier(compute_list);
	}
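	// Trace pass. proj_info packs the projection-matrix terms needed to
	// reconstruct view-space positions from screen coordinates and depth
	// (an assumption based on how the values are derived here; the SSR shader
	// itself lives outside this file).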
	{
		ssr.push_constant.camera_z_far = p_camera.get_z_far();
		ssr.push_constant.camera_z_near = p_camera.get_z_near();
		ssr.push_constant.orthogonal = p_camera.is_orthogonal();
		ssr.push_constant.screen_size[0] = p_screen_size.x;
		ssr.push_constant.screen_size[1] = p_screen_size.y;
		ssr.push_constant.curve_fade_in = p_fade_in;
		ssr.push_constant.distance_fade = p_fade_out;
		ssr.push_constant.num_steps = p_max_steps;
		ssr.push_constant.depth_tolerance = p_tolerance;
		ssr.push_constant.use_half_res = true;
		ssr.push_constant.proj_info[0] = -2.0f / (p_screen_size.width * p_camera.matrix[0][0]);
		ssr.push_constant.proj_info[1] = -2.0f / (p_screen_size.height * p_camera.matrix[1][1]);
		ssr.push_constant.proj_info[2] = (1.0f - p_camera.matrix[0][2]) / p_camera.matrix[0][0];
		ssr.push_constant.proj_info[3] = (1.0f + p_camera.matrix[1][2]) / p_camera.matrix[1][1];
		ssr.push_constant.metallic_mask[0] = CLAMP(p_metallic_mask.r * 255.0, 0, 255);
		ssr.push_constant.metallic_mask[1] = CLAMP(p_metallic_mask.g * 255.0, 0, 255);
		ssr.push_constant.metallic_mask[2] = CLAMP(p_metallic_mask.b * 255.0, 0, 255);
		ssr.push_constant.metallic_mask[3] = CLAMP(p_metallic_mask.a * 255.0, 0, 255);
		store_camera(p_camera, ssr.push_constant.projection);
		RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, ssr.pipelines[(p_roughness_quality != RS::ENV_SSR_ROUGNESS_QUALITY_DISABLED) ? SCREEN_SPACE_REFLECTION_ROUGH : SCREEN_SPACE_REFLECTION_NORMAL]);
		RD::get_singleton()->compute_list_set_push_constant(compute_list, &ssr.push_constant, sizeof(ScreenSpaceReflectionPushConstant));
		RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_image_pair(p_output_blur, p_scale_depth), 0);
		if (p_roughness_quality != RS::ENV_SSR_ROUGNESS_QUALITY_DISABLED) {
			RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_image_pair(p_output, p_blur_radius), 1);
		} else {
			RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_output), 1);
		}
		RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_metallic), 3);
		RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_scale_normal), 2);
		RD::get_singleton()->compute_list_dispatch_threads(compute_list, p_screen_size.width, p_screen_size.height, 1);
	}
	if (p_roughness_quality != RS::ENV_SSR_ROUGNESS_QUALITY_DISABLED) {
		//blur
		RD::get_singleton()->compute_list_add_barrier(compute_list);
		ssr_filter.push_constant.orthogonal = p_camera.is_orthogonal();
		ssr_filter.push_constant.edge_tolerance = Math::sin(Math::deg2rad(15.0));
		ssr_filter.push_constant.proj_info[0] = -2.0f / (p_screen_size.width * p_camera.matrix[0][0]);
		ssr_filter.push_constant.proj_info[1] = -2.0f / (p_screen_size.height * p_camera.matrix[1][1]);
		ssr_filter.push_constant.proj_info[2] = (1.0f - p_camera.matrix[0][2]) / p_camera.matrix[0][0];
		ssr_filter.push_constant.proj_info[3] = (1.0f + p_camera.matrix[1][2]) / p_camera.matrix[1][1];
		ssr_filter.push_constant.vertical = 0;
		if (p_roughness_quality == RS::ENV_SSR_ROUGNESS_QUALITY_LOW) {
			ssr_filter.push_constant.steps = p_max_steps / 3;
			ssr_filter.push_constant.increment = 3;
		} else if (p_roughness_quality == RS::ENV_SSR_ROUGNESS_QUALITY_MEDIUM) {
			ssr_filter.push_constant.steps = p_max_steps / 2;
			ssr_filter.push_constant.increment = 2;
		} else {
			ssr_filter.push_constant.steps = p_max_steps;
			ssr_filter.push_constant.increment = 1;
		}
		ssr_filter.push_constant.screen_size[0] = p_screen_size.width;
		ssr_filter.push_constant.screen_size[1] = p_screen_size.height;
		RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, ssr_filter.pipelines[SCREEN_SPACE_REFLECTION_FILTER_HORIZONTAL]);
		RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_image_pair(p_output, p_blur_radius), 0);
		RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_scale_normal), 1);
		RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_image_pair(p_output_blur, p_blur_radius2), 2);
		RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_scale_depth), 3);
		RD::get_singleton()->compute_list_set_push_constant(compute_list, &ssr_filter.push_constant, sizeof(ScreenSpaceReflectionFilterPushConstant));
		RD::get_singleton()->compute_list_dispatch_threads(compute_list, p_screen_size.width, p_screen_size.height, 1);
		RD::get_singleton()->compute_list_add_barrier(compute_list);
		RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, ssr_filter.pipelines[SCREEN_SPACE_REFLECTION_FILTER_VERTICAL]);
		RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_image_pair(p_output_blur, p_blur_radius2), 0);
		RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_scale_normal), 1);
		RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_output), 2);
		RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_scale_depth), 3);
		ssr_filter.push_constant.vertical = 1;
		RD::get_singleton()->compute_list_set_push_constant(compute_list, &ssr_filter.push_constant, sizeof(ScreenSpaceReflectionFilterPushConstant));
		RD::get_singleton()->compute_list_dispatch_threads(compute_list, p_screen_size.width, p_screen_size.height, 1);
	}
	RD::get_singleton()->compute_list_end();
}

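// Separable screen-space subsurface scattering blur (horizontal, then
// vertical). unit_size is obtained by pushing a unit offset through the
// projection matrix, which gives the projected size of one world unit at unit
// depth, as implied by the xform4 math below.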
void EffectsRD::sub_surface_scattering(RID p_diffuse, RID p_diffuse2, RID p_depth, const CameraMatrix &p_camera, const Size2i &p_screen_size, float p_scale, float p_depth_scale, RenderingServer::SubSurfaceScatteringQuality p_quality) {
	RD::ComputeListID compute_list = RD::get_singleton()->compute_list_begin();
	Plane p = p_camera.xform4(Plane(1, 0, -1, 1));
	p.normal /= p.d;
	float unit_size = p.normal.x;
	{ //horizontal pass, then vertical
		sss.push_constant.camera_z_far = p_camera.get_z_far();
		sss.push_constant.camera_z_near = p_camera.get_z_near();
		sss.push_constant.orthogonal = p_camera.is_orthogonal();
		sss.push_constant.unit_size = unit_size;
		sss.push_constant.screen_size[0] = p_screen_size.x;
		sss.push_constant.screen_size[1] = p_screen_size.y;
		sss.push_constant.vertical = false;
		sss.push_constant.scale = p_scale;
		sss.push_constant.depth_scale = p_depth_scale;
		RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, sss.pipelines[p_quality - 1]);
		RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_diffuse), 0);
		RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_diffuse2), 1);
		RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_depth), 2);
		RD::get_singleton()->compute_list_set_push_constant(compute_list, &sss.push_constant, sizeof(SubSurfaceScatteringPushConstant));
		RD::get_singleton()->compute_list_dispatch_threads(compute_list, p_screen_size.width, p_screen_size.height, 1);
		RD::get_singleton()->compute_list_add_barrier(compute_list);
		RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_diffuse2), 0);
		RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_diffuse), 1);
		RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_depth), 2);
		sss.push_constant.vertical = true;
		RD::get_singleton()->compute_list_set_push_constant(compute_list, &sss.push_constant, sizeof(SubSurfaceScatteringPushConstant));
		RD::get_singleton()->compute_list_dispatch_threads(compute_list, p_screen_size.width, p_screen_size.height, 1);
		RD::get_singleton()->compute_list_end();
	}
}

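// Composites the specular/SSR buffers over the base color. The pipeline
// variant is picked from which inputs are provided: with or without an SSR
// reflection buffer, and additive-only when no base color is given.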
void EffectsRD::merge_specular(RID p_dest_framebuffer, RID p_specular, RID p_base, RID p_reflection) {
	RD::DrawListID draw_list = RD::get_singleton()->draw_list_begin(p_dest_framebuffer, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_READ, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_DISCARD, Vector<Color>());
	if (p_reflection.is_valid()) {
		if (p_base.is_valid()) {
			RD::get_singleton()->draw_list_bind_render_pipeline(draw_list, specular_merge.pipelines[SPECULAR_MERGE_SSR].get_render_pipeline(RD::INVALID_ID, RD::get_singleton()->framebuffer_get_format(p_dest_framebuffer)));
			RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_base), 2);
		} else {
			RD::get_singleton()->draw_list_bind_render_pipeline(draw_list, specular_merge.pipelines[SPECULAR_MERGE_ADDITIVE_SSR].get_render_pipeline(RD::INVALID_ID, RD::get_singleton()->framebuffer_get_format(p_dest_framebuffer)));
		}
		RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_specular), 0);
		RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_reflection), 1);
	} else {
		if (p_base.is_valid()) {
			RD::get_singleton()->draw_list_bind_render_pipeline(draw_list, specular_merge.pipelines[SPECULAR_MERGE_ADD].get_render_pipeline(RD::INVALID_ID, RD::get_singleton()->framebuffer_get_format(p_dest_framebuffer)));
			RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_base), 2);
		} else {
			RD::get_singleton()->draw_list_bind_render_pipeline(draw_list, specular_merge.pipelines[SPECULAR_MERGE_ADDITIVE_ADD].get_render_pipeline(RD::INVALID_ID, RD::get_singleton()->framebuffer_get_format(p_dest_framebuffer)));
		}
		RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_specular), 0);
	}
	RD::get_singleton()->draw_list_bind_index_array(draw_list, index_array);
	RD::get_singleton()->draw_list_draw(draw_list, true);
	RD::get_singleton()->draw_list_end();
}

void EffectsRD::make_mipmap(RID p_source_rd_texture, RID p_dest_texture, const Size2i &p_size) {
	memset(&copy.push_constant, 0, sizeof(CopyPushConstant));
	copy.push_constant.section[0] = 0;
	copy.push_constant.section[1] = 0;
	copy.push_constant.section[2] = p_size.width;
	copy.push_constant.section[3] = p_size.height;
	RD::ComputeListID compute_list = RD::get_singleton()->compute_list_begin();
	RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, copy.pipelines[COPY_MODE_MIPMAP]);
	RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_source_rd_texture), 0);
	RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_dest_texture), 3);
	RD::get_singleton()->compute_list_set_push_constant(compute_list, &copy.push_constant, sizeof(CopyPushConstant));
	RD::get_singleton()->compute_list_dispatch_threads(compute_list, p_size.width, p_size.height, 1);
	RD::get_singleton()->compute_list_end();
}

void EffectsRD::make_mipmap_raster(RID p_source_rd_texture, RID p_dest_framebuffer, const Size2i &p_size) {
	ERR_FAIL_COND_MSG(!prefer_raster_effects, "Can't use the raster version of mipmap with the clustered renderer.");
	memset(&blur_raster.push_constant, 0, sizeof(BlurRasterPushConstant));
	BlurRasterMode mode = BLUR_MIPMAP;
	blur_raster.push_constant.pixel_size[0] = 1.0 / float(p_size.x);
	blur_raster.push_constant.pixel_size[1] = 1.0 / float(p_size.y);
	RD::DrawListID draw_list = RD::get_singleton()->draw_list_begin(p_dest_framebuffer, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_READ, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_DISCARD);
	RD::get_singleton()->draw_list_bind_render_pipeline(draw_list, blur_raster.pipelines[mode].get_render_pipeline(RD::INVALID_ID, RD::get_singleton()->framebuffer_get_format(p_dest_framebuffer)));
	RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_source_rd_texture), 0);
	RD::get_singleton()->draw_list_bind_index_array(draw_list, index_array);
	RD::get_singleton()->draw_list_set_push_constant(draw_list, &blur_raster.push_constant, sizeof(BlurRasterPushConstant));
	RD::get_singleton()->draw_list_draw(draw_list, true);
	RD::get_singleton()->draw_list_end();
}

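// Renders a cubemap into a dual-paraboloid depth target. The paraboloid flip
// is passed by negating texel_size[0] rather than through a dedicated flag.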
void EffectsRD::copy_cubemap_to_dp(RID p_source_rd_texture, RID p_dst_framebuffer, const Rect2 &p_rect, const Vector2 &p_dst_size, float p_z_near, float p_z_far, bool p_dp_flip) {
	CopyToDPPushConstant push_constant;
	push_constant.screen_rect[0] = p_rect.position.x;
	push_constant.screen_rect[1] = p_rect.position.y;
	push_constant.screen_rect[2] = p_rect.size.width;
	push_constant.screen_rect[3] = p_rect.size.height;
	push_constant.z_far = p_z_far;
	push_constant.z_near = p_z_near;
	push_constant.texel_size[0] = 1.0f / p_dst_size.x;
	push_constant.texel_size[1] = 1.0f / p_dst_size.y;
	push_constant.texel_size[0] *= p_dp_flip ? -1.0f : 1.0f; // Encode dp flip as x size sign
	RD::DrawListID draw_list = RD::get_singleton()->draw_list_begin(p_dst_framebuffer, RD::INITIAL_ACTION_DROP, RD::FINAL_ACTION_DISCARD, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_READ);
	RD::get_singleton()->draw_list_bind_render_pipeline(draw_list, cube_to_dp.pipeline.get_render_pipeline(RD::INVALID_ID, RD::get_singleton()->framebuffer_get_format(p_dst_framebuffer)));
	RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_source_rd_texture), 0);
	RD::get_singleton()->draw_list_bind_index_array(draw_list, index_array);
	RD::get_singleton()->draw_list_set_push_constant(draw_list, &push_constant, sizeof(CopyToDPPushConstant));
	RD::get_singleton()->draw_list_draw(draw_list, true);
	RD::get_singleton()->draw_list_end(RD::BARRIER_MASK_RASTER | RD::BARRIER_MASK_TRANSFER);
}

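// Full-screen tonemap pass. The pipeline index starts from the glow filter
// choice and is then offset for the 1D LUT (+2) and multiview (+6) variants;
// these offsets assume the ordering of the TonemapMode enum declared in
// effects_rd.h (not shown here).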
  633. void EffectsRD::tonemapper(RID p_source_color, RID p_dst_framebuffer, const TonemapSettings &p_settings) {
  634. memset(&tonemap.push_constant, 0, sizeof(TonemapPushConstant));
  635. tonemap.push_constant.use_bcs = p_settings.use_bcs;
  636. tonemap.push_constant.bcs[0] = p_settings.brightness;
  637. tonemap.push_constant.bcs[1] = p_settings.contrast;
  638. tonemap.push_constant.bcs[2] = p_settings.saturation;
  639. tonemap.push_constant.use_glow = p_settings.use_glow;
  640. tonemap.push_constant.glow_intensity = p_settings.glow_intensity;
  641. tonemap.push_constant.glow_levels[0] = p_settings.glow_levels[0]; // clean this up to just pass by pointer or something
  642. tonemap.push_constant.glow_levels[1] = p_settings.glow_levels[1];
  643. tonemap.push_constant.glow_levels[2] = p_settings.glow_levels[2];
  644. tonemap.push_constant.glow_levels[3] = p_settings.glow_levels[3];
  645. tonemap.push_constant.glow_levels[4] = p_settings.glow_levels[4];
  646. tonemap.push_constant.glow_levels[5] = p_settings.glow_levels[5];
  647. tonemap.push_constant.glow_levels[6] = p_settings.glow_levels[6];
  648. tonemap.push_constant.glow_texture_size[0] = p_settings.glow_texture_size.x;
  649. tonemap.push_constant.glow_texture_size[1] = p_settings.glow_texture_size.y;
  650. tonemap.push_constant.glow_mode = p_settings.glow_mode;
  651. int mode = p_settings.glow_use_bicubic_upscale ? TONEMAP_MODE_BICUBIC_GLOW_FILTER : TONEMAP_MODE_NORMAL;
  652. if (p_settings.use_1d_color_correction) {
  653. mode += 2;
  654. }
  655. tonemap.push_constant.tonemapper = p_settings.tonemap_mode;
  656. tonemap.push_constant.use_auto_exposure = p_settings.use_auto_exposure;
  657. tonemap.push_constant.exposure = p_settings.exposure;
  658. tonemap.push_constant.white = p_settings.white;
  659. tonemap.push_constant.auto_exposure_grey = p_settings.auto_exposure_grey;
  660. tonemap.push_constant.use_color_correction = p_settings.use_color_correction;
  661. tonemap.push_constant.use_fxaa = p_settings.use_fxaa;
  662. tonemap.push_constant.use_debanding = p_settings.use_debanding;
  663. tonemap.push_constant.pixel_size[0] = 1.0 / p_settings.texture_size.x;
  664. tonemap.push_constant.pixel_size[1] = 1.0 / p_settings.texture_size.y;
  665. if (p_settings.view_count > 1) {
  666. // Use MULTIVIEW versions
  667. mode += 6;
  668. }
  669. RD::DrawListID draw_list = RD::get_singleton()->draw_list_begin(p_dst_framebuffer, RD::INITIAL_ACTION_DROP, RD::FINAL_ACTION_READ, RD::INITIAL_ACTION_DROP, RD::FINAL_ACTION_DISCARD);
  670. RD::get_singleton()->draw_list_bind_render_pipeline(draw_list, tonemap.pipelines[mode].get_render_pipeline(RD::INVALID_ID, RD::get_singleton()->framebuffer_get_format(p_dst_framebuffer), false, RD::get_singleton()->draw_list_get_current_pass()));
  671. RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_source_color), 0);
  672. RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_settings.exposure_texture), 1);
  673. RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_settings.glow_texture, true), 2);
  674. RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_settings.color_correction_texture), 3);
  675. RD::get_singleton()->draw_list_bind_index_array(draw_list, index_array);
  676. RD::get_singleton()->draw_list_set_push_constant(draw_list, &tonemap.push_constant, sizeof(TonemapPushConstant));
  677. RD::get_singleton()->draw_list_draw(draw_list, true);
  678. RD::get_singleton()->draw_list_end();
  679. }
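// Subpass variant of the tonemapper: the source color is read as a subpass input, so glow (which needs to sample neighboring pixels) is not supported; the exposure and glow uniform sets are still bound (callers are expected to pass default textures, they are ignored).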
  680. void EffectsRD::tonemapper(RD::DrawListID p_subpass_draw_list, RID p_source_color, RD::FramebufferFormatID p_dst_format_id, const TonemapSettings &p_settings) {
  681. memset(&tonemap.push_constant, 0, sizeof(TonemapPushConstant));
  682. tonemap.push_constant.use_bcs = p_settings.use_bcs;
  683. tonemap.push_constant.bcs[0] = p_settings.brightness;
  684. tonemap.push_constant.bcs[1] = p_settings.contrast;
  685. tonemap.push_constant.bcs[2] = p_settings.saturation;
  686. ERR_FAIL_COND_MSG(p_settings.use_glow, "Glow is not supported when using subpasses.");
  687. tonemap.push_constant.use_glow = p_settings.use_glow;
  688. int mode = p_settings.use_1d_color_correction ? TONEMAP_MODE_SUBPASS_1D_LUT : TONEMAP_MODE_SUBPASS;
  689. if (p_settings.view_count > 1) {
  690. // Use MULTIVIEW versions
  691. mode += 6;
  692. }
  693. tonemap.push_constant.tonemapper = p_settings.tonemap_mode;
  694. tonemap.push_constant.use_auto_exposure = p_settings.use_auto_exposure;
  695. tonemap.push_constant.exposure = p_settings.exposure;
  696. tonemap.push_constant.white = p_settings.white;
  697. tonemap.push_constant.auto_exposure_grey = p_settings.auto_exposure_grey;
  698. tonemap.push_constant.use_color_correction = p_settings.use_color_correction;
  699. tonemap.push_constant.use_debanding = p_settings.use_debanding;
  700. RD::get_singleton()->draw_list_bind_render_pipeline(p_subpass_draw_list, tonemap.pipelines[mode].get_render_pipeline(RD::INVALID_ID, p_dst_format_id, false, RD::get_singleton()->draw_list_get_current_pass()));
  701. RD::get_singleton()->draw_list_bind_uniform_set(p_subpass_draw_list, _get_uniform_set_for_input(p_source_color), 0);
  702. RD::get_singleton()->draw_list_bind_uniform_set(p_subpass_draw_list, _get_uniform_set_from_texture(p_settings.exposure_texture), 1); // should be set to a default texture, it's ignored
  703. RD::get_singleton()->draw_list_bind_uniform_set(p_subpass_draw_list, _get_uniform_set_from_texture(p_settings.glow_texture, true), 2); // should be set to a default texture, it's ignored
  704. RD::get_singleton()->draw_list_bind_uniform_set(p_subpass_draw_list, _get_uniform_set_from_texture(p_settings.color_correction_texture), 3);
  705. RD::get_singleton()->draw_list_bind_index_array(p_subpass_draw_list, index_array);
  706. RD::get_singleton()->draw_list_set_push_constant(p_subpass_draw_list, &tonemap.push_constant, sizeof(TonemapPushConstant));
  707. RD::get_singleton()->draw_list_draw(p_subpass_draw_list, true);
  708. }
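// Compute-based luminance reduction for auto-exposure: each iteration reduces the image by 8x8 until it reaches 1x1; when p_set is false the last iteration also reads the previous luminance, presumably so exposure can adapt gradually (p_adjust).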
  709. void EffectsRD::luminance_reduction(RID p_source_texture, const Size2i p_source_size, const Vector<RID> p_reduce, RID p_prev_luminance, float p_min_luminance, float p_max_luminance, float p_adjust, bool p_set) {
  710. ERR_FAIL_COND_MSG(prefer_raster_effects, "Can't use compute version of luminance reduction with the mobile renderer.");
  711. luminance_reduce.push_constant.source_size[0] = p_source_size.x;
  712. luminance_reduce.push_constant.source_size[1] = p_source_size.y;
  713. luminance_reduce.push_constant.max_luminance = p_max_luminance;
  714. luminance_reduce.push_constant.min_luminance = p_min_luminance;
  715. luminance_reduce.push_constant.exposure_adjust = p_adjust;
  716. RD::ComputeListID compute_list = RD::get_singleton()->compute_list_begin();
  717. for (int i = 0; i < p_reduce.size(); i++) {
  718. if (i == 0) {
  719. RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, luminance_reduce.pipelines[LUMINANCE_REDUCE_READ]);
  720. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_source_texture), 0);
  721. } else {
  722. RD::get_singleton()->compute_list_add_barrier(compute_list); //needs barrier, wait until previous is done
  723. if (i == p_reduce.size() - 1 && !p_set) {
  724. RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, luminance_reduce.pipelines[LUMINANCE_REDUCE_WRITE]);
  725. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_prev_luminance), 2);
  726. } else {
  727. RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, luminance_reduce.pipelines[LUMINANCE_REDUCE]);
  728. }
  729. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_reduce[i - 1]), 0);
  730. }
  731. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_reduce[i]), 1);
  732. RD::get_singleton()->compute_list_set_push_constant(compute_list, &luminance_reduce.push_constant, sizeof(LuminanceReducePushConstant));
  733. RD::get_singleton()->compute_list_dispatch_threads(compute_list, luminance_reduce.push_constant.source_size[0], luminance_reduce.push_constant.source_size[1], 1);
  734. luminance_reduce.push_constant.source_size[0] = MAX(luminance_reduce.push_constant.source_size[0] / 8, 1);
  735. luminance_reduce.push_constant.source_size[1] = MAX(luminance_reduce.push_constant.source_size[1] / 8, 1);
  736. }
  737. RD::get_singleton()->compute_list_end();
  738. }
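// Raster version of the luminance reduction used by the mobile renderer: the same 8x8 reduction, implemented as a chain of fullscreen draws into progressively smaller framebuffers (p_reduce/p_fb pairs).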
  739. void EffectsRD::luminance_reduction_raster(RID p_source_texture, const Size2i p_source_size, const Vector<RID> p_reduce, Vector<RID> p_fb, RID p_prev_luminance, float p_min_luminance, float p_max_luminance, float p_adjust, bool p_set) {
  740. ERR_FAIL_COND_MSG(!prefer_raster_effects, "Can't use raster version of luminance reduction with the clustered renderer.");
741. ERR_FAIL_COND_MSG(p_reduce.size() != p_fb.size(), "Incorrect framebuffer count for luminance reduction.");
  742. luminance_reduce_raster.push_constant.max_luminance = p_max_luminance;
  743. luminance_reduce_raster.push_constant.min_luminance = p_min_luminance;
  744. luminance_reduce_raster.push_constant.exposure_adjust = p_adjust;
  745. for (int i = 0; i < p_reduce.size(); i++) {
  746. luminance_reduce_raster.push_constant.source_size[0] = i == 0 ? p_source_size.x : luminance_reduce_raster.push_constant.dest_size[0];
  747. luminance_reduce_raster.push_constant.source_size[1] = i == 0 ? p_source_size.y : luminance_reduce_raster.push_constant.dest_size[1];
  748. luminance_reduce_raster.push_constant.dest_size[0] = MAX(luminance_reduce_raster.push_constant.source_size[0] / 8, 1);
  749. luminance_reduce_raster.push_constant.dest_size[1] = MAX(luminance_reduce_raster.push_constant.source_size[1] / 8, 1);
  750. bool final = !p_set && (luminance_reduce_raster.push_constant.dest_size[0] == 1) && (luminance_reduce_raster.push_constant.dest_size[1] == 1);
  751. LuminanceReduceRasterMode mode = final ? LUMINANCE_REDUCE_FRAGMENT_FINAL : (i == 0 ? LUMINANCE_REDUCE_FRAGMENT_FIRST : LUMINANCE_REDUCE_FRAGMENT);
  752. RD::DrawListID draw_list = RD::get_singleton()->draw_list_begin(p_fb[i], RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_READ, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_DISCARD);
  753. RD::get_singleton()->draw_list_bind_render_pipeline(draw_list, luminance_reduce_raster.pipelines[mode].get_render_pipeline(RD::INVALID_ID, RD::get_singleton()->framebuffer_get_format(p_fb[i])));
  754. RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(i == 0 ? p_source_texture : p_reduce[i - 1]), 0);
  755. if (final) {
  756. RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_prev_luminance), 1);
  757. }
  758. RD::get_singleton()->draw_list_bind_index_array(draw_list, index_array);
  759. RD::get_singleton()->draw_list_set_push_constant(draw_list, &luminance_reduce_raster.push_constant, sizeof(LuminanceReduceRasterPushConstant));
  760. RD::get_singleton()->draw_list_draw(draw_list, true);
  761. RD::get_singleton()->draw_list_end();
  762. }
  763. }
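// Compute-based bokeh depth of field: a first pass writes the circle-of-confusion size, box/hexagon shapes then run two blur passes (at half resolution for the low quality settings) while the circular shape runs a single half-resolution pass; half-resolution paths end with a composite/upscale back to full size.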
  764. void EffectsRD::bokeh_dof(const BokehBuffers &p_buffers, bool p_dof_far, float p_dof_far_begin, float p_dof_far_size, bool p_dof_near, float p_dof_near_begin, float p_dof_near_size, float p_bokeh_size, RenderingServer::DOFBokehShape p_bokeh_shape, RS::DOFBlurQuality p_quality, bool p_use_jitter, float p_cam_znear, float p_cam_zfar, bool p_cam_orthogonal) {
  765. ERR_FAIL_COND_MSG(prefer_raster_effects, "Can't use compute version of BOKEH DOF with the mobile renderer.");
  766. bokeh.push_constant.blur_far_active = p_dof_far;
  767. bokeh.push_constant.blur_far_begin = p_dof_far_begin;
  768. bokeh.push_constant.blur_far_end = p_dof_far_begin + p_dof_far_size;
  769. bokeh.push_constant.blur_near_active = p_dof_near;
  770. bokeh.push_constant.blur_near_begin = p_dof_near_begin;
  771. bokeh.push_constant.blur_near_end = MAX(0, p_dof_near_begin - p_dof_near_size);
  772. bokeh.push_constant.use_jitter = p_use_jitter;
  773. bokeh.push_constant.jitter_seed = Math::randf() * 1000.0;
  774. bokeh.push_constant.z_near = p_cam_znear;
  775. bokeh.push_constant.z_far = p_cam_zfar;
  776. bokeh.push_constant.orthogonal = p_cam_orthogonal;
  777. bokeh.push_constant.blur_size = p_bokeh_size;
  778. bokeh.push_constant.second_pass = false;
  779. bokeh.push_constant.half_size = false;
  780. bokeh.push_constant.blur_scale = 0.5;
  781. RD::ComputeListID compute_list = RD::get_singleton()->compute_list_begin();
  782. /* FIRST PASS */
  783. // The alpha channel of the source color texture is filled with the expected circle size
784. // If used for DOF far, the size is positive; if used for near, it's negative.
  785. RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, bokeh.compute_pipelines[BOKEH_GEN_BLUR_SIZE]);
  786. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_buffers.base_texture), 0);
  787. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_buffers.depth_texture), 1);
  788. bokeh.push_constant.size[0] = p_buffers.base_texture_size.x;
  789. bokeh.push_constant.size[1] = p_buffers.base_texture_size.y;
  790. RD::get_singleton()->compute_list_set_push_constant(compute_list, &bokeh.push_constant, sizeof(BokehPushConstant));
  791. RD::get_singleton()->compute_list_dispatch_threads(compute_list, p_buffers.base_texture_size.x, p_buffers.base_texture_size.y, 1);
  792. RD::get_singleton()->compute_list_add_barrier(compute_list);
  793. if (p_bokeh_shape == RS::DOF_BOKEH_BOX || p_bokeh_shape == RS::DOF_BOKEH_HEXAGON) {
  794. //second pass
  795. RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, bokeh.compute_pipelines[p_bokeh_shape == RS::DOF_BOKEH_BOX ? BOKEH_GEN_BOKEH_BOX : BOKEH_GEN_BOKEH_HEXAGONAL]);
  796. static const int quality_samples[4] = { 6, 12, 12, 24 };
  797. bokeh.push_constant.steps = quality_samples[p_quality];
  798. if (p_quality == RS::DOF_BLUR_QUALITY_VERY_LOW || p_quality == RS::DOF_BLUR_QUALITY_LOW) {
799. //box and hexagon are more or less the same; they can work at either half size (very low and low quality) or full size (medium and high quality)
  800. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_buffers.half_texture[0]), 0);
  801. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_buffers.base_texture), 1);
  802. bokeh.push_constant.size[0] = p_buffers.base_texture_size.x >> 1;
  803. bokeh.push_constant.size[1] = p_buffers.base_texture_size.y >> 1;
  804. bokeh.push_constant.half_size = true;
  805. bokeh.push_constant.blur_size *= 0.5;
  806. } else {
  807. //medium and high quality use full size
  808. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_buffers.secondary_texture), 0);
  809. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_buffers.base_texture), 1);
  810. }
  811. RD::get_singleton()->compute_list_set_push_constant(compute_list, &bokeh.push_constant, sizeof(BokehPushConstant));
  812. RD::get_singleton()->compute_list_dispatch_threads(compute_list, bokeh.push_constant.size[0], bokeh.push_constant.size[1], 1);
  813. RD::get_singleton()->compute_list_add_barrier(compute_list);
  814. //third pass
  815. bokeh.push_constant.second_pass = true;
  816. if (p_quality == RS::DOF_BLUR_QUALITY_VERY_LOW || p_quality == RS::DOF_BLUR_QUALITY_LOW) {
  817. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_buffers.half_texture[1]), 0);
  818. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_buffers.half_texture[0]), 1);
  819. } else {
  820. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_buffers.base_texture), 0);
  821. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_buffers.secondary_texture), 1);
  822. }
  823. RD::get_singleton()->compute_list_set_push_constant(compute_list, &bokeh.push_constant, sizeof(BokehPushConstant));
  824. RD::get_singleton()->compute_list_dispatch_threads(compute_list, bokeh.push_constant.size[0], bokeh.push_constant.size[1], 1);
  825. RD::get_singleton()->compute_list_add_barrier(compute_list);
  826. if (p_quality == RS::DOF_BLUR_QUALITY_VERY_LOW || p_quality == RS::DOF_BLUR_QUALITY_LOW) {
827. //fourth pass, upscale for low quality
  828. RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, bokeh.compute_pipelines[BOKEH_COMPOSITE]);
  829. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_buffers.base_texture), 0);
  830. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_buffers.half_texture[1]), 1);
  831. bokeh.push_constant.size[0] = p_buffers.base_texture_size.x;
  832. bokeh.push_constant.size[1] = p_buffers.base_texture_size.y;
  833. bokeh.push_constant.half_size = false;
  834. bokeh.push_constant.second_pass = false;
  835. RD::get_singleton()->compute_list_set_push_constant(compute_list, &bokeh.push_constant, sizeof(BokehPushConstant));
  836. RD::get_singleton()->compute_list_dispatch_threads(compute_list, p_buffers.base_texture_size.x, p_buffers.base_texture_size.y, 1);
  837. }
  838. } else {
  839. //circle
  840. //second pass
  841. RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, bokeh.compute_pipelines[BOKEH_GEN_BOKEH_CIRCULAR]);
  842. static const float quality_scale[4] = { 8.0, 4.0, 1.0, 0.5 };
  843. bokeh.push_constant.steps = 0;
  844. bokeh.push_constant.blur_scale = quality_scale[p_quality];
  845. //circle always runs in half size, otherwise too expensive
  846. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_buffers.half_texture[0]), 0);
  847. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_buffers.base_texture), 1);
  848. bokeh.push_constant.size[0] = p_buffers.base_texture_size.x >> 1;
  849. bokeh.push_constant.size[1] = p_buffers.base_texture_size.y >> 1;
  850. bokeh.push_constant.half_size = true;
  851. RD::get_singleton()->compute_list_set_push_constant(compute_list, &bokeh.push_constant, sizeof(BokehPushConstant));
  852. RD::get_singleton()->compute_list_dispatch_threads(compute_list, bokeh.push_constant.size[0], bokeh.push_constant.size[1], 1);
  853. RD::get_singleton()->compute_list_add_barrier(compute_list);
  854. //circle is just one pass, then upscale
  855. // upscale
  856. RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, bokeh.compute_pipelines[BOKEH_COMPOSITE]);
  857. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_buffers.base_texture), 0);
  858. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_buffers.half_texture[0]), 1);
  859. bokeh.push_constant.size[0] = p_buffers.base_texture_size.x;
  860. bokeh.push_constant.size[1] = p_buffers.base_texture_size.y;
  861. bokeh.push_constant.half_size = false;
  862. bokeh.push_constant.second_pass = false;
  863. RD::get_singleton()->compute_list_set_push_constant(compute_list, &bokeh.push_constant, sizeof(BokehPushConstant));
  864. RD::get_singleton()->compute_list_dispatch_threads(compute_list, p_buffers.base_texture_size.x, p_buffers.base_texture_size.y, 1);
  865. }
  866. RD::get_singleton()->compute_list_end();
  867. }
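// Raster version of bokeh depth of field for the mobile renderer: a blur-size (weight) pass from depth, followed by one or two shaped blur draws and, when running at half resolution, a composite back into the base framebuffer.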
  868. void EffectsRD::bokeh_dof_raster(const BokehBuffers &p_buffers, bool p_dof_far, float p_dof_far_begin, float p_dof_far_size, bool p_dof_near, float p_dof_near_begin, float p_dof_near_size, float p_dof_blur_amount, RenderingServer::DOFBokehShape p_bokeh_shape, RS::DOFBlurQuality p_quality, float p_cam_znear, float p_cam_zfar, bool p_cam_orthogonal) {
869. ERR_FAIL_COND_MSG(!prefer_raster_effects, "Can't use raster version of bokeh DOF with the clustered renderer.");
  870. memset(&bokeh.push_constant, 0, sizeof(BokehPushConstant));
  871. bokeh.push_constant.orthogonal = p_cam_orthogonal;
  872. bokeh.push_constant.size[0] = p_buffers.base_texture_size.width;
  873. bokeh.push_constant.size[1] = p_buffers.base_texture_size.height;
  874. bokeh.push_constant.z_far = p_cam_zfar;
  875. bokeh.push_constant.z_near = p_cam_znear;
  876. bokeh.push_constant.second_pass = false;
  877. bokeh.push_constant.half_size = false;
  878. bokeh.push_constant.blur_size = p_dof_blur_amount;
  879. if (p_dof_far || p_dof_near) {
  880. if (p_dof_far) {
  881. bokeh.push_constant.blur_far_active = true;
  882. bokeh.push_constant.blur_far_begin = p_dof_far_begin;
  883. bokeh.push_constant.blur_far_end = p_dof_far_begin + p_dof_far_size;
  884. }
  885. if (p_dof_near) {
  886. bokeh.push_constant.blur_near_active = true;
  887. bokeh.push_constant.blur_near_begin = p_dof_near_begin;
  888. bokeh.push_constant.blur_near_end = p_dof_near_begin - p_dof_near_size;
  889. }
  890. {
891. // generate the blur size (circle of confusion) data from the depth buffer into the weight buffer
  892. RID framebuffer = p_buffers.base_weight_fb;
  893. RD::DrawListID draw_list = RD::get_singleton()->draw_list_begin(framebuffer, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_READ, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_DISCARD);
  894. RD::get_singleton()->draw_list_bind_render_pipeline(draw_list, bokeh.raster_pipelines[BOKEH_GEN_BLUR_SIZE].get_render_pipeline(RD::INVALID_ID, RD::get_singleton()->framebuffer_get_format(framebuffer)));
  895. RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_buffers.depth_texture), 0);
  896. RD::get_singleton()->draw_list_bind_index_array(draw_list, index_array);
  897. RD::get_singleton()->draw_list_set_push_constant(draw_list, &bokeh.push_constant, sizeof(BokehPushConstant));
  898. RD::get_singleton()->draw_list_draw(draw_list, true);
  899. RD::get_singleton()->draw_list_end();
  900. }
  901. if (p_bokeh_shape == RS::DOF_BOKEH_BOX || p_bokeh_shape == RS::DOF_BOKEH_HEXAGON) {
  902. // double pass approach
  903. BokehMode mode = p_bokeh_shape == RS::DOF_BOKEH_BOX ? BOKEH_GEN_BOKEH_BOX : BOKEH_GEN_BOKEH_HEXAGONAL;
  904. if (p_quality == RS::DOF_BLUR_QUALITY_VERY_LOW || p_quality == RS::DOF_BLUR_QUALITY_LOW) {
905. //box and hexagon are more or less the same; they can work at either half size (very low and low quality) or full size (medium and high quality)
  906. bokeh.push_constant.size[0] = p_buffers.base_texture_size.x >> 1;
  907. bokeh.push_constant.size[1] = p_buffers.base_texture_size.y >> 1;
  908. bokeh.push_constant.half_size = true;
  909. bokeh.push_constant.blur_size *= 0.5;
  910. }
  911. static const int quality_samples[4] = { 6, 12, 12, 24 };
  912. bokeh.push_constant.blur_scale = 0.5;
  913. bokeh.push_constant.steps = quality_samples[p_quality];
  914. RID framebuffer = bokeh.push_constant.half_size ? p_buffers.half_fb[0] : p_buffers.secondary_fb;
  915. // Pass 1
  916. RD::DrawListID draw_list = RD::get_singleton()->draw_list_begin(framebuffer, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_READ, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_DISCARD);
  917. RD::get_singleton()->draw_list_bind_render_pipeline(draw_list, bokeh.raster_pipelines[mode].get_render_pipeline(RD::INVALID_ID, RD::get_singleton()->framebuffer_get_format(framebuffer)));
  918. RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_buffers.base_texture), 0);
  919. RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_buffers.weight_texture[0]), 1);
  920. RD::get_singleton()->draw_list_bind_index_array(draw_list, index_array);
  921. RD::get_singleton()->draw_list_set_push_constant(draw_list, &bokeh.push_constant, sizeof(BokehPushConstant));
  922. RD::get_singleton()->draw_list_draw(draw_list, true);
  923. RD::get_singleton()->draw_list_end();
  924. // Pass 2
  925. if (!bokeh.push_constant.half_size) {
  926. // do not output weight, we're writing back into our base buffer
  927. mode = p_bokeh_shape == RS::DOF_BOKEH_BOX ? BOKEH_GEN_BOKEH_BOX_NOWEIGHT : BOKEH_GEN_BOKEH_HEXAGONAL_NOWEIGHT;
  928. }
  929. bokeh.push_constant.second_pass = true;
  930. framebuffer = bokeh.push_constant.half_size ? p_buffers.half_fb[1] : p_buffers.base_fb;
  931. RID texture = bokeh.push_constant.half_size ? p_buffers.half_texture[0] : p_buffers.secondary_texture;
  932. RID weight = bokeh.push_constant.half_size ? p_buffers.weight_texture[2] : p_buffers.weight_texture[1];
  933. draw_list = RD::get_singleton()->draw_list_begin(framebuffer, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_READ, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_DISCARD);
  934. RD::get_singleton()->draw_list_bind_render_pipeline(draw_list, bokeh.raster_pipelines[mode].get_render_pipeline(RD::INVALID_ID, RD::get_singleton()->framebuffer_get_format(framebuffer)));
  935. RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(texture), 0);
  936. RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(weight), 1);
  937. RD::get_singleton()->draw_list_bind_index_array(draw_list, index_array);
  938. RD::get_singleton()->draw_list_set_push_constant(draw_list, &bokeh.push_constant, sizeof(BokehPushConstant));
  939. RD::get_singleton()->draw_list_draw(draw_list, true);
  940. RD::get_singleton()->draw_list_end();
  941. if (bokeh.push_constant.half_size) {
  942. // Compose pass
  943. mode = BOKEH_COMPOSITE;
  944. framebuffer = p_buffers.base_fb;
  945. draw_list = RD::get_singleton()->draw_list_begin(framebuffer, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_READ, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_DISCARD);
  946. RD::get_singleton()->draw_list_bind_render_pipeline(draw_list, bokeh.raster_pipelines[mode].get_render_pipeline(RD::INVALID_ID, RD::get_singleton()->framebuffer_get_format(framebuffer)));
  947. RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_buffers.half_texture[1]), 0);
  948. RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_buffers.weight_texture[3]), 1);
  949. RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_buffers.weight_texture[0]), 2);
  950. RD::get_singleton()->draw_list_bind_index_array(draw_list, index_array);
  951. RD::get_singleton()->draw_list_set_push_constant(draw_list, &bokeh.push_constant, sizeof(BokehPushConstant));
  952. RD::get_singleton()->draw_list_draw(draw_list, true);
  953. RD::get_singleton()->draw_list_end();
  954. }
  955. } else {
  956. // circular is a single pass approach
  957. BokehMode mode = BOKEH_GEN_BOKEH_CIRCULAR;
  958. {
  959. // circle always runs in half size, otherwise too expensive (though the code below does support making this optional)
  960. bokeh.push_constant.size[0] = p_buffers.base_texture_size.x >> 1;
  961. bokeh.push_constant.size[1] = p_buffers.base_texture_size.y >> 1;
  962. bokeh.push_constant.half_size = true;
  963. // bokeh.push_constant.blur_size *= 0.5;
  964. }
  965. static const float quality_scale[4] = { 8.0, 4.0, 1.0, 0.5 };
  966. bokeh.push_constant.blur_scale = quality_scale[p_quality];
  967. bokeh.push_constant.steps = 0.0;
  968. RID framebuffer = bokeh.push_constant.half_size ? p_buffers.half_fb[0] : p_buffers.secondary_fb;
  969. RD::DrawListID draw_list = RD::get_singleton()->draw_list_begin(framebuffer, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_READ, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_DISCARD);
  970. RD::get_singleton()->draw_list_bind_render_pipeline(draw_list, bokeh.raster_pipelines[mode].get_render_pipeline(RD::INVALID_ID, RD::get_singleton()->framebuffer_get_format(framebuffer)));
  971. RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_buffers.base_texture), 0);
  972. RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_buffers.weight_texture[0]), 1);
  973. RD::get_singleton()->draw_list_bind_index_array(draw_list, index_array);
  974. RD::get_singleton()->draw_list_set_push_constant(draw_list, &bokeh.push_constant, sizeof(BokehPushConstant));
  975. RD::get_singleton()->draw_list_draw(draw_list, true);
  976. RD::get_singleton()->draw_list_end();
  977. if (bokeh.push_constant.half_size) {
  978. // Compose
  979. mode = BOKEH_COMPOSITE;
  980. framebuffer = p_buffers.base_fb;
  981. draw_list = RD::get_singleton()->draw_list_begin(framebuffer, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_READ, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_DISCARD);
  982. RD::get_singleton()->draw_list_bind_render_pipeline(draw_list, bokeh.raster_pipelines[mode].get_render_pipeline(RD::INVALID_ID, RD::get_singleton()->framebuffer_get_format(framebuffer)));
  983. RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_buffers.half_texture[0]), 0);
  984. RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_buffers.weight_texture[2]), 1);
  985. RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_buffers.weight_texture[0]), 2);
  986. RD::get_singleton()->draw_list_bind_index_array(draw_list, index_array);
  987. RD::get_singleton()->draw_list_set_push_constant(draw_list, &bokeh.push_constant, sizeof(BokehPushConstant));
  988. RD::get_singleton()->draw_list_draw(draw_list, true);
  989. RD::get_singleton()->draw_list_end();
  990. } else {
991. // Just copy it back (we use our blur raster shader here).
  992. draw_list = RD::get_singleton()->draw_list_begin(p_buffers.base_fb, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_READ, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_DISCARD);
  993. RD::get_singleton()->draw_list_bind_render_pipeline(draw_list, blur_raster.pipelines[BLUR_MODE_COPY].get_render_pipeline(RD::INVALID_ID, RD::get_singleton()->framebuffer_get_format(p_buffers.base_fb)));
  994. RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_buffers.secondary_texture), 0);
  995. RD::get_singleton()->draw_list_bind_index_array(draw_list, index_array);
  996. memset(&blur_raster.push_constant, 0, sizeof(BlurRasterPushConstant));
  997. RD::get_singleton()->draw_list_set_push_constant(draw_list, &blur_raster.push_constant, sizeof(BlurRasterPushConstant));
  998. RD::get_singleton()->draw_list_draw(draw_list, true);
  999. RD::get_singleton()->draw_list_end();
  1000. }
  1001. }
  1002. }
  1003. }
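// Runs the SSAO gather shader once per 2x2 interleaved slice (slices 1 and 2 are skipped at VERY_LOW quality); the importance map set is only bound for the adaptive ULTRA path outside the base pass.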
  1004. void EffectsRD::gather_ssao(RD::ComputeListID p_compute_list, const Vector<RID> p_ao_slices, const SSAOSettings &p_settings, bool p_adaptive_base_pass, RID p_gather_uniform_set, RID p_importance_map_uniform_set) {
  1005. RD::get_singleton()->compute_list_bind_uniform_set(p_compute_list, p_gather_uniform_set, 0);
  1006. if ((p_settings.quality == RS::ENV_SSAO_QUALITY_ULTRA) && !p_adaptive_base_pass) {
  1007. RD::get_singleton()->compute_list_bind_uniform_set(p_compute_list, p_importance_map_uniform_set, 1);
  1008. }
  1009. for (int i = 0; i < 4; i++) {
  1010. if ((p_settings.quality == RS::ENV_SSAO_QUALITY_VERY_LOW) && ((i == 1) || (i == 2))) {
  1011. continue;
  1012. }
  1013. ssao.gather_push_constant.pass_coord_offset[0] = i % 2;
  1014. ssao.gather_push_constant.pass_coord_offset[1] = i / 2;
  1015. ssao.gather_push_constant.pass_uv_offset[0] = ((i % 2) - 0.0) / p_settings.full_screen_size.x;
  1016. ssao.gather_push_constant.pass_uv_offset[1] = ((i / 2) - 0.0) / p_settings.full_screen_size.y;
  1017. ssao.gather_push_constant.pass = i;
  1018. RD::get_singleton()->compute_list_bind_uniform_set(p_compute_list, _get_uniform_set_from_image(p_ao_slices[i]), 2);
  1019. RD::get_singleton()->compute_list_set_push_constant(p_compute_list, &ssao.gather_push_constant, sizeof(SSAOGatherPushConstant));
  1020. Size2i size = Size2i(p_settings.full_screen_size.x >> (p_settings.half_size ? 2 : 1), p_settings.full_screen_size.y >> (p_settings.half_size ? 2 : 1));
  1021. RD::get_singleton()->compute_list_dispatch_threads(p_compute_list, size.x, size.y, 1);
  1022. }
  1023. RD::get_singleton()->compute_list_add_barrier(p_compute_list);
  1024. }
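// Full SSAO pipeline: downsample/deinterleave depth, gather AO per slice (with an adaptive importance-map pre-pass at ULTRA quality), edge-aware blur, then interleave the slices back to full resolution.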
  1025. void EffectsRD::generate_ssao(RID p_depth_buffer, RID p_normal_buffer, RID p_depth_mipmaps_texture, const Vector<RID> &p_depth_mipmaps, RID p_ao, const Vector<RID> p_ao_slices, RID p_ao_pong, const Vector<RID> p_ao_pong_slices, RID p_upscale_buffer, RID p_importance_map, RID p_importance_map_pong, const CameraMatrix &p_projection, const SSAOSettings &p_settings, bool p_invalidate_uniform_sets, RID &r_downsample_uniform_set, RID &r_gather_uniform_set, RID &r_importance_map_uniform_set) {
  1026. RD::ComputeListID compute_list = RD::get_singleton()->compute_list_begin();
  1027. RD::get_singleton()->draw_command_begin_label("SSAO");
  1028. /* FIRST PASS */
  1029. // Downsample and deinterleave the depth buffer.
  1030. {
  1031. RD::get_singleton()->draw_command_begin_label("Downsample Depth");
  1032. if (p_invalidate_uniform_sets) {
  1033. Vector<RD::Uniform> uniforms;
  1034. {
  1035. RD::Uniform u;
  1036. u.uniform_type = RD::UNIFORM_TYPE_IMAGE;
  1037. u.binding = 0;
  1038. u.ids.push_back(p_depth_mipmaps[1]);
  1039. uniforms.push_back(u);
  1040. }
  1041. {
  1042. RD::Uniform u;
  1043. u.uniform_type = RD::UNIFORM_TYPE_IMAGE;
  1044. u.binding = 1;
  1045. u.ids.push_back(p_depth_mipmaps[2]);
  1046. uniforms.push_back(u);
  1047. }
  1048. {
  1049. RD::Uniform u;
  1050. u.uniform_type = RD::UNIFORM_TYPE_IMAGE;
  1051. u.binding = 2;
  1052. u.ids.push_back(p_depth_mipmaps[3]);
  1053. uniforms.push_back(u);
  1054. }
  1055. r_downsample_uniform_set = RD::get_singleton()->uniform_set_create(uniforms, ssao.downsample_shader.version_get_shader(ssao.downsample_shader_version, 2), 2);
  1056. }
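// Constants the downsample shader uses to linearize depth, derived from the projection matrix (orthogonal projections use the raw near/far planes below).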
  1057. float depth_linearize_mul = -p_projection.matrix[3][2];
  1058. float depth_linearize_add = p_projection.matrix[2][2];
  1059. if (depth_linearize_mul * depth_linearize_add < 0) {
  1060. depth_linearize_add = -depth_linearize_add;
  1061. }
  1062. ssao.downsample_push_constant.orthogonal = p_projection.is_orthogonal();
  1063. ssao.downsample_push_constant.z_near = depth_linearize_mul;
  1064. ssao.downsample_push_constant.z_far = depth_linearize_add;
  1065. if (ssao.downsample_push_constant.orthogonal) {
  1066. ssao.downsample_push_constant.z_near = p_projection.get_z_near();
  1067. ssao.downsample_push_constant.z_far = p_projection.get_z_far();
  1068. }
  1069. ssao.downsample_push_constant.pixel_size[0] = 1.0 / p_settings.full_screen_size.x;
  1070. ssao.downsample_push_constant.pixel_size[1] = 1.0 / p_settings.full_screen_size.y;
  1071. ssao.downsample_push_constant.radius_sq = p_settings.radius * p_settings.radius;
  1072. int downsample_pipeline = SSAO_DOWNSAMPLE;
  1073. if (p_settings.quality == RS::ENV_SSAO_QUALITY_VERY_LOW) {
  1074. downsample_pipeline = SSAO_DOWNSAMPLE_HALF;
  1075. } else if (p_settings.quality > RS::ENV_SSAO_QUALITY_MEDIUM) {
  1076. downsample_pipeline = SSAO_DOWNSAMPLE_MIPMAP;
  1077. }
  1078. if (p_settings.half_size) {
  1079. downsample_pipeline++;
  1080. }
  1081. RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, ssao.pipelines[downsample_pipeline]);
  1082. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_depth_buffer), 0);
  1083. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_depth_mipmaps[0]), 1);
  1084. if (p_settings.quality > RS::ENV_SSAO_QUALITY_MEDIUM) {
  1085. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, r_downsample_uniform_set, 2);
  1086. }
  1087. RD::get_singleton()->compute_list_set_push_constant(compute_list, &ssao.downsample_push_constant, sizeof(SSAODownsamplePushConstant));
  1088. Size2i size(MAX(1, p_settings.full_screen_size.x >> (p_settings.half_size ? 2 : 1)), MAX(1, p_settings.full_screen_size.y >> (p_settings.half_size ? 2 : 1)));
  1089. RD::get_singleton()->compute_list_dispatch_threads(compute_list, size.x, size.y, 1);
  1090. RD::get_singleton()->compute_list_add_barrier(compute_list);
  1091. RD::get_singleton()->draw_command_end_label(); // Downsample SSAO
  1092. }
  1093. /* SECOND PASS */
  1094. // Sample SSAO
  1095. {
  1096. RD::get_singleton()->draw_command_begin_label("Gather Samples");
  1097. ssao.gather_push_constant.screen_size[0] = p_settings.full_screen_size.x;
  1098. ssao.gather_push_constant.screen_size[1] = p_settings.full_screen_size.y;
  1099. ssao.gather_push_constant.half_screen_pixel_size[0] = 1.0 / p_settings.half_screen_size.x;
  1100. ssao.gather_push_constant.half_screen_pixel_size[1] = 1.0 / p_settings.half_screen_size.y;
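// Constants for reconstructing view-space position from NDC coordinates in the gather shader.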
  1101. float tan_half_fov_x = 1.0 / p_projection.matrix[0][0];
  1102. float tan_half_fov_y = 1.0 / p_projection.matrix[1][1];
  1103. ssao.gather_push_constant.NDC_to_view_mul[0] = tan_half_fov_x * 2.0;
  1104. ssao.gather_push_constant.NDC_to_view_mul[1] = tan_half_fov_y * -2.0;
  1105. ssao.gather_push_constant.NDC_to_view_add[0] = tan_half_fov_x * -1.0;
  1106. ssao.gather_push_constant.NDC_to_view_add[1] = tan_half_fov_y;
  1107. ssao.gather_push_constant.is_orthogonal = p_projection.is_orthogonal();
  1108. ssao.gather_push_constant.half_screen_pixel_size_x025[0] = ssao.gather_push_constant.half_screen_pixel_size[0] * 0.25;
  1109. ssao.gather_push_constant.half_screen_pixel_size_x025[1] = ssao.gather_push_constant.half_screen_pixel_size[1] * 0.25;
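// Set the base radius before the quality/half-size branches below scale it.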
1110. ssao.gather_push_constant.radius = p_settings.radius;
1111. float radius_near_limit = (p_settings.radius * 1.2f);
1112. if (p_settings.quality <= RS::ENV_SSAO_QUALITY_LOW) {
1113. radius_near_limit *= 1.50f;
1114. if (p_settings.quality == RS::ENV_SSAO_QUALITY_VERY_LOW) {
1115. ssao.gather_push_constant.radius *= 0.8f;
1116. }
1117. if (p_settings.half_size) {
1118. ssao.gather_push_constant.radius *= 0.5f;
1119. }
1120. }
1121. radius_near_limit /= tan_half_fov_y;
  1122. ssao.gather_push_constant.intensity = p_settings.intensity;
  1123. ssao.gather_push_constant.shadow_power = p_settings.power;
  1124. ssao.gather_push_constant.shadow_clamp = 0.98;
  1125. ssao.gather_push_constant.fade_out_mul = -1.0 / (p_settings.fadeout_to - p_settings.fadeout_from);
  1126. ssao.gather_push_constant.fade_out_add = p_settings.fadeout_from / (p_settings.fadeout_to - p_settings.fadeout_from) + 1.0;
  1127. ssao.gather_push_constant.horizon_angle_threshold = p_settings.horizon;
  1128. ssao.gather_push_constant.inv_radius_near_limit = 1.0f / radius_near_limit;
  1129. ssao.gather_push_constant.neg_inv_radius = -1.0 / ssao.gather_push_constant.radius;
  1130. ssao.gather_push_constant.load_counter_avg_div = 9.0 / float((p_settings.quarter_screen_size.x) * (p_settings.quarter_screen_size.y) * 255);
  1131. ssao.gather_push_constant.adaptive_sample_limit = p_settings.adaptive_target;
  1132. ssao.gather_push_constant.detail_intensity = p_settings.detail;
  1133. ssao.gather_push_constant.quality = MAX(0, p_settings.quality - 1);
  1134. ssao.gather_push_constant.size_multiplier = p_settings.half_size ? 2 : 1;
  1135. if (p_invalidate_uniform_sets) {
  1136. Vector<RD::Uniform> uniforms;
  1137. {
  1138. RD::Uniform u;
  1139. u.uniform_type = RD::UNIFORM_TYPE_SAMPLER_WITH_TEXTURE;
  1140. u.binding = 0;
  1141. u.ids.push_back(ssao.mirror_sampler);
  1142. u.ids.push_back(p_depth_mipmaps_texture);
  1143. uniforms.push_back(u);
  1144. }
  1145. {
  1146. RD::Uniform u;
  1147. u.uniform_type = RD::UNIFORM_TYPE_IMAGE;
  1148. u.binding = 1;
  1149. u.ids.push_back(p_normal_buffer);
  1150. uniforms.push_back(u);
  1151. }
  1152. {
  1153. RD::Uniform u;
  1154. u.uniform_type = RD::UNIFORM_TYPE_UNIFORM_BUFFER;
  1155. u.binding = 2;
  1156. u.ids.push_back(ssao.gather_constants_buffer);
  1157. uniforms.push_back(u);
  1158. }
  1159. r_gather_uniform_set = RD::get_singleton()->uniform_set_create(uniforms, ssao.gather_shader.version_get_shader(ssao.gather_shader_version, 0), 0);
  1160. }
  1161. if (p_invalidate_uniform_sets) {
  1162. Vector<RD::Uniform> uniforms;
  1163. {
  1164. RD::Uniform u;
  1165. u.uniform_type = RD::UNIFORM_TYPE_IMAGE;
  1166. u.binding = 0;
  1167. u.ids.push_back(p_ao_pong);
  1168. uniforms.push_back(u);
  1169. }
  1170. {
  1171. RD::Uniform u;
  1172. u.uniform_type = RD::UNIFORM_TYPE_SAMPLER_WITH_TEXTURE;
  1173. u.binding = 1;
  1174. u.ids.push_back(default_sampler);
  1175. u.ids.push_back(p_importance_map);
  1176. uniforms.push_back(u);
  1177. }
  1178. {
  1179. RD::Uniform u;
  1180. u.uniform_type = RD::UNIFORM_TYPE_STORAGE_BUFFER;
  1181. u.binding = 2;
  1182. u.ids.push_back(ssao.importance_map_load_counter);
  1183. uniforms.push_back(u);
  1184. }
  1185. r_importance_map_uniform_set = RD::get_singleton()->uniform_set_create(uniforms, ssao.gather_shader.version_get_shader(ssao.gather_shader_version, 2), 1);
  1186. }
  1187. if (p_settings.quality == RS::ENV_SSAO_QUALITY_ULTRA) {
  1188. RD::get_singleton()->draw_command_begin_label("Generate Importance Map");
  1189. ssao.importance_map_push_constant.half_screen_pixel_size[0] = 1.0 / p_settings.half_screen_size.x;
  1190. ssao.importance_map_push_constant.half_screen_pixel_size[1] = 1.0 / p_settings.half_screen_size.y;
  1191. ssao.importance_map_push_constant.intensity = p_settings.intensity;
  1192. ssao.importance_map_push_constant.power = p_settings.power;
  1193. //base pass
  1194. RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, ssao.pipelines[SSAO_GATHER_BASE]);
  1195. gather_ssao(compute_list, p_ao_pong_slices, p_settings, true, r_gather_uniform_set, RID());
  1196. //generate importance map
  1197. RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, ssao.pipelines[SSAO_GENERATE_IMPORTANCE_MAP]);
  1198. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_ao_pong), 0);
  1199. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_importance_map), 1);
  1200. RD::get_singleton()->compute_list_set_push_constant(compute_list, &ssao.importance_map_push_constant, sizeof(SSAOImportanceMapPushConstant));
  1201. RD::get_singleton()->compute_list_dispatch_threads(compute_list, p_settings.quarter_screen_size.x, p_settings.quarter_screen_size.y, 1);
  1202. RD::get_singleton()->compute_list_add_barrier(compute_list);
  1203. //process importance map A
  1204. RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, ssao.pipelines[SSAO_PROCESS_IMPORTANCE_MAPA]);
  1205. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_importance_map), 0);
  1206. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_importance_map_pong), 1);
  1207. RD::get_singleton()->compute_list_set_push_constant(compute_list, &ssao.importance_map_push_constant, sizeof(SSAOImportanceMapPushConstant));
  1208. RD::get_singleton()->compute_list_dispatch_threads(compute_list, p_settings.quarter_screen_size.x, p_settings.quarter_screen_size.y, 1);
  1209. RD::get_singleton()->compute_list_add_barrier(compute_list);
1210. //process importance map B
  1211. RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, ssao.pipelines[SSAO_PROCESS_IMPORTANCE_MAPB]);
  1212. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_importance_map_pong), 0);
  1213. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_importance_map), 1);
  1214. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, ssao.counter_uniform_set, 2);
  1215. RD::get_singleton()->compute_list_set_push_constant(compute_list, &ssao.importance_map_push_constant, sizeof(SSAOImportanceMapPushConstant));
  1216. RD::get_singleton()->compute_list_dispatch_threads(compute_list, p_settings.quarter_screen_size.x, p_settings.quarter_screen_size.y, 1);
  1217. RD::get_singleton()->compute_list_add_barrier(compute_list);
  1218. RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, ssao.pipelines[SSAO_GATHER_ADAPTIVE]);
  1219. RD::get_singleton()->draw_command_end_label(); // Importance Map
  1220. } else {
  1221. RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, ssao.pipelines[SSAO_GATHER]);
  1222. }
  1223. gather_ssao(compute_list, p_ao_slices, p_settings, false, r_gather_uniform_set, r_importance_map_uniform_set);
  1224. RD::get_singleton()->draw_command_end_label(); // Gather SSAO
  1225. }
1226. /* THIRD PASS */
1227. // Blur
1228. //
  1229. {
  1230. RD::get_singleton()->draw_command_begin_label("Edge Aware Blur");
  1231. ssao.blur_push_constant.edge_sharpness = 1.0 - p_settings.sharpness;
  1232. ssao.blur_push_constant.half_screen_pixel_size[0] = 1.0 / p_settings.half_screen_size.x;
  1233. ssao.blur_push_constant.half_screen_pixel_size[1] = 1.0 / p_settings.half_screen_size.y;
  1234. int blur_passes = p_settings.quality > RS::ENV_SSAO_QUALITY_VERY_LOW ? p_settings.blur_passes : 1;
  1235. for (int pass = 0; pass < blur_passes; pass++) {
  1236. int blur_pipeline = SSAO_BLUR_PASS;
  1237. if (p_settings.quality > RS::ENV_SSAO_QUALITY_VERY_LOW) {
  1238. if (pass < blur_passes - 2) {
  1239. blur_pipeline = SSAO_BLUR_PASS_WIDE;
  1240. } else {
  1241. blur_pipeline = SSAO_BLUR_PASS_SMART;
  1242. }
  1243. }
  1244. for (int i = 0; i < 4; i++) {
  1245. if ((p_settings.quality == RS::ENV_SSAO_QUALITY_VERY_LOW) && ((i == 1) || (i == 2))) {
  1246. continue;
  1247. }
  1248. RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, ssao.pipelines[blur_pipeline]);
  1249. if (pass % 2 == 0) {
  1250. if (p_settings.quality == RS::ENV_SSAO_QUALITY_VERY_LOW) {
  1251. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_ao_slices[i]), 0);
  1252. } else {
  1253. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture_and_sampler(p_ao_slices[i], ssao.mirror_sampler), 0);
  1254. }
  1255. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_ao_pong_slices[i]), 1);
  1256. } else {
  1257. if (p_settings.quality == RS::ENV_SSAO_QUALITY_VERY_LOW) {
  1258. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_ao_pong_slices[i]), 0);
  1259. } else {
  1260. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture_and_sampler(p_ao_pong_slices[i], ssao.mirror_sampler), 0);
  1261. }
  1262. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_ao_slices[i]), 1);
  1263. }
  1264. RD::get_singleton()->compute_list_set_push_constant(compute_list, &ssao.blur_push_constant, sizeof(SSAOBlurPushConstant));
  1265. Size2i size(p_settings.full_screen_size.x >> (p_settings.half_size ? 2 : 1), p_settings.full_screen_size.y >> (p_settings.half_size ? 2 : 1));
  1266. RD::get_singleton()->compute_list_dispatch_threads(compute_list, size.x, size.y, 1);
  1267. }
  1268. if (p_settings.quality > RS::ENV_SSAO_QUALITY_VERY_LOW) {
  1269. RD::get_singleton()->compute_list_add_barrier(compute_list);
  1270. }
  1271. }
  1272. RD::get_singleton()->draw_command_end_label(); // Blur
  1273. }
  1274. /* FOURTH PASS */
  1275. // Interleave buffers
  1276. // back to full size
  1277. {
  1278. RD::get_singleton()->draw_command_begin_label("Interleave Buffers");
  1279. ssao.interleave_push_constant.inv_sharpness = 1.0 - p_settings.sharpness;
  1280. ssao.interleave_push_constant.pixel_size[0] = 1.0 / p_settings.full_screen_size.x;
  1281. ssao.interleave_push_constant.pixel_size[1] = 1.0 / p_settings.full_screen_size.y;
  1282. ssao.interleave_push_constant.size_modifier = uint32_t(p_settings.half_size ? 4 : 2);
  1283. int interleave_pipeline = SSAO_INTERLEAVE_HALF;
  1284. if (p_settings.quality == RS::ENV_SSAO_QUALITY_LOW) {
  1285. interleave_pipeline = SSAO_INTERLEAVE;
  1286. } else if (p_settings.quality >= RS::ENV_SSAO_QUALITY_MEDIUM) {
  1287. interleave_pipeline = SSAO_INTERLEAVE_SMART;
  1288. }
  1289. RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, ssao.pipelines[interleave_pipeline]);
  1290. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_upscale_buffer), 0);
  1291. if (p_settings.quality > RS::ENV_SSAO_QUALITY_VERY_LOW && p_settings.blur_passes % 2 == 0) {
  1292. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_ao), 1);
  1293. } else {
  1294. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_ao_pong), 1);
  1295. }
  1296. RD::get_singleton()->compute_list_set_push_constant(compute_list, &ssao.interleave_push_constant, sizeof(SSAOInterleavePushConstant));
  1297. RD::get_singleton()->compute_list_dispatch_threads(compute_list, p_settings.full_screen_size.x, p_settings.full_screen_size.y, 1);
  1298. RD::get_singleton()->compute_list_add_barrier(compute_list);
  1299. RD::get_singleton()->draw_command_end_label(); // Interleave
  1300. }
  1301. RD::get_singleton()->draw_command_end_label(); //SSAO
  1302. RD::get_singleton()->compute_list_end(RD::BARRIER_MASK_TRANSFER); //wait for upcoming transfer
  1303. int zero[1] = { 0 };
  1304. RD::get_singleton()->buffer_update(ssao.importance_map_load_counter, 0, sizeof(uint32_t), &zero, 0); //no barrier
  1305. }
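// Roughness limiter: reads the screen-space normal buffer and adjusts the roughness texture (likely to reduce specular aliasing); p_curve controls the response.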
  1306. void EffectsRD::roughness_limit(RID p_source_normal, RID p_roughness, const Size2i &p_size, float p_curve) {
  1307. roughness_limiter.push_constant.screen_size[0] = p_size.x;
  1308. roughness_limiter.push_constant.screen_size[1] = p_size.y;
  1309. roughness_limiter.push_constant.curve = p_curve;
  1310. RD::ComputeListID compute_list = RD::get_singleton()->compute_list_begin();
  1311. RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, roughness_limiter.pipeline);
  1312. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_source_normal), 0);
  1313. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_roughness), 1);
  1314. RD::get_singleton()->compute_list_set_push_constant(compute_list, &roughness_limiter.push_constant, sizeof(RoughnessLimiterPushConstant)); //not used but set anyway
  1315. RD::get_singleton()->compute_list_dispatch_threads(compute_list, p_size.x, p_size.y, 1);
  1316. RD::get_singleton()->compute_list_end();
  1317. }
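// Prefilters a cubemap for the given roughness with p_sample_count samples; p_face_id > 9 processes all six faces in one dispatch, and a roughness of 0 takes the direct-write fast path.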
  1318. void EffectsRD::cubemap_roughness(RID p_source_rd_texture, RID p_dest_texture, uint32_t p_face_id, uint32_t p_sample_count, float p_roughness, float p_size) {
  1319. ERR_FAIL_COND_MSG(prefer_raster_effects, "Can't use compute based cubemap roughness with the mobile renderer.");
  1320. memset(&roughness.push_constant, 0, sizeof(CubemapRoughnessPushConstant));
  1321. roughness.push_constant.face_id = p_face_id > 9 ? 0 : p_face_id;
  1322. roughness.push_constant.roughness = p_roughness;
  1323. roughness.push_constant.sample_count = p_sample_count;
  1324. roughness.push_constant.use_direct_write = p_roughness == 0.0;
  1325. roughness.push_constant.face_size = p_size;
  1326. RD::ComputeListID compute_list = RD::get_singleton()->compute_list_begin();
  1327. RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, roughness.compute_pipeline);
  1328. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_source_rd_texture), 0);
  1329. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_dest_texture), 1);
  1330. RD::get_singleton()->compute_list_set_push_constant(compute_list, &roughness.push_constant, sizeof(CubemapRoughnessPushConstant));
  1331. int x_groups = (p_size - 1) / 8 + 1;
  1332. int y_groups = (p_size - 1) / 8 + 1;
  1333. RD::get_singleton()->compute_list_dispatch(compute_list, x_groups, y_groups, p_face_id > 9 ? 6 : 1);
  1334. RD::get_singleton()->compute_list_end();
  1335. }
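// Raster version used by the mobile renderer; it must be called once per cubemap face.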
  1336. void EffectsRD::cubemap_roughness_raster(RID p_source_rd_texture, RID p_dest_framebuffer, uint32_t p_face_id, uint32_t p_sample_count, float p_roughness, float p_size) {
  1337. ERR_FAIL_COND_MSG(!prefer_raster_effects, "Can't use raster based cubemap roughness with the clustered renderer.");
  1338. ERR_FAIL_COND_MSG(p_face_id >= 6, "Raster implementation of cubemap roughness must process one side at a time.");
  1339. memset(&roughness.push_constant, 0, sizeof(CubemapRoughnessPushConstant));
  1340. roughness.push_constant.face_id = p_face_id;
  1341. roughness.push_constant.roughness = p_roughness;
  1342. roughness.push_constant.sample_count = p_sample_count;
  1343. roughness.push_constant.use_direct_write = p_roughness == 0.0;
  1344. roughness.push_constant.face_size = p_size;
  1345. RD::DrawListID draw_list = RD::get_singleton()->draw_list_begin(p_dest_framebuffer, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_READ, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_DISCARD);
  1346. RD::get_singleton()->draw_list_bind_render_pipeline(draw_list, roughness.raster_pipeline.get_render_pipeline(RD::INVALID_ID, RD::get_singleton()->framebuffer_get_format(p_dest_framebuffer)));
  1347. RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_source_rd_texture), 0);
  1348. RD::get_singleton()->draw_list_bind_index_array(draw_list, index_array);
  1349. RD::get_singleton()->draw_list_set_push_constant(draw_list, &roughness.push_constant, sizeof(CubemapRoughnessPushConstant));
  1350. RD::get_singleton()->draw_list_draw(draw_list, true);
  1351. RD::get_singleton()->draw_list_end();
  1352. }
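// Downsamples a cubemap one mip level; the compute version handles all six faces in a single dispatch (one z group per face).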
  1353. void EffectsRD::cubemap_downsample(RID p_source_cubemap, RID p_dest_cubemap, const Size2i &p_size) {
  1354. ERR_FAIL_COND_MSG(prefer_raster_effects, "Can't use compute based cubemap downsample with the mobile renderer.");
  1355. cubemap_downsampler.push_constant.face_size = p_size.x;
  1356. cubemap_downsampler.push_constant.face_id = 0; // we render all 6 sides to each layer in one call
  1357. RD::ComputeListID compute_list = RD::get_singleton()->compute_list_begin();
  1358. RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, cubemap_downsampler.compute_pipeline);
  1359. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_source_cubemap), 0);
  1360. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_dest_cubemap), 1);
  1361. int x_groups = (p_size.x - 1) / 8 + 1;
  1362. int y_groups = (p_size.y - 1) / 8 + 1;
  1363. RD::get_singleton()->compute_list_set_push_constant(compute_list, &cubemap_downsampler.push_constant, sizeof(CubemapDownsamplerPushConstant));
  1364. RD::get_singleton()->compute_list_dispatch(compute_list, x_groups, y_groups, 6); // one z_group for each face
  1365. RD::get_singleton()->compute_list_end();
  1366. }
  1367. void EffectsRD::cubemap_downsample_raster(RID p_source_cubemap, RID p_dest_framebuffer, uint32_t p_face_id, const Size2i &p_size) {
  1368. ERR_FAIL_COND_MSG(!prefer_raster_effects, "Can't use raster based cubemap downsample with the clustered renderer.");
  1369. ERR_FAIL_COND_MSG(p_face_id >= 6, "Raster implementation of cubemap downsample must process one side at a time.");
  1370. cubemap_downsampler.push_constant.face_size = p_size.x;
  1371. cubemap_downsampler.push_constant.face_id = p_face_id;
  1372. RD::DrawListID draw_list = RD::get_singleton()->draw_list_begin(p_dest_framebuffer, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_READ, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_DISCARD);
  1373. RD::get_singleton()->draw_list_bind_render_pipeline(draw_list, cubemap_downsampler.raster_pipeline.get_render_pipeline(RD::INVALID_ID, RD::get_singleton()->framebuffer_get_format(p_dest_framebuffer)));
  1374. RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_source_cubemap), 0);
  1375. RD::get_singleton()->draw_list_bind_index_array(draw_list, index_array);
  1376. RD::get_singleton()->draw_list_set_push_constant(draw_list, &cubemap_downsampler.push_constant, sizeof(CubemapDownsamplerPushConstant));
  1377. RD::get_singleton()->draw_list_draw(draw_list, true);
  1378. RD::get_singleton()->draw_list_end();
  1379. }
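// Filters a cubemap into a set of destination mips (presumably the radiance/reflection mip chain); all destination mips are bound as images so the whole chain is produced in one dispatch.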
  1380. void EffectsRD::cubemap_filter(RID p_source_cubemap, Vector<RID> p_dest_cubemap, bool p_use_array) {
  1381. ERR_FAIL_COND_MSG(prefer_raster_effects, "Can't use compute based cubemap filter with the mobile renderer.");
  1382. Vector<RD::Uniform> uniforms;
  1383. for (int i = 0; i < p_dest_cubemap.size(); i++) {
  1384. RD::Uniform u;
  1385. u.uniform_type = RD::UNIFORM_TYPE_IMAGE;
  1386. u.binding = i;
  1387. u.ids.push_back(p_dest_cubemap[i]);
  1388. uniforms.push_back(u);
  1389. }
  1390. if (RD::get_singleton()->uniform_set_is_valid(filter.image_uniform_set)) {
  1391. RD::get_singleton()->free(filter.image_uniform_set);
  1392. }
  1393. filter.image_uniform_set = RD::get_singleton()->uniform_set_create(uniforms, filter.compute_shader.version_get_shader(filter.shader_version, 0), 2);
  1394. int pipeline = p_use_array ? FILTER_MODE_HIGH_QUALITY_ARRAY : FILTER_MODE_HIGH_QUALITY;
  1395. pipeline = filter.use_high_quality ? pipeline : pipeline + 1;
  1396. RD::ComputeListID compute_list = RD::get_singleton()->compute_list_begin();
  1397. RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, filter.compute_pipelines[pipeline]);
  1398. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_source_cubemap, true), 0);
  1399. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, filter.uniform_set, 1);
  1400. RD::get_singleton()->compute_list_bind_uniform_set(compute_list, filter.image_uniform_set, 2);
  1401. int x_groups = p_use_array ? 1792 : 342; // (128 * 128 * 7) / 64 : (128*128 + 64*64 + 32*32 + 16*16 + 8*8 + 4*4 + 2*2) / 64
  1402. RD::get_singleton()->compute_list_dispatch(compute_list, x_groups, 6, 1); // one y_group for each face
  1403. RD::get_singleton()->compute_list_end();
  1404. }
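// Raster version of the cubemap filter: processes a single face and mip level per draw.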
void EffectsRD::cubemap_filter_raster(RID p_source_cubemap, RID p_dest_framebuffer, uint32_t p_face_id, uint32_t p_mip_level) {
    ERR_FAIL_COND_MSG(!prefer_raster_effects, "Can't use raster based cubemap filter with the clustered renderer.");
    ERR_FAIL_COND_MSG(p_face_id >= 6, "Raster implementation of cubemap filter must process one side at a time.");

    // TODO implement!

    CubemapFilterRasterPushConstant push_constant;
    push_constant.mip_level = p_mip_level;
    push_constant.face_id = p_face_id;

    CubemapFilterMode mode = filter.use_high_quality ? FILTER_MODE_HIGH_QUALITY : FILTER_MODE_LOW_QUALITY;

    RD::DrawListID draw_list = RD::get_singleton()->draw_list_begin(p_dest_framebuffer, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_READ, RD::INITIAL_ACTION_KEEP, RD::FINAL_ACTION_DISCARD);
    RD::get_singleton()->draw_list_bind_render_pipeline(draw_list, filter.raster_pipelines[mode].get_render_pipeline(RD::INVALID_ID, RD::get_singleton()->framebuffer_get_format(p_dest_framebuffer)));
    RD::get_singleton()->draw_list_bind_uniform_set(draw_list, _get_uniform_set_from_texture(p_source_cubemap), 0);
    RD::get_singleton()->draw_list_bind_uniform_set(draw_list, filter.uniform_set, 1);
    RD::get_singleton()->draw_list_bind_index_array(draw_list, index_array);
    RD::get_singleton()->draw_list_set_push_constant(draw_list, &push_constant, sizeof(CubemapFilterRasterPushConstant));
    RD::get_singleton()->draw_list_draw(draw_list, true);
    RD::get_singleton()->draw_list_end();
}
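
// Resolves the multisampled depth and normal-roughness buffers (and, when provided, the
// voxel GI buffer) into single-sample destinations using the resolve compute shader.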
void EffectsRD::resolve_gi(RID p_source_depth, RID p_source_normal_roughness, RID p_source_voxel_gi, RID p_dest_depth, RID p_dest_normal_roughness, RID p_dest_voxel_gi, Vector2i p_screen_size, int p_samples, uint32_t p_barrier) {
    ResolvePushConstant push_constant;
    push_constant.screen_size[0] = p_screen_size.x;
    push_constant.screen_size[1] = p_screen_size.y;
    push_constant.samples = p_samples;

    RD::ComputeListID compute_list = RD::get_singleton()->compute_list_begin();
    RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, resolve.pipelines[p_source_voxel_gi.is_valid() ? RESOLVE_MODE_GI_VOXEL_GI : RESOLVE_MODE_GI]);
    RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture_pair(p_source_depth, p_source_normal_roughness), 0);
    RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_image_pair(p_dest_depth, p_dest_normal_roughness), 1);
    if (p_source_voxel_gi.is_valid()) {
        RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_source_voxel_gi), 2);
        RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_dest_voxel_gi), 3);
    }
    RD::get_singleton()->compute_list_set_push_constant(compute_list, &push_constant, sizeof(ResolvePushConstant));
    RD::get_singleton()->compute_list_dispatch_threads(compute_list, p_screen_size.x, p_screen_size.y, 1);

    RD::get_singleton()->compute_list_end(p_barrier);
}

void EffectsRD::resolve_depth(RID p_source_depth, RID p_dest_depth, Vector2i p_screen_size, int p_samples, uint32_t p_barrier) {
    ResolvePushConstant push_constant;
    push_constant.screen_size[0] = p_screen_size.x;
    push_constant.screen_size[1] = p_screen_size.y;
    push_constant.samples = p_samples;

    RD::ComputeListID compute_list = RD::get_singleton()->compute_list_begin();
    RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, resolve.pipelines[RESOLVE_MODE_DEPTH]);
    RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_compute_uniform_set_from_texture(p_source_depth), 0);
    RD::get_singleton()->compute_list_bind_uniform_set(compute_list, _get_uniform_set_from_image(p_dest_depth), 1);
    RD::get_singleton()->compute_list_set_push_constant(compute_list, &push_constant, sizeof(ResolvePushConstant));
    RD::get_singleton()->compute_list_dispatch_threads(compute_list, p_screen_size.x, p_screen_size.y, 1);

    RD::get_singleton()->compute_list_end(p_barrier);
}
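
// GPU merge sort over the buffer bound in p_uniform_set: blocks of 512 elements are sorted
// locally first (SORT_MODE_BLOCK), then progressively merged across blocks (SORT_MODE_STEP),
// with the tail of each merge pass finished by SORT_MODE_INNER; the presorted span doubles
// every iteration until it covers the whole buffer.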
void EffectsRD::sort_buffer(RID p_uniform_set, int p_size) {
    Sort::PushConstant push_constant;
    push_constant.total_elements = p_size;

    bool done = true;
    int numThreadGroups = ((p_size - 1) >> 9) + 1;

    if (numThreadGroups > 1) {
        done = false;
    }

    RD::ComputeListID compute_list = RD::get_singleton()->compute_list_begin();

    RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, sort.pipelines[SORT_MODE_BLOCK]);
    RD::get_singleton()->compute_list_bind_uniform_set(compute_list, p_uniform_set, 1);
    RD::get_singleton()->compute_list_set_push_constant(compute_list, &push_constant, sizeof(Sort::PushConstant));
    RD::get_singleton()->compute_list_dispatch(compute_list, numThreadGroups, 1, 1);

    int presorted = 512;

    while (!done) {
        RD::get_singleton()->compute_list_add_barrier(compute_list);

        done = true;
        RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, sort.pipelines[SORT_MODE_STEP]);

        numThreadGroups = 0;

        if (p_size > presorted) {
            if (p_size > presorted * 2) {
                done = false;
            }

            int pow2 = presorted;
            while (pow2 < p_size) {
                pow2 *= 2;
            }
            numThreadGroups = pow2 >> 9;
        }

        unsigned int nMergeSize = presorted * 2;

        for (unsigned int nMergeSubSize = nMergeSize >> 1; nMergeSubSize > 256; nMergeSubSize = nMergeSubSize >> 1) {
            push_constant.job_params[0] = nMergeSubSize;
            if (nMergeSubSize == nMergeSize >> 1) {
                push_constant.job_params[1] = (2 * nMergeSubSize - 1);
                push_constant.job_params[2] = -1;
            } else {
                push_constant.job_params[1] = nMergeSubSize;
                push_constant.job_params[2] = 1;
            }
            push_constant.job_params[3] = 0;

            RD::get_singleton()->compute_list_set_push_constant(compute_list, &push_constant, sizeof(Sort::PushConstant));
            RD::get_singleton()->compute_list_dispatch(compute_list, numThreadGroups, 1, 1);
            RD::get_singleton()->compute_list_add_barrier(compute_list);
        }

        RD::get_singleton()->compute_list_bind_compute_pipeline(compute_list, sort.pipelines[SORT_MODE_INNER]);
        RD::get_singleton()->compute_list_set_push_constant(compute_list, &push_constant, sizeof(Sort::PushConstant));
        RD::get_singleton()->compute_list_dispatch(compute_list, numThreadGroups, 1, 1);

        presorted *= 2;
    }

    RD::get_singleton()->compute_list_end();
}

EffectsRD::EffectsRD(bool p_prefer_raster_effects) {
    prefer_raster_effects = p_prefer_raster_effects;

    if (prefer_raster_effects) {
        // init blur shader (on compute use copy shader)
        Vector<String> blur_modes;
        blur_modes.push_back("\n#define MODE_MIPMAP\n"); // BLUR_MIPMAP
        blur_modes.push_back("\n#define MODE_GAUSSIAN_BLUR\n"); // BLUR_MODE_GAUSSIAN_BLUR
        blur_modes.push_back("\n#define MODE_GAUSSIAN_GLOW\n"); // BLUR_MODE_GAUSSIAN_GLOW
        blur_modes.push_back("\n#define MODE_GAUSSIAN_GLOW\n#define GLOW_USE_AUTO_EXPOSURE\n"); // BLUR_MODE_GAUSSIAN_GLOW_AUTO_EXPOSURE
        blur_modes.push_back("\n#define MODE_COPY\n"); // BLUR_MODE_COPY

        blur_raster.shader.initialize(blur_modes);
        memset(&blur_raster.push_constant, 0, sizeof(BlurRasterPushConstant));
        blur_raster.shader_version = blur_raster.shader.version_create();

        for (int i = 0; i < BLUR_MODE_MAX; i++) {
            blur_raster.pipelines[i].setup(blur_raster.shader.version_get_shader(blur_raster.shader_version, i), RD::RENDER_PRIMITIVE_TRIANGLES, RD::PipelineRasterizationState(), RD::PipelineMultisampleState(), RD::PipelineDepthStencilState(), RD::PipelineColorBlendState::create_disabled(), 0);
        }
    } else {
        // not used in clustered
        for (int i = 0; i < BLUR_MODE_MAX; i++) {
            blur_raster.pipelines[i].clear();
        }
    }

    if (!prefer_raster_effects) { // Initialize copy
        Vector<String> copy_modes;
        copy_modes.push_back("\n#define MODE_GAUSSIAN_BLUR\n");
        copy_modes.push_back("\n#define MODE_GAUSSIAN_BLUR\n#define DST_IMAGE_8BIT\n");
        copy_modes.push_back("\n#define MODE_GAUSSIAN_GLOW\n");
        copy_modes.push_back("\n#define MODE_GAUSSIAN_GLOW\n#define GLOW_USE_AUTO_EXPOSURE\n");
        copy_modes.push_back("\n#define MODE_SIMPLE_COPY\n");
        copy_modes.push_back("\n#define MODE_SIMPLE_COPY\n#define DST_IMAGE_8BIT\n");
        copy_modes.push_back("\n#define MODE_SIMPLE_COPY_DEPTH\n");
        copy_modes.push_back("\n#define MODE_SET_COLOR\n");
        copy_modes.push_back("\n#define MODE_SET_COLOR\n#define DST_IMAGE_8BIT\n");
        copy_modes.push_back("\n#define MODE_MIPMAP\n");
        copy_modes.push_back("\n#define MODE_LINEARIZE_DEPTH_COPY\n");
        copy_modes.push_back("\n#define MODE_CUBEMAP_TO_PANORAMA\n");
        copy_modes.push_back("\n#define MODE_CUBEMAP_ARRAY_TO_PANORAMA\n");

        copy.shader.initialize(copy_modes);
        memset(&copy.push_constant, 0, sizeof(CopyPushConstant));

        if (prefer_raster_effects) {
            // disable shaders we can't use
            copy.shader.set_variant_enabled(COPY_MODE_GAUSSIAN_COPY, false);
            copy.shader.set_variant_enabled(COPY_MODE_GAUSSIAN_COPY_8BIT, false);
            copy.shader.set_variant_enabled(COPY_MODE_GAUSSIAN_GLOW, false);
            copy.shader.set_variant_enabled(COPY_MODE_GAUSSIAN_GLOW_AUTO_EXPOSURE, false);
        }

        copy.shader_version = copy.shader.version_create();

        for (int i = 0; i < COPY_MODE_MAX; i++) {
            if (copy.shader.is_variant_enabled(i)) {
                copy.pipelines[i] = RD::get_singleton()->compute_pipeline_create(copy.shader.version_get_shader(copy.shader_version, i));
            }
        }
    }

    {
        Vector<String> copy_modes;
        copy_modes.push_back("\n");
        copy_modes.push_back("\n#define MODE_PANORAMA_TO_DP\n");
        copy_modes.push_back("\n#define MODE_TWO_SOURCES\n");

        copy_to_fb.shader.initialize(copy_modes);

        copy_to_fb.shader_version = copy_to_fb.shader.version_create();

        //use additive

        for (int i = 0; i < COPY_TO_FB_MAX; i++) {
            copy_to_fb.pipelines[i].setup(copy_to_fb.shader.version_get_shader(copy_to_fb.shader_version, i), RD::RENDER_PRIMITIVE_TRIANGLES, RD::PipelineRasterizationState(), RD::PipelineMultisampleState(), RD::PipelineDepthStencilState(), RD::PipelineColorBlendState::create_disabled(), 0);
        }
    }

    {
        // Initialize roughness
        Vector<String> cubemap_roughness_modes;
        cubemap_roughness_modes.push_back("");

        if (prefer_raster_effects) {
            roughness.raster_shader.initialize(cubemap_roughness_modes);

            roughness.shader_version = roughness.raster_shader.version_create();

            roughness.raster_pipeline.setup(roughness.raster_shader.version_get_shader(roughness.shader_version, 0), RD::RENDER_PRIMITIVE_TRIANGLES, RD::PipelineRasterizationState(), RD::PipelineMultisampleState(), RD::PipelineDepthStencilState(), RD::PipelineColorBlendState::create_disabled(), 0);
        } else {
            roughness.compute_shader.initialize(cubemap_roughness_modes);

            roughness.shader_version = roughness.compute_shader.version_create();

            roughness.compute_pipeline = RD::get_singleton()->compute_pipeline_create(roughness.compute_shader.version_get_shader(roughness.shader_version, 0));
            roughness.raster_pipeline.clear();
        }
    }

    {
        // Initialize tonemapper
        Vector<String> tonemap_modes;

        tonemap_modes.push_back("\n");
        tonemap_modes.push_back("\n#define USE_GLOW_FILTER_BICUBIC\n");
        tonemap_modes.push_back("\n#define USE_1D_LUT\n");
        tonemap_modes.push_back("\n#define USE_GLOW_FILTER_BICUBIC\n#define USE_1D_LUT\n");
        tonemap_modes.push_back("\n#define SUBPASS\n");
        tonemap_modes.push_back("\n#define SUBPASS\n#define USE_1D_LUT\n");

        // multiview versions of our shaders
        tonemap_modes.push_back("\n#define MULTIVIEW\n");
        tonemap_modes.push_back("\n#define MULTIVIEW\n#define USE_GLOW_FILTER_BICUBIC\n");
        tonemap_modes.push_back("\n#define MULTIVIEW\n#define USE_1D_LUT\n");
        tonemap_modes.push_back("\n#define MULTIVIEW\n#define USE_GLOW_FILTER_BICUBIC\n#define USE_1D_LUT\n");
        tonemap_modes.push_back("\n#define MULTIVIEW\n#define SUBPASS\n");
        tonemap_modes.push_back("\n#define MULTIVIEW\n#define SUBPASS\n#define USE_1D_LUT\n");

        tonemap.shader.initialize(tonemap_modes);

        if (!RendererCompositorRD::singleton->is_xr_enabled()) {
            tonemap.shader.set_variant_enabled(TONEMAP_MODE_NORMAL_MULTIVIEW, false);
            tonemap.shader.set_variant_enabled(TONEMAP_MODE_BICUBIC_GLOW_FILTER_MULTIVIEW, false);
            tonemap.shader.set_variant_enabled(TONEMAP_MODE_1D_LUT_MULTIVIEW, false);
            tonemap.shader.set_variant_enabled(TONEMAP_MODE_BICUBIC_GLOW_FILTER_1D_LUT_MULTIVIEW, false);
            tonemap.shader.set_variant_enabled(TONEMAP_MODE_SUBPASS_MULTIVIEW, false);
            tonemap.shader.set_variant_enabled(TONEMAP_MODE_SUBPASS_1D_LUT_MULTIVIEW, false);
        }

        tonemap.shader_version = tonemap.shader.version_create();

        for (int i = 0; i < TONEMAP_MODE_MAX; i++) {
            if (tonemap.shader.is_variant_enabled(i)) {
                tonemap.pipelines[i].setup(tonemap.shader.version_get_shader(tonemap.shader_version, i), RD::RENDER_PRIMITIVE_TRIANGLES, RD::PipelineRasterizationState(), RD::PipelineMultisampleState(), RD::PipelineDepthStencilState(), RD::PipelineColorBlendState::create_disabled(), 0);
            } else {
                tonemap.pipelines[i].clear();
            }
        }
    }

    if (prefer_raster_effects) {
        Vector<String> luminance_reduce_modes;
        luminance_reduce_modes.push_back("\n#define FIRST_PASS\n"); // LUMINANCE_REDUCE_FRAGMENT_FIRST
        luminance_reduce_modes.push_back("\n"); // LUMINANCE_REDUCE_FRAGMENT
        luminance_reduce_modes.push_back("\n#define FINAL_PASS\n"); // LUMINANCE_REDUCE_FRAGMENT_FINAL

        luminance_reduce_raster.shader.initialize(luminance_reduce_modes);
        memset(&luminance_reduce_raster.push_constant, 0, sizeof(LuminanceReduceRasterPushConstant));
        luminance_reduce_raster.shader_version = luminance_reduce_raster.shader.version_create();

        for (int i = 0; i < LUMINANCE_REDUCE_FRAGMENT_MAX; i++) {
            luminance_reduce_raster.pipelines[i].setup(luminance_reduce_raster.shader.version_get_shader(luminance_reduce_raster.shader_version, i), RD::RENDER_PRIMITIVE_TRIANGLES, RD::PipelineRasterizationState(), RD::PipelineMultisampleState(), RD::PipelineDepthStencilState(), RD::PipelineColorBlendState::create_disabled(), 0);
        }
    } else {
        // Initialize luminance_reduce
        Vector<String> luminance_reduce_modes;
        luminance_reduce_modes.push_back("\n#define READ_TEXTURE\n");
        luminance_reduce_modes.push_back("\n");
        luminance_reduce_modes.push_back("\n#define WRITE_LUMINANCE\n");

        luminance_reduce.shader.initialize(luminance_reduce_modes);

        luminance_reduce.shader_version = luminance_reduce.shader.version_create();

        for (int i = 0; i < LUMINANCE_REDUCE_MAX; i++) {
            luminance_reduce.pipelines[i] = RD::get_singleton()->compute_pipeline_create(luminance_reduce.shader.version_get_shader(luminance_reduce.shader_version, i));
        }

        for (int i = 0; i < LUMINANCE_REDUCE_FRAGMENT_MAX; i++) {
            luminance_reduce_raster.pipelines[i].clear();
        }
    }

    {
        // Initialize copier
        Vector<String> copy_modes;
        copy_modes.push_back("\n");

        cube_to_dp.shader.initialize(copy_modes);

        cube_to_dp.shader_version = cube_to_dp.shader.version_create();
        RID shader = cube_to_dp.shader.version_get_shader(cube_to_dp.shader_version, 0);
        RD::PipelineDepthStencilState dss;
        dss.enable_depth_test = true;
        dss.depth_compare_operator = RD::COMPARE_OP_ALWAYS;
        dss.enable_depth_write = true;
        cube_to_dp.pipeline.setup(shader, RD::RENDER_PRIMITIVE_TRIANGLES, RD::PipelineRasterizationState(), RD::PipelineMultisampleState(), dss, RD::PipelineColorBlendState(), 0);
    }

    // Initialize bokeh
    Vector<String> bokeh_modes;
    bokeh_modes.push_back("\n#define MODE_GEN_BLUR_SIZE\n");
    bokeh_modes.push_back("\n#define MODE_BOKEH_BOX\n#define OUTPUT_WEIGHT\n");
    bokeh_modes.push_back("\n#define MODE_BOKEH_BOX\n");
    bokeh_modes.push_back("\n#define MODE_BOKEH_HEXAGONAL\n#define OUTPUT_WEIGHT\n");
    bokeh_modes.push_back("\n#define MODE_BOKEH_HEXAGONAL\n");
    bokeh_modes.push_back("\n#define MODE_BOKEH_CIRCULAR\n#define OUTPUT_WEIGHT\n");
    bokeh_modes.push_back("\n#define MODE_COMPOSITE_BOKEH\n");
    if (prefer_raster_effects) {
        bokeh.raster_shader.initialize(bokeh_modes);

        bokeh.shader_version = bokeh.raster_shader.version_create();
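
        // Color attachment count per bokeh variant; variants that also output a weight
        // render to a second attachment.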
        const int att_count[BOKEH_MAX] = { 1, 2, 1, 2, 1, 2, 1 };
        for (int i = 0; i < BOKEH_MAX; i++) {
            RD::PipelineColorBlendState blend_state = (i == BOKEH_COMPOSITE) ? RD::PipelineColorBlendState::create_blend(att_count[i]) : RD::PipelineColorBlendState::create_disabled(att_count[i]);
            bokeh.raster_pipelines[i].setup(bokeh.raster_shader.version_get_shader(bokeh.shader_version, i), RD::RENDER_PRIMITIVE_TRIANGLES, RD::PipelineRasterizationState(), RD::PipelineMultisampleState(), RD::PipelineDepthStencilState(), blend_state, 0);
        }
    } else {
        bokeh.compute_shader.initialize(bokeh_modes);
        bokeh.compute_shader.set_variant_enabled(BOKEH_GEN_BOKEH_BOX_NOWEIGHT, false);
        bokeh.compute_shader.set_variant_enabled(BOKEH_GEN_BOKEH_HEXAGONAL_NOWEIGHT, false);
        bokeh.shader_version = bokeh.compute_shader.version_create();

        for (int i = 0; i < BOKEH_MAX; i++) {
            if (bokeh.compute_shader.is_variant_enabled(i)) {
                bokeh.compute_pipelines[i] = RD::get_singleton()->compute_pipeline_create(bokeh.compute_shader.version_get_shader(bokeh.shader_version, i));
            }
        }
        for (int i = 0; i < BOKEH_MAX; i++) {
            bokeh.raster_pipelines[i].clear();
        }
    }

    if (!prefer_raster_effects) {
        // Initialize ssao
        RD::SamplerState sampler;
        sampler.mag_filter = RD::SAMPLER_FILTER_NEAREST;
        sampler.min_filter = RD::SAMPLER_FILTER_NEAREST;
        sampler.mip_filter = RD::SAMPLER_FILTER_NEAREST;
        sampler.repeat_u = RD::SAMPLER_REPEAT_MODE_MIRRORED_REPEAT;
        sampler.repeat_v = RD::SAMPLER_REPEAT_MODE_MIRRORED_REPEAT;
        sampler.repeat_w = RD::SAMPLER_REPEAT_MODE_MIRRORED_REPEAT;
        sampler.max_lod = 4;

        ssao.mirror_sampler = RD::get_singleton()->sampler_create(sampler);

        uint32_t pipeline = 0;
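
        // All SSAO compute pipelines share one array; `pipeline` walks it across the
        // downsample, gather, importance map, blur and interleave shader groups created
        // below and is checked against SSAO_MAX at the end.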
        {
            Vector<String> ssao_modes;
            ssao_modes.push_back("\n");
            ssao_modes.push_back("\n#define USE_HALF_SIZE\n");
            ssao_modes.push_back("\n#define GENERATE_MIPS\n");
            ssao_modes.push_back("\n#define GENERATE_MIPS\n#define USE_HALF_SIZE");
            ssao_modes.push_back("\n#define USE_HALF_BUFFERS\n");
            ssao_modes.push_back("\n#define USE_HALF_BUFFERS\n#define USE_HALF_SIZE");

            ssao.downsample_shader.initialize(ssao_modes);

            ssao.downsample_shader_version = ssao.downsample_shader.version_create();

            for (int i = 0; i <= SSAO_DOWNSAMPLE_HALF_RES_HALF; i++) {
                ssao.pipelines[pipeline] = RD::get_singleton()->compute_pipeline_create(ssao.downsample_shader.version_get_shader(ssao.downsample_shader_version, i));
                pipeline++;
            }
        }

        {
            Vector<String> ssao_modes;
            ssao_modes.push_back("\n");
            ssao_modes.push_back("\n#define SSAO_BASE\n");
            ssao_modes.push_back("\n#define ADAPTIVE\n");

            ssao.gather_shader.initialize(ssao_modes);

            ssao.gather_shader_version = ssao.gather_shader.version_create();

            for (int i = SSAO_GATHER; i <= SSAO_GATHER_ADAPTIVE; i++) {
                ssao.pipelines[pipeline] = RD::get_singleton()->compute_pipeline_create(ssao.gather_shader.version_get_shader(ssao.gather_shader_version, i - SSAO_GATHER));
                pipeline++;
            }
            ssao.gather_constants_buffer = RD::get_singleton()->uniform_buffer_create(sizeof(SSAOGatherConstants));
            SSAOGatherConstants gather_constants;

            const int sub_pass_count = 5;
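            // Precompute a scaled 2x2 rotation matrix for each of the 4 passes x 5 sub-passes;
            // the table is uploaded to gather_constants_buffer for the gather shader.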
            for (int pass = 0; pass < 4; pass++) {
                for (int subPass = 0; subPass < sub_pass_count; subPass++) {
                    int a = pass;
                    int spmap[5]{ 0, 1, 4, 3, 2 };
                    int b = spmap[subPass];

                    float ca, sa;
                    float angle0 = (float(a) + float(b) / float(sub_pass_count)) * Math_PI * 0.5f;

                    ca = Math::cos(angle0);
                    sa = Math::sin(angle0);

                    float scale = 1.0f + (a - 1.5f + (b - (sub_pass_count - 1.0f) * 0.5f) / float(sub_pass_count)) * 0.07f;

                    gather_constants.rotation_matrices[pass * 20 + subPass * 4 + 0] = scale * ca;
                    gather_constants.rotation_matrices[pass * 20 + subPass * 4 + 1] = scale * -sa;
                    gather_constants.rotation_matrices[pass * 20 + subPass * 4 + 2] = -scale * sa;
                    gather_constants.rotation_matrices[pass * 20 + subPass * 4 + 3] = -scale * ca;
                }
            }

            RD::get_singleton()->buffer_update(ssao.gather_constants_buffer, 0, sizeof(SSAOGatherConstants), &gather_constants);
        }

        {
            Vector<String> ssao_modes;
            ssao_modes.push_back("\n#define GENERATE_MAP\n");
            ssao_modes.push_back("\n#define PROCESS_MAPA\n");
            ssao_modes.push_back("\n#define PROCESS_MAPB\n");

            ssao.importance_map_shader.initialize(ssao_modes);

            ssao.importance_map_shader_version = ssao.importance_map_shader.version_create();

            for (int i = SSAO_GENERATE_IMPORTANCE_MAP; i <= SSAO_PROCESS_IMPORTANCE_MAPB; i++) {
                ssao.pipelines[pipeline] = RD::get_singleton()->compute_pipeline_create(ssao.importance_map_shader.version_get_shader(ssao.importance_map_shader_version, i - SSAO_GENERATE_IMPORTANCE_MAP));
                pipeline++;
            }
            ssao.importance_map_load_counter = RD::get_singleton()->storage_buffer_create(sizeof(uint32_t));
            int zero[1] = { 0 };
            RD::get_singleton()->buffer_update(ssao.importance_map_load_counter, 0, sizeof(uint32_t), &zero);
            RD::get_singleton()->set_resource_name(ssao.importance_map_load_counter, "Importance Map Load Counter");

            Vector<RD::Uniform> uniforms;
            {
                RD::Uniform u;
                u.uniform_type = RD::UNIFORM_TYPE_STORAGE_BUFFER;
                u.binding = 0;
                u.ids.push_back(ssao.importance_map_load_counter);
                uniforms.push_back(u);
            }
            ssao.counter_uniform_set = RD::get_singleton()->uniform_set_create(uniforms, ssao.importance_map_shader.version_get_shader(ssao.importance_map_shader_version, 2), 2);
            RD::get_singleton()->set_resource_name(ssao.counter_uniform_set, "Load Counter Uniform Set");
        }

        {
            Vector<String> ssao_modes;
            ssao_modes.push_back("\n#define MODE_NON_SMART\n");
            ssao_modes.push_back("\n#define MODE_SMART\n");
            ssao_modes.push_back("\n#define MODE_WIDE\n");

            ssao.blur_shader.initialize(ssao_modes);

            ssao.blur_shader_version = ssao.blur_shader.version_create();

            for (int i = SSAO_BLUR_PASS; i <= SSAO_BLUR_PASS_WIDE; i++) {
                ssao.pipelines[pipeline] = RD::get_singleton()->compute_pipeline_create(ssao.blur_shader.version_get_shader(ssao.blur_shader_version, i - SSAO_BLUR_PASS));
                pipeline++;
            }
        }

        {
            Vector<String> ssao_modes;
            ssao_modes.push_back("\n#define MODE_NON_SMART\n");
            ssao_modes.push_back("\n#define MODE_SMART\n");
            ssao_modes.push_back("\n#define MODE_HALF\n");

            ssao.interleave_shader.initialize(ssao_modes);

            ssao.interleave_shader_version = ssao.interleave_shader.version_create();

            for (int i = SSAO_INTERLEAVE; i <= SSAO_INTERLEAVE_HALF; i++) {
                ssao.pipelines[pipeline] = RD::get_singleton()->compute_pipeline_create(ssao.interleave_shader.version_get_shader(ssao.interleave_shader_version, i - SSAO_INTERLEAVE));
                RD::get_singleton()->set_resource_name(ssao.pipelines[pipeline], "Interleave Pipeline " + itos(i));
                pipeline++;
            }
        }

        ERR_FAIL_COND(pipeline != SSAO_MAX);
    }

    if (!prefer_raster_effects) {
        // Initialize roughness limiter
        Vector<String> shader_modes;
        shader_modes.push_back("");

        roughness_limiter.shader.initialize(shader_modes);

        roughness_limiter.shader_version = roughness_limiter.shader.version_create();

        roughness_limiter.pipeline = RD::get_singleton()->compute_pipeline_create(roughness_limiter.shader.version_get_shader(roughness_limiter.shader_version, 0));
    }

    {
        //Initialize cubemap downsampler
        Vector<String> cubemap_downsampler_modes;
        cubemap_downsampler_modes.push_back("");

        if (prefer_raster_effects) {
            cubemap_downsampler.raster_shader.initialize(cubemap_downsampler_modes);

            cubemap_downsampler.shader_version = cubemap_downsampler.raster_shader.version_create();

            cubemap_downsampler.raster_pipeline.setup(cubemap_downsampler.raster_shader.version_get_shader(cubemap_downsampler.shader_version, 0), RD::RENDER_PRIMITIVE_TRIANGLES, RD::PipelineRasterizationState(), RD::PipelineMultisampleState(), RD::PipelineDepthStencilState(), RD::PipelineColorBlendState::create_disabled(), 0);
        } else {
            cubemap_downsampler.compute_shader.initialize(cubemap_downsampler_modes);

            cubemap_downsampler.shader_version = cubemap_downsampler.compute_shader.version_create();

            cubemap_downsampler.compute_pipeline = RD::get_singleton()->compute_pipeline_create(cubemap_downsampler.compute_shader.version_get_shader(cubemap_downsampler.shader_version, 0));
            cubemap_downsampler.raster_pipeline.clear();
        }
    }

    {
        // Initialize cubemap filter
        filter.use_high_quality = GLOBAL_GET("rendering/reflections/sky_reflections/fast_filter_high_quality");

        Vector<String> cubemap_filter_modes;
        cubemap_filter_modes.push_back("\n#define USE_HIGH_QUALITY\n");
        cubemap_filter_modes.push_back("\n#define USE_LOW_QUALITY\n");
        cubemap_filter_modes.push_back("\n#define USE_HIGH_QUALITY\n#define USE_TEXTURE_ARRAY\n");
        cubemap_filter_modes.push_back("\n#define USE_LOW_QUALITY\n#define USE_TEXTURE_ARRAY\n");
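
        // Create and fill the coefficient buffer that matches the selected filter quality.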
        if (filter.use_high_quality) {
            filter.coefficient_buffer = RD::get_singleton()->storage_buffer_create(sizeof(high_quality_coeffs));
            RD::get_singleton()->buffer_update(filter.coefficient_buffer, 0, sizeof(high_quality_coeffs), &high_quality_coeffs[0]);
        } else {
            filter.coefficient_buffer = RD::get_singleton()->storage_buffer_create(sizeof(low_quality_coeffs));
            RD::get_singleton()->buffer_update(filter.coefficient_buffer, 0, sizeof(low_quality_coeffs), &low_quality_coeffs[0]);
        }

        if (prefer_raster_effects) {
            filter.raster_shader.initialize(cubemap_filter_modes);

            // array variants are not supported in raster
            filter.raster_shader.set_variant_enabled(FILTER_MODE_HIGH_QUALITY_ARRAY, false);
            filter.raster_shader.set_variant_enabled(FILTER_MODE_LOW_QUALITY_ARRAY, false);

            filter.shader_version = filter.raster_shader.version_create();

            for (int i = 0; i < FILTER_MODE_MAX; i++) {
                if (filter.raster_shader.is_variant_enabled(i)) {
                    filter.raster_pipelines[i].setup(filter.raster_shader.version_get_shader(filter.shader_version, i), RD::RENDER_PRIMITIVE_TRIANGLES, RD::PipelineRasterizationState(), RD::PipelineMultisampleState(), RD::PipelineDepthStencilState(), RD::PipelineColorBlendState::create_disabled(), 0);
                } else {
                    filter.raster_pipelines[i].clear();
                }
            }

            Vector<RD::Uniform> uniforms;
            {
                RD::Uniform u;
                u.uniform_type = RD::UNIFORM_TYPE_STORAGE_BUFFER;
                u.binding = 0;
                u.ids.push_back(filter.coefficient_buffer);
                uniforms.push_back(u);
            }
            filter.uniform_set = RD::get_singleton()->uniform_set_create(uniforms, filter.raster_shader.version_get_shader(filter.shader_version, filter.use_high_quality ? 0 : 1), 1);
        } else {
            filter.compute_shader.initialize(cubemap_filter_modes);
            filter.shader_version = filter.compute_shader.version_create();

            for (int i = 0; i < FILTER_MODE_MAX; i++) {
                filter.compute_pipelines[i] = RD::get_singleton()->compute_pipeline_create(filter.compute_shader.version_get_shader(filter.shader_version, i));
                filter.raster_pipelines[i].clear();
            }

            Vector<RD::Uniform> uniforms;
            {
                RD::Uniform u;
                u.uniform_type = RD::UNIFORM_TYPE_STORAGE_BUFFER;
                u.binding = 0;
                u.ids.push_back(filter.coefficient_buffer);
                uniforms.push_back(u);
            }
            filter.uniform_set = RD::get_singleton()->uniform_set_create(uniforms, filter.compute_shader.version_get_shader(filter.shader_version, filter.use_high_quality ? 0 : 1), 1);
        }
    }

    if (!prefer_raster_effects) {
        Vector<String> specular_modes;
        specular_modes.push_back("\n#define MODE_MERGE\n");
        specular_modes.push_back("\n#define MODE_MERGE\n#define MODE_SSR\n");
        specular_modes.push_back("\n");
        specular_modes.push_back("\n#define MODE_SSR\n");

        specular_merge.shader.initialize(specular_modes);

        specular_merge.shader_version = specular_merge.shader.version_create();

        //use additive

        RD::PipelineColorBlendState::Attachment ba;
        ba.enable_blend = true;
        ba.src_color_blend_factor = RD::BLEND_FACTOR_ONE;
        ba.dst_color_blend_factor = RD::BLEND_FACTOR_ONE;
        ba.src_alpha_blend_factor = RD::BLEND_FACTOR_ONE;
        ba.dst_alpha_blend_factor = RD::BLEND_FACTOR_ONE;
        ba.color_blend_op = RD::BLEND_OP_ADD;
        ba.alpha_blend_op = RD::BLEND_OP_ADD;

        RD::PipelineColorBlendState blend_additive;
        blend_additive.attachments.push_back(ba);

        for (int i = 0; i < SPECULAR_MERGE_MAX; i++) {
            RD::PipelineColorBlendState blend_state;
            if (i == SPECULAR_MERGE_ADDITIVE_ADD || i == SPECULAR_MERGE_ADDITIVE_SSR) {
                blend_state = blend_additive;
            } else {
                blend_state = RD::PipelineColorBlendState::create_disabled();
            }
            specular_merge.pipelines[i].setup(specular_merge.shader.version_get_shader(specular_merge.shader_version, i), RD::RENDER_PRIMITIVE_TRIANGLES, RD::PipelineRasterizationState(), RD::PipelineMultisampleState(), RD::PipelineDepthStencilState(), blend_state, 0);
        }
    }

    if (!prefer_raster_effects) {
        {
            Vector<String> ssr_modes;
            ssr_modes.push_back("\n");
            ssr_modes.push_back("\n#define MODE_ROUGH\n");

            ssr.shader.initialize(ssr_modes);

            ssr.shader_version = ssr.shader.version_create();

            for (int i = 0; i < SCREEN_SPACE_REFLECTION_MAX; i++) {
                ssr.pipelines[i] = RD::get_singleton()->compute_pipeline_create(ssr.shader.version_get_shader(ssr.shader_version, i));
            }
        }

        {
            Vector<String> ssr_filter_modes;
            ssr_filter_modes.push_back("\n");
            ssr_filter_modes.push_back("\n#define VERTICAL_PASS\n");

            ssr_filter.shader.initialize(ssr_filter_modes);

            ssr_filter.shader_version = ssr_filter.shader.version_create();

            for (int i = 0; i < SCREEN_SPACE_REFLECTION_FILTER_MAX; i++) {
                ssr_filter.pipelines[i] = RD::get_singleton()->compute_pipeline_create(ssr_filter.shader.version_get_shader(ssr_filter.shader_version, i));
            }
        }

        {
            Vector<String> ssr_scale_modes;
            ssr_scale_modes.push_back("\n");

            ssr_scale.shader.initialize(ssr_scale_modes);

            ssr_scale.shader_version = ssr_scale.shader.version_create();

            ssr_scale.pipeline = RD::get_singleton()->compute_pipeline_create(ssr_scale.shader.version_get_shader(ssr_scale.shader_version, 0));
        }

        {
            Vector<String> sss_modes;
            sss_modes.push_back("\n#define USE_11_SAMPLES\n");
            sss_modes.push_back("\n#define USE_17_SAMPLES\n");
            sss_modes.push_back("\n#define USE_25_SAMPLES\n");

            sss.shader.initialize(sss_modes);

            sss.shader_version = sss.shader.version_create();

            for (int i = 0; i < sss_modes.size(); i++) {
                sss.pipelines[i] = RD::get_singleton()->compute_pipeline_create(sss.shader.version_get_shader(sss.shader_version, i));
            }
        }

        {
            Vector<String> resolve_modes;
            resolve_modes.push_back("\n#define MODE_RESOLVE_GI\n");
            resolve_modes.push_back("\n#define MODE_RESOLVE_GI\n#define VOXEL_GI_RESOLVE\n");
            resolve_modes.push_back("\n#define MODE_RESOLVE_DEPTH\n");

            resolve.shader.initialize(resolve_modes);

            resolve.shader_version = resolve.shader.version_create();

            for (int i = 0; i < RESOLVE_MODE_MAX; i++) {
                resolve.pipelines[i] = RD::get_singleton()->compute_pipeline_create(resolve.shader.version_get_shader(resolve.shader_version, i));
            }
        }
    }

    {
        Vector<String> sort_modes;
        sort_modes.push_back("\n#define MODE_SORT_BLOCK\n");
        sort_modes.push_back("\n#define MODE_SORT_STEP\n");
        sort_modes.push_back("\n#define MODE_SORT_INNER\n");

        sort.shader.initialize(sort_modes);

        sort.shader_version = sort.shader.version_create();

        for (int i = 0; i < SORT_MODE_MAX; i++) {
            sort.pipelines[i] = RD::get_singleton()->compute_pipeline_create(sort.shader.version_get_shader(sort.shader_version, i));
        }
    }

    RD::SamplerState sampler;
    sampler.mag_filter = RD::SAMPLER_FILTER_LINEAR;
    sampler.min_filter = RD::SAMPLER_FILTER_LINEAR;
    sampler.max_lod = 0;

    default_sampler = RD::get_singleton()->sampler_create(sampler);
    RD::get_singleton()->set_resource_name(default_sampler, "Default Linear Sampler");

    sampler.min_filter = RD::SAMPLER_FILTER_LINEAR;
    sampler.mip_filter = RD::SAMPLER_FILTER_LINEAR;
    sampler.max_lod = 1e20;

    default_mipmap_sampler = RD::get_singleton()->sampler_create(sampler);
    RD::get_singleton()->set_resource_name(default_mipmap_sampler, "Default MipMap Sampler");

    { //create index array for copy shaders
        Vector<uint8_t> pv;
        pv.resize(6 * 4);
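        // Six 32-bit indices describing two triangles that cover a quad: (0, 1, 2) and (0, 2, 3).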
        {
            uint8_t *w = pv.ptrw();
            int *p32 = (int *)w;
            p32[0] = 0;
            p32[1] = 1;
            p32[2] = 2;
            p32[3] = 0;
            p32[4] = 2;
            p32[5] = 3;
        }
        index_buffer = RD::get_singleton()->index_buffer_create(6, RenderingDevice::INDEX_BUFFER_FORMAT_UINT32, pv);
        index_array = RD::get_singleton()->index_array_create(index_buffer, 0, 6);
    }
}

EffectsRD::~EffectsRD() {
    if (RD::get_singleton()->uniform_set_is_valid(filter.image_uniform_set)) {
        RD::get_singleton()->free(filter.image_uniform_set);
    }

    if (RD::get_singleton()->uniform_set_is_valid(filter.uniform_set)) {
        RD::get_singleton()->free(filter.uniform_set);
    }

    RD::get_singleton()->free(default_sampler);
    RD::get_singleton()->free(default_mipmap_sampler);
    RD::get_singleton()->free(index_buffer); //array gets freed as dependency
    RD::get_singleton()->free(filter.coefficient_buffer);

    if (prefer_raster_effects) {
        blur_raster.shader.version_free(blur_raster.shader_version);
        bokeh.raster_shader.version_free(bokeh.shader_version);
        luminance_reduce_raster.shader.version_free(luminance_reduce_raster.shader_version);
        roughness.raster_shader.version_free(roughness.shader_version);
        cubemap_downsampler.raster_shader.version_free(cubemap_downsampler.shader_version);
        filter.raster_shader.version_free(filter.shader_version);
    } else {
        bokeh.compute_shader.version_free(bokeh.shader_version);
        luminance_reduce.shader.version_free(luminance_reduce.shader_version);
        roughness.compute_shader.version_free(roughness.shader_version);
        cubemap_downsampler.compute_shader.version_free(cubemap_downsampler.shader_version);
        filter.compute_shader.version_free(filter.shader_version);
    }

    if (!prefer_raster_effects) {
        copy.shader.version_free(copy.shader_version);
        resolve.shader.version_free(resolve.shader_version);
        specular_merge.shader.version_free(specular_merge.shader_version);
        ssao.blur_shader.version_free(ssao.blur_shader_version);
        ssao.gather_shader.version_free(ssao.gather_shader_version);
        ssao.downsample_shader.version_free(ssao.downsample_shader_version);
        ssao.interleave_shader.version_free(ssao.interleave_shader_version);
        ssao.importance_map_shader.version_free(ssao.importance_map_shader_version);
        roughness_limiter.shader.version_free(roughness_limiter.shader_version);
        ssr.shader.version_free(ssr.shader_version);
        ssr_filter.shader.version_free(ssr_filter.shader_version);
        ssr_scale.shader.version_free(ssr_scale.shader_version);
        sss.shader.version_free(sss.shader_version);

        RD::get_singleton()->free(ssao.mirror_sampler);
        RD::get_singleton()->free(ssao.gather_constants_buffer);
        RD::get_singleton()->free(ssao.importance_map_load_counter);
    }

    copy_to_fb.shader.version_free(copy_to_fb.shader_version);
    cube_to_dp.shader.version_free(cube_to_dp.shader_version);
    sort.shader.version_free(sort.shader_version);
    tonemap.shader.version_free(tonemap.shader_version);
}