ShaderProgramParser.cpp

// Copyright (C) 2009-2021, Panagiotis Christopoulos Charitos and contributors.
// All rights reserved.
// Code licensed under the BSD License.
// http://www.anki3d.org/LICENSE

#include <AnKi/ShaderCompiler/ShaderProgramParser.h>

namespace anki
{

#define ANKI_PP_ERROR_MALFORMED() \
	ANKI_SHADER_COMPILER_LOGE("%s: Malformed expression: %s", fname.cstr(), line.cstr()); \
	return Error::USER_DATA

#define ANKI_PP_ERROR_MALFORMED_MSG(msg_) \
	ANKI_SHADER_COMPILER_LOGE("%s: " msg_ ": %s", fname.cstr(), line.cstr()); \
	return Error::USER_DATA

static const Array<CString, U32(ShaderType::COUNT)> SHADER_STAGE_NAMES = {
	{"VERTEX", "TESSELLATION_CONTROL", "TESSELLATION_EVALUATION", "GEOMETRY", "FRAGMENT", "COMPUTE", "RAY_GEN",
	 "ANY_HIT", "CLOSEST_HIT", "MISS", "INTERSECTION", "CALLABLE"}};

static const char* SHADER_HEADER = R"(#version 460 core
#define ANKI_BACKEND_MINOR %u
#define ANKI_BACKEND_MAJOR %u
#define ANKI_VENDOR_%s 1
#define ANKI_%s_SHADER 1
#define gl_VertexID gl_VertexIndex
#extension GL_EXT_control_flow_attributes : require
#define ANKI_UNROLL [[unroll]]
#define ANKI_LOOP [[dont_unroll]]
#define ANKI_BRANCH [[branch]]
#define ANKI_FLATTEN [[flatten]]
#extension GL_KHR_shader_subgroup_vote : require
#extension GL_KHR_shader_subgroup_ballot : require
#extension GL_KHR_shader_subgroup_shuffle : require
#extension GL_KHR_shader_subgroup_arithmetic : require
#extension GL_EXT_samplerless_texture_functions : require
#extension GL_EXT_shader_image_load_formatted : require
#extension GL_EXT_nonuniform_qualifier : enable
#extension GL_EXT_buffer_reference : enable
#extension GL_EXT_buffer_reference2 : enable
#extension GL_EXT_shader_explicit_arithmetic_types : enable
#extension GL_EXT_shader_explicit_arithmetic_types_int8 : enable
#extension GL_EXT_shader_explicit_arithmetic_types_int16 : enable
#extension GL_EXT_shader_explicit_arithmetic_types_int32 : enable
#extension GL_EXT_shader_explicit_arithmetic_types_int64 : enable
#extension GL_EXT_shader_explicit_arithmetic_types_float16 : enable
#extension GL_EXT_shader_explicit_arithmetic_types_float32 : enable
#extension GL_EXT_shader_explicit_arithmetic_types_float64 : enable
#extension GL_EXT_shader_atomic_int64 : enable
#extension GL_EXT_nonuniform_qualifier : enable
#extension GL_EXT_scalar_block_layout : enable
#define ANKI_MAX_BINDLESS_TEXTURES %u
#define ANKI_MAX_BINDLESS_IMAGES %u
#if %u || defined(ANKI_RAY_GEN_SHADER) || defined(ANKI_ANY_HIT_SHADER) || defined(ANKI_CLOSEST_HIT_SHADER) || defined(ANKI_MISS_SHADER) || defined(ANKI_INTERSECTION_SHADER) || defined(ANKI_CALLABLE_SHADER)
#	extension GL_EXT_ray_tracing : enable
#endif
#define ANKI_BINDLESS_SET(set_) \
layout(set = set_, binding = 0) uniform utexture2D u_bindlessTextures2dU32[ANKI_MAX_BINDLESS_TEXTURES]; \
layout(set = set_, binding = 0) uniform itexture2D u_bindlessTextures2dI32[ANKI_MAX_BINDLESS_TEXTURES]; \
layout(set = set_, binding = 0) uniform texture2D u_bindlessTextures2dF32[ANKI_MAX_BINDLESS_TEXTURES]; \
layout(set = set_, binding = 1) uniform readonly uimage2D u_bindlessImages2dU32[ANKI_MAX_BINDLESS_IMAGES]; \
layout(set = set_, binding = 1) uniform readonly iimage2D u_bindlessImages2dI32[ANKI_MAX_BINDLESS_IMAGES]; \
layout(set = set_, binding = 1) uniform readonly image2D u_bindlessImages2dF32[ANKI_MAX_BINDLESS_IMAGES]
#define F32 float
#define _ANKI_SIZEOF_float 4
#define Vec2 vec2
#define _ANKI_SIZEOF_vec2 8
#define Vec3 vec3
#define _ANKI_SIZEOF_vec3 12
#define Vec4 vec4
#define _ANKI_SIZEOF_vec4 16
#define F16 float16_t
#define _ANKI_SIZEOF_float16_t 2
#define HVec2 f16vec2
#define _ANKI_SIZEOF_f16vec2 4
#define HVec3 f16vec3
#define _ANKI_SIZEOF_f16vec3 6
#define HVec4 f16vec4
#define _ANKI_SIZEOF_f16vec4 8
#define U8 uint8_t
#define _ANKI_SIZEOF_uint8_t 1
#define U8Vec2 u8vec2
#define _ANKI_SIZEOF_u8vec2 2
#define U8Vec3 u8vec3
#define _ANKI_SIZEOF_u8vec3 3
#define U8Vec4 u8vec4
#define _ANKI_SIZEOF_u8vec4 4
#define I8 int8_t
#define _ANKI_SIZEOF_int8_t 1
#define I8Vec2 i8vec2
#define _ANKI_SIZEOF_i8vec2 2
#define I8Vec3 i8vec3
#define _ANKI_SIZEOF_i8vec3 3
#define I8Vec4 i8vec4
#define _ANKI_SIZEOF_i8vec4 4
#define U16 uint16_t
#define _ANKI_SIZEOF_uint16_t 2
#define U16Vec2 u16vec2
#define _ANKI_SIZEOF_u16vec2 4
#define U16Vec3 u16vec3
#define _ANKI_SIZEOF_u16vec3 6
#define U16Vec4 u16vec4
#define _ANKI_SIZEOF_u16vec4 8
#define I16 int16_t
#define _ANKI_SIZEOF_int16_t 2
#define I16Vec2 i16vec2
#define _ANKI_SIZEOF_i16vec2 4
#define I16Vec3 i16vec3
#define _ANKI_SIZEOF_i16vec3 6
#define I16Vec4 i16vec4
#define _ANKI_SIZEOF_i16vec4 8
#define U32 uint
#define _ANKI_SIZEOF_uint 4
#define UVec2 uvec2
#define _ANKI_SIZEOF_uvec2 8
#define UVec3 uvec3
#define _ANKI_SIZEOF_uvec3 12
#define UVec4 uvec4
#define _ANKI_SIZEOF_uvec4 16
#define I32 int
#define _ANKI_SIZEOF_int 4
#define IVec2 ivec2
#define _ANKI_SIZEOF_ivec2 8
#define IVec3 ivec3
#define _ANKI_SIZEOF_ivec3 12
#define IVec4 ivec4
#define _ANKI_SIZEOF_ivec4 16
#define U64 uint64_t
#define _ANKI_SIZEOF_uint64_t 8
#define U64Vec2 u64vec2
#define _ANKI_SIZEOF_u64vec2 16
#define U64Vec3 u64vec3
#define _ANKI_SIZEOF_u64vec3 24
#define U64Vec4 u64vec4
#define _ANKI_SIZEOF_u64vec4 32
#define I64 int64_t
#define _ANKI_SIZEOF_int64_t 8
#define I64Vec2 i64vec2
#define _ANKI_SIZEOF_i64vec2 16
#define I64Vec3 i64vec3
#define _ANKI_SIZEOF_i64vec3 24
#define I64Vec4 i64vec4
#define _ANKI_SIZEOF_i64vec4 32
#define Mat3 mat3
#define Mat4 mat4
#define _ANKI_SIZEOF_mat4 64
#define Mat3x4 mat3x4
#define _ANKI_SIZEOF_mat3x4 48
#define Bool bool
#define _ANKI_CONCATENATE(a, b) a##b
#define ANKI_CONCATENATE(a, b) _ANKI_CONCATENATE(a, b)
#define ANKI_SIZEOF(type) _ANKI_CONCATENATE(_ANKI_SIZEOF_, type)
#define ANKI_ALIGNOF(type) _ANKI_CONCATENATE(_ANKI_ALIGNOF_, type)
#define _ANKI_SCONST_X(type, n, id, defltVal) \
layout(constant_id = id) const type n = defltVal; \
const U32 ANKI_CONCATENATE(n, _CONST_ID) = id
#define _ANKI_SCONST_X2(type, componentType, n, id, defltVal, constWorkaround) \
layout(constant_id = id + 0) const componentType ANKI_CONCATENATE(_anki_const_0_2_, n) = defltVal[0]; \
layout(constant_id = id + 1) const componentType ANKI_CONCATENATE(_anki_const_1_2_, n) = defltVal[1]; \
constWorkaround componentType ANKI_CONCATENATE(n, _X) = ANKI_CONCATENATE(_anki_const_0_2_, n) + componentType(0); \
constWorkaround componentType ANKI_CONCATENATE(n, _Y) = ANKI_CONCATENATE(_anki_const_1_2_, n) + componentType(0); \
constWorkaround type n = type(ANKI_CONCATENATE(n, _X), ANKI_CONCATENATE(n, _Y)); \
const UVec2 ANKI_CONCATENATE(n, _CONST_ID) = UVec2(id, id + 1)
#define _ANKI_SCONST_X3(type, componentType, n, id, defltVal, constWorkaround) \
layout(constant_id = id + 0) const componentType ANKI_CONCATENATE(_anki_const_0_3_, n) = defltVal[0]; \
layout(constant_id = id + 1) const componentType ANKI_CONCATENATE(_anki_const_1_3_, n) = defltVal[1]; \
layout(constant_id = id + 2) const componentType ANKI_CONCATENATE(_anki_const_2_3_, n) = defltVal[2]; \
constWorkaround componentType ANKI_CONCATENATE(n, _X) = ANKI_CONCATENATE(_anki_const_0_3_, n) + componentType(0); \
constWorkaround componentType ANKI_CONCATENATE(n, _Y) = ANKI_CONCATENATE(_anki_const_1_3_, n) + componentType(0); \
constWorkaround componentType ANKI_CONCATENATE(n, _Z) = ANKI_CONCATENATE(_anki_const_2_3_, n) + componentType(0); \
constWorkaround type n = type(ANKI_CONCATENATE(n, _X), ANKI_CONCATENATE(n, _Y), ANKI_CONCATENATE(n, _Z)); \
const UVec3 ANKI_CONCATENATE(n, _CONST_ID) = UVec3(id, id + 1, id + 2)
#define _ANKI_SCONST_X4(type, componentType, n, id, defltVal, constWorkaround) \
layout(constant_id = id + 0) const componentType ANKI_CONCATENATE(_anki_const_0_4_, n) = defltVal[0]; \
layout(constant_id = id + 1) const componentType ANKI_CONCATENATE(_anki_const_1_4_, n) = defltVal[1]; \
layout(constant_id = id + 2) const componentType ANKI_CONCATENATE(_anki_const_2_4_, n) = defltVal[2]; \
layout(constant_id = id + 3) const componentType ANKI_CONCATENATE(_anki_const_3_4_, n) = defltVal[3]; \
constWorkaround componentType ANKI_CONCATENATE(n, _X) = ANKI_CONCATENATE(_anki_const_0_4_, n) + componentType(0); \
constWorkaround componentType ANKI_CONCATENATE(n, _Y) = ANKI_CONCATENATE(_anki_const_1_4_, n) + componentType(0); \
constWorkaround componentType ANKI_CONCATENATE(n, _Z) = ANKI_CONCATENATE(_anki_const_2_4_, n) + componentType(0); \
constWorkaround componentType ANKI_CONCATENATE(n, _W) = ANKI_CONCATENATE(_anki_const_3_4_, n) + componentType(0); \
constWorkaround type n = type(ANKI_CONCATENATE(n, _X), ANKI_CONCATENATE(n, _Y), ANKI_CONCATENATE(n, _Z), \
ANKI_CONCATENATE(n, _W)); \
const UVec4 ANKI_CONCATENATE(n, _CONST_ID) = UVec4(id, id + 1, id + 2, id + 3)
#define ANKI_SPECIALIZATION_CONSTANT_I32(n, id, defltVal) _ANKI_SCONST_X(I32, n, id, defltVal)
#define ANKI_SPECIALIZATION_CONSTANT_IVEC2(n, id, defltVal) _ANKI_SCONST_X2(IVec2, I32, n, id, defltVal, const)
#define ANKI_SPECIALIZATION_CONSTANT_IVEC3(n, id, defltVal) _ANKI_SCONST_X3(IVec3, I32, n, id, defltVal, const)
#define ANKI_SPECIALIZATION_CONSTANT_IVEC4(n, id, defltVal) _ANKI_SCONST_X4(IVec4, I32, n, id, defltVal, const)
#define ANKI_SPECIALIZATION_CONSTANT_U32(n, id, defltVal) _ANKI_SCONST_X(U32, n, id, defltVal)
#define ANKI_SPECIALIZATION_CONSTANT_UVEC2(n, id, defltVal) _ANKI_SCONST_X2(UVec2, U32, n, id, defltVal, const)
#define ANKI_SPECIALIZATION_CONSTANT_UVEC3(n, id, defltVal) _ANKI_SCONST_X3(UVec3, U32, n, id, defltVal, const)
#define ANKI_SPECIALIZATION_CONSTANT_UVEC4(n, id, defltVal) _ANKI_SCONST_X4(UVec4, U32, n, id, defltVal, const)
#define ANKI_SPECIALIZATION_CONSTANT_F32(n, id, defltVal) _ANKI_SCONST_X(F32, n, id, defltVal)
#define ANKI_SPECIALIZATION_CONSTANT_VEC2(n, id, defltVal) _ANKI_SCONST_X2(Vec2, F32, n, id, defltVal,)
#define ANKI_SPECIALIZATION_CONSTANT_VEC3(n, id, defltVal) _ANKI_SCONST_X3(Vec3, F32, n, id, defltVal,)
#define ANKI_SPECIALIZATION_CONSTANT_VEC4(n, id, defltVal) _ANKI_SCONST_X4(Vec4, F32, n, id, defltVal,)
#define ANKI_REF(type, alignment) \
layout(buffer_reference, scalar, buffer_reference_align = (alignment)) buffer type##Ref \
{ \
type m_value; \
}
#define ANKI_PADDING(bytes) U8 _padding_ ## __LINE__[bytes]
layout(std140, row_major) uniform;
layout(std140, row_major) buffer;
)";

static const U64 SHADER_HEADER_HASH = computeHash(SHADER_HEADER, sizeof(SHADER_HEADER));

ShaderProgramParser::ShaderProgramParser(CString fname, ShaderProgramFilesystemInterface* fsystem,
										 GenericMemoryPoolAllocator<U8> alloc,
										 const GpuDeviceCapabilities& gpuCapabilities,
										 const BindlessLimits& bindlessLimits)
	: m_alloc(alloc)
	, m_fname(alloc, fname)
	, m_fsystem(fsystem)
	, m_gpuCapabilities(gpuCapabilities)
	, m_bindlessLimits(bindlessLimits)
{
}

ShaderProgramParser::~ShaderProgramParser()
{
}

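// Splits a single source line into whitespace-separated tokens. For example, the line
// "#pragma anki start vert" becomes {"#pragma", "anki", "start", "vert"}; tabs count as spaces.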
void ShaderProgramParser::tokenizeLine(CString line, DynamicArrayAuto<StringAuto>& tokens) const
{
	ANKI_ASSERT(line.getLength() > 0);
	StringAuto l(m_alloc, line);

	// Replace all tabs with spaces
	for(char& c : l)
	{
		if(c == '\t')
		{
			c = ' ';
		}
	}

	// Split
	StringListAuto spaceTokens(m_alloc);
	spaceTokens.splitString(l, ' ', false);

	// Create the array
	for(const String& s : spaceTokens)
	{
		tokens.emplaceBack(m_alloc, s);
	}
}

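// Handles "#pragma anki start <stage>", where <stage> is one of: vert, tessc, tesse, geom, frag,
// comp, rgen, ahit, chit, miss, int, call. Example: "#pragma anki start frag".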
Error ShaderProgramParser::parsePragmaStart(const StringAuto* begin, const StringAuto* end, CString line, CString fname)
{
	ANKI_ASSERT(begin && end);
	if(begin >= end)
	{
		ANKI_PP_ERROR_MALFORMED();
	}

	ShaderType shaderType = ShaderType::COUNT;
	if(*begin == "vert")
	{
		shaderType = ShaderType::VERTEX;
	}
	else if(*begin == "tessc")
	{
		shaderType = ShaderType::TESSELLATION_CONTROL;
	}
  254. else if(*begin == "tesse")
  255. {
  256. }
  257. else if(*begin == "geom")
  258. {
  259. shaderType = ShaderType::GEOMETRY;
  260. }
  261. else if(*begin == "frag")
  262. {
  263. shaderType = ShaderType::FRAGMENT;
  264. }
  265. else if(*begin == "comp")
  266. {
  267. shaderType = ShaderType::COMPUTE;
  268. }
  269. else if(*begin == "rgen")
  270. {
  271. shaderType = ShaderType::RAY_GEN;
  272. }
  273. else if(*begin == "ahit")
  274. {
  275. shaderType = ShaderType::ANY_HIT;
  276. }
  277. else if(*begin == "chit")
  278. {
  279. shaderType = ShaderType::CLOSEST_HIT;
  280. }
  281. else if(*begin == "miss")
  282. {
  283. shaderType = ShaderType::MISS;
  284. }
  285. else if(*begin == "int")
  286. {
  287. shaderType = ShaderType::INTERSECTION;
  288. }
  289. else if(*begin == "call")
  290. {
  291. shaderType = ShaderType::CALLABLE;
  292. }
  293. else
  294. {
  295. ANKI_PP_ERROR_MALFORMED();
  296. }
  297. m_codeLines.pushBackSprintf("#ifdef ANKI_%s_SHADER", SHADER_STAGE_NAMES[shaderType].cstr());
  298. ++begin;
  299. if(begin != end)
  300. {
  301. // Should be the last token
  302. ANKI_PP_ERROR_MALFORMED();
  303. }
  304. // Set the mask
  305. const ShaderTypeBit mask = ShaderTypeBit(1 << shaderType);
  306. if(!!(mask & m_shaderTypes))
  307. {
  308. ANKI_PP_ERROR_MALFORMED_MSG("Can't have #pragma start <shader> appearing more than once");
  309. }
  310. m_shaderTypes |= mask;
  311. // Check bounds
  312. if(m_insideShader)
  313. {
  314. ANKI_PP_ERROR_MALFORMED_MSG("Can't have #pragma start before you close the previous pragma start");
  315. }
  316. m_insideShader = true;
  317. return Error::NONE;
  318. }
Error ShaderProgramParser::parsePragmaEnd(const StringAuto* begin, const StringAuto* end, CString line, CString fname)
{
	ANKI_ASSERT(begin && end);

	// Check tokens
	if(begin != end)
	{
		ANKI_PP_ERROR_MALFORMED();
	}

	// Check bounds
	if(!m_insideShader)
	{
		ANKI_PP_ERROR_MALFORMED_MSG("Can't have #pragma end before you open with a pragma start");
	}
	m_insideShader = false;

	// Write code
	m_codeLines.pushBack("#endif // Shader guard");

	return Error::NONE;
}

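// Handles "#pragma anki mutator <NAME> <value0> <value1> ...". The name must be unique and at least
// two integer values are required. Example (the mutator name is illustrative):
//   #pragma anki mutator INSTANCE_COUNT 1 2 4 8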
Error ShaderProgramParser::parsePragmaMutator(const StringAuto* begin, const StringAuto* end, CString line,
											  CString fname)
{
	ANKI_ASSERT(begin && end);
	if(begin >= end)
	{
		ANKI_PP_ERROR_MALFORMED();
	}

	m_mutators.emplaceBack(m_alloc);
	Mutator& mutator = m_mutators.getBack();

	// Name
	{
		if(begin >= end)
		{
			// Need to have a name
			ANKI_PP_ERROR_MALFORMED();
		}

		// Check for duplicate mutators
		for(U32 i = 0; i < m_mutators.getSize() - 1; ++i)
		{
			if(m_mutators[i].m_name == *begin)
			{
				ANKI_PP_ERROR_MALFORMED_MSG("Duplicate mutator");
			}
		}

		if(begin->getLength() > MAX_SHADER_BINARY_NAME_LENGTH)
		{
			ANKI_PP_ERROR_MALFORMED_MSG("Too big name");
		}

		mutator.m_name.create(begin->toCString());
		++begin;
	}

	// Values
	{
		// Gather them
		for(; begin < end; ++begin)
		{
			MutatorValue value = 0;

			if(tokenIsComment(begin->toCString()))
			{
				break;
			}

			if(begin->toNumber(value))
			{
				ANKI_PP_ERROR_MALFORMED();
			}

			mutator.m_values.emplaceBack(value);
		}

		// Check for correct count
		if(mutator.m_values.getSize() < 2)
		{
  388. ANKI_PP_ERROR_MALFORMED_MSG("Mutator with less that 2 values doesn't make sense");
		}

		std::sort(mutator.m_values.getBegin(), mutator.m_values.getEnd());

		// Check for duplicates
		for(U32 i = 1; i < mutator.m_values.getSize(); ++i)
		{
			if(mutator.m_values[i - 1] == mutator.m_values[i])
			{
				ANKI_PP_ERROR_MALFORMED_MSG("Same value appeared more than once");
			}
		}
	}

	return Error::NONE;
}

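// Handles "#pragma anki library <name>", e.g. "#pragma anki library RtShadows" (the library name is illustrative).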
Error ShaderProgramParser::parsePragmaLibraryName(const StringAuto* begin, const StringAuto* end, CString line,
												  CString fname)
{
	ANKI_ASSERT(begin && end);
	if(begin >= end)
	{
		ANKI_PP_ERROR_MALFORMED();
	}

	if(m_libName.getLength() > 0)
	{
		ANKI_PP_ERROR_MALFORMED_MSG("Library name already set");
	}

	m_libName = *begin;

	return Error::NONE;
}

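// Handles "#pragma anki ray_type <index>", where the index is a small integer, e.g. "#pragma anki ray_type 0".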
Error ShaderProgramParser::parsePragmaRayType(const StringAuto* begin, const StringAuto* end, CString line,
											  CString fname)
{
	ANKI_ASSERT(begin && end);
	if(begin >= end)
	{
		ANKI_PP_ERROR_MALFORMED();
	}

	if(m_rayType != MAX_U32)
	{
		ANKI_PP_ERROR_MALFORMED_MSG("Ray type already set");
	}

	ANKI_CHECK(begin->toNumber(m_rayType));
	if(m_rayType > 128)
	{
		ANKI_PP_ERROR_MALFORMED_MSG("Ray type has a very large value");
	}

	return Error::NONE;
}

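// Handles "#pragma anki rewrite_mutation <MUTATOR> <value> ... to <MUTATOR> <value> ...". Example
// (mutator names are illustrative):
//   #pragma anki rewrite_mutation INSTANCE_COUNT 8 LOD 2 to INSTANCE_COUNT 4 LOD 2
// Every mutator/value pair on the left of "to" is rewritten to the matching pair on the right.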
Error ShaderProgramParser::parsePragmaRewriteMutation(const StringAuto* begin, const StringAuto* end, CString line,
													  CString fname)
{
	ANKI_ASSERT(begin && end);

	// Some basic sanity checks
	const U tokenCount = end - begin;
	constexpr U minTokenCount = 2 + 1 + 2; // Mutator + value + "to" + mutator + value
	if(tokenCount < minTokenCount)
	{
		ANKI_PP_ERROR_MALFORMED();
	}

	MutationRewrite& rewrite = *m_mutationRewrites.emplaceBack(m_alloc);
	Bool servingFrom = true;
	do
	{
		if(*begin == "to")
		{
			if(servingFrom == false)
			{
				ANKI_PP_ERROR_MALFORMED();
			}

			servingFrom = false;
		}
		else
		{
			// Mutator & value
			const CString mutatorName = *begin;
			++begin;
			if(begin == end)
			{
				ANKI_PP_ERROR_MALFORMED();
			}

			const CString valueStr = *begin;
			MutatorValue value;
			if(valueStr.toNumber(value))
			{
				ANKI_PP_ERROR_MALFORMED_MSG("Malformed value");
			}

			// Get or create new record
			if(servingFrom)
			{
				MutationRewrite::Record& rec = *rewrite.m_records.emplaceBack();
				for(U32 i = 0; i < m_mutators.getSize(); ++i)
				{
					if(m_mutators[i].getName() == mutatorName)
					{
						rec.m_mutatorIndex = i;
						break;
					}
				}

				if(rec.m_mutatorIndex == MAX_U32)
				{
					ANKI_PP_ERROR_MALFORMED_MSG("Mutator not found");
				}

				if(!mutatorHasValue(m_mutators[rec.m_mutatorIndex], value))
				{
  493. ANKI_PP_ERROR_MALFORMED_MSG("Incorect value for mutator");
				}

				rec.m_valueFrom = value;
			}
			else
			{
				Bool found = false;
				for(MutationRewrite::Record& rec : rewrite.m_records)
				{
					if(m_mutators[rec.m_mutatorIndex].m_name == mutatorName)
					{
						if(!mutatorHasValue(m_mutators[rec.m_mutatorIndex], value))
						{
  506. ANKI_PP_ERROR_MALFORMED_MSG("Incorect value for mutator");
						}

						rec.m_valueTo = value;
						found = true;
						break;
					}
				}

				if(!found)
				{
					ANKI_PP_ERROR_MALFORMED();
				}
			}
		}

		++begin;
	} while(begin < end && !tokenIsComment(*begin));

	// Sort for some later cross checking
	std::sort(rewrite.m_records.getBegin(), rewrite.m_records.getEnd(),
			  [](const MutationRewrite::Record& a, const MutationRewrite::Record& b) {
				  return a.m_mutatorIndex < b.m_mutatorIndex;
			  });

	// More cross checking
	for(U32 i = 1; i < rewrite.m_records.getSize(); ++i)
	{
		if(rewrite.m_records[i - 1].m_mutatorIndex == rewrite.m_records[i].m_mutatorIndex)
		{
			ANKI_PP_ERROR_MALFORMED_MSG("Mutator appeared more than once");
		}
	}

	for(U32 i = 0; i < m_mutationRewrites.getSize() - 1; ++i)
	{
		const MutationRewrite& other = m_mutationRewrites[i];

		if(other.m_records.getSize() != rewrite.m_records.getSize())
		{
			continue;
		}

		Bool same = true;
		for(U32 j = 0; j < rewrite.m_records.getSize(); ++j)
		{
			if(rewrite.m_records[j] != other.m_records[j])
			{
				same = false;
				break;
			}
		}

		if(same)
		{
			ANKI_PP_ERROR_MALFORMED_MSG("Mutation already exists");
		}
	}

	return Error::NONE;
}

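// Handles '#include "file"' and '#include <file>' by recursively parsing the included file.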
Error ShaderProgramParser::parseInclude(const StringAuto* begin, const StringAuto* end, CString line, CString fname,
										U32 depth)
{
	// Gather the path
	StringAuto path(m_alloc);
	for(; begin < end; ++begin)
	{
		path.append(*begin);
	}

	if(path.isEmpty())
	{
		ANKI_PP_ERROR_MALFORMED();
	}

	// Check
	const char firstChar = path[0];
	const char lastChar = path[path.getLength() - 1];
	if((firstChar == '\"' && lastChar == '\"') || (firstChar == '<' && lastChar == '>'))
	{
		StringAuto fname2(m_alloc);
		fname2.create(path.begin() + 1, path.begin() + path.getLength() - 1);

		if(parseFile(fname2, depth + 1))
		{
			ANKI_PP_ERROR_MALFORMED_MSG("Error parsing include. See previous errors");
		}
	}
	else
	{
		ANKI_PP_ERROR_MALFORMED();
	}

	return Error::NONE;
}

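// Dispatches a single line that contains "pragma" or "include". A minimal input file that exercises
// the directives handled here could look like this (all names are illustrative):
//
//   #pragma anki mutator INSTANCE_COUNT 1 2 4
//   #pragma anki start vert
//   void main() { /* GLSL */ }
//   #pragma anki end
//   #pragma anki start frag
//   void main() { /* GLSL */ }
//   #pragma anki end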
Error ShaderProgramParser::parseLine(CString line, CString fname, Bool& foundPragmaOnce, U32 depth)
{
	// Tokenize
	DynamicArrayAuto<StringAuto> tokens(m_alloc);
	tokenizeLine(line, tokens);
	ANKI_ASSERT(tokens.getSize() > 0);

	const StringAuto* token = tokens.getBegin();
	const StringAuto* end = tokens.getEnd();

	// Skip the hash
	Bool foundAloneHash = false;
	if(*token == "#")
	{
		++token;
		foundAloneHash = true;
	}

	if((token < end) && ((foundAloneHash && *token == "include") || *token == "#include"))
	{
		// We _must_ have an #include
		ANKI_CHECK(parseInclude(token + 1, end, line, fname, depth));
	}
	else if((token < end) && ((foundAloneHash && *token == "pragma") || *token == "#pragma"))
	{
		// We may have a #pragma once or a #pragma anki or something else
		++token;

		if(*token == "once")
		{
			// Pragma once
			if(foundPragmaOnce)
			{
				ANKI_PP_ERROR_MALFORMED_MSG("Can't have more than one #pragma once per file");
			}

			if(token + 1 != end)
			{
				ANKI_PP_ERROR_MALFORMED();
			}

			// Add the guard unique for this file
			foundPragmaOnce = true;
			const U64 hash = fname.computeHash();
			m_codeLines.pushBackSprintf("#ifndef _ANKI_INCL_GUARD_%llu\n"
										"#define _ANKI_INCL_GUARD_%llu",
										hash, hash);
		}
		else if(*token == "anki")
		{
			// Must be a #pragma anki
			++token;

			if(*token == "mutator")
			{
				ANKI_CHECK(parsePragmaMutator(token + 1, end, line, fname));
			}
			else if(*token == "start")
			{
				ANKI_CHECK(parsePragmaStart(token + 1, end, line, fname));
			}
			else if(*token == "end")
			{
				ANKI_CHECK(parsePragmaEnd(token + 1, end, line, fname));
			}
			else if(*token == "rewrite_mutation")
			{
				ANKI_CHECK(parsePragmaRewriteMutation(token + 1, end, line, fname));
			}
			else if(*token == "library")
			{
				ANKI_CHECK(parsePragmaLibraryName(token + 1, end, line, fname));
			}
			else if(*token == "ray_type")
			{
				ANKI_CHECK(parsePragmaRayType(token + 1, end, line, fname));
			}
			else
			{
				ANKI_PP_ERROR_MALFORMED();
			}

			// Add the line as a comment because of hashing of the source
			m_codeLines.pushBackSprintf("//%s", line.cstr());
		}
		else
		{
			// Some other pragma
			ANKI_SHADER_COMPILER_LOGW("Ignoring: %s", line.cstr());
			m_codeLines.pushBack(line);
		}
	}
	else
	{
		// Ignore
		m_codeLines.pushBack(line);
	}

	return Error::NONE;
}

Error ShaderProgramParser::parseFile(CString fname, U32 depth)
{
	// First check the depth
	if(depth > MAX_INCLUDE_DEPTH)
	{
		ANKI_SHADER_COMPILER_LOGE("The include depth is too high. Probably circular inclusion");
		return Error::USER_DATA;
	}

	Bool foundPragmaOnce = false;

	// Load file in lines
	StringAuto txt(m_alloc);
	ANKI_CHECK(m_fsystem->readAllText(fname, txt));

	StringListAuto lines(m_alloc);
	lines.splitString(txt.toCString(), '\n');
	if(lines.getSize() < 1)
	{
		ANKI_SHADER_COMPILER_LOGE("Source is empty");
		return Error::USER_DATA;
	}

	// Parse lines
	for(const String& line : lines)
	{
		if(line.find("pragma") != CString::NPOS || line.find("include") != CString::NPOS)
		{
			// Possibly a preprocessor directive we care about
			ANKI_CHECK(parseLine(line.toCString(), fname, foundPragmaOnce, depth));
		}
		else
		{
			// Just append the line
			m_codeLines.pushBack(line.toCString());
		}
	}

	if(foundPragmaOnce)
	{
		// Append the guard
		m_codeLines.pushBack("#endif // Include guard");
	}

	return Error::NONE;
}

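// Top-level entry point. A rough usage sketch (objects other than the ones declared in this file are
// assumed to be set up elsewhere in the engine):
//
//   ShaderProgramParser parser(fname, &fsystem, alloc, gpuCapabilities, bindlessLimits);
//   ANKI_CHECK(parser.parse());
//   ShaderProgramParserVariant variant;
//   ANKI_CHECK(parser.generateVariant(mutation, variant));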
Error ShaderProgramParser::parse()
{
	ANKI_ASSERT(!m_fname.isEmpty());
	ANKI_ASSERT(m_codeLines.isEmpty());

	const CString fname = m_fname;

	// Parse recursively
	ANKI_CHECK(parseFile(fname, 0));

	// Checks
	{
		if(!!(m_shaderTypes & ShaderTypeBit::COMPUTE))
		{
			if(m_shaderTypes != ShaderTypeBit::COMPUTE)
			{
				ANKI_SHADER_COMPILER_LOGE("Can't combine compute shader with other types of shaders");
				return Error::USER_DATA;
			}
		}
		else if(!!(m_shaderTypes & ShaderTypeBit::ALL_GRAPHICS))
		{
			if(!(m_shaderTypes & ShaderTypeBit::VERTEX))
			{
				ANKI_SHADER_COMPILER_LOGE("Missing vertex shader");
				return Error::USER_DATA;
			}

			if(!(m_shaderTypes & ShaderTypeBit::FRAGMENT))
			{
				ANKI_SHADER_COMPILER_LOGE("Missing fragment shader");
				return Error::USER_DATA;
			}
		}

		if(m_insideShader)
		{
			ANKI_SHADER_COMPILER_LOGE("Forgot a \"pragma anki end\"");
			return Error::USER_DATA;
		}
	}

	// Create the code lines
	if(m_codeLines.getSize())
	{
		m_codeLines.join("\n", m_codeSource);
		m_codeLines.destroy();
	}

	// Create the hash
	{
		if(m_codeSource.getLength())
		{
			m_codeSourceHash = appendHash(m_codeSource.getBegin(), m_codeSource.getLength(), SHADER_HEADER_HASH);
		}

		if(m_libName.getLength() > 0)
		{
			m_codeSourceHash = appendHash(m_libName.getBegin(), m_libName.getLength(), m_codeSourceHash);
		}

		m_codeSourceHash = appendHash(&m_rayType, sizeof(m_rayType), m_codeSourceHash);
	}

	return Error::NONE;
}

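// Fills the printf-style SHADER_HEADER template above with the backend minor/major version, the GPU
// vendor, the per-stage ANKI_*_SHADER define, the bindless limits and whether ray tracing is enabled.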
void ShaderProgramParser::generateAnkiShaderHeader(ShaderType shaderType, const GpuDeviceCapabilities& caps,
												   const BindlessLimits& limits, StringAuto& header)
{
	header.sprintf(SHADER_HEADER, caps.m_minorApiVersion, caps.m_majorApiVersion,
				   GPU_VENDOR_STR[caps.m_gpuVendor].cstr(), SHADER_STAGE_NAMES[shaderType].cstr(),
				   limits.m_bindlessTextureCount, limits.m_bindlessImageCount, U(caps.m_rayTracingEnabled));
}

Error ShaderProgramParser::generateVariant(ConstWeakArray<MutatorValue> mutation,
										   ShaderProgramParserVariant& variant) const
{
	// Sanity checks
	ANKI_ASSERT(m_codeSource.getLength() > 0);
	ANKI_ASSERT(mutation.getSize() == m_mutators.getSize());
	for(U32 i = 0; i < mutation.getSize(); ++i)
	{
		ANKI_ASSERT(mutatorHasValue(m_mutators[i], mutation[i]) && "Value not found");
	}

	// Init variant
	::new(&variant) ShaderProgramParserVariant();
	variant.m_alloc = m_alloc;

	// Create the mutator defines
	StringAuto mutatorDefines(m_alloc);
	for(U32 i = 0; i < mutation.getSize(); ++i)
	{
		mutatorDefines.append(StringAuto(m_alloc).sprintf("#define %s %d\n", m_mutators[i].m_name.cstr(), mutation[i]));
	}

	// Generate source per stage
	for(ShaderType shaderType : EnumIterable<ShaderType>())
	{
		if(!(ShaderTypeBit(1u << shaderType) & m_shaderTypes))
		{
			continue;
		}

		// Create the header
		StringAuto header(m_alloc);
		generateAnkiShaderHeader(shaderType, m_gpuCapabilities, m_bindlessLimits, header);

		// Create the final source without the bindings
		StringAuto finalSource(m_alloc);
		finalSource.append(header);
		finalSource.append(mutatorDefines);
		finalSource.append(m_codeSource);

		// Move the source
		variant.m_sources[shaderType] = std::move(finalSource);
	}

	return Error::NONE;
}

Bool ShaderProgramParser::rewriteMutation(WeakArray<MutatorValue> mutation) const
{
	// Checks
	ANKI_ASSERT(mutation.getSize() == m_mutators.getSize());
	for(U32 i = 0; i < mutation.getSize(); ++i)
	{
		ANKI_ASSERT(mutatorHasValue(m_mutators[i], mutation[i]));
	}

	// Early exit
	if(mutation.getSize() == 0)
	{
		return false;
	}

	// Find if mutation exists
	for(const MutationRewrite& rewrite : m_mutationRewrites)
	{
		Bool found = true;
		for(U32 i = 0; i < rewrite.m_records.getSize(); ++i)
		{
			if(rewrite.m_records[i].m_valueFrom != mutation[rewrite.m_records[i].m_mutatorIndex])
			{
				found = false;
				break;
			}
		}

		if(found)
		{
			// Rewrite it
			for(U32 i = 0; i < rewrite.m_records.getSize(); ++i)
			{
				mutation[rewrite.m_records[i].m_mutatorIndex] = rewrite.m_records[i].m_valueTo;
			}

			return true;
		}
	}

	return false;
}

Bool ShaderProgramParser::mutatorHasValue(const ShaderProgramParserMutator& mutator, MutatorValue value)
{
	for(MutatorValue v : mutator.m_values)
	{
		if(value == v)
		{
			return true;
		}
	}

	return false;
}

} // end namespace anki