// slang.azsl (18 KB)

  1. // O3DE: this file is taken from slang repository,
  2. // and slightly syntax adapted to AZSL grammar.
  3. // shaders.slang
  4. //
  5. // This example builds on the simplistic shaders presented in the
  6. // "Hello, World" example by adding support for (intentionally
  7. // simplistic) surface material and light shading.
  8. //
  9. // The code here is not meant to exemplify state-of-the-art material
  10. // and lighting techniques, but rather to show how a shader
  11. // library can be developed in a modular fashion without reliance
  12. // on the C preprocessor or manual parameter-binding decorations.
  13. //
  14. // We are going to define a simple model for surface material shading.
  15. //
  16. // The first building block in our model will be the representation of
  17. // the geometry attributes of a surface as fed into the material.
  18. //
  19. struct SurfaceGeometry
  20. {
  21. float3 position;
  22. float3 normal;
  23. // TODO: tangent vectors would be the natural next thing to add here,
  24. // and would be required for anisotropic materials. However, the
  25. // simplistic model loading code we are currently using doesn't
  26. // produce tangents...
  27. //
  28. // float3 tangentU;
  29. // float3 tangentV;
  30. // We store a single UV parameterization in these geometry attributes.
  31. // A more complex renderer might need support for multiple UV sets,
  32. // and indeed it might choose to use interfaces and generics to capture
  33. // the different requirements that different materials impose on
  34. // the available surface attributes. We won't go to that kind of
  35. // trouble for such a simple example.
  36. //
  37. float2 uv;
  38. };
  39. //
  40. // Next, we want to define the fundamental concept of a reflectance
  41. // function, so that we can use it as a building block for other
  42. // parts of the system. This is a case where we are trying to
  43. // show how a proper physically-based renderer (PBR) might
  44. // decompose the problem using Slang, even though our simple
  45. // example is *not* physically based.
  46. //
  47. interface IBRDF
  48. {
  49. // Technically, a BRDF is only a function of the incident
  50. // (`wi`) and exitant (`wo`) directions, but for simplicity
  51. // we are passing in the surface normal (`N`) as well.
  52. //
  53. float3 evaluate(float3 wo, float3 wi, float3 N);
  54. };
  55. //
  56. // We can now define various implementations of the `IBRDF` interface
  57. // that represent different reflectance functions we want to support.
  58. // For now we keep things simple by defining about the simplest
  59. // reflectance function we can think of: the Blinn-Phong reflectance
  60. // model:
  61. //
  62. class BlinnPhong : IBRDF
  63. {
  64. // Blinn-Phong needs diffuse and specular reflectances, plus
  65. // a specular exponent value (which relates to "roughness"
  66. // in more modern physically-based models).
  67. //
  68. float3 kd;
  69. float3 ks;
  70. float specularity;
  71. // Here we implement the one requirement of the `IBRDF` interface
  72. // for our concrete implementation, using a textbook definition
  73. // of Blinng-Phong shading.
  74. //
  75. // Note: our "BRDF" definition here folds the N-dot-L term into
  76. // the evlauation of the reflectance function in case there are
  77. // useful algebraic simplifications this enables.
  78. //
  79. float3 evaluate(float3 V, float3 L, float3 N)
  80. {
  81. float nDotL = saturate(dot(N, L));
  82. float3 H = normalize(L + V);
  83. float nDotH = saturate(dot(N, H));
  84. return kd*nDotL + ks*pow(nDotH, specularity);
  85. }
  86. };
  87. //
  88. // It is important to note that a reflectance function is *not*
  89. // a "material." In most cases, a material will have spatially-varying
  90. // properties so that it cannot be summarized as a single `IBRDF`
  91. // instance.
  92. //
  93. // Thus a "material" is a value that can produce a BRDF for any point
  94. // on a surface (e.g., by sampling texture maps, etc.).
  95. //
  96. interface IMaterial
  97. {
  98. // Different concrete material implementations might yield BRDF
  99. // values with different types. E.g., one material might yield
  100. // reflectance functions using `BlinnPhong` while another uses
  101. // a much more complicated/accurate representation.
  102. //
  103. // We encapsulate the choice of BRDF parameters/evaluation in
  104. // our material interface with an "associated type." In the
  105. // simplest terms, think of this as an interface requirement
  106. // that is a type, instead of a method.
  107. //
  108. // (If you are C++-minded, you might think of this as akin to
  109. // how every container provided an `iterator` type, but different
  110. // containers may have different types of iterators)
  111. //
  112. associatedtype BRDF : IBRDF;
  113. // For our simple example program, it is enough for a material to
  114. // be able to return a BRDF given a point on the surface.
  115. //
  116. // A more complex implementation of material shading might also
  117. // have the material return updated surface geometry to reflect
  118. // the result of normal mapping, occlusion mapping, etc. or
  119. // return an opacity/coverage value for partially transparent
  120. // surfaces.
  121. //
  122. BRDF prepare(SurfaceGeometry geometry);
  123. };
  124. // We will now define a trivial first implementation of the material
  125. // interface, which uses our Blinn-Phong BRDF with uniform values
  126. // for its parameters.
  127. //
  128. // Note that this implementation is being provided *after* the
  129. // shader parameter `gMaterial` is declared, so that there is no
  130. // assumption in the shader code that `gMaterial` will be plugged
  131. // in using an instance of `SimpleMaterial`
  132. //
  133. //
  134. class SimpleMaterial : IMaterial
  135. {
  136. // We declare the properties we need as fields of the material type.
  137. // When `SimpleMaterial` is used for `TMaterial` above, then
  138. // `gMaterial` will be a `ParameterBlock<SimpleMaterial>`, and these
  139. // parameters will be allocated to a constant buffer that is part of
  140. // that parameter block.
  141. //
  142. // TODO: A future version of this example will include texture parameters
  143. // here to show that they are declared just like simple uniforms.
  144. //
  145. float3 diffuseColor;
  146. float3 specularColor;
  147. float specularity;
  148. // To satisfy the requirements of the `IMaterial` interface, our
  149. // material type needs to provide a suitable `BRDF` type. We
  150. // do this by using a simple `typedef`, although a nested
  151. // `struct` type can also satisfy an assocaited type requirement.
  152. //
  153. // A future version of the Slang compiler may allow the "right"
  154. // associated type definition to be inferred from the signature
  155. // of the `prepare()` method below.
  156. //
  157. typealias BRDF = BlinnPhong;
  158. BlinnPhong prepare(SurfaceGeometry geometry)
  159. {
  160. BlinnPhong brdf;
  161. brdf.kd = diffuseColor;
  162. brdf.ks = specularColor;
  163. brdf.specularity = specularity;
  164. return brdf;
  165. }
  166. };
  167. //
  168. // Note that no other code in this file statically
  169. // references the `SimpleMaterial` type, and instead
  170. // it is up to the application to "plug in" this type,
  171. // or another `IMaterial` implementation for the
  172. // `TMaterial` parameter.
  173. //
  174. // A light, or an entire lighting *environment* is an object
  175. // that can illuminate a surface using some BRDF implemented
  176. // with our abstractions above.
  177. //
  178. interface ILightEnv
  179. {
  180. // The `illuminate` method is intended to integrate incoming
  181. // illumination from this light (environment) incident at the
  182. // surface point given by `g` (which has the reflectance function
  183. // `brdf`) and reflected into the outgoing direction `wo`.
  184. //
  185. float3 illuminate<B:IBRDF>(SurfaceGeometry g, B brdf, float3 wo);
  186. //
  187. // Note that the `illuminate()` method is allowed as an interface
  188. // requirement in Slang even though it is a generic. Constract that
  189. // with C++ where a `template` method cannot be `virtual`.
  190. };
  191. // Given the `ILightEnv` interface, we can write up almost textbook
  192. // definition of directional and point lights.
  193. class DirectionalLight : ILightEnv
  194. {
  195. float3 direction;
  196. float3 intensity;
  197. float3 illuminate<B:IBRDF>(SurfaceGeometry g, B brdf, float3 wo)
  198. {
  199. return intensity * brdf.evaluate(wo, direction, g.normal);
  200. }
  201. };
  202. class PointLight : ILightEnv
  203. {
  204. float3 position;
  205. float3 intensity;
  206. float3 illuminate<B:IBRDF>(SurfaceGeometry g, B brdf, float3 wo)
  207. {
  208. float3 delta = position - g.position;
  209. float d = length(delta);
  210. float3 direction = normalize(delta);
  211. float3 illuminance = intensity / (d*d);
  212. return illuminance * brdf.evaluate(wo, direction, g.normal);
  213. }
  214. };
  215. // In most cases, a shader entry point will only be specialized for a single
  216. // material, but interesting rendering almost always needs multiple lights.
  217. // For that reason we will next define types to represent *composite* lighting
  218. // environment with multiple lights.
  219. //
  220. // A naive approach might be to have a single undifferentiated list of lights
  221. // where any type of light may appear at any index, but this would lose all
  222. // of the benefits of static specialization: we would have to perform dynamic
  223. // branching to determine what kind of light is stored at each index.
  224. //
  225. // Instead, we will start with a type for *homogeneous* arrays of lights:
  226. //
  227. /* AZSLc: generic classes not supported for now (only generic functions)
  228. class LightArray<L : ILightEnv, let N : int> : ILightEnv
  229. */
  230. class LightArray : ILightEnv
  231. {
  232. // The `LightArray` type has two generic parameters:
  233. //
  234. // - `L` is a type parameter, representing the type of lights that will be in our array
  235. // - `N` is a generic *value* parameter, representing the maximum number of lights allowed
  236. //
  237. // Slang's support for generic value parameters is currently experimental,
  238. // and the syntax might change.
  239. int count;
  240. ILightEnv lights[10];
  241. float3 illuminate<B:IBRDF>(SurfaceGeometry g, B brdf, float3 wo)
  242. {
  243. // Our light array integrates illumination by naively summing
  244. // contributions from all the lights in the array (up to `count`).
  245. //
  246. // A more advanced renderer might try apply sampling techniques
  247. // to pick a subset of lights to sample.
  248. //
  249. float3 sum = 0;
  250. for( int ii = 0; ii < count; ++ii )
  251. {
  252. sum += lights[ii].illuminate(g, brdf, wo);
  253. }
  254. return sum;
  255. }
  256. };
  257. // `LightArray` can handle multiple lights as long as they have the
  258. // same type, but we need a way to have a scene with multiple lights
  259. // of different types *without* losing static specialization.
  260. //
  261. // The `LightPair<T,U>` type supports this in about the simplest way
  262. // possible, by aggregating a light (environment) of type `T` and
  263. // one of type `U`. Those light environments might themselves be
  264. // `LightArray`s or `LightPair`s, so that arbitrarily complex
  265. // environments can be created from just these two composite types.
  266. //
  267. // This is probably a good place to insert a reminder that Slang's
  268. // generics are *not* C++ templates, so that the error messages
  269. // produced when working with these types are in general reasonable,
  270. // and this is *not* any form of "template metaprogramming."
  271. //
  272. // That said, we expect that future versions of Slang will make
  273. // defining composite types like this a bit less cumbersome.
  274. //
  275. /* AZSLc: not supported
  276. struct LightPair<T : ILightEnv, U : ILightEnv> : ILightEnv
  277. */
  278. class LightPair : ILightEnv
  279. {
  280. T first;
  281. U second;
  282. float3 illuminate<B:IBRDF>(SurfaceGeometry g, B brdf, float3 wo)
  283. {
  284. return first.illuminate(g, brdf, wo)
  285. + second.illuminate(g, brdf, wo);
  286. }
  287. };
  288. // As a final (degenerate) case, we will define a light
  289. // environment with *no* lights, which contributes no illumination.
  290. //
  291. class EmptyLightEnv : ILightEnv
  292. {
  293. float3 illuminate<B:IBRDF>(SurfaceGeometry g, B brdf, float3 wo)
  294. {
  295. return 0;
  296. }
  297. };
  298. // The code above constitutes the "shader library" for our
  299. // application, while the code below this point is the
  300. // implementation of a simple forward rendering pass
  301. // using that library.
  302. //
  303. // While the shader library has used many of Slang's advanced
  304. // mechanisms, the vertex and fragment shaders will be
  305. // much more modest, and hopefully easier to follow.
  306. // We will start with a `struct` for per-view parameters that
  307. // will be allocated into a `ParameterBlock`.
  308. //
  309. // As written, this isn't very different from using an HLSL
  310. // `cbuffer` declaration, but importantly this code will
  311. // continue to work if we add one or more resources (e.g.,
  312. // an environment map texture) to the `PerView` type.
  313. //
  314. struct PerView
  315. {
  316. float4x4 viewProjection;
  317. float3 eyePosition;
  318. };
  319. //ParameterBlock<PerView> gViewParams;
  320. // Declaring a block for per-model parameter data is
  321. // similarly simple.
  322. //
  323. struct PerModel
  324. {
  325. float4x4 modelTransform;
  326. float4x4 inverseTransposeModelTransform;
  327. };
  328. // AZSLc: not AZSL
  329. //ParameterBlock<PerModel> gModelParams;
  330. // We want our shader to work with any kind of lighting environment
  331. // - that is, any type that implements `ILightEnv`. Furthermore,
  332. // we want the parameters of that lighting environment to be passed
  333. // as parameter block - `ParameterBlock<L>` for some type `L`.
  334. //
  335. // We handle this by defining a global generic type parameter for
  336. // our shader, and constraining it to implement `ILightEnv`...
  337. //
  338. // AZSLc: not AZSL
  339. //type_param TLightEnv : ILightEnv;
  340. //
  341. // ... and then defining a parameter block that uses that type
  342. // parameter as the "element type" of the block:
  343. //
  344. // AZSLc: not AZSL
  345. //ParameterBlock<TLightEnv> gLightEnv;
  346. // Our handling of the material parameter for our shader
  347. // is quite similar to the case for the lighting environment:
  348. //
  349. // AZSLc: not AZSL
  350. //type_param TMaterial : IMaterial;
  351. //ParameterBlock<TMaterial> gMaterial;
  352. // Our vertex shader entry point is only marginally more
  353. // complicated than the Hello World example. We will
  354. // start by declaring the various "connector" `struct`s.
  355. //
  356. struct AssembledVertex
  357. {
  358. float3 position : POSITION;
  359. float3 normal : NORMAL;
  360. float2 uv : UV;
  361. };
  362. struct CoarseVertex
  363. {
  364. float3 worldPosition;
  365. float3 worldNormal;
  366. float2 uv;
  367. };
  368. struct VertexStageOutput
  369. {
  370. CoarseVertex coarseVertex : CoarseVertex;
  371. float4 position : SV_Position;
  372. };
  373. // Perhaps most interesting new feature of the entry
  374. // point declarations is that we use a `[shader(...)]`
  375. // attribute (as introduced in HLSL Shader Model 6.x)
  376. // in order to tag our entry points.
  377. //
  378. // This attribute informs the Slang compiler which
  379. // functions are intended to be compiled as shader
  380. // entry points (and what stage they target), so that
  381. // the programmer no longer needs to specify the
  382. // entry point name/stage through the API (or on
  383. // the command line when using `slangc`).
  384. //
  385. // While HLSL added this feature only in newer versions,
  386. // the Slang compiler supports this attribute across
  387. // *all* targets, so that it is okay to use whether you
  388. // want DXBC, DXIL, or SPIR-V output.
  389. //
  390. [shader("vertex")]
  391. VertexStageOutput vertexMain(
  392. AssembledVertex assembledVertex)
  393. {
  394. VertexStageOutput output;
  395. float3 position = assembledVertex.position;
  396. float3 normal = assembledVertex.normal;
  397. float2 uv = assembledVertex.uv;
  398. float3 worldPosition = mul(gModelParams.modelTransform, float4(position, 1.0)).xyz;
  399. float3 worldNormal = mul(gModelParams.inverseTransposeModelTransform, float4(normal, 0.0)).xyz;
  400. output.coarseVertex.worldPosition = worldPosition;
  401. output.coarseVertex.worldNormal = worldNormal;
  402. output.coarseVertex.uv = uv;
  403. output.position = mul(gViewParams.viewProjection, float4(worldPosition, 1.0));
  404. return output;
  405. }
  406. // Our fragment shader is almost trivial, with the most interesting
  407. // thing being how it uses the `TMaterial` type parameter (through the
  408. // value stored in the `gMaterial` parameter block) to dispatch to
  409. // the correct implementation of the `getDiffuseColor()` method
  410. // in the `IMaterial` interface.
  411. //
  412. // The `gMaterial` parameter block declaration thus serves not only
  413. // to group certain shader parameters for efficient CPU-to-GPU
  414. // communication, but also to select the code that will execute
  415. // in specialized versions of the `fragmentMain` entry point.
  416. //
  417. [shader("fragment")]
  418. float4 fragmentMain(
  419. CoarseVertex coarseVertex : CoarseVertex) : SV_Target
  420. {
  421. // We start by using our interpolated vertex attributes
  422. // to construct the local surface geometry that we will
  423. // use for material evaluation.
  424. //
  425. SurfaceGeometry g;
  426. g.position = coarseVertex.worldPosition;
  427. g.normal = normalize(coarseVertex.worldNormal);
  428. g.uv = coarseVertex.uv;
  429. float3 V = normalize(gViewParams.eyePosition - g.position);
  430. // Next we prepare the material, which involves running
  431. // any "pattern generation" logic of the material (e.g.,
  432. // sampling and blending texture layers), to produce
  433. // a BRDF suitable for evaluating under illumination
  434. // from different light sources.
  435. //
  436. // Note that the return type here is `TMaterial.BRDF`,
  437. // which is the `BRDF` type *assocaited* with the (unknown)
  438. // `TMaterial` type. When `TMaterial` gets substituted for
  439. // a concrete type later (e.g., `SimpleMaterial`) this
  440. // will resolve to a concrete type too (e.g., `SimpleMaterial.BRDF`
  441. // which is an alias for `BlinnPhong`).
  442. //
  443. // AZSLc: dot in slang is coloncolon in AZSL
  444. TMaterial::BRDF brdf = gMaterial.prepare(g);
  445. // Now that we've done the first step of material evaluation
  446. // and sampled texture maps, etc., it is time to start
  447. // integrating incident light at our surface point.
  448. //
  449. // Because we've wrapped up the lighting environment as
  450. // a single (composite) object, this is as simple as calling
  451. // its `illuminate()` method. Our particular fragment shader
  452. // is thus abstracted from how the renderer chooses to structure
  453. // this integration step, somewhat similar to how an
  454. // `illuminance` loop in RenderMan Shading Language works.
  455. //
  456. float3 color = gLightEnv.illuminate(g, brdf, V);
  457. return float4(color, 1);
  458. }