// HelloQuad.cs — AtomicEngine C# example: renders a textured, vertex-colored quad.
  1. using System.Runtime.InteropServices;
  2. using AtomicEngine;
  3. public class Program
  4. {
  5. public static void Main(string[] args)
  6. {
  7. Application.Run<HelloQuad>(args);
  8. }
  9. }
  10. // This struct represents a vertex in our geometry, its layout should be sequential and we're specifying the size although it's
  11. // not necessary in this example. They hold the vertex position in 3D, the color channels, and the texture coordinates (UV)
  12. [StructLayout(LayoutKind.Sequential, Size = 24)]
  13. public struct VertexUVColor
  14. {
  15. // These are the vertex position in each individual axis. z is depth in this case, useful for sorting in orthographic projection
  16. public float x, y, z; // 3 elements of 4 bytes each (single precision 32-bit): 3x4 = 12 bytes
  17. // Individual color channels: red, green, blue, alpha (transparency; 0 = transparent, 255 = opaque)
  18. public byte r, g, b, a; // 4 elements of 1 byte
  19. // These are the texture x and y coordinates, commonly called u and v respectively; they are simply normalized cartesian coordinates
  20. public float u, v; // 2x4 = 8 bytes here, totalling 12+4+8 bytes = 24 bytes (total size of this struct)
  21. }
  22. public class HelloQuad : AppDelegate
  23. {
  24. // Scene reference kept here so it won't be collected by the GC
  25. Scene scene;
  26. Graphics graphics;
  27. Viewport viewport;
  28. Camera camera;
  29. Texture2D texture;
  30. VertexBuffer vertexBuffer;
  31. public override void Start()
  32. {
  33. // We get the variables we are going to use in this example
  34. Renderer renderer = GetSubsystem<Renderer>();
  35. graphics = GetSubsystem<Graphics>();
  36. viewport = renderer.GetViewport(0);
  37. // We create a new Scene
  38. scene = new Scene();
  39. // The Octree should be added to the root scene node (mandatory?) TODO: answer this
  40. scene.CreateComponent<Octree>();
  41. // We pass the scene we just created to be displayed in the viewport
  42. viewport.Scene = scene;
  43. // We create a new camera on the scene, called "Camera". Tip: you can think of a camera as a glorified projection matrix
  44. // - Scene.CreateChild(string name) returns a new Node with that name.
  45. // - Node.CreateComponent<ComponentType>() returns a component attached to that Node
  46. camera = scene.CreateChild("Camera").CreateComponent<Camera>();
  47. // We can access the Node any component is attached to using Component.Node
  48. camera.Node.Position = new Vector3(0.5f, 0.5f, 0.0f);
  49. // Remember, 'camera' is a Camera component, so we access it directly here
  50. camera.Orthographic = true;
  51. camera.OrthoSize = 1.5f;
  52. // We pass our newly created camera to the viewport so it's used to display our scene
  53. viewport.Camera = camera;
  54. // We create an XML from string so this code is fully self-contained
  55. XMLFile xml = new XMLFile(); xml.FromString("<renderpath><command type=\"sendevent\"/></renderpath>");
  56. // We create a new RenderPath. A Viewport comes by default with some events, and you can use viewport.GetRenderPath().Clone()
  57. // to clone the default RenderPath and Append instructions to it instead (see AtomicBlaster for examples on how to do effects)
  58. RenderPath renderpath = new RenderPath();
  59. renderpath.Append(xml);
  60. // We replace the viewport's default renderpath by the one we just created
  61. viewport.SetRenderPath(renderpath);
  62. // We subscribe to the RenderPathEvent. Here we pass an anonymous function that just absorbs the argument and calls Render()
  63. SubscribeToEvent<RenderPathEvent>(e => { Render(); });
  64. // Here we setup our shaders, we are using the BasicVColUnlitAlpha "technique" and selecting DIFFMAP and VERTEXCOLOR
  65. // DIFFMAP is the diffuse texture and VERTEXCOLOR is a color each vertex holds that is used to 'tint' the surface
  66. // See this link: github.com/AtomicGameEngine/AtomicGameEngine/tree/master/Resources/CoreData/Techniques
  67. ShaderVariation pixelShader = graphics.GetShader(ShaderType.PS, "Basic", "DIFFMAP VERTEXCOLOR");
  68. ShaderVariation vertexShader = graphics.GetShader(ShaderType.VS, "Basic", "DIFFMAP VERTEXCOLOR");
  69. graphics.SetShaders(vertexShader, pixelShader);
  70. // This vertex shader parameter just applies no transformation (Identity Matrix means no transformation) so the vertices
  71. // display in world coordinates what allow us to use the camera properly. NOTE: Identity Matrix is also called Unit Matrix
  72. graphics.SetShaderParameter(ShaderParams.VSP_MODEL, Matrix3x4.IDENTITY);
  73. // We set the pixel shader diffuse color to be white. You can change this to 'tint' the texture similar to vertex colors
  74. // but this applies to the whole material and in this example vertex colors will also affect it
  75. graphics.SetShaderParameter(ShaderParams.PSP_MATDIFFCOLOR, Color.White);
  76. // We set cull mode to NONE so our geometry won't be culled (ignored), for this example we don't really need any culling
  77. graphics.SetCullMode(CullMode.CULL_NONE);
  78. // We create a texture from literal data so this code is fully self-contained, you can safely skip the lines below.
  79. // In your real projects you're most likely going to load textures from the disk using Texture.Load
  80. Image image = new Image();
  81. image.SetSize(16, 16, 3);
  82. Color z = Color.White;
  83. Color M = Color.Blue;
  84. Color k = Color.Black;
  85. Color[] imageData =
  86. {
  87. k,k,k,k,k,k,k,k,k,k,k,k,k,k,k,k,
  88. k,z,z,z,z,z,z,z,z,z,z,z,z,z,M,k,
  89. k,z,z,z,z,z,z,M,M,z,z,z,z,z,z,k,
  90. k,z,z,z,z,z,z,M,M,z,z,z,z,z,z,k,
  91. k,z,z,z,z,z,M,z,z,M,z,z,z,z,z,k,
  92. k,z,z,z,z,z,M,z,z,M,z,z,z,z,z,k,
  93. k,z,z,z,z,M,z,z,z,z,M,z,z,z,z,k,
  94. k,z,z,z,z,M,z,z,z,z,M,z,z,z,z,k,
  95. k,z,z,z,M,z,z,z,z,z,z,M,z,z,z,k,
  96. k,z,z,z,M,z,z,z,z,z,z,M,z,z,z,k,
  97. k,z,z,M,M,M,M,M,M,M,M,M,M,z,z,k,
  98. k,z,z,M,z,z,z,z,z,z,z,z,M,z,z,k,
  99. k,z,M,z,z,z,z,z,z,z,z,z,z,M,z,k,
  100. k,z,M,z,z,z,z,z,z,z,z,z,z,M,z,k,
  101. k,z,z,z,z,z,z,z,z,z,z,z,z,z,z,k,
  102. k,k,k,k,k,k,k,k,k,k,k,k,k,k,k,k,
  103. };
  104. for (int pixel = 0; pixel < imageData.Length; pixel++)
  105. {
  106. image.SetPixel(pixel % 16, 15 - pixel / 16, imageData[pixel]);
  107. }
  108. texture = new Texture2D();
  109. texture.SetData(image);
  110. // We call this function that creates the quad geometry
  111. CreateQuad();
  112. }
  113. // We use unsafe code only to access the vertex buffer data
  114. private unsafe void CreateQuad()
  115. {
  116. // We create a new VertexBuffer object, it holds our vertices and is passed to the GPU
  117. vertexBuffer = new VertexBuffer();
  118. // We set its size and the elements it's containing, the 3rd optional argument (dynamic) should be 'true' if you're planning
  119. // to update the VertexBuffer constantly, that will improve performance in those cases.
  120. vertexBuffer.SetSize(6, Constants.MASK_POSITION | Constants.MASK_TEXCOORD1 | Constants.MASK_COLOR, false);
  121. // Here we lock the vertexBuffer what returns a pointer (IntPtr) to its data (vertexData here), I'm using a code block for clarity
  122. System.IntPtr vertexData = vertexBuffer.Lock(0, 6, true);
  123. {
  124. // We can cast the data pointer to whatever data type we want, here we are using the custom VertexUVColor struct
  125. VertexUVColor* vertex = (VertexUVColor*) vertexData;
  126. // Each of these blocks is a vertex, we set the their position (x and y), texture coordinate (u and v) and color in individual
  127. // red, green and blue channels (r, g, b), alpha has no effect in this example because there's no transparency in the shader
  128. vertex[0] = new VertexUVColor{ x = 0, y = 0, u = 0, v = 0, r = 255, g = 0, b = 255 };
  129. vertex[1] = new VertexUVColor{ x = 0, y = 1, u = 0, v = 1, r = 255, g = 255, b = 0 };
  130. vertex[2] = new VertexUVColor{ x = 1, y = 1, u = 1, v = 1, r = 255, g = 255, b = 255 };
  131. vertex[3] = new VertexUVColor{ x = 0, y = 0, u = 0, v = 0, r = 255, g = 0, b = 255 };
  132. vertex[4] = new VertexUVColor{ x = 1, y = 1, u = 1, v = 1, r = 255, g = 255, b = 255 };
  133. vertex[5] = new VertexUVColor{ x = 1, y = 0, u = 1, v = 0, r = 0, g = 255, b = 0 };
  134. }
  135. // Don't forget to unlock the VertexBuffer after you modify it
  136. vertexBuffer.Unlock();
  137. }
  138. void Render()
  139. {
  140. // We clear the whole screen white before drawing anything
  141. graphics.Clear(Constants.CLEAR_COLOR, Color.White);
  142. // The 3 lines below don't have to be set every frame in this specific example, but you'll most likely be changing them often
  143. viewport.View.SetCameraShaderParameters(camera);
  144. // We set the Texture to be used in the next draw call and we are also setting the filter to nearest neighbor so it looks sharp
  145. graphics.SetTexture((uint)TextureUnit.TU_DIFFUSE, texture);
  146. graphics.SetDefaultTextureFilterMode(TextureFilterMode.FILTER_NEAREST);
  147. // We set the VertexBuffer to be used on the next draw call
  148. graphics.SetVertexBuffer(vertexBuffer);
  149. // We finally call Draw passing the primitive type our VertexBuffer uses, TRIANGLE_LIST basically means that every 3 vertices
  150. // in the buffer should have a face (triangle) between them (see: http://math.hws.edu/graphicsbook/c3/triangle-primitives.png)
  151. graphics.Draw(PrimitiveType.TRIANGLE_LIST, 0, 6);
  152. }
  153. }