triangle-vulkan.c 79 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
37047057067077087097107117127137147157167177187197207217227237247257267277287297307317327337347357367377387397407417427437447457467477487497507517527537547557567577587597607617627637647657667677687697707717727737747757767777787797807817827837847857867877887897907917927937947957967977987998008018028038048058068078088098108118128138148158168178188198208218228238248258268278288298308318328338348358368378388398408418428438448458468478488498508518528538548558568578588598608618628638648658668678688698708718728738748758768778788798808818828838848858868878888898908918928938948958968978988999009019029039049059069079089099109119129139149159169179189199209219229239249259269279289299309319329339349359369379389399409419429439449459469479489499509519529539549559569579589599609619629639649659669679689699709719729739749759769779789799809819829839849859869879889899909919929939949959969979989991000100110021003100410051006100710081009101010111012101310141015101610171018101910201021102210231024102510261027102810291030103110321033103410351036103710381039104010411042104310441045104610471048104910501051105210531054105510561057105810591060106110621063106410651066106710681069107010711072107310741075107610771078107910801081108210831084108510861087108810891090109110921093109410951096109710981099110011011102110311041105110611071108110911101111111211131114111511161117111811191120112111221123112411251126112711281129113011311132113311341135113611371138113911401141114211431144114511461147114811491150115111521153115411551156115711581159116011611162116311641165116611671168116911701171117211731174117511761177117811791180118111821183118411851186118711881189119011911192119311941195119611971198119912001201120212031204120512061207120812091210121112121213121412151216121712181219122012211222122312241225122612271228122912301231123212331234123512361237123812391240124112421243124412451246124712481249125012511252125312541255125612571258125912601261126212631264126512661267126812691270127112721273127412751276127
71278127912801281128212831284128512861287128812891290129112921293129412951296129712981299130013011302130313041305130613071308130913101311131213131314131513161317131813191320132113221323132413251326132713281329133013311332133313341335133613371338133913401341134213431344134513461347134813491350135113521353135413551356135713581359136013611362136313641365136613671368136913701371137213731374137513761377137813791380138113821383138413851386138713881389139013911392139313941395139613971398139914001401140214031404140514061407140814091410141114121413141414151416141714181419142014211422142314241425142614271428142914301431143214331434143514361437143814391440144114421443144414451446144714481449145014511452145314541455145614571458145914601461146214631464146514661467146814691470147114721473147414751476147714781479148014811482148314841485148614871488148914901491149214931494149514961497149814991500150115021503150415051506150715081509151015111512151315141515151615171518151915201521152215231524152515261527152815291530153115321533153415351536153715381539154015411542154315441545154615471548154915501551155215531554155515561557155815591560156115621563156415651566156715681569157015711572157315741575157615771578157915801581158215831584158515861587158815891590159115921593159415951596159715981599160016011602160316041605160616071608160916101611161216131614161516161617161816191620162116221623162416251626162716281629163016311632163316341635163616371638163916401641164216431644164516461647164816491650165116521653165416551656165716581659166016611662166316641665166616671668166916701671167216731674167516761677167816791680168116821683168416851686168716881689169016911692169316941695169616971698169917001701170217031704170517061707170817091710171117121713171417151716171717181719172017211722172317241725172617271728172917301731173217331734173517361737173817391740174117421743174417451746174717481749175017511752175317541755175617571758175917601761176217631764176517661767176817691770177117721773177417751776177
71778177917801781178217831784178517861787178817891790179117921793179417951796179717981799180018011802180318041805180618071808180918101811181218131814181518161817181818191820182118221823182418251826182718281829183018311832183318341835183618371838183918401841184218431844184518461847184818491850185118521853185418551856185718581859186018611862186318641865186618671868186918701871187218731874187518761877187818791880188118821883188418851886188718881889189018911892189318941895189618971898189919001901190219031904190519061907190819091910191119121913191419151916191719181919192019211922192319241925192619271928192919301931193219331934193519361937193819391940194119421943194419451946194719481949195019511952195319541955195619571958195919601961196219631964196519661967196819691970197119721973197419751976197719781979198019811982198319841985198619871988198919901991199219931994199519961997199819992000200120022003200420052006200720082009201020112012201320142015201620172018201920202021202220232024202520262027202820292030203120322033203420352036203720382039204020412042204320442045204620472048204920502051205220532054205520562057205820592060206120622063206420652066206720682069207020712072207320742075207620772078207920802081208220832084208520862087208820892090209120922093209420952096209720982099210021012102210321042105210621072108210921102111211221132114211521162117211821192120212121222123212421252126212721282129
  1. /*
  2. * Copyright (c) 2015-2016 The Khronos Group Inc.
  3. * Copyright (c) 2015-2016 Valve Corporation
  4. * Copyright (c) 2015-2016 LunarG, Inc.
  5. *
  6. * Licensed under the Apache License, Version 2.0 (the "License");
  7. * you may not use this file except in compliance with the License.
  8. * You may obtain a copy of the License at
  9. *
  10. * http://www.apache.org/licenses/LICENSE-2.0
  11. *
  12. * Unless required by applicable law or agreed to in writing, software
  13. * distributed under the License is distributed on an "AS IS" BASIS,
  14. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15. * See the License for the specific language governing permissions and
  16. * limitations under the License.
  17. *
  18. * Author: Chia-I Wu <[email protected]>
  19. * Author: Cody Northrop <[email protected]>
  20. * Author: Courtney Goeltzenleuchter <[email protected]>
  21. * Author: Ian Elliott <[email protected]>
  22. * Author: Jon Ashburn <[email protected]>
  23. * Author: Piers Daniell <[email protected]>
  24. * Author: Gwan-gyeong Mun <[email protected]>
  25. * Porter: Camilla Löwy <[email protected]>
  26. */
  27. /*
  28. * Draw a textured triangle with depth testing. This is written against Intel
  29. * ICD. It does not do state transition nor object memory binding like it
  30. * should. It also does no error checking.
  31. */
  32. #include <stdio.h>
  33. #include <stdlib.h>
  34. #include <string.h>
  35. #include <stdbool.h>
  36. #include <assert.h>
  37. #include <signal.h>
  38. #ifdef _WIN32
  39. #include <windows.h>
  40. #endif
  41. #define GLAD_VULKAN_IMPLEMENTATION
  42. #include <glad/vulkan.h>
  43. #define GLFW_INCLUDE_NONE
  44. #include <GLFW/glfw3.h>
  45. #define DEMO_TEXTURE_COUNT 1
  46. #define VERTEX_BUFFER_BIND_ID 0
  47. #define APP_SHORT_NAME "tri"
  48. #define APP_LONG_NAME "The Vulkan Triangle Demo Program"
  49. #define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))
  50. #if defined(NDEBUG) && defined(__GNUC__)
  51. #define U_ASSERT_ONLY __attribute__((unused))
  52. #else
  53. #define U_ASSERT_ONLY
  54. #endif
  55. #define ERR_EXIT(err_msg, err_class) \
  56. do { \
  57. printf(err_msg); \
  58. fflush(stdout); \
  59. exit(1); \
  60. } while (0)
// Precompiled SPIR-V binary for the fragment shader (embedded as 32-bit
// words; do not edit by hand). The embedded names indicate it samples a
// combined image sampler "tex" at the interpolated "texcoord" input and
// writes the result to the "uFragColor" output.
static const uint32_t fragShaderCode[] = {
    0x07230203,0x00010000,0x00080007,0x00000014,0x00000000,0x00020011,0x00000001,0x0006000b,
    0x00000001,0x4c534c47,0x6474732e,0x3035342e,0x00000000,0x0003000e,0x00000000,0x00000001,
    0x0007000f,0x00000004,0x00000004,0x6e69616d,0x00000000,0x00000009,0x00000011,0x00030010,
    0x00000004,0x00000007,0x00030003,0x00000002,0x00000190,0x00090004,0x415f4c47,0x735f4252,
    0x72617065,0x5f657461,0x64616873,0x6f5f7265,0x63656a62,0x00007374,0x00090004,0x415f4c47,
    0x735f4252,0x69646168,0x6c5f676e,0x75676e61,0x5f656761,0x70303234,0x006b6361,0x00040005,
    0x00000004,0x6e69616d,0x00000000,0x00050005,0x00000009,0x61724675,0x6c6f4367,0x0000726f,
    0x00030005,0x0000000d,0x00786574,0x00050005,0x00000011,0x63786574,0x64726f6f,0x00000000,
    0x00040047,0x00000009,0x0000001e,0x00000000,0x00040047,0x0000000d,0x00000022,0x00000000,
    0x00040047,0x0000000d,0x00000021,0x00000000,0x00040047,0x00000011,0x0000001e,0x00000000,
    0x00020013,0x00000002,0x00030021,0x00000003,0x00000002,0x00030016,0x00000006,0x00000020,
    0x00040017,0x00000007,0x00000006,0x00000004,0x00040020,0x00000008,0x00000003,0x00000007,
    0x0004003b,0x00000008,0x00000009,0x00000003,0x00090019,0x0000000a,0x00000006,0x00000001,
    0x00000000,0x00000000,0x00000000,0x00000001,0x00000000,0x0003001b,0x0000000b,0x0000000a,
    0x00040020,0x0000000c,0x00000000,0x0000000b,0x0004003b,0x0000000c,0x0000000d,0x00000000,
    0x00040017,0x0000000f,0x00000006,0x00000002,0x00040020,0x00000010,0x00000001,0x0000000f,
    0x0004003b,0x00000010,0x00000011,0x00000001,0x00050036,0x00000002,0x00000004,0x00000000,
    0x00000003,0x000200f8,0x00000005,0x0004003d,0x0000000b,0x0000000e,0x0000000d,0x0004003d,
    0x0000000f,0x00000012,0x00000011,0x00050057,0x00000007,0x00000013,0x0000000e,0x00000012,
    0x0003003e,0x00000009,0x00000013,0x000100fd,0x00010038
};
// Precompiled SPIR-V binary for the vertex shader (embedded as 32-bit
// words; do not edit by hand). The embedded names indicate it reads a
// "pos" position and an "attr" attribute, forwards "texcoord", and
// writes gl_Position.
static const uint32_t vertShaderCode[] = {
    0x07230203,0x00010000,0x00080007,0x00000018,0x00000000,0x00020011,0x00000001,0x0006000b,
    0x00000001,0x4c534c47,0x6474732e,0x3035342e,0x00000000,0x0003000e,0x00000000,0x00000001,
    0x0009000f,0x00000000,0x00000004,0x6e69616d,0x00000000,0x00000009,0x0000000b,0x00000010,
    0x00000014,0x00030003,0x00000002,0x00000190,0x00090004,0x415f4c47,0x735f4252,0x72617065,
    0x5f657461,0x64616873,0x6f5f7265,0x63656a62,0x00007374,0x00090004,0x415f4c47,0x735f4252,
    0x69646168,0x6c5f676e,0x75676e61,0x5f656761,0x70303234,0x006b6361,0x00040005,0x00000004,
    0x6e69616d,0x00000000,0x00050005,0x00000009,0x63786574,0x64726f6f,0x00000000,0x00040005,
    0x0000000b,0x72747461,0x00000000,0x00060005,0x0000000e,0x505f6c67,0x65567265,0x78657472,
    0x00000000,0x00060006,0x0000000e,0x00000000,0x505f6c67,0x7469736f,0x006e6f69,0x00030005,
    0x00000010,0x00000000,0x00030005,0x00000014,0x00736f70,0x00040047,0x00000009,0x0000001e,
    0x00000000,0x00040047,0x0000000b,0x0000001e,0x00000001,0x00050048,0x0000000e,0x00000000,
    0x0000000b,0x00000000,0x00030047,0x0000000e,0x00000002,0x00040047,0x00000014,0x0000001e,
    0x00000000,0x00020013,0x00000002,0x00030021,0x00000003,0x00000002,0x00030016,0x00000006,
    0x00000020,0x00040017,0x00000007,0x00000006,0x00000002,0x00040020,0x00000008,0x00000003,
    0x00000007,0x0004003b,0x00000008,0x00000009,0x00000003,0x00040020,0x0000000a,0x00000001,
    0x00000007,0x0004003b,0x0000000a,0x0000000b,0x00000001,0x00040017,0x0000000d,0x00000006,
    0x00000004,0x0003001e,0x0000000e,0x0000000d,0x00040020,0x0000000f,0x00000003,0x0000000e,
    0x0004003b,0x0000000f,0x00000010,0x00000003,0x00040015,0x00000011,0x00000020,0x00000001,
    0x0004002b,0x00000011,0x00000012,0x00000000,0x00040020,0x00000013,0x00000001,0x0000000d,
    0x0004003b,0x00000013,0x00000014,0x00000001,0x00040020,0x00000016,0x00000003,0x0000000d,
    0x00050036,0x00000002,0x00000004,0x00000000,0x00000003,0x000200f8,0x00000005,0x0004003d,
    0x00000007,0x0000000c,0x0000000b,0x0003003e,0x00000009,0x0000000c,0x0004003d,0x0000000d,
    0x00000015,0x00000014,0x00050041,0x00000016,0x00000017,0x00000010,0x00000012,0x0003003e,
    0x00000017,0x00000015,0x000100fd,0x00010038
};
// One demo texture: the image, its sampler/view, the layout it is kept
// in while being sampled, the device memory backing it, and its size.
struct texture_object {
    VkSampler sampler;
    VkImage image;
    VkImageLayout imageLayout;      // layout the image is left in for shader access
    VkDeviceMemory mem;             // device memory bound to `image`
    VkImageView view;
    int32_t tex_width, tex_height;  // texel dimensions
};
  117. static int validation_error = 0;
// Debug-report callback used when a break-on-error is requested: traps
// into the debugger (DebugBreak on Windows, SIGTRAP elsewhere) instead
// of printing. All report parameters are ignored. Returns false so the
// layer does not abort the API call that triggered the report.
VKAPI_ATTR VkBool32 VKAPI_CALL
BreakCallback(VkFlags msgFlags, VkDebugReportObjectTypeEXT objType,
              uint64_t srcObject, size_t location, int32_t msgCode,
              const char *pLayerPrefix, const char *pMsg,
              void *pUserData) {
#ifdef _WIN32
    DebugBreak();
#else
    raise(SIGTRAP);
#endif
    return false;
}
// Per-swapchain-image resources: the image itself, a command buffer,
// and a color view used as the framebuffer attachment.
typedef struct {
    VkImage image;
    VkCommandBuffer cmd;
    VkImageView view;
} SwapchainBuffers;
/*
 * All state for the demo: the GLFW window and Vulkan surface, core
 * Vulkan objects, swapchain resources, the depth buffer, textures,
 * vertex data, pipeline objects, and per-frame bookkeeping.
 */
struct demo {
    GLFWwindow* window;                 // window the demo renders into
    VkSurfaceKHR surface;               // presentation surface for that window
    bool use_staging_buffer;            // upload textures via a staging copy
    VkInstance inst;
    VkPhysicalDevice gpu;
    VkDevice device;
    VkQueue queue;                      // queue used for both submit and present
    VkPhysicalDeviceProperties gpu_props;
    VkPhysicalDeviceFeatures gpu_features;
    VkQueueFamilyProperties *queue_props;   // array of queue_count entries
    uint32_t graphics_queue_node_index;
    uint32_t enabled_extension_count;
    uint32_t enabled_layer_count;
    const char *extension_names[64];
    const char *enabled_layers[64];
    int width, height;                  // current framebuffer size in pixels
    VkFormat format;                    // swapchain color format
    VkColorSpaceKHR color_space;
    uint32_t swapchainImageCount;
    VkSwapchainKHR swapchain;
    SwapchainBuffers *buffers;          // one entry per swapchain image
    VkCommandPool cmd_pool;
    // Depth attachment and its backing memory.
    struct {
        VkFormat format;
        VkImage image;
        VkDeviceMemory mem;
        VkImageView view;
    } depth;
    struct texture_object textures[DEMO_TEXTURE_COUNT];
    // Triangle vertex buffer plus the vertex-input state describing it.
    struct {
        VkBuffer buf;
        VkDeviceMemory mem;
        VkPipelineVertexInputStateCreateInfo vi;
        VkVertexInputBindingDescription vi_bindings[1];
        VkVertexInputAttributeDescription vi_attrs[2];
    } vertices;
    VkCommandBuffer setup_cmd; // Command Buffer for initialization commands
    VkCommandBuffer draw_cmd;  // Command Buffer for drawing commands
    VkPipelineLayout pipeline_layout;
    VkDescriptorSetLayout desc_layout;
    VkPipelineCache pipelineCache;
    VkRenderPass render_pass;
    VkPipeline pipeline;
    VkShaderModule vert_shader_module;
    VkShaderModule frag_shader_module;
    VkDescriptorPool desc_pool;
    VkDescriptorSet desc_set;
    VkFramebuffer *framebuffers;        // one per swapchain image
    VkPhysicalDeviceMemoryProperties memory_properties;
    int32_t curFrame;                   // frames rendered so far
    int32_t frameCount;                 // presumably a frame limit — confirm against the main loop
    bool validate;                      // enable validation layers
    bool use_break;                     // install BreakCallback instead of dbgFunc
    VkDebugReportCallbackEXT msg_callback;
    float depthStencil;                 // depth clear value (see demo_draw_build_cmd)
    float depthIncrement;               // per-frame delta applied to depthStencil — TODO confirm in main loop
    uint32_t current_buffer;            // swapchain image index being rendered
    uint32_t queue_count;               // length of queue_props
};
  195. VKAPI_ATTR VkBool32 VKAPI_CALL
  196. dbgFunc(VkFlags msgFlags, VkDebugReportObjectTypeEXT objType,
  197. uint64_t srcObject, size_t location, int32_t msgCode,
  198. const char *pLayerPrefix, const char *pMsg, void *pUserData) {
  199. char *message = (char *)malloc(strlen(pMsg) + 100);
  200. assert(message);
  201. validation_error = 1;
  202. if (msgFlags & VK_DEBUG_REPORT_ERROR_BIT_EXT) {
  203. sprintf(message, "ERROR: [%s] Code %d : %s", pLayerPrefix, msgCode,
  204. pMsg);
  205. } else if (msgFlags & VK_DEBUG_REPORT_WARNING_BIT_EXT) {
  206. sprintf(message, "WARNING: [%s] Code %d : %s", pLayerPrefix, msgCode,
  207. pMsg);
  208. } else {
  209. return false;
  210. }
  211. printf("%s\n", message);
  212. fflush(stdout);
  213. free(message);
  214. /*
  215. * false indicates that layer should not bail-out of an
  216. * API call that had validation failures. This may mean that the
  217. * app dies inside the driver due to invalid parameter(s).
  218. * That's what would happen without validation layers, so we'll
  219. * keep that behavior here.
  220. */
  221. return false;
  222. }
  223. // Forward declaration:
  224. static void demo_resize(struct demo *demo);
  225. static bool memory_type_from_properties(struct demo *demo, uint32_t typeBits,
  226. VkFlags requirements_mask,
  227. uint32_t *typeIndex) {
  228. uint32_t i;
  229. // Search memtypes to find first index with those properties
  230. for (i = 0; i < VK_MAX_MEMORY_TYPES; i++) {
  231. if ((typeBits & 1) == 1) {
  232. // Type is available, does it match user properties?
  233. if ((demo->memory_properties.memoryTypes[i].propertyFlags &
  234. requirements_mask) == requirements_mask) {
  235. *typeIndex = i;
  236. return true;
  237. }
  238. }
  239. typeBits >>= 1;
  240. }
  241. // No memory types matched, return failure
  242. return false;
  243. }
// Submit the one-time initialization command buffer (if any), wait for
// it to finish, then free it and reset demo->setup_cmd so a later
// demo_set_image_layout call can start a fresh one. No-op when nothing
// was recorded.
static void demo_flush_init_cmd(struct demo *demo) {
    VkResult U_ASSERT_ONLY err;
    if (demo->setup_cmd == VK_NULL_HANDLE)
        return;
    err = vkEndCommandBuffer(demo->setup_cmd);
    assert(!err);
    const VkCommandBuffer cmd_bufs[] = {demo->setup_cmd};
    VkFence nullFence = {VK_NULL_HANDLE};   // no fence; completion waited via queue idle
    VkSubmitInfo submit_info = {.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
                                .pNext = NULL,
                                .waitSemaphoreCount = 0,
                                .pWaitSemaphores = NULL,
                                .pWaitDstStageMask = NULL,
                                .commandBufferCount = 1,
                                .pCommandBuffers = cmd_bufs,
                                .signalSemaphoreCount = 0,
                                .pSignalSemaphores = NULL};
    err = vkQueueSubmit(demo->queue, 1, &submit_info, nullFence);
    assert(!err);
    // Block until the setup work is done so the buffer can be freed.
    err = vkQueueWaitIdle(demo->queue);
    assert(!err);
    vkFreeCommandBuffers(demo->device, demo->cmd_pool, 1, cmd_bufs);
    demo->setup_cmd = VK_NULL_HANDLE;
}
// Record an image layout transition into the lazily-created setup
// command buffer. The barrier moves `image` (the subresource selected
// by aspectMask, mip 0, layer 0) from old_image_layout to
// new_image_layout, with srcAccessMask supplied by the caller and
// dstAccessMask derived from the destination layout. The commands are
// only recorded here; demo_flush_init_cmd submits them later.
static void demo_set_image_layout(struct demo *demo, VkImage image,
                                  VkImageAspectFlags aspectMask,
                                  VkImageLayout old_image_layout,
                                  VkImageLayout new_image_layout,
                                  VkAccessFlagBits srcAccessMask) {
    VkResult U_ASSERT_ONLY err;
    // Allocate and begin the shared setup command buffer on first use.
    if (demo->setup_cmd == VK_NULL_HANDLE) {
        const VkCommandBufferAllocateInfo cmd = {
            .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
            .pNext = NULL,
            .commandPool = demo->cmd_pool,
            .level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
            .commandBufferCount = 1,
        };
        err = vkAllocateCommandBuffers(demo->device, &cmd, &demo->setup_cmd);
        assert(!err);
        VkCommandBufferBeginInfo cmd_buf_info = {
            .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
            .pNext = NULL,
            .flags = 0,
            .pInheritanceInfo = NULL,
        };
        err = vkBeginCommandBuffer(demo->setup_cmd, &cmd_buf_info);
        assert(!err);
    }
    VkImageMemoryBarrier image_memory_barrier = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
        .pNext = NULL,
        .srcAccessMask = srcAccessMask,
        .dstAccessMask = 0,     // filled in below based on new_image_layout
        .oldLayout = old_image_layout,
        .newLayout = new_image_layout,
        .image = image,
        .subresourceRange = {aspectMask, 0, 1, 0, 1}};
    if (new_image_layout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) {
        /* Make sure anything that was copying from this image has completed */
        // NOTE(review): TRANSFER_READ_BIT for a transfer-DST layout looks
        // suspect — a copy *into* the image would need TRANSFER_WRITE_BIT.
        // Kept as-is from the upstream demo; verify against the spec.
        image_memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
    }
    if (new_image_layout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL) {
        image_memory_barrier.dstAccessMask =
            VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
    }
    if (new_image_layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL) {
        image_memory_barrier.dstAccessMask =
            VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
    }
    if (new_image_layout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) {
        /* Make sure any Copy or CPU writes to image are flushed */
        image_memory_barrier.dstAccessMask =
            VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;
    }
    VkImageMemoryBarrier *pmemory_barrier = &image_memory_barrier;
    // Coarse synchronization: both stage masks are TOP_OF_PIPE, as in
    // the original demo; correctness relies on the later queue-wait.
    VkPipelineStageFlags src_stages = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
    VkPipelineStageFlags dest_stages = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
    vkCmdPipelineBarrier(demo->setup_cmd, src_stages, dest_stages, 0, 0, NULL,
                         0, NULL, 1, pmemory_barrier);
}
// Record the per-frame draw command buffer for the current swapchain
// image: transition it to color-attachment layout, run the render pass
// (bind pipeline, descriptors, dynamic viewport/scissor, vertex buffer,
// draw 3 vertices), then transition it to present layout. The buffer is
// only recorded here; demo_draw submits it.
static void demo_draw_build_cmd(struct demo *demo) {
    const VkCommandBufferBeginInfo cmd_buf_info = {
        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
        .pNext = NULL,
        .flags = 0,
        .pInheritanceInfo = NULL,
    };
    // Clear values: index 0 = color attachment, index 1 = depth, whose
    // clear depth comes from demo->depthStencil.
    const VkClearValue clear_values[2] = {
        [0] = {.color.float32 = {0.2f, 0.2f, 0.2f, 0.2f}},
        [1] = {.depthStencil = {demo->depthStencil, 0}},
    };
    const VkRenderPassBeginInfo rp_begin = {
        .sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
        .pNext = NULL,
        .renderPass = demo->render_pass,
        .framebuffer = demo->framebuffers[demo->current_buffer],
        .renderArea.offset.x = 0,
        .renderArea.offset.y = 0,
        .renderArea.extent.width = demo->width,
        .renderArea.extent.height = demo->height,
        .clearValueCount = 2,
        .pClearValues = clear_values,
    };
    VkResult U_ASSERT_ONLY err;
    err = vkBeginCommandBuffer(demo->draw_cmd, &cmd_buf_info);
    assert(!err);
    // We can use LAYOUT_UNDEFINED as a wildcard here because we don't care what
    // happens to the previous contents of the image
    VkImageMemoryBarrier image_memory_barrier = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
        .pNext = NULL,
        .srcAccessMask = 0,
        .dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
        .oldLayout = VK_IMAGE_LAYOUT_UNDEFINED,
        .newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
        .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
        .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
        .image = demo->buffers[demo->current_buffer].image,
        .subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1}};
    vkCmdPipelineBarrier(demo->draw_cmd, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
                         VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, NULL, 0,
                         NULL, 1, &image_memory_barrier);
    vkCmdBeginRenderPass(demo->draw_cmd, &rp_begin, VK_SUBPASS_CONTENTS_INLINE);
    vkCmdBindPipeline(demo->draw_cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
                      demo->pipeline);
    vkCmdBindDescriptorSets(demo->draw_cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
                            demo->pipeline_layout, 0, 1, &demo->desc_set, 0,
                            NULL);
    // Viewport and scissor are pipeline dynamic state; set them to cover
    // the full framebuffer each frame.
    VkViewport viewport;
    memset(&viewport, 0, sizeof(viewport));
    viewport.height = (float)demo->height;
    viewport.width = (float)demo->width;
    viewport.minDepth = (float)0.0f;
    viewport.maxDepth = (float)1.0f;
    vkCmdSetViewport(demo->draw_cmd, 0, 1, &viewport);
    VkRect2D scissor;
    memset(&scissor, 0, sizeof(scissor));
    scissor.extent.width = demo->width;
    scissor.extent.height = demo->height;
    scissor.offset.x = 0;
    scissor.offset.y = 0;
    vkCmdSetScissor(demo->draw_cmd, 0, 1, &scissor);
    VkDeviceSize offsets[1] = {0};
    vkCmdBindVertexBuffers(demo->draw_cmd, VERTEX_BUFFER_BIND_ID, 1,
                           &demo->vertices.buf, offsets);
    // One instance of a single triangle.
    vkCmdDraw(demo->draw_cmd, 3, 1, 0, 0);
    vkCmdEndRenderPass(demo->draw_cmd);
    // Transition the rendered image to PRESENT_SRC for the presentation
    // engine.
    VkImageMemoryBarrier prePresentBarrier = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
        .pNext = NULL,
        .srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
        .dstAccessMask = VK_ACCESS_MEMORY_READ_BIT,
        .oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
        .newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
        .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
        .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
        .subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1}};
    prePresentBarrier.image = demo->buffers[demo->current_buffer].image;
    VkImageMemoryBarrier *pmemory_barrier = &prePresentBarrier;
    vkCmdPipelineBarrier(demo->draw_cmd, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
                         VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, NULL, 0,
                         NULL, 1, pmemory_barrier);
    err = vkEndCommandBuffer(demo->draw_cmd);
    assert(!err);
}
// Render and present one frame: acquire the next swapchain image,
// flush pending setup commands, re-record the draw command buffer,
// submit it, and present. On VK_ERROR_OUT_OF_DATE_KHR the swapchain is
// recreated and the frame is retried via a recursive call.
static void demo_draw(struct demo *demo) {
    VkResult U_ASSERT_ONLY err;
    // Per-frame semaphores: imageAcquiredSemaphore gates the submit on
    // the acquire; drawCompleteSemaphore gates the present on the draw.
    // Both are created fresh each frame and destroyed after the
    // queue-wait below makes that safe.
    VkSemaphore imageAcquiredSemaphore, drawCompleteSemaphore;
    VkSemaphoreCreateInfo semaphoreCreateInfo = {
        .sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
        .pNext = NULL,
        .flags = 0,
    };
    err = vkCreateSemaphore(demo->device, &semaphoreCreateInfo,
                            NULL, &imageAcquiredSemaphore);
    assert(!err);
    err = vkCreateSemaphore(demo->device, &semaphoreCreateInfo,
                            NULL, &drawCompleteSemaphore);
    assert(!err);
    // Get the index of the next available swapchain image:
    err = vkAcquireNextImageKHR(demo->device, demo->swapchain, UINT64_MAX,
                                imageAcquiredSemaphore,
                                (VkFence)0, // TODO: Show use of fence
                                &demo->current_buffer);
    if (err == VK_ERROR_OUT_OF_DATE_KHR) {
        // demo->swapchain is out of date (e.g. the window was resized) and
        // must be recreated:
        demo_resize(demo);
        demo_draw(demo);    // retry the frame against the new swapchain
        vkDestroySemaphore(demo->device, imageAcquiredSemaphore, NULL);
        vkDestroySemaphore(demo->device, drawCompleteSemaphore, NULL);
        return;
    } else if (err == VK_SUBOPTIMAL_KHR) {
        // demo->swapchain is not as optimal as it could be, but the platform's
        // presentation engine will still present the image correctly.
    } else {
        assert(!err);
    }
    demo_flush_init_cmd(demo);
    // Wait for the present complete semaphore to be signaled to ensure
    // that the image won't be rendered to until the presentation
    // engine has fully released ownership to the application, and it is
    // okay to render to the image.
    demo_draw_build_cmd(demo);
    VkFence nullFence = VK_NULL_HANDLE;
    VkPipelineStageFlags pipe_stage_flags =
        VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
    VkSubmitInfo submit_info = {.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
                                .pNext = NULL,
                                .waitSemaphoreCount = 1,
                                .pWaitSemaphores = &imageAcquiredSemaphore,
                                .pWaitDstStageMask = &pipe_stage_flags,
                                .commandBufferCount = 1,
                                .pCommandBuffers = &demo->draw_cmd,
                                .signalSemaphoreCount = 1,
                                .pSignalSemaphores = &drawCompleteSemaphore};
    err = vkQueueSubmit(demo->queue, 1, &submit_info, nullFence);
    assert(!err);
    VkPresentInfoKHR present = {
        .sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
        .pNext = NULL,
        .waitSemaphoreCount = 1,
        .pWaitSemaphores = &drawCompleteSemaphore,
        .swapchainCount = 1,
        .pSwapchains = &demo->swapchain,
        .pImageIndices = &demo->current_buffer,
    };
    err = vkQueuePresentKHR(demo->queue, &present);
    if (err == VK_ERROR_OUT_OF_DATE_KHR) {
        // demo->swapchain is out of date (e.g. the window was resized) and
        // must be recreated:
        demo_resize(demo);
    } else if (err == VK_SUBOPTIMAL_KHR) {
        // demo->swapchain is not as optimal as it could be, but the platform's
        // presentation engine will still present the image correctly.
    } else {
        assert(!err);
    }
    // Brute-force pacing: drain the queue so the per-frame semaphores
    // can be destroyed immediately.
    err = vkQueueWaitIdle(demo->queue);
    assert(err == VK_SUCCESS);
    vkDestroySemaphore(demo->device, imageAcquiredSemaphore, NULL);
    vkDestroySemaphore(demo->device, drawCompleteSemaphore, NULL);
}
  488. static void demo_prepare_buffers(struct demo *demo) {
  489. VkResult U_ASSERT_ONLY err;
  490. VkSwapchainKHR oldSwapchain = demo->swapchain;
  491. // Check the surface capabilities and formats
  492. VkSurfaceCapabilitiesKHR surfCapabilities;
  493. err = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
  494. demo->gpu, demo->surface, &surfCapabilities);
  495. assert(!err);
  496. uint32_t presentModeCount;
  497. err = vkGetPhysicalDeviceSurfacePresentModesKHR(
  498. demo->gpu, demo->surface, &presentModeCount, NULL);
  499. assert(!err);
  500. VkPresentModeKHR *presentModes =
  501. (VkPresentModeKHR *)malloc(presentModeCount * sizeof(VkPresentModeKHR));
  502. assert(presentModes);
  503. err = vkGetPhysicalDeviceSurfacePresentModesKHR(
  504. demo->gpu, demo->surface, &presentModeCount, presentModes);
  505. assert(!err);
  506. VkExtent2D swapchainExtent;
  507. // width and height are either both 0xFFFFFFFF, or both not 0xFFFFFFFF.
  508. if (surfCapabilities.currentExtent.width == 0xFFFFFFFF) {
  509. // If the surface size is undefined, the size is set to the size
  510. // of the images requested, which must fit within the minimum and
  511. // maximum values.
  512. swapchainExtent.width = demo->width;
  513. swapchainExtent.height = demo->height;
  514. if (swapchainExtent.width < surfCapabilities.minImageExtent.width) {
  515. swapchainExtent.width = surfCapabilities.minImageExtent.width;
  516. } else if (swapchainExtent.width > surfCapabilities.maxImageExtent.width) {
  517. swapchainExtent.width = surfCapabilities.maxImageExtent.width;
  518. }
  519. if (swapchainExtent.height < surfCapabilities.minImageExtent.height) {
  520. swapchainExtent.height = surfCapabilities.minImageExtent.height;
  521. } else if (swapchainExtent.height > surfCapabilities.maxImageExtent.height) {
  522. swapchainExtent.height = surfCapabilities.maxImageExtent.height;
  523. }
  524. } else {
  525. // If the surface size is defined, the swap chain size must match
  526. swapchainExtent = surfCapabilities.currentExtent;
  527. demo->width = surfCapabilities.currentExtent.width;
  528. demo->height = surfCapabilities.currentExtent.height;
  529. }
  530. VkPresentModeKHR swapchainPresentMode = VK_PRESENT_MODE_FIFO_KHR;
  531. // Determine the number of VkImage's to use in the swap chain.
  532. // Application desires to only acquire 1 image at a time (which is
  533. // "surfCapabilities.minImageCount").
  534. uint32_t desiredNumOfSwapchainImages = surfCapabilities.minImageCount;
  535. // If maxImageCount is 0, we can ask for as many images as we want;
  536. // otherwise we're limited to maxImageCount
  537. if ((surfCapabilities.maxImageCount > 0) &&
  538. (desiredNumOfSwapchainImages > surfCapabilities.maxImageCount)) {
  539. // Application must settle for fewer images than desired:
  540. desiredNumOfSwapchainImages = surfCapabilities.maxImageCount;
  541. }
  542. VkSurfaceTransformFlagsKHR preTransform;
  543. if (surfCapabilities.supportedTransforms &
  544. VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR) {
  545. preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
  546. } else {
  547. preTransform = surfCapabilities.currentTransform;
  548. }
  549. const VkSwapchainCreateInfoKHR swapchain = {
  550. .sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
  551. .pNext = NULL,
  552. .surface = demo->surface,
  553. .minImageCount = desiredNumOfSwapchainImages,
  554. .imageFormat = demo->format,
  555. .imageColorSpace = demo->color_space,
  556. .imageExtent =
  557. {
  558. .width = swapchainExtent.width, .height = swapchainExtent.height,
  559. },
  560. .imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
  561. .preTransform = preTransform,
  562. .compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
  563. .imageArrayLayers = 1,
  564. .imageSharingMode = VK_SHARING_MODE_EXCLUSIVE,
  565. .queueFamilyIndexCount = 0,
  566. .pQueueFamilyIndices = NULL,
  567. .presentMode = swapchainPresentMode,
  568. .oldSwapchain = oldSwapchain,
  569. .clipped = true,
  570. };
  571. uint32_t i;
  572. err = vkCreateSwapchainKHR(demo->device, &swapchain, NULL, &demo->swapchain);
  573. assert(!err);
  574. // If we just re-created an existing swapchain, we should destroy the old
  575. // swapchain at this point.
  576. // Note: destroying the swapchain also cleans up all its associated
  577. // presentable images once the platform is done with them.
  578. if (oldSwapchain != VK_NULL_HANDLE) {
  579. vkDestroySwapchainKHR(demo->device, oldSwapchain, NULL);
  580. }
  581. err = vkGetSwapchainImagesKHR(demo->device, demo->swapchain,
  582. &demo->swapchainImageCount, NULL);
  583. assert(!err);
  584. VkImage *swapchainImages =
  585. (VkImage *)malloc(demo->swapchainImageCount * sizeof(VkImage));
  586. assert(swapchainImages);
  587. err = vkGetSwapchainImagesKHR(demo->device, demo->swapchain,
  588. &demo->swapchainImageCount,
  589. swapchainImages);
  590. assert(!err);
  591. demo->buffers = (SwapchainBuffers *)malloc(sizeof(SwapchainBuffers) *
  592. demo->swapchainImageCount);
  593. assert(demo->buffers);
  594. for (i = 0; i < demo->swapchainImageCount; i++) {
  595. VkImageViewCreateInfo color_attachment_view = {
  596. .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
  597. .pNext = NULL,
  598. .format = demo->format,
  599. .components =
  600. {
  601. .r = VK_COMPONENT_SWIZZLE_R,
  602. .g = VK_COMPONENT_SWIZZLE_G,
  603. .b = VK_COMPONENT_SWIZZLE_B,
  604. .a = VK_COMPONENT_SWIZZLE_A,
  605. },
  606. .subresourceRange = {.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
  607. .baseMipLevel = 0,
  608. .levelCount = 1,
  609. .baseArrayLayer = 0,
  610. .layerCount = 1},
  611. .viewType = VK_IMAGE_VIEW_TYPE_2D,
  612. .flags = 0,
  613. };
  614. demo->buffers[i].image = swapchainImages[i];
  615. color_attachment_view.image = demo->buffers[i].image;
  616. err = vkCreateImageView(demo->device, &color_attachment_view, NULL,
  617. &demo->buffers[i].view);
  618. assert(!err);
  619. }
  620. demo->current_buffer = 0;
  621. if (NULL != presentModes) {
  622. free(presentModes);
  623. }
  624. }
/* Create the depth buffer used by the render pass: a 2D D16_UNORM image
 * sized to demo->width x demo->height, device memory bound to it, and a
 * depth-aspect image view.  Results are stored in demo->depth. */
static void demo_prepare_depth(struct demo *demo) {
    const VkFormat depth_format = VK_FORMAT_D16_UNORM;
    const VkImageCreateInfo image = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
        .pNext = NULL,
        .imageType = VK_IMAGE_TYPE_2D,
        .format = depth_format,
        .extent = {demo->width, demo->height, 1},
        .mipLevels = 1,
        .arrayLayers = 1,
        .samples = VK_SAMPLE_COUNT_1_BIT,
        .tiling = VK_IMAGE_TILING_OPTIMAL,
        .usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
        .flags = 0,
    };
    /* allocationSize / memoryTypeIndex are placeholders; they are filled in
     * after the image's memory requirements are queried below. */
    VkMemoryAllocateInfo mem_alloc = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
        .pNext = NULL,
        .allocationSize = 0,
        .memoryTypeIndex = 0,
    };
    /* .image is patched in once the VkImage has been created. */
    VkImageViewCreateInfo view = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
        .pNext = NULL,
        .image = VK_NULL_HANDLE,
        .format = depth_format,
        .subresourceRange = {.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT,
                             .baseMipLevel = 0,
                             .levelCount = 1,
                             .baseArrayLayer = 0,
                             .layerCount = 1},
        .flags = 0,
        .viewType = VK_IMAGE_VIEW_TYPE_2D,
    };
    VkMemoryRequirements mem_reqs;
    VkResult U_ASSERT_ONLY err;
    bool U_ASSERT_ONLY pass;
    demo->depth.format = depth_format;
    /* create image */
    err = vkCreateImage(demo->device, &image, NULL, &demo->depth.image);
    assert(!err);
    /* get memory requirements for this object */
    vkGetImageMemoryRequirements(demo->device, demo->depth.image, &mem_reqs);
    /* select memory size and type */
    mem_alloc.allocationSize = mem_reqs.size;
    pass = memory_type_from_properties(demo, mem_reqs.memoryTypeBits,
                                       0, /* No requirements */
                                       &mem_alloc.memoryTypeIndex);
    assert(pass);
    /* allocate memory */
    err = vkAllocateMemory(demo->device, &mem_alloc, NULL, &demo->depth.mem);
    assert(!err);
    /* bind memory */
    err =
        vkBindImageMemory(demo->device, demo->depth.image, demo->depth.mem, 0);
    assert(!err);
    /* Transition the fresh image into the attachment layout the render pass
     * declares as its initial layout (see demo_prepare_render_pass). */
    demo_set_image_layout(demo, demo->depth.image, VK_IMAGE_ASPECT_DEPTH_BIT,
                          VK_IMAGE_LAYOUT_UNDEFINED,
                          VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
                          0);
    /* create image view */
    view.image = demo->depth.image;
    err = vkCreateImageView(demo->device, &view, NULL, &demo->depth.view);
    assert(!err);
}
/* Create a tiny 2x2 texture image in *tex_obj, allocate and bind memory for
 * it with the requested properties, and (when the memory is host-visible)
 * fill it directly with a checkerboard built from tex_colors[0]/[1].
 * The image is created PREINITIALIZED and finally transitioned to
 * SHADER_READ_ONLY_OPTIMAL.
 *
 * tex_colors:     two packed pixel values used for the checkerboard
 * tiling/usage:   forwarded verbatim into VkImageCreateInfo
 * required_props: memory property bits the backing allocation must satisfy */
static void
demo_prepare_texture_image(struct demo *demo, const uint32_t *tex_colors,
                           struct texture_object *tex_obj, VkImageTiling tiling,
                           VkImageUsageFlags usage, VkFlags required_props) {
    const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
    const int32_t tex_width = 2;
    const int32_t tex_height = 2;
    VkResult U_ASSERT_ONLY err;
    bool U_ASSERT_ONLY pass;
    tex_obj->tex_width = tex_width;
    tex_obj->tex_height = tex_height;
    const VkImageCreateInfo image_create_info = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
        .pNext = NULL,
        .imageType = VK_IMAGE_TYPE_2D,
        .format = tex_format,
        .extent = {tex_width, tex_height, 1},
        .mipLevels = 1,
        .arrayLayers = 1,
        .samples = VK_SAMPLE_COUNT_1_BIT,
        .tiling = tiling,
        .usage = usage,
        .flags = 0,
        /* PREINITIALIZED so host writes below are preserved by the later
         * layout transition. */
        .initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED
    };
    /* Size and type index are filled in from the memory requirements. */
    VkMemoryAllocateInfo mem_alloc = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
        .pNext = NULL,
        .allocationSize = 0,
        .memoryTypeIndex = 0,
    };
    VkMemoryRequirements mem_reqs;
    err =
        vkCreateImage(demo->device, &image_create_info, NULL, &tex_obj->image);
    assert(!err);
    vkGetImageMemoryRequirements(demo->device, tex_obj->image, &mem_reqs);
    mem_alloc.allocationSize = mem_reqs.size;
    pass =
        memory_type_from_properties(demo, mem_reqs.memoryTypeBits,
                                    required_props, &mem_alloc.memoryTypeIndex);
    assert(pass);
    /* allocate memory */
    err = vkAllocateMemory(demo->device, &mem_alloc, NULL, &tex_obj->mem);
    assert(!err);
    /* bind memory */
    err = vkBindImageMemory(demo->device, tex_obj->image, tex_obj->mem, 0);
    assert(!err);
    if (required_props & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
        /* Host-visible path: map the memory and write the texels in place,
         * honoring the driver-reported row pitch of the linear layout.
         * No explicit flush is done here — callers in this file always pair
         * HOST_VISIBLE with HOST_COHERENT. */
        const VkImageSubresource subres = {
            .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
            .mipLevel = 0,
            .arrayLayer = 0,
        };
        VkSubresourceLayout layout;
        void *data;
        int32_t x, y;
        vkGetImageSubresourceLayout(demo->device, tex_obj->image, &subres,
                                    &layout);
        err = vkMapMemory(demo->device, tex_obj->mem, 0,
                          mem_alloc.allocationSize, 0, &data);
        assert(!err);
        for (y = 0; y < tex_height; y++) {
            uint32_t *row = (uint32_t *)((char *)data + layout.rowPitch * y);
            for (x = 0; x < tex_width; x++)
                /* (x&1)^(y&1) alternates 0/1 -> 2x2 checkerboard. */
                row[x] = tex_colors[(x & 1) ^ (y & 1)];
        }
        vkUnmapMemory(demo->device, tex_obj->mem);
    }
    tex_obj->imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
    demo_set_image_layout(demo, tex_obj->image, VK_IMAGE_ASPECT_COLOR_BIT,
                          VK_IMAGE_LAYOUT_PREINITIALIZED, tex_obj->imageLayout,
                          VK_ACCESS_HOST_WRITE_BIT);
    /* setting the image layout does not reference the actual memory so no need
     * to add a mem ref */
}
/* Destroy a texture image and free its backing memory.  Used for the
 * transient staging texture created in demo_prepare_textures; the caller is
 * responsible for ensuring the GPU is done with it first (the staging path
 * flushes the init command buffer before calling this). */
static void demo_destroy_texture_image(struct demo *demo,
                                       struct texture_object *tex_obj) {
    /* clean up staging resources */
    vkDestroyImage(demo->device, tex_obj->image, NULL);
    vkFreeMemory(demo->device, tex_obj->mem, NULL);
}
/* Create the demo's DEMO_TEXTURE_COUNT textures (each a 2x2 checkerboard of
 * two colors), plus a sampler and an image view for each.
 * Two paths, chosen per the device's format features:
 *   - linear tiling sampled directly (when supported and staging not forced);
 *   - otherwise a linear staging image copied into an optimal-tiling image. */
static void demo_prepare_textures(struct demo *demo) {
    const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
    VkFormatProperties props;
    /* One {color0, color1} checkerboard pair per texture. */
    const uint32_t tex_colors[DEMO_TEXTURE_COUNT][2] = {
        {0xffff0000, 0xff00ff00},
    };
    uint32_t i;
    VkResult U_ASSERT_ONLY err;
    vkGetPhysicalDeviceFormatProperties(demo->gpu, tex_format, &props);
    for (i = 0; i < DEMO_TEXTURE_COUNT; i++) {
        if ((props.linearTilingFeatures &
             VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) &&
            !demo->use_staging_buffer) {
            /* Device can texture using linear textures */
            demo_prepare_texture_image(
                demo, tex_colors[i], &demo->textures[i], VK_IMAGE_TILING_LINEAR,
                VK_IMAGE_USAGE_SAMPLED_BIT,
                VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
                    VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
        } else if (props.optimalTilingFeatures &
                   VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) {
            /* Must use staging buffer to copy linear texture to optimized */
            struct texture_object staging_texture;
            memset(&staging_texture, 0, sizeof(staging_texture));
            /* Host-filled linear source image... */
            demo_prepare_texture_image(
                demo, tex_colors[i], &staging_texture, VK_IMAGE_TILING_LINEAR,
                VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
                VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
                    VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
            /* ...and the device-local optimal destination image. */
            demo_prepare_texture_image(
                demo, tex_colors[i], &demo->textures[i],
                VK_IMAGE_TILING_OPTIMAL,
                (VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT),
                VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);
            /* Move both images into transfer layouts for the copy. */
            demo_set_image_layout(demo, staging_texture.image,
                                  VK_IMAGE_ASPECT_COLOR_BIT,
                                  staging_texture.imageLayout,
                                  VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                  0);
            demo_set_image_layout(demo, demo->textures[i].image,
                                  VK_IMAGE_ASPECT_COLOR_BIT,
                                  demo->textures[i].imageLayout,
                                  VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                  0);
            VkImageCopy copy_region = {
                .srcSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1},
                .srcOffset = {0, 0, 0},
                .dstSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1},
                .dstOffset = {0, 0, 0},
                .extent = {staging_texture.tex_width,
                           staging_texture.tex_height, 1},
            };
            vkCmdCopyImage(
                demo->setup_cmd, staging_texture.image,
                VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, demo->textures[i].image,
                VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &copy_region);
            /* Return the destination to its shader-read layout. */
            demo_set_image_layout(demo, demo->textures[i].image,
                                  VK_IMAGE_ASPECT_COLOR_BIT,
                                  VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                  demo->textures[i].imageLayout,
                                  0);
            /* Submit and wait so the staging image can be destroyed safely. */
            demo_flush_init_cmd(demo);
            demo_destroy_texture_image(demo, &staging_texture);
        } else {
            /* Can't support VK_FORMAT_B8G8R8A8_UNORM !? */
            assert(!"No support for B8G8R8A8_UNORM as texture image format");
        }
        /* Nearest-filtered, repeat-addressing sampler (no mips, no aniso). */
        const VkSamplerCreateInfo sampler = {
            .sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
            .pNext = NULL,
            .magFilter = VK_FILTER_NEAREST,
            .minFilter = VK_FILTER_NEAREST,
            .mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST,
            .addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT,
            .addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT,
            .addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT,
            .mipLodBias = 0.0f,
            .anisotropyEnable = VK_FALSE,
            .maxAnisotropy = 1,
            .compareOp = VK_COMPARE_OP_NEVER,
            .minLod = 0.0f,
            .maxLod = 0.0f,
            .borderColor = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
            .unnormalizedCoordinates = VK_FALSE,
        };
        VkImageViewCreateInfo view = {
            .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
            .pNext = NULL,
            .image = VK_NULL_HANDLE,
            .viewType = VK_IMAGE_VIEW_TYPE_2D,
            .format = tex_format,
            .components =
                {
                 VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G,
                 VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A,
                },
            .subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1},
            .flags = 0,
        };
        /* create sampler */
        err = vkCreateSampler(demo->device, &sampler, NULL,
                              &demo->textures[i].sampler);
        assert(!err);
        /* create image view */
        view.image = demo->textures[i].image;
        err = vkCreateImageView(demo->device, &view, NULL,
                                &demo->textures[i].view);
        assert(!err);
    }
}
  881. static void demo_prepare_vertices(struct demo *demo) {
  882. // clang-format off
  883. const float vb[3][5] = {
  884. /* position texcoord */
  885. { -1.0f, -1.0f, 0.25f, 0.0f, 0.0f },
  886. { 1.0f, -1.0f, 0.25f, 1.0f, 0.0f },
  887. { 0.0f, 1.0f, 1.0f, 0.5f, 1.0f },
  888. };
  889. // clang-format on
  890. const VkBufferCreateInfo buf_info = {
  891. .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
  892. .pNext = NULL,
  893. .size = sizeof(vb),
  894. .usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
  895. .flags = 0,
  896. };
  897. VkMemoryAllocateInfo mem_alloc = {
  898. .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
  899. .pNext = NULL,
  900. .allocationSize = 0,
  901. .memoryTypeIndex = 0,
  902. };
  903. VkMemoryRequirements mem_reqs;
  904. VkResult U_ASSERT_ONLY err;
  905. bool U_ASSERT_ONLY pass;
  906. void *data;
  907. memset(&demo->vertices, 0, sizeof(demo->vertices));
  908. err = vkCreateBuffer(demo->device, &buf_info, NULL, &demo->vertices.buf);
  909. assert(!err);
  910. vkGetBufferMemoryRequirements(demo->device, demo->vertices.buf, &mem_reqs);
  911. assert(!err);
  912. mem_alloc.allocationSize = mem_reqs.size;
  913. pass = memory_type_from_properties(demo, mem_reqs.memoryTypeBits,
  914. VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
  915. VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
  916. &mem_alloc.memoryTypeIndex);
  917. assert(pass);
  918. err = vkAllocateMemory(demo->device, &mem_alloc, NULL, &demo->vertices.mem);
  919. assert(!err);
  920. err = vkMapMemory(demo->device, demo->vertices.mem, 0,
  921. mem_alloc.allocationSize, 0, &data);
  922. assert(!err);
  923. memcpy(data, vb, sizeof(vb));
  924. vkUnmapMemory(demo->device, demo->vertices.mem);
  925. err = vkBindBufferMemory(demo->device, demo->vertices.buf,
  926. demo->vertices.mem, 0);
  927. assert(!err);
  928. demo->vertices.vi.sType =
  929. VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
  930. demo->vertices.vi.pNext = NULL;
  931. demo->vertices.vi.vertexBindingDescriptionCount = 1;
  932. demo->vertices.vi.pVertexBindingDescriptions = demo->vertices.vi_bindings;
  933. demo->vertices.vi.vertexAttributeDescriptionCount = 2;
  934. demo->vertices.vi.pVertexAttributeDescriptions = demo->vertices.vi_attrs;
  935. demo->vertices.vi_bindings[0].binding = VERTEX_BUFFER_BIND_ID;
  936. demo->vertices.vi_bindings[0].stride = sizeof(vb[0]);
  937. demo->vertices.vi_bindings[0].inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
  938. demo->vertices.vi_attrs[0].binding = VERTEX_BUFFER_BIND_ID;
  939. demo->vertices.vi_attrs[0].location = 0;
  940. demo->vertices.vi_attrs[0].format = VK_FORMAT_R32G32B32_SFLOAT;
  941. demo->vertices.vi_attrs[0].offset = 0;
  942. demo->vertices.vi_attrs[1].binding = VERTEX_BUFFER_BIND_ID;
  943. demo->vertices.vi_attrs[1].location = 1;
  944. demo->vertices.vi_attrs[1].format = VK_FORMAT_R32G32_SFLOAT;
  945. demo->vertices.vi_attrs[1].offset = sizeof(float) * 3;
  946. }
  947. static void demo_prepare_descriptor_layout(struct demo *demo) {
  948. const VkDescriptorSetLayoutBinding layout_binding = {
  949. .binding = 0,
  950. .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
  951. .descriptorCount = DEMO_TEXTURE_COUNT,
  952. .stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT,
  953. .pImmutableSamplers = NULL,
  954. };
  955. const VkDescriptorSetLayoutCreateInfo descriptor_layout = {
  956. .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
  957. .pNext = NULL,
  958. .bindingCount = 1,
  959. .pBindings = &layout_binding,
  960. };
  961. VkResult U_ASSERT_ONLY err;
  962. err = vkCreateDescriptorSetLayout(demo->device, &descriptor_layout, NULL,
  963. &demo->desc_layout);
  964. assert(!err);
  965. const VkPipelineLayoutCreateInfo pPipelineLayoutCreateInfo = {
  966. .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
  967. .pNext = NULL,
  968. .setLayoutCount = 1,
  969. .pSetLayouts = &demo->desc_layout,
  970. };
  971. err = vkCreatePipelineLayout(demo->device, &pPipelineLayoutCreateInfo, NULL,
  972. &demo->pipeline_layout);
  973. assert(!err);
  974. }
/* Create the render pass: one subpass with a color attachment (the swapchain
 * format, cleared then stored) and a depth attachment (cleared, contents
 * discarded after the pass).  Both attachments enter and leave the pass in
 * their *_ATTACHMENT_OPTIMAL layouts; demo_prepare_depth performs the
 * matching initial transition for the depth image. */
static void demo_prepare_render_pass(struct demo *demo) {
    const VkAttachmentDescription attachments[2] = {
        [0] =
            {
             .format = demo->format,
             .samples = VK_SAMPLE_COUNT_1_BIT,
             .loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR,
             .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
             .stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE,
             .stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE,
             .initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
             .finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
            },
        [1] =
            {
             .format = demo->depth.format,
             .samples = VK_SAMPLE_COUNT_1_BIT,
             .loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR,
             /* Depth is only needed during the pass; don't store it. */
             .storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE,
             .stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE,
             .stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE,
             .initialLayout =
                 VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
             .finalLayout =
                 VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
            },
    };
    const VkAttachmentReference color_reference = {
        .attachment = 0, .layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
    };
    const VkAttachmentReference depth_reference = {
        .attachment = 1,
        .layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
    };
    const VkSubpassDescription subpass = {
        .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
        .flags = 0,
        .inputAttachmentCount = 0,
        .pInputAttachments = NULL,
        .colorAttachmentCount = 1,
        .pColorAttachments = &color_reference,
        .pResolveAttachments = NULL,
        .pDepthStencilAttachment = &depth_reference,
        .preserveAttachmentCount = 0,
        .pPreserveAttachments = NULL,
    };
    const VkRenderPassCreateInfo rp_info = {
        .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
        .pNext = NULL,
        .attachmentCount = 2,
        .pAttachments = attachments,
        .subpassCount = 1,
        .pSubpasses = &subpass,
        .dependencyCount = 0,
        .pDependencies = NULL,
    };
    VkResult U_ASSERT_ONLY err;
    err = vkCreateRenderPass(demo->device, &rp_info, NULL, &demo->render_pass);
    assert(!err);
}
  1035. static VkShaderModule
  1036. demo_prepare_shader_module(struct demo *demo, const void *code, size_t size) {
  1037. VkShaderModuleCreateInfo moduleCreateInfo;
  1038. VkShaderModule module;
  1039. VkResult U_ASSERT_ONLY err;
  1040. moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
  1041. moduleCreateInfo.pNext = NULL;
  1042. moduleCreateInfo.codeSize = size;
  1043. moduleCreateInfo.pCode = code;
  1044. moduleCreateInfo.flags = 0;
  1045. err = vkCreateShaderModule(demo->device, &moduleCreateInfo, NULL, &module);
  1046. assert(!err);
  1047. return module;
  1048. }
  1049. static VkShaderModule demo_prepare_vs(struct demo *demo) {
  1050. size_t size = sizeof(vertShaderCode);
  1051. demo->vert_shader_module =
  1052. demo_prepare_shader_module(demo, vertShaderCode, size);
  1053. return demo->vert_shader_module;
  1054. }
  1055. static VkShaderModule demo_prepare_fs(struct demo *demo) {
  1056. size_t size = sizeof(fragShaderCode);
  1057. demo->frag_shader_module =
  1058. demo_prepare_shader_module(demo, fragShaderCode, size);
  1059. return demo->frag_shader_module;
  1060. }
/* Build the one graphics pipeline used by the demo: two shader stages
 * (vs/fs), triangle-list input, back-face culling, depth test/write on,
 * no blending, viewport and scissor left dynamic.  A throwaway pipeline
 * cache is used for creation and destroyed immediately, as are the shader
 * modules once the pipeline exists. */
static void demo_prepare_pipeline(struct demo *demo) {
    VkGraphicsPipelineCreateInfo pipeline;
    VkPipelineCacheCreateInfo pipelineCache;
    VkPipelineVertexInputStateCreateInfo vi;
    VkPipelineInputAssemblyStateCreateInfo ia;
    VkPipelineRasterizationStateCreateInfo rs;
    VkPipelineColorBlendStateCreateInfo cb;
    VkPipelineDepthStencilStateCreateInfo ds;
    VkPipelineViewportStateCreateInfo vp;
    VkPipelineMultisampleStateCreateInfo ms;
    /* Sized to hold every dynamic state in the core range; only two slots
     * (viewport, scissor) are actually used below. */
    VkDynamicState dynamicStateEnables[(VK_DYNAMIC_STATE_STENCIL_REFERENCE - VK_DYNAMIC_STATE_VIEWPORT + 1)];
    VkPipelineDynamicStateCreateInfo dynamicState;
    VkResult U_ASSERT_ONLY err;
    memset(dynamicStateEnables, 0, sizeof dynamicStateEnables);
    memset(&dynamicState, 0, sizeof dynamicState);
    dynamicState.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
    dynamicState.pDynamicStates = dynamicStateEnables;
    memset(&pipeline, 0, sizeof(pipeline));
    pipeline.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
    pipeline.layout = demo->pipeline_layout;
    /* Vertex input layout was described in demo_prepare_vertices. */
    vi = demo->vertices.vi;
    memset(&ia, 0, sizeof(ia));
    ia.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
    ia.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
    memset(&rs, 0, sizeof(rs));
    rs.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
    rs.polygonMode = VK_POLYGON_MODE_FILL;
    rs.cullMode = VK_CULL_MODE_BACK_BIT;
    rs.frontFace = VK_FRONT_FACE_CLOCKWISE;
    rs.depthClampEnable = VK_FALSE;
    rs.rasterizerDiscardEnable = VK_FALSE;
    rs.depthBiasEnable = VK_FALSE;
    rs.lineWidth = 1.0f;
    memset(&cb, 0, sizeof(cb));
    cb.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
    /* Single attachment: write all RGBA channels, blending disabled. */
    VkPipelineColorBlendAttachmentState att_state[1];
    memset(att_state, 0, sizeof(att_state));
    att_state[0].colorWriteMask = 0xf;
    att_state[0].blendEnable = VK_FALSE;
    cb.attachmentCount = 1;
    cb.pAttachments = att_state;
    memset(&vp, 0, sizeof(vp));
    vp.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
    /* Viewport and scissor are supplied at draw time via dynamic state. */
    vp.viewportCount = 1;
    dynamicStateEnables[dynamicState.dynamicStateCount++] =
        VK_DYNAMIC_STATE_VIEWPORT;
    vp.scissorCount = 1;
    dynamicStateEnables[dynamicState.dynamicStateCount++] =
        VK_DYNAMIC_STATE_SCISSOR;
    memset(&ds, 0, sizeof(ds));
    ds.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
    ds.depthTestEnable = VK_TRUE;
    ds.depthWriteEnable = VK_TRUE;
    ds.depthCompareOp = VK_COMPARE_OP_LESS_OR_EQUAL;
    ds.depthBoundsTestEnable = VK_FALSE;
    /* Stencil disabled; ops set anyway, mirrored front/back. */
    ds.back.failOp = VK_STENCIL_OP_KEEP;
    ds.back.passOp = VK_STENCIL_OP_KEEP;
    ds.back.compareOp = VK_COMPARE_OP_ALWAYS;
    ds.stencilTestEnable = VK_FALSE;
    ds.front = ds.back;
    memset(&ms, 0, sizeof(ms));
    ms.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
    ms.pSampleMask = NULL;
    ms.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;
    // Two stages: vs and fs
    pipeline.stageCount = 2;
    VkPipelineShaderStageCreateInfo shaderStages[2];
    memset(&shaderStages, 0, 2 * sizeof(VkPipelineShaderStageCreateInfo));
    shaderStages[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    shaderStages[0].stage = VK_SHADER_STAGE_VERTEX_BIT;
    shaderStages[0].module = demo_prepare_vs(demo);
    shaderStages[0].pName = "main";
    shaderStages[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    shaderStages[1].stage = VK_SHADER_STAGE_FRAGMENT_BIT;
    shaderStages[1].module = demo_prepare_fs(demo);
    shaderStages[1].pName = "main";
    pipeline.pVertexInputState = &vi;
    pipeline.pInputAssemblyState = &ia;
    pipeline.pRasterizationState = &rs;
    pipeline.pColorBlendState = &cb;
    pipeline.pMultisampleState = &ms;
    pipeline.pViewportState = &vp;
    pipeline.pDepthStencilState = &ds;
    pipeline.pStages = shaderStages;
    pipeline.renderPass = demo->render_pass;
    pipeline.pDynamicState = &dynamicState;
    memset(&pipelineCache, 0, sizeof(pipelineCache));
    pipelineCache.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
    err = vkCreatePipelineCache(demo->device, &pipelineCache, NULL,
                                &demo->pipelineCache);
    assert(!err);
    err = vkCreateGraphicsPipelines(demo->device, demo->pipelineCache, 1,
                                    &pipeline, NULL, &demo->pipeline);
    assert(!err);
    /* The cache and shader modules are only needed during creation. */
    vkDestroyPipelineCache(demo->device, demo->pipelineCache, NULL);
    vkDestroyShaderModule(demo->device, demo->frag_shader_module, NULL);
    vkDestroyShaderModule(demo->device, demo->vert_shader_module, NULL);
}
  1159. static void demo_prepare_descriptor_pool(struct demo *demo) {
  1160. const VkDescriptorPoolSize type_count = {
  1161. .type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
  1162. .descriptorCount = DEMO_TEXTURE_COUNT,
  1163. };
  1164. const VkDescriptorPoolCreateInfo descriptor_pool = {
  1165. .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
  1166. .pNext = NULL,
  1167. .maxSets = 1,
  1168. .poolSizeCount = 1,
  1169. .pPoolSizes = &type_count,
  1170. };
  1171. VkResult U_ASSERT_ONLY err;
  1172. err = vkCreateDescriptorPool(demo->device, &descriptor_pool, NULL,
  1173. &demo->desc_pool);
  1174. assert(!err);
  1175. }
  1176. static void demo_prepare_descriptor_set(struct demo *demo) {
  1177. VkDescriptorImageInfo tex_descs[DEMO_TEXTURE_COUNT];
  1178. VkWriteDescriptorSet write;
  1179. VkResult U_ASSERT_ONLY err;
  1180. uint32_t i;
  1181. VkDescriptorSetAllocateInfo alloc_info = {
  1182. .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
  1183. .pNext = NULL,
  1184. .descriptorPool = demo->desc_pool,
  1185. .descriptorSetCount = 1,
  1186. .pSetLayouts = &demo->desc_layout};
  1187. err = vkAllocateDescriptorSets(demo->device, &alloc_info, &demo->desc_set);
  1188. assert(!err);
  1189. memset(&tex_descs, 0, sizeof(tex_descs));
  1190. for (i = 0; i < DEMO_TEXTURE_COUNT; i++) {
  1191. tex_descs[i].sampler = demo->textures[i].sampler;
  1192. tex_descs[i].imageView = demo->textures[i].view;
  1193. tex_descs[i].imageLayout = VK_IMAGE_LAYOUT_GENERAL;
  1194. }
  1195. memset(&write, 0, sizeof(write));
  1196. write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
  1197. write.dstSet = demo->desc_set;
  1198. write.descriptorCount = DEMO_TEXTURE_COUNT;
  1199. write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
  1200. write.pImageInfo = tex_descs;
  1201. vkUpdateDescriptorSets(demo->device, 1, &write, 0, NULL);
  1202. }
  1203. static void demo_prepare_framebuffers(struct demo *demo) {
  1204. VkImageView attachments[2];
  1205. attachments[1] = demo->depth.view;
  1206. const VkFramebufferCreateInfo fb_info = {
  1207. .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
  1208. .pNext = NULL,
  1209. .renderPass = demo->render_pass,
  1210. .attachmentCount = 2,
  1211. .pAttachments = attachments,
  1212. .width = demo->width,
  1213. .height = demo->height,
  1214. .layers = 1,
  1215. };
  1216. VkResult U_ASSERT_ONLY err;
  1217. uint32_t i;
  1218. demo->framebuffers = (VkFramebuffer *)malloc(demo->swapchainImageCount *
  1219. sizeof(VkFramebuffer));
  1220. assert(demo->framebuffers);
  1221. for (i = 0; i < demo->swapchainImageCount; i++) {
  1222. attachments[0] = demo->buffers[i].view;
  1223. err = vkCreateFramebuffer(demo->device, &fb_info, NULL,
  1224. &demo->framebuffers[i]);
  1225. assert(!err);
  1226. }
  1227. }
/* Top-level preparation: create the command pool and the draw command
 * buffer, then build every rendering resource in dependency order
 * (swapchain buffers -> depth -> textures -> vertices -> layouts ->
 * render pass -> pipeline -> descriptors -> framebuffers). */
static void demo_prepare(struct demo *demo) {
    VkResult U_ASSERT_ONLY err;
    const VkCommandPoolCreateInfo cmd_pool_info = {
        .sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
        .pNext = NULL,
        .queueFamilyIndex = demo->graphics_queue_node_index,
        /* Allow the draw command buffer to be re-recorded individually. */
        .flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
    };
    err = vkCreateCommandPool(demo->device, &cmd_pool_info, NULL,
                              &demo->cmd_pool);
    assert(!err);
    const VkCommandBufferAllocateInfo cmd = {
        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
        .pNext = NULL,
        .commandPool = demo->cmd_pool,
        .level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
        .commandBufferCount = 1,
    };
    err = vkAllocateCommandBuffers(demo->device, &cmd, &demo->draw_cmd);
    assert(!err);
    /* Order matters: each step consumes handles produced by earlier ones. */
    demo_prepare_buffers(demo);
    demo_prepare_depth(demo);
    demo_prepare_textures(demo);
    demo_prepare_vertices(demo);
    demo_prepare_descriptor_layout(demo);
    demo_prepare_render_pass(demo);
    demo_prepare_pipeline(demo);
    demo_prepare_descriptor_pool(demo);
    demo_prepare_descriptor_set(demo);
    demo_prepare_framebuffers(demo);
}
  1259. static void demo_error_callback(int error, const char* description) {
  1260. printf("GLFW error: %s\n", description);
  1261. fflush(stdout);
  1262. }
  1263. static void demo_key_callback(GLFWwindow* window, int key, int scancode, int action, int mods) {
  1264. if (key == GLFW_KEY_ESCAPE && action == GLFW_RELEASE)
  1265. glfwSetWindowShouldClose(window, GLFW_TRUE);
  1266. }
  1267. static void demo_refresh_callback(GLFWwindow* window) {
  1268. struct demo* demo = glfwGetWindowUserPointer(window);
  1269. demo_draw(demo);
  1270. }
  1271. static void demo_resize_callback(GLFWwindow* window, int width, int height) {
  1272. struct demo* demo = glfwGetWindowUserPointer(window);
  1273. demo->width = width;
  1274. demo->height = height;
  1275. demo_resize(demo);
  1276. }
/* Main loop: pump GLFW events and draw until the window is asked to close
 * or the configured frame count is reached.  Each frame also bounces
 * demo->depthStencil between 0.8 and 0.99 by +/-0.001 steps. */
static void demo_run(struct demo *demo) {
    while (!glfwWindowShouldClose(demo->window)) {
        glfwPollEvents();
        demo_draw(demo);
        /* Reverse direction at the limits, then step. */
        if (demo->depthStencil > 0.99f)
            demo->depthIncrement = -0.001f;
        if (demo->depthStencil < 0.8f)
            demo->depthIncrement = 0.001f;
        demo->depthStencil += demo->depthIncrement;
        // Wait for work to finish before updating MVP.
        vkDeviceWaitIdle(demo->device);
        demo->curFrame++;
        /* frameCount == INT32_MAX means "run forever". */
        if (demo->frameCount != INT32_MAX && demo->curFrame == demo->frameCount)
            glfwSetWindowShouldClose(demo->window, GLFW_TRUE);
    }
}
  1293. static void demo_create_window(struct demo *demo) {
  1294. glfwWindowHint(GLFW_CLIENT_API, GLFW_NO_API);
  1295. demo->window = glfwCreateWindow(demo->width,
  1296. demo->height,
  1297. APP_LONG_NAME,
  1298. NULL,
  1299. NULL);
  1300. if (!demo->window) {
  1301. // It didn't work, so try to give a useful error:
  1302. printf("Cannot create a window in which to draw!\n");
  1303. fflush(stdout);
  1304. exit(1);
  1305. }
  1306. glfwSetWindowUserPointer(demo->window, demo);
  1307. glfwSetWindowRefreshCallback(demo->window, demo_refresh_callback);
  1308. glfwSetFramebufferSizeCallback(demo->window, demo_resize_callback);
  1309. glfwSetKeyCallback(demo->window, demo_key_callback);
  1310. }
  1311. /*
  1312. * Return 1 (true) if all layer names specified in check_names
  1313. * can be found in given layer properties.
  1314. */
  1315. static VkBool32 demo_check_layers(uint32_t check_count, const char **check_names,
  1316. uint32_t layer_count,
  1317. VkLayerProperties *layers) {
  1318. uint32_t i, j;
  1319. for (i = 0; i < check_count; i++) {
  1320. VkBool32 found = 0;
  1321. for (j = 0; j < layer_count; j++) {
  1322. if (!strcmp(check_names[i], layers[j].layerName)) {
  1323. found = 1;
  1324. break;
  1325. }
  1326. }
  1327. if (!found) {
  1328. fprintf(stderr, "Cannot find layer: %s\n", check_names[i]);
  1329. return 0;
  1330. }
  1331. }
  1332. return 1;
  1333. }
/*
 * Create the Vulkan instance and select the physical device.
 *
 * Sequence:
 *  1. When demo->validate is set, find a usable validation-layer set:
 *     prefer the VK_LAYER_LUNARG_standard_validation meta-layer, and
 *     fall back to the equivalent list of individual layers.
 *  2. Gather the instance extensions GLFW needs for surface creation,
 *     plus VK_EXT_debug_report when validating.
 *  3. Create the VkInstance and load instance-level entry points.
 *  4. Take the first enumerated physical device (tri demo keeps it
 *     simple) and reload entry points at device scope.
 *  5. Require VK_KHR_swapchain, install the debug-report callback when
 *     validating, and cache device properties/queue families/features.
 *
 * Exits via ERR_EXIT on any unrecoverable failure.
 */
static void demo_init_vk(struct demo *demo) {
    VkResult err;
    uint32_t i = 0;
    uint32_t required_extension_count = 0;
    uint32_t instance_extension_count = 0;
    uint32_t instance_layer_count = 0;
    uint32_t validation_layer_count = 0;
    const char **required_extensions = NULL;
    const char **instance_validation_layers = NULL;
    demo->enabled_extension_count = 0;
    demo->enabled_layer_count = 0;

    // Preferred: single meta-layer that enables all standard validation.
    char *instance_validation_layers_alt1[] = {
        "VK_LAYER_LUNARG_standard_validation"
    };

    // Fallback: the individual layers that compose standard validation.
    char *instance_validation_layers_alt2[] = {
        "VK_LAYER_GOOGLE_threading", "VK_LAYER_LUNARG_parameter_validation",
        "VK_LAYER_LUNARG_object_tracker", "VK_LAYER_LUNARG_image",
        "VK_LAYER_LUNARG_core_validation", "VK_LAYER_LUNARG_swapchain",
        "VK_LAYER_GOOGLE_unique_objects"
    };

    /* Look for validation layers */
    VkBool32 validation_found = 0;
    if (demo->validate) {
        // First call with NULL obtains the layer count only.
        err = vkEnumerateInstanceLayerProperties(&instance_layer_count, NULL);
        assert(!err);

        instance_validation_layers = (const char**) instance_validation_layers_alt1;
        if (instance_layer_count > 0) {
            VkLayerProperties *instance_layers =
                malloc(sizeof (VkLayerProperties) * instance_layer_count);
            err = vkEnumerateInstanceLayerProperties(&instance_layer_count,
                                                     instance_layers);
            assert(!err);

            // Try the meta-layer first.
            validation_found = demo_check_layers(
                ARRAY_SIZE(instance_validation_layers_alt1),
                instance_validation_layers, instance_layer_count,
                instance_layers);
            if (validation_found) {
                demo->enabled_layer_count = ARRAY_SIZE(instance_validation_layers_alt1);
                demo->enabled_layers[0] = "VK_LAYER_LUNARG_standard_validation";
                validation_layer_count = 1;
            } else {
                // use alternative set of validation layers
                instance_validation_layers =
                    (const char**) instance_validation_layers_alt2;
                demo->enabled_layer_count = ARRAY_SIZE(instance_validation_layers_alt2);
                validation_found = demo_check_layers(
                    ARRAY_SIZE(instance_validation_layers_alt2),
                    instance_validation_layers, instance_layer_count,
                    instance_layers);
                validation_layer_count =
                    ARRAY_SIZE(instance_validation_layers_alt2);
                for (i = 0; i < validation_layer_count; i++) {
                    demo->enabled_layers[i] = instance_validation_layers[i];
                }
            }
            free(instance_layers);
        }

        if (!validation_found) {
            ERR_EXIT("vkEnumerateInstanceLayerProperties failed to find "
                     "required validation layer.\n\n"
                     "Please look at the Getting Started guide for additional "
                     "information.\n",
                     "vkCreateInstance Failure");
        }
    }

    /* Look for instance extensions */
    // GLFW reports the platform-specific surface extensions it requires.
    required_extensions = glfwGetRequiredInstanceExtensions(&required_extension_count);
    if (!required_extensions) {
        ERR_EXIT("glfwGetRequiredInstanceExtensions failed to find the "
                 "platform surface extensions.\n\nDo you have a compatible "
                 "Vulkan installable client driver (ICD) installed?\nPlease "
                 "look at the Getting Started guide for additional "
                 "information.\n",
                 "vkCreateInstance Failure");
    }

    // extension_names has a fixed capacity of 64 entries (see asserts).
    for (i = 0; i < required_extension_count; i++) {
        demo->extension_names[demo->enabled_extension_count++] = required_extensions[i];
        assert(demo->enabled_extension_count < 64);
    }

    err = vkEnumerateInstanceExtensionProperties(
        NULL, &instance_extension_count, NULL);
    assert(!err);

    if (instance_extension_count > 0) {
        VkExtensionProperties *instance_extensions =
            malloc(sizeof(VkExtensionProperties) * instance_extension_count);
        err = vkEnumerateInstanceExtensionProperties(
            NULL, &instance_extension_count, instance_extensions);
        assert(!err);
        // Enable the debug-report extension only when validating.
        for (i = 0; i < instance_extension_count; i++) {
            if (!strcmp(VK_EXT_DEBUG_REPORT_EXTENSION_NAME,
                        instance_extensions[i].extensionName)) {
                if (demo->validate) {
                    demo->extension_names[demo->enabled_extension_count++] =
                        VK_EXT_DEBUG_REPORT_EXTENSION_NAME;
                }
            }
            assert(demo->enabled_extension_count < 64);
        }
        free(instance_extensions);
    }

    const VkApplicationInfo app = {
        .sType = VK_STRUCTURE_TYPE_APPLICATION_INFO,
        .pNext = NULL,
        .pApplicationName = APP_SHORT_NAME,
        .applicationVersion = 0,
        .pEngineName = APP_SHORT_NAME,
        .engineVersion = 0,
        .apiVersion = VK_API_VERSION_1_0,
    };
    VkInstanceCreateInfo inst_info = {
        .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
        .pNext = NULL,
        .pApplicationInfo = &app,
        .enabledLayerCount = demo->enabled_layer_count,
        .ppEnabledLayerNames = (const char *const *)instance_validation_layers,
        .enabledExtensionCount = demo->enabled_extension_count,
        .ppEnabledExtensionNames = (const char *const *)demo->extension_names,
    };

    uint32_t gpu_count;

    err = vkCreateInstance(&inst_info, NULL, &demo->inst);
    if (err == VK_ERROR_INCOMPATIBLE_DRIVER) {
        ERR_EXIT("Cannot find a compatible Vulkan installable client driver "
                 "(ICD).\n\nPlease look at the Getting Started guide for "
                 "additional information.\n",
                 "vkCreateInstance Failure");
    } else if (err == VK_ERROR_EXTENSION_NOT_PRESENT) {
        ERR_EXIT("Cannot find a specified extension library"
                 ".\nMake sure your layers path is set appropriately\n",
                 "vkCreateInstance Failure");
    } else if (err) {
        ERR_EXIT("vkCreateInstance failed.\n\nDo you have a compatible Vulkan "
                 "installable client driver (ICD) installed?\nPlease look at "
                 "the Getting Started guide for additional information.\n",
                 "vkCreateInstance Failure");
    }

    // Load instance-level entry points now that the instance exists.
    gladLoadVulkanUserPtr(NULL, (GLADuserptrloadfunc) glfwGetInstanceProcAddress, demo->inst);

    /* Make initial call to query gpu_count, then second call for gpu info*/
    err = vkEnumeratePhysicalDevices(demo->inst, &gpu_count, NULL);
    assert(!err && gpu_count > 0);

    if (gpu_count > 0) {
        VkPhysicalDevice *physical_devices =
            malloc(sizeof(VkPhysicalDevice) * gpu_count);
        err = vkEnumeratePhysicalDevices(demo->inst, &gpu_count,
                                         physical_devices);
        assert(!err);
        /* For tri demo we just grab the first physical device */
        demo->gpu = physical_devices[0];
        free(physical_devices);
    } else {
        ERR_EXIT("vkEnumeratePhysicalDevices reported zero accessible devices."
                 "\n\nDo you have a compatible Vulkan installable client"
                 " driver (ICD) installed?\nPlease look at the Getting Started"
                 " guide for additional information.\n",
                 "vkEnumeratePhysicalDevices Failure");
    }

    // Re-load entry points with the physical device so glad can resolve
    // device-level functions.
    gladLoadVulkanUserPtr(demo->gpu, (GLADuserptrloadfunc) glfwGetInstanceProcAddress, demo->inst);

    /* Look for device extensions */
    uint32_t device_extension_count = 0;
    VkBool32 swapchainExtFound = 0;
    // Reuse the array: from here on it holds device extensions only.
    demo->enabled_extension_count = 0;

    err = vkEnumerateDeviceExtensionProperties(demo->gpu, NULL,
                                               &device_extension_count, NULL);
    assert(!err);

    if (device_extension_count > 0) {
        VkExtensionProperties *device_extensions =
            malloc(sizeof(VkExtensionProperties) * device_extension_count);
        err = vkEnumerateDeviceExtensionProperties(
            demo->gpu, NULL, &device_extension_count, device_extensions);
        assert(!err);

        for (i = 0; i < device_extension_count; i++) {
            if (!strcmp(VK_KHR_SWAPCHAIN_EXTENSION_NAME,
                        device_extensions[i].extensionName)) {
                swapchainExtFound = 1;
                demo->extension_names[demo->enabled_extension_count++] =
                    VK_KHR_SWAPCHAIN_EXTENSION_NAME;
            }
            assert(demo->enabled_extension_count < 64);
        }
        free(device_extensions);
    }

    if (!swapchainExtFound) {
        ERR_EXIT("vkEnumerateDeviceExtensionProperties failed to find "
                 "the " VK_KHR_SWAPCHAIN_EXTENSION_NAME
                 " extension.\n\nDo you have a compatible "
                 "Vulkan installable client driver (ICD) installed?\nPlease "
                 "look at the Getting Started guide for additional "
                 "information.\n",
                 "vkCreateInstance Failure");
    }

    if (demo->validate) {
        // Route validation messages to either the break-into-debugger
        // callback or the plain logging callback.
        VkDebugReportCallbackCreateInfoEXT dbgCreateInfo;
        dbgCreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
        dbgCreateInfo.flags =
            VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT;
        dbgCreateInfo.pfnCallback = demo->use_break ? BreakCallback : dbgFunc;
        dbgCreateInfo.pUserData = demo;
        dbgCreateInfo.pNext = NULL;
        err = vkCreateDebugReportCallbackEXT(demo->inst, &dbgCreateInfo, NULL,
                                             &demo->msg_callback);
        switch (err) {
        case VK_SUCCESS:
            break;
        case VK_ERROR_OUT_OF_HOST_MEMORY:
            ERR_EXIT("CreateDebugReportCallback: out of host memory\n",
                     "CreateDebugReportCallback Failure");
            break;
        default:
            ERR_EXIT("CreateDebugReportCallback: unknown failure\n",
                     "CreateDebugReportCallback Failure");
            break;
        }
    }

    vkGetPhysicalDeviceProperties(demo->gpu, &demo->gpu_props);

    // Query with NULL data to get count
    vkGetPhysicalDeviceQueueFamilyProperties(demo->gpu, &demo->queue_count,
                                             NULL);

    demo->queue_props = (VkQueueFamilyProperties *)malloc(
        demo->queue_count * sizeof(VkQueueFamilyProperties));
    vkGetPhysicalDeviceQueueFamilyProperties(demo->gpu, &demo->queue_count,
                                             demo->queue_props);
    assert(demo->queue_count >= 1);

    vkGetPhysicalDeviceFeatures(demo->gpu, &demo->gpu_features);

    // Graphics queue and MemMgr queue can be separate.
    // TODO: Add support for separate queues, including synchronization,
    //       and appropriate tracking for QueueSubmit
}
  1560. static void demo_init_device(struct demo *demo) {
  1561. VkResult U_ASSERT_ONLY err;
  1562. float queue_priorities[1] = {0.0};
  1563. const VkDeviceQueueCreateInfo queue = {
  1564. .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
  1565. .pNext = NULL,
  1566. .queueFamilyIndex = demo->graphics_queue_node_index,
  1567. .queueCount = 1,
  1568. .pQueuePriorities = queue_priorities};
  1569. VkPhysicalDeviceFeatures features;
  1570. memset(&features, 0, sizeof(features));
  1571. if (demo->gpu_features.shaderClipDistance) {
  1572. features.shaderClipDistance = VK_TRUE;
  1573. }
  1574. VkDeviceCreateInfo device = {
  1575. .sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
  1576. .pNext = NULL,
  1577. .queueCreateInfoCount = 1,
  1578. .pQueueCreateInfos = &queue,
  1579. .enabledLayerCount = 0,
  1580. .ppEnabledLayerNames = NULL,
  1581. .enabledExtensionCount = demo->enabled_extension_count,
  1582. .ppEnabledExtensionNames = (const char *const *)demo->extension_names,
  1583. .pEnabledFeatures = &features,
  1584. };
  1585. err = vkCreateDevice(demo->gpu, &device, NULL, &demo->device);
  1586. assert(!err);
  1587. }
  1588. static void demo_init_vk_swapchain(struct demo *demo) {
  1589. VkResult U_ASSERT_ONLY err;
  1590. uint32_t i;
  1591. // Create a WSI surface for the window:
  1592. glfwCreateWindowSurface(demo->inst, demo->window, NULL, &demo->surface);
  1593. // Iterate over each queue to learn whether it supports presenting:
  1594. VkBool32 *supportsPresent =
  1595. (VkBool32 *)malloc(demo->queue_count * sizeof(VkBool32));
  1596. for (i = 0; i < demo->queue_count; i++) {
  1597. vkGetPhysicalDeviceSurfaceSupportKHR(demo->gpu, i, demo->surface,
  1598. &supportsPresent[i]);
  1599. }
  1600. // Search for a graphics and a present queue in the array of queue
  1601. // families, try to find one that supports both
  1602. uint32_t graphicsQueueNodeIndex = UINT32_MAX;
  1603. uint32_t presentQueueNodeIndex = UINT32_MAX;
  1604. for (i = 0; i < demo->queue_count; i++) {
  1605. if ((demo->queue_props[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0) {
  1606. if (graphicsQueueNodeIndex == UINT32_MAX) {
  1607. graphicsQueueNodeIndex = i;
  1608. }
  1609. if (supportsPresent[i] == VK_TRUE) {
  1610. graphicsQueueNodeIndex = i;
  1611. presentQueueNodeIndex = i;
  1612. break;
  1613. }
  1614. }
  1615. }
  1616. if (presentQueueNodeIndex == UINT32_MAX) {
  1617. // If didn't find a queue that supports both graphics and present, then
  1618. // find a separate present queue.
  1619. for (i = 0; i < demo->queue_count; ++i) {
  1620. if (supportsPresent[i] == VK_TRUE) {
  1621. presentQueueNodeIndex = i;
  1622. break;
  1623. }
  1624. }
  1625. }
  1626. free(supportsPresent);
  1627. // Generate error if could not find both a graphics and a present queue
  1628. if (graphicsQueueNodeIndex == UINT32_MAX ||
  1629. presentQueueNodeIndex == UINT32_MAX) {
  1630. ERR_EXIT("Could not find a graphics and a present queue\n",
  1631. "Swapchain Initialization Failure");
  1632. }
  1633. // TODO: Add support for separate queues, including presentation,
  1634. // synchronization, and appropriate tracking for QueueSubmit.
  1635. // NOTE: While it is possible for an application to use a separate graphics
  1636. // and a present queues, this demo program assumes it is only using
  1637. // one:
  1638. if (graphicsQueueNodeIndex != presentQueueNodeIndex) {
  1639. ERR_EXIT("Could not find a common graphics and a present queue\n",
  1640. "Swapchain Initialization Failure");
  1641. }
  1642. demo->graphics_queue_node_index = graphicsQueueNodeIndex;
  1643. demo_init_device(demo);
  1644. vkGetDeviceQueue(demo->device, demo->graphics_queue_node_index, 0,
  1645. &demo->queue);
  1646. // Get the list of VkFormat's that are supported:
  1647. uint32_t formatCount;
  1648. err = vkGetPhysicalDeviceSurfaceFormatsKHR(demo->gpu, demo->surface,
  1649. &formatCount, NULL);
  1650. assert(!err);
  1651. VkSurfaceFormatKHR *surfFormats =
  1652. (VkSurfaceFormatKHR *)malloc(formatCount * sizeof(VkSurfaceFormatKHR));
  1653. err = vkGetPhysicalDeviceSurfaceFormatsKHR(demo->gpu, demo->surface,
  1654. &formatCount, surfFormats);
  1655. assert(!err);
  1656. // If the format list includes just one entry of VK_FORMAT_UNDEFINED,
  1657. // the surface has no preferred format. Otherwise, at least one
  1658. // supported format will be returned.
  1659. if (formatCount == 1 && surfFormats[0].format == VK_FORMAT_UNDEFINED) {
  1660. demo->format = VK_FORMAT_B8G8R8A8_UNORM;
  1661. } else {
  1662. assert(formatCount >= 1);
  1663. demo->format = surfFormats[0].format;
  1664. }
  1665. demo->color_space = surfFormats[0].colorSpace;
  1666. demo->curFrame = 0;
  1667. // Get Memory information and properties
  1668. vkGetPhysicalDeviceMemoryProperties(demo->gpu, &demo->memory_properties);
  1669. }
  1670. static void demo_init_connection(struct demo *demo) {
  1671. glfwSetErrorCallback(demo_error_callback);
  1672. if (!glfwInit()) {
  1673. printf("Cannot initialize GLFW.\nExiting ...\n");
  1674. fflush(stdout);
  1675. exit(1);
  1676. }
  1677. if (!glfwVulkanSupported()) {
  1678. printf("GLFW failed to find the Vulkan loader.\nExiting ...\n");
  1679. fflush(stdout);
  1680. exit(1);
  1681. }
  1682. gladLoadVulkanUserPtr(NULL, (GLADuserptrloadfunc) glfwGetInstanceProcAddress, NULL);
  1683. }
  1684. static void demo_init(struct demo *demo, const int argc, const char *argv[])
  1685. {
  1686. int i;
  1687. memset(demo, 0, sizeof(*demo));
  1688. demo->frameCount = INT32_MAX;
  1689. for (i = 1; i < argc; i++) {
  1690. if (strcmp(argv[i], "--use_staging") == 0) {
  1691. demo->use_staging_buffer = true;
  1692. continue;
  1693. }
  1694. if (strcmp(argv[i], "--break") == 0) {
  1695. demo->use_break = true;
  1696. continue;
  1697. }
  1698. if (strcmp(argv[i], "--validate") == 0) {
  1699. demo->validate = true;
  1700. continue;
  1701. }
  1702. if (strcmp(argv[i], "--c") == 0 && demo->frameCount == INT32_MAX &&
  1703. i < argc - 1 && sscanf(argv[i + 1], "%d", &demo->frameCount) == 1 &&
  1704. demo->frameCount >= 0) {
  1705. i++;
  1706. continue;
  1707. }
  1708. fprintf(stderr, "Usage:\n %s [--use_staging] [--validate] [--break] "
  1709. "[--c <framecount>]\n",
  1710. APP_SHORT_NAME);
  1711. fflush(stderr);
  1712. exit(1);
  1713. }
  1714. demo_init_connection(demo);
  1715. demo_init_vk(demo);
  1716. demo->width = 300;
  1717. demo->height = 300;
  1718. demo->depthStencil = 1.0;
  1719. demo->depthIncrement = -0.01f;
  1720. }
/*
 * Destroy every Vulkan object the demo created, in reverse dependency
 * order (framebuffers before the render pass, views before images,
 * device-level objects before the device, the device before the
 * instance), then shut down GLFW.  The ordering below is significant —
 * do not reorder these calls casually.
 */
static void demo_cleanup(struct demo *demo) {
    uint32_t i;

    for (i = 0; i < demo->swapchainImageCount; i++) {
        vkDestroyFramebuffer(demo->device, demo->framebuffers[i], NULL);
    }
    free(demo->framebuffers);
    vkDestroyDescriptorPool(demo->device, demo->desc_pool, NULL);

    // setup_cmd is only allocated transiently; it may already be gone.
    if (demo->setup_cmd) {
        vkFreeCommandBuffers(demo->device, demo->cmd_pool, 1, &demo->setup_cmd);
    }
    vkFreeCommandBuffers(demo->device, demo->cmd_pool, 1, &demo->draw_cmd);
    vkDestroyCommandPool(demo->device, demo->cmd_pool, NULL);

    vkDestroyPipeline(demo->device, demo->pipeline, NULL);
    vkDestroyRenderPass(demo->device, demo->render_pass, NULL);
    vkDestroyPipelineLayout(demo->device, demo->pipeline_layout, NULL);
    vkDestroyDescriptorSetLayout(demo->device, demo->desc_layout, NULL);

    vkDestroyBuffer(demo->device, demo->vertices.buf, NULL);
    vkFreeMemory(demo->device, demo->vertices.mem, NULL);

    // Each texture owns a view, an image, its backing memory, and a sampler.
    for (i = 0; i < DEMO_TEXTURE_COUNT; i++) {
        vkDestroyImageView(demo->device, demo->textures[i].view, NULL);
        vkDestroyImage(demo->device, demo->textures[i].image, NULL);
        vkFreeMemory(demo->device, demo->textures[i].mem, NULL);
        vkDestroySampler(demo->device, demo->textures[i].sampler, NULL);
    }

    // Swapchain image views are destroyed here; the images themselves are
    // owned by the swapchain and go away with vkDestroySwapchainKHR below.
    for (i = 0; i < demo->swapchainImageCount; i++) {
        vkDestroyImageView(demo->device, demo->buffers[i].view, NULL);
    }

    vkDestroyImageView(demo->device, demo->depth.view, NULL);
    vkDestroyImage(demo->device, demo->depth.image, NULL);
    vkFreeMemory(demo->device, demo->depth.mem, NULL);

    vkDestroySwapchainKHR(demo->device, demo->swapchain, NULL);
    free(demo->buffers);

    vkDestroyDevice(demo->device, NULL);
    if (demo->validate) {
        vkDestroyDebugReportCallbackEXT(demo->inst, demo->msg_callback, NULL);
    }
    vkDestroySurfaceKHR(demo->inst, demo->surface, NULL);
    vkDestroyInstance(demo->inst, NULL);

    free(demo->queue_props);

    glfwDestroyWindow(demo->window);
    glfwTerminate();
}
/*
 * Handle a window resize: tear down every size-dependent Vulkan object
 * (a subset of demo_cleanup — the swapchain, surface, device, and
 * instance survive), then run demo_prepare() to rebuild everything at
 * the new size.
 *
 * NOTE(review): there is no guard against this running before the first
 * demo_prepare() completes — presumably GLFW only delivers resize events
 * after the window exists and prepare has run; confirm if resize can
 * race initial setup.
 */
static void demo_resize(struct demo *demo) {
    uint32_t i;

    // In order to properly resize the window, we must re-create the swapchain
    // AND redo the command buffers, etc.
    //
    // First, perform part of the demo_cleanup() function:
    for (i = 0; i < demo->swapchainImageCount; i++) {
        vkDestroyFramebuffer(demo->device, demo->framebuffers[i], NULL);
    }
    free(demo->framebuffers);
    vkDestroyDescriptorPool(demo->device, demo->desc_pool, NULL);

    if (demo->setup_cmd) {
        vkFreeCommandBuffers(demo->device, demo->cmd_pool, 1, &demo->setup_cmd);
        // Null the handle so demo_prepare() allocates a fresh one.
        demo->setup_cmd = VK_NULL_HANDLE;
    }
    vkFreeCommandBuffers(demo->device, demo->cmd_pool, 1, &demo->draw_cmd);
    vkDestroyCommandPool(demo->device, demo->cmd_pool, NULL);

    vkDestroyPipeline(demo->device, demo->pipeline, NULL);
    vkDestroyRenderPass(demo->device, demo->render_pass, NULL);
    vkDestroyPipelineLayout(demo->device, demo->pipeline_layout, NULL);
    vkDestroyDescriptorSetLayout(demo->device, demo->desc_layout, NULL);

    vkDestroyBuffer(demo->device, demo->vertices.buf, NULL);
    vkFreeMemory(demo->device, demo->vertices.mem, NULL);

    for (i = 0; i < DEMO_TEXTURE_COUNT; i++) {
        vkDestroyImageView(demo->device, demo->textures[i].view, NULL);
        vkDestroyImage(demo->device, demo->textures[i].image, NULL);
        vkFreeMemory(demo->device, demo->textures[i].mem, NULL);
        vkDestroySampler(demo->device, demo->textures[i].sampler, NULL);
    }

    for (i = 0; i < demo->swapchainImageCount; i++) {
        vkDestroyImageView(demo->device, demo->buffers[i].view, NULL);
    }

    vkDestroyImageView(demo->device, demo->depth.view, NULL);
    vkDestroyImage(demo->device, demo->depth.image, NULL);
    vkFreeMemory(demo->device, demo->depth.mem, NULL);

    // The old swapchain itself is NOT destroyed here; demo_prepare()
    // passes it as oldSwapchain when creating the replacement.
    free(demo->buffers);

    // Second, re-perform the demo_prepare() function, which will re-create the
    // swapchain:
    demo_prepare(demo);
}
  1803. int main(const int argc, const char *argv[]) {
  1804. struct demo demo;
  1805. demo_init(&demo, argc, argv);
  1806. demo_create_window(&demo);
  1807. demo_init_vk_swapchain(&demo);
  1808. demo_prepare(&demo);
  1809. demo_run(&demo);
  1810. demo_cleanup(&demo);
  1811. return validation_error;
  1812. }