  1. /*
  2. * Copyright (c) 2015-2016 The Khronos Group Inc.
  3. * Copyright (c) 2015-2016 Valve Corporation
  4. * Copyright (c) 2015-2016 LunarG, Inc.
  5. *
  6. * Licensed under the Apache License, Version 2.0 (the "License");
  7. * you may not use this file except in compliance with the License.
  8. * You may obtain a copy of the License at
  9. *
  10. * http://www.apache.org/licenses/LICENSE-2.0
  11. *
  12. * Unless required by applicable law or agreed to in writing, software
  13. * distributed under the License is distributed on an "AS IS" BASIS,
  14. * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
  15. * See the License for the specific language governing permissions and
  16. * limitations under the License.
  17. *
  18. * Author: Chia-I Wu <[email protected]>
  19. * Author: Cody Northrop <[email protected]>
  20. * Author: Courtney Goeltzenleuchter <[email protected]>
  21. * Author: Ian Elliott <[email protected]>
  22. * Author: Jon Ashburn <[email protected]>
  23. * Author: Piers Daniell <[email protected]>
  24. * Author: Gwan-gyeong Mun <[email protected]>
  25. * Porter: Camilla Löwy <[email protected]>
  26. */
  27. /*
  28. * Draw a textured triangle with depth testing. This is written against Intel
  29. * ICD. It does not do state transition nor object memory binding like it
  30. * should. It also does no error checking.
  31. */
  32. #include <stdio.h>
  33. #include <stdlib.h>
  34. #include <string.h>
  35. #include <stdbool.h>
  36. #include <assert.h>
  37. #include <signal.h>
  38. #ifdef _WIN32
  39. #include <windows.h>
  40. #endif
  41. #include <glad/vulkan.h>
  42. #define GLFW_INCLUDE_NONE
  43. #include <GLFW/glfw3.h>
  44. #define DEMO_TEXTURE_COUNT 1
  45. #define VERTEX_BUFFER_BIND_ID 0
  46. #define APP_SHORT_NAME "tri"
  47. #define APP_LONG_NAME "The Vulkan Triangle Demo Program"
  48. #define ARRAY_SIZE(a) (sizeof(a) / sizeof(a[0]))
  49. #if defined(NDEBUG) && defined(__GNUC__)
  50. #define U_ASSERT_ONLY __attribute__((unused))
  51. #else
  52. #define U_ASSERT_ONLY
  53. #endif
  54. #define ERR_EXIT(err_msg, err_class) \
  55. do { \
  56. printf(err_msg); \
  57. fflush(stdout); \
  58. exit(1); \
  59. } while (0)
/*
 * Precompiled SPIR-V binary for the demo's fragment shader (first word
 * 0x07230203 is the SPIR-V magic number).  The embedded OpName strings
 * visible in the words below ("main", "uFragColor", "tex", "texcoord")
 * indicate it samples a bound texture at the interpolated texcoord and
 * writes the result to the color output.  Do not edit by hand; this must
 * stay byte-identical to the compiler's output.
 */
static const uint32_t fragShaderCode[] = {
    0x07230203,0x00010000,0x00080007,0x00000014,0x00000000,0x00020011,0x00000001,0x0006000b,
    0x00000001,0x4c534c47,0x6474732e,0x3035342e,0x00000000,0x0003000e,0x00000000,0x00000001,
    0x0007000f,0x00000004,0x00000004,0x6e69616d,0x00000000,0x00000009,0x00000011,0x00030010,
    0x00000004,0x00000007,0x00030003,0x00000002,0x00000190,0x00090004,0x415f4c47,0x735f4252,
    0x72617065,0x5f657461,0x64616873,0x6f5f7265,0x63656a62,0x00007374,0x00090004,0x415f4c47,
    0x735f4252,0x69646168,0x6c5f676e,0x75676e61,0x5f656761,0x70303234,0x006b6361,0x00040005,
    0x00000004,0x6e69616d,0x00000000,0x00050005,0x00000009,0x61724675,0x6c6f4367,0x0000726f,
    0x00030005,0x0000000d,0x00786574,0x00050005,0x00000011,0x63786574,0x64726f6f,0x00000000,
    0x00040047,0x00000009,0x0000001e,0x00000000,0x00040047,0x0000000d,0x00000022,0x00000000,
    0x00040047,0x0000000d,0x00000021,0x00000000,0x00040047,0x00000011,0x0000001e,0x00000000,
    0x00020013,0x00000002,0x00030021,0x00000003,0x00000002,0x00030016,0x00000006,0x00000020,
    0x00040017,0x00000007,0x00000006,0x00000004,0x00040020,0x00000008,0x00000003,0x00000007,
    0x0004003b,0x00000008,0x00000009,0x00000003,0x00090019,0x0000000a,0x00000006,0x00000001,
    0x00000000,0x00000000,0x00000000,0x00000001,0x00000000,0x0003001b,0x0000000b,0x0000000a,
    0x00040020,0x0000000c,0x00000000,0x0000000b,0x0004003b,0x0000000c,0x0000000d,0x00000000,
    0x00040017,0x0000000f,0x00000006,0x00000002,0x00040020,0x00000010,0x00000001,0x0000000f,
    0x0004003b,0x00000010,0x00000011,0x00000001,0x00050036,0x00000002,0x00000004,0x00000000,
    0x00000003,0x000200f8,0x00000005,0x0004003d,0x0000000b,0x0000000e,0x0000000d,0x0004003d,
    0x0000000f,0x00000012,0x00000011,0x00050057,0x00000007,0x00000013,0x0000000e,0x00000012,
    0x0003003e,0x00000009,0x00000013,0x000100fd,0x00010038
};
/*
 * Precompiled SPIR-V binary for the demo's vertex shader (first word
 * 0x07230203 is the SPIR-V magic number).  The embedded OpName strings
 * ("main", "texcoord", "attr", "gl_PerVertex", "pos") indicate it forwards
 * a per-vertex texcoord attribute and writes the position attribute to
 * gl_Position.  Do not edit by hand; this must stay byte-identical to the
 * compiler's output.
 */
static const uint32_t vertShaderCode[] = {
    0x07230203,0x00010000,0x00080007,0x00000018,0x00000000,0x00020011,0x00000001,0x0006000b,
    0x00000001,0x4c534c47,0x6474732e,0x3035342e,0x00000000,0x0003000e,0x00000000,0x00000001,
    0x0009000f,0x00000000,0x00000004,0x6e69616d,0x00000000,0x00000009,0x0000000b,0x00000010,
    0x00000014,0x00030003,0x00000002,0x00000190,0x00090004,0x415f4c47,0x735f4252,0x72617065,
    0x5f657461,0x64616873,0x6f5f7265,0x63656a62,0x00007374,0x00090004,0x415f4c47,0x735f4252,
    0x69646168,0x6c5f676e,0x75676e61,0x5f656761,0x70303234,0x006b6361,0x00040005,0x00000004,
    0x6e69616d,0x00000000,0x00050005,0x00000009,0x63786574,0x64726f6f,0x00000000,0x00040005,
    0x0000000b,0x72747461,0x00000000,0x00060005,0x0000000e,0x505f6c67,0x65567265,0x78657472,
    0x00000000,0x00060006,0x0000000e,0x00000000,0x505f6c67,0x7469736f,0x006e6f69,0x00030005,
    0x00000010,0x00000000,0x00030005,0x00000014,0x00736f70,0x00040047,0x00000009,0x0000001e,
    0x00000000,0x00040047,0x0000000b,0x0000001e,0x00000001,0x00050048,0x0000000e,0x00000000,
    0x0000000b,0x00000000,0x00030047,0x0000000e,0x00000002,0x00040047,0x00000014,0x0000001e,
    0x00000000,0x00020013,0x00000002,0x00030021,0x00000003,0x00000002,0x00030016,0x00000006,
    0x00000020,0x00040017,0x00000007,0x00000006,0x00000002,0x00040020,0x00000008,0x00000003,
    0x00000007,0x0004003b,0x00000008,0x00000009,0x00000003,0x00040020,0x0000000a,0x00000001,
    0x00000007,0x0004003b,0x0000000a,0x0000000b,0x00000001,0x00040017,0x0000000d,0x00000006,
    0x00000004,0x0003001e,0x0000000e,0x0000000d,0x00040020,0x0000000f,0x00000003,0x0000000e,
    0x0004003b,0x0000000f,0x00000010,0x00000003,0x00040015,0x00000011,0x00000020,0x00000001,
    0x0004002b,0x00000011,0x00000012,0x00000000,0x00040020,0x00000013,0x00000001,0x0000000d,
    0x0004003b,0x00000013,0x00000014,0x00000001,0x00040020,0x00000016,0x00000003,0x0000000d,
    0x00050036,0x00000002,0x00000004,0x00000000,0x00000003,0x000200f8,0x00000005,0x0004003d,
    0x00000007,0x0000000c,0x0000000b,0x0003003e,0x00000009,0x0000000c,0x0004003d,0x0000000d,
    0x00000015,0x00000014,0x00050041,0x00000016,0x00000017,0x00000010,0x00000012,0x0003003e,
    0x00000017,0x00000015,0x000100fd,0x00010038
};
/*
 * Everything owned for one demo texture: the image, its backing device
 * memory, the view/sampler used to bind it, and the layout the image is
 * currently in (tracked so barriers can be recorded correctly).
 */
struct texture_object {
    VkSampler sampler;          // sampler bound alongside the view
    VkImage image;              // the texture image itself
    VkImageLayout imageLayout;  // layout the image currently resides in
    VkDeviceMemory mem;         // device memory backing `image`
    VkImageView view;           // view used for descriptor binding
    int32_t tex_width, tex_height;  // texel dimensions
};
// Latched to 1 by the debug-report callback (dbgFunc) when a validation
// layer reports an error or warning.  NOTE(review): the consumer of this
// flag is outside this chunk — presumably checked at shutdown/exit.
static int validation_error = 0;
/*
 * Debug-report callback that traps straight into an attached debugger
 * (DebugBreak on Windows, SIGTRAP elsewhere) whenever a validation message
 * arrives, instead of printing it.  NOTE(review): the registration site is
 * not in this chunk — presumably installed when `use_break` is set.
 *
 * Returning false tells the validation layer not to abort the Vulkan call
 * that triggered the message.
 */
VKAPI_ATTR VkBool32 VKAPI_CALL
BreakCallback(VkFlags msgFlags, VkDebugReportObjectTypeEXT objType,
              uint64_t srcObject, size_t location, int32_t msgCode,
              const char *pLayerPrefix, const char *pMsg,
              void *pUserData) {
#ifdef _WIN32
    DebugBreak();
#else
    raise(SIGTRAP);
#endif
    return false;
}
/*
 * Per-swapchain-image state: the presentable image, the view used to build
 * its framebuffer, and a command buffer slot associated with it.
 */
typedef struct {
    VkImage image;        // presentable image owned by the swapchain
    VkCommandBuffer cmd;  // command buffer associated with this image
    VkImageView view;     // view over `image` for framebuffer attachment
} SwapchainBuffers;
/*
 * All state for the triangle demo: window/surface, Vulkan instance and
 * device objects, the swapchain and its per-image resources, the depth
 * buffer, texture(s), vertex buffer, and the pipeline/descriptor objects
 * used to draw.  A single instance of this struct drives the whole app.
 */
struct demo {
    GLFWwindow* window;             // GLFW window hosting the surface
    VkSurfaceKHR surface;           // presentation surface for `window`
    bool use_staging_buffer;        // upload textures via a staging copy
    VkInstance inst;
    VkPhysicalDevice gpu;
    VkDevice device;
    VkQueue queue;                  // single queue used for graphics + present
    VkPhysicalDeviceProperties gpu_props;
    VkPhysicalDeviceFeatures gpu_features;
    VkQueueFamilyProperties *queue_props;   // array of queue_count entries
    uint32_t graphics_queue_node_index;
    // Instance/device extensions and layers actually enabled (bounded lists).
    uint32_t enabled_extension_count;
    uint32_t enabled_layer_count;
    const char *extension_names[64];
    const char *enabled_layers[64];
    int width, height;              // current framebuffer size in pixels
    VkFormat format;                // swapchain color format
    VkColorSpaceKHR color_space;
    uint32_t swapchainImageCount;
    VkSwapchainKHR swapchain;
    SwapchainBuffers *buffers;      // one entry per swapchain image
    VkCommandPool cmd_pool;
    // Depth attachment and its backing memory.
    struct {
        VkFormat format;
        VkImage image;
        VkDeviceMemory mem;
        VkImageView view;
    } depth;
    struct texture_object textures[DEMO_TEXTURE_COUNT];
    // Vertex buffer plus the vertex-input state that describes it.
    struct {
        VkBuffer buf;
        VkDeviceMemory mem;
        VkPipelineVertexInputStateCreateInfo vi;
        VkVertexInputBindingDescription vi_bindings[1];
        VkVertexInputAttributeDescription vi_attrs[2];
    } vertices;
    VkCommandBuffer setup_cmd; // Command Buffer for initialization commands
    VkCommandBuffer draw_cmd;  // Command Buffer for drawing commands
    VkPipelineLayout pipeline_layout;
    VkDescriptorSetLayout desc_layout;
    VkPipelineCache pipelineCache;
    VkRenderPass render_pass;
    VkPipeline pipeline;
    VkShaderModule vert_shader_module;
    VkShaderModule frag_shader_module;
    VkDescriptorPool desc_pool;
    VkDescriptorSet desc_set;
    VkFramebuffer *framebuffers;    // one per swapchain image
    VkPhysicalDeviceMemoryProperties memory_properties;
    int32_t curFrame;               // frames rendered so far
    int32_t frameCount;             // stop after this many frames (if set)
    bool validate;                  // enable validation layers
    bool use_break;                 // break into debugger on validation msgs
    VkDebugReportCallbackEXT msg_callback;
    float depthStencil;             // current depth clear value
    float depthIncrement;           // per-frame delta applied to depthStencil
    uint32_t current_buffer;        // swapchain image index being rendered
    uint32_t queue_count;           // entries in queue_props
};
  194. VKAPI_ATTR VkBool32 VKAPI_CALL
  195. dbgFunc(VkFlags msgFlags, VkDebugReportObjectTypeEXT objType,
  196. uint64_t srcObject, size_t location, int32_t msgCode,
  197. const char *pLayerPrefix, const char *pMsg, void *pUserData) {
  198. char *message = (char *)malloc(strlen(pMsg) + 100);
  199. assert(message);
  200. validation_error = 1;
  201. if (msgFlags & VK_DEBUG_REPORT_ERROR_BIT_EXT) {
  202. sprintf(message, "ERROR: [%s] Code %d : %s", pLayerPrefix, msgCode,
  203. pMsg);
  204. } else if (msgFlags & VK_DEBUG_REPORT_WARNING_BIT_EXT) {
  205. sprintf(message, "WARNING: [%s] Code %d : %s", pLayerPrefix, msgCode,
  206. pMsg);
  207. } else {
  208. return false;
  209. }
  210. printf("%s\n", message);
  211. fflush(stdout);
  212. free(message);
  213. /*
  214. * false indicates that layer should not bail-out of an
  215. * API call that had validation failures. This may mean that the
  216. * app dies inside the driver due to invalid parameter(s).
  217. * That's what would happen without validation layers, so we'll
  218. * keep that behavior here.
  219. */
  220. return false;
  221. }
  222. // Forward declaration:
  223. static void demo_resize(struct demo *demo);
  224. static bool memory_type_from_properties(struct demo *demo, uint32_t typeBits,
  225. VkFlags requirements_mask,
  226. uint32_t *typeIndex) {
  227. uint32_t i;
  228. // Search memtypes to find first index with those properties
  229. for (i = 0; i < VK_MAX_MEMORY_TYPES; i++) {
  230. if ((typeBits & 1) == 1) {
  231. // Type is available, does it match user properties?
  232. if ((demo->memory_properties.memoryTypes[i].propertyFlags &
  233. requirements_mask) == requirements_mask) {
  234. *typeIndex = i;
  235. return true;
  236. }
  237. }
  238. typeBits >>= 1;
  239. }
  240. // No memory types matched, return failure
  241. return false;
  242. }
  243. static void demo_flush_init_cmd(struct demo *demo) {
  244. VkResult U_ASSERT_ONLY err;
  245. if (demo->setup_cmd == VK_NULL_HANDLE)
  246. return;
  247. err = vkEndCommandBuffer(demo->setup_cmd);
  248. assert(!err);
  249. const VkCommandBuffer cmd_bufs[] = {demo->setup_cmd};
  250. VkFence nullFence = {VK_NULL_HANDLE};
  251. VkSubmitInfo submit_info = {.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
  252. .pNext = NULL,
  253. .waitSemaphoreCount = 0,
  254. .pWaitSemaphores = NULL,
  255. .pWaitDstStageMask = NULL,
  256. .commandBufferCount = 1,
  257. .pCommandBuffers = cmd_bufs,
  258. .signalSemaphoreCount = 0,
  259. .pSignalSemaphores = NULL};
  260. err = vkQueueSubmit(demo->queue, 1, &submit_info, nullFence);
  261. assert(!err);
  262. err = vkQueueWaitIdle(demo->queue);
  263. assert(!err);
  264. vkFreeCommandBuffers(demo->device, demo->cmd_pool, 1, cmd_bufs);
  265. demo->setup_cmd = VK_NULL_HANDLE;
  266. }
  267. static void demo_set_image_layout(struct demo *demo, VkImage image,
  268. VkImageAspectFlags aspectMask,
  269. VkImageLayout old_image_layout,
  270. VkImageLayout new_image_layout,
  271. VkAccessFlagBits srcAccessMask) {
  272. VkResult U_ASSERT_ONLY err;
  273. if (demo->setup_cmd == VK_NULL_HANDLE) {
  274. const VkCommandBufferAllocateInfo cmd = {
  275. .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
  276. .pNext = NULL,
  277. .commandPool = demo->cmd_pool,
  278. .level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
  279. .commandBufferCount = 1,
  280. };
  281. err = vkAllocateCommandBuffers(demo->device, &cmd, &demo->setup_cmd);
  282. assert(!err);
  283. VkCommandBufferBeginInfo cmd_buf_info = {
  284. .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
  285. .pNext = NULL,
  286. .flags = 0,
  287. .pInheritanceInfo = NULL,
  288. };
  289. err = vkBeginCommandBuffer(demo->setup_cmd, &cmd_buf_info);
  290. assert(!err);
  291. }
  292. VkImageMemoryBarrier image_memory_barrier = {
  293. .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
  294. .pNext = NULL,
  295. .srcAccessMask = srcAccessMask,
  296. .dstAccessMask = 0,
  297. .oldLayout = old_image_layout,
  298. .newLayout = new_image_layout,
  299. .image = image,
  300. .subresourceRange = {aspectMask, 0, 1, 0, 1}};
  301. if (new_image_layout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL) {
  302. /* Make sure anything that was copying from this image has completed */
  303. image_memory_barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
  304. }
  305. if (new_image_layout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL) {
  306. image_memory_barrier.dstAccessMask =
  307. VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
  308. }
  309. if (new_image_layout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL) {
  310. image_memory_barrier.dstAccessMask =
  311. VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
  312. }
  313. if (new_image_layout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL) {
  314. /* Make sure any Copy or CPU writes to image are flushed */
  315. image_memory_barrier.dstAccessMask =
  316. VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_INPUT_ATTACHMENT_READ_BIT;
  317. }
  318. VkImageMemoryBarrier *pmemory_barrier = &image_memory_barrier;
  319. VkPipelineStageFlags src_stages = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
  320. VkPipelineStageFlags dest_stages = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
  321. vkCmdPipelineBarrier(demo->setup_cmd, src_stages, dest_stages, 0, 0, NULL,
  322. 0, NULL, 1, pmemory_barrier);
  323. }
/*
 * Record the per-frame drawing command buffer (demo->draw_cmd) for the
 * swapchain image selected by demo->current_buffer:
 *
 *   1. barrier: UNDEFINED -> COLOR_ATTACHMENT_OPTIMAL (previous contents
 *      of the image are discarded — we clear it anyway),
 *   2. render pass: clear, bind pipeline + descriptor set, set dynamic
 *      viewport/scissor, bind the vertex buffer, draw 3 vertices,
 *   3. barrier: COLOR_ATTACHMENT_OPTIMAL -> PRESENT_SRC_KHR so the image
 *      can be handed to the presentation engine.
 *
 * The buffer is submitted later by demo_draw().
 */
static void demo_draw_build_cmd(struct demo *demo) {
    const VkCommandBufferBeginInfo cmd_buf_info = {
        .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO,
        .pNext = NULL,
        .flags = 0,
        .pInheritanceInfo = NULL,
    };
    // Attachment clears: dark grey color (alpha 0.2 as written) and the
    // current animated depth clear value.
    const VkClearValue clear_values[2] = {
        [0] = {.color.float32 = {0.2f, 0.2f, 0.2f, 0.2f}},
        [1] = {.depthStencil = {demo->depthStencil, 0}},
    };
    const VkRenderPassBeginInfo rp_begin = {
        .sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO,
        .pNext = NULL,
        .renderPass = demo->render_pass,
        .framebuffer = demo->framebuffers[demo->current_buffer],
        .renderArea.offset.x = 0,
        .renderArea.offset.y = 0,
        .renderArea.extent.width = demo->width,
        .renderArea.extent.height = demo->height,
        .clearValueCount = 2,
        .pClearValues = clear_values,
    };
    VkResult U_ASSERT_ONLY err;

    err = vkBeginCommandBuffer(demo->draw_cmd, &cmd_buf_info);
    assert(!err);

    // We can use LAYOUT_UNDEFINED as a wildcard here because we don't care what
    // happens to the previous contents of the image
    VkImageMemoryBarrier image_memory_barrier = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
        .pNext = NULL,
        .srcAccessMask = 0,
        .dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
        .oldLayout = VK_IMAGE_LAYOUT_UNDEFINED,
        .newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
        .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
        .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
        .image = demo->buffers[demo->current_buffer].image,
        .subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1}};

    vkCmdPipelineBarrier(demo->draw_cmd, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
                         VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, NULL, 0,
                         NULL, 1, &image_memory_barrier);
    vkCmdBeginRenderPass(demo->draw_cmd, &rp_begin, VK_SUBPASS_CONTENTS_INLINE);
    vkCmdBindPipeline(demo->draw_cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
                      demo->pipeline);
    vkCmdBindDescriptorSets(demo->draw_cmd, VK_PIPELINE_BIND_POINT_GRAPHICS,
                            demo->pipeline_layout, 0, 1, &demo->desc_set, 0,
                            NULL);

    // Viewport and scissor are dynamic pipeline state; set them to cover
    // the whole framebuffer.
    VkViewport viewport;
    memset(&viewport, 0, sizeof(viewport));
    viewport.height = (float)demo->height;
    viewport.width = (float)demo->width;
    viewport.minDepth = (float)0.0f;
    viewport.maxDepth = (float)1.0f;
    vkCmdSetViewport(demo->draw_cmd, 0, 1, &viewport);

    VkRect2D scissor;
    memset(&scissor, 0, sizeof(scissor));
    scissor.extent.width = demo->width;
    scissor.extent.height = demo->height;
    scissor.offset.x = 0;
    scissor.offset.y = 0;
    vkCmdSetScissor(demo->draw_cmd, 0, 1, &scissor);

    VkDeviceSize offsets[1] = {0};
    vkCmdBindVertexBuffers(demo->draw_cmd, VERTEX_BUFFER_BIND_ID, 1,
                           &demo->vertices.buf, offsets);

    // One triangle: 3 vertices, 1 instance.
    vkCmdDraw(demo->draw_cmd, 3, 1, 0, 0);
    vkCmdEndRenderPass(demo->draw_cmd);

    // Transition the image for presentation.
    VkImageMemoryBarrier prePresentBarrier = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER,
        .pNext = NULL,
        .srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT,
        .dstAccessMask = VK_ACCESS_MEMORY_READ_BIT,
        .oldLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
        .newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR,
        .srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
        .dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED,
        .subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1}};

    prePresentBarrier.image = demo->buffers[demo->current_buffer].image;
    VkImageMemoryBarrier *pmemory_barrier = &prePresentBarrier;
    vkCmdPipelineBarrier(demo->draw_cmd, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT,
                         VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT, 0, 0, NULL, 0,
                         NULL, 1, pmemory_barrier);

    err = vkEndCommandBuffer(demo->draw_cmd);
    assert(!err);
}
/*
 * Render and present one frame:
 *
 *   - create two throwaway semaphores (acquire-complete, draw-complete),
 *   - acquire the next swapchain image; on OUT_OF_DATE recreate the
 *     swapchain and recurse to draw with the new one,
 *   - flush any pending setup commands, rebuild the draw command buffer
 *     for the acquired image, submit it (waiting on the acquire semaphore,
 *     signaling the draw semaphore), then present,
 *   - wait for the queue to go idle before destroying the semaphores.
 *
 * Note: vkQueueWaitIdle every frame and per-frame semaphore creation are
 * deliberately simple demo-style synchronization, not a production pattern.
 */
static void demo_draw(struct demo *demo) {
    VkResult U_ASSERT_ONLY err;
    VkSemaphore imageAcquiredSemaphore, drawCompleteSemaphore;
    VkSemaphoreCreateInfo semaphoreCreateInfo = {
        .sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO,
        .pNext = NULL,
        .flags = 0,
    };

    err = vkCreateSemaphore(demo->device, &semaphoreCreateInfo,
                            NULL, &imageAcquiredSemaphore);
    assert(!err);

    err = vkCreateSemaphore(demo->device, &semaphoreCreateInfo,
                            NULL, &drawCompleteSemaphore);
    assert(!err);

    // Get the index of the next available swapchain image:
    err = vkAcquireNextImageKHR(demo->device, demo->swapchain, UINT64_MAX,
                                imageAcquiredSemaphore,
                                (VkFence)0, // TODO: Show use of fence
                                &demo->current_buffer);
    if (err == VK_ERROR_OUT_OF_DATE_KHR) {
        // demo->swapchain is out of date (e.g. the window was resized) and
        // must be recreated:
        demo_resize(demo);
        demo_draw(demo);  // recursive retry with the rebuilt swapchain
        vkDestroySemaphore(demo->device, imageAcquiredSemaphore, NULL);
        vkDestroySemaphore(demo->device, drawCompleteSemaphore, NULL);
        return;
    } else if (err == VK_SUBOPTIMAL_KHR) {
        // demo->swapchain is not as optimal as it could be, but the platform's
        // presentation engine will still present the image correctly.
    } else {
        assert(!err);
    }

    // Submit any pending layout-transition commands before drawing.
    demo_flush_init_cmd(demo);

    // Wait for the present complete semaphore to be signaled to ensure
    // that the image won't be rendered to until the presentation
    // engine has fully released ownership to the application, and it is
    // okay to render to the image.

    demo_draw_build_cmd(demo);
    VkFence nullFence = VK_NULL_HANDLE;
    VkPipelineStageFlags pipe_stage_flags =
        VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
    VkSubmitInfo submit_info = {.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO,
                                .pNext = NULL,
                                .waitSemaphoreCount = 1,
                                .pWaitSemaphores = &imageAcquiredSemaphore,
                                .pWaitDstStageMask = &pipe_stage_flags,
                                .commandBufferCount = 1,
                                .pCommandBuffers = &demo->draw_cmd,
                                .signalSemaphoreCount = 1,
                                .pSignalSemaphores = &drawCompleteSemaphore};

    err = vkQueueSubmit(demo->queue, 1, &submit_info, nullFence);
    assert(!err);

    VkPresentInfoKHR present = {
        .sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR,
        .pNext = NULL,
        .waitSemaphoreCount = 1,
        .pWaitSemaphores = &drawCompleteSemaphore,
        .swapchainCount = 1,
        .pSwapchains = &demo->swapchain,
        .pImageIndices = &demo->current_buffer,
    };

    err = vkQueuePresentKHR(demo->queue, &present);
    if (err == VK_ERROR_OUT_OF_DATE_KHR) {
        // demo->swapchain is out of date (e.g. the window was resized) and
        // must be recreated:
        demo_resize(demo);
    } else if (err == VK_SUBOPTIMAL_KHR) {
        // demo->swapchain is not as optimal as it could be, but the platform's
        // presentation engine will still present the image correctly.
    } else {
        assert(!err);
    }

    // Brute-force per-frame sync: wait for all queued work to finish so the
    // semaphores can be destroyed safely.
    err = vkQueueWaitIdle(demo->queue);
    assert(err == VK_SUCCESS);

    vkDestroySemaphore(demo->device, imageAcquiredSemaphore, NULL);
    vkDestroySemaphore(demo->device, drawCompleteSemaphore, NULL);
}
  487. static void demo_prepare_buffers(struct demo *demo) {
  488. VkResult U_ASSERT_ONLY err;
  489. VkSwapchainKHR oldSwapchain = demo->swapchain;
  490. // Check the surface capabilities and formats
  491. VkSurfaceCapabilitiesKHR surfCapabilities;
  492. err = vkGetPhysicalDeviceSurfaceCapabilitiesKHR(
  493. demo->gpu, demo->surface, &surfCapabilities);
  494. assert(!err);
  495. uint32_t presentModeCount;
  496. err = vkGetPhysicalDeviceSurfacePresentModesKHR(
  497. demo->gpu, demo->surface, &presentModeCount, NULL);
  498. assert(!err);
  499. VkPresentModeKHR *presentModes =
  500. (VkPresentModeKHR *)malloc(presentModeCount * sizeof(VkPresentModeKHR));
  501. assert(presentModes);
  502. err = vkGetPhysicalDeviceSurfacePresentModesKHR(
  503. demo->gpu, demo->surface, &presentModeCount, presentModes);
  504. assert(!err);
  505. VkExtent2D swapchainExtent;
  506. // width and height are either both 0xFFFFFFFF, or both not 0xFFFFFFFF.
  507. if (surfCapabilities.currentExtent.width == 0xFFFFFFFF) {
  508. // If the surface size is undefined, the size is set to the size
  509. // of the images requested, which must fit within the minimum and
  510. // maximum values.
  511. swapchainExtent.width = demo->width;
  512. swapchainExtent.height = demo->height;
  513. if (swapchainExtent.width < surfCapabilities.minImageExtent.width) {
  514. swapchainExtent.width = surfCapabilities.minImageExtent.width;
  515. } else if (swapchainExtent.width > surfCapabilities.maxImageExtent.width) {
  516. swapchainExtent.width = surfCapabilities.maxImageExtent.width;
  517. }
  518. if (swapchainExtent.height < surfCapabilities.minImageExtent.height) {
  519. swapchainExtent.height = surfCapabilities.minImageExtent.height;
  520. } else if (swapchainExtent.height > surfCapabilities.maxImageExtent.height) {
  521. swapchainExtent.height = surfCapabilities.maxImageExtent.height;
  522. }
  523. } else {
  524. // If the surface size is defined, the swap chain size must match
  525. swapchainExtent = surfCapabilities.currentExtent;
  526. demo->width = surfCapabilities.currentExtent.width;
  527. demo->height = surfCapabilities.currentExtent.height;
  528. }
  529. VkPresentModeKHR swapchainPresentMode = VK_PRESENT_MODE_FIFO_KHR;
  530. // Determine the number of VkImage's to use in the swap chain.
  531. // Application desires to only acquire 1 image at a time (which is
  532. // "surfCapabilities.minImageCount").
  533. uint32_t desiredNumOfSwapchainImages = surfCapabilities.minImageCount;
  534. // If maxImageCount is 0, we can ask for as many images as we want;
  535. // otherwise we're limited to maxImageCount
  536. if ((surfCapabilities.maxImageCount > 0) &&
  537. (desiredNumOfSwapchainImages > surfCapabilities.maxImageCount)) {
  538. // Application must settle for fewer images than desired:
  539. desiredNumOfSwapchainImages = surfCapabilities.maxImageCount;
  540. }
  541. VkSurfaceTransformFlagsKHR preTransform;
  542. if (surfCapabilities.supportedTransforms &
  543. VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR) {
  544. preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
  545. } else {
  546. preTransform = surfCapabilities.currentTransform;
  547. }
  548. const VkSwapchainCreateInfoKHR swapchain = {
  549. .sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR,
  550. .pNext = NULL,
  551. .surface = demo->surface,
  552. .minImageCount = desiredNumOfSwapchainImages,
  553. .imageFormat = demo->format,
  554. .imageColorSpace = demo->color_space,
  555. .imageExtent =
  556. {
  557. .width = swapchainExtent.width, .height = swapchainExtent.height,
  558. },
  559. .imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT,
  560. .preTransform = preTransform,
  561. .compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR,
  562. .imageArrayLayers = 1,
  563. .imageSharingMode = VK_SHARING_MODE_EXCLUSIVE,
  564. .queueFamilyIndexCount = 0,
  565. .pQueueFamilyIndices = NULL,
  566. .presentMode = swapchainPresentMode,
  567. .oldSwapchain = oldSwapchain,
  568. .clipped = true,
  569. };
  570. uint32_t i;
  571. err = vkCreateSwapchainKHR(demo->device, &swapchain, NULL, &demo->swapchain);
  572. assert(!err);
  573. // If we just re-created an existing swapchain, we should destroy the old
  574. // swapchain at this point.
  575. // Note: destroying the swapchain also cleans up all its associated
  576. // presentable images once the platform is done with them.
  577. if (oldSwapchain != VK_NULL_HANDLE) {
  578. vkDestroySwapchainKHR(demo->device, oldSwapchain, NULL);
  579. }
  580. err = vkGetSwapchainImagesKHR(demo->device, demo->swapchain,
  581. &demo->swapchainImageCount, NULL);
  582. assert(!err);
  583. VkImage *swapchainImages =
  584. (VkImage *)malloc(demo->swapchainImageCount * sizeof(VkImage));
  585. assert(swapchainImages);
  586. err = vkGetSwapchainImagesKHR(demo->device, demo->swapchain,
  587. &demo->swapchainImageCount,
  588. swapchainImages);
  589. assert(!err);
  590. demo->buffers = (SwapchainBuffers *)malloc(sizeof(SwapchainBuffers) *
  591. demo->swapchainImageCount);
  592. assert(demo->buffers);
  593. for (i = 0; i < demo->swapchainImageCount; i++) {
  594. VkImageViewCreateInfo color_attachment_view = {
  595. .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
  596. .pNext = NULL,
  597. .format = demo->format,
  598. .components =
  599. {
  600. .r = VK_COMPONENT_SWIZZLE_R,
  601. .g = VK_COMPONENT_SWIZZLE_G,
  602. .b = VK_COMPONENT_SWIZZLE_B,
  603. .a = VK_COMPONENT_SWIZZLE_A,
  604. },
  605. .subresourceRange = {.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
  606. .baseMipLevel = 0,
  607. .levelCount = 1,
  608. .baseArrayLayer = 0,
  609. .layerCount = 1},
  610. .viewType = VK_IMAGE_VIEW_TYPE_2D,
  611. .flags = 0,
  612. };
  613. demo->buffers[i].image = swapchainImages[i];
  614. color_attachment_view.image = demo->buffers[i].image;
  615. err = vkCreateImageView(demo->device, &color_attachment_view, NULL,
  616. &demo->buffers[i].view);
  617. assert(!err);
  618. }
  619. demo->current_buffer = 0;
  620. if (NULL != presentModes) {
  621. free(presentModes);
  622. }
  623. }
/*
 * Create the depth attachment for the window: a D16 image sized to
 * demo->width x demo->height, device memory backing it, a layout
 * transition to DEPTH_STENCIL_ATTACHMENT_OPTIMAL, and a depth-only
 * image view.  Results are stored in demo->depth.{format,image,mem,view}.
 */
static void demo_prepare_depth(struct demo *demo) {
    const VkFormat depth_format = VK_FORMAT_D16_UNORM;
    const VkImageCreateInfo image = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
        .pNext = NULL,
        .imageType = VK_IMAGE_TYPE_2D,
        .format = depth_format,
        .extent = {demo->width, demo->height, 1},
        .mipLevels = 1,
        .arrayLayers = 1,
        .samples = VK_SAMPLE_COUNT_1_BIT,
        .tiling = VK_IMAGE_TILING_OPTIMAL,
        .usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
        .flags = 0,
    };
    /* allocationSize/memoryTypeIndex are filled in below, after querying
     * the image's memory requirements. */
    VkMemoryAllocateInfo mem_alloc = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
        .pNext = NULL,
        .allocationSize = 0,
        .memoryTypeIndex = 0,
    };
    /* .image is patched in once the image has been created. */
    VkImageViewCreateInfo view = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
        .pNext = NULL,
        .image = VK_NULL_HANDLE,
        .format = depth_format,
        .subresourceRange = {.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT,
                             .baseMipLevel = 0,
                             .levelCount = 1,
                             .baseArrayLayer = 0,
                             .layerCount = 1},
        .flags = 0,
        .viewType = VK_IMAGE_VIEW_TYPE_2D,
    };
    VkMemoryRequirements mem_reqs;
    VkResult U_ASSERT_ONLY err;
    bool U_ASSERT_ONLY pass;

    demo->depth.format = depth_format;

    /* create image */
    err = vkCreateImage(demo->device, &image, NULL, &demo->depth.image);
    assert(!err);

    /* get memory requirements for this object */
    vkGetImageMemoryRequirements(demo->device, demo->depth.image, &mem_reqs);

    /* select memory size and type */
    mem_alloc.allocationSize = mem_reqs.size;
    pass = memory_type_from_properties(demo, mem_reqs.memoryTypeBits,
                                       0, /* No requirements */
                                       &mem_alloc.memoryTypeIndex);
    assert(pass);

    /* allocate memory */
    err = vkAllocateMemory(demo->device, &mem_alloc, NULL, &demo->depth.mem);
    assert(!err);

    /* bind memory */
    err =
        vkBindImageMemory(demo->device, demo->depth.image, demo->depth.mem, 0);
    assert(!err);

    /* move the image out of UNDEFINED before its first use as an attachment */
    demo_set_image_layout(demo, demo->depth.image, VK_IMAGE_ASPECT_DEPTH_BIT,
                          VK_IMAGE_LAYOUT_UNDEFINED,
                          VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
                          0);

    /* create image view */
    view.image = demo->depth.image;
    err = vkCreateImageView(demo->device, &view, NULL, &demo->depth.view);
    assert(!err);
}
/*
 * Create one 2x2 checkerboard texture image in *tex_obj.
 *
 * tex_colors holds the two texel values of the checkerboard (format is
 * B8G8R8A8_UNORM).  tiling/usage/required_props select between a linear,
 * host-writable image and an optimal-tiling destination image: when the
 * chosen memory type is host-visible, the checkerboard is written directly
 * through a mapping below; otherwise the caller is expected to fill the
 * image via a copy (staging path in demo_prepare_textures).
 * The image is left in SHADER_READ_ONLY_OPTIMAL layout.
 */
static void
demo_prepare_texture_image(struct demo *demo, const uint32_t *tex_colors,
                           struct texture_object *tex_obj, VkImageTiling tiling,
                           VkImageUsageFlags usage, VkFlags required_props) {
    const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
    const int32_t tex_width = 2;
    const int32_t tex_height = 2;
    VkResult U_ASSERT_ONLY err;
    bool U_ASSERT_ONLY pass;

    tex_obj->tex_width = tex_width;
    tex_obj->tex_height = tex_height;

    const VkImageCreateInfo image_create_info = {
        .sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO,
        .pNext = NULL,
        .imageType = VK_IMAGE_TYPE_2D,
        .format = tex_format,
        .extent = {tex_width, tex_height, 1},
        .mipLevels = 1,
        .arrayLayers = 1,
        .samples = VK_SAMPLE_COUNT_1_BIT,
        .tiling = tiling,
        .usage = usage,
        .flags = 0,
        /* PREINITIALIZED so host writes made before the layout transition
         * are preserved */
        .initialLayout = VK_IMAGE_LAYOUT_PREINITIALIZED
    };
    /* allocationSize/memoryTypeIndex are filled in after querying the
     * image's memory requirements. */
    VkMemoryAllocateInfo mem_alloc = {
        .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
        .pNext = NULL,
        .allocationSize = 0,
        .memoryTypeIndex = 0,
    };
    VkMemoryRequirements mem_reqs;

    err =
        vkCreateImage(demo->device, &image_create_info, NULL, &tex_obj->image);
    assert(!err);

    vkGetImageMemoryRequirements(demo->device, tex_obj->image, &mem_reqs);

    mem_alloc.allocationSize = mem_reqs.size;
    pass =
        memory_type_from_properties(demo, mem_reqs.memoryTypeBits,
                                    required_props, &mem_alloc.memoryTypeIndex);
    assert(pass);

    /* allocate memory */
    err = vkAllocateMemory(demo->device, &mem_alloc, NULL, &tex_obj->mem);
    assert(!err);

    /* bind memory */
    err = vkBindImageMemory(demo->device, tex_obj->image, tex_obj->mem, 0);
    assert(!err);

    if (required_props & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) {
        /* Host-writable image: fill the 2x2 checkerboard through a mapping,
         * stepping rows by the driver-reported rowPitch. */
        const VkImageSubresource subres = {
            .aspectMask = VK_IMAGE_ASPECT_COLOR_BIT,
            .mipLevel = 0,
            .arrayLayer = 0,
        };
        VkSubresourceLayout layout;
        void *data;
        int32_t x, y;

        vkGetImageSubresourceLayout(demo->device, tex_obj->image, &subres,
                                    &layout);

        err = vkMapMemory(demo->device, tex_obj->mem, 0,
                          mem_alloc.allocationSize, 0, &data);
        assert(!err);

        for (y = 0; y < tex_height; y++) {
            uint32_t *row = (uint32_t *)((char *)data + layout.rowPitch * y);
            for (x = 0; x < tex_width; x++)
                /* (x&1)^(y&1) alternates 0/1 in a checkerboard pattern */
                row[x] = tex_colors[(x & 1) ^ (y & 1)];
        }

        vkUnmapMemory(demo->device, tex_obj->mem);
    }

    tex_obj->imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
    demo_set_image_layout(demo, tex_obj->image, VK_IMAGE_ASPECT_COLOR_BIT,
                          VK_IMAGE_LAYOUT_PREINITIALIZED, tex_obj->imageLayout,
                          VK_ACCESS_HOST_WRITE_BIT);
    /* setting the image layout does not reference the actual memory so no need
     * to add a mem ref */
}
/*
 * Destroy a texture image and release its device memory.  Used to clean
 * up the staging image once its contents have been copied to the final
 * optimal-tiling texture.
 */
static void demo_destroy_texture_image(struct demo *demo,
                                       struct texture_object *tex_obj) {
    /* clean up staging resources */
    vkDestroyImage(demo->device, tex_obj->image, NULL);
    vkFreeMemory(demo->device, tex_obj->mem, NULL);
}
/*
 * Create DEMO_TEXTURE_COUNT checkerboard textures, plus a sampler and an
 * image view for each (stored in demo->textures[i]).
 *
 * Two paths, chosen per the device's format features for B8G8R8A8_UNORM:
 *  - sample directly from a linear-tiled, host-visible image; or
 *  - stage through a linear image and vkCmdCopyImage into an
 *    optimal-tiling, device-local image (also forced by
 *    demo->use_staging_buffer).
 * Asserts if the format cannot be sampled at all.
 */
static void demo_prepare_textures(struct demo *demo) {
    const VkFormat tex_format = VK_FORMAT_B8G8R8A8_UNORM;
    VkFormatProperties props;
    /* two texels per texture: the alternating checkerboard colors */
    const uint32_t tex_colors[DEMO_TEXTURE_COUNT][2] = {
        {0xffff0000, 0xff00ff00},
    };
    uint32_t i;
    VkResult U_ASSERT_ONLY err;

    vkGetPhysicalDeviceFormatProperties(demo->gpu, tex_format, &props);

    for (i = 0; i < DEMO_TEXTURE_COUNT; i++) {
        if ((props.linearTilingFeatures &
             VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) &&
            !demo->use_staging_buffer) {
            /* Device can texture using linear textures */
            demo_prepare_texture_image(
                demo, tex_colors[i], &demo->textures[i], VK_IMAGE_TILING_LINEAR,
                VK_IMAGE_USAGE_SAMPLED_BIT,
                VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
                    VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);
        } else if (props.optimalTilingFeatures &
                   VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT) {
            /* Must use staging buffer to copy linear texture to optimized */
            struct texture_object staging_texture;

            memset(&staging_texture, 0, sizeof(staging_texture));
            /* host-filled linear source image */
            demo_prepare_texture_image(
                demo, tex_colors[i], &staging_texture, VK_IMAGE_TILING_LINEAR,
                VK_IMAGE_USAGE_TRANSFER_SRC_BIT,
                VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
                    VK_MEMORY_PROPERTY_HOST_COHERENT_BIT);

            /* device-local destination image */
            demo_prepare_texture_image(
                demo, tex_colors[i], &demo->textures[i],
                VK_IMAGE_TILING_OPTIMAL,
                (VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT),
                VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT);

            /* transition both images into the layouts the copy requires */
            demo_set_image_layout(demo, staging_texture.image,
                                  VK_IMAGE_ASPECT_COLOR_BIT,
                                  staging_texture.imageLayout,
                                  VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                  0);

            demo_set_image_layout(demo, demo->textures[i].image,
                                  VK_IMAGE_ASPECT_COLOR_BIT,
                                  demo->textures[i].imageLayout,
                                  VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                  0);

            VkImageCopy copy_region = {
                .srcSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1},
                .srcOffset = {0, 0, 0},
                .dstSubresource = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 0, 1},
                .dstOffset = {0, 0, 0},
                .extent = {staging_texture.tex_width,
                           staging_texture.tex_height, 1},
            };
            /* recorded into the shared setup command buffer */
            vkCmdCopyImage(
                demo->setup_cmd, staging_texture.image,
                VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, demo->textures[i].image,
                VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, &copy_region);

            /* return the destination to its shader-read layout */
            demo_set_image_layout(demo, demo->textures[i].image,
                                  VK_IMAGE_ASPECT_COLOR_BIT,
                                  VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                  demo->textures[i].imageLayout,
                                  0);

            /* submit/finish the setup commands before freeing staging */
            demo_flush_init_cmd(demo);

            demo_destroy_texture_image(demo, &staging_texture);
        } else {
            /* Can't support VK_FORMAT_B8G8R8A8_UNORM !? */
            assert(!"No support for B8G8R8A8_UNORM as texture image format");
        }

        const VkSamplerCreateInfo sampler = {
            .sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO,
            .pNext = NULL,
            .magFilter = VK_FILTER_NEAREST,
            .minFilter = VK_FILTER_NEAREST,
            .mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST,
            .addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT,
            .addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT,
            .addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT,
            .mipLodBias = 0.0f,
            .anisotropyEnable = VK_FALSE,
            .maxAnisotropy = 1,
            .compareOp = VK_COMPARE_OP_NEVER,
            .minLod = 0.0f,
            .maxLod = 0.0f,
            .borderColor = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE,
            .unnormalizedCoordinates = VK_FALSE,
        };
        VkImageViewCreateInfo view = {
            .sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO,
            .pNext = NULL,
            .image = VK_NULL_HANDLE,
            .viewType = VK_IMAGE_VIEW_TYPE_2D,
            .format = tex_format,
            .components =
                {
                    VK_COMPONENT_SWIZZLE_R, VK_COMPONENT_SWIZZLE_G,
                    VK_COMPONENT_SWIZZLE_B, VK_COMPONENT_SWIZZLE_A,
                },
            .subresourceRange = {VK_IMAGE_ASPECT_COLOR_BIT, 0, 1, 0, 1},
            .flags = 0,
        };

        /* create sampler */
        err = vkCreateSampler(demo->device, &sampler, NULL,
                              &demo->textures[i].sampler);
        assert(!err);

        /* create image view */
        view.image = demo->textures[i].image;
        err = vkCreateImageView(demo->device, &view, NULL,
                                &demo->textures[i].view);
        assert(!err);
    }
}
  880. static void demo_prepare_vertices(struct demo *demo) {
  881. // clang-format off
  882. const float vb[3][5] = {
  883. /* position texcoord */
  884. { -1.0f, -1.0f, 0.25f, 0.0f, 0.0f },
  885. { 1.0f, -1.0f, 0.25f, 1.0f, 0.0f },
  886. { 0.0f, 1.0f, 1.0f, 0.5f, 1.0f },
  887. };
  888. // clang-format on
  889. const VkBufferCreateInfo buf_info = {
  890. .sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO,
  891. .pNext = NULL,
  892. .size = sizeof(vb),
  893. .usage = VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
  894. .flags = 0,
  895. };
  896. VkMemoryAllocateInfo mem_alloc = {
  897. .sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO,
  898. .pNext = NULL,
  899. .allocationSize = 0,
  900. .memoryTypeIndex = 0,
  901. };
  902. VkMemoryRequirements mem_reqs;
  903. VkResult U_ASSERT_ONLY err;
  904. bool U_ASSERT_ONLY pass;
  905. void *data;
  906. memset(&demo->vertices, 0, sizeof(demo->vertices));
  907. err = vkCreateBuffer(demo->device, &buf_info, NULL, &demo->vertices.buf);
  908. assert(!err);
  909. vkGetBufferMemoryRequirements(demo->device, demo->vertices.buf, &mem_reqs);
  910. assert(!err);
  911. mem_alloc.allocationSize = mem_reqs.size;
  912. pass = memory_type_from_properties(demo, mem_reqs.memoryTypeBits,
  913. VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT |
  914. VK_MEMORY_PROPERTY_HOST_COHERENT_BIT,
  915. &mem_alloc.memoryTypeIndex);
  916. assert(pass);
  917. err = vkAllocateMemory(demo->device, &mem_alloc, NULL, &demo->vertices.mem);
  918. assert(!err);
  919. err = vkMapMemory(demo->device, demo->vertices.mem, 0,
  920. mem_alloc.allocationSize, 0, &data);
  921. assert(!err);
  922. memcpy(data, vb, sizeof(vb));
  923. vkUnmapMemory(demo->device, demo->vertices.mem);
  924. err = vkBindBufferMemory(demo->device, demo->vertices.buf,
  925. demo->vertices.mem, 0);
  926. assert(!err);
  927. demo->vertices.vi.sType =
  928. VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
  929. demo->vertices.vi.pNext = NULL;
  930. demo->vertices.vi.vertexBindingDescriptionCount = 1;
  931. demo->vertices.vi.pVertexBindingDescriptions = demo->vertices.vi_bindings;
  932. demo->vertices.vi.vertexAttributeDescriptionCount = 2;
  933. demo->vertices.vi.pVertexAttributeDescriptions = demo->vertices.vi_attrs;
  934. demo->vertices.vi_bindings[0].binding = VERTEX_BUFFER_BIND_ID;
  935. demo->vertices.vi_bindings[0].stride = sizeof(vb[0]);
  936. demo->vertices.vi_bindings[0].inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
  937. demo->vertices.vi_attrs[0].binding = VERTEX_BUFFER_BIND_ID;
  938. demo->vertices.vi_attrs[0].location = 0;
  939. demo->vertices.vi_attrs[0].format = VK_FORMAT_R32G32B32_SFLOAT;
  940. demo->vertices.vi_attrs[0].offset = 0;
  941. demo->vertices.vi_attrs[1].binding = VERTEX_BUFFER_BIND_ID;
  942. demo->vertices.vi_attrs[1].location = 1;
  943. demo->vertices.vi_attrs[1].format = VK_FORMAT_R32G32_SFLOAT;
  944. demo->vertices.vi_attrs[1].offset = sizeof(float) * 3;
  945. }
  946. static void demo_prepare_descriptor_layout(struct demo *demo) {
  947. const VkDescriptorSetLayoutBinding layout_binding = {
  948. .binding = 0,
  949. .descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
  950. .descriptorCount = DEMO_TEXTURE_COUNT,
  951. .stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT,
  952. .pImmutableSamplers = NULL,
  953. };
  954. const VkDescriptorSetLayoutCreateInfo descriptor_layout = {
  955. .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO,
  956. .pNext = NULL,
  957. .bindingCount = 1,
  958. .pBindings = &layout_binding,
  959. };
  960. VkResult U_ASSERT_ONLY err;
  961. err = vkCreateDescriptorSetLayout(demo->device, &descriptor_layout, NULL,
  962. &demo->desc_layout);
  963. assert(!err);
  964. const VkPipelineLayoutCreateInfo pPipelineLayoutCreateInfo = {
  965. .sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO,
  966. .pNext = NULL,
  967. .setLayoutCount = 1,
  968. .pSetLayouts = &demo->desc_layout,
  969. };
  970. err = vkCreatePipelineLayout(demo->device, &pPipelineLayoutCreateInfo, NULL,
  971. &demo->pipeline_layout);
  972. assert(!err);
  973. }
/*
 * Build the single-subpass render pass used for drawing:
 *   attachment 0 — the swapchain color image (cleared on load, stored);
 *   attachment 1 — the depth buffer (cleared on load, discarded after
 *                  the pass via STORE_OP_DONT_CARE).
 * Both attachments enter and leave in their "attachment optimal" layouts,
 * so no layout transitions happen inside the pass.
 */
static void demo_prepare_render_pass(struct demo *demo) {
    const VkAttachmentDescription attachments[2] = {
            [0] =
                {
                 .format = demo->format,
                 .samples = VK_SAMPLE_COUNT_1_BIT,
                 .loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR,
                 .storeOp = VK_ATTACHMENT_STORE_OP_STORE,
                 .stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                 .stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE,
                 .initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
                 .finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
                },
            [1] =
                {
                 .format = demo->depth.format,
                 .samples = VK_SAMPLE_COUNT_1_BIT,
                 .loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR,
                 .storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE,
                 .stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE,
                 .stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE,
                 .initialLayout =
                     VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
                 .finalLayout =
                     VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
                },
    };
    const VkAttachmentReference color_reference = {
        .attachment = 0, .layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL,
    };
    const VkAttachmentReference depth_reference = {
        .attachment = 1,
        .layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL,
    };
    const VkSubpassDescription subpass = {
        .pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS,
        .flags = 0,
        .inputAttachmentCount = 0,
        .pInputAttachments = NULL,
        .colorAttachmentCount = 1,
        .pColorAttachments = &color_reference,
        .pResolveAttachments = NULL,
        .pDepthStencilAttachment = &depth_reference,
        .preserveAttachmentCount = 0,
        .pPreserveAttachments = NULL,
    };
    const VkRenderPassCreateInfo rp_info = {
        .sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO,
        .pNext = NULL,
        .attachmentCount = 2,
        .pAttachments = attachments,
        .subpassCount = 1,
        .pSubpasses = &subpass,
        .dependencyCount = 0,
        .pDependencies = NULL,
    };
    VkResult U_ASSERT_ONLY err;

    err = vkCreateRenderPass(demo->device, &rp_info, NULL, &demo->render_pass);
    assert(!err);
}
  1034. static VkShaderModule
  1035. demo_prepare_shader_module(struct demo *demo, const void *code, size_t size) {
  1036. VkShaderModuleCreateInfo moduleCreateInfo;
  1037. VkShaderModule module;
  1038. VkResult U_ASSERT_ONLY err;
  1039. moduleCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
  1040. moduleCreateInfo.pNext = NULL;
  1041. moduleCreateInfo.codeSize = size;
  1042. moduleCreateInfo.pCode = code;
  1043. moduleCreateInfo.flags = 0;
  1044. err = vkCreateShaderModule(demo->device, &moduleCreateInfo, NULL, &module);
  1045. assert(!err);
  1046. return module;
  1047. }
  1048. static VkShaderModule demo_prepare_vs(struct demo *demo) {
  1049. size_t size = sizeof(vertShaderCode);
  1050. demo->vert_shader_module =
  1051. demo_prepare_shader_module(demo, vertShaderCode, size);
  1052. return demo->vert_shader_module;
  1053. }
  1054. static VkShaderModule demo_prepare_fs(struct demo *demo) {
  1055. size_t size = sizeof(fragShaderCode);
  1056. demo->frag_shader_module =
  1057. demo_prepare_shader_module(demo, fragShaderCode, size);
  1058. return demo->frag_shader_module;
  1059. }
/*
 * Build demo->pipeline: a two-stage (vertex + fragment) graphics pipeline
 * with triangle-list topology, back-face culling, depth test/write enabled
 * (LESS_OR_EQUAL), no blending, and one sample.  Viewport and scissor are
 * dynamic states set at draw time.  The transient pipeline cache and the
 * shader modules are destroyed once the pipeline has been created.
 */
static void demo_prepare_pipeline(struct demo *demo) {
    VkGraphicsPipelineCreateInfo pipeline;
    VkPipelineCacheCreateInfo pipelineCache;
    VkPipelineVertexInputStateCreateInfo vi;
    VkPipelineInputAssemblyStateCreateInfo ia;
    VkPipelineRasterizationStateCreateInfo rs;
    VkPipelineColorBlendStateCreateInfo cb;
    VkPipelineDepthStencilStateCreateInfo ds;
    VkPipelineViewportStateCreateInfo vp;
    VkPipelineMultisampleStateCreateInfo ms;
    VkDynamicState dynamicStateEnables[2];
    VkPipelineDynamicStateCreateInfo dynamicState;
    VkResult U_ASSERT_ONLY err;

    /* dynamic states are appended below as vp/scissor are configured */
    memset(dynamicStateEnables, 0, sizeof dynamicStateEnables);
    memset(&dynamicState, 0, sizeof dynamicState);
    dynamicState.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
    dynamicState.pDynamicStates = dynamicStateEnables;

    memset(&pipeline, 0, sizeof(pipeline));
    pipeline.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
    pipeline.layout = demo->pipeline_layout;

    /* vertex input layout prepared by demo_prepare_vertices() */
    vi = demo->vertices.vi;

    memset(&ia, 0, sizeof(ia));
    ia.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
    ia.topology = VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;

    memset(&rs, 0, sizeof(rs));
    rs.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
    rs.polygonMode = VK_POLYGON_MODE_FILL;
    rs.cullMode = VK_CULL_MODE_BACK_BIT;
    rs.frontFace = VK_FRONT_FACE_CLOCKWISE;
    rs.depthClampEnable = VK_FALSE;
    rs.rasterizerDiscardEnable = VK_FALSE;
    rs.depthBiasEnable = VK_FALSE;
    rs.lineWidth = 1.0f;

    /* one color attachment, blending off, all channels written */
    memset(&cb, 0, sizeof(cb));
    cb.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
    VkPipelineColorBlendAttachmentState att_state[1];
    memset(att_state, 0, sizeof(att_state));
    att_state[0].colorWriteMask = 0xf;
    att_state[0].blendEnable = VK_FALSE;
    cb.attachmentCount = 1;
    cb.pAttachments = att_state;

    /* viewport and scissor values come from dynamic state at draw time */
    memset(&vp, 0, sizeof(vp));
    vp.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
    vp.viewportCount = 1;
    dynamicStateEnables[dynamicState.dynamicStateCount++] =
        VK_DYNAMIC_STATE_VIEWPORT;
    vp.scissorCount = 1;
    dynamicStateEnables[dynamicState.dynamicStateCount++] =
        VK_DYNAMIC_STATE_SCISSOR;

    memset(&ds, 0, sizeof(ds));
    ds.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
    ds.depthTestEnable = VK_TRUE;
    ds.depthWriteEnable = VK_TRUE;
    ds.depthCompareOp = VK_COMPARE_OP_LESS_OR_EQUAL;
    ds.depthBoundsTestEnable = VK_FALSE;
    ds.back.failOp = VK_STENCIL_OP_KEEP;
    ds.back.passOp = VK_STENCIL_OP_KEEP;
    ds.back.compareOp = VK_COMPARE_OP_ALWAYS;
    ds.stencilTestEnable = VK_FALSE;
    ds.front = ds.back;

    memset(&ms, 0, sizeof(ms));
    ms.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
    ms.pSampleMask = NULL;
    ms.rasterizationSamples = VK_SAMPLE_COUNT_1_BIT;

    // Two stages: vs and fs
    pipeline.stageCount = 2;
    VkPipelineShaderStageCreateInfo shaderStages[2];
    memset(&shaderStages, 0, 2 * sizeof(VkPipelineShaderStageCreateInfo));

    shaderStages[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    shaderStages[0].stage = VK_SHADER_STAGE_VERTEX_BIT;
    shaderStages[0].module = demo_prepare_vs(demo);
    shaderStages[0].pName = "main";

    shaderStages[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
    shaderStages[1].stage = VK_SHADER_STAGE_FRAGMENT_BIT;
    shaderStages[1].module = demo_prepare_fs(demo);
    shaderStages[1].pName = "main";

    pipeline.pVertexInputState = &vi;
    pipeline.pInputAssemblyState = &ia;
    pipeline.pRasterizationState = &rs;
    pipeline.pColorBlendState = &cb;
    pipeline.pMultisampleState = &ms;
    pipeline.pViewportState = &vp;
    pipeline.pDepthStencilState = &ds;
    pipeline.pStages = shaderStages;
    pipeline.renderPass = demo->render_pass;
    pipeline.pDynamicState = &dynamicState;

    memset(&pipelineCache, 0, sizeof(pipelineCache));
    pipelineCache.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;

    err = vkCreatePipelineCache(demo->device, &pipelineCache, NULL,
                                &demo->pipelineCache);
    assert(!err);
    err = vkCreateGraphicsPipelines(demo->device, demo->pipelineCache, 1,
                                    &pipeline, NULL, &demo->pipeline);
    assert(!err);

    /* the cache and shader modules are no longer needed once the
     * pipeline object exists */
    vkDestroyPipelineCache(demo->device, demo->pipelineCache, NULL);

    vkDestroyShaderModule(demo->device, demo->frag_shader_module, NULL);
    vkDestroyShaderModule(demo->device, demo->vert_shader_module, NULL);
}
  1158. static void demo_prepare_descriptor_pool(struct demo *demo) {
  1159. const VkDescriptorPoolSize type_count = {
  1160. .type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER,
  1161. .descriptorCount = DEMO_TEXTURE_COUNT,
  1162. };
  1163. const VkDescriptorPoolCreateInfo descriptor_pool = {
  1164. .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO,
  1165. .pNext = NULL,
  1166. .maxSets = 1,
  1167. .poolSizeCount = 1,
  1168. .pPoolSizes = &type_count,
  1169. };
  1170. VkResult U_ASSERT_ONLY err;
  1171. err = vkCreateDescriptorPool(demo->device, &descriptor_pool, NULL,
  1172. &demo->desc_pool);
  1173. assert(!err);
  1174. }
  1175. static void demo_prepare_descriptor_set(struct demo *demo) {
  1176. VkDescriptorImageInfo tex_descs[DEMO_TEXTURE_COUNT];
  1177. VkWriteDescriptorSet write;
  1178. VkResult U_ASSERT_ONLY err;
  1179. uint32_t i;
  1180. VkDescriptorSetAllocateInfo alloc_info = {
  1181. .sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO,
  1182. .pNext = NULL,
  1183. .descriptorPool = demo->desc_pool,
  1184. .descriptorSetCount = 1,
  1185. .pSetLayouts = &demo->desc_layout};
  1186. err = vkAllocateDescriptorSets(demo->device, &alloc_info, &demo->desc_set);
  1187. assert(!err);
  1188. memset(&tex_descs, 0, sizeof(tex_descs));
  1189. for (i = 0; i < DEMO_TEXTURE_COUNT; i++) {
  1190. tex_descs[i].sampler = demo->textures[i].sampler;
  1191. tex_descs[i].imageView = demo->textures[i].view;
  1192. tex_descs[i].imageLayout = VK_IMAGE_LAYOUT_GENERAL;
  1193. }
  1194. memset(&write, 0, sizeof(write));
  1195. write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
  1196. write.dstSet = demo->desc_set;
  1197. write.descriptorCount = DEMO_TEXTURE_COUNT;
  1198. write.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
  1199. write.pImageInfo = tex_descs;
  1200. vkUpdateDescriptorSets(demo->device, 1, &write, 0, NULL);
  1201. }
  1202. static void demo_prepare_framebuffers(struct demo *demo) {
  1203. VkImageView attachments[2];
  1204. attachments[1] = demo->depth.view;
  1205. const VkFramebufferCreateInfo fb_info = {
  1206. .sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO,
  1207. .pNext = NULL,
  1208. .renderPass = demo->render_pass,
  1209. .attachmentCount = 2,
  1210. .pAttachments = attachments,
  1211. .width = demo->width,
  1212. .height = demo->height,
  1213. .layers = 1,
  1214. };
  1215. VkResult U_ASSERT_ONLY err;
  1216. uint32_t i;
  1217. demo->framebuffers = (VkFramebuffer *)malloc(demo->swapchainImageCount *
  1218. sizeof(VkFramebuffer));
  1219. assert(demo->framebuffers);
  1220. for (i = 0; i < demo->swapchainImageCount; i++) {
  1221. attachments[0] = demo->buffers[i].view;
  1222. err = vkCreateFramebuffer(demo->device, &fb_info, NULL,
  1223. &demo->framebuffers[i]);
  1224. assert(!err);
  1225. }
  1226. }
  1227. static void demo_prepare(struct demo *demo) {
  1228. VkResult U_ASSERT_ONLY err;
  1229. const VkCommandPoolCreateInfo cmd_pool_info = {
  1230. .sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO,
  1231. .pNext = NULL,
  1232. .queueFamilyIndex = demo->graphics_queue_node_index,
  1233. .flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT,
  1234. };
  1235. err = vkCreateCommandPool(demo->device, &cmd_pool_info, NULL,
  1236. &demo->cmd_pool);
  1237. assert(!err);
  1238. const VkCommandBufferAllocateInfo cmd = {
  1239. .sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO,
  1240. .pNext = NULL,
  1241. .commandPool = demo->cmd_pool,
  1242. .level = VK_COMMAND_BUFFER_LEVEL_PRIMARY,
  1243. .commandBufferCount = 1,
  1244. };
  1245. err = vkAllocateCommandBuffers(demo->device, &cmd, &demo->draw_cmd);
  1246. assert(!err);
  1247. demo_prepare_buffers(demo);
  1248. demo_prepare_depth(demo);
  1249. demo_prepare_textures(demo);
  1250. demo_prepare_vertices(demo);
  1251. demo_prepare_descriptor_layout(demo);
  1252. demo_prepare_render_pass(demo);
  1253. demo_prepare_pipeline(demo);
  1254. demo_prepare_descriptor_pool(demo);
  1255. demo_prepare_descriptor_set(demo);
  1256. demo_prepare_framebuffers(demo);
  1257. }
  1258. static void demo_error_callback(int error, const char* description) {
  1259. printf("GLFW error: %s\n", description);
  1260. fflush(stdout);
  1261. }
  1262. static void demo_key_callback(GLFWwindow* window, int key, int scancode, int action, int mods) {
  1263. if (key == GLFW_KEY_ESCAPE && action == GLFW_RELEASE)
  1264. glfwSetWindowShouldClose(window, GLFW_TRUE);
  1265. }
  1266. static void demo_refresh_callback(GLFWwindow* window) {
  1267. struct demo* demo = glfwGetWindowUserPointer(window);
  1268. demo_draw(demo);
  1269. }
  1270. static void demo_resize_callback(GLFWwindow* window, int width, int height) {
  1271. struct demo* demo = glfwGetWindowUserPointer(window);
  1272. demo->width = width;
  1273. demo->height = height;
  1274. demo_resize(demo);
  1275. }
  1276. static void demo_run(struct demo *demo) {
  1277. while (!glfwWindowShouldClose(demo->window)) {
  1278. glfwPollEvents();
  1279. demo_draw(demo);
  1280. if (demo->depthStencil > 0.99f)
  1281. demo->depthIncrement = -0.001f;
  1282. if (demo->depthStencil < 0.8f)
  1283. demo->depthIncrement = 0.001f;
  1284. demo->depthStencil += demo->depthIncrement;
  1285. // Wait for work to finish before updating MVP.
  1286. vkDeviceWaitIdle(demo->device);
  1287. demo->curFrame++;
  1288. if (demo->frameCount != INT32_MAX && demo->curFrame == demo->frameCount)
  1289. glfwSetWindowShouldClose(demo->window, GLFW_TRUE);
  1290. }
  1291. }
  1292. static void demo_create_window(struct demo *demo) {
  1293. glfwWindowHint(GLFW_CLIENT_API, GLFW_NO_API);
  1294. demo->window = glfwCreateWindow(demo->width,
  1295. demo->height,
  1296. APP_LONG_NAME,
  1297. NULL,
  1298. NULL);
  1299. if (!demo->window) {
  1300. // It didn't work, so try to give a useful error:
  1301. printf("Cannot create a window in which to draw!\n");
  1302. fflush(stdout);
  1303. exit(1);
  1304. }
  1305. glfwSetWindowUserPointer(demo->window, demo);
  1306. glfwSetWindowRefreshCallback(demo->window, demo_refresh_callback);
  1307. glfwSetFramebufferSizeCallback(demo->window, demo_resize_callback);
  1308. glfwSetKeyCallback(demo->window, demo_key_callback);
  1309. }
  1310. /*
  1311. * Return 1 (true) if all layer names specified in check_names
  1312. * can be found in given layer properties.
  1313. */
  1314. static VkBool32 demo_check_layers(uint32_t check_count, const char **check_names,
  1315. uint32_t layer_count,
  1316. VkLayerProperties *layers) {
  1317. uint32_t i, j;
  1318. for (i = 0; i < check_count; i++) {
  1319. VkBool32 found = 0;
  1320. for (j = 0; j < layer_count; j++) {
  1321. if (!strcmp(check_names[i], layers[j].layerName)) {
  1322. found = 1;
  1323. break;
  1324. }
  1325. }
  1326. if (!found) {
  1327. fprintf(stderr, "Cannot find layer: %s\n", check_names[i]);
  1328. return 0;
  1329. }
  1330. }
  1331. return 1;
  1332. }
/*
 * Create the Vulkan instance and select a physical device.
 *
 * Steps performed:
 *  1. If --validate was given, locate a usable set of validation layers
 *     (the LunarG standard meta-layer, falling back to the explicit list).
 *  2. Gather the instance extensions GLFW requires for surface creation,
 *     plus debug-report (when validating) and portability enumeration
 *     when available.
 *  3. Create the instance and load instance-level entry points via glad.
 *  4. Enumerate physical devices and take the first one.
 *  5. Verify the device exposes VK_KHR_swapchain and load device-level
 *     entry points.
 *  6. Optionally install the debug-report callback, then cache device
 *     properties, queue-family properties, and features on *demo.
 *
 * Exits via ERR_EXIT on any unrecoverable failure.
 */
static void demo_init_vk(struct demo *demo) {
    VkResult err;
    VkBool32 portability_enumeration = VK_FALSE;
    uint32_t i = 0;
    uint32_t required_extension_count = 0;
    uint32_t instance_extension_count = 0;
    uint32_t instance_layer_count = 0;
    uint32_t validation_layer_count = 0;
    const char **required_extensions = NULL;
    const char **instance_validation_layers = NULL;
    demo->enabled_extension_count = 0;
    demo->enabled_layer_count = 0;

    /* Preferred choice: the single standard-validation meta-layer. */
    char *instance_validation_layers_alt1[] = {
        "VK_LAYER_LUNARG_standard_validation"
    };
    /* Fallback: the explicit per-component layer list (older SDKs). */
    char *instance_validation_layers_alt2[] = {
        "VK_LAYER_GOOGLE_threading", "VK_LAYER_LUNARG_parameter_validation",
        "VK_LAYER_LUNARG_object_tracker", "VK_LAYER_LUNARG_image",
        "VK_LAYER_LUNARG_core_validation", "VK_LAYER_LUNARG_swapchain",
        "VK_LAYER_GOOGLE_unique_objects"
    };

    /* Look for validation layers */
    VkBool32 validation_found = 0;
    if (demo->validate) {
        err = vkEnumerateInstanceLayerProperties(&instance_layer_count, NULL);
        assert(!err);

        instance_validation_layers = (const char**) instance_validation_layers_alt1;
        if (instance_layer_count > 0) {
            VkLayerProperties *instance_layers =
                malloc(sizeof (VkLayerProperties) * instance_layer_count);
            err = vkEnumerateInstanceLayerProperties(&instance_layer_count,
                                                     instance_layers);
            assert(!err);

            /* Try the meta-layer first... */
            validation_found = demo_check_layers(
                ARRAY_SIZE(instance_validation_layers_alt1),
                instance_validation_layers, instance_layer_count,
                instance_layers);
            if (validation_found) {
                demo->enabled_layer_count = ARRAY_SIZE(instance_validation_layers_alt1);
                demo->enabled_layers[0] = "VK_LAYER_LUNARG_standard_validation";
                validation_layer_count = 1;
            } else {
                // use alternative set of validation layers
                instance_validation_layers =
                    (const char**) instance_validation_layers_alt2;
                demo->enabled_layer_count = ARRAY_SIZE(instance_validation_layers_alt2);
                validation_found = demo_check_layers(
                    ARRAY_SIZE(instance_validation_layers_alt2),
                    instance_validation_layers, instance_layer_count,
                    instance_layers);
                validation_layer_count =
                    ARRAY_SIZE(instance_validation_layers_alt2);
                for (i = 0; i < validation_layer_count; i++) {
                    demo->enabled_layers[i] = instance_validation_layers[i];
                }
            }
            free(instance_layers);
        }

        if (!validation_found) {
            ERR_EXIT("vkEnumerateInstanceLayerProperties failed to find "
                     "required validation layer.\n\n"
                     "Please look at the Getting Started guide for additional "
                     "information.\n",
                     "vkCreateInstance Failure");
        }
    }

    /* Look for instance extensions */
    required_extensions = glfwGetRequiredInstanceExtensions(&required_extension_count);
    if (!required_extensions) {
        ERR_EXIT("glfwGetRequiredInstanceExtensions failed to find the "
                 "platform surface extensions.\n\nDo you have a compatible "
                 "Vulkan installable client driver (ICD) installed?\nPlease "
                 "look at the Getting Started guide for additional "
                 "information.\n",
                 "vkCreateInstance Failure");
    }

    /* GLFW's required extensions go in first. */
    for (i = 0; i < required_extension_count; i++) {
        demo->extension_names[demo->enabled_extension_count++] = required_extensions[i];
        assert(demo->enabled_extension_count < 64);
    }

    err = vkEnumerateInstanceExtensionProperties(
        NULL, &instance_extension_count, NULL);
    assert(!err);

    if (instance_extension_count > 0) {
        VkExtensionProperties *instance_extensions =
            malloc(sizeof(VkExtensionProperties) * instance_extension_count);
        err = vkEnumerateInstanceExtensionProperties(
            NULL, &instance_extension_count, instance_extensions);
        assert(!err);
        for (i = 0; i < instance_extension_count; i++) {
            /* Debug-report is only enabled when validating. */
            if (!strcmp(VK_EXT_DEBUG_REPORT_EXTENSION_NAME,
                        instance_extensions[i].extensionName)) {
                if (demo->validate) {
                    demo->extension_names[demo->enabled_extension_count++] =
                        VK_EXT_DEBUG_REPORT_EXTENSION_NAME;
                }
            }
            assert(demo->enabled_extension_count < 64);
            /* Needed (with the matching create flag) to see portability
             * (e.g. MoltenVK) devices during enumeration. */
            if (!strcmp(VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME,
                        instance_extensions[i].extensionName)) {
                demo->extension_names[demo->enabled_extension_count++] =
                    VK_KHR_PORTABILITY_ENUMERATION_EXTENSION_NAME;
                portability_enumeration = VK_TRUE;
            }
            assert(demo->enabled_extension_count < 64);
        }
        free(instance_extensions);
    }

    const VkApplicationInfo app = {
        .sType = VK_STRUCTURE_TYPE_APPLICATION_INFO,
        .pNext = NULL,
        .pApplicationName = APP_SHORT_NAME,
        .applicationVersion = 0,
        .pEngineName = APP_SHORT_NAME,
        .engineVersion = 0,
        .apiVersion = VK_API_VERSION_1_0,
    };
    VkInstanceCreateInfo inst_info = {
        .sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO,
        .pNext = NULL,
        .pApplicationInfo = &app,
        .enabledLayerCount = demo->enabled_layer_count,
        .ppEnabledLayerNames = (const char *const *)instance_validation_layers,
        .enabledExtensionCount = demo->enabled_extension_count,
        .ppEnabledExtensionNames = (const char *const *)demo->extension_names,
    };

    if (portability_enumeration)
        inst_info.flags |= VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR;

    uint32_t gpu_count;

    err = vkCreateInstance(&inst_info, NULL, &demo->inst);
    if (err == VK_ERROR_INCOMPATIBLE_DRIVER) {
        ERR_EXIT("Cannot find a compatible Vulkan installable client driver "
                 "(ICD).\n\nPlease look at the Getting Started guide for "
                 "additional information.\n",
                 "vkCreateInstance Failure");
    } else if (err == VK_ERROR_EXTENSION_NOT_PRESENT) {
        ERR_EXIT("Cannot find a specified extension library"
                 ".\nMake sure your layers path is set appropriately\n",
                 "vkCreateInstance Failure");
    } else if (err) {
        ERR_EXIT("vkCreateInstance failed.\n\nDo you have a compatible Vulkan "
                 "installable client driver (ICD) installed?\nPlease look at "
                 "the Getting Started guide for additional information.\n",
                 "vkCreateInstance Failure");
    }

    /* Load instance-level function pointers now that an instance exists. */
    gladLoadVulkanUserPtr(NULL, (GLADuserptrloadfunc) glfwGetInstanceProcAddress, demo->inst);

    /* Make initial call to query gpu_count, then second call for gpu info*/
    err = vkEnumeratePhysicalDevices(demo->inst, &gpu_count, NULL);
    assert(!err && gpu_count > 0);

    if (gpu_count > 0) {
        VkPhysicalDevice *physical_devices =
            malloc(sizeof(VkPhysicalDevice) * gpu_count);
        err = vkEnumeratePhysicalDevices(demo->inst, &gpu_count,
                                         physical_devices);
        assert(!err);
        /* For tri demo we just grab the first physical device */
        demo->gpu = physical_devices[0];
        free(physical_devices);
    } else {
        ERR_EXIT("vkEnumeratePhysicalDevices reported zero accessible devices."
                 "\n\nDo you have a compatible Vulkan installable client"
                 " driver (ICD) installed?\nPlease look at the Getting Started"
                 " guide for additional information.\n",
                 "vkEnumeratePhysicalDevices Failure");
    }

    /* Re-load to pick up device-level entry points for the chosen GPU. */
    gladLoadVulkanUserPtr(demo->gpu, (GLADuserptrloadfunc) glfwGetInstanceProcAddress, demo->inst);

    /* Look for device extensions */
    uint32_t device_extension_count = 0;
    VkBool32 swapchainExtFound = 0;
    /* Reset: extension_names is reused for *device* extensions from here on. */
    demo->enabled_extension_count = 0;

    err = vkEnumerateDeviceExtensionProperties(demo->gpu, NULL,
                                               &device_extension_count, NULL);
    assert(!err);

    if (device_extension_count > 0) {
        VkExtensionProperties *device_extensions =
            malloc(sizeof(VkExtensionProperties) * device_extension_count);
        err = vkEnumerateDeviceExtensionProperties(
            demo->gpu, NULL, &device_extension_count, device_extensions);
        assert(!err);

        for (i = 0; i < device_extension_count; i++) {
            if (!strcmp(VK_KHR_SWAPCHAIN_EXTENSION_NAME,
                        device_extensions[i].extensionName)) {
                swapchainExtFound = 1;
                demo->extension_names[demo->enabled_extension_count++] =
                    VK_KHR_SWAPCHAIN_EXTENSION_NAME;
            }
            assert(demo->enabled_extension_count < 64);
        }

        free(device_extensions);
    }

    if (!swapchainExtFound) {
        ERR_EXIT("vkEnumerateDeviceExtensionProperties failed to find "
                 "the " VK_KHR_SWAPCHAIN_EXTENSION_NAME
                 " extension.\n\nDo you have a compatible "
                 "Vulkan installable client driver (ICD) installed?\nPlease "
                 "look at the Getting Started guide for additional "
                 "information.\n",
                 "vkCreateInstance Failure");
    }

    if (demo->validate) {
        VkDebugReportCallbackCreateInfoEXT dbgCreateInfo;
        dbgCreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
        dbgCreateInfo.flags =
            VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT;
        /* --break installs a callback that traps into the debugger. */
        dbgCreateInfo.pfnCallback = demo->use_break ? BreakCallback : dbgFunc;
        dbgCreateInfo.pUserData = demo;
        dbgCreateInfo.pNext = NULL;
        err = vkCreateDebugReportCallbackEXT(demo->inst, &dbgCreateInfo, NULL,
                                             &demo->msg_callback);
        switch (err) {
        case VK_SUCCESS:
            break;
        case VK_ERROR_OUT_OF_HOST_MEMORY:
            ERR_EXIT("CreateDebugReportCallback: out of host memory\n",
                     "CreateDebugReportCallback Failure");
            break;
        default:
            ERR_EXIT("CreateDebugReportCallback: unknown failure\n",
                     "CreateDebugReportCallback Failure");
            break;
        }
    }

    vkGetPhysicalDeviceProperties(demo->gpu, &demo->gpu_props);

    // Query with NULL data to get count
    vkGetPhysicalDeviceQueueFamilyProperties(demo->gpu, &demo->queue_count,
                                             NULL);

    demo->queue_props = (VkQueueFamilyProperties *)malloc(
        demo->queue_count * sizeof(VkQueueFamilyProperties));
    vkGetPhysicalDeviceQueueFamilyProperties(demo->gpu, &demo->queue_count,
                                             demo->queue_props);
    assert(demo->queue_count >= 1);

    vkGetPhysicalDeviceFeatures(demo->gpu, &demo->gpu_features);

    // Graphics queue and MemMgr queue can be separate.
    // TODO: Add support for separate queues, including synchronization,
    //       and appropriate tracking for QueueSubmit
}
  1569. static void demo_init_device(struct demo *demo) {
  1570. VkResult U_ASSERT_ONLY err;
  1571. float queue_priorities[1] = {0.0};
  1572. const VkDeviceQueueCreateInfo queue = {
  1573. .sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO,
  1574. .pNext = NULL,
  1575. .queueFamilyIndex = demo->graphics_queue_node_index,
  1576. .queueCount = 1,
  1577. .pQueuePriorities = queue_priorities};
  1578. VkPhysicalDeviceFeatures features;
  1579. memset(&features, 0, sizeof(features));
  1580. if (demo->gpu_features.shaderClipDistance) {
  1581. features.shaderClipDistance = VK_TRUE;
  1582. }
  1583. VkDeviceCreateInfo device = {
  1584. .sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO,
  1585. .pNext = NULL,
  1586. .queueCreateInfoCount = 1,
  1587. .pQueueCreateInfos = &queue,
  1588. .enabledLayerCount = 0,
  1589. .ppEnabledLayerNames = NULL,
  1590. .enabledExtensionCount = demo->enabled_extension_count,
  1591. .ppEnabledExtensionNames = (const char *const *)demo->extension_names,
  1592. .pEnabledFeatures = &features,
  1593. };
  1594. err = vkCreateDevice(demo->gpu, &device, NULL, &demo->device);
  1595. assert(!err);
  1596. }
  1597. static void demo_init_vk_swapchain(struct demo *demo) {
  1598. VkResult U_ASSERT_ONLY err;
  1599. uint32_t i;
  1600. // Create a WSI surface for the window:
  1601. glfwCreateWindowSurface(demo->inst, demo->window, NULL, &demo->surface);
  1602. // Iterate over each queue to learn whether it supports presenting:
  1603. VkBool32 *supportsPresent =
  1604. (VkBool32 *)malloc(demo->queue_count * sizeof(VkBool32));
  1605. for (i = 0; i < demo->queue_count; i++) {
  1606. vkGetPhysicalDeviceSurfaceSupportKHR(demo->gpu, i, demo->surface,
  1607. &supportsPresent[i]);
  1608. }
  1609. // Search for a graphics and a present queue in the array of queue
  1610. // families, try to find one that supports both
  1611. uint32_t graphicsQueueNodeIndex = UINT32_MAX;
  1612. uint32_t presentQueueNodeIndex = UINT32_MAX;
  1613. for (i = 0; i < demo->queue_count; i++) {
  1614. if ((demo->queue_props[i].queueFlags & VK_QUEUE_GRAPHICS_BIT) != 0) {
  1615. if (graphicsQueueNodeIndex == UINT32_MAX) {
  1616. graphicsQueueNodeIndex = i;
  1617. }
  1618. if (supportsPresent[i] == VK_TRUE) {
  1619. graphicsQueueNodeIndex = i;
  1620. presentQueueNodeIndex = i;
  1621. break;
  1622. }
  1623. }
  1624. }
  1625. if (presentQueueNodeIndex == UINT32_MAX) {
  1626. // If didn't find a queue that supports both graphics and present, then
  1627. // find a separate present queue.
  1628. for (i = 0; i < demo->queue_count; ++i) {
  1629. if (supportsPresent[i] == VK_TRUE) {
  1630. presentQueueNodeIndex = i;
  1631. break;
  1632. }
  1633. }
  1634. }
  1635. free(supportsPresent);
  1636. // Generate error if could not find both a graphics and a present queue
  1637. if (graphicsQueueNodeIndex == UINT32_MAX ||
  1638. presentQueueNodeIndex == UINT32_MAX) {
  1639. ERR_EXIT("Could not find a graphics and a present queue\n",
  1640. "Swapchain Initialization Failure");
  1641. }
  1642. // TODO: Add support for separate queues, including presentation,
  1643. // synchronization, and appropriate tracking for QueueSubmit.
  1644. // NOTE: While it is possible for an application to use a separate graphics
  1645. // and a present queues, this demo program assumes it is only using
  1646. // one:
  1647. if (graphicsQueueNodeIndex != presentQueueNodeIndex) {
  1648. ERR_EXIT("Could not find a common graphics and a present queue\n",
  1649. "Swapchain Initialization Failure");
  1650. }
  1651. demo->graphics_queue_node_index = graphicsQueueNodeIndex;
  1652. demo_init_device(demo);
  1653. vkGetDeviceQueue(demo->device, demo->graphics_queue_node_index, 0,
  1654. &demo->queue);
  1655. // Get the list of VkFormat's that are supported:
  1656. uint32_t formatCount;
  1657. err = vkGetPhysicalDeviceSurfaceFormatsKHR(demo->gpu, demo->surface,
  1658. &formatCount, NULL);
  1659. assert(!err);
  1660. VkSurfaceFormatKHR *surfFormats =
  1661. (VkSurfaceFormatKHR *)malloc(formatCount * sizeof(VkSurfaceFormatKHR));
  1662. err = vkGetPhysicalDeviceSurfaceFormatsKHR(demo->gpu, demo->surface,
  1663. &formatCount, surfFormats);
  1664. assert(!err);
  1665. // If the format list includes just one entry of VK_FORMAT_UNDEFINED,
  1666. // the surface has no preferred format. Otherwise, at least one
  1667. // supported format will be returned.
  1668. if (formatCount == 1 && surfFormats[0].format == VK_FORMAT_UNDEFINED) {
  1669. demo->format = VK_FORMAT_B8G8R8A8_UNORM;
  1670. } else {
  1671. assert(formatCount >= 1);
  1672. demo->format = surfFormats[0].format;
  1673. }
  1674. demo->color_space = surfFormats[0].colorSpace;
  1675. demo->curFrame = 0;
  1676. // Get Memory information and properties
  1677. vkGetPhysicalDeviceMemoryProperties(demo->gpu, &demo->memory_properties);
  1678. }
  1679. static void demo_init_connection(struct demo *demo) {
  1680. glfwSetErrorCallback(demo_error_callback);
  1681. if (!glfwInit()) {
  1682. printf("Cannot initialize GLFW.\nExiting ...\n");
  1683. fflush(stdout);
  1684. exit(1);
  1685. }
  1686. if (!glfwVulkanSupported()) {
  1687. printf("GLFW failed to find the Vulkan loader.\nExiting ...\n");
  1688. fflush(stdout);
  1689. exit(1);
  1690. }
  1691. gladLoadVulkanUserPtr(NULL, (GLADuserptrloadfunc) glfwGetInstanceProcAddress, NULL);
  1692. }
  1693. static void demo_init(struct demo *demo, const int argc, const char *argv[])
  1694. {
  1695. int i;
  1696. memset(demo, 0, sizeof(*demo));
  1697. demo->frameCount = INT32_MAX;
  1698. for (i = 1; i < argc; i++) {
  1699. if (strcmp(argv[i], "--use_staging") == 0) {
  1700. demo->use_staging_buffer = true;
  1701. continue;
  1702. }
  1703. if (strcmp(argv[i], "--break") == 0) {
  1704. demo->use_break = true;
  1705. continue;
  1706. }
  1707. if (strcmp(argv[i], "--validate") == 0) {
  1708. demo->validate = true;
  1709. continue;
  1710. }
  1711. if (strcmp(argv[i], "--c") == 0 && demo->frameCount == INT32_MAX &&
  1712. i < argc - 1 && sscanf(argv[i + 1], "%d", &demo->frameCount) == 1 &&
  1713. demo->frameCount >= 0) {
  1714. i++;
  1715. continue;
  1716. }
  1717. fprintf(stderr, "Usage:\n %s [--use_staging] [--validate] [--break] "
  1718. "[--c <framecount>]\n",
  1719. APP_SHORT_NAME);
  1720. fflush(stderr);
  1721. exit(1);
  1722. }
  1723. demo_init_connection(demo);
  1724. demo_init_vk(demo);
  1725. demo->width = 300;
  1726. demo->height = 300;
  1727. demo->depthStencil = 1.0;
  1728. demo->depthIncrement = -0.01f;
  1729. }
/*
 * Destroy all Vulkan objects in reverse creation order, then the debug
 * callback, surface, and instance, and finally the GLFW window.  The
 * destruction order matters: child objects must go before the device,
 * and the device before the instance.
 */
static void demo_cleanup(struct demo *demo) {
    uint32_t i;

    for (i = 0; i < demo->swapchainImageCount; i++) {
        vkDestroyFramebuffer(demo->device, demo->framebuffers[i], NULL);
    }
    free(demo->framebuffers);
    vkDestroyDescriptorPool(demo->device, demo->desc_pool, NULL);

    // setup_cmd is only non-null if staging/setup work was recorded.
    if (demo->setup_cmd) {
        vkFreeCommandBuffers(demo->device, demo->cmd_pool, 1, &demo->setup_cmd);
    }
    vkFreeCommandBuffers(demo->device, demo->cmd_pool, 1, &demo->draw_cmd);
    vkDestroyCommandPool(demo->device, demo->cmd_pool, NULL);

    vkDestroyPipeline(demo->device, demo->pipeline, NULL);
    vkDestroyRenderPass(demo->device, demo->render_pass, NULL);
    vkDestroyPipelineLayout(demo->device, demo->pipeline_layout, NULL);
    vkDestroyDescriptorSetLayout(demo->device, demo->desc_layout, NULL);

    vkDestroyBuffer(demo->device, demo->vertices.buf, NULL);
    vkFreeMemory(demo->device, demo->vertices.mem, NULL);

    for (i = 0; i < DEMO_TEXTURE_COUNT; i++) {
        vkDestroyImageView(demo->device, demo->textures[i].view, NULL);
        vkDestroyImage(demo->device, demo->textures[i].image, NULL);
        vkFreeMemory(demo->device, demo->textures[i].mem, NULL);
        vkDestroySampler(demo->device, demo->textures[i].sampler, NULL);
    }

    // Swapchain image views are ours to destroy; the images themselves
    // belong to the swapchain and go away with it.
    for (i = 0; i < demo->swapchainImageCount; i++) {
        vkDestroyImageView(demo->device, demo->buffers[i].view, NULL);
    }

    vkDestroyImageView(demo->device, demo->depth.view, NULL);
    vkDestroyImage(demo->device, demo->depth.image, NULL);
    vkFreeMemory(demo->device, demo->depth.mem, NULL);

    vkDestroySwapchainKHR(demo->device, demo->swapchain, NULL);
    free(demo->buffers);

    vkDestroyDevice(demo->device, NULL);
    if (demo->validate) {
        vkDestroyDebugReportCallbackEXT(demo->inst, demo->msg_callback, NULL);
    }
    vkDestroySurfaceKHR(demo->inst, demo->surface, NULL);
    vkDestroyInstance(demo->inst, NULL);

    free(demo->queue_props);

    glfwDestroyWindow(demo->window);
    glfwTerminate();
}
/*
 * Handle a framebuffer resize by tearing down everything that depends
 * on the swapchain dimensions and rebuilding it via demo_prepare().
 * The surface, instance, and device are kept; demo_prepare() re-creates
 * the swapchain (the old handle is passed as oldSwapchain there —
 * NOTE(review): assumed from the re-prepare pattern, confirm in
 * demo_prepare_buffers).
 */
static void demo_resize(struct demo *demo) {
    uint32_t i;

    // In order to properly resize the window, we must re-create the swapchain
    // AND redo the command buffers, etc.
    //
    // First, perform part of the demo_cleanup() function:
    for (i = 0; i < demo->swapchainImageCount; i++) {
        vkDestroyFramebuffer(demo->device, demo->framebuffers[i], NULL);
    }
    free(demo->framebuffers);
    vkDestroyDescriptorPool(demo->device, demo->desc_pool, NULL);

    if (demo->setup_cmd) {
        vkFreeCommandBuffers(demo->device, demo->cmd_pool, 1, &demo->setup_cmd);
        // Null the handle so later code doesn't free it twice.
        demo->setup_cmd = VK_NULL_HANDLE;
    }
    vkFreeCommandBuffers(demo->device, demo->cmd_pool, 1, &demo->draw_cmd);
    vkDestroyCommandPool(demo->device, demo->cmd_pool, NULL);

    vkDestroyPipeline(demo->device, demo->pipeline, NULL);
    vkDestroyRenderPass(demo->device, demo->render_pass, NULL);
    vkDestroyPipelineLayout(demo->device, demo->pipeline_layout, NULL);
    vkDestroyDescriptorSetLayout(demo->device, demo->desc_layout, NULL);

    vkDestroyBuffer(demo->device, demo->vertices.buf, NULL);
    vkFreeMemory(demo->device, demo->vertices.mem, NULL);

    for (i = 0; i < DEMO_TEXTURE_COUNT; i++) {
        vkDestroyImageView(demo->device, demo->textures[i].view, NULL);
        vkDestroyImage(demo->device, demo->textures[i].image, NULL);
        vkFreeMemory(demo->device, demo->textures[i].mem, NULL);
        vkDestroySampler(demo->device, demo->textures[i].sampler, NULL);
    }

    for (i = 0; i < demo->swapchainImageCount; i++) {
        vkDestroyImageView(demo->device, demo->buffers[i].view, NULL);
    }

    vkDestroyImageView(demo->device, demo->depth.view, NULL);
    vkDestroyImage(demo->device, demo->depth.image, NULL);
    vkFreeMemory(demo->device, demo->depth.mem, NULL);

    free(demo->buffers);

    // Second, re-perform the demo_prepare() function, which will re-create the
    // swapchain:
    demo_prepare(demo);
}
  1812. int main(const int argc, const char *argv[]) {
  1813. struct demo demo;
  1814. demo_init(&demo, argc, argv);
  1815. demo_create_window(&demo);
  1816. demo_init_vk_swapchain(&demo);
  1817. demo_prepare(&demo);
  1818. demo_run(&demo);
  1819. demo_cleanup(&demo);
  1820. return validation_error;
  1821. }