RendererVK.cpp 52 KB

12345678910111213141516171819202122232425262728293031323334353637383940414243444546474849505152535455565758596061626364656667686970717273747576777879808182838485868788899091929394959697989910010110210310410510610710810911011111211311411511611711811912012112212312412512612712812913013113213313413513613713813914014114214314414514614714814915015115215315415515615715815916016116216316416516616716816917017117217317417517617717817918018118218318418518618718818919019119219319419519619719819920020120220320420520620720820921021121221321421521621721821922022122222322422522622722822923023123223323423523623723823924024124224324424524624724824925025125225325425525625725825926026126226326426526626726826927027127227327427527627727827928028128228328428528628728828929029129229329429529629729829930030130230330430530630730830931031131231331431531631731831932032132232332432532632732832933033133233333433533633733833934034134234334434534634734834935035135235335435535635735835936036136236336436536636736836937037137237337437537637737837938038138238338438538638738838939039139239339439539639739839940040140240340440540640740840941041141241341441541641741841942042142242342442542642742842943043143243343443543643743843944044144244344444544644744844945045145245345445545645745845946046146246346446546646746846947047147247347447547647747847948048148248348448548648748848949049149249349449549649749849950050150250350450550650750850951051151251351451551651751851952052152252352452552652752852953053153253353453553653753853954054154254354454554654754854955055155255355455555655755855956056156256356456556656756856957057157257357457557657757857958058158258358458558658758858959059159259359459559659759859960060160260360460560660760860961061161261361461561661761861962062162262362462562662762862963063163263363463563663763863964064164264364464564664764864965065165265365465565665765865966066166266366466566666766866967067167267367467567667767867968068168268368468568668768868969069169269369469569669769869970070170270
370470570670770870971071171271371471571671771871972072172272372472572672772872973073173273373473573673773873974074174274374474574674774874975075175275375475575675775875976076176276376476576676776876977077177277377477577677777877978078178278378478578678778878979079179279379479579679779879980080180280380480580680780880981081181281381481581681781881982082182282382482582682782882983083183283383483583683783883984084184284384484584684784884985085185285385485585685785885986086186286386486586686786886987087187287387487587687787887988088188288388488588688788888989089189289389489589689789889990090190290390490590690790890991091191291391491591691791891992092192292392492592692792892993093193293393493593693793893994094194294394494594694794894995095195295395495595695795895996096196296396496596696796896997097197297397497597697797897998098198298398498598698798898999099199299399499599699799899910001001100210031004100510061007100810091010101110121013101410151016101710181019102010211022102310241025102610271028102910301031103210331034103510361037103810391040104110421043104410451046104710481049105010511052105310541055105610571058105910601061106210631064106510661067106810691070107110721073107410751076107710781079108010811082108310841085108610871088108910901091109210931094109510961097109810991100110111021103110411051106110711081109111011111112111311141115111611171118111911201121112211231124112511261127112811291130113111321133113411351136113711381139114011411142114311441145114611471148114911501151115211531154115511561157115811591160116111621163116411651166116711681169117011711172117311741175117611771178117911801181118211831184118511861187118811891190119111921193119411951196119711981199120012011202120312041205120612071208120912101211121212131214121512161217121812191220122112221223122412251226122712281229123012311232123312341235123612371238123912401241124212431244124512461247124812491250125112521253125412551256125712581259126012611262
  1. // Jolt Physics Library (https://github.com/jrouwe/JoltPhysics)
  2. // SPDX-FileCopyrightText: 2024 Jorrit Rouwe
  3. // SPDX-License-Identifier: MIT
  4. #include <TestFramework.h>
  5. #include <Renderer/VK/RendererVK.h>
  6. #include <Renderer/VK/RenderPrimitiveVK.h>
  7. #include <Renderer/VK/RenderInstancesVK.h>
  8. #include <Renderer/VK/PipelineStateVK.h>
  9. #include <Renderer/VK/VertexShaderVK.h>
  10. #include <Renderer/VK/PixelShaderVK.h>
  11. #include <Renderer/VK/TextureVK.h>
  12. #include <Renderer/VK/FatalErrorIfFailedVK.h>
  13. #include <Utils/Log.h>
  14. #include <Utils/ReadData.h>
  15. #include <Jolt/Core/Profiler.h>
  16. #include <Jolt/Core/QuickSort.h>
  17. #include <Jolt/Core/RTTI.h>
  18. #ifdef JPH_PLATFORM_WINDOWS
  19. #include <vulkan/vulkan_win32.h>
  20. #include <Window/ApplicationWindowWin.h>
  21. #elif defined(JPH_PLATFORM_LINUX)
  22. #include <vulkan/vulkan_xlib.h>
  23. #include <Window/ApplicationWindowLinux.h>
  24. #elif defined(JPH_PLATFORM_MACOS)
  25. #include <vulkan/vulkan_metal.h>
  26. #include <Window/ApplicationWindowMacOS.h>
  27. #endif
#ifdef JPH_DEBUG

// Callback invoked by the Vulkan debug utils messenger for validation layer / driver diagnostic messages.
// inSeverity tells how serious the message is, inCallbackData carries the message text;
// inType and inUserData are unused here.
static VKAPI_ATTR VkBool32 VKAPI_CALL sVulkanDebugCallback(VkDebugUtilsMessageSeverityFlagBitsEXT inSeverity, [[maybe_unused]] VkDebugUtilsMessageTypeFlagsEXT inType, const VkDebugUtilsMessengerCallbackDataEXT *inCallbackData, [[maybe_unused]] void *inUserData)
{
	// Forward the message to the application's trace log first so it is visible even if we assert below
	Trace("VK: %s", inCallbackData->pMessage);

	// Error-severity messages indicate incorrect Vulkan API usage on our side; trip an assert in debug builds
	JPH_ASSERT((inSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) == 0);

	// Returning VK_FALSE tells the implementation not to abort the Vulkan call that triggered the message
	return VK_FALSE;
}

#endif // JPH_DEBUG
// Destructor: tears down every Vulkan object this renderer created.
// NOTE: the destruction order below is significant — child objects are destroyed before the
// device, the device before the surface/instance, and the instance goes last.
RendererVK::~RendererVK()
{
	// Block until the GPU has finished all in-flight work so it no longer references
	// any of the resources we are about to destroy
	vkDeviceWaitIdle(mDevice);

	// Trace allocation stats
	Trace("VK: Max allocations: %u, max size: %u MB", mMaxNumAllocations, uint32(mMaxTotalAllocated >> 20));

	// Destroy the shadow map (texture first, then the framebuffer that referenced its image view)
	mShadowMap = nullptr;
	vkDestroyFramebuffer(mDevice, mShadowFrameBuffer, nullptr);

	// Release constant buffers (one per in-flight frame, see Initialize)
	for (unique_ptr<ConstantBufferVK> &cb : mVertexShaderConstantBufferProjection)
		cb = nullptr;
	for (unique_ptr<ConstantBufferVK> &cb : mVertexShaderConstantBufferOrtho)
		cb = nullptr;
	for (unique_ptr<ConstantBufferVK> &cb : mPixelShaderConstantBuffer)
		cb = nullptr;

	// Free all buffers: both the per-frame freed lists and the reuse cache
	for (BufferCache &bc : mFreedBuffers)
		for (BufferCache::value_type &vt : bc)
			for (BufferVK &bvk : vt.second)
				FreeBufferInternal(bvk);
	for (BufferCache::value_type &vt : mBufferCache)
		for (BufferVK &bvk : vt.second)
			FreeBufferInternal(bvk);

	// Free all blocks in the memory cache. Only entries with mOffset == 0 are freed —
	// presumably entries at a non-zero offset are sub-allocations of the same VkDeviceMemory
	// block and freeing them too would double-free (confirm against the allocator code).
	for (MemoryCache::value_type &mc : mMemoryCache)
		for (Memory &m : mc.second)
			if (m.mOffset == 0)
				vkFreeMemory(mDevice, m.mMemory, nullptr); // Don't care about memory tracking anymore

	// Destroy the per-frame synchronization primitives
	for (VkFence fence : mInFlightFences)
		vkDestroyFence(mDevice, fence, nullptr);
	for (VkSemaphore semaphore : mRenderFinishedSemaphores)
		vkDestroySemaphore(mDevice, semaphore, nullptr);
	for (VkSemaphore semaphore : mImageAvailableSemaphores)
		vkDestroySemaphore(mDevice, semaphore, nullptr);

	// Destroy the command pool (frees the command buffers allocated from it),
	// pipeline layout, render passes, descriptor pool (frees its descriptor sets),
	// samplers and descriptor set layouts
	vkDestroyCommandPool(mDevice, mCommandPool, nullptr);
	vkDestroyPipelineLayout(mDevice, mPipelineLayout, nullptr);
	vkDestroyRenderPass(mDevice, mRenderPassShadow, nullptr);
	vkDestroyRenderPass(mDevice, mRenderPass, nullptr);
	vkDestroyDescriptorPool(mDevice, mDescriptorPool, nullptr);
	vkDestroySampler(mDevice, mTextureSamplerShadow, nullptr);
	vkDestroySampler(mDevice, mTextureSamplerRepeat, nullptr);
	vkDestroyDescriptorSetLayout(mDevice, mDescriptorSetLayoutUBO, nullptr);
	vkDestroyDescriptorSetLayout(mDevice, mDescriptorSetLayoutTexture, nullptr);

	// The swap chain must go before the surface and device it was created from
	DestroySwapChain();
	vkDestroySurfaceKHR(mInstance, mSurface, nullptr);
	vkDestroyDevice(mDevice, nullptr);

#ifdef JPH_DEBUG
	// vkDestroyDebugUtilsMessengerEXT is an extension entry point, so it must be fetched at runtime;
	// it can be null when the debug utils extension was not available at instance creation
	PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = (PFN_vkDestroyDebugUtilsMessengerEXT)(void *)vkGetInstanceProcAddr(mInstance, "vkDestroyDebugUtilsMessengerEXT");
	if (vkDestroyDebugUtilsMessengerEXT != nullptr)
		vkDestroyDebugUtilsMessengerEXT(mInstance, mDebugMessenger, nullptr);
#endif

	// The instance goes last
	vkDestroyInstance(mInstance, nullptr);
}
  89. void RendererVK::Initialize(ApplicationWindow *inWindow)
  90. {
  91. Renderer::Initialize(inWindow);
  92. // Flip the sign of the projection matrix
  93. mPerspectiveYSign = -1.0f;
  94. // Required instance extensions
  95. Array<const char *> required_instance_extensions;
  96. required_instance_extensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
  97. #ifdef JPH_PLATFORM_WINDOWS
  98. required_instance_extensions.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
  99. #elif defined(JPH_PLATFORM_LINUX)
  100. required_instance_extensions.push_back(VK_KHR_XLIB_SURFACE_EXTENSION_NAME);
  101. #elif defined(JPH_PLATFORM_MACOS)
  102. required_instance_extensions.push_back(VK_EXT_METAL_SURFACE_EXTENSION_NAME);
  103. required_instance_extensions.push_back("VK_KHR_portability_enumeration");
  104. required_instance_extensions.push_back("VK_KHR_get_physical_device_properties2");
  105. #endif
  106. // Required device extensions
  107. Array<const char *> required_device_extensions;
  108. required_device_extensions.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
  109. #ifdef JPH_PLATFORM_MACOS
  110. required_device_extensions.push_back("VK_KHR_portability_subset"); // VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME
  111. #endif
  112. // Query supported instance extensions
  113. uint32 instance_extension_count = 0;
  114. FatalErrorIfFailed(vkEnumerateInstanceExtensionProperties(nullptr, &instance_extension_count, nullptr));
  115. Array<VkExtensionProperties> instance_extensions;
  116. instance_extensions.resize(instance_extension_count);
  117. FatalErrorIfFailed(vkEnumerateInstanceExtensionProperties(nullptr, &instance_extension_count, instance_extensions.data()));
  118. // Query supported validation layers
  119. uint32 validation_layer_count;
  120. vkEnumerateInstanceLayerProperties(&validation_layer_count, nullptr);
  121. Array<VkLayerProperties> validation_layers(validation_layer_count);
  122. vkEnumerateInstanceLayerProperties(&validation_layer_count, validation_layers.data());
  123. // Create Vulkan instance
  124. VkInstanceCreateInfo instance_create_info = {};
  125. instance_create_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
  126. #ifdef JPH_PLATFORM_MACOS
  127. instance_create_info.flags = VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR;
  128. #endif
  129. #ifdef JPH_DEBUG
  130. // Enable validation layer if supported
  131. const char *desired_validation_layers[] = { "VK_LAYER_KHRONOS_validation" };
  132. for (const VkLayerProperties &p : validation_layers)
  133. if (strcmp(desired_validation_layers[0], p.layerName) == 0)
  134. {
  135. instance_create_info.enabledLayerCount = 1;
  136. instance_create_info.ppEnabledLayerNames = desired_validation_layers;
  137. break;
  138. }
  139. // Setup debug messenger callback if the extension is supported
  140. VkDebugUtilsMessengerCreateInfoEXT messenger_create_info = {};
  141. for (const VkExtensionProperties &ext : instance_extensions)
  142. if (strcmp(VK_EXT_DEBUG_UTILS_EXTENSION_NAME, ext.extensionName) == 0)
  143. {
  144. messenger_create_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
  145. messenger_create_info.messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
  146. messenger_create_info.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_DEVICE_ADDRESS_BINDING_BIT_EXT;
  147. messenger_create_info.pfnUserCallback = sVulkanDebugCallback;
  148. instance_create_info.pNext = &messenger_create_info;
  149. required_instance_extensions.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
  150. break;
  151. }
  152. #endif
  153. instance_create_info.enabledExtensionCount = (uint32)required_instance_extensions.size();
  154. instance_create_info.ppEnabledExtensionNames = required_instance_extensions.data();
  155. FatalErrorIfFailed(vkCreateInstance(&instance_create_info, nullptr, &mInstance));
  156. #ifdef JPH_DEBUG
  157. // Finalize debug messenger callback
  158. PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = (PFN_vkCreateDebugUtilsMessengerEXT)(std::uintptr_t)vkGetInstanceProcAddr(mInstance, "vkCreateDebugUtilsMessengerEXT");
  159. if (vkCreateDebugUtilsMessengerEXT != nullptr)
  160. FatalErrorIfFailed(vkCreateDebugUtilsMessengerEXT(mInstance, &messenger_create_info, nullptr, &mDebugMessenger));
  161. #endif
  162. // Create surface
  163. #ifdef JPH_PLATFORM_WINDOWS
  164. VkWin32SurfaceCreateInfoKHR surface_create_info = {};
  165. surface_create_info.sType = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR;
  166. surface_create_info.hwnd = static_cast<ApplicationWindowWin *>(mWindow)->GetWindowHandle();
  167. surface_create_info.hinstance = GetModuleHandle(nullptr);
  168. FatalErrorIfFailed(vkCreateWin32SurfaceKHR(mInstance, &surface_create_info, nullptr, &mSurface));
  169. #elif defined(JPH_PLATFORM_LINUX)
  170. VkXlibSurfaceCreateInfoKHR surface_create_info = {};
  171. surface_create_info.sType = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR;
  172. surface_create_info.dpy = static_cast<ApplicationWindowLinux *>(mWindow)->GetDisplay();
  173. surface_create_info.window = static_cast<ApplicationWindowLinux *>(mWindow)->GetWindow();
  174. FatalErrorIfFailed(vkCreateXlibSurfaceKHR(mInstance, &surface_create_info, nullptr, &mSurface));
  175. #elif defined(JPH_PLATFORM_MACOS)
  176. VkMetalSurfaceCreateInfoEXT surface_create_info = {};
  177. surface_create_info.sType = VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT;
  178. surface_create_info.pNext = nullptr;
  179. surface_create_info.pLayer = static_cast<ApplicationWindowMacOS *>(mWindow)->GetMetalLayer();
  180. FatalErrorIfFailed(vkCreateMetalSurfaceEXT(mInstance, &surface_create_info, nullptr, &mSurface));
  181. #endif
  182. // Select device
  183. uint32 device_count = 0;
  184. FatalErrorIfFailed(vkEnumeratePhysicalDevices(mInstance, &device_count, nullptr));
  185. Array<VkPhysicalDevice> devices;
  186. devices.resize(device_count);
  187. FatalErrorIfFailed(vkEnumeratePhysicalDevices(mInstance, &device_count, devices.data()));
  188. struct Device
  189. {
  190. VkPhysicalDevice mPhysicalDevice;
  191. String mName;
  192. VkSurfaceFormatKHR mFormat;
  193. uint32 mGraphicsQueueIndex;
  194. uint32 mPresentQueueIndex;
  195. int mScore;
  196. };
  197. Array<Device> available_devices;
  198. for (VkPhysicalDevice device : devices)
  199. {
  200. // Get device properties
  201. VkPhysicalDeviceProperties properties;
  202. vkGetPhysicalDeviceProperties(device, &properties);
  203. // Test if it is an appropriate type
  204. int score = 0;
  205. switch (properties.deviceType)
  206. {
  207. case VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU:
  208. score = 30;
  209. break;
  210. case VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU:
  211. score = 20;
  212. break;
  213. case VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU:
  214. score = 10;
  215. break;
  216. case VK_PHYSICAL_DEVICE_TYPE_CPU:
  217. score = 5;
  218. break;
  219. case VK_PHYSICAL_DEVICE_TYPE_OTHER:
  220. case VK_PHYSICAL_DEVICE_TYPE_MAX_ENUM:
  221. continue;
  222. }
  223. // Check if the device supports all our required extensions
  224. uint32 device_extension_count;
  225. vkEnumerateDeviceExtensionProperties(device, nullptr, &device_extension_count, nullptr);
  226. Array<VkExtensionProperties> available_extensions;
  227. available_extensions.resize(device_extension_count);
  228. vkEnumerateDeviceExtensionProperties(device, nullptr, &device_extension_count, available_extensions.data());
  229. int found_extensions = 0;
  230. for (const char *required_device_extension : required_device_extensions)
  231. for (const VkExtensionProperties &ext : available_extensions)
  232. if (strcmp(required_device_extension, ext.extensionName) == 0)
  233. {
  234. found_extensions++;
  235. break;
  236. }
  237. if (found_extensions != int(required_device_extensions.size()))
  238. continue;
  239. // Find the right queues
  240. uint32 queue_family_count = 0;
  241. vkGetPhysicalDeviceQueueFamilyProperties(device, &queue_family_count, nullptr);
  242. Array<VkQueueFamilyProperties> queue_families;
  243. queue_families.resize(queue_family_count);
  244. vkGetPhysicalDeviceQueueFamilyProperties(device, &queue_family_count, queue_families.data());
  245. uint32 graphics_queue = ~uint32(0);
  246. uint32 present_queue = ~uint32(0);
  247. for (uint32 i = 0; i < uint32(queue_families.size()); ++i)
  248. {
  249. if (queue_families[i].queueFlags & VK_QUEUE_GRAPHICS_BIT)
  250. graphics_queue = i;
  251. VkBool32 present_support = false;
  252. vkGetPhysicalDeviceSurfaceSupportKHR(device, i, mSurface, &present_support);
  253. if (present_support)
  254. present_queue = i;
  255. if (graphics_queue != ~uint32(0) && present_queue != ~uint32(0))
  256. break;
  257. }
  258. if (graphics_queue == ~uint32(0) || present_queue == ~uint32(0))
  259. continue;
  260. // Select surface format
  261. VkSurfaceFormatKHR selected_format = SelectFormat(device);
  262. if (selected_format.format == VK_FORMAT_UNDEFINED)
  263. continue;
  264. // Add the device
  265. available_devices.push_back({ device, properties.deviceName, selected_format, graphics_queue, present_queue, score });
  266. }
  267. if (available_devices.empty())
  268. FatalError("No Vulkan device found!");
  269. QuickSort(available_devices.begin(), available_devices.end(), [](const Device &inLHS, const Device &inRHS) {
  270. return inLHS.mScore > inRHS.mScore;
  271. });
  272. const Device &selected_device = available_devices[0];
  273. Trace("Selected device: %s", selected_device.mName.c_str());
  274. mPhysicalDevice = selected_device.mPhysicalDevice;
  275. // Get memory properties
  276. vkGetPhysicalDeviceMemoryProperties(mPhysicalDevice, &mMemoryProperties);
  277. // Get features
  278. VkPhysicalDeviceFeatures physical_device_features = {};
  279. vkGetPhysicalDeviceFeatures(mPhysicalDevice, &physical_device_features);
  280. // Create device
  281. float queue_priority = 1.0f;
  282. VkDeviceQueueCreateInfo queue_create_info[2] = {};
  283. for (size_t i = 0; i < std::size(queue_create_info); ++i)
  284. {
  285. queue_create_info[i].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
  286. queue_create_info[i].queueCount = 1;
  287. queue_create_info[i].pQueuePriorities = &queue_priority;
  288. }
  289. queue_create_info[0].queueFamilyIndex = selected_device.mGraphicsQueueIndex;
  290. queue_create_info[1].queueFamilyIndex = selected_device.mPresentQueueIndex;
  291. VkPhysicalDeviceFeatures device_features = {};
  292. if (!physical_device_features.fillModeNonSolid)
  293. FatalError("fillModeNonSolid not supported!");
  294. device_features.fillModeNonSolid = VK_TRUE;
  295. VkDeviceCreateInfo device_create_info = {};
  296. device_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
  297. device_create_info.queueCreateInfoCount = selected_device.mGraphicsQueueIndex != selected_device.mPresentQueueIndex? 2 : 1;
  298. device_create_info.pQueueCreateInfos = queue_create_info;
  299. device_create_info.enabledLayerCount = instance_create_info.enabledLayerCount;
  300. device_create_info.ppEnabledLayerNames = instance_create_info.ppEnabledLayerNames;
  301. device_create_info.enabledExtensionCount = uint32(required_device_extensions.size());
  302. device_create_info.ppEnabledExtensionNames = required_device_extensions.data();
  303. device_create_info.pEnabledFeatures = &device_features;
  304. FatalErrorIfFailed(vkCreateDevice(selected_device.mPhysicalDevice, &device_create_info, nullptr, &mDevice));
  305. // Get the queues
  306. mGraphicsQueueIndex = selected_device.mGraphicsQueueIndex;
  307. mPresentQueueIndex = selected_device.mPresentQueueIndex;
  308. vkGetDeviceQueue(mDevice, mGraphicsQueueIndex, 0, &mGraphicsQueue);
  309. vkGetDeviceQueue(mDevice, mPresentQueueIndex, 0, &mPresentQueue);
  310. VkCommandPoolCreateInfo pool_info = {};
  311. pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
  312. pool_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
  313. pool_info.queueFamilyIndex = selected_device.mGraphicsQueueIndex;
  314. FatalErrorIfFailed(vkCreateCommandPool(mDevice, &pool_info, nullptr, &mCommandPool));
  315. VkCommandBufferAllocateInfo command_buffer_info = {};
  316. command_buffer_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
  317. command_buffer_info.commandPool = mCommandPool;
  318. command_buffer_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
  319. command_buffer_info.commandBufferCount = 1;
  320. for (uint32 i = 0; i < cFrameCount; ++i)
  321. FatalErrorIfFailed(vkAllocateCommandBuffers(mDevice, &command_buffer_info, &mCommandBuffers[i]));
  322. VkSemaphoreCreateInfo semaphore_info = {};
  323. semaphore_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
  324. for (uint32 i = 0; i < cFrameCount; ++i)
  325. {
  326. FatalErrorIfFailed(vkCreateSemaphore(mDevice, &semaphore_info, nullptr, &mImageAvailableSemaphores[i]));
  327. FatalErrorIfFailed(vkCreateSemaphore(mDevice, &semaphore_info, nullptr, &mRenderFinishedSemaphores[i]));
  328. }
  329. VkFenceCreateInfo fence_info = {};
  330. fence_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
  331. fence_info.flags = VK_FENCE_CREATE_SIGNALED_BIT;
  332. for (uint32 i = 0; i < cFrameCount; ++i)
  333. FatalErrorIfFailed(vkCreateFence(mDevice, &fence_info, nullptr, &mInFlightFences[i]));
  334. // Create constant buffer. One per frame to avoid overwriting the constant buffer while the GPU is still using it.
  335. for (uint n = 0; n < cFrameCount; ++n)
  336. {
  337. mVertexShaderConstantBufferProjection[n] = CreateConstantBuffer(sizeof(VertexShaderConstantBuffer));
  338. mVertexShaderConstantBufferOrtho[n] = CreateConstantBuffer(sizeof(VertexShaderConstantBuffer));
  339. mPixelShaderConstantBuffer[n] = CreateConstantBuffer(sizeof(PixelShaderConstantBuffer));
  340. }
  341. // Create descriptor set layout for the uniform buffers
  342. VkDescriptorSetLayoutBinding ubo_layout_binding[2] = {};
  343. ubo_layout_binding[0].binding = 0;
  344. ubo_layout_binding[0].descriptorCount = 1;
  345. ubo_layout_binding[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
  346. ubo_layout_binding[0].stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
  347. ubo_layout_binding[1].binding = 1;
  348. ubo_layout_binding[1].descriptorCount = 1;
  349. ubo_layout_binding[1].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
  350. ubo_layout_binding[1].stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
  351. VkDescriptorSetLayoutCreateInfo ubo_dsl = {};
  352. ubo_dsl.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
  353. ubo_dsl.bindingCount = std::size(ubo_layout_binding);
  354. ubo_dsl.pBindings = ubo_layout_binding;
  355. FatalErrorIfFailed(vkCreateDescriptorSetLayout(mDevice, &ubo_dsl, nullptr, &mDescriptorSetLayoutUBO));
  356. // Create descriptor set layout for the texture binding
  357. VkDescriptorSetLayoutBinding texture_layout_binding = {};
  358. texture_layout_binding.binding = 0;
  359. texture_layout_binding.descriptorCount = 1;
  360. texture_layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
  361. texture_layout_binding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
  362. VkDescriptorSetLayoutCreateInfo texture_dsl = {};
  363. texture_dsl.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
  364. texture_dsl.bindingCount = 1;
  365. texture_dsl.pBindings = &texture_layout_binding;
  366. FatalErrorIfFailed(vkCreateDescriptorSetLayout(mDevice, &texture_dsl, nullptr, &mDescriptorSetLayoutTexture));
  367. // Create pipeline layout
  368. VkPipelineLayoutCreateInfo pipeline_layout = {};
  369. VkDescriptorSetLayout layout_handles[] = { mDescriptorSetLayoutUBO, mDescriptorSetLayoutTexture };
  370. pipeline_layout.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
  371. pipeline_layout.setLayoutCount = std::size(layout_handles);
  372. pipeline_layout.pSetLayouts = layout_handles;
  373. pipeline_layout.pushConstantRangeCount = 0;
  374. FatalErrorIfFailed(vkCreatePipelineLayout(mDevice, &pipeline_layout, nullptr, &mPipelineLayout));
  375. // Create descriptor pool
  376. VkDescriptorPoolSize descriptor_pool_sizes[] = {
  377. { VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 128 },
  378. { VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 128 },
  379. };
  380. VkDescriptorPoolCreateInfo descriptor_info = {};
  381. descriptor_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
  382. descriptor_info.poolSizeCount = std::size(descriptor_pool_sizes);
  383. descriptor_info.pPoolSizes = descriptor_pool_sizes;
  384. descriptor_info.maxSets = 256;
  385. FatalErrorIfFailed(vkCreateDescriptorPool(mDevice, &descriptor_info, nullptr, &mDescriptorPool));
  386. // Allocate descriptor sets for 3d rendering
  387. Array<VkDescriptorSetLayout> layouts(cFrameCount, mDescriptorSetLayoutUBO);
  388. VkDescriptorSetAllocateInfo descriptor_set_alloc_info = {};
  389. descriptor_set_alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
  390. descriptor_set_alloc_info.descriptorPool = mDescriptorPool;
  391. descriptor_set_alloc_info.descriptorSetCount = cFrameCount;
  392. descriptor_set_alloc_info.pSetLayouts = layouts.data();
  393. FatalErrorIfFailed(vkAllocateDescriptorSets(mDevice, &descriptor_set_alloc_info, mDescriptorSets));
  394. for (uint i = 0; i < cFrameCount; i++)
  395. {
  396. VkDescriptorBufferInfo vs_buffer_info = {};
  397. vs_buffer_info.buffer = mVertexShaderConstantBufferProjection[i]->GetBuffer();
  398. vs_buffer_info.range = sizeof(VertexShaderConstantBuffer);
  399. VkDescriptorBufferInfo ps_buffer_info = {};
  400. ps_buffer_info.buffer = mPixelShaderConstantBuffer[i]->GetBuffer();
  401. ps_buffer_info.range = sizeof(PixelShaderConstantBuffer);
  402. VkWriteDescriptorSet descriptor_write[2] = {};
  403. descriptor_write[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
  404. descriptor_write[0].dstSet = mDescriptorSets[i];
  405. descriptor_write[0].dstBinding = 0;
  406. descriptor_write[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
  407. descriptor_write[0].descriptorCount = 1;
  408. descriptor_write[0].pBufferInfo = &vs_buffer_info;
  409. descriptor_write[1].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
  410. descriptor_write[1].dstSet = mDescriptorSets[i];
  411. descriptor_write[1].dstBinding = 1;
  412. descriptor_write[1].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
  413. descriptor_write[1].descriptorCount = 1;
  414. descriptor_write[1].pBufferInfo = &ps_buffer_info;
  415. vkUpdateDescriptorSets(mDevice, 2, descriptor_write, 0, nullptr);
  416. }
  417. // Allocate descriptor sets for 2d rendering
  418. FatalErrorIfFailed(vkAllocateDescriptorSets(mDevice, &descriptor_set_alloc_info, mDescriptorSetsOrtho));
  419. for (uint i = 0; i < cFrameCount; i++)
  420. {
  421. VkDescriptorBufferInfo vs_buffer_info = {};
  422. vs_buffer_info.buffer = mVertexShaderConstantBufferOrtho[i]->GetBuffer();
  423. vs_buffer_info.range = sizeof(VertexShaderConstantBuffer);
  424. VkWriteDescriptorSet descriptor_write = {};
  425. descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
  426. descriptor_write.dstSet = mDescriptorSetsOrtho[i];
  427. descriptor_write.dstBinding = 0;
  428. descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
  429. descriptor_write.descriptorCount = 1;
  430. descriptor_write.pBufferInfo = &vs_buffer_info;
  431. vkUpdateDescriptorSets(mDevice, 1, &descriptor_write, 0, nullptr);
  432. }
  433. // Create regular texture sampler
  434. VkSamplerCreateInfo sampler_info = {};
  435. sampler_info.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
  436. sampler_info.magFilter = VK_FILTER_LINEAR;
  437. sampler_info.minFilter = VK_FILTER_LINEAR;
  438. sampler_info.addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT;
  439. sampler_info.addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT;
  440. sampler_info.addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT;
  441. sampler_info.borderColor = VK_BORDER_COLOR_INT_OPAQUE_BLACK;
  442. sampler_info.unnormalizedCoordinates = VK_FALSE;
  443. sampler_info.minLod = 0.0f;
  444. sampler_info.maxLod = VK_LOD_CLAMP_NONE;
  445. sampler_info.mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST;
  446. FatalErrorIfFailed(vkCreateSampler(mDevice, &sampler_info, nullptr, &mTextureSamplerRepeat));
  447. // Create sampler for shadow maps
  448. sampler_info.addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
  449. sampler_info.addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
  450. sampler_info.addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
  451. FatalErrorIfFailed(vkCreateSampler(mDevice, &sampler_info, nullptr, &mTextureSamplerShadow));
  452. {
  453. // Create shadow render pass
  454. VkAttachmentDescription shadowmap_attachment = {};
  455. shadowmap_attachment.format = FindDepthFormat();
  456. shadowmap_attachment.samples = VK_SAMPLE_COUNT_1_BIT;
  457. shadowmap_attachment.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
  458. shadowmap_attachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
  459. shadowmap_attachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  460. shadowmap_attachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  461. shadowmap_attachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
  462. shadowmap_attachment.finalLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
  463. VkAttachmentReference shadowmap_attachment_ref = {};
  464. shadowmap_attachment_ref.attachment = 0;
  465. shadowmap_attachment_ref.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
  466. VkSubpassDescription subpass_shadow = {};
  467. subpass_shadow.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
  468. subpass_shadow.pDepthStencilAttachment = &shadowmap_attachment_ref;
  469. VkSubpassDependency dependencies_shadow = {};
  470. dependencies_shadow.srcSubpass = VK_SUBPASS_EXTERNAL;
  471. dependencies_shadow.dstSubpass = 0;
  472. dependencies_shadow.srcStageMask = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
  473. dependencies_shadow.srcAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
  474. dependencies_shadow.dstStageMask = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT;
  475. dependencies_shadow.dstAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
  476. VkRenderPassCreateInfo render_pass_shadow = {};
  477. render_pass_shadow.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
  478. render_pass_shadow.attachmentCount = 1;
  479. render_pass_shadow.pAttachments = &shadowmap_attachment;
  480. render_pass_shadow.subpassCount = 1;
  481. render_pass_shadow.pSubpasses = &subpass_shadow;
  482. render_pass_shadow.dependencyCount = 1;
  483. render_pass_shadow.pDependencies = &dependencies_shadow;
  484. FatalErrorIfFailed(vkCreateRenderPass(mDevice, &render_pass_shadow, nullptr, &mRenderPassShadow));
  485. }
  486. // Create depth only texture (no color buffer, as seen from light)
  487. mShadowMap = new TextureVK(this, cShadowMapSize, cShadowMapSize);
  488. // Create frame buffer for the shadow pass
  489. VkImageView attachments[] = { mShadowMap->GetImageView() };
  490. VkFramebufferCreateInfo frame_buffer_info = {};
  491. frame_buffer_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
  492. frame_buffer_info.renderPass = mRenderPassShadow;
  493. frame_buffer_info.attachmentCount = std::size(attachments);
  494. frame_buffer_info.pAttachments = attachments;
  495. frame_buffer_info.width = cShadowMapSize;
  496. frame_buffer_info.height = cShadowMapSize;
  497. frame_buffer_info.layers = 1;
  498. FatalErrorIfFailed(vkCreateFramebuffer(mDevice, &frame_buffer_info, nullptr, &mShadowFrameBuffer));
  499. {
  500. // Create normal render pass
  501. VkAttachmentDescription attachments_normal[2] = {};
  502. VkAttachmentDescription &color_attachment = attachments_normal[0];
  503. color_attachment.format = selected_device.mFormat.format;
  504. color_attachment.samples = VK_SAMPLE_COUNT_1_BIT;
  505. color_attachment.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
  506. color_attachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
  507. color_attachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  508. color_attachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  509. color_attachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
  510. color_attachment.finalLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
  511. VkAttachmentReference color_attachment_ref = {};
  512. color_attachment_ref.attachment = 0;
  513. color_attachment_ref.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  514. VkAttachmentDescription &depth_attachment = attachments_normal[1];
  515. depth_attachment.format = FindDepthFormat();
  516. depth_attachment.samples = VK_SAMPLE_COUNT_1_BIT;
  517. depth_attachment.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
  518. depth_attachment.storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  519. depth_attachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  520. depth_attachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  521. depth_attachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
  522. depth_attachment.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
  523. VkAttachmentReference depth_attachment_ref = {};
  524. depth_attachment_ref.attachment = 1;
  525. depth_attachment_ref.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
  526. VkSubpassDescription subpass_normal = {};
  527. subpass_normal.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
  528. subpass_normal.colorAttachmentCount = 1;
  529. subpass_normal.pColorAttachments = &color_attachment_ref;
  530. subpass_normal.pDepthStencilAttachment = &depth_attachment_ref;
  531. VkSubpassDependency dependencies_normal = {};
  532. dependencies_normal.srcSubpass = VK_SUBPASS_EXTERNAL;
  533. dependencies_normal.dstSubpass = 0;
  534. dependencies_normal.srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
  535. dependencies_normal.srcAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
  536. dependencies_normal.dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
  537. dependencies_normal.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_SHADER_READ_BIT;
  538. VkRenderPassCreateInfo render_pass_normal = {};
  539. render_pass_normal.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
  540. render_pass_normal.attachmentCount = std::size(attachments_normal);
  541. render_pass_normal.pAttachments = attachments_normal;
  542. render_pass_normal.subpassCount = 1;
  543. render_pass_normal.pSubpasses = &subpass_normal;
  544. render_pass_normal.dependencyCount = 1;
  545. render_pass_normal.pDependencies = &dependencies_normal;
  546. FatalErrorIfFailed(vkCreateRenderPass(mDevice, &render_pass_normal, nullptr, &mRenderPass));
  547. }
  548. // Create the swap chain
  549. CreateSwapChain(mPhysicalDevice);
  550. }
  551. VkSurfaceFormatKHR RendererVK::SelectFormat(VkPhysicalDevice inDevice)
  552. {
  553. uint32 format_count;
  554. vkGetPhysicalDeviceSurfaceFormatsKHR(inDevice, mSurface, &format_count, nullptr);
  555. if (format_count == 0)
  556. return { VK_FORMAT_UNDEFINED, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR };
  557. Array<VkSurfaceFormatKHR> formats;
  558. formats.resize(format_count);
  559. vkGetPhysicalDeviceSurfaceFormatsKHR(inDevice, mSurface, &format_count, formats.data());
  560. // Select BGRA8 UNORM format if available, otherwise the 1st format
  561. for (const VkSurfaceFormatKHR &format : formats)
  562. if (format.format == VK_FORMAT_B8G8R8A8_UNORM && format.colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR)
  563. return format;
  564. return formats[0];
  565. }
  566. VkFormat RendererVK::FindDepthFormat()
  567. {
  568. VkFormat candidates[] = { VK_FORMAT_D32_SFLOAT, VK_FORMAT_D32_SFLOAT_S8_UINT, VK_FORMAT_D24_UNORM_S8_UINT };
  569. for (VkFormat format : candidates)
  570. {
  571. VkFormatProperties props;
  572. vkGetPhysicalDeviceFormatProperties(mPhysicalDevice, format, &props);
  573. if ((props.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) == VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)
  574. return format;
  575. }
  576. FatalError("Failed to find format!");
  577. }
  578. void RendererVK::CreateSwapChain(VkPhysicalDevice inDevice)
  579. {
  580. // Select the format
  581. VkSurfaceFormatKHR format = SelectFormat(inDevice);
  582. mSwapChainImageFormat = format.format;
  583. // Determine swap chain extent
  584. VkSurfaceCapabilitiesKHR capabilities;
  585. vkGetPhysicalDeviceSurfaceCapabilitiesKHR(inDevice, mSurface, &capabilities);
  586. mSwapChainExtent = capabilities.currentExtent;
  587. if (mSwapChainExtent.width == UINT32_MAX || mSwapChainExtent.height == UINT32_MAX)
  588. mSwapChainExtent = { uint32(mWindow->GetWindowWidth()), uint32(mWindow->GetWindowHeight()) };
  589. mSwapChainExtent.width = Clamp(mSwapChainExtent.width, capabilities.minImageExtent.width, capabilities.maxImageExtent.width);
  590. mSwapChainExtent.height = Clamp(mSwapChainExtent.height, capabilities.minImageExtent.height, capabilities.maxImageExtent.height);
  591. // Early out if our window has been minimized
  592. if (mSwapChainExtent.width == 0 || mSwapChainExtent.height == 0)
  593. return;
  594. // Create the swap chain
  595. uint32 desired_image_count = max(min(capabilities.minImageCount + 1, capabilities.maxImageCount), capabilities.minImageCount);
  596. VkSwapchainCreateInfoKHR swapchain_create_info = {};
  597. swapchain_create_info.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
  598. swapchain_create_info.surface = mSurface;
  599. swapchain_create_info.minImageCount = desired_image_count;
  600. swapchain_create_info.imageFormat = format.format;
  601. swapchain_create_info.imageColorSpace = format.colorSpace;
  602. swapchain_create_info.imageExtent = mSwapChainExtent;
  603. swapchain_create_info.imageArrayLayers = 1;
  604. swapchain_create_info.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
  605. uint32 queue_family_indices[] = { mGraphicsQueueIndex, mPresentQueueIndex };
  606. if (mGraphicsQueueIndex != mPresentQueueIndex)
  607. {
  608. swapchain_create_info.imageSharingMode = VK_SHARING_MODE_CONCURRENT;
  609. swapchain_create_info.queueFamilyIndexCount = 2;
  610. swapchain_create_info.pQueueFamilyIndices = queue_family_indices;
  611. }
  612. else
  613. {
  614. swapchain_create_info.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
  615. }
  616. swapchain_create_info.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
  617. swapchain_create_info.preTransform = capabilities.currentTransform;
  618. swapchain_create_info.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
  619. swapchain_create_info.presentMode = VK_PRESENT_MODE_FIFO_KHR;
  620. swapchain_create_info.clipped = VK_TRUE;
  621. FatalErrorIfFailed(vkCreateSwapchainKHR(mDevice, &swapchain_create_info, nullptr, &mSwapChain));
  622. // Get the actual swap chain image count
  623. uint32 image_count;
  624. FatalErrorIfFailed(vkGetSwapchainImagesKHR(mDevice, mSwapChain, &image_count, nullptr));
  625. // Get the swap chain images
  626. mSwapChainImages.resize(image_count);
  627. FatalErrorIfFailed(vkGetSwapchainImagesKHR(mDevice, mSwapChain, &image_count, mSwapChainImages.data()));
  628. // Create image views
  629. mSwapChainImageViews.resize(image_count);
  630. for (uint32 i = 0; i < image_count; ++i)
  631. mSwapChainImageViews[i] = CreateImageView(mSwapChainImages[i], mSwapChainImageFormat, VK_IMAGE_ASPECT_COLOR_BIT);
  632. // Create depth buffer
  633. VkFormat depth_format = FindDepthFormat();
  634. VkImageUsageFlags depth_usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
  635. VkMemoryPropertyFlags depth_memory_properties = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
  636. // Test and utilize support for transient memory for the depth buffer
  637. VkImageFormatProperties depth_transient_properties = {};
  638. VkResult depth_transient_support = vkGetPhysicalDeviceImageFormatProperties(mPhysicalDevice, depth_format, VK_IMAGE_TYPE_2D, VK_IMAGE_TILING_OPTIMAL, depth_usage | VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, 0, &depth_transient_properties);
  639. if (depth_transient_support == VK_SUCCESS)
  640. {
  641. depth_usage |= VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT;
  642. // Test and utilize lazily allocated memory for the depth buffer
  643. for (size_t i = 0; i < mMemoryProperties.memoryTypeCount; i++)
  644. if (mMemoryProperties.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT)
  645. {
  646. depth_memory_properties = VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
  647. break;
  648. }
  649. }
  650. CreateImage(mSwapChainExtent.width, mSwapChainExtent.height, depth_format, VK_IMAGE_TILING_OPTIMAL, depth_usage, depth_memory_properties, mDepthImage, mDepthImageMemory);
  651. mDepthImageView = CreateImageView(mDepthImage, depth_format, VK_IMAGE_ASPECT_DEPTH_BIT);
  652. // Create frame buffers for the normal pass
  653. mSwapChainFramebuffers.resize(image_count);
  654. for (size_t i = 0; i < mSwapChainFramebuffers.size(); i++)
  655. {
  656. VkImageView attachments[] = { mSwapChainImageViews[i], mDepthImageView };
  657. VkFramebufferCreateInfo frame_buffer_info = {};
  658. frame_buffer_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
  659. frame_buffer_info.renderPass = mRenderPass;
  660. frame_buffer_info.attachmentCount = std::size(attachments);
  661. frame_buffer_info.pAttachments = attachments;
  662. frame_buffer_info.width = mSwapChainExtent.width;
  663. frame_buffer_info.height = mSwapChainExtent.height;
  664. frame_buffer_info.layers = 1;
  665. FatalErrorIfFailed(vkCreateFramebuffer(mDevice, &frame_buffer_info, nullptr, &mSwapChainFramebuffers[i]));
  666. }
  667. }
  668. void RendererVK::DestroySwapChain()
  669. {
  670. // Destroy depth buffer
  671. if (mDepthImageView != VK_NULL_HANDLE)
  672. {
  673. vkDestroyImageView(mDevice, mDepthImageView, nullptr);
  674. DestroyImage(mDepthImage, mDepthImageMemory);
  675. }
  676. for (VkFramebuffer frame_buffer : mSwapChainFramebuffers)
  677. vkDestroyFramebuffer(mDevice, frame_buffer, nullptr);
  678. mSwapChainFramebuffers.clear();
  679. for (VkImageView view : mSwapChainImageViews)
  680. vkDestroyImageView(mDevice, view, nullptr);
  681. mSwapChainImageViews.clear();
  682. if (mSwapChain != nullptr)
  683. {
  684. vkDestroySwapchainKHR(mDevice, mSwapChain, nullptr);
  685. mSwapChain = nullptr;
  686. }
  687. }
// The swap chain is tied to the window size, so recreate it on resize.
// Wait until the GPU has finished using all swap chain resources before tearing them down.
void RendererVK::OnWindowResize()
{
	vkDeviceWaitIdle(mDevice);
	DestroySwapChain();
	CreateSwapChain(mPhysicalDevice);
}
// Start a new frame: acquire the next swap chain image, recycle per-frame resources,
// upload the shader constants and begin recording the shadow pass.
void RendererVK::BeginFrame(const CameraState &inCamera, float inWorldScale)
{
	JPH_PROFILE_FUNCTION();

	Renderer::BeginFrame(inCamera, inWorldScale);

	// If we have no swap chain, bail out (window is minimized)
	if (mSwapChain == nullptr)
		return;

	// Update frame index (cycles through the frames-in-flight slots)
	mFrameIndex = (mFrameIndex + 1) % cFrameCount;

	// Wait for this frame slot's previous submission to complete so its resources can be reused
	vkWaitForFences(mDevice, 1, &mInFlightFences[mFrameIndex], VK_TRUE, UINT64_MAX);
	VkResult result = vkAcquireNextImageKHR(mDevice, mSwapChain, UINT64_MAX, mImageAvailableSemaphores[mFrameIndex], VK_NULL_HANDLE, &mImageIndex);
	if (result == VK_ERROR_OUT_OF_DATE_KHR || result == VK_SUBOPTIMAL_KHR)
	{
		// Swap chain no longer matches the surface (e.g. after a resize): recreate it and retry the acquire once
		vkDeviceWaitIdle(mDevice);
		DestroySwapChain();
		CreateSwapChain(mPhysicalDevice);
		if (mSwapChain == nullptr)
			return;
		result = vkAcquireNextImageKHR(mDevice, mSwapChain, UINT64_MAX, mImageAvailableSemaphores[mFrameIndex], VK_NULL_HANDLE, &mImageIndex);
	}
	FatalErrorIfFailed(result);

	// Free buffers that weren't used this frame
	for (BufferCache::value_type &vt : mBufferCache)
		for (BufferVK &bvk : vt.second)
			FreeBufferInternal(bvk);
	mBufferCache.clear();

	// Recycle the buffers that were freed (they are safe to reuse now that this frame slot's fence has signaled)
	mBufferCache.swap(mFreedBuffers[mFrameIndex]);

	// Re-arm the fence; it is signaled again by the queue submit in EndFrame
	vkResetFences(mDevice, 1, &mInFlightFences[mFrameIndex]);

	// Begin recording this frame's command buffer (recorded once, submitted once)
	VkCommandBuffer command_buffer = GetCommandBuffer();
	FatalErrorIfFailed(vkResetCommandBuffer(command_buffer, 0));
	VkCommandBufferBeginInfo command_buffer_begin_info = {};
	command_buffer_begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
	command_buffer_begin_info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
	FatalErrorIfFailed(vkBeginCommandBuffer(command_buffer, &command_buffer_begin_info));

	// Begin the shadow pass (depth cleared to 0, consistent with the reverse-Z convention used in EndShadowPass)
	VkClearValue clear_value;
	clear_value.depthStencil = { 0.0f, 0 };
	VkRenderPassBeginInfo render_pass_begin_info = {};
	render_pass_begin_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
	render_pass_begin_info.renderPass = mRenderPassShadow;
	render_pass_begin_info.framebuffer = mShadowFrameBuffer;
	render_pass_begin_info.renderArea.extent = { cShadowMapSize, cShadowMapSize };
	render_pass_begin_info.clearValueCount = 1;
	render_pass_begin_info.pClearValues = &clear_value;
	vkCmdBeginRenderPass(command_buffer, &render_pass_begin_info, VK_SUBPASS_CONTENTS_INLINE);

	// Set constants for vertex shader in projection mode
	VertexShaderConstantBuffer *vs = mVertexShaderConstantBufferProjection[mFrameIndex]->Map<VertexShaderConstantBuffer>();
	*vs = mVSBuffer;
	mVertexShaderConstantBufferProjection[mFrameIndex]->Unmap();

	// Set constants for vertex shader in ortho mode
	vs = mVertexShaderConstantBufferOrtho[mFrameIndex]->Map<VertexShaderConstantBuffer>();
	*vs = mVSBufferOrtho;
	mVertexShaderConstantBufferOrtho[mFrameIndex]->Unmap();

	// Set constants for pixel shader
	PixelShaderConstantBuffer *ps = mPixelShaderConstantBuffer[mFrameIndex]->Map<PixelShaderConstantBuffer>();
	*ps = mPSBuffer;
	mPixelShaderConstantBuffer[mFrameIndex]->Unmap();

	// Set the view port and scissor rect to the shadow map size
	UpdateViewPortAndScissorRect(cShadowMapSize, cShadowMapSize);

	// Switch to 3d projection mode
	SetProjectionMode();
}
// Finish the shadow map render pass and begin the normal (on-screen) render pass.
// Called between BeginFrame (which started the shadow pass) and EndFrame (which ends the normal pass).
void RendererVK::EndShadowPass()
{
	VkCommandBuffer command_buffer = GetCommandBuffer();

	// End the shadow pass
	vkCmdEndRenderPass(command_buffer);

	// Begin the normal render pass; clear values match the attachment order in the render pass (0 = color, 1 = depth)
	VkClearValue clear_values[2];
	clear_values[0].color = {{ 0.098f, 0.098f, 0.439f, 1.000f }};
	clear_values[1].depthStencil = { 0.0f, 0 }; // Reverse-Z clears to 0
	VkRenderPassBeginInfo render_pass_begin_info = {};
	render_pass_begin_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
	render_pass_begin_info.renderPass = mRenderPass;
	JPH_ASSERT(mImageIndex < mSwapChainFramebuffers.size());
	render_pass_begin_info.framebuffer = mSwapChainFramebuffers[mImageIndex]; // mImageIndex was acquired in BeginFrame
	render_pass_begin_info.renderArea.extent = mSwapChainExtent;
	render_pass_begin_info.clearValueCount = std::size(clear_values);
	render_pass_begin_info.pClearValues = clear_values;
	vkCmdBeginRenderPass(command_buffer, &render_pass_begin_info, VK_SUBPASS_CONTENTS_INLINE);

	// Set the view port and scissor rect to the screen size (BeginFrame had set them to the shadow map size)
	UpdateViewPortAndScissorRect(mSwapChainExtent.width, mSwapChainExtent.height);
}
// Finish the frame: end the normal render pass, submit the command buffer and present the image.
void RendererVK::EndFrame()
{
	JPH_PROFILE_FUNCTION();

	// If we have no swap chain, bail out (BeginFrame recorded nothing in that case)
	if (mSwapChain == nullptr)
	{
		Renderer::EndFrame();
		return;
	}

	// Close the command buffer that BeginFrame started recording
	VkCommandBuffer command_buffer = GetCommandBuffer();
	vkCmdEndRenderPass(command_buffer);
	FatalErrorIfFailed(vkEndCommandBuffer(command_buffer));

	// Submit: wait for the acquired image to be available before writing color output,
	// signal the render-finished semaphore for present and the fence for CPU frame pacing
	VkSemaphore wait_semaphores[] = { mImageAvailableSemaphores[mFrameIndex] };
	VkSemaphore signal_semaphores[] = { mRenderFinishedSemaphores[mFrameIndex] };
	VkPipelineStageFlags wait_stages[] = { VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT };
	VkSubmitInfo submit_info = {};
	submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
	submit_info.waitSemaphoreCount = 1;
	submit_info.pWaitSemaphores = wait_semaphores;
	submit_info.pWaitDstStageMask = wait_stages;
	submit_info.commandBufferCount = 1;
	submit_info.pCommandBuffers = &command_buffer;
	submit_info.signalSemaphoreCount = 1;
	submit_info.pSignalSemaphores = signal_semaphores;
	FatalErrorIfFailed(vkQueueSubmit(mGraphicsQueue, 1, &submit_info, mInFlightFences[mFrameIndex]));

	// Present the image once rendering has finished.
	// The result is deliberately not fatal-checked here: out-of-date / suboptimal swap chains
	// are detected and handled at acquire time in BeginFrame.
	VkSwapchainKHR swap_chains[] = { mSwapChain };
	VkPresentInfoKHR present_info = {};
	present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
	present_info.waitSemaphoreCount = 1;
	present_info.pWaitSemaphores = signal_semaphores;
	present_info.swapchainCount = 1;
	present_info.pSwapchains = swap_chains;
	present_info.pImageIndices = &mImageIndex;
	vkQueuePresentKHR(mPresentQueue, &present_info);

	Renderer::EndFrame();
}
  815. void RendererVK::SetProjectionMode()
  816. {
  817. JPH_ASSERT(mInFrame);
  818. // Bind descriptor set for 3d rendering
  819. vkCmdBindDescriptorSets(GetCommandBuffer(), VK_PIPELINE_BIND_POINT_GRAPHICS, mPipelineLayout, 0, 1, &mDescriptorSets[mFrameIndex], 0, nullptr);
  820. }
  821. void RendererVK::SetOrthoMode()
  822. {
  823. JPH_ASSERT(mInFrame);
  824. // Bind descriptor set for 2d rendering
  825. vkCmdBindDescriptorSets(GetCommandBuffer(), VK_PIPELINE_BIND_POINT_GRAPHICS, mPipelineLayout, 0, 1, &mDescriptorSetsOrtho[mFrameIndex], 0, nullptr);
  826. }
  827. Ref<Texture> RendererVK::CreateTexture(const Surface *inSurface)
  828. {
  829. return new TextureVK(this, inSurface);
  830. }
  831. Ref<VertexShader> RendererVK::CreateVertexShader(const char *inFileName)
  832. {
  833. Array<uint8> data = ReadData((String(inFileName) + ".vert.spv").c_str());
  834. VkShaderModuleCreateInfo create_info = {};
  835. create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
  836. create_info.codeSize = data.size();
  837. create_info.pCode = reinterpret_cast<const uint32 *>(data.data());
  838. VkShaderModule shader_module;
  839. FatalErrorIfFailed(vkCreateShaderModule(mDevice, &create_info, nullptr, &shader_module));
  840. return new VertexShaderVK(mDevice, shader_module);
  841. }
  842. Ref<PixelShader> RendererVK::CreatePixelShader(const char *inFileName)
  843. {
  844. Array<uint8> data = ReadData((String(inFileName) + ".frag.spv").c_str());
  845. VkShaderModuleCreateInfo create_info = {};
  846. create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
  847. create_info.codeSize = data.size();
  848. create_info.pCode = reinterpret_cast<const uint32 *>(data.data());
  849. VkShaderModule shader_module;
  850. FatalErrorIfFailed(vkCreateShaderModule(mDevice, &create_info, nullptr, &shader_module));
  851. return new PixelShaderVK(mDevice, shader_module);
  852. }
  853. unique_ptr<PipelineState> RendererVK::CreatePipelineState(const VertexShader *inVertexShader, const PipelineState::EInputDescription *inInputDescription, uint inInputDescriptionCount, const PixelShader *inPixelShader, PipelineState::EDrawPass inDrawPass, PipelineState::EFillMode inFillMode, PipelineState::ETopology inTopology, PipelineState::EDepthTest inDepthTest, PipelineState::EBlendMode inBlendMode, PipelineState::ECullMode inCullMode)
  854. {
  855. return make_unique<PipelineStateVK>(this, static_cast<const VertexShaderVK *>(inVertexShader), inInputDescription, inInputDescriptionCount, static_cast<const PixelShaderVK *>(inPixelShader), inDrawPass, inFillMode, inTopology, inDepthTest, inBlendMode, inCullMode);
  856. }
  857. RenderPrimitive *RendererVK::CreateRenderPrimitive(PipelineState::ETopology inType)
  858. {
  859. return new RenderPrimitiveVK(this);
  860. }
  861. RenderInstances *RendererVK::CreateRenderInstances()
  862. {
  863. return new RenderInstancesVK(this);
  864. }
  865. uint32 RendererVK::FindMemoryType(uint32 inTypeFilter, VkMemoryPropertyFlags inProperties)
  866. {
  867. for (uint32 i = 0; i < mMemoryProperties.memoryTypeCount; i++)
  868. if ((inTypeFilter & (1 << i))
  869. && (mMemoryProperties.memoryTypes[i].propertyFlags & inProperties) == inProperties)
  870. return i;
  871. FatalError("Failed to find memory type!");
  872. }
  873. void RendererVK::AllocateMemory(VkDeviceSize inSize, uint32 inMemoryTypeBits, VkMemoryPropertyFlags inProperties, VkDeviceMemory &outMemory)
  874. {
  875. VkMemoryAllocateInfo alloc_info = {};
  876. alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
  877. alloc_info.allocationSize = inSize;
  878. alloc_info.memoryTypeIndex = FindMemoryType(inMemoryTypeBits, inProperties);
  879. FatalErrorIfFailed(vkAllocateMemory(mDevice, &alloc_info, nullptr, &outMemory));
  880. // Track allocation
  881. ++mNumAllocations;
  882. mTotalAllocated += inSize;
  883. // Track max usage
  884. mMaxTotalAllocated = max(mMaxTotalAllocated, mTotalAllocated);
  885. mMaxNumAllocations = max(mMaxNumAllocations, mNumAllocations);
  886. }
  887. void RendererVK::FreeMemory(VkDeviceMemory inMemory, VkDeviceSize inSize)
  888. {
  889. vkFreeMemory(mDevice, inMemory, nullptr);
  890. // Track free
  891. --mNumAllocations;
  892. mTotalAllocated -= inSize;
  893. }
// Create (or recycle) a buffer of the requested size / usage / memory properties.
// Small allocations are sub-allocated from pooled blocks; large allocations get a dedicated allocation.
void RendererVK::CreateBuffer(VkDeviceSize inSize, VkBufferUsageFlags inUsage, VkMemoryPropertyFlags inProperties, BufferVK &outBuffer)
{
	// Check the cache: a buffer with identical size/usage/properties freed earlier can be reused as-is
	BufferCache::iterator i = mBufferCache.find({ inSize, inUsage, inProperties });
	if (i != mBufferCache.end() && !i->second.empty())
	{
		outBuffer = i->second.back();
		i->second.pop_back();
		return;
	}

	// Create a new buffer; remember the creation parameters so FreeBuffer can bucket it later
	outBuffer.mSize = inSize;
	outBuffer.mUsage = inUsage;
	outBuffer.mProperties = inProperties;
	VkBufferCreateInfo create_info = {};
	create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
	create_info.size = inSize;
	create_info.usage = inUsage;
	create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
	FatalErrorIfFailed(vkCreateBuffer(mDevice, &create_info, nullptr, &outBuffer.mBuffer));

	// Query how much memory the buffer actually needs (may exceed inSize due to alignment)
	VkMemoryRequirements mem_requirements;
	vkGetBufferMemoryRequirements(mDevice, outBuffer.mBuffer, &mem_requirements);
	if (mem_requirements.size > cMaxAllocSize)
	{
		// Allocate block directly (too big for the pooled allocator)
		AllocateMemory(mem_requirements.size, mem_requirements.memoryTypeBits, inProperties, outBuffer.mMemory);
		outBuffer.mAllocatedSize = mem_requirements.size;
		outBuffer.mOffset = 0;
	}
	else
	{
		// Round allocation to the next power of 2 so that we can use a simple block based allocator
		outBuffer.mAllocatedSize = max(VkDeviceSize(GetNextPowerOf2(uint32(mem_requirements.size))), cMinAllocSize);

		// Ensure that we have memory available from the right pool
		Array<Memory> &mem_array = mMemoryCache[{ outBuffer.mAllocatedSize, outBuffer.mUsage, outBuffer.mProperties }];
		if (mem_array.empty())
		{
			// Allocate a bigger block
			VkDeviceMemory device_memory;
			AllocateMemory(cBlockSize, mem_requirements.memoryTypeBits, inProperties, device_memory);

			// Divide into sub blocks of mAllocatedSize each
			for (VkDeviceSize offset = 0; offset < cBlockSize; offset += outBuffer.mAllocatedSize)
				mem_array.push_back({ device_memory, offset });
		}

		// Claim memory from the pool
		Memory &memory = mem_array.back();
		outBuffer.mMemory = memory.mMemory;
		outBuffer.mOffset = memory.mOffset;
		mem_array.pop_back();
	}

	// Bind the memory to the buffer
	vkBindBufferMemory(mDevice, outBuffer.mBuffer, outBuffer.mMemory, outBuffer.mOffset);
}
  947. VkCommandBuffer RendererVK::StartTempCommandBuffer()
  948. {
  949. VkCommandBufferAllocateInfo alloc_info = {};
  950. alloc_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
  951. alloc_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
  952. alloc_info.commandPool = mCommandPool;
  953. alloc_info.commandBufferCount = 1;
  954. VkCommandBuffer command_buffer;
  955. vkAllocateCommandBuffers(mDevice, &alloc_info, &command_buffer);
  956. VkCommandBufferBeginInfo begin_info = {};
  957. begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
  958. begin_info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
  959. vkBeginCommandBuffer(command_buffer, &begin_info);
  960. return command_buffer;
  961. }
  962. void RendererVK::EndTempCommandBuffer(VkCommandBuffer inCommandBuffer)
  963. {
  964. vkEndCommandBuffer(inCommandBuffer);
  965. VkSubmitInfo submit_info = {};
  966. submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
  967. submit_info.commandBufferCount = 1;
  968. submit_info.pCommandBuffers = &inCommandBuffer;
  969. vkQueueSubmit(mGraphicsQueue, 1, &submit_info, VK_NULL_HANDLE);
  970. vkQueueWaitIdle(mGraphicsQueue); // Inefficient, but we only use this during initialization
  971. vkFreeCommandBuffers(mDevice, mCommandPool, 1, &inCommandBuffer);
  972. }
  973. void RendererVK::CopyBuffer(VkBuffer inSrc, VkBuffer inDst, VkDeviceSize inSize)
  974. {
  975. VkCommandBuffer command_buffer = StartTempCommandBuffer();
  976. VkBufferCopy region = {};
  977. region.size = inSize;
  978. vkCmdCopyBuffer(command_buffer, inSrc, inDst, 1, &region);
  979. EndTempCommandBuffer(command_buffer);
  980. }
  981. void RendererVK::CreateDeviceLocalBuffer(const void *inData, VkDeviceSize inSize, VkBufferUsageFlags inUsage, BufferVK &outBuffer)
  982. {
  983. BufferVK staging_buffer;
  984. CreateBuffer(inSize, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, staging_buffer);
  985. void *data;
  986. vkMapMemory(mDevice, staging_buffer.mMemory, staging_buffer.mOffset, inSize, 0, &data);
  987. memcpy(data, inData, (size_t)inSize);
  988. vkUnmapMemory(mDevice, staging_buffer.mMemory);
  989. CreateBuffer(inSize, inUsage | VK_BUFFER_USAGE_TRANSFER_DST_BIT, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, outBuffer);
  990. CopyBuffer(staging_buffer.mBuffer, outBuffer.mBuffer, inSize);
  991. FreeBuffer(staging_buffer);
  992. }
  993. void RendererVK::FreeBuffer(BufferVK &ioBuffer)
  994. {
  995. if (ioBuffer.mBuffer != VK_NULL_HANDLE)
  996. {
  997. JPH_ASSERT(mFrameIndex < cFrameCount);
  998. mFreedBuffers[mFrameIndex][{ ioBuffer.mSize, ioBuffer.mUsage, ioBuffer.mProperties }].push_back(ioBuffer);
  999. }
  1000. }
  1001. void RendererVK::FreeBufferInternal(BufferVK &ioBuffer)
  1002. {
  1003. // Destroy the buffer
  1004. vkDestroyBuffer(mDevice, ioBuffer.mBuffer, nullptr);
  1005. ioBuffer.mBuffer = VK_NULL_HANDLE;
  1006. if (ioBuffer.mAllocatedSize > cMaxAllocSize)
  1007. FreeMemory(ioBuffer.mMemory, ioBuffer.mAllocatedSize);
  1008. else
  1009. mMemoryCache[{ ioBuffer.mAllocatedSize, ioBuffer.mUsage, ioBuffer.mProperties }].push_back({ ioBuffer.mMemory, ioBuffer.mOffset });
  1010. ioBuffer.mMemory = VK_NULL_HANDLE;
  1011. }
  1012. unique_ptr<ConstantBufferVK> RendererVK::CreateConstantBuffer(VkDeviceSize inBufferSize)
  1013. {
  1014. return make_unique<ConstantBufferVK>(this, inBufferSize);
  1015. }
  1016. VkImageView RendererVK::CreateImageView(VkImage inImage, VkFormat inFormat, VkImageAspectFlags inAspectFlags)
  1017. {
  1018. VkImageViewCreateInfo view_info = {};
  1019. view_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
  1020. view_info.image = inImage;
  1021. view_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
  1022. view_info.format = inFormat;
  1023. view_info.subresourceRange.aspectMask = inAspectFlags;
  1024. view_info.subresourceRange.levelCount = 1;
  1025. view_info.subresourceRange.layerCount = 1;
  1026. VkImageView image_view;
  1027. FatalErrorIfFailed(vkCreateImageView(mDevice, &view_info, nullptr, &image_view));
  1028. return image_view;
  1029. }
  1030. void RendererVK::CreateImage(uint32 inWidth, uint32 inHeight, VkFormat inFormat, VkImageTiling inTiling, VkImageUsageFlags inUsage, VkMemoryPropertyFlags inProperties, VkImage &outImage, VkDeviceMemory &outMemory)
  1031. {
  1032. VkImageCreateInfo image_info = {};
  1033. image_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
  1034. image_info.imageType = VK_IMAGE_TYPE_2D;
  1035. image_info.extent.width = inWidth;
  1036. image_info.extent.height = inHeight;
  1037. image_info.extent.depth = 1;
  1038. image_info.mipLevels = 1;
  1039. image_info.arrayLayers = 1;
  1040. image_info.format = inFormat;
  1041. image_info.tiling = inTiling;
  1042. image_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
  1043. image_info.usage = inUsage;
  1044. image_info.samples = VK_SAMPLE_COUNT_1_BIT;
  1045. image_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
  1046. FatalErrorIfFailed(vkCreateImage(mDevice, &image_info, nullptr, &outImage));
  1047. VkMemoryRequirements mem_requirements;
  1048. vkGetImageMemoryRequirements(mDevice, outImage, &mem_requirements);
  1049. AllocateMemory(mem_requirements.size, mem_requirements.memoryTypeBits, inProperties, outMemory);
  1050. vkBindImageMemory(mDevice, outImage, outMemory, 0);
  1051. }
  1052. void RendererVK::DestroyImage(VkImage inImage, VkDeviceMemory inMemory)
  1053. {
  1054. VkMemoryRequirements mem_requirements;
  1055. vkGetImageMemoryRequirements(mDevice, inImage, &mem_requirements);
  1056. vkDestroyImage(mDevice, inImage, nullptr);
  1057. FreeMemory(inMemory, mem_requirements.size);
  1058. }
  1059. void RendererVK::UpdateViewPortAndScissorRect(uint32 inWidth, uint32 inHeight)
  1060. {
  1061. VkCommandBuffer command_buffer = GetCommandBuffer();
  1062. // Update the view port rect
  1063. VkViewport viewport = {};
  1064. viewport.x = 0.0f;
  1065. viewport.y = 0.0f;
  1066. viewport.width = (float)inWidth;
  1067. viewport.height = (float)inHeight;
  1068. viewport.minDepth = 0.0f;
  1069. viewport.maxDepth = 1.0f;
  1070. vkCmdSetViewport(command_buffer, 0, 1, &viewport);
  1071. // Update the scissor rect
  1072. VkRect2D scissor = {};
  1073. scissor.extent = { inWidth, inHeight };
  1074. vkCmdSetScissor(command_buffer, 0, 1, &scissor);
  1075. }