RendererVK.cpp

  1. // Jolt Physics Library (https://github.com/jrouwe/JoltPhysics)
  2. // SPDX-FileCopyrightText: 2024 Jorrit Rouwe
  3. // SPDX-License-Identifier: MIT
  4. #include <TestFramework.h>
  5. #include <Renderer/VK/RendererVK.h>
  6. #include <Renderer/VK/RenderPrimitiveVK.h>
  7. #include <Renderer/VK/RenderInstancesVK.h>
  8. #include <Renderer/VK/PipelineStateVK.h>
  9. #include <Renderer/VK/VertexShaderVK.h>
  10. #include <Renderer/VK/PixelShaderVK.h>
  11. #include <Renderer/VK/TextureVK.h>
  12. #include <Renderer/VK/FatalErrorIfFailedVK.h>
  13. #include <Utils/Log.h>
  14. #include <Utils/ReadData.h>
  15. #include <Jolt/Core/Profiler.h>
  16. #include <Jolt/Core/QuickSort.h>
  17. #include <Jolt/Core/RTTI.h>
  18. JPH_SUPPRESS_WARNINGS_STD_BEGIN
  19. #ifdef JPH_PLATFORM_WINDOWS
  20. #include <vulkan/vulkan_win32.h>
  21. #include <Window/ApplicationWindowWin.h>
  22. #elif defined(JPH_PLATFORM_LINUX)
  23. #include <vulkan/vulkan_xlib.h>
  24. #include <Window/ApplicationWindowLinux.h>
  25. #elif defined(JPH_PLATFORM_MACOS)
  26. #include <vulkan/vulkan_metal.h>
  27. #include <Window/ApplicationWindowMacOS.h>
  28. #endif
  29. JPH_SUPPRESS_WARNINGS_STD_END
  30. #ifdef JPH_DEBUG
  31. static VKAPI_ATTR VkBool32 VKAPI_CALL sVulkanDebugCallback(VkDebugUtilsMessageSeverityFlagBitsEXT inSeverity, [[maybe_unused]] VkDebugUtilsMessageTypeFlagsEXT inType, const VkDebugUtilsMessengerCallbackDataEXT *inCallbackData, [[maybe_unused]] void *inUserData)
  32. {
  33. Trace("VK: %s", inCallbackData->pMessage);
  34. JPH_ASSERT((inSeverity & VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT) == 0);
  35. return VK_FALSE;
  36. }
  37. #endif // JPH_DEBUG
  38. RendererVK::~RendererVK()
  39. {
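// Wait for the GPU to finish all outstanding work before destroying resources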
  40. vkDeviceWaitIdle(mDevice);
  41. // Trace allocation stats
  42. Trace("VK: Max allocations: %u, max size: %u MB", mMaxNumAllocations, uint32(mMaxTotalAllocated >> 20));
  43. // Destroy the shadow map
  44. mShadowMap = nullptr;
  45. vkDestroyFramebuffer(mDevice, mShadowFrameBuffer, nullptr);
  46. // Release constant buffers
  47. for (unique_ptr<ConstantBufferVK> &cb : mVertexShaderConstantBufferProjection)
  48. cb = nullptr;
  49. for (unique_ptr<ConstantBufferVK> &cb : mVertexShaderConstantBufferOrtho)
  50. cb = nullptr;
  51. for (unique_ptr<ConstantBufferVK> &cb : mPixelShaderConstantBuffer)
  52. cb = nullptr;
  53. // Free all buffers
  54. for (BufferCache &bc : mFreedBuffers)
  55. for (BufferCache::value_type &vt : bc)
  56. for (BufferVK &bvk : vt.second)
  57. FreeBufferInternal(bvk);
  58. for (BufferCache::value_type &vt : mBufferCache)
  59. for (BufferVK &bvk : vt.second)
  60. FreeBufferInternal(bvk);
  61. // Free all blocks in the memory cache
  62. for (MemoryCache::value_type &mc : mMemoryCache)
  63. for (Memory &m : mc.second)
  64. if (m.mOffset == 0)
  65. vkFreeMemory(mDevice, m.mMemory, nullptr); // Don't care about memory tracking anymore
  66. for (VkFence fence : mInFlightFences)
  67. vkDestroyFence(mDevice, fence, nullptr);
  68. vkDestroyCommandPool(mDevice, mCommandPool, nullptr);
  69. vkDestroyPipelineLayout(mDevice, mPipelineLayout, nullptr);
  70. vkDestroyRenderPass(mDevice, mRenderPassShadow, nullptr);
  71. vkDestroyRenderPass(mDevice, mRenderPass, nullptr);
  72. vkDestroyDescriptorPool(mDevice, mDescriptorPool, nullptr);
  73. vkDestroySampler(mDevice, mTextureSamplerShadow, nullptr);
  74. vkDestroySampler(mDevice, mTextureSamplerRepeat, nullptr);
  75. vkDestroyDescriptorSetLayout(mDevice, mDescriptorSetLayoutUBO, nullptr);
  76. vkDestroyDescriptorSetLayout(mDevice, mDescriptorSetLayoutTexture, nullptr);
  77. DestroySwapChain();
  78. vkDestroySurfaceKHR(mInstance, mSurface, nullptr);
  79. vkDestroyDevice(mDevice, nullptr);
  80. #ifdef JPH_DEBUG
  81. PFN_vkDestroyDebugUtilsMessengerEXT vkDestroyDebugUtilsMessengerEXT = (PFN_vkDestroyDebugUtilsMessengerEXT)(void *)vkGetInstanceProcAddr(mInstance, "vkDestroyDebugUtilsMessengerEXT");
  82. if (vkDestroyDebugUtilsMessengerEXT != nullptr)
  83. vkDestroyDebugUtilsMessengerEXT(mInstance, mDebugMessenger, nullptr);
  84. #endif
  85. vkDestroyInstance(mInstance, nullptr);
  86. }
  87. void RendererVK::Initialize(ApplicationWindow *inWindow)
  88. {
  89. Renderer::Initialize(inWindow);
  90. // Flip the sign of the projection matrix
  91. mPerspectiveYSign = -1.0f;
  92. // Required instance extensions
  93. Array<const char *> required_instance_extensions;
  94. required_instance_extensions.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
  95. #ifdef JPH_PLATFORM_WINDOWS
  96. required_instance_extensions.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
  97. #elif defined(JPH_PLATFORM_LINUX)
  98. required_instance_extensions.push_back(VK_KHR_XLIB_SURFACE_EXTENSION_NAME);
  99. #elif defined(JPH_PLATFORM_MACOS)
  100. required_instance_extensions.push_back(VK_EXT_METAL_SURFACE_EXTENSION_NAME);
  101. required_instance_extensions.push_back("VK_KHR_portability_enumeration");
  102. required_instance_extensions.push_back("VK_KHR_get_physical_device_properties2");
  103. #endif
  104. // Required device extensions
  105. Array<const char *> required_device_extensions;
  106. required_device_extensions.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
  107. #ifdef JPH_PLATFORM_MACOS
  108. required_device_extensions.push_back("VK_KHR_portability_subset"); // VK_KHR_PORTABILITY_SUBSET_EXTENSION_NAME
  109. #endif
  110. // Query supported instance extensions
  111. uint32 instance_extension_count = 0;
  112. FatalErrorIfFailed(vkEnumerateInstanceExtensionProperties(nullptr, &instance_extension_count, nullptr));
  113. Array<VkExtensionProperties> instance_extensions;
  114. instance_extensions.resize(instance_extension_count);
  115. FatalErrorIfFailed(vkEnumerateInstanceExtensionProperties(nullptr, &instance_extension_count, instance_extensions.data()));
  116. // Query supported validation layers
  117. uint32 validation_layer_count;
  118. vkEnumerateInstanceLayerProperties(&validation_layer_count, nullptr);
  119. Array<VkLayerProperties> validation_layers(validation_layer_count);
  120. vkEnumerateInstanceLayerProperties(&validation_layer_count, validation_layers.data());
  121. // Create Vulkan instance
  122. VkInstanceCreateInfo instance_create_info = {};
  123. instance_create_info.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
  124. #ifdef JPH_PLATFORM_MACOS
  125. instance_create_info.flags = VK_INSTANCE_CREATE_ENUMERATE_PORTABILITY_BIT_KHR;
  126. #endif
  127. #ifdef JPH_DEBUG
  128. // Enable validation layer if supported
  129. const char *desired_validation_layers[] = { "VK_LAYER_KHRONOS_validation" };
  130. for (const VkLayerProperties &p : validation_layers)
  131. if (strcmp(desired_validation_layers[0], p.layerName) == 0)
  132. {
  133. instance_create_info.enabledLayerCount = 1;
  134. instance_create_info.ppEnabledLayerNames = desired_validation_layers;
  135. break;
  136. }
  137. // Setup debug messenger callback if the extension is supported
  138. VkDebugUtilsMessengerCreateInfoEXT messenger_create_info = {};
  139. for (const VkExtensionProperties &ext : instance_extensions)
  140. if (strcmp(VK_EXT_DEBUG_UTILS_EXTENSION_NAME, ext.extensionName) == 0)
  141. {
  142. messenger_create_info.sType = VK_STRUCTURE_TYPE_DEBUG_UTILS_MESSENGER_CREATE_INFO_EXT;
  143. messenger_create_info.messageSeverity = VK_DEBUG_UTILS_MESSAGE_SEVERITY_VERBOSE_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_INFO_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_WARNING_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_SEVERITY_ERROR_BIT_EXT;
  144. messenger_create_info.messageType = VK_DEBUG_UTILS_MESSAGE_TYPE_GENERAL_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_VALIDATION_BIT_EXT | VK_DEBUG_UTILS_MESSAGE_TYPE_PERFORMANCE_BIT_EXT;
  145. messenger_create_info.pfnUserCallback = sVulkanDebugCallback;
  146. instance_create_info.pNext = &messenger_create_info;
  147. required_instance_extensions.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
  148. break;
  149. }
  150. #endif
  151. instance_create_info.enabledExtensionCount = (uint32)required_instance_extensions.size();
  152. instance_create_info.ppEnabledExtensionNames = required_instance_extensions.data();
  153. FatalErrorIfFailed(vkCreateInstance(&instance_create_info, nullptr, &mInstance));
  154. #ifdef JPH_DEBUG
  155. // Finalize debug messenger callback
  156. PFN_vkCreateDebugUtilsMessengerEXT vkCreateDebugUtilsMessengerEXT = (PFN_vkCreateDebugUtilsMessengerEXT)(void *)vkGetInstanceProcAddr(mInstance, "vkCreateDebugUtilsMessengerEXT");
  157. if (vkCreateDebugUtilsMessengerEXT != nullptr)
  158. FatalErrorIfFailed(vkCreateDebugUtilsMessengerEXT(mInstance, &messenger_create_info, nullptr, &mDebugMessenger));
  159. #endif
  160. // Create surface
  161. #ifdef JPH_PLATFORM_WINDOWS
  162. VkWin32SurfaceCreateInfoKHR surface_create_info = {};
  163. surface_create_info.sType = VK_STRUCTURE_TYPE_WIN32_SURFACE_CREATE_INFO_KHR;
  164. surface_create_info.hwnd = static_cast<ApplicationWindowWin *>(mWindow)->GetWindowHandle();
  165. surface_create_info.hinstance = GetModuleHandle(nullptr);
  166. FatalErrorIfFailed(vkCreateWin32SurfaceKHR(mInstance, &surface_create_info, nullptr, &mSurface));
  167. #elif defined(JPH_PLATFORM_LINUX)
  168. VkXlibSurfaceCreateInfoKHR surface_create_info = {};
  169. surface_create_info.sType = VK_STRUCTURE_TYPE_XLIB_SURFACE_CREATE_INFO_KHR;
  170. surface_create_info.dpy = static_cast<ApplicationWindowLinux *>(mWindow)->GetDisplay();
  171. surface_create_info.window = static_cast<ApplicationWindowLinux *>(mWindow)->GetWindow();
  172. FatalErrorIfFailed(vkCreateXlibSurfaceKHR(mInstance, &surface_create_info, nullptr, &mSurface));
  173. #elif defined(JPH_PLATFORM_MACOS)
  174. VkMetalSurfaceCreateInfoEXT surface_create_info = {};
  175. surface_create_info.sType = VK_STRUCTURE_TYPE_METAL_SURFACE_CREATE_INFO_EXT;
  176. surface_create_info.pNext = nullptr;
  177. surface_create_info.pLayer = static_cast<ApplicationWindowMacOS *>(mWindow)->GetMetalLayer();
  178. FatalErrorIfFailed(vkCreateMetalSurfaceEXT(mInstance, &surface_create_info, nullptr, &mSurface));
  179. #endif
  180. // Select device
  181. uint32 device_count = 0;
  182. FatalErrorIfFailed(vkEnumeratePhysicalDevices(mInstance, &device_count, nullptr));
  183. Array<VkPhysicalDevice> devices;
  184. devices.resize(device_count);
  185. FatalErrorIfFailed(vkEnumeratePhysicalDevices(mInstance, &device_count, devices.data()));
  186. struct Device
  187. {
  188. VkPhysicalDevice mPhysicalDevice;
  189. String mName;
  190. VkSurfaceFormatKHR mFormat;
  191. uint32 mGraphicsQueueIndex;
  192. uint32 mPresentQueueIndex;
  193. int mScore;
  194. };
  195. Array<Device> available_devices;
  196. for (VkPhysicalDevice device : devices)
  197. {
  198. // Get device properties
  199. VkPhysicalDeviceProperties properties;
  200. vkGetPhysicalDeviceProperties(device, &properties);
  201. // Test if it is an appropriate type
  202. int score = 0;
  203. switch (properties.deviceType)
  204. {
  205. case VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU:
  206. score = 30;
  207. break;
  208. case VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU:
  209. score = 20;
  210. break;
  211. case VK_PHYSICAL_DEVICE_TYPE_VIRTUAL_GPU:
  212. score = 10;
  213. break;
  214. case VK_PHYSICAL_DEVICE_TYPE_CPU:
  215. score = 5;
  216. break;
  217. case VK_PHYSICAL_DEVICE_TYPE_OTHER:
  218. case VK_PHYSICAL_DEVICE_TYPE_MAX_ENUM:
  219. continue;
  220. }
  221. // Check if the device supports all our required extensions
  222. uint32 device_extension_count;
  223. vkEnumerateDeviceExtensionProperties(device, nullptr, &device_extension_count, nullptr);
  224. Array<VkExtensionProperties> available_extensions;
  225. available_extensions.resize(device_extension_count);
  226. vkEnumerateDeviceExtensionProperties(device, nullptr, &device_extension_count, available_extensions.data());
  227. int found_extensions = 0;
  228. for (const char *required_device_extension : required_device_extensions)
  229. for (const VkExtensionProperties &ext : available_extensions)
  230. if (strcmp(required_device_extension, ext.extensionName) == 0)
  231. {
  232. found_extensions++;
  233. break;
  234. }
  235. if (found_extensions != int(required_device_extensions.size()))
  236. continue;
  237. // Find the right queues
  238. uint32 queue_family_count = 0;
  239. vkGetPhysicalDeviceQueueFamilyProperties(device, &queue_family_count, nullptr);
  240. Array<VkQueueFamilyProperties> queue_families;
  241. queue_families.resize(queue_family_count);
  242. vkGetPhysicalDeviceQueueFamilyProperties(device, &queue_family_count, queue_families.data());
  243. uint32 graphics_queue = ~uint32(0);
  244. uint32 present_queue = ~uint32(0);
  245. for (uint32 i = 0; i < uint32(queue_families.size()); ++i)
  246. {
  247. if (queue_families[i].queueFlags & VK_QUEUE_GRAPHICS_BIT)
  248. graphics_queue = i;
  249. VkBool32 present_support = false;
  250. vkGetPhysicalDeviceSurfaceSupportKHR(device, i, mSurface, &present_support);
  251. if (present_support)
  252. present_queue = i;
  253. if (graphics_queue != ~uint32(0) && present_queue != ~uint32(0))
  254. break;
  255. }
  256. if (graphics_queue == ~uint32(0) || present_queue == ~uint32(0))
  257. continue;
  258. // Select surface format
  259. VkSurfaceFormatKHR selected_format = SelectFormat(device);
  260. if (selected_format.format == VK_FORMAT_UNDEFINED)
  261. continue;
  262. // Add the device
  263. available_devices.push_back({ device, properties.deviceName, selected_format, graphics_queue, present_queue, score });
  264. }
  265. if (available_devices.empty())
  266. FatalError("No Vulkan device found!");
  267. QuickSort(available_devices.begin(), available_devices.end(), [](const Device &inLHS, const Device &inRHS) {
  268. return inLHS.mScore > inRHS.mScore;
  269. });
  270. const Device &selected_device = available_devices[0];
  271. Trace("Selected device: %s", selected_device.mName.c_str());
  272. mPhysicalDevice = selected_device.mPhysicalDevice;
  273. // Get memory properties
  274. vkGetPhysicalDeviceMemoryProperties(mPhysicalDevice, &mMemoryProperties);
  275. // Get features
  276. VkPhysicalDeviceFeatures physical_device_features = {};
  277. vkGetPhysicalDeviceFeatures(mPhysicalDevice, &physical_device_features);
  278. // Create device
  279. float queue_priority = 1.0f;
  280. VkDeviceQueueCreateInfo queue_create_info[2] = {};
  281. for (size_t i = 0; i < std::size(queue_create_info); ++i)
  282. {
  283. queue_create_info[i].sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
  284. queue_create_info[i].queueCount = 1;
  285. queue_create_info[i].pQueuePriorities = &queue_priority;
  286. }
  287. queue_create_info[0].queueFamilyIndex = selected_device.mGraphicsQueueIndex;
  288. queue_create_info[1].queueFamilyIndex = selected_device.mPresentQueueIndex;
  289. VkPhysicalDeviceFeatures device_features = {};
  290. if (!physical_device_features.fillModeNonSolid)
  291. FatalError("fillModeNonSolid not supported!");
  292. device_features.fillModeNonSolid = VK_TRUE;
  293. VkDeviceCreateInfo device_create_info = {};
  294. device_create_info.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
  295. device_create_info.queueCreateInfoCount = selected_device.mGraphicsQueueIndex != selected_device.mPresentQueueIndex? 2 : 1;
  296. device_create_info.pQueueCreateInfos = queue_create_info;
  297. device_create_info.enabledLayerCount = instance_create_info.enabledLayerCount;
  298. device_create_info.ppEnabledLayerNames = instance_create_info.ppEnabledLayerNames;
  299. device_create_info.enabledExtensionCount = uint32(required_device_extensions.size());
  300. device_create_info.ppEnabledExtensionNames = required_device_extensions.data();
  301. device_create_info.pEnabledFeatures = &device_features;
  302. FatalErrorIfFailed(vkCreateDevice(selected_device.mPhysicalDevice, &device_create_info, nullptr, &mDevice));
  303. // Get the queues
  304. mGraphicsQueueIndex = selected_device.mGraphicsQueueIndex;
  305. mPresentQueueIndex = selected_device.mPresentQueueIndex;
  306. vkGetDeviceQueue(mDevice, mGraphicsQueueIndex, 0, &mGraphicsQueue);
  307. vkGetDeviceQueue(mDevice, mPresentQueueIndex, 0, &mPresentQueue);
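// Create a command pool on the graphics queue family (command buffers can be reset individually)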
  308. VkCommandPoolCreateInfo pool_info = {};
  309. pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
  310. pool_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
  311. pool_info.queueFamilyIndex = selected_device.mGraphicsQueueIndex;
  312. FatalErrorIfFailed(vkCreateCommandPool(mDevice, &pool_info, nullptr, &mCommandPool));
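// Allocate one primary command buffer per frame in flight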
  313. VkCommandBufferAllocateInfo command_buffer_info = {};
  314. command_buffer_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
  315. command_buffer_info.commandPool = mCommandPool;
  316. command_buffer_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
  317. command_buffer_info.commandBufferCount = 1;
  318. for (uint32 i = 0; i < cFrameCount; ++i)
  319. FatalErrorIfFailed(vkAllocateCommandBuffers(mDevice, &command_buffer_info, &mCommandBuffers[i]));
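// Create one fence per frame in flight, signaled initially so the first BeginFrame does not block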
  320. VkFenceCreateInfo fence_info = {};
  321. fence_info.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
  322. fence_info.flags = VK_FENCE_CREATE_SIGNALED_BIT;
  323. for (uint32 i = 0; i < cFrameCount; ++i)
  324. FatalErrorIfFailed(vkCreateFence(mDevice, &fence_info, nullptr, &mInFlightFences[i]));
  325. // Create constant buffer. One per frame to avoid overwriting the constant buffer while the GPU is still using it.
  326. for (uint n = 0; n < cFrameCount; ++n)
  327. {
  328. mVertexShaderConstantBufferProjection[n] = CreateConstantBuffer(sizeof(VertexShaderConstantBuffer));
  329. mVertexShaderConstantBufferOrtho[n] = CreateConstantBuffer(sizeof(VertexShaderConstantBuffer));
  330. mPixelShaderConstantBuffer[n] = CreateConstantBuffer(sizeof(PixelShaderConstantBuffer));
  331. }
  332. // Create descriptor set layout for the uniform buffers
  333. VkDescriptorSetLayoutBinding ubo_layout_binding[2] = {};
  334. ubo_layout_binding[0].binding = 0;
  335. ubo_layout_binding[0].descriptorCount = 1;
  336. ubo_layout_binding[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
  337. ubo_layout_binding[0].stageFlags = VK_SHADER_STAGE_VERTEX_BIT;
  338. ubo_layout_binding[1].binding = 1;
  339. ubo_layout_binding[1].descriptorCount = 1;
  340. ubo_layout_binding[1].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
  341. ubo_layout_binding[1].stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
  342. VkDescriptorSetLayoutCreateInfo ubo_dsl = {};
  343. ubo_dsl.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
  344. ubo_dsl.bindingCount = std::size(ubo_layout_binding);
  345. ubo_dsl.pBindings = ubo_layout_binding;
  346. FatalErrorIfFailed(vkCreateDescriptorSetLayout(mDevice, &ubo_dsl, nullptr, &mDescriptorSetLayoutUBO));
  347. // Create descriptor set layout for the texture binding
  348. VkDescriptorSetLayoutBinding texture_layout_binding = {};
  349. texture_layout_binding.binding = 0;
  350. texture_layout_binding.descriptorCount = 1;
  351. texture_layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
  352. texture_layout_binding.stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT;
  353. VkDescriptorSetLayoutCreateInfo texture_dsl = {};
  354. texture_dsl.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
  355. texture_dsl.bindingCount = 1;
  356. texture_dsl.pBindings = &texture_layout_binding;
  357. FatalErrorIfFailed(vkCreateDescriptorSetLayout(mDevice, &texture_dsl, nullptr, &mDescriptorSetLayoutTexture));
  358. // Create pipeline layout
  359. VkPipelineLayoutCreateInfo pipeline_layout = {};
  360. VkDescriptorSetLayout layout_handles[] = { mDescriptorSetLayoutUBO, mDescriptorSetLayoutTexture };
  361. pipeline_layout.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
  362. pipeline_layout.setLayoutCount = std::size(layout_handles);
  363. pipeline_layout.pSetLayouts = layout_handles;
  364. pipeline_layout.pushConstantRangeCount = 0;
  365. FatalErrorIfFailed(vkCreatePipelineLayout(mDevice, &pipeline_layout, nullptr, &mPipelineLayout));
  366. // Create descriptor pool
  367. VkDescriptorPoolSize descriptor_pool_sizes[] = {
  368. { VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER, 128 },
  369. { VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER, 128 },
  370. };
  371. VkDescriptorPoolCreateInfo descriptor_info = {};
  372. descriptor_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
  373. descriptor_info.poolSizeCount = std::size(descriptor_pool_sizes);
  374. descriptor_info.pPoolSizes = descriptor_pool_sizes;
  375. descriptor_info.maxSets = 256;
  376. FatalErrorIfFailed(vkCreateDescriptorPool(mDevice, &descriptor_info, nullptr, &mDescriptorPool));
  377. // Allocate descriptor sets for 3d rendering
  378. Array<VkDescriptorSetLayout> layouts(cFrameCount, mDescriptorSetLayoutUBO);
  379. VkDescriptorSetAllocateInfo descriptor_set_alloc_info = {};
  380. descriptor_set_alloc_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
  381. descriptor_set_alloc_info.descriptorPool = mDescriptorPool;
  382. descriptor_set_alloc_info.descriptorSetCount = cFrameCount;
  383. descriptor_set_alloc_info.pSetLayouts = layouts.data();
  384. FatalErrorIfFailed(vkAllocateDescriptorSets(mDevice, &descriptor_set_alloc_info, mDescriptorSets));
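// Write the projection-mode vertex shader and pixel shader constant buffers into each per-frame descriptor set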
  385. for (uint i = 0; i < cFrameCount; i++)
  386. {
  387. VkDescriptorBufferInfo vs_buffer_info = {};
  388. vs_buffer_info.buffer = mVertexShaderConstantBufferProjection[i]->GetBuffer();
  389. vs_buffer_info.range = sizeof(VertexShaderConstantBuffer);
  390. VkDescriptorBufferInfo ps_buffer_info = {};
  391. ps_buffer_info.buffer = mPixelShaderConstantBuffer[i]->GetBuffer();
  392. ps_buffer_info.range = sizeof(PixelShaderConstantBuffer);
  393. VkWriteDescriptorSet descriptor_write[2] = {};
  394. descriptor_write[0].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
  395. descriptor_write[0].dstSet = mDescriptorSets[i];
  396. descriptor_write[0].dstBinding = 0;
  397. descriptor_write[0].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
  398. descriptor_write[0].descriptorCount = 1;
  399. descriptor_write[0].pBufferInfo = &vs_buffer_info;
  400. descriptor_write[1].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
  401. descriptor_write[1].dstSet = mDescriptorSets[i];
  402. descriptor_write[1].dstBinding = 1;
  403. descriptor_write[1].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
  404. descriptor_write[1].descriptorCount = 1;
  405. descriptor_write[1].pBufferInfo = &ps_buffer_info;
  406. vkUpdateDescriptorSets(mDevice, 2, descriptor_write, 0, nullptr);
  407. }
  408. // Allocate descriptor sets for 2d rendering
  409. FatalErrorIfFailed(vkAllocateDescriptorSets(mDevice, &descriptor_set_alloc_info, mDescriptorSetsOrtho));
  410. for (uint i = 0; i < cFrameCount; i++)
  411. {
  412. VkDescriptorBufferInfo vs_buffer_info = {};
  413. vs_buffer_info.buffer = mVertexShaderConstantBufferOrtho[i]->GetBuffer();
  414. vs_buffer_info.range = sizeof(VertexShaderConstantBuffer);
  415. VkWriteDescriptorSet descriptor_write = {};
  416. descriptor_write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
  417. descriptor_write.dstSet = mDescriptorSetsOrtho[i];
  418. descriptor_write.dstBinding = 0;
  419. descriptor_write.descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
  420. descriptor_write.descriptorCount = 1;
  421. descriptor_write.pBufferInfo = &vs_buffer_info;
  422. vkUpdateDescriptorSets(mDevice, 1, &descriptor_write, 0, nullptr);
  423. }
  424. // Create regular texture sampler
  425. VkSamplerCreateInfo sampler_info = {};
  426. sampler_info.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
  427. sampler_info.magFilter = VK_FILTER_LINEAR;
  428. sampler_info.minFilter = VK_FILTER_LINEAR;
  429. sampler_info.addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT;
  430. sampler_info.addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT;
  431. sampler_info.addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT;
  432. sampler_info.borderColor = VK_BORDER_COLOR_INT_OPAQUE_BLACK;
  433. sampler_info.unnormalizedCoordinates = VK_FALSE;
  434. sampler_info.minLod = 0.0f;
  435. sampler_info.maxLod = VK_LOD_CLAMP_NONE;
  436. sampler_info.mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST;
  437. FatalErrorIfFailed(vkCreateSampler(mDevice, &sampler_info, nullptr, &mTextureSamplerRepeat));
  438. // Create sampler for shadow maps
  439. sampler_info.addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
  440. sampler_info.addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
  441. sampler_info.addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
  442. FatalErrorIfFailed(vkCreateSampler(mDevice, &sampler_info, nullptr, &mTextureSamplerShadow));
  443. {
  444. // Create shadow render pass
  445. VkAttachmentDescription shadowmap_attachment = {};
  446. shadowmap_attachment.format = FindDepthFormat();
  447. shadowmap_attachment.samples = VK_SAMPLE_COUNT_1_BIT;
  448. shadowmap_attachment.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
  449. shadowmap_attachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
  450. shadowmap_attachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  451. shadowmap_attachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  452. shadowmap_attachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
  453. shadowmap_attachment.finalLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
  454. VkAttachmentReference shadowmap_attachment_ref = {};
  455. shadowmap_attachment_ref.attachment = 0;
  456. shadowmap_attachment_ref.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
  457. VkSubpassDescription subpass_shadow = {};
  458. subpass_shadow.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
  459. subpass_shadow.pDepthStencilAttachment = &shadowmap_attachment_ref;
  460. VkSubpassDependency dependencies_shadow = {};
  461. dependencies_shadow.srcSubpass = VK_SUBPASS_EXTERNAL;
  462. dependencies_shadow.dstSubpass = 0;
  463. dependencies_shadow.srcStageMask = VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
  464. dependencies_shadow.srcAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
  465. dependencies_shadow.dstStageMask = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT;
  466. dependencies_shadow.dstAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
  467. VkRenderPassCreateInfo render_pass_shadow = {};
  468. render_pass_shadow.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
  469. render_pass_shadow.attachmentCount = 1;
  470. render_pass_shadow.pAttachments = &shadowmap_attachment;
  471. render_pass_shadow.subpassCount = 1;
  472. render_pass_shadow.pSubpasses = &subpass_shadow;
  473. render_pass_shadow.dependencyCount = 1;
  474. render_pass_shadow.pDependencies = &dependencies_shadow;
  475. FatalErrorIfFailed(vkCreateRenderPass(mDevice, &render_pass_shadow, nullptr, &mRenderPassShadow));
  476. }
  477. // Create depth only texture (no color buffer, as seen from light)
  478. mShadowMap = new TextureVK(this, cShadowMapSize, cShadowMapSize);
  479. // Create frame buffer for the shadow pass
  480. VkImageView attachments[] = { mShadowMap->GetImageView() };
  481. VkFramebufferCreateInfo frame_buffer_info = {};
  482. frame_buffer_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
  483. frame_buffer_info.renderPass = mRenderPassShadow;
  484. frame_buffer_info.attachmentCount = std::size(attachments);
  485. frame_buffer_info.pAttachments = attachments;
  486. frame_buffer_info.width = cShadowMapSize;
  487. frame_buffer_info.height = cShadowMapSize;
  488. frame_buffer_info.layers = 1;
  489. FatalErrorIfFailed(vkCreateFramebuffer(mDevice, &frame_buffer_info, nullptr, &mShadowFrameBuffer));
  490. {
  491. // Create normal render pass
  492. VkAttachmentDescription attachments_normal[2] = {};
  493. VkAttachmentDescription &color_attachment = attachments_normal[0];
  494. color_attachment.format = selected_device.mFormat.format;
  495. color_attachment.samples = VK_SAMPLE_COUNT_1_BIT;
  496. color_attachment.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
  497. color_attachment.storeOp = VK_ATTACHMENT_STORE_OP_STORE;
  498. color_attachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  499. color_attachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  500. color_attachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
  501. color_attachment.finalLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
  502. VkAttachmentReference color_attachment_ref = {};
  503. color_attachment_ref.attachment = 0;
  504. color_attachment_ref.layout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
  505. VkAttachmentDescription &depth_attachment = attachments_normal[1];
  506. depth_attachment.format = FindDepthFormat();
  507. depth_attachment.samples = VK_SAMPLE_COUNT_1_BIT;
  508. depth_attachment.loadOp = VK_ATTACHMENT_LOAD_OP_CLEAR;
  509. depth_attachment.storeOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  510. depth_attachment.stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
  511. depth_attachment.stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
  512. depth_attachment.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
  513. depth_attachment.finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
  514. VkAttachmentReference depth_attachment_ref = {};
  515. depth_attachment_ref.attachment = 1;
  516. depth_attachment_ref.layout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
  517. VkSubpassDescription subpass_normal = {};
  518. subpass_normal.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
  519. subpass_normal.colorAttachmentCount = 1;
  520. subpass_normal.pColorAttachments = &color_attachment_ref;
  521. subpass_normal.pDepthStencilAttachment = &depth_attachment_ref;
  522. VkSubpassDependency dependencies_normal = {};
  523. dependencies_normal.srcSubpass = VK_SUBPASS_EXTERNAL;
  524. dependencies_normal.dstSubpass = 0;
  525. dependencies_normal.srcStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
  526. dependencies_normal.srcAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
  527. dependencies_normal.dstStageMask = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
  528. dependencies_normal.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_SHADER_READ_BIT;
  529. VkRenderPassCreateInfo render_pass_normal = {};
  530. render_pass_normal.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
  531. render_pass_normal.attachmentCount = std::size(attachments_normal);
  532. render_pass_normal.pAttachments = attachments_normal;
  533. render_pass_normal.subpassCount = 1;
  534. render_pass_normal.pSubpasses = &subpass_normal;
  535. render_pass_normal.dependencyCount = 1;
  536. render_pass_normal.pDependencies = &dependencies_normal;
  537. FatalErrorIfFailed(vkCreateRenderPass(mDevice, &render_pass_normal, nullptr, &mRenderPass));
  538. }
  539. // Create the swap chain
  540. CreateSwapChain(mPhysicalDevice);
  541. }
  542. VkSurfaceFormatKHR RendererVK::SelectFormat(VkPhysicalDevice inDevice)
  543. {
  544. uint32 format_count;
  545. vkGetPhysicalDeviceSurfaceFormatsKHR(inDevice, mSurface, &format_count, nullptr);
  546. if (format_count == 0)
  547. return { VK_FORMAT_UNDEFINED, VK_COLOR_SPACE_SRGB_NONLINEAR_KHR };
  548. Array<VkSurfaceFormatKHR> formats;
  549. formats.resize(format_count);
  550. vkGetPhysicalDeviceSurfaceFormatsKHR(inDevice, mSurface, &format_count, formats.data());
  551. // Select BGRA8 UNORM format if available, otherwise the 1st format
  552. for (const VkSurfaceFormatKHR &format : formats)
  553. if (format.format == VK_FORMAT_B8G8R8A8_UNORM && format.colorSpace == VK_COLOR_SPACE_SRGB_NONLINEAR_KHR)
  554. return format;
  555. return formats[0];
  556. }
  557. VkFormat RendererVK::FindDepthFormat()
  558. {
  559. VkFormat candidates[] = { VK_FORMAT_D32_SFLOAT, VK_FORMAT_D32_SFLOAT_S8_UINT, VK_FORMAT_D24_UNORM_S8_UINT };
  560. for (VkFormat format : candidates)
  561. {
  562. VkFormatProperties props;
  563. vkGetPhysicalDeviceFormatProperties(mPhysicalDevice, format, &props);
  564. if ((props.optimalTilingFeatures & VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT) == VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT)
  565. return format;
  566. }
  567. FatalError("Failed to find depth format!");
  568. }
  569. void RendererVK::CreateSwapChain(VkPhysicalDevice inDevice)
  570. {
  571. // Select the format
  572. VkSurfaceFormatKHR format = SelectFormat(inDevice);
  573. mSwapChainImageFormat = format.format;
  574. // Determine swap chain extent
  575. VkSurfaceCapabilitiesKHR capabilities;
  576. vkGetPhysicalDeviceSurfaceCapabilitiesKHR(inDevice, mSurface, &capabilities);
  577. mSwapChainExtent = capabilities.currentExtent;
  578. if (mSwapChainExtent.width == UINT32_MAX || mSwapChainExtent.height == UINT32_MAX)
  579. mSwapChainExtent = { uint32(mWindow->GetWindowWidth()), uint32(mWindow->GetWindowHeight()) };
  580. mSwapChainExtent.width = Clamp(mSwapChainExtent.width, capabilities.minImageExtent.width, capabilities.maxImageExtent.width);
  581. mSwapChainExtent.height = Clamp(mSwapChainExtent.height, capabilities.minImageExtent.height, capabilities.maxImageExtent.height);
  582. Trace("VK: Create swap chain %ux%u", mSwapChainExtent.width, mSwapChainExtent.height);
  583. // Early out if our window has been minimized
  584. if (mSwapChainExtent.width == 0 || mSwapChainExtent.height == 0)
  585. return;
  586. // Create the swap chain
  587. uint32 desired_image_count = max(min(cFrameCount, capabilities.maxImageCount), capabilities.minImageCount);
  588. VkSwapchainCreateInfoKHR swapchain_create_info = {};
  589. swapchain_create_info.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
  590. swapchain_create_info.surface = mSurface;
  591. swapchain_create_info.minImageCount = desired_image_count;
  592. swapchain_create_info.imageFormat = format.format;
  593. swapchain_create_info.imageColorSpace = format.colorSpace;
  594. swapchain_create_info.imageExtent = mSwapChainExtent;
  595. swapchain_create_info.imageArrayLayers = 1;
  596. swapchain_create_info.imageUsage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
  597. uint32 queue_family_indices[] = { mGraphicsQueueIndex, mPresentQueueIndex };
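// Use concurrent sharing when the graphics and present queue families differ, exclusive otherwise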
  598. if (mGraphicsQueueIndex != mPresentQueueIndex)
  599. {
  600. swapchain_create_info.imageSharingMode = VK_SHARING_MODE_CONCURRENT;
  601. swapchain_create_info.queueFamilyIndexCount = 2;
  602. swapchain_create_info.pQueueFamilyIndices = queue_family_indices;
  603. }
  604. else
  605. {
  606. swapchain_create_info.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
  607. }
  609. swapchain_create_info.preTransform = capabilities.currentTransform;
  610. swapchain_create_info.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
  611. swapchain_create_info.presentMode = VK_PRESENT_MODE_FIFO_KHR;
  612. swapchain_create_info.clipped = VK_TRUE;
  613. FatalErrorIfFailed(vkCreateSwapchainKHR(mDevice, &swapchain_create_info, nullptr, &mSwapChain));
  614. // Get the actual swap chain image count
  615. uint32 image_count;
  616. FatalErrorIfFailed(vkGetSwapchainImagesKHR(mDevice, mSwapChain, &image_count, nullptr));
  617. // Get the swap chain images
  618. mSwapChainImages.resize(image_count);
  619. FatalErrorIfFailed(vkGetSwapchainImagesKHR(mDevice, mSwapChain, &image_count, mSwapChainImages.data()));
  620. // Create image views
  621. mSwapChainImageViews.resize(image_count);
  622. for (uint32 i = 0; i < image_count; ++i)
  623. mSwapChainImageViews[i] = CreateImageView(mSwapChainImages[i], mSwapChainImageFormat, VK_IMAGE_ASPECT_COLOR_BIT);
  624. // Create depth buffer
  625. VkFormat depth_format = FindDepthFormat();
  626. VkImageUsageFlags depth_usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT;
  627. VkMemoryPropertyFlags depth_memory_properties = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
  628. // Test and utilize support for transient memory for the depth buffer
  629. VkImageFormatProperties depth_transient_properties = {};
  630. VkResult depth_transient_support = vkGetPhysicalDeviceImageFormatProperties(mPhysicalDevice, depth_format, VK_IMAGE_TYPE_2D, VK_IMAGE_TILING_OPTIMAL, depth_usage | VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT, 0, &depth_transient_properties);
  631. if (depth_transient_support == VK_SUCCESS)
  632. {
  633. depth_usage |= VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT;
  634. // Test and utilize lazily allocated memory for the depth buffer
  635. for (size_t i = 0; i < mMemoryProperties.memoryTypeCount; i++)
  636. if (mMemoryProperties.memoryTypes[i].propertyFlags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT)
  637. {
  638. depth_memory_properties = VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT;
  639. break;
  640. }
  641. }
  642. CreateImage(mSwapChainExtent.width, mSwapChainExtent.height, depth_format, VK_IMAGE_TILING_OPTIMAL, depth_usage, depth_memory_properties, mDepthImage, mDepthImageMemory);
  643. mDepthImageView = CreateImageView(mDepthImage, depth_format, VK_IMAGE_ASPECT_DEPTH_BIT);
  644. // Create frame buffers for the normal pass
  645. mSwapChainFramebuffers.resize(image_count);
  646. for (size_t i = 0; i < mSwapChainFramebuffers.size(); i++)
  647. {
  648. VkImageView attachments[] = { mSwapChainImageViews[i], mDepthImageView };
  649. VkFramebufferCreateInfo frame_buffer_info = {};
  650. frame_buffer_info.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
  651. frame_buffer_info.renderPass = mRenderPass;
  652. frame_buffer_info.attachmentCount = std::size(attachments);
  653. frame_buffer_info.pAttachments = attachments;
  654. frame_buffer_info.width = mSwapChainExtent.width;
  655. frame_buffer_info.height = mSwapChainExtent.height;
  656. frame_buffer_info.layers = 1;
  657. FatalErrorIfFailed(vkCreateFramebuffer(mDevice, &frame_buffer_info, nullptr, &mSwapChainFramebuffers[i]));
  658. }
  659. // Allocate space to remember the image available semaphores
  660. mImageAvailableSemaphores.resize(image_count, VK_NULL_HANDLE);
  661. // Allocate the render finished semaphores
  662. mRenderFinishedSemaphores.resize(image_count, VK_NULL_HANDLE);
  663. for (uint32 i = 0; i < image_count; ++i)
  664. mRenderFinishedSemaphores[i] = AllocateSemaphore();
  665. }
  666. void RendererVK::DestroySwapChain()
  667. {
  668. // Destroy semaphores
  669. for (VkSemaphore semaphore : mImageAvailableSemaphores)
  670. vkDestroySemaphore(mDevice, semaphore, nullptr);
  671. mImageAvailableSemaphores.clear();
  672. for (VkSemaphore semaphore : mRenderFinishedSemaphores)
  673. vkDestroySemaphore(mDevice, semaphore, nullptr);
  674. mRenderFinishedSemaphores.clear();
  675. for (VkSemaphore semaphore : mAvailableSemaphores)
  676. vkDestroySemaphore(mDevice, semaphore, nullptr);
  677. mAvailableSemaphores.clear();
  678. // Destroy depth buffer
  679. if (mDepthImageView != VK_NULL_HANDLE)
  680. {
  681. vkDestroyImageView(mDevice, mDepthImageView, nullptr);
  682. mDepthImageView = VK_NULL_HANDLE;
  683. DestroyImage(mDepthImage, mDepthImageMemory);
  684. mDepthImage = VK_NULL_HANDLE;
  685. mDepthImageMemory = VK_NULL_HANDLE;
  686. }
  687. for (VkFramebuffer frame_buffer : mSwapChainFramebuffers)
  688. vkDestroyFramebuffer(mDevice, frame_buffer, nullptr);
  689. mSwapChainFramebuffers.clear();
  690. for (VkImageView view : mSwapChainImageViews)
  691. vkDestroyImageView(mDevice, view, nullptr);
  692. mSwapChainImageViews.clear();
  693. mSwapChainImages.clear();
  694. if (mSwapChain != VK_NULL_HANDLE)
  695. {
  696. vkDestroySwapchainKHR(mDevice, mSwapChain, nullptr);
  697. mSwapChain = VK_NULL_HANDLE;
  698. }
  699. }
  700. void RendererVK::OnWindowResize()
  701. {
  702. vkDeviceWaitIdle(mDevice);
  703. DestroySwapChain();
  704. CreateSwapChain(mPhysicalDevice);
  705. }
  706. VkSemaphore RendererVK::AllocateSemaphore()
  707. {
  708. VkSemaphore semaphore;
  709. if (mAvailableSemaphores.empty())
  710. {
  711. VkSemaphoreCreateInfo semaphore_info = {};
  712. semaphore_info.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
  713. FatalErrorIfFailed(vkCreateSemaphore(mDevice, &semaphore_info, nullptr, &semaphore));
  714. }
  715. else
  716. {
  717. semaphore = mAvailableSemaphores.back();
  718. mAvailableSemaphores.pop_back();
  719. }
  720. return semaphore;
  721. }
  722. void RendererVK::FreeSemaphore(VkSemaphore inSemaphore)
  723. {
  724. if (inSemaphore != VK_NULL_HANDLE)
  725. mAvailableSemaphores.push_back(inSemaphore);
  726. }
  727. bool RendererVK::BeginFrame(const CameraState &inCamera, float inWorldScale)
  728. {
  729. JPH_PROFILE_FUNCTION();
  730. Renderer::BeginFrame(inCamera, inWorldScale);
  731. // If we have no swap chain, bail out
  732. if (mSwapChain == VK_NULL_HANDLE)
  733. {
  734. Renderer::EndFrame();
  735. return false;
  736. }
  737. // Update frame index
  738. mFrameIndex = (mFrameIndex + 1) % cFrameCount;
  739. // Wait for this frame to complete
  740. vkWaitForFences(mDevice, 1, &mInFlightFences[mFrameIndex], VK_TRUE, UINT64_MAX);
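// Acquire the next swap chain image; recreate the swap chain and retry if it is out of date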
  741. VkSemaphore semaphore = AllocateSemaphore();
  742. VkResult result = mSubOptimalSwapChain? VK_ERROR_OUT_OF_DATE_KHR : vkAcquireNextImageKHR(mDevice, mSwapChain, UINT64_MAX, semaphore, VK_NULL_HANDLE, &mImageIndex);
  743. if (result == VK_ERROR_OUT_OF_DATE_KHR)
  744. {
  745. vkDeviceWaitIdle(mDevice);
  746. DestroySwapChain();
  747. CreateSwapChain(mPhysicalDevice);
  748. if (mSwapChain == VK_NULL_HANDLE)
  749. {
  750. FreeSemaphore(semaphore);
  751. Renderer::EndFrame();
  752. return false;
  753. }
  754. result = vkAcquireNextImageKHR(mDevice, mSwapChain, UINT64_MAX, semaphore, VK_NULL_HANDLE, &mImageIndex);
  755. mSubOptimalSwapChain = false;
  756. }
  757. else if (result == VK_SUBOPTIMAL_KHR)
  758. {
  759. // Render this frame with the suboptimal swap chain as we've already acquired an image
  760. mSubOptimalSwapChain = true;
  761. result = VK_SUCCESS;
  762. }
  763. FatalErrorIfFailed(result);
  764. // The previous semaphore is now no longer in use, associate the new semaphore with the image
  765. FreeSemaphore(mImageAvailableSemaphores[mImageIndex]);
  766. mImageAvailableSemaphores[mImageIndex] = semaphore;
  767. // Free buffers that weren't used this frame
  768. for (BufferCache::value_type &vt : mBufferCache)
  769. for (BufferVK &bvk : vt.second)
  770. FreeBufferInternal(bvk);
  771. mBufferCache.clear();
  772. // Recycle the buffers that were freed
  773. mBufferCache.swap(mFreedBuffers[mFrameIndex]);
  774. vkResetFences(mDevice, 1, &mInFlightFences[mFrameIndex]);
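// Start recording this frame's command buffer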
  775. VkCommandBuffer command_buffer = GetCommandBuffer();
  776. FatalErrorIfFailed(vkResetCommandBuffer(command_buffer, 0));
  777. VkCommandBufferBeginInfo command_buffer_begin_info = {};
  778. command_buffer_begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
  779. command_buffer_begin_info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
  780. FatalErrorIfFailed(vkBeginCommandBuffer(command_buffer, &command_buffer_begin_info));
  781. // Begin the shadow pass
  782. VkClearValue clear_value;
  783. clear_value.depthStencil = { 0.0f, 0 };
  784. VkRenderPassBeginInfo render_pass_begin_info = {};
  785. render_pass_begin_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
  786. render_pass_begin_info.renderPass = mRenderPassShadow;
  787. render_pass_begin_info.framebuffer = mShadowFrameBuffer;
  788. render_pass_begin_info.renderArea.extent = { cShadowMapSize, cShadowMapSize };
  789. render_pass_begin_info.clearValueCount = 1;
  790. render_pass_begin_info.pClearValues = &clear_value;
  791. vkCmdBeginRenderPass(command_buffer, &render_pass_begin_info, VK_SUBPASS_CONTENTS_INLINE);
  792. // Set constants for vertex shader in projection mode
  793. VertexShaderConstantBuffer *vs = mVertexShaderConstantBufferProjection[mFrameIndex]->Map<VertexShaderConstantBuffer>();
  794. *vs = mVSBuffer;
  795. mVertexShaderConstantBufferProjection[mFrameIndex]->Unmap();
  796. // Set constants for vertex shader in ortho mode
  797. vs = mVertexShaderConstantBufferOrtho[mFrameIndex]->Map<VertexShaderConstantBuffer>();
  798. *vs = mVSBufferOrtho;
  799. mVertexShaderConstantBufferOrtho[mFrameIndex]->Unmap();
  800. // Set constants for pixel shader
  801. PixelShaderConstantBuffer *ps = mPixelShaderConstantBuffer[mFrameIndex]->Map<PixelShaderConstantBuffer>();
  802. *ps = mPSBuffer;
  803. mPixelShaderConstantBuffer[mFrameIndex]->Unmap();
  804. // Set the view port and scissor rect to the shadow map size
  805. UpdateViewPortAndScissorRect(cShadowMapSize, cShadowMapSize);
  806. // Switch to 3d projection mode
  807. SetProjectionMode();
  808. return true;
  809. }
  810. void RendererVK::EndShadowPass()
  811. {
  812. VkCommandBuffer command_buffer = GetCommandBuffer();
  813. // End the shadow pass
  814. vkCmdEndRenderPass(command_buffer);
  815. // Begin the normal render pass
  816. VkClearValue clear_values[2];
  817. clear_values[0].color = {{ 0.098f, 0.098f, 0.439f, 1.000f }};
  818. clear_values[1].depthStencil = { 0.0f, 0 }; // Reverse-Z clears to 0
  819. VkRenderPassBeginInfo render_pass_begin_info = {};
  820. render_pass_begin_info.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
  821. render_pass_begin_info.renderPass = mRenderPass;
  822. JPH_ASSERT(mImageIndex < mSwapChainFramebuffers.size());
  823. render_pass_begin_info.framebuffer = mSwapChainFramebuffers[mImageIndex];
  824. render_pass_begin_info.renderArea.extent = mSwapChainExtent;
  825. render_pass_begin_info.clearValueCount = std::size(clear_values);
  826. render_pass_begin_info.pClearValues = clear_values;
  827. vkCmdBeginRenderPass(command_buffer, &render_pass_begin_info, VK_SUBPASS_CONTENTS_INLINE);
  828. // Set the view port and scissor rect to the screen size
  829. UpdateViewPortAndScissorRect(mSwapChainExtent.width, mSwapChainExtent.height);
  830. }
  831. void RendererVK::EndFrame()
  832. {
  833. JPH_PROFILE_FUNCTION();
  834. VkCommandBuffer command_buffer = GetCommandBuffer();
  835. vkCmdEndRenderPass(command_buffer);
  836. FatalErrorIfFailed(vkEndCommandBuffer(command_buffer));
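// Submit the command buffer: wait on the image-available semaphore, signal the render-finished semaphore and the in-flight fence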
  837. VkSemaphore wait_semaphores[] = { mImageAvailableSemaphores[mImageIndex] };
  838. VkSemaphore signal_semaphores[] = { mRenderFinishedSemaphores[mImageIndex] };
  839. VkPipelineStageFlags wait_stages[] = { VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT };
  840. VkSubmitInfo submit_info = {};
  841. submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
  842. submit_info.waitSemaphoreCount = 1;
  843. submit_info.pWaitSemaphores = wait_semaphores;
  844. submit_info.pWaitDstStageMask = wait_stages;
  845. submit_info.commandBufferCount = 1;
  846. submit_info.pCommandBuffers = &command_buffer;
  847. submit_info.signalSemaphoreCount = 1;
  848. submit_info.pSignalSemaphores = signal_semaphores;
  849. FatalErrorIfFailed(vkQueueSubmit(mGraphicsQueue, 1, &submit_info, mInFlightFences[mFrameIndex]));
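// Present the image once the render-finished semaphore is signaled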
  850. VkSwapchainKHR swap_chains[] = { mSwapChain };
  851. VkPresentInfoKHR present_info = {};
  852. present_info.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
  853. present_info.waitSemaphoreCount = 1;
  854. present_info.pWaitSemaphores = signal_semaphores;
  855. present_info.swapchainCount = 1;
  856. present_info.pSwapchains = swap_chains;
  857. present_info.pImageIndices = &mImageIndex;
  858. vkQueuePresentKHR(mPresentQueue, &present_info);
  859. Renderer::EndFrame();
  860. }
  861. void RendererVK::SetProjectionMode()
  862. {
  863. JPH_ASSERT(mInFrame);
  864. // Bind descriptor set for 3d rendering
  865. vkCmdBindDescriptorSets(GetCommandBuffer(), VK_PIPELINE_BIND_POINT_GRAPHICS, mPipelineLayout, 0, 1, &mDescriptorSets[mFrameIndex], 0, nullptr);
  866. }
  867. void RendererVK::SetOrthoMode()
  868. {
  869. JPH_ASSERT(mInFrame);
  870. // Bind descriptor set for 2d rendering
  871. vkCmdBindDescriptorSets(GetCommandBuffer(), VK_PIPELINE_BIND_POINT_GRAPHICS, mPipelineLayout, 0, 1, &mDescriptorSetsOrtho[mFrameIndex], 0, nullptr);
  872. }
  873. Ref<Texture> RendererVK::CreateTexture(const Surface *inSurface)
  874. {
  875. return new TextureVK(this, inSurface);
  876. }
  877. Ref<VertexShader> RendererVK::CreateVertexShader(const char *inName)
  878. {
  879. Array<uint8> data = ReadData((String("Shaders/VK/") + inName + ".vert.spv").c_str());
  880. VkShaderModuleCreateInfo create_info = {};
  881. create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
  882. create_info.codeSize = data.size();
  883. create_info.pCode = reinterpret_cast<const uint32 *>(data.data());
  884. VkShaderModule shader_module;
  885. FatalErrorIfFailed(vkCreateShaderModule(mDevice, &create_info, nullptr, &shader_module));
  886. return new VertexShaderVK(mDevice, shader_module);
  887. }
  888. Ref<PixelShader> RendererVK::CreatePixelShader(const char *inName)
  889. {
  890. Array<uint8> data = ReadData((String("Shaders/VK/") + inName + ".frag.spv").c_str());
  891. VkShaderModuleCreateInfo create_info = {};
  892. create_info.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
  893. create_info.codeSize = data.size();
  894. create_info.pCode = reinterpret_cast<const uint32 *>(data.data());
  895. VkShaderModule shader_module;
  896. FatalErrorIfFailed(vkCreateShaderModule(mDevice, &create_info, nullptr, &shader_module));
  897. return new PixelShaderVK(mDevice, shader_module);
  898. }
  899. unique_ptr<PipelineState> RendererVK::CreatePipelineState(const VertexShader *inVertexShader, const PipelineState::EInputDescription *inInputDescription, uint inInputDescriptionCount, const PixelShader *inPixelShader, PipelineState::EDrawPass inDrawPass, PipelineState::EFillMode inFillMode, PipelineState::ETopology inTopology, PipelineState::EDepthTest inDepthTest, PipelineState::EBlendMode inBlendMode, PipelineState::ECullMode inCullMode)
  900. {
  901. return make_unique<PipelineStateVK>(this, static_cast<const VertexShaderVK *>(inVertexShader), inInputDescription, inInputDescriptionCount, static_cast<const PixelShaderVK *>(inPixelShader), inDrawPass, inFillMode, inTopology, inDepthTest, inBlendMode, inCullMode);
  902. }
  903. RenderPrimitive *RendererVK::CreateRenderPrimitive(PipelineState::ETopology inType)
  904. {
  905. return new RenderPrimitiveVK(this);
  906. }
  907. RenderInstances *RendererVK::CreateRenderInstances()
  908. {
  909. return new RenderInstancesVK(this);
  910. }
  911. uint32 RendererVK::FindMemoryType(uint32 inTypeFilter, VkMemoryPropertyFlags inProperties)
  912. {
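// Pick a memory type that is allowed by the type filter and has all requested property flags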
  913. for (uint32 i = 0; i < mMemoryProperties.memoryTypeCount; i++)
  914. if ((inTypeFilter & (1 << i))
  915. && (mMemoryProperties.memoryTypes[i].propertyFlags & inProperties) == inProperties)
  916. return i;
  917. FatalError("Failed to find memory type!");
  918. }
  919. void RendererVK::AllocateMemory(VkDeviceSize inSize, uint32 inMemoryTypeBits, VkMemoryPropertyFlags inProperties, VkDeviceMemory &outMemory)
  920. {
  921. VkMemoryAllocateInfo alloc_info = {};
  922. alloc_info.sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO;
  923. alloc_info.allocationSize = inSize;
  924. alloc_info.memoryTypeIndex = FindMemoryType(inMemoryTypeBits, inProperties);
  925. FatalErrorIfFailed(vkAllocateMemory(mDevice, &alloc_info, nullptr, &outMemory));
  926. // Track allocation
  927. ++mNumAllocations;
  928. mTotalAllocated += inSize;
  929. // Track max usage
  930. mMaxTotalAllocated = max(mMaxTotalAllocated, mTotalAllocated);
  931. mMaxNumAllocations = max(mMaxNumAllocations, mNumAllocations);
  932. }
  933. void RendererVK::FreeMemory(VkDeviceMemory inMemory, VkDeviceSize inSize)
  934. {
  935. vkFreeMemory(mDevice, inMemory, nullptr);
  936. // Track free
  937. --mNumAllocations;
  938. mTotalAllocated -= inSize;
  939. }
  940. void RendererVK::CreateBuffer(VkDeviceSize inSize, VkBufferUsageFlags inUsage, VkMemoryPropertyFlags inProperties, BufferVK &outBuffer)
  941. {
  942. // Check the cache
  943. BufferCache::iterator i = mBufferCache.find({ inSize, inUsage, inProperties });
  944. if (i != mBufferCache.end() && !i->second.empty())
  945. {
  946. outBuffer = i->second.back();
  947. i->second.pop_back();
  948. return;
  949. }
  950. // Create a new buffer
  951. outBuffer.mSize = inSize;
  952. outBuffer.mUsage = inUsage;
  953. outBuffer.mProperties = inProperties;
  954. VkBufferCreateInfo create_info = {};
  955. create_info.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
  956. create_info.size = inSize;
  957. create_info.usage = inUsage;
  958. create_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
  959. FatalErrorIfFailed(vkCreateBuffer(mDevice, &create_info, nullptr, &outBuffer.mBuffer));
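// Allocate memory: large buffers get a dedicated allocation, smaller ones are sub-allocated from cBlockSize blocks rounded up to a power of 2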
  960. VkMemoryRequirements mem_requirements;
  961. vkGetBufferMemoryRequirements(mDevice, outBuffer.mBuffer, &mem_requirements);
  962. if (mem_requirements.size > cMaxAllocSize)
  963. {
  964. // Allocate block directly
  965. AllocateMemory(mem_requirements.size, mem_requirements.memoryTypeBits, inProperties, outBuffer.mMemory);
  966. outBuffer.mAllocatedSize = mem_requirements.size;
  967. outBuffer.mOffset = 0;
  968. }
  969. else
  970. {
  971. // Round allocation to the next power of 2 so that we can use a simple block based allocator
  972. outBuffer.mAllocatedSize = max(VkDeviceSize(GetNextPowerOf2(uint32(mem_requirements.size))), cMinAllocSize);
  973. // Ensure that we have memory available from the right pool
  974. Array<Memory> &mem_array = mMemoryCache[{ outBuffer.mAllocatedSize, outBuffer.mUsage, outBuffer.mProperties }];
  975. if (mem_array.empty())
  976. {
  977. // Allocate a bigger block
  978. VkDeviceMemory device_memory;
  979. AllocateMemory(cBlockSize, mem_requirements.memoryTypeBits, inProperties, device_memory);
  980. // Divide into sub blocks
  981. for (VkDeviceSize offset = 0; offset < cBlockSize; offset += outBuffer.mAllocatedSize)
  982. mem_array.push_back({ device_memory, offset });
  983. }
  984. // Claim memory from the pool
  985. Memory &memory = mem_array.back();
  986. outBuffer.mMemory = memory.mMemory;
  987. outBuffer.mOffset = memory.mOffset;
  988. mem_array.pop_back();
  989. }
  990. // Bind the memory to the buffer
  991. vkBindBufferMemory(mDevice, outBuffer.mBuffer, outBuffer.mMemory, outBuffer.mOffset);
  992. }
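// Allocate and begin a single use command buffer for short lived work such as buffer copies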
VkCommandBuffer RendererVK::StartTempCommandBuffer()
{
	VkCommandBufferAllocateInfo alloc_info = {};
	alloc_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
	alloc_info.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
	alloc_info.commandPool = mCommandPool;
	alloc_info.commandBufferCount = 1;

	VkCommandBuffer command_buffer;
	vkAllocateCommandBuffers(mDevice, &alloc_info, &command_buffer);

	VkCommandBufferBeginInfo begin_info = {};
	begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
	begin_info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
	vkBeginCommandBuffer(command_buffer, &begin_info);

	return command_buffer;
}

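// End the temporary command buffer, submit it to the graphics queue and block until the work has completed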
void RendererVK::EndTempCommandBuffer(VkCommandBuffer inCommandBuffer)
{
	vkEndCommandBuffer(inCommandBuffer);

	VkSubmitInfo submit_info = {};
	submit_info.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
	submit_info.commandBufferCount = 1;
	submit_info.pCommandBuffers = &inCommandBuffer;
	vkQueueSubmit(mGraphicsQueue, 1, &submit_info, VK_NULL_HANDLE);
	vkQueueWaitIdle(mGraphicsQueue); // Inefficient, but we only use this during initialization
	vkFreeCommandBuffers(mDevice, mCommandPool, 1, &inCommandBuffer);
}

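// Copy inSize bytes from inSrc to inDst using a temporary command buffer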
void RendererVK::CopyBuffer(VkBuffer inSrc, VkBuffer inDst, VkDeviceSize inSize)
{
	VkCommandBuffer command_buffer = StartTempCommandBuffer();

	VkBufferCopy region = {};
	region.size = inSize;
	vkCmdCopyBuffer(command_buffer, inSrc, inDst, 1, &region);

	EndTempCommandBuffer(command_buffer);
}

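// Create a device local buffer and fill it with inData by copying through a host visible staging buffer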
void RendererVK::CreateDeviceLocalBuffer(const void *inData, VkDeviceSize inSize, VkBufferUsageFlags inUsage, BufferVK &outBuffer)
{
	BufferVK staging_buffer;
	CreateBuffer(inSize, VK_BUFFER_USAGE_TRANSFER_SRC_BIT, VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT, staging_buffer);
	void *data;
	vkMapMemory(mDevice, staging_buffer.mMemory, staging_buffer.mOffset, inSize, 0, &data);
	memcpy(data, inData, (size_t)inSize);
	vkUnmapMemory(mDevice, staging_buffer.mMemory);

	CreateBuffer(inSize, inUsage | VK_BUFFER_USAGE_TRANSFER_DST_BIT, VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT, outBuffer);
	CopyBuffer(staging_buffer.mBuffer, outBuffer.mBuffer, inSize);

	FreeBuffer(staging_buffer);
}

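// Queue a buffer for destruction; it is added to the current frame's freed buffer list instead of being destroyed immediately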
void RendererVK::FreeBuffer(BufferVK &ioBuffer)
{
	if (ioBuffer.mBuffer != VK_NULL_HANDLE)
	{
		JPH_ASSERT(mFrameIndex < cFrameCount);
		mFreedBuffers[mFrameIndex][{ ioBuffer.mSize, ioBuffer.mUsage, ioBuffer.mProperties }].push_back(ioBuffer);
	}
}

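// Actually destroy a buffer; its memory is returned to the memory cache, or freed outright when it was a dedicated allocation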
void RendererVK::FreeBufferInternal(BufferVK &ioBuffer)
{
	// Destroy the buffer
	vkDestroyBuffer(mDevice, ioBuffer.mBuffer, nullptr);
	ioBuffer.mBuffer = VK_NULL_HANDLE;

	if (ioBuffer.mAllocatedSize > cMaxAllocSize)
		FreeMemory(ioBuffer.mMemory, ioBuffer.mAllocatedSize);
	else
		mMemoryCache[{ ioBuffer.mAllocatedSize, ioBuffer.mUsage, ioBuffer.mProperties }].push_back({ ioBuffer.mMemory, ioBuffer.mOffset });
	ioBuffer.mMemory = VK_NULL_HANDLE;
}

unique_ptr<ConstantBufferVK> RendererVK::CreateConstantBuffer(VkDeviceSize inBufferSize)
{
	return make_unique<ConstantBufferVK>(this, inBufferSize);
}

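// Create a 2D image view covering a single mip level and array layer of inImage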
VkImageView RendererVK::CreateImageView(VkImage inImage, VkFormat inFormat, VkImageAspectFlags inAspectFlags)
{
	VkImageViewCreateInfo view_info = {};
	view_info.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
	view_info.image = inImage;
	view_info.viewType = VK_IMAGE_VIEW_TYPE_2D;
	view_info.format = inFormat;
	view_info.subresourceRange.aspectMask = inAspectFlags;
	view_info.subresourceRange.levelCount = 1;
	view_info.subresourceRange.layerCount = 1;

	VkImageView image_view;
	FatalErrorIfFailed(vkCreateImageView(mDevice, &view_info, nullptr, &image_view));
	return image_view;
}

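// Create a 2D image with a single mip level and bind newly allocated device memory to it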
void RendererVK::CreateImage(uint32 inWidth, uint32 inHeight, VkFormat inFormat, VkImageTiling inTiling, VkImageUsageFlags inUsage, VkMemoryPropertyFlags inProperties, VkImage &outImage, VkDeviceMemory &outMemory)
{
	VkImageCreateInfo image_info = {};
	image_info.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
	image_info.imageType = VK_IMAGE_TYPE_2D;
	image_info.extent.width = inWidth;
	image_info.extent.height = inHeight;
	image_info.extent.depth = 1;
	image_info.mipLevels = 1;
	image_info.arrayLayers = 1;
	image_info.format = inFormat;
	image_info.tiling = inTiling;
	image_info.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
	image_info.usage = inUsage;
	image_info.samples = VK_SAMPLE_COUNT_1_BIT;
	image_info.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
	FatalErrorIfFailed(vkCreateImage(mDevice, &image_info, nullptr, &outImage));

	VkMemoryRequirements mem_requirements;
	vkGetImageMemoryRequirements(mDevice, outImage, &mem_requirements);
	AllocateMemory(mem_requirements.size, mem_requirements.memoryTypeBits, inProperties, outMemory);
	vkBindImageMemory(mDevice, outImage, outMemory, 0);
}

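// Destroy an image and free its backing memory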
void RendererVK::DestroyImage(VkImage inImage, VkDeviceMemory inMemory)
{
	VkMemoryRequirements mem_requirements;
	vkGetImageMemoryRequirements(mDevice, inImage, &mem_requirements);
	vkDestroyImage(mDevice, inImage, nullptr);
	FreeMemory(inMemory, mem_requirements.size);
}

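// Set the dynamic viewport and scissor rect on the current command buffer to cover the full inWidth x inHeight area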
void RendererVK::UpdateViewPortAndScissorRect(uint32 inWidth, uint32 inHeight)
{
	VkCommandBuffer command_buffer = GetCommandBuffer();

	// Update the view port rect
	VkViewport viewport = {};
	viewport.x = 0.0f;
	viewport.y = 0.0f;
	viewport.width = (float)inWidth;
	viewport.height = (float)inHeight;
	viewport.minDepth = 0.0f;
	viewport.maxDepth = 1.0f;
	vkCmdSetViewport(command_buffer, 0, 1, &viewport);

	// Update the scissor rect
	VkRect2D scissor = {};
	scissor.extent = { inWidth, inHeight };
	vkCmdSetScissor(command_buffer, 0, 1, &scissor);
}

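// When the Vulkan backend is enabled, the generic Renderer factory returns the Vulkan implementation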
#ifdef JPH_ENABLE_VULKAN
Renderer *Renderer::sCreate()
{
	return new RendererVK;
}
#endif