//********************************** Banshee Engine (www.banshee3d.com) **************************************************//
//**************** Copyright (c) 2016 Marko Pintera ([email protected]). All rights reserved. **********************//
#include "BsVulkanRenderAPI.h"
#include "CoreThread/BsCoreThread.h"
#include "Profiling/BsRenderStats.h"
#include "RenderAPI/BsGpuParamDesc.h"
#include "BsVulkanDevice.h"
#include "Managers/BsVulkanTextureManager.h"
#include "Managers/BsVulkanRenderWindowManager.h"
#include "Managers/BsVulkanHardwareBufferManager.h"
#include "Managers/BsVulkanRenderStateManager.h"
#include "Managers/BsGpuProgramManager.h"
#include "Managers/BsVulkanQueryManager.h"
#include "Managers/BsVulkanGLSLProgramFactory.h"
#include "Managers/BsVulkanCommandBufferManager.h"
#include "BsVulkanCommandBuffer.h"
#include "BsVulkanGpuParams.h"
#include "Managers/BsVulkanVertexInputManager.h"
#include "BsVulkanGpuParamBlockBuffer.h"
#include <vulkan/vulkan.h>
#include "BsVulkanUtility.h"

#if BS_PLATFORM == BS_PLATFORM_WIN32
#include "Win32/BsWin32VideoModeInfo.h"
#elif BS_PLATFORM == BS_PLATFORM_LINUX
#include "Linux/BsLinuxVideoModeInfo.h"
#else
static_assert(false, "Other platform includes go here.");
#endif

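// When enabled, debug builds create the Vulkan instance with the LunarG standard validation layer and the
// VK_EXT_debug_report extension, and route validation output through debugMsgCallback() below.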
#define USE_VALIDATION_LAYERS 1

namespace bs { namespace ct
{
    VkAllocationCallbacks* gVulkanAllocator = nullptr;

    PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = nullptr;
    PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = nullptr;
    PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = nullptr;
    PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = nullptr;
    PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = nullptr;
    PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = nullptr;
    PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = nullptr;
    PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = nullptr;
    PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = nullptr;
    PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = nullptr;
    PFN_vkQueuePresentKHR vkQueuePresentKHR = nullptr;

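    // Callback registered with VK_EXT_debug_report. Builds a prefixed message, then raises an exception for errors,
    // logs a warning for (performance) warnings and logs debug output for everything else. Always returns VK_FALSE
    // so the Vulkan call that triggered the message is not aborted.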
    VkBool32 debugMsgCallback(VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objType, uint64_t srcObject,
        size_t location, int32_t msgCode, const char* pLayerPrefix, const char* pMsg, void* pUserData)
    {
        StringStream message;

        // Determine prefix
        if (flags & VK_DEBUG_REPORT_ERROR_BIT_EXT)
            message << "ERROR";

        if (flags & VK_DEBUG_REPORT_WARNING_BIT_EXT)
            message << "WARNING";

        if (flags & VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT)
            message << "PERFORMANCE";

        if (flags & VK_DEBUG_REPORT_INFORMATION_BIT_EXT)
            message << "INFO";

        if (flags & VK_DEBUG_REPORT_DEBUG_BIT_EXT)
            message << "DEBUG";

        message << ": [" << pLayerPrefix << "] Code " << msgCode << ": " << pMsg << std::endl;

        if (flags & VK_DEBUG_REPORT_ERROR_BIT_EXT)
            BS_EXCEPT(RenderingAPIException, message.str())
        else if (flags & VK_DEBUG_REPORT_WARNING_BIT_EXT || flags & VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT)
            LOGWRN(message.str())
        else
            LOGDBG(message.str())

        // Don't abort calls that caused a validation message
        return VK_FALSE;
    }

    VulkanRenderAPI::VulkanRenderAPI()
        :mInstance(nullptr)
    {
#if BS_DEBUG_MODE
        mDebugCallback = nullptr;
#endif
    }

    VulkanRenderAPI::~VulkanRenderAPI()
    {
    }

    const StringID& VulkanRenderAPI::getName() const
    {
        static StringID strName("VulkanRenderAPI");
        return strName;
    }

    void VulkanRenderAPI::initialize()
    {
        THROW_IF_NOT_CORE_THREAD;

        // Create instance
        VkApplicationInfo appInfo;
        appInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
        appInfo.pNext = nullptr;
        appInfo.pApplicationName = "Banshee3D App";
        appInfo.applicationVersion = 1;
        appInfo.pEngineName = "Banshee3D";
        appInfo.engineVersion = (0 << 24) | (4 << 16) | 0;
        appInfo.apiVersion = VK_API_VERSION_1_0;

#if BS_DEBUG_MODE && USE_VALIDATION_LAYERS
        const char* layers[] =
        {
            "VK_LAYER_LUNARG_standard_validation"
        };

        const char* extensions[] =
        {
            nullptr, /** Surface extension */
            nullptr, /** OS specific surface extension */
            VK_EXT_DEBUG_REPORT_EXTENSION_NAME
        };

        uint32_t numLayers = sizeof(layers) / sizeof(layers[0]);
#else
        const char** layers = nullptr;
        const char* extensions[] =
        {
            nullptr, /** Surface extension */
            nullptr, /** OS specific surface extension */
        };

        uint32_t numLayers = 0;
#endif

        extensions[0] = VK_KHR_SURFACE_EXTENSION_NAME;

#if BS_PLATFORM == BS_PLATFORM_WIN32
        extensions[1] = VK_KHR_WIN32_SURFACE_EXTENSION_NAME;
#elif BS_PLATFORM == BS_PLATFORM_ANDROID
        extensions[1] = VK_KHR_ANDROID_SURFACE_EXTENSION_NAME;
#else
        extensions[1] = VK_KHR_XLIB_SURFACE_EXTENSION_NAME;
#endif

        uint32_t numExtensions = sizeof(extensions) / sizeof(extensions[0]);

        VkInstanceCreateInfo instanceInfo;
        instanceInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
        instanceInfo.pNext = nullptr;
        instanceInfo.flags = 0;
        instanceInfo.pApplicationInfo = &appInfo;
        instanceInfo.enabledLayerCount = numLayers;
        instanceInfo.ppEnabledLayerNames = layers;
        instanceInfo.enabledExtensionCount = numExtensions;
        instanceInfo.ppEnabledExtensionNames = extensions;

        VkResult result = vkCreateInstance(&instanceInfo, gVulkanAllocator, &mInstance);
        assert(result == VK_SUCCESS);

        // Set up debugging
#if BS_DEBUG_MODE && USE_VALIDATION_LAYERS
        VkDebugReportFlagsEXT debugFlags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT |
            VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT;

        GET_INSTANCE_PROC_ADDR(mInstance, CreateDebugReportCallbackEXT);
        GET_INSTANCE_PROC_ADDR(mInstance, DestroyDebugReportCallbackEXT);

        VkDebugReportCallbackCreateInfoEXT debugInfo;
        debugInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT;
        debugInfo.pNext = nullptr;
        debugInfo.pfnCallback = (PFN_vkDebugReportCallbackEXT)debugMsgCallback;
        debugInfo.flags = debugFlags;

        result = vkCreateDebugReportCallbackEXT(mInstance, &debugInfo, nullptr, &mDebugCallback);
        assert(result == VK_SUCCESS);
#endif

        // Enumerate all devices
        result = vkEnumeratePhysicalDevices(mInstance, &mNumDevices, nullptr);
        assert(result == VK_SUCCESS);

        Vector<VkPhysicalDevice> physicalDevices(mNumDevices);
        result = vkEnumeratePhysicalDevices(mInstance, &mNumDevices, physicalDevices.data());
        assert(result == VK_SUCCESS);

        mDevices.resize(mNumDevices);
        for(uint32_t i = 0; i < mNumDevices; i++)
            mDevices[i] = bs_shared_ptr_new<VulkanDevice>(physicalDevices[i], i);

        // Find primary device
        // Note: MULTIGPU - Detect multiple similar devices here if supporting multi-GPU
        for (uint32_t i = 0; i < mNumDevices; i++)
        {
            bool isPrimary = mDevices[i]->getDeviceProperties().deviceType == VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU;

            if (isPrimary)
            {
                mDevices[i]->setIsPrimary();
                mPrimaryDevices.push_back(mDevices[i]);
                break;
            }
        }

        if (mPrimaryDevices.size() == 0)
            mPrimaryDevices.push_back(mDevices[0]);

#if BS_PLATFORM == BS_PLATFORM_WIN32
        mVideoModeInfo = bs_shared_ptr_new<Win32VideoModeInfo>();
#elif BS_PLATFORM == BS_PLATFORM_LINUX
        mVideoModeInfo = bs_shared_ptr_new<LinuxVideoModeInfo>();
#else
        static_assert(false, "mVideoModeInfo needs to be created.");
#endif

        GPUInfo gpuInfo;
        gpuInfo.numGPUs = std::min(5U, mNumDevices);

        for(UINT32 i = 0; i < gpuInfo.numGPUs; i++)
            gpuInfo.names[i] = mDevices[i]->getDeviceProperties().deviceName;

        PlatformUtility::_setGPUInfo(gpuInfo);

        // Get required extension functions
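        // Note: Surface queries are instance-level functions while swap chain operations are device-level, which is
        // why the former use GET_INSTANCE_PROC_ADDR and the latter use GET_DEVICE_PROC_ADDR (presumably thin wrappers
        // over vkGetInstanceProcAddr/vkGetDeviceProcAddr) on the device that will be used for presentation.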
        GET_INSTANCE_PROC_ADDR(mInstance, GetPhysicalDeviceSurfaceSupportKHR);
        GET_INSTANCE_PROC_ADDR(mInstance, GetPhysicalDeviceSurfaceFormatsKHR);
        GET_INSTANCE_PROC_ADDR(mInstance, GetPhysicalDeviceSurfaceCapabilitiesKHR);
        GET_INSTANCE_PROC_ADDR(mInstance, GetPhysicalDeviceSurfacePresentModesKHR);

        VkDevice presentDevice = _getPresentDevice()->getLogical();
        GET_DEVICE_PROC_ADDR(presentDevice, CreateSwapchainKHR);
        GET_DEVICE_PROC_ADDR(presentDevice, DestroySwapchainKHR);
        GET_DEVICE_PROC_ADDR(presentDevice, GetSwapchainImagesKHR);
        GET_DEVICE_PROC_ADDR(presentDevice, AcquireNextImageKHR);
        GET_DEVICE_PROC_ADDR(presentDevice, QueuePresentKHR);

        // Create command buffer manager
        CommandBufferManager::startUp<VulkanCommandBufferManager>(*this);

        // Create main command buffer
        mMainCommandBuffer = std::static_pointer_cast<VulkanCommandBuffer>(CommandBuffer::create(GQT_GRAPHICS));

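        // Note: The managers below come in pairs - the ones qualified with bs:: are the simulation-thread versions,
        // while the unqualified ones resolve to this namespace (bs::ct) and are their core-thread counterparts, so
        // both need to be started.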
        // Create the texture manager for use by others
        bs::TextureManager::startUp<bs::VulkanTextureManager>();
        TextureManager::startUp<VulkanTextureManager>();

        // Create hardware buffer manager
        bs::HardwareBufferManager::startUp();
        HardwareBufferManager::startUp<VulkanHardwareBufferManager>();

        // Create render window manager
        bs::RenderWindowManager::startUp<bs::VulkanRenderWindowManager>();
        RenderWindowManager::startUp();

        // Create query manager
        QueryManager::startUp<VulkanQueryManager>(*this);

        // Create vertex input manager
        VulkanVertexInputManager::startUp();

        // Create & register GLSL (VKSL) program factory
        mGLSLFactory = bs_new<VulkanGLSLProgramFactory>();

        // Create render state manager
        RenderStateManager::startUp<VulkanRenderStateManager>();

        GpuProgramManager::instance().addFactory("vksl", mGLSLFactory);

        initCapabilites();

        RenderAPI::initialize();
    }

    void VulkanRenderAPI::destroyCore()
    {
        THROW_IF_NOT_CORE_THREAD;

        if (mGLSLFactory != nullptr)
        {
            bs_delete(mGLSLFactory);
            mGLSLFactory = nullptr;
        }

        VulkanVertexInputManager::shutDown();
        QueryManager::shutDown();
        RenderStateManager::shutDown();
        RenderWindowManager::shutDown();
        bs::RenderWindowManager::shutDown();
        HardwareBufferManager::shutDown();
        bs::HardwareBufferManager::shutDown();
        TextureManager::shutDown();
        bs::TextureManager::shutDown();

        mMainCommandBuffer = nullptr;

        // Make sure everything finishes and all resources get freed
        VulkanCommandBufferManager& cmdBufManager = static_cast<VulkanCommandBufferManager&>(CommandBufferManager::instance());
        for (UINT32 i = 0; i < (UINT32)mDevices.size(); i++)
        {
            mDevices[i]->waitIdle();
            cmdBufManager.refreshStates(i);
        }

        CommandBufferManager::shutDown();

        mPrimaryDevices.clear();
        mDevices.clear();

#if BS_DEBUG_MODE
        if (mDebugCallback != nullptr)
            vkDestroyDebugReportCallbackEXT(mInstance, mDebugCallback, gVulkanAllocator);
#endif

        vkDestroyInstance(mInstance, gVulkanAllocator);

        RenderAPI::destroyCore();
    }

    void VulkanRenderAPI::setGraphicsPipeline(const SPtr<GraphicsPipelineState>& pipelineState,
        const SPtr<CommandBuffer>& commandBuffer)
    {
        VulkanCommandBuffer* cb = getCB(commandBuffer);
        VulkanCmdBuffer* vkCB = cb->getInternal();

        vkCB->setPipelineState(pipelineState);

        BS_INC_RENDER_STAT(NumPipelineStateChanges);
    }

    void VulkanRenderAPI::setComputePipeline(const SPtr<ComputePipelineState>& pipelineState,
        const SPtr<CommandBuffer>& commandBuffer)
    {
        VulkanCommandBuffer* cb = getCB(commandBuffer);
        VulkanCmdBuffer* vkCB = cb->getInternal();

        vkCB->setPipelineState(pipelineState);

        BS_INC_RENDER_STAT(NumPipelineStateChanges);
    }

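    // Note: Before binding, any parameter block (uniform) buffers referenced by the parameters are flushed to the
    // GPU for the global queue this command buffer will be submitted on.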
    void VulkanRenderAPI::setGpuParams(const SPtr<GpuParams>& gpuParams, const SPtr<CommandBuffer>& commandBuffer)
    {
        VulkanCommandBuffer* cb = getCB(commandBuffer);
        VulkanCmdBuffer* vkCB = cb->getInternal();

        UINT32 globalQueueIdx = CommandSyncMask::getGlobalQueueIdx(cb->getType(), cb->getQueueIdx());
        for (UINT32 i = 0; i < GPT_COUNT; i++)
        {
            SPtr<GpuParamDesc> paramDesc = gpuParams->getParamDesc((GpuProgramType)i);
            if (paramDesc == nullptr)
                continue;

            // Flush all param block buffers
            for (auto iter = paramDesc->paramBlocks.begin(); iter != paramDesc->paramBlocks.end(); ++iter)
            {
                SPtr<GpuParamBlockBuffer> buffer = gpuParams->getParamBlockBuffer(iter->second.set, iter->second.slot);
                if (buffer != nullptr)
                    buffer->flushToGPU(globalQueueIdx);
            }
        }

        vkCB->setGpuParams(gpuParams);

        BS_INC_RENDER_STAT(NumGpuParamBinds);
    }

    void VulkanRenderAPI::setViewport(const Rect2& vp, const SPtr<CommandBuffer>& commandBuffer)
    {
        VulkanCommandBuffer* cb = getCB(commandBuffer);
        VulkanCmdBuffer* vkCB = cb->getInternal();

        vkCB->setViewport(vp);
    }

    void VulkanRenderAPI::setVertexBuffers(UINT32 index, SPtr<VertexBuffer>* buffers, UINT32 numBuffers,
        const SPtr<CommandBuffer>& commandBuffer)
    {
        VulkanCommandBuffer* cb = getCB(commandBuffer);
        VulkanCmdBuffer* vkCB = cb->getInternal();

        vkCB->setVertexBuffers(index, buffers, numBuffers);

        BS_INC_RENDER_STAT(NumVertexBufferBinds);
    }

    void VulkanRenderAPI::setIndexBuffer(const SPtr<IndexBuffer>& buffer, const SPtr<CommandBuffer>& commandBuffer)
    {
        VulkanCommandBuffer* cb = getCB(commandBuffer);
        VulkanCmdBuffer* vkCB = cb->getInternal();

        vkCB->setIndexBuffer(buffer);

        BS_INC_RENDER_STAT(NumIndexBufferBinds);
    }

    void VulkanRenderAPI::setVertexDeclaration(const SPtr<VertexDeclaration>& vertexDeclaration,
        const SPtr<CommandBuffer>& commandBuffer)
    {
        VulkanCommandBuffer* cb = getCB(commandBuffer);
        VulkanCmdBuffer* vkCB = cb->getInternal();

        vkCB->setVertexDeclaration(vertexDeclaration);
    }

    void VulkanRenderAPI::setDrawOperation(DrawOperationType op, const SPtr<CommandBuffer>& commandBuffer)
    {
        VulkanCommandBuffer* cb = getCB(commandBuffer);
        VulkanCmdBuffer* vkCB = cb->getInternal();

        vkCB->setDrawOp(op);
    }

    void VulkanRenderAPI::draw(UINT32 vertexOffset, UINT32 vertexCount, UINT32 instanceCount,
        const SPtr<CommandBuffer>& commandBuffer)
    {
        UINT32 primCount = 0;

        VulkanCommandBuffer* cb = getCB(commandBuffer);
        VulkanCmdBuffer* vkCB = cb->getInternal();

        vkCB->draw(vertexOffset, vertexCount, instanceCount);

        BS_INC_RENDER_STAT(NumDrawCalls);
        BS_ADD_RENDER_STAT(NumVertices, vertexCount);
        BS_ADD_RENDER_STAT(NumPrimitives, primCount);
    }

    void VulkanRenderAPI::drawIndexed(UINT32 startIndex, UINT32 indexCount, UINT32 vertexOffset, UINT32 vertexCount,
        UINT32 instanceCount, const SPtr<CommandBuffer>& commandBuffer)
    {
        UINT32 primCount = 0;

        VulkanCommandBuffer* cb = getCB(commandBuffer);
        VulkanCmdBuffer* vkCB = cb->getInternal();

        vkCB->drawIndexed(startIndex, indexCount, vertexOffset, instanceCount);

        BS_INC_RENDER_STAT(NumDrawCalls);
        BS_ADD_RENDER_STAT(NumVertices, vertexCount);
        BS_ADD_RENDER_STAT(NumPrimitives, primCount);
    }

    void VulkanRenderAPI::dispatchCompute(UINT32 numGroupsX, UINT32 numGroupsY, UINT32 numGroupsZ,
        const SPtr<CommandBuffer>& commandBuffer)
    {
        VulkanCommandBuffer* cb = getCB(commandBuffer);
        VulkanCmdBuffer* vkCB = cb->getInternal();

        vkCB->dispatch(numGroupsX, numGroupsY, numGroupsZ);

        BS_INC_RENDER_STAT(NumComputeCalls);
    }

    void VulkanRenderAPI::setScissorRect(UINT32 left, UINT32 top, UINT32 right, UINT32 bottom,
        const SPtr<CommandBuffer>& commandBuffer)
    {
        VulkanCommandBuffer* cb = getCB(commandBuffer);
        VulkanCmdBuffer* vkCB = cb->getInternal();

        Rect2I area(left, top, right - left, bottom - top);
        vkCB->setScissorRect(area);
    }

    void VulkanRenderAPI::setStencilRef(UINT32 value, const SPtr<CommandBuffer>& commandBuffer)
    {
        VulkanCommandBuffer* cb = getCB(commandBuffer);
        VulkanCmdBuffer* vkCB = cb->getInternal();

        vkCB->setStencilRef(value);
    }

    void VulkanRenderAPI::clearViewport(UINT32 buffers, const Color& color, float depth, UINT16 stencil, UINT8 targetMask,
        const SPtr<CommandBuffer>& commandBuffer)
    {
        VulkanCommandBuffer* cb = getCB(commandBuffer);
        VulkanCmdBuffer* vkCB = cb->getInternal();

        vkCB->clearViewport(buffers, color, depth, stencil, targetMask);

        BS_INC_RENDER_STAT(NumClears);
    }

    void VulkanRenderAPI::clearRenderTarget(UINT32 buffers, const Color& color, float depth, UINT16 stencil,
        UINT8 targetMask, const SPtr<CommandBuffer>& commandBuffer)
    {
        VulkanCommandBuffer* cb = getCB(commandBuffer);
        VulkanCmdBuffer* vkCB = cb->getInternal();

        vkCB->clearRenderTarget(buffers, color, depth, stencil, targetMask);

        BS_INC_RENDER_STAT(NumClears);
    }

    void VulkanRenderAPI::setRenderTarget(const SPtr<RenderTarget>& target, UINT32 readOnlyFlags,
        RenderSurfaceMask loadMask, const SPtr<CommandBuffer>& commandBuffer)
    {
        VulkanCommandBuffer* cb = getCB(commandBuffer);
        VulkanCmdBuffer* vkCB = cb->getInternal();

        vkCB->setRenderTarget(target, readOnlyFlags, loadMask);

        BS_INC_RENDER_STAT(NumRenderTargetChanges);
    }

    void VulkanRenderAPI::swapBuffers(const SPtr<RenderTarget>& target, UINT32 syncMask)
    {
        THROW_IF_NOT_CORE_THREAD;

        submitCommandBuffer(mMainCommandBuffer, syncMask);
        target->swapBuffers(syncMask);

        // See if any command buffers finished executing
        VulkanCommandBufferManager& cbm = static_cast<VulkanCommandBufferManager&>(CommandBufferManager::instance());
        for (UINT32 i = 0; i < (UINT32)mDevices.size(); i++)
            cbm.refreshStates(i);

        BS_INC_RENDER_STAT(NumPresents);
    }

    void VulkanRenderAPI::addCommands(const SPtr<CommandBuffer>& commandBuffer, const SPtr<CommandBuffer>& secondary)
    {
        BS_EXCEPT(NotImplementedException, "Secondary command buffers not implemented");
    }

    void VulkanRenderAPI::submitCommandBuffer(const SPtr<CommandBuffer>& commandBuffer, UINT32 syncMask)
    {
        THROW_IF_NOT_CORE_THREAD;

        VulkanCommandBuffer* cmdBuffer = getCB(commandBuffer);

        // Submit all transfer buffers first
        VulkanCommandBufferManager& cbm = static_cast<VulkanCommandBufferManager&>(CommandBufferManager::instance());
        cbm.flushTransferBuffers(cmdBuffer->getDeviceIdx());

        cmdBuffer->submit(syncMask);
    }

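    // Adjusts a projection matrix for Vulkan clip-space conventions: Vulkan's NDC Y axis points down and its depth
    // range is [0, 1] instead of [-1, 1]. Negating the Y scale flips the Y axis, and replacing the third row with
    // (row2 + row3) / 2 remaps depth, since z' = (z + w) / 2 turns z/w in [-1, 1] into z'/w in [0, 1].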
    void VulkanRenderAPI::convertProjectionMatrix(const Matrix4& matrix, Matrix4& dest)
    {
        dest = matrix;

        // Flip Y axis
        dest[1][1] = -dest[1][1];

        // Convert depth range from [-1,1] to [0,1]
        dest[2][0] = (dest[2][0] + dest[3][0]) / 2;
        dest[2][1] = (dest[2][1] + dest[3][1]) / 2;
        dest[2][2] = (dest[2][2] + dest[3][2]) / 2;
        dest[2][3] = (dest[2][3] + dest[3][3]) / 2;
    }

    const RenderAPIInfo& VulkanRenderAPI::getAPIInfo() const
    {
        RenderAPIFeatures featureFlags =
            RenderAPIFeatureFlag::NDCYAxisDown |
            RenderAPIFeatureFlag::ColumnMajorMatrices |
            RenderAPIFeatureFlag::MultiThreadedCB |
            RenderAPIFeatureFlag::MSAAImageStores |
            RenderAPIFeatureFlag::TextureViews |
            RenderAPIFeatureFlag::Compute |
            RenderAPIFeatureFlag::LoadStore;

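        // Note: The four leading values appear to be the horizontal/vertical texel offsets and the min/max depth
        // range (0 to 1, matching Vulkan's depth conventions); vertex colors are expected in ABGR order.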
        static RenderAPIInfo info(0.0f, 0.0f, 0.0f, 1.0f, VET_COLOR_ABGR, featureFlags);
        return info;
    }

    GpuParamBlockDesc VulkanRenderAPI::generateParamBlockDesc(const String& name, Vector<GpuParamDataDesc>& params)
    {
        GpuParamBlockDesc block;
        block.blockSize = 0;
        block.isShareable = true;
        block.name = name;
        block.slot = 0;
        block.set = 0;

        for (auto& param : params)
        {
            UINT32 size;

            if(param.type == GPDT_STRUCT)
            {
                // Structs are always aligned and rounded up to vec4
                size = Math::divideAndRoundUp(param.elementSize, 16U) * 4;
                block.blockSize = Math::divideAndRoundUp(block.blockSize, 4U) * 4;
            }
            else
                size = VulkanUtility::calcInterfaceBlockElementSizeAndOffset(param.type, param.arraySize, block.blockSize);

            if (param.arraySize > 1)
            {
                param.elementSize = size;
                param.arrayElementStride = size;
                param.cpuMemOffset = block.blockSize;
                param.gpuMemOffset = 0;

                block.blockSize += size * param.arraySize;
            }
            else
            {
                param.elementSize = size;
                param.arrayElementStride = size;
                param.cpuMemOffset = block.blockSize;
                param.gpuMemOffset = 0;

                block.blockSize += size;
            }

            param.paramBlockSlot = 0;
            param.paramBlockSet = 0;
        }

        // Constant buffer size must always be a multiple of 16
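        // (Sizes and offsets here are tracked in 32-bit (4-byte) units, so rounding blockSize up to a multiple of 4
        // corresponds to a 16-byte multiple.)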
        if (block.blockSize % 4 != 0)
            block.blockSize += (4 - (block.blockSize % 4));

        return block;
    }

    void VulkanRenderAPI::initCapabilites()
    {
        mNumDevices = (UINT32)mDevices.size();
        mCurrentCapabilities = bs_newN<RenderAPICapabilities>(mNumDevices);

        UINT32 deviceIdx = 0;
        for (auto& device : mDevices)
        {
            RenderAPICapabilities& caps = mCurrentCapabilities[deviceIdx];

            const VkPhysicalDeviceProperties& deviceProps = device->getDeviceProperties();
            const VkPhysicalDeviceFeatures& deviceFeatures = device->getDeviceFeatures();
            const VkPhysicalDeviceLimits& deviceLimits = deviceProps.limits;

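            // Decode the supported Vulkan API version (packed as 10-bit major, 10-bit minor, 12-bit patch, matching
            // VK_VERSION_MAJOR/MINOR/PATCH) and report it as the driver version.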
            DriverVersion driverVersion;
            driverVersion.major = ((uint32_t)(deviceProps.apiVersion) >> 22);
            driverVersion.minor = ((uint32_t)(deviceProps.apiVersion) >> 12) & 0x3ff;
            driverVersion.release = (uint32_t)(deviceProps.apiVersion) & 0xfff;
            driverVersion.build = 0;

            caps.setDriverVersion(driverVersion);
            caps.setDeviceName(deviceProps.deviceName);

            // Determine vendor
            switch (deviceProps.vendorID)
            {
            case 0x10DE:
                caps.setVendor(GPU_NVIDIA);
                break;
            case 0x1002:
                caps.setVendor(GPU_AMD);
                break;
            case 0x163C:
            case 0x8086:
                caps.setVendor(GPU_INTEL);
                break;
            default:
                caps.setVendor(GPU_UNKNOWN);
                break;
            }

            caps.setRenderAPIName(getName());

            if(deviceFeatures.textureCompressionBC)
                caps.setCapability(RSC_TEXTURE_COMPRESSION_BC);

            if (deviceFeatures.textureCompressionETC2)
                caps.setCapability(RSC_TEXTURE_COMPRESSION_ETC2);

            if (deviceFeatures.textureCompressionASTC_LDR)
                caps.setCapability(RSC_TEXTURE_COMPRESSION_ASTC);

            caps.setMaxBoundVertexBuffers(deviceLimits.maxVertexInputBindings);
            caps.setNumMultiRenderTargets(deviceLimits.maxColorAttachments);

            caps.setCapability(RSC_COMPUTE_PROGRAM);

            caps.setNumTextureUnits(GPT_FRAGMENT_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);
            caps.setNumTextureUnits(GPT_VERTEX_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);
            caps.setNumTextureUnits(GPT_COMPUTE_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);

            caps.setNumGpuParamBlockBuffers(GPT_FRAGMENT_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);
            caps.setNumGpuParamBlockBuffers(GPT_VERTEX_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);
            caps.setNumGpuParamBlockBuffers(GPT_COMPUTE_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);

            caps.setNumLoadStoreTextureUnits(GPT_FRAGMENT_PROGRAM, deviceLimits.maxPerStageDescriptorStorageImages);
            caps.setNumLoadStoreTextureUnits(GPT_COMPUTE_PROGRAM, deviceLimits.maxPerStageDescriptorStorageImages);

            if(deviceFeatures.geometryShader)
            {
                caps.setCapability(RSC_GEOMETRY_PROGRAM);
                caps.addShaderProfile("gs_5_0");
                caps.setNumTextureUnits(GPT_GEOMETRY_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);
                caps.setNumGpuParamBlockBuffers(GPT_GEOMETRY_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);
                caps.setGeometryProgramNumOutputVertices(deviceLimits.maxGeometryOutputVertices);
            }

            if (deviceFeatures.tessellationShader)
            {
                caps.setCapability(RSC_TESSELLATION_PROGRAM);

                caps.setNumTextureUnits(GPT_HULL_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);
                caps.setNumTextureUnits(GPT_DOMAIN_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);

                caps.setNumGpuParamBlockBuffers(GPT_HULL_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);
                caps.setNumGpuParamBlockBuffers(GPT_DOMAIN_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);
            }

            caps.setNumCombinedTextureUnits(caps.getNumTextureUnits(GPT_FRAGMENT_PROGRAM)
                + caps.getNumTextureUnits(GPT_VERTEX_PROGRAM) + caps.getNumTextureUnits(GPT_GEOMETRY_PROGRAM)
                + caps.getNumTextureUnits(GPT_HULL_PROGRAM) + caps.getNumTextureUnits(GPT_DOMAIN_PROGRAM)
                + caps.getNumTextureUnits(GPT_COMPUTE_PROGRAM));

            caps.setNumCombinedGpuParamBlockBuffers(caps.getNumGpuParamBlockBuffers(GPT_FRAGMENT_PROGRAM)
                + caps.getNumGpuParamBlockBuffers(GPT_VERTEX_PROGRAM) + caps.getNumGpuParamBlockBuffers(GPT_GEOMETRY_PROGRAM)
                + caps.getNumGpuParamBlockBuffers(GPT_HULL_PROGRAM) + caps.getNumGpuParamBlockBuffers(GPT_DOMAIN_PROGRAM)
                + caps.getNumGpuParamBlockBuffers(GPT_COMPUTE_PROGRAM));

            caps.setNumCombinedLoadStoreTextureUnits(caps.getNumLoadStoreTextureUnits(GPT_FRAGMENT_PROGRAM)
                + caps.getNumLoadStoreTextureUnits(GPT_COMPUTE_PROGRAM));

            caps.addShaderProfile("glsl");

            deviceIdx++;
        }
    }

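    // Returns the Vulkan implementation of the provided command buffer, falling back to the internal main command
    // buffer when none is provided.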
    VulkanCommandBuffer* VulkanRenderAPI::getCB(const SPtr<CommandBuffer>& buffer)
    {
        if (buffer != nullptr)
            return static_cast<VulkanCommandBuffer*>(buffer.get());

        return static_cast<VulkanCommandBuffer*>(mMainCommandBuffer.get());
    }

    VulkanRenderAPI& gVulkanRenderAPI()
    {
        return static_cast<VulkanRenderAPI&>(RenderAPI::instance());
    }
}}