//********************************** Banshee Engine (www.banshee3d.com) **************************************************//
//**************** Copyright (c) 2016 Marko Pintera ([email protected]). All rights reserved. **********************//
#include "BsVulkanRenderAPI.h"
#include "BsCoreThread.h"
#include "BsRenderStats.h"
#include "BsGpuParamDesc.h"
#include "BsVulkanDevice.h"
#include "BsVulkanTextureManager.h"
#include "BsVulkanRenderWindowManager.h"
#include "BsVulkanHardwareBufferManager.h"
#include "BsVulkanRenderStateManager.h"
#include "BsGpuProgramManager.h"
#include "BsVulkanQueryManager.h"
#include "BsVulkanGLSLProgramFactory.h"
#include "BsVulkanCommandBufferManager.h"
#include "BsVulkanCommandBuffer.h"
#include "BsVulkanGpuParams.h"
#include "BsVulkanVertexInputManager.h"
#include "BsVulkanGpuParamBlockBuffer.h"

#if BS_PLATFORM == BS_PLATFORM_WIN32
#include "Win32/BsWin32VideoModeInfo.h"
#else
static_assert(false, "Other platform includes go here.");
#endif

namespace bs
{
	VkAllocationCallbacks* gVulkanAllocator = nullptr;

	PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = nullptr;
	PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = nullptr;
	PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = nullptr;
	PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = nullptr;
	PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = nullptr;
	PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = nullptr;
	PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = nullptr;
	PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = nullptr;
	PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = nullptr;
	PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = nullptr;
	PFN_vkQueuePresentKHR vkQueuePresentKHR = nullptr;
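
	/** Callback that routes Vulkan debug report (validation layer) messages to the engine's exception and logging system. */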
	VkBool32 debugMsgCallback(VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objType, uint64_t srcObject,
		size_t location, int32_t msgCode, const char* pLayerPrefix, const char* pMsg, void* pUserData)
	{
		StringStream message;

		// Determine prefix
		if (flags & VK_DEBUG_REPORT_ERROR_BIT_EXT)
			message << "ERROR";

		if (flags & VK_DEBUG_REPORT_WARNING_BIT_EXT)
			message << "WARNING";

		if (flags & VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT)
			message << "PERFORMANCE";

		if (flags & VK_DEBUG_REPORT_INFORMATION_BIT_EXT)
			message << "INFO";

		if (flags & VK_DEBUG_REPORT_DEBUG_BIT_EXT)
			message << "DEBUG";

		message << ": [" << pLayerPrefix << "] Code " << msgCode << ": " << pMsg << std::endl;

		if (flags & VK_DEBUG_REPORT_ERROR_BIT_EXT)
			BS_EXCEPT(RenderingAPIException, message.str())
		else if (flags & VK_DEBUG_REPORT_WARNING_BIT_EXT || flags & VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT)
			LOGWRN(message.str())
		else
			LOGDBG(message.str())

		// Don't abort calls that caused a validation message
		return VK_FALSE;
	}

	VulkanRenderAPI::VulkanRenderAPI()
		:mInstance(nullptr), mDebugCallback(nullptr)
	{ }

	VulkanRenderAPI::~VulkanRenderAPI()
	{
	}

	const StringID& VulkanRenderAPI::getName() const
	{
		static StringID strName("VulkanRenderAPI");
		return strName;
	}

	const String& VulkanRenderAPI::getShadingLanguageName() const
	{
		static String strName("glsl");
		return strName;
	}

	void VulkanRenderAPI::initialize()
	{
		THROW_IF_NOT_CORE_THREAD;

		// Create instance
		VkApplicationInfo appInfo;
		appInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
		appInfo.pNext = nullptr;
		appInfo.pApplicationName = "Banshee3D App";
		appInfo.applicationVersion = 1;
		appInfo.pEngineName = "Banshee3D";
		appInfo.engineVersion = (0 << 24) | (4 << 16) | 0; // Engine version 0.4.0 packed as major.minor.patch
		appInfo.apiVersion = VK_API_VERSION_1_0;
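
		// Instance layers and extensions; debug builds additionally enable the standard validation layer
		// and the debug report extension.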
#if BS_DEBUG_MODE
		const char* layers[] =
		{
			"VK_LAYER_LUNARG_standard_validation"
		};

		const char* extensions[] =
		{
			nullptr, /** Surface extension */
			nullptr, /** OS specific surface extension */
			VK_EXT_DEBUG_REPORT_EXTENSION_NAME
		};

		uint32_t numLayers = sizeof(layers) / sizeof(layers[0]);
#else
		const char** layers = nullptr;

		const char* extensions[] =
		{
			nullptr, /** Surface extension */
			nullptr, /** OS specific surface extension */
		};

		// No layers are enabled in release builds
		uint32_t numLayers = 0;
#endif

		extensions[0] = VK_KHR_SURFACE_EXTENSION_NAME;

#if BS_PLATFORM == BS_PLATFORM_WIN32
		extensions[1] = VK_KHR_WIN32_SURFACE_EXTENSION_NAME;
#elif BS_PLATFORM == BS_PLATFORM_ANDROID
		extensions[1] = VK_KHR_ANDROID_SURFACE_EXTENSION_NAME;
#else
		extensions[1] = VK_KHR_XCB_SURFACE_EXTENSION_NAME;
#endif

		uint32_t numExtensions = sizeof(extensions) / sizeof(extensions[0]);

		VkInstanceCreateInfo instanceInfo;
		instanceInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
		instanceInfo.pNext = nullptr;
		instanceInfo.flags = 0;
		instanceInfo.pApplicationInfo = &appInfo;
		instanceInfo.enabledLayerCount = numLayers;
		instanceInfo.ppEnabledLayerNames = layers;
		instanceInfo.enabledExtensionCount = numExtensions;
		instanceInfo.ppEnabledExtensionNames = extensions;

		VkResult result = vkCreateInstance(&instanceInfo, gVulkanAllocator, &mInstance);
		assert(result == VK_SUCCESS);

		// Set up debugging
#if BS_DEBUG_MODE
		VkDebugReportFlagsEXT debugFlags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT |
			VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT;

		GET_INSTANCE_PROC_ADDR(mInstance, CreateDebugReportCallbackEXT);
		GET_INSTANCE_PROC_ADDR(mInstance, DestroyDebugReportCallbackEXT);

		VkDebugReportCallbackCreateInfoEXT debugInfo;
		debugInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
		debugInfo.pNext = nullptr;
		debugInfo.pfnCallback = (PFN_vkDebugReportCallbackEXT)debugMsgCallback;
		debugInfo.flags = debugFlags;

		result = vkCreateDebugReportCallbackEXT(mInstance, &debugInfo, nullptr, &mDebugCallback);
		assert(result == VK_SUCCESS);
#endif

		// Enumerate all devices
		result = vkEnumeratePhysicalDevices(mInstance, &mNumDevices, nullptr);
		assert(result == VK_SUCCESS);

		Vector<VkPhysicalDevice> physicalDevices(mNumDevices);
		result = vkEnumeratePhysicalDevices(mInstance, &mNumDevices, physicalDevices.data());
		assert(result == VK_SUCCESS);

		mDevices.resize(mNumDevices);
		for(uint32_t i = 0; i < mNumDevices; i++)
			mDevices[i] = bs_shared_ptr_new<VulkanDevice>(physicalDevices[i], i);

		// Find primary device
		// Note: MULTIGPU - Detect multiple similar devices here if supporting multi-GPU
		for (uint32_t i = 0; i < mNumDevices; i++)
		{
			bool isPrimary = mDevices[i]->getDeviceProperties().deviceType == VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU;

			if (isPrimary)
			{
				mDevices[i]->setIsPrimary();
				mPrimaryDevices.push_back(mDevices[i]);
				break;
			}
		}

		if (mPrimaryDevices.size() == 0)
			mPrimaryDevices.push_back(mDevices[0]);

#if BS_PLATFORM == BS_PLATFORM_WIN32
		mVideoModeInfo = bs_shared_ptr_new<Win32VideoModeInfo>();
#else
		static_assert(false, "mVideoModeInfo needs to be created.");
#endif

		// Get required extension functions
		GET_INSTANCE_PROC_ADDR(mInstance, GetPhysicalDeviceSurfaceSupportKHR);
		GET_INSTANCE_PROC_ADDR(mInstance, GetPhysicalDeviceSurfaceFormatsKHR);
		GET_INSTANCE_PROC_ADDR(mInstance, GetPhysicalDeviceSurfaceCapabilitiesKHR);
		GET_INSTANCE_PROC_ADDR(mInstance, GetPhysicalDeviceSurfacePresentModesKHR);

		VkDevice presentDevice = _getPresentDevice()->getLogical();
		GET_DEVICE_PROC_ADDR(presentDevice, CreateSwapchainKHR);
		GET_DEVICE_PROC_ADDR(presentDevice, DestroySwapchainKHR);
		GET_DEVICE_PROC_ADDR(presentDevice, GetSwapchainImagesKHR);
		GET_DEVICE_PROC_ADDR(presentDevice, AcquireNextImageKHR);
		GET_DEVICE_PROC_ADDR(presentDevice, QueuePresentKHR);

		// Create command buffer manager
		CommandBufferManager::startUp<VulkanCommandBufferManager>(*this);

		// Create main command buffer
		mMainCommandBuffer = std::static_pointer_cast<VulkanCommandBuffer>(CommandBuffer::create(GQT_GRAPHICS));

		// Create the texture manager for use by others
		TextureManager::startUp<VulkanTextureManager>();
		TextureCoreManager::startUp<VulkanTextureCoreManager>();

		// Create hardware buffer manager
		HardwareBufferManager::startUp();
		HardwareBufferCoreManager::startUp<VulkanHardwareBufferCoreManager>();

		// Create render window manager
		RenderWindowManager::startUp<VulkanRenderWindowManager>();
		RenderWindowCoreManager::startUp<VulkanRenderWindowCoreManager>(*this);

		// Create query manager
		QueryManager::startUp<VulkanQueryManager>(*this);

		// Create vertex input manager
		VulkanVertexInputManager::startUp();
  203. // Create & register HLSL factory
  204. mGLSLFactory = bs_new<VulkanGLSLProgramFactory>();
  205. // Create render state manager
  206. RenderStateCoreManager::startUp<VulkanRenderStateCoreManager>();
  207. GpuProgramCoreManager::instance().addFactory(mGLSLFactory);
  208. initCapabilites();
  209. RenderAPICore::initialize();
  210. }

	void VulkanRenderAPI::destroyCore()
	{
		THROW_IF_NOT_CORE_THREAD;

		if (mGLSLFactory != nullptr)
		{
			bs_delete(mGLSLFactory);
			mGLSLFactory = nullptr;
		}

		VulkanVertexInputManager::shutDown();
		QueryManager::shutDown();
		RenderStateCoreManager::shutDown();
		RenderWindowCoreManager::shutDown();
		RenderWindowManager::shutDown();
		HardwareBufferCoreManager::shutDown();
		HardwareBufferManager::shutDown();
		TextureCoreManager::shutDown();
		TextureManager::shutDown();

		mMainCommandBuffer = nullptr;

		// Make sure everything finishes and all resources get freed
		VulkanCommandBufferManager& cmdBufManager = static_cast<VulkanCommandBufferManager&>(CommandBufferManager::instance());
		for (UINT32 i = 0; i < (UINT32)mDevices.size(); i++)
		{
			mDevices[i]->waitIdle();
			cmdBufManager.refreshStates(i);
		}

		CommandBufferManager::shutDown();

		mPrimaryDevices.clear();
		mDevices.clear();

#if BS_DEBUG_MODE
		if (mDebugCallback != nullptr)
			vkDestroyDebugReportCallbackEXT(mInstance, mDebugCallback, gVulkanAllocator);
#endif

		vkDestroyInstance(mInstance, gVulkanAllocator);

		RenderAPICore::destroyCore();
	}

	void VulkanRenderAPI::setGraphicsPipeline(const SPtr<GraphicsPipelineStateCore>& pipelineState,
		const SPtr<CommandBuffer>& commandBuffer)
	{
		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		vkCB->setPipelineState(pipelineState);

		BS_INC_RENDER_STAT(NumPipelineStateChanges);
	}

	void VulkanRenderAPI::setComputePipeline(const SPtr<ComputePipelineStateCore>& pipelineState,
		const SPtr<CommandBuffer>& commandBuffer)
	{
		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		vkCB->setPipelineState(pipelineState);

		BS_INC_RENDER_STAT(NumPipelineStateChanges);
	}
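
	/**
	 * Binds GPU parameters to the command buffer. Any bound param block (uniform) buffers are flushed to the GPU
	 * for the queue the command buffer executes on before the bind is recorded.
	 */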
	void VulkanRenderAPI::setGpuParams(const SPtr<GpuParamsCore>& gpuParams, const SPtr<CommandBuffer>& commandBuffer)
	{
		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		UINT32 globalQueueIdx = CommandSyncMask::getGlobalQueueIdx(cb->getType(), cb->getQueueIdx());
		for (UINT32 i = 0; i < GPT_COUNT; i++)
		{
			SPtr<GpuParamDesc> paramDesc = gpuParams->getParamDesc((GpuProgramType)i);
			if (paramDesc == nullptr)
				continue; // Stage not used by the bound pipeline

			// Flush all param block buffers
			for (auto iter = paramDesc->paramBlocks.begin(); iter != paramDesc->paramBlocks.end(); ++iter)
			{
				SPtr<GpuParamBlockBufferCore> buffer = gpuParams->getParamBlockBuffer(iter->second.set, iter->second.slot);
				if (buffer != nullptr)
					buffer->flushToGPU(globalQueueIdx);
			}
		}

		vkCB->setGpuParams(gpuParams);

		BS_INC_RENDER_STAT(NumGpuParamBinds);
	}

	void VulkanRenderAPI::beginFrame(const SPtr<CommandBuffer>& commandBuffer)
	{
		// Do nothing
	}

	void VulkanRenderAPI::endFrame(const SPtr<CommandBuffer>& commandBuffer)
	{
		// Do nothing
	}

	void VulkanRenderAPI::setViewport(const Rect2& vp, const SPtr<CommandBuffer>& commandBuffer)
	{
		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		vkCB->setViewport(vp);
	}

	void VulkanRenderAPI::setVertexBuffers(UINT32 index, SPtr<VertexBufferCore>* buffers, UINT32 numBuffers,
		const SPtr<CommandBuffer>& commandBuffer)
	{
		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		vkCB->setVertexBuffers(index, buffers, numBuffers);

		BS_INC_RENDER_STAT(NumVertexBufferBinds);
	}

	void VulkanRenderAPI::setIndexBuffer(const SPtr<IndexBufferCore>& buffer, const SPtr<CommandBuffer>& commandBuffer)
	{
		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		vkCB->setIndexBuffer(buffer);

		BS_INC_RENDER_STAT(NumIndexBufferBinds);
	}

	void VulkanRenderAPI::setVertexDeclaration(const SPtr<VertexDeclarationCore>& vertexDeclaration,
		const SPtr<CommandBuffer>& commandBuffer)
	{
		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		vkCB->setVertexDeclaration(vertexDeclaration);
	}

	void VulkanRenderAPI::setDrawOperation(DrawOperationType op, const SPtr<CommandBuffer>& commandBuffer)
	{
		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		vkCB->setDrawOp(op);
	}

	void VulkanRenderAPI::draw(UINT32 vertexOffset, UINT32 vertexCount, UINT32 instanceCount,
		const SPtr<CommandBuffer>& commandBuffer)
	{
		UINT32 primCount = 0;

		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		vkCB->draw(vertexOffset, vertexCount, instanceCount);

		BS_INC_RENDER_STAT(NumDrawCalls);
		BS_ADD_RENDER_STAT(NumVertices, vertexCount);
		BS_ADD_RENDER_STAT(NumPrimitives, primCount);
	}

	void VulkanRenderAPI::drawIndexed(UINT32 startIndex, UINT32 indexCount, UINT32 vertexOffset, UINT32 vertexCount,
		UINT32 instanceCount, const SPtr<CommandBuffer>& commandBuffer)
	{
		UINT32 primCount = 0;

		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		vkCB->drawIndexed(startIndex, indexCount, vertexOffset, instanceCount);

		BS_INC_RENDER_STAT(NumDrawCalls);
		BS_ADD_RENDER_STAT(NumVertices, vertexCount);
		BS_ADD_RENDER_STAT(NumPrimitives, primCount);
	}

	void VulkanRenderAPI::dispatchCompute(UINT32 numGroupsX, UINT32 numGroupsY, UINT32 numGroupsZ,
		const SPtr<CommandBuffer>& commandBuffer)
	{
		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		vkCB->dispatch(numGroupsX, numGroupsY, numGroupsZ);

		BS_INC_RENDER_STAT(NumComputeCalls);
	}

	void VulkanRenderAPI::setScissorRect(UINT32 left, UINT32 top, UINT32 right, UINT32 bottom,
		const SPtr<CommandBuffer>& commandBuffer)
	{
		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		Rect2I area(left, top, right - left, bottom - top);
		vkCB->setScissorRect(area);
	}

	void VulkanRenderAPI::setStencilRef(UINT32 value, const SPtr<CommandBuffer>& commandBuffer)
	{
		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		vkCB->setStencilRef(value);
	}

	void VulkanRenderAPI::clearViewport(UINT32 buffers, const Color& color, float depth, UINT16 stencil, UINT8 targetMask,
		const SPtr<CommandBuffer>& commandBuffer)
	{
		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		vkCB->clearViewport(buffers, color, depth, stencil, targetMask);

		BS_INC_RENDER_STAT(NumClears);
	}

	void VulkanRenderAPI::clearRenderTarget(UINT32 buffers, const Color& color, float depth, UINT16 stencil,
		UINT8 targetMask, const SPtr<CommandBuffer>& commandBuffer)
	{
		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		vkCB->clearRenderTarget(buffers, color, depth, stencil, targetMask);

		BS_INC_RENDER_STAT(NumClears);
	}

	void VulkanRenderAPI::setRenderTarget(const SPtr<RenderTargetCore>& target, bool readOnlyDepthStencil,
		RenderSurfaceMask loadMask, const SPtr<CommandBuffer>& commandBuffer)
	{
		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		vkCB->setRenderTarget(target, readOnlyDepthStencil, loadMask);

		BS_INC_RENDER_STAT(NumRenderTargetChanges);
	}
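
	/**
	 * Submits the main command buffer, presents the target's back buffer and refreshes command buffer states so that
	 * buffers which finished executing can release their resources.
	 */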
	void VulkanRenderAPI::swapBuffers(const SPtr<RenderTargetCore>& target, UINT32 syncMask)
	{
		THROW_IF_NOT_CORE_THREAD;

		submitCommandBuffer(mMainCommandBuffer, syncMask);
		target->swapBuffers(syncMask);

		// See if any command buffers finished executing
		VulkanCommandBufferManager& cbm = static_cast<VulkanCommandBufferManager&>(CommandBufferManager::instance());
		for (UINT32 i = 0; i < (UINT32)mDevices.size(); i++)
			cbm.refreshStates(i);

		BS_INC_RENDER_STAT(NumPresents);
	}

	void VulkanRenderAPI::addCommands(const SPtr<CommandBuffer>& commandBuffer, const SPtr<CommandBuffer>& secondary)
	{
		BS_EXCEPT(NotImplementedException, "Secondary command buffers not implemented");
	}

	void VulkanRenderAPI::submitCommandBuffer(const SPtr<CommandBuffer>& commandBuffer, UINT32 syncMask)
	{
		THROW_IF_NOT_CORE_THREAD;

		if (commandBuffer == nullptr)
			return;

		// Submit all transfer buffers first
		VulkanCommandBuffer& cmdBuffer = static_cast<VulkanCommandBuffer&>(*commandBuffer);

		VulkanCommandBufferManager& cbm = static_cast<VulkanCommandBufferManager&>(CommandBufferManager::instance());
		cbm.flushTransferBuffers(cmdBuffer.getDeviceIdx());

		cmdBuffer.submit(syncMask);
	}
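
	/**
	 * Adjusts a projection matrix so its output matches Vulkan clip space, which uses a [0, 1] depth range instead of
	 * the [-1, 1] range the engine's projection matrices are built for.
	 */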
	void VulkanRenderAPI::convertProjectionMatrix(const Matrix4& matrix, Matrix4& dest)
	{
		dest = matrix;

		// Convert depth range from [-1,1] to [0,1]
		dest[2][0] = (dest[2][0] + dest[3][0]) / 2;
		dest[2][1] = (dest[2][1] + dest[3][1]) / 2;
		dest[2][2] = (dest[2][2] + dest[3][2]) / 2;
		dest[2][3] = (dest[2][3] + dest[3][3]) / 2;
	}

	const RenderAPIInfo& VulkanRenderAPI::getAPIInfo() const
	{
		static RenderAPIInfo info(0.0f, 0.0f, 0.0f, 1.0f, VET_COLOR_ABGR, false, true, true, true);
		return info;
	}
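
	/**
	 * Lays out the provided parameters into a uniform block, assigning each parameter its offset and stride. Sizes and
	 * offsets computed below are expressed in 32-bit words (byte sizes from GpuParams::PARAM_SIZES divided by four).
	 */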
	GpuParamBlockDesc VulkanRenderAPI::generateParamBlockDesc(const String& name, Vector<GpuParamDataDesc>& params)
	{
		GpuParamBlockDesc block;
		block.blockSize = 0;
		block.isShareable = true;
		block.name = name;
		block.slot = 0;
		block.set = 0;

		for (auto& param : params)
		{
			const GpuParamDataTypeInfo& typeInfo = GpuParams::PARAM_SIZES.lookup[param.type];
			UINT32 size = typeInfo.size / 4;
			UINT32 alignment = typeInfo.alignment / 4;

			// Fix alignment if needed
			UINT32 alignOffset = block.blockSize % alignment;
			if (alignOffset != 0)
			{
				UINT32 padding = (alignment - alignOffset);
				block.blockSize += padding;
			}

			if (param.arraySize > 1)
			{
				// Array elements are always padded and aligned to vec4
				alignOffset = size % typeInfo.baseTypeSize;
				if (alignOffset != 0)
				{
					UINT32 padding = (typeInfo.baseTypeSize - alignOffset);
					size += padding;
				}

				alignOffset = block.blockSize % typeInfo.baseTypeSize;
				if (alignOffset != 0)
				{
					UINT32 padding = (typeInfo.baseTypeSize - alignOffset);
					block.blockSize += padding;
				}

				param.elementSize = size;
				param.arrayElementStride = size;
				param.cpuMemOffset = block.blockSize;
				param.gpuMemOffset = 0;

				block.blockSize += size * param.arraySize;
			}
			else
			{
				param.elementSize = size;
				param.arrayElementStride = size;
				param.cpuMemOffset = block.blockSize;
				param.gpuMemOffset = 0;

				block.blockSize += size;
			}

			param.paramBlockSlot = 0;
			param.paramBlockSet = 0;
		}

		// Constant buffer size must always be a multiple of 16 bytes (4 words, since blockSize is counted in words)
		if (block.blockSize % 4 != 0)
			block.blockSize += (4 - (block.blockSize % 4));

		return block;
	}
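
	/**
	 * Queries the properties, features and limits of every enumerated physical device and translates them into a
	 * per-device RenderAPICapabilities entry.
	 */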
	void VulkanRenderAPI::initCapabilites()
	{
		mNumDevices = (UINT32)mDevices.size();
		mCurrentCapabilities = bs_newN<RenderAPICapabilities>(mNumDevices);

		UINT32 deviceIdx = 0;
		for (auto& device : mDevices)
		{
			RenderAPICapabilities& caps = mCurrentCapabilities[deviceIdx];

			const VkPhysicalDeviceProperties& deviceProps = device->getDeviceProperties();
			const VkPhysicalDeviceFeatures& deviceFeatures = device->getDeviceFeatures();
			const VkPhysicalDeviceLimits& deviceLimits = deviceProps.limits;

			DriverVersion driverVersion;
			driverVersion.major = ((uint32_t)(deviceProps.apiVersion) >> 22);
			driverVersion.minor = ((uint32_t)(deviceProps.apiVersion) >> 12) & 0x3ff;
			driverVersion.release = (uint32_t)(deviceProps.apiVersion) & 0xfff;
			driverVersion.build = 0;

			caps.setDriverVersion(driverVersion);
			caps.setDeviceName(deviceProps.deviceName);

			// Determine vendor
			switch (deviceProps.vendorID)
			{
			case 0x10DE:
				caps.setVendor(GPU_NVIDIA);
				break;
			case 0x1002:
				caps.setVendor(GPU_AMD);
				break;
			case 0x163C:
			case 0x8086:
				caps.setVendor(GPU_INTEL);
				break;
			default:
				caps.setVendor(GPU_UNKNOWN);
				break;
			};

			caps.setRenderAPIName(getName());

			if(deviceFeatures.textureCompressionBC)
				caps.setCapability(RSC_TEXTURE_COMPRESSION_BC);

			if (deviceFeatures.textureCompressionETC2)
				caps.setCapability(RSC_TEXTURE_COMPRESSION_ETC2);

			if (deviceFeatures.textureCompressionASTC_LDR)
				caps.setCapability(RSC_TEXTURE_COMPRESSION_ASTC);

			caps.setMaxBoundVertexBuffers(deviceLimits.maxVertexInputBindings);
			caps.setNumMultiRenderTargets(deviceLimits.maxColorAttachments);

			caps.setCapability(RSC_COMPUTE_PROGRAM);

			caps.setNumTextureUnits(GPT_FRAGMENT_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);
			caps.setNumTextureUnits(GPT_VERTEX_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);
			caps.setNumTextureUnits(GPT_COMPUTE_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);

			caps.setNumGpuParamBlockBuffers(GPT_FRAGMENT_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);
			caps.setNumGpuParamBlockBuffers(GPT_VERTEX_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);
			caps.setNumGpuParamBlockBuffers(GPT_COMPUTE_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);

			caps.setNumLoadStoreTextureUnits(GPT_FRAGMENT_PROGRAM, deviceLimits.maxPerStageDescriptorStorageImages);
			caps.setNumLoadStoreTextureUnits(GPT_COMPUTE_PROGRAM, deviceLimits.maxPerStageDescriptorStorageImages);

			if(deviceFeatures.geometryShader)
			{
				caps.setCapability(RSC_GEOMETRY_PROGRAM);
				caps.addShaderProfile("gs_5_0");
				caps.setNumTextureUnits(GPT_GEOMETRY_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);
				caps.setNumGpuParamBlockBuffers(GPT_GEOMETRY_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);
				caps.setGeometryProgramNumOutputVertices(deviceLimits.maxGeometryOutputVertices);
			}

			if (deviceFeatures.tessellationShader)
			{
				caps.setCapability(RSC_TESSELLATION_PROGRAM);

				caps.setNumTextureUnits(GPT_HULL_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);
				caps.setNumTextureUnits(GPT_DOMAIN_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);

				caps.setNumGpuParamBlockBuffers(GPT_HULL_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);
				caps.setNumGpuParamBlockBuffers(GPT_DOMAIN_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);
			}

			caps.setNumCombinedTextureUnits(caps.getNumTextureUnits(GPT_FRAGMENT_PROGRAM)
				+ caps.getNumTextureUnits(GPT_VERTEX_PROGRAM) + caps.getNumTextureUnits(GPT_GEOMETRY_PROGRAM)
				+ caps.getNumTextureUnits(GPT_HULL_PROGRAM) + caps.getNumTextureUnits(GPT_DOMAIN_PROGRAM)
				+ caps.getNumTextureUnits(GPT_COMPUTE_PROGRAM));

			caps.setNumCombinedGpuParamBlockBuffers(caps.getNumGpuParamBlockBuffers(GPT_FRAGMENT_PROGRAM)
				+ caps.getNumGpuParamBlockBuffers(GPT_VERTEX_PROGRAM) + caps.getNumGpuParamBlockBuffers(GPT_GEOMETRY_PROGRAM)
				+ caps.getNumGpuParamBlockBuffers(GPT_HULL_PROGRAM) + caps.getNumGpuParamBlockBuffers(GPT_DOMAIN_PROGRAM)
				+ caps.getNumGpuParamBlockBuffers(GPT_COMPUTE_PROGRAM));

			caps.setNumCombinedLoadStoreTextureUnits(caps.getNumLoadStoreTextureUnits(GPT_FRAGMENT_PROGRAM)
				+ caps.getNumLoadStoreTextureUnits(GPT_COMPUTE_PROGRAM));

			caps.addShaderProfile("glsl");

			deviceIdx++;
		}
	}
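
	/** Casts the provided command buffer to a Vulkan command buffer, falling back to the main command buffer when null is passed. */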
	VulkanCommandBuffer* VulkanRenderAPI::getCB(const SPtr<CommandBuffer>& buffer)
	{
		if (buffer != nullptr)
			return static_cast<VulkanCommandBuffer*>(buffer.get());

		return static_cast<VulkanCommandBuffer*>(mMainCommandBuffer.get());
	}

	VulkanRenderAPI& gVulkanRenderAPI()
	{
		return static_cast<VulkanRenderAPI&>(RenderAPICore::instance());
	}
}