BsVulkanRenderAPI.cpp

//********************************** Banshee Engine (www.banshee3d.com) **************************************************//
//**************** Copyright (c) 2016 Marko Pintera ([email protected]). All rights reserved. **********************//
#include "BsVulkanRenderAPI.h"
#include "BsCoreThread.h"
#include "BsRenderStats.h"
#include "BsGpuParamDesc.h"
#include "BsVulkanDevice.h"
#include "BsVulkanTextureManager.h"
#include "BsVulkanRenderWindowManager.h"
#include "BsVulkanHardwareBufferManager.h"
#include "BsVulkanRenderStateManager.h"
#include "BsGpuProgramManager.h"
#include "BsVulkanQueryManager.h"
#include "BsVulkanGLSLProgramFactory.h"
#include "BsVulkanCommandBufferManager.h"
#include "BsVulkanCommandBuffer.h"
#include "BsVulkanGpuParams.h"
#include "BsVulkanVertexInputManager.h"
#include "BsVulkanGpuParamBlockBuffer.h"

#if BS_PLATFORM == BS_PLATFORM_WIN32
#include "Win32/BsWin32VideoModeInfo.h"
#else
static_assert(false, "Other platform includes go here.");
#endif

#define USE_VALIDATION_LAYERS 1
namespace bs { namespace ct
{
	VkAllocationCallbacks* gVulkanAllocator = nullptr;
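
	// Entry points for the surface, swapchain and debug-report extensions are not exported by the Vulkan loader
	// directly. They are resolved at runtime in initialize(), via GET_INSTANCE_PROC_ADDR / GET_DEVICE_PROC_ADDR.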
	PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = nullptr;
	PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = nullptr;
	PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = nullptr;
	PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = nullptr;
	PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = nullptr;
	PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = nullptr;
	PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = nullptr;
	PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = nullptr;
	PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = nullptr;
	PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = nullptr;
	PFN_vkQueuePresentKHR vkQueuePresentKHR = nullptr;

	VkBool32 debugMsgCallback(VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objType, uint64_t srcObject,
		size_t location, int32_t msgCode, const char* pLayerPrefix, const char* pMsg, void* pUserData)
	{
		StringStream message;

		// Determine prefix
		if (flags & VK_DEBUG_REPORT_ERROR_BIT_EXT)
			message << "ERROR";

		if (flags & VK_DEBUG_REPORT_WARNING_BIT_EXT)
			message << "WARNING";

		if (flags & VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT)
			message << "PERFORMANCE";

		if (flags & VK_DEBUG_REPORT_INFORMATION_BIT_EXT)
			message << "INFO";

		if (flags & VK_DEBUG_REPORT_DEBUG_BIT_EXT)
			message << "DEBUG";

		message << ": [" << pLayerPrefix << "] Code " << msgCode << ": " << pMsg << std::endl;

		if (flags & VK_DEBUG_REPORT_ERROR_BIT_EXT)
			BS_EXCEPT(RenderingAPIException, message.str())
		else if (flags & VK_DEBUG_REPORT_WARNING_BIT_EXT || flags & VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT)
			LOGWRN(message.str())
		else
			LOGDBG(message.str())

		// Don't abort calls that caused a validation message
		return VK_FALSE;
	}

	VulkanRenderAPI::VulkanRenderAPI()
		:mInstance(nullptr), mDebugCallback(nullptr)
	{ }

	VulkanRenderAPI::~VulkanRenderAPI()
	{
	}

	const StringID& VulkanRenderAPI::getName() const
	{
		static StringID strName("VulkanRenderAPI");
		return strName;
	}

	const String& VulkanRenderAPI::getShadingLanguageName() const
	{
		static String strName("glsl");
		return strName;
	}

	void VulkanRenderAPI::initialize()
	{
		THROW_IF_NOT_CORE_THREAD;

		// Create instance
		VkApplicationInfo appInfo;
		appInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
		appInfo.pNext = nullptr;
		appInfo.pApplicationName = "Banshee3D App";
		appInfo.applicationVersion = 1;
		appInfo.pEngineName = "Banshee3D";
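		// Engine version packed as (major << 24) | (minor << 16) | patch, i.e. 0.4.0 here.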
		appInfo.engineVersion = (0 << 24) | (4 << 16) | 0;
		appInfo.apiVersion = VK_API_VERSION_1_0;

#if BS_DEBUG_MODE && USE_VALIDATION_LAYERS
		const char* layers[] =
		{
			"VK_LAYER_LUNARG_standard_validation"
		};

		const char* extensions[] =
		{
			nullptr, /** Surface extension */
			nullptr, /** OS specific surface extension */
			VK_EXT_DEBUG_REPORT_EXTENSION_NAME
		};

		uint32_t numLayers = sizeof(layers) / sizeof(layers[0]);
#else
		const char** layers = nullptr;
		const char* extensions[] =
		{
			nullptr, /** Surface extension */
			nullptr, /** OS specific surface extension */
		};

		uint32_t numLayers = 0;
#endif

		extensions[0] = VK_KHR_SURFACE_EXTENSION_NAME;

#if BS_PLATFORM == BS_PLATFORM_WIN32
		extensions[1] = VK_KHR_WIN32_SURFACE_EXTENSION_NAME;
#elif BS_PLATFORM == BS_PLATFORM_ANDROID
		extensions[1] = VK_KHR_ANDROID_SURFACE_EXTENSION_NAME;
#else
		extensions[1] = VK_KHR_XCB_SURFACE_EXTENSION_NAME;
#endif

		uint32_t numExtensions = sizeof(extensions) / sizeof(extensions[0]);

		VkInstanceCreateInfo instanceInfo;
		instanceInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
		instanceInfo.pNext = nullptr;
		instanceInfo.flags = 0;
		instanceInfo.pApplicationInfo = &appInfo;
		instanceInfo.enabledLayerCount = numLayers;
		instanceInfo.ppEnabledLayerNames = layers;
		instanceInfo.enabledExtensionCount = numExtensions;
		instanceInfo.ppEnabledExtensionNames = extensions;

		VkResult result = vkCreateInstance(&instanceInfo, gVulkanAllocator, &mInstance);
		assert(result == VK_SUCCESS);

		// Set up debugging
#if BS_DEBUG_MODE && USE_VALIDATION_LAYERS
		VkDebugReportFlagsEXT debugFlags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT |
			VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT;

		GET_INSTANCE_PROC_ADDR(mInstance, CreateDebugReportCallbackEXT);
		GET_INSTANCE_PROC_ADDR(mInstance, DestroyDebugReportCallbackEXT);

		VkDebugReportCallbackCreateInfoEXT debugInfo;
		debugInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CALLBACK_CREATE_INFO_EXT;
		debugInfo.pNext = nullptr;
		debugInfo.pfnCallback = (PFN_vkDebugReportCallbackEXT)debugMsgCallback;
		debugInfo.flags = debugFlags;

		result = vkCreateDebugReportCallbackEXT(mInstance, &debugInfo, nullptr, &mDebugCallback);
		assert(result == VK_SUCCESS);
#endif

		// Enumerate all devices
		result = vkEnumeratePhysicalDevices(mInstance, &mNumDevices, nullptr);
		assert(result == VK_SUCCESS);

		Vector<VkPhysicalDevice> physicalDevices(mNumDevices);
		result = vkEnumeratePhysicalDevices(mInstance, &mNumDevices, physicalDevices.data());
		assert(result == VK_SUCCESS);

		mDevices.resize(mNumDevices);
		for(uint32_t i = 0; i < mNumDevices; i++)
			mDevices[i] = bs_shared_ptr_new<VulkanDevice>(physicalDevices[i], i);

		// Find primary device
		// Note: MULTIGPU - Detect multiple similar devices here if supporting multi-GPU
		for (uint32_t i = 0; i < mNumDevices; i++)
		{
			bool isPrimary = mDevices[i]->getDeviceProperties().deviceType == VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU;

			if (isPrimary)
			{
				mDevices[i]->setIsPrimary();
				mPrimaryDevices.push_back(mDevices[i]);
				break;
			}
		}
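
		// If no discrete GPU was found, fall back to the first enumerated device.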
		if (mPrimaryDevices.size() == 0)
			mPrimaryDevices.push_back(mDevices[0]);

#if BS_PLATFORM == BS_PLATFORM_WIN32
		mVideoModeInfo = bs_shared_ptr_new<Win32VideoModeInfo>();
#else
		static_assert(false, "mVideoModeInfo needs to be created.");
#endif

		// Get required extension functions
		GET_INSTANCE_PROC_ADDR(mInstance, GetPhysicalDeviceSurfaceSupportKHR);
		GET_INSTANCE_PROC_ADDR(mInstance, GetPhysicalDeviceSurfaceFormatsKHR);
		GET_INSTANCE_PROC_ADDR(mInstance, GetPhysicalDeviceSurfaceCapabilitiesKHR);
		GET_INSTANCE_PROC_ADDR(mInstance, GetPhysicalDeviceSurfacePresentModesKHR);
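
		// Swapchain functions are device-level entry points, so they are resolved from the logical device used
		// for presentation rather than from the instance.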
		VkDevice presentDevice = _getPresentDevice()->getLogical();

		GET_DEVICE_PROC_ADDR(presentDevice, CreateSwapchainKHR);
		GET_DEVICE_PROC_ADDR(presentDevice, DestroySwapchainKHR);
		GET_DEVICE_PROC_ADDR(presentDevice, GetSwapchainImagesKHR);
		GET_DEVICE_PROC_ADDR(presentDevice, AcquireNextImageKHR);
		GET_DEVICE_PROC_ADDR(presentDevice, QueuePresentKHR);

		// Create command buffer manager
		CommandBufferManager::startUp<VulkanCommandBufferManager>(*this);

		// Create main command buffer
		mMainCommandBuffer = std::static_pointer_cast<VulkanCommandBuffer>(CommandBuffer::create(GQT_GRAPHICS));
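
		// Managers that exist in both the sim-thread (bs::) and core-thread (ct::) namespaces are started up
		// in pairs below, sim-thread version first.
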
		// Create the texture manager for use by others
		bs::TextureManager::startUp<bs::VulkanTextureManager>();
		TextureManager::startUp<VulkanTextureManager>();

		// Create hardware buffer manager
		bs::HardwareBufferManager::startUp();
		HardwareBufferManager::startUp<VulkanHardwareBufferManager>();

		// Create render window manager
		bs::RenderWindowManager::startUp<bs::VulkanRenderWindowManager>();
		RenderWindowManager::startUp<VulkanRenderWindowManager>(*this);

		// Create query manager
		QueryManager::startUp<VulkanQueryManager>(*this);

		// Create vertex input manager
		VulkanVertexInputManager::startUp();

		// Create & register GLSL factory
		mGLSLFactory = bs_new<VulkanGLSLProgramFactory>();

		// Create render state manager
		RenderStateManager::startUp<VulkanRenderStateManager>();

		GpuProgramManager::instance().addFactory(mGLSLFactory);

		initCapabilites();

		RenderAPI::initialize();
	}

	void VulkanRenderAPI::destroyCore()
	{
		THROW_IF_NOT_CORE_THREAD;

		if (mGLSLFactory != nullptr)
		{
			bs_delete(mGLSLFactory);
			mGLSLFactory = nullptr;
		}

		VulkanVertexInputManager::shutDown();
		QueryManager::shutDown();
		RenderStateManager::shutDown();
		RenderWindowManager::shutDown();
		bs::RenderWindowManager::shutDown();
		HardwareBufferManager::shutDown();
		bs::HardwareBufferManager::shutDown();
		TextureManager::shutDown();
		bs::TextureManager::shutDown();

		mMainCommandBuffer = nullptr;

		// Make sure everything finishes and all resources get freed
		VulkanCommandBufferManager& cmdBufManager = static_cast<VulkanCommandBufferManager&>(CommandBufferManager::instance());
		for (UINT32 i = 0; i < (UINT32)mDevices.size(); i++)
		{
			mDevices[i]->waitIdle();
			cmdBufManager.refreshStates(i);
		}

		CommandBufferManager::shutDown();

		mPrimaryDevices.clear();
		mDevices.clear();

#if BS_DEBUG_MODE
		if (mDebugCallback != nullptr)
			vkDestroyDebugReportCallbackEXT(mInstance, mDebugCallback, gVulkanAllocator);
#endif

		vkDestroyInstance(mInstance, gVulkanAllocator);

		RenderAPI::destroyCore();
	}

	void VulkanRenderAPI::setGraphicsPipeline(const SPtr<GraphicsPipelineState>& pipelineState,
		const SPtr<CommandBuffer>& commandBuffer)
	{
		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		vkCB->setPipelineState(pipelineState);

		BS_INC_RENDER_STAT(NumPipelineStateChanges);
	}

	void VulkanRenderAPI::setComputePipeline(const SPtr<ComputePipelineState>& pipelineState,
		const SPtr<CommandBuffer>& commandBuffer)
	{
		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		vkCB->setPipelineState(pipelineState);

		BS_INC_RENDER_STAT(NumPipelineStateChanges);
	}

	void VulkanRenderAPI::setGpuParams(const SPtr<GpuParams>& gpuParams, const SPtr<CommandBuffer>& commandBuffer)
	{
		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		UINT32 globalQueueIdx = CommandSyncMask::getGlobalQueueIdx(cb->getType(), cb->getQueueIdx());
		for (UINT32 i = 0; i < GPT_COUNT; i++)
		{
			SPtr<GpuParamDesc> paramDesc = gpuParams->getParamDesc((GpuProgramType)i);
			if (paramDesc == nullptr)
				continue;

			// Flush all param block buffers
			for (auto iter = paramDesc->paramBlocks.begin(); iter != paramDesc->paramBlocks.end(); ++iter)
			{
				SPtr<GpuParamBlockBuffer> buffer = gpuParams->getParamBlockBuffer(iter->second.set, iter->second.slot);
				if (buffer != nullptr)
					buffer->flushToGPU(globalQueueIdx);
			}
		}

		vkCB->setGpuParams(gpuParams);

		BS_INC_RENDER_STAT(NumGpuParamBinds);
	}

	void VulkanRenderAPI::setViewport(const Rect2& vp, const SPtr<CommandBuffer>& commandBuffer)
	{
		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		vkCB->setViewport(vp);
	}

	void VulkanRenderAPI::setVertexBuffers(UINT32 index, SPtr<VertexBuffer>* buffers, UINT32 numBuffers,
		const SPtr<CommandBuffer>& commandBuffer)
	{
		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		vkCB->setVertexBuffers(index, buffers, numBuffers);

		BS_INC_RENDER_STAT(NumVertexBufferBinds);
	}

	void VulkanRenderAPI::setIndexBuffer(const SPtr<IndexBuffer>& buffer, const SPtr<CommandBuffer>& commandBuffer)
	{
		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		vkCB->setIndexBuffer(buffer);

		BS_INC_RENDER_STAT(NumIndexBufferBinds);
	}

	void VulkanRenderAPI::setVertexDeclaration(const SPtr<VertexDeclaration>& vertexDeclaration,
		const SPtr<CommandBuffer>& commandBuffer)
	{
		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		vkCB->setVertexDeclaration(vertexDeclaration);
	}

	void VulkanRenderAPI::setDrawOperation(DrawOperationType op, const SPtr<CommandBuffer>& commandBuffer)
	{
		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		vkCB->setDrawOp(op);
	}

	void VulkanRenderAPI::draw(UINT32 vertexOffset, UINT32 vertexCount, UINT32 instanceCount,
		const SPtr<CommandBuffer>& commandBuffer)
	{
		UINT32 primCount = 0;

		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		vkCB->draw(vertexOffset, vertexCount, instanceCount);

		BS_INC_RENDER_STAT(NumDrawCalls);
		BS_ADD_RENDER_STAT(NumVertices, vertexCount);
		BS_ADD_RENDER_STAT(NumPrimitives, primCount);
	}

	void VulkanRenderAPI::drawIndexed(UINT32 startIndex, UINT32 indexCount, UINT32 vertexOffset, UINT32 vertexCount,
		UINT32 instanceCount, const SPtr<CommandBuffer>& commandBuffer)
	{
		UINT32 primCount = 0;

		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		vkCB->drawIndexed(startIndex, indexCount, vertexOffset, instanceCount);

		BS_INC_RENDER_STAT(NumDrawCalls);
		BS_ADD_RENDER_STAT(NumVertices, vertexCount);
		BS_ADD_RENDER_STAT(NumPrimitives, primCount);
	}

	void VulkanRenderAPI::dispatchCompute(UINT32 numGroupsX, UINT32 numGroupsY, UINT32 numGroupsZ,
		const SPtr<CommandBuffer>& commandBuffer)
	{
		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		vkCB->dispatch(numGroupsX, numGroupsY, numGroupsZ);

		BS_INC_RENDER_STAT(NumComputeCalls);
	}

	void VulkanRenderAPI::setScissorRect(UINT32 left, UINT32 top, UINT32 right, UINT32 bottom,
		const SPtr<CommandBuffer>& commandBuffer)
	{
		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();
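
		// The public API takes corner coordinates; Rect2I stores position + size.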
		Rect2I area(left, top, right - left, bottom - top);
		vkCB->setScissorRect(area);
	}

	void VulkanRenderAPI::setStencilRef(UINT32 value, const SPtr<CommandBuffer>& commandBuffer)
	{
		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		vkCB->setStencilRef(value);
	}

	void VulkanRenderAPI::clearViewport(UINT32 buffers, const Color& color, float depth, UINT16 stencil, UINT8 targetMask,
		const SPtr<CommandBuffer>& commandBuffer)
	{
		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		vkCB->clearViewport(buffers, color, depth, stencil, targetMask);

		BS_INC_RENDER_STAT(NumClears);
	}

	void VulkanRenderAPI::clearRenderTarget(UINT32 buffers, const Color& color, float depth, UINT16 stencil,
		UINT8 targetMask, const SPtr<CommandBuffer>& commandBuffer)
	{
		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		vkCB->clearRenderTarget(buffers, color, depth, stencil, targetMask);

		BS_INC_RENDER_STAT(NumClears);
	}

	void VulkanRenderAPI::setRenderTarget(const SPtr<RenderTarget>& target, bool readOnlyDepthStencil,
		RenderSurfaceMask loadMask, const SPtr<CommandBuffer>& commandBuffer)
	{
		VulkanCommandBuffer* cb = getCB(commandBuffer);
		VulkanCmdBuffer* vkCB = cb->getInternal();

		vkCB->setRenderTarget(target, readOnlyDepthStencil, loadMask);

		BS_INC_RENDER_STAT(NumRenderTargetChanges);
	}

	void VulkanRenderAPI::swapBuffers(const SPtr<RenderTarget>& target, UINT32 syncMask)
	{
		THROW_IF_NOT_CORE_THREAD;

		submitCommandBuffer(mMainCommandBuffer, syncMask);
		target->swapBuffers(syncMask);

		// See if any command buffers finished executing
		VulkanCommandBufferManager& cbm = static_cast<VulkanCommandBufferManager&>(CommandBufferManager::instance());
		for (UINT32 i = 0; i < (UINT32)mDevices.size(); i++)
			cbm.refreshStates(i);

		BS_INC_RENDER_STAT(NumPresents);
	}

	void VulkanRenderAPI::addCommands(const SPtr<CommandBuffer>& commandBuffer, const SPtr<CommandBuffer>& secondary)
	{
		BS_EXCEPT(NotImplementedException, "Secondary command buffers not implemented");
	}

	void VulkanRenderAPI::submitCommandBuffer(const SPtr<CommandBuffer>& commandBuffer, UINT32 syncMask)
	{
		THROW_IF_NOT_CORE_THREAD;

		VulkanCommandBuffer* cmdBuffer = getCB(commandBuffer);

		// Submit all transfer buffers first
		VulkanCommandBufferManager& cbm = static_cast<VulkanCommandBufferManager&>(CommandBufferManager::instance());
		cbm.flushTransferBuffers(cmdBuffer->getDeviceIdx());

		cmdBuffer->submit(syncMask);
	}
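
	// Converts a GL-style projection matrix to Vulkan conventions: clip-space Y is flipped (Vulkan's Y points down)
	// and depth is remapped from [-1,1] to [0,1]. For a standard projection matrix (whose second row only has the
	// [1][1] element set), the in-place row edits below are equivalent to pre-multiplying by the fixed correction:
	//
	//   | 1   0    0    0  |
	//   | 0  -1    0    0  |
	//   | 0   0   1/2  1/2 |
	//   | 0   0    0    1  |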
	void VulkanRenderAPI::convertProjectionMatrix(const Matrix4& matrix, Matrix4& dest)
	{
		dest = matrix;

		// Flip Y axis
		dest[1][1] = -dest[1][1];

		// Convert depth range from [-1,1] to [0,1]
		dest[2][0] = (dest[2][0] + dest[3][0]) / 2;
		dest[2][1] = (dest[2][1] + dest[3][1]) / 2;
		dest[2][2] = (dest[2][2] + dest[3][2]) / 2;
		dest[2][3] = (dest[2][3] + dest[3][3]) / 2;
	}

	const RenderAPIInfo& VulkanRenderAPI::getAPIInfo() const
	{
		static RenderAPIInfo info(0.0f, 0.0f, 0.0f, 1.0f, VET_COLOR_ABGR, false, false, true, true, true);
		return info;
	}
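
	// Generates a uniform-block layout using std140-style packing rules. All sizes, alignments and offsets are
	// tracked in 32-bit (4 byte) units, which is why the final size is rounded up to a multiple of 4 (16 bytes).
	// For example, assuming std140-style alignments for the element types, a float3 followed by a float packs into
	// a single 16-byte slot, while a float followed by a float3 leaves three dwords of padding before the float3.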
	GpuParamBlockDesc VulkanRenderAPI::generateParamBlockDesc(const String& name, Vector<GpuParamDataDesc>& params)
	{
		GpuParamBlockDesc block;
		block.blockSize = 0;
		block.isShareable = true;
		block.name = name;
		block.slot = 0;
		block.set = 0;

		for (auto& param : params)
		{
			const GpuParamDataTypeInfo& typeInfo = bs::GpuParams::PARAM_SIZES.lookup[param.type];
			UINT32 size = typeInfo.size / 4;
			UINT32 alignment = typeInfo.alignment / 4;

			// Fix alignment if needed
			UINT32 alignOffset = block.blockSize % alignment;
			if (alignOffset != 0)
			{
				UINT32 padding = (alignment - alignOffset);
				block.blockSize += padding;
			}

			if (param.arraySize > 1)
			{
				// Array elements are always padded and aligned to vec4
				alignOffset = size % typeInfo.baseTypeSize;
				if (alignOffset != 0)
				{
					UINT32 padding = (typeInfo.baseTypeSize - alignOffset);
					size += padding;
				}

				alignOffset = block.blockSize % typeInfo.baseTypeSize;
				if (alignOffset != 0)
				{
					UINT32 padding = (typeInfo.baseTypeSize - alignOffset);
					block.blockSize += padding;
				}

				param.elementSize = size;
				param.arrayElementStride = size;
				param.cpuMemOffset = block.blockSize;
				param.gpuMemOffset = 0;

				block.blockSize += size * param.arraySize;
			}
			else
			{
				param.elementSize = size;
				param.arrayElementStride = size;
				param.cpuMemOffset = block.blockSize;
				param.gpuMemOffset = 0;

				block.blockSize += size;
			}

			param.paramBlockSlot = 0;
			param.paramBlockSet = 0;
		}

		// Constant buffer size must always be a multiple of 16 bytes (4 dwords, since blockSize is in 32-bit units)
		if (block.blockSize % 4 != 0)
			block.blockSize += (4 - (block.blockSize % 4));

		return block;
	}

	void VulkanRenderAPI::initCapabilites()
	{
		mNumDevices = (UINT32)mDevices.size();
		mCurrentCapabilities = bs_newN<RenderAPICapabilities>(mNumDevices);

		UINT32 deviceIdx = 0;
		for (auto& device : mDevices)
		{
			RenderAPICapabilities& caps = mCurrentCapabilities[deviceIdx];

			const VkPhysicalDeviceProperties& deviceProps = device->getDeviceProperties();
			const VkPhysicalDeviceFeatures& deviceFeatures = device->getDeviceFeatures();
			const VkPhysicalDeviceLimits& deviceLimits = deviceProps.limits;
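
			// apiVersion packs major/minor/patch as 10/10/12 bits, matching VK_VERSION_MAJOR/MINOR/PATCH.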
			DriverVersion driverVersion;
			driverVersion.major = ((uint32_t)(deviceProps.apiVersion) >> 22);
			driverVersion.minor = ((uint32_t)(deviceProps.apiVersion) >> 12) & 0x3ff;
			driverVersion.release = (uint32_t)(deviceProps.apiVersion) & 0xfff;
			driverVersion.build = 0;

			caps.setDriverVersion(driverVersion);
			caps.setDeviceName(deviceProps.deviceName);

			// Determine vendor
			switch (deviceProps.vendorID)
			{
			case 0x10DE:
				caps.setVendor(GPU_NVIDIA);
				break;
			case 0x1002:
				caps.setVendor(GPU_AMD);
				break;
			case 0x163C:
			case 0x8086:
				caps.setVendor(GPU_INTEL);
				break;
			default:
				caps.setVendor(GPU_UNKNOWN);
				break;
			}

			caps.setRenderAPIName(getName());

			if(deviceFeatures.textureCompressionBC)
				caps.setCapability(RSC_TEXTURE_COMPRESSION_BC);

			if (deviceFeatures.textureCompressionETC2)
				caps.setCapability(RSC_TEXTURE_COMPRESSION_ETC2);

			if (deviceFeatures.textureCompressionASTC_LDR)
				caps.setCapability(RSC_TEXTURE_COMPRESSION_ASTC);

			caps.setMaxBoundVertexBuffers(deviceLimits.maxVertexInputBindings);
			caps.setNumMultiRenderTargets(deviceLimits.maxColorAttachments);

			caps.setCapability(RSC_COMPUTE_PROGRAM);

			caps.setNumTextureUnits(GPT_FRAGMENT_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);
			caps.setNumTextureUnits(GPT_VERTEX_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);
			caps.setNumTextureUnits(GPT_COMPUTE_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);

			caps.setNumGpuParamBlockBuffers(GPT_FRAGMENT_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);
			caps.setNumGpuParamBlockBuffers(GPT_VERTEX_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);
			caps.setNumGpuParamBlockBuffers(GPT_COMPUTE_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);

			caps.setNumLoadStoreTextureUnits(GPT_FRAGMENT_PROGRAM, deviceLimits.maxPerStageDescriptorStorageImages);
			caps.setNumLoadStoreTextureUnits(GPT_COMPUTE_PROGRAM, deviceLimits.maxPerStageDescriptorStorageImages);

			if(deviceFeatures.geometryShader)
			{
				caps.setCapability(RSC_GEOMETRY_PROGRAM);
				caps.addShaderProfile("gs_5_0");
				caps.setNumTextureUnits(GPT_GEOMETRY_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);
				caps.setNumGpuParamBlockBuffers(GPT_GEOMETRY_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);
				caps.setGeometryProgramNumOutputVertices(deviceLimits.maxGeometryOutputVertices);
			}

			if (deviceFeatures.tessellationShader)
			{
				caps.setCapability(RSC_TESSELLATION_PROGRAM);

				caps.setNumTextureUnits(GPT_HULL_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);
				caps.setNumTextureUnits(GPT_DOMAIN_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);

				caps.setNumGpuParamBlockBuffers(GPT_HULL_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);
				caps.setNumGpuParamBlockBuffers(GPT_DOMAIN_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);
			}

			caps.setNumCombinedTextureUnits(caps.getNumTextureUnits(GPT_FRAGMENT_PROGRAM)
				+ caps.getNumTextureUnits(GPT_VERTEX_PROGRAM) + caps.getNumTextureUnits(GPT_GEOMETRY_PROGRAM)
				+ caps.getNumTextureUnits(GPT_HULL_PROGRAM) + caps.getNumTextureUnits(GPT_DOMAIN_PROGRAM)
				+ caps.getNumTextureUnits(GPT_COMPUTE_PROGRAM));

			caps.setNumCombinedGpuParamBlockBuffers(caps.getNumGpuParamBlockBuffers(GPT_FRAGMENT_PROGRAM)
				+ caps.getNumGpuParamBlockBuffers(GPT_VERTEX_PROGRAM) + caps.getNumGpuParamBlockBuffers(GPT_GEOMETRY_PROGRAM)
				+ caps.getNumGpuParamBlockBuffers(GPT_HULL_PROGRAM) + caps.getNumGpuParamBlockBuffers(GPT_DOMAIN_PROGRAM)
				+ caps.getNumGpuParamBlockBuffers(GPT_COMPUTE_PROGRAM));

			caps.setNumCombinedLoadStoreTextureUnits(caps.getNumLoadStoreTextureUnits(GPT_FRAGMENT_PROGRAM)
				+ caps.getNumLoadStoreTextureUnits(GPT_COMPUTE_PROGRAM));

			caps.addShaderProfile("glsl");

			deviceIdx++;
		}
	}
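
	// Returns the Vulkan implementation of the provided command buffer, falling back to the internal main command
	// buffer when no buffer is explicitly provided.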
	VulkanCommandBuffer* VulkanRenderAPI::getCB(const SPtr<CommandBuffer>& buffer)
	{
		if (buffer != nullptr)
			return static_cast<VulkanCommandBuffer*>(buffer.get());

		return static_cast<VulkanCommandBuffer*>(mMainCommandBuffer.get());
	}

	VulkanRenderAPI& gVulkanRenderAPI()
	{
		return static_cast<VulkanRenderAPI&>(RenderAPI::instance());
	}
}}
  577. }}