BsVulkanRenderAPI.cpp

//********************************** Banshee Engine (www.banshee3d.com) **************************************************//
//**************** Copyright (c) 2016 Marko Pintera ([email protected]). All rights reserved. **********************//
#include "BsVulkanRenderAPI.h"
#include "BsCoreThread.h"
#include "BsRenderStats.h"
#include "BsGpuParamDesc.h"
#include "BsVulkanDevice.h"
#include "BsVulkanTextureManager.h"
#include "BsVulkanRenderWindowManager.h"
#include "BsVulkanHardwareBufferManager.h"
#include "BsVulkanRenderStateManager.h"
#include "BsGpuProgramManager.h"
#include "BsVulkanQueryManager.h"
#include "BsVulkanGLSLProgramFactory.h"
#include "BsVulkanCommandBufferManager.h"
#include "BsVulkanCommandBuffer.h"
#include "BsVulkanGpuParams.h"
#include "BsVulkanVertexInputManager.h"

#if BS_PLATFORM == BS_PLATFORM_WIN32
#include "Win32/BsWin32VideoModeInfo.h"
#else
static_assert(false, "Other platform includes go here.");
#endif

namespace BansheeEngine
{
    PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = nullptr;
    PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = nullptr;
    PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = nullptr;
    PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = nullptr;
    PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = nullptr;
    PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = nullptr;
    PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = nullptr;
    PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = nullptr;
    PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = nullptr;
    PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = nullptr;
    PFN_vkQueuePresentKHR vkQueuePresentKHR = nullptr;

    VkBool32 debugMsgCallback(VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objType, uint64_t srcObject,
        size_t location, int32_t msgCode, const char* pLayerPrefix, const char* pMsg, void* pUserData)
    {
        StringStream message;

        // Determine prefix
        if (flags & VK_DEBUG_REPORT_ERROR_BIT_EXT)
            message << "ERROR";

        if (flags & VK_DEBUG_REPORT_WARNING_BIT_EXT)
            message << "WARNING";

        if (flags & VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT)
            message << "PERFORMANCE";

        if (flags & VK_DEBUG_REPORT_INFORMATION_BIT_EXT)
            message << "INFO";

        if (flags & VK_DEBUG_REPORT_DEBUG_BIT_EXT)
            message << "DEBUG";

        message << ": [" << pLayerPrefix << "] Code " << msgCode << ": " << pMsg << std::endl;

        if (flags & VK_DEBUG_REPORT_ERROR_BIT_EXT)
            BS_EXCEPT(RenderingAPIException, message.str())
        else if (flags & VK_DEBUG_REPORT_WARNING_BIT_EXT || flags & VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT)
            LOGWRN(message.str())
        else
            LOGDBG(message.str())

        // Don't abort calls that caused a validation message
        return VK_FALSE;
    }
    VulkanRenderAPI::VulkanRenderAPI()
        :mInstance(nullptr), mDebugCallback(nullptr)
    { }

    VulkanRenderAPI::~VulkanRenderAPI()
    {
    }

    const StringID& VulkanRenderAPI::getName() const
    {
        static StringID strName("VulkanRenderAPI");
        return strName;
    }

    const String& VulkanRenderAPI::getShadingLanguageName() const
    {
        static String strName("glsl");
        return strName;
    }

    void VulkanRenderAPI::initialize()
    {
        THROW_IF_NOT_CORE_THREAD;

        // Create instance
        VkApplicationInfo appInfo;
        appInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
        appInfo.pNext = nullptr;
        appInfo.pApplicationName = "Banshee3D App";
        appInfo.applicationVersion = 1;
        appInfo.pEngineName = "Banshee3D";
        appInfo.engineVersion = (0 << 24) | (4 << 16) | 0;
        appInfo.apiVersion = VK_API_VERSION_1_0;
#if BS_DEBUG_MODE
        const char* layers[] =
        {
            "VK_LAYER_LUNARG_standard_validation"
        };

        const char* extensions[] =
        {
            nullptr, /** Surface extension */
            nullptr, /** OS specific surface extension */
            VK_EXT_DEBUG_REPORT_EXTENSION_NAME
        };

        uint32_t numLayers = sizeof(layers) / sizeof(layers[0]);
#else
        const char** layers = nullptr;
        const char* extensions[] =
        {
            nullptr, /** Surface extension */
            nullptr, /** OS specific surface extension */
        };

        // In release mode "layers" is just a pointer, so sizeof() cannot be used to count entries
        uint32_t numLayers = 0;
#endif

        extensions[0] = VK_KHR_SURFACE_EXTENSION_NAME;

#if BS_PLATFORM == BS_PLATFORM_WIN32
        extensions[1] = VK_KHR_WIN32_SURFACE_EXTENSION_NAME;
#elif BS_PLATFORM == BS_PLATFORM_ANDROID
        extensions[1] = VK_KHR_ANDROID_SURFACE_EXTENSION_NAME;
#else
        extensions[1] = VK_KHR_XCB_SURFACE_EXTENSION_NAME;
#endif

        uint32_t numExtensions = sizeof(extensions) / sizeof(extensions[0]);
        VkInstanceCreateInfo instanceInfo;
        instanceInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
        instanceInfo.pNext = nullptr;
        instanceInfo.flags = 0;
        instanceInfo.pApplicationInfo = &appInfo;
        instanceInfo.enabledLayerCount = numLayers;
        instanceInfo.ppEnabledLayerNames = layers;
        instanceInfo.enabledExtensionCount = numExtensions;
        instanceInfo.ppEnabledExtensionNames = extensions;

        VkResult result = vkCreateInstance(&instanceInfo, gVulkanAllocator, &mInstance);
        assert(result == VK_SUCCESS);

        // Set up debugging
#if BS_DEBUG_MODE
        VkDebugReportFlagsEXT debugFlags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT |
            VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT;

        GET_INSTANCE_PROC_ADDR(mInstance, CreateDebugReportCallbackEXT);
        GET_INSTANCE_PROC_ADDR(mInstance, DestroyDebugReportCallbackEXT);
        VkDebugReportCallbackCreateInfoEXT debugInfo;
        debugInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
        debugInfo.pNext = nullptr;
        debugInfo.pfnCallback = (PFN_vkDebugReportCallbackEXT)debugMsgCallback;
        debugInfo.flags = debugFlags;

        result = vkCreateDebugReportCallbackEXT(mInstance, &debugInfo, gVulkanAllocator, &mDebugCallback);
        assert(result == VK_SUCCESS);
#endif
        // Enumerate all devices
        uint32_t numDevices;

        result = vkEnumeratePhysicalDevices(mInstance, &numDevices, nullptr);
        assert(result == VK_SUCCESS);

        Vector<VkPhysicalDevice> physicalDevices(numDevices);
        result = vkEnumeratePhysicalDevices(mInstance, &numDevices, physicalDevices.data());
        assert(result == VK_SUCCESS);

        mDevices.resize(numDevices);
        for(uint32_t i = 0; i < numDevices; i++)
            mDevices[i] = bs_shared_ptr_new<VulkanDevice>(physicalDevices[i], i);

        // Find primary device
        // Note: MULTIGPU - Detect multiple similar devices here if supporting multi-GPU
        for (uint32_t i = 0; i < numDevices; i++)
        {
            bool isPrimary = mDevices[i]->getDeviceProperties().deviceType == VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU;

            if (isPrimary)
            {
                mDevices[i]->setIsPrimary();
                mPrimaryDevices.push_back(mDevices[i]);
                break;
            }
        }

        if (mPrimaryDevices.size() == 0)
            mPrimaryDevices.push_back(mDevices[0]);

#if BS_PLATFORM == BS_PLATFORM_WIN32
        mVideoModeInfo = bs_shared_ptr_new<Win32VideoModeInfo>();
#else
        static_assert(false, "mVideoModeInfo needs to be created.");
#endif
        // Get required extension functions
        GET_INSTANCE_PROC_ADDR(mInstance, GetPhysicalDeviceSurfaceSupportKHR);
        GET_INSTANCE_PROC_ADDR(mInstance, GetPhysicalDeviceSurfaceFormatsKHR);
        GET_INSTANCE_PROC_ADDR(mInstance, GetPhysicalDeviceSurfaceCapabilitiesKHR);
        GET_INSTANCE_PROC_ADDR(mInstance, GetPhysicalDeviceSurfacePresentModesKHR);

        VkDevice presentDevice = _getPresentDevice()->getLogical();
        GET_DEVICE_PROC_ADDR(presentDevice, CreateSwapchainKHR);
        GET_DEVICE_PROC_ADDR(presentDevice, DestroySwapchainKHR);
        GET_DEVICE_PROC_ADDR(presentDevice, GetSwapchainImagesKHR);
        GET_DEVICE_PROC_ADDR(presentDevice, AcquireNextImageKHR);
        GET_DEVICE_PROC_ADDR(presentDevice, QueuePresentKHR);

        // Create command buffer manager
        CommandBufferManager::startUp<VulkanCommandBufferManager>(*this);

        // Create main command buffer
        mMainCommandBuffer = std::static_pointer_cast<VulkanCommandBuffer>(CommandBuffer::create(GQT_GRAPHICS));

        // Create the texture manager for use by others
        TextureManager::startUp<VulkanTextureManager>();
        TextureCoreManager::startUp<VulkanTextureCoreManager>();

        // Create hardware buffer manager
        HardwareBufferManager::startUp();
        HardwareBufferCoreManager::startUp<VulkanHardwareBufferCoreManager>();

        // Create render window manager
        RenderWindowManager::startUp<VulkanRenderWindowManager>();
        RenderWindowCoreManager::startUp<VulkanRenderWindowCoreManager>(*this);

        // Create query manager
        QueryManager::startUp<VulkanQueryManager>();

        // Create vertex input manager
        VulkanVertexInputManager::startUp();

        // Create & register GLSL factory
        mGLSLFactory = bs_new<VulkanGLSLProgramFactory>();

        // Create render state manager
        RenderStateCoreManager::startUp<VulkanRenderStateCoreManager>();

        GpuProgramCoreManager::instance().addFactory(mGLSLFactory);

        initCapabilites();

        RenderAPICore::initialize();
    }
    void VulkanRenderAPI::destroyCore()
    {
        THROW_IF_NOT_CORE_THREAD;

        if (mGLSLFactory != nullptr)
        {
            bs_delete(mGLSLFactory);
            mGLSLFactory = nullptr;
        }

        VulkanVertexInputManager::shutDown();
        QueryManager::shutDown();
        RenderStateCoreManager::shutDown();
        RenderWindowCoreManager::shutDown();
        RenderWindowManager::shutDown();
        HardwareBufferCoreManager::shutDown();
        HardwareBufferManager::shutDown();
        TextureCoreManager::shutDown();
        TextureManager::shutDown();

        mMainCommandBuffer = nullptr;

        // Make sure everything finishes and all resources get freed
        VulkanCommandBufferManager& cmdBufManager = static_cast<VulkanCommandBufferManager&>(CommandBufferManager::instance());
        for (UINT32 i = 0; i < (UINT32)mDevices.size(); i++)
        {
            mDevices[i]->waitIdle();
            cmdBufManager.refreshStates(i);
        }

        CommandBufferManager::shutDown();

        mPrimaryDevices.clear();
        mDevices.clear();

#if BS_DEBUG_MODE
        if (mDebugCallback != nullptr)
            vkDestroyDebugReportCallbackEXT(mInstance, mDebugCallback, gVulkanAllocator);
#endif

        vkDestroyInstance(mInstance, gVulkanAllocator);

        RenderAPICore::destroyCore();
    }
    void VulkanRenderAPI::setGraphicsPipeline(const SPtr<GpuPipelineStateCore>& pipelineState,
        const SPtr<CommandBuffer>& commandBuffer)
    {
        // TODO

        BS_INC_RENDER_STAT(NumPipelineStateChanges);
    }

    void VulkanRenderAPI::setComputePipeline(const SPtr<GpuProgramCore>& computeProgram,
        const SPtr<CommandBuffer>& commandBuffer)
    {
        // TODO

        BS_INC_RENDER_STAT(NumPipelineStateChanges);
    }

    void VulkanRenderAPI::setGpuParams(const SPtr<GpuParamsCore>& gpuParams, const SPtr<CommandBuffer>& commandBuffer)
    {
        VulkanCommandBuffer* cb = getCB(commandBuffer);

        SPtr<VulkanGpuParams> vulkanGpuParams = std::static_pointer_cast<VulkanGpuParams>(gpuParams);
        vulkanGpuParams->bind(*cb);

        BS_INC_RENDER_STAT(NumGpuParamBinds);
    }

    void VulkanRenderAPI::beginFrame(const SPtr<CommandBuffer>& commandBuffer)
    {
    }

    void VulkanRenderAPI::endFrame(const SPtr<CommandBuffer>& commandBuffer)
    {
    }

    void VulkanRenderAPI::setViewport(const Rect2& vp, const SPtr<CommandBuffer>& commandBuffer)
    {
        // TODO
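        // A minimal sketch, not the final implementation: translate the viewport rectangle into a VkViewport and
        // record it as dynamic state (the render-target dimensions and the raw command buffer handle are assumptions):
        //
        //     VkViewport viewport;
        //     viewport.x = vp.x * rtWidth;         // assuming vp is normalized relative to the bound render target
        //     viewport.y = vp.y * rtHeight;
        //     viewport.width = vp.width * rtWidth;
        //     viewport.height = vp.height * rtHeight;
        //     viewport.minDepth = 0.0f;
        //     viewport.maxDepth = 1.0f;
        //
        //     vkCmdSetViewport(vkCmdBuffer, 0, 1, &viewport);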
    }

    void VulkanRenderAPI::setVertexBuffers(UINT32 index, SPtr<VertexBufferCore>* buffers, UINT32 numBuffers,
        const SPtr<CommandBuffer>& commandBuffer)
    {
        // TODO
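        // A minimal sketch, not the final implementation: gather the native buffer handles and bind them starting at
        // the requested slot (VulkanVertexBufferCore::getHandle() and the raw command buffer handle are assumptions):
        //
        //     VkBuffer handles[16];           // assuming a small fixed upper bound on simultaneously bound buffers
        //     VkDeviceSize offsets[16] = { };
        //     for (UINT32 i = 0; i < numBuffers; i++)
        //         handles[i] = static_cast<VulkanVertexBufferCore*>(buffers[i].get())->getHandle();
        //
        //     vkCmdBindVertexBuffers(vkCmdBuffer, index, numBuffers, handles, offsets);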

        BS_INC_RENDER_STAT(NumVertexBufferBinds);
    }

    void VulkanRenderAPI::setIndexBuffer(const SPtr<IndexBufferCore>& buffer, const SPtr<CommandBuffer>& commandBuffer)
    {
        // TODO
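        // A minimal sketch, not the final implementation: bind the native buffer with an index type matching the
        // buffer's declared format (the buffer accessors and raw command buffer handle are assumptions):
        //
        //     VkIndexType indexType = is16BitIndexBuffer ? VK_INDEX_TYPE_UINT16 : VK_INDEX_TYPE_UINT32;
        //     vkCmdBindIndexBuffer(vkCmdBuffer, nativeIndexBuffer, 0, indexType);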

        BS_INC_RENDER_STAT(NumIndexBufferBinds);
    }

    void VulkanRenderAPI::setVertexDeclaration(const SPtr<VertexDeclarationCore>& vertexDeclaration,
        const SPtr<CommandBuffer>& commandBuffer)
    {
        // TODO
    }

    void VulkanRenderAPI::setDrawOperation(DrawOperationType op, const SPtr<CommandBuffer>& commandBuffer)
    {
        // TODO
    }

    void VulkanRenderAPI::draw(UINT32 vertexOffset, UINT32 vertexCount, UINT32 instanceCount,
        const SPtr<CommandBuffer>& commandBuffer)
    {
        UINT32 primCount = 0;

        // TODO
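        // A minimal sketch, not the final implementation: once a render pass and pipeline are bound on the internal
        // command buffer, the draw itself is a single recorded command (the raw command buffer handle is an assumption):
        //
        //     vkCmdDraw(vkCmdBuffer, vertexCount, std::max(instanceCount, 1U), vertexOffset, 0);
        //
        // primCount would be derived from vertexCount and the active DrawOperationType
        // (e.g. vertexCount / 3 for triangle lists, vertexCount - 2 for triangle strips).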

        BS_INC_RENDER_STAT(NumDrawCalls);
        BS_ADD_RENDER_STAT(NumVertices, vertexCount);
        BS_ADD_RENDER_STAT(NumPrimitives, primCount);
    }

    void VulkanRenderAPI::drawIndexed(UINT32 startIndex, UINT32 indexCount, UINT32 vertexOffset, UINT32 vertexCount,
        UINT32 instanceCount, const SPtr<CommandBuffer>& commandBuffer)
    {
        UINT32 primCount = 0;

        // TODO
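        // A minimal sketch, not the final implementation (the raw command buffer handle is an assumption):
        //
        //     vkCmdDrawIndexed(vkCmdBuffer, indexCount, std::max(instanceCount, 1U), startIndex, vertexOffset, 0);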

        BS_INC_RENDER_STAT(NumDrawCalls);
        BS_ADD_RENDER_STAT(NumVertices, vertexCount);
        BS_ADD_RENDER_STAT(NumPrimitives, primCount);
    }

    void VulkanRenderAPI::dispatchCompute(UINT32 numGroupsX, UINT32 numGroupsY, UINT32 numGroupsZ,
        const SPtr<CommandBuffer>& commandBuffer)
    {
        // TODO
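        // A minimal sketch, not the final implementation: with the compute pipeline and descriptor sets already bound,
        // the dispatch maps directly onto the Vulkan command (the raw command buffer handle is an assumption):
        //
        //     vkCmdDispatch(vkCmdBuffer, numGroupsX, numGroupsY, numGroupsZ);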

        BS_INC_RENDER_STAT(NumComputeCalls);
    }

    void VulkanRenderAPI::setScissorRect(UINT32 left, UINT32 top, UINT32 right, UINT32 bottom,
        const SPtr<CommandBuffer>& commandBuffer)
    {
        // TODO
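        // A minimal sketch, not the final implementation (the raw command buffer handle is an assumption):
        //
        //     VkRect2D scissor;
        //     scissor.offset = { (int32_t)left, (int32_t)top };
        //     scissor.extent = { right - left, bottom - top };
        //
        //     vkCmdSetScissor(vkCmdBuffer, 0, 1, &scissor);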
    }

    void VulkanRenderAPI::setStencilRef(UINT32 value, const SPtr<CommandBuffer>& commandBuffer)
    {
        // TODO
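        // A minimal sketch, not the final implementation (the raw command buffer handle is an assumption):
        //
        //     vkCmdSetStencilReference(vkCmdBuffer, VK_STENCIL_FRONT_AND_BACK, value);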
    }

    void VulkanRenderAPI::clearViewport(UINT32 buffers, const Color& color, float depth, UINT16 stencil, UINT8 targetMask,
        const SPtr<CommandBuffer>& commandBuffer)
    {
        // TODO - If clearing the whole viewport, call clearRenderTarget, otherwise begin render pass (if needed), and
        // execute vkCmdClearAttachments with a valid rect. If no RT is bound, this is a no-op (log warning)
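        // A minimal sketch of the partial-clear path, not the final implementation (the attachment index, layer count
        // and raw command buffer handle are assumptions):
        //
        //     VkClearAttachment attachment;
        //     attachment.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
        //     attachment.colorAttachment = 0;
        //     attachment.clearValue.color = { { color.r, color.g, color.b, color.a } };
        //
        //     VkClearRect clearRect;
        //     clearRect.rect = viewportRect;     // viewportRect: the current viewport expressed as a VkRect2D
        //     clearRect.baseArrayLayer = 0;
        //     clearRect.layerCount = 1;
        //
        //     vkCmdClearAttachments(vkCmdBuffer, 1, &attachment, 1, &clearRect);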

        BS_INC_RENDER_STAT(NumClears);
    }

    void VulkanRenderAPI::clearRenderTarget(UINT32 buffers, const Color& color, float depth, UINT16 stencil,
        UINT8 targetMask, const SPtr<CommandBuffer>& commandBuffer)
    {
        // TODO - If currently within render pass, call vkCmdClearAttachments. Otherwise call cb->setClearValues
        // which should then queue CB clear on render pass begin.

        BS_INC_RENDER_STAT(NumClears);
    }

    void VulkanRenderAPI::setRenderTarget(const SPtr<RenderTargetCore>& target, bool readOnlyDepthStencil,
        const SPtr<CommandBuffer>& commandBuffer)
    {
        VulkanCommandBuffer* cb = getCB(commandBuffer);
        VulkanCmdBuffer* vkCB = cb->getInternal();

        if(vkCB->isInRenderPass())
            vkCB->endRenderPass();

        // We don't actually begin a new render pass until the next render-pass specific command gets queued on the CB
        vkCB->setRenderTarget(target);

        BS_INC_RENDER_STAT(NumRenderTargetChanges);
    }

    void VulkanRenderAPI::swapBuffers(const SPtr<RenderTargetCore>& target, UINT32 syncMask)
    {
        THROW_IF_NOT_CORE_THREAD;
        target->swapBuffers(syncMask);

        // See if any command buffers finished executing
        VulkanCommandBufferManager& cbm = static_cast<VulkanCommandBufferManager&>(CommandBufferManager::instance());
        for (UINT32 i = 0; i < (UINT32)mDevices.size(); i++)
            cbm.refreshStates(i);

        BS_INC_RENDER_STAT(NumPresents);
    }

    void VulkanRenderAPI::addCommands(const SPtr<CommandBuffer>& commandBuffer, const SPtr<CommandBuffer>& secondary)
    {
        BS_EXCEPT(NotImplementedException, "Secondary command buffers not implemented");
    }

    void VulkanRenderAPI::executeCommands(const SPtr<CommandBuffer>& commandBuffer, UINT32 syncMask)
    {
        THROW_IF_NOT_CORE_THREAD;

        if (commandBuffer == nullptr)
            return;

        VulkanCommandBuffer& cmdBuffer = static_cast<VulkanCommandBuffer&>(*commandBuffer);
        cmdBuffer.submit(syncMask);
    }

    void VulkanRenderAPI::convertProjectionMatrix(const Matrix4& matrix, Matrix4& dest)
    {
        dest = matrix;

        // Convert depth range from [-1,1] to [0,1]
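        // This is equivalent to pre-multiplying the projection matrix by a bias matrix that maps z' = 0.5 * z + 0.5 * w:
        //
        //     [ 1  0  0    0   ]
        //     [ 0  1  0    0   ]   *   matrix
        //     [ 0  0  0.5  0.5 ]
        //     [ 0  0  0    1   ]
        //
        // i.e. row 2 of the result becomes the average of rows 2 and 3 of the input.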
        dest[2][0] = (dest[2][0] + dest[3][0]) / 2;
        dest[2][1] = (dest[2][1] + dest[3][1]) / 2;
        dest[2][2] = (dest[2][2] + dest[3][2]) / 2;
        dest[2][3] = (dest[2][3] + dest[3][3]) / 2;
    }

    const RenderAPIInfo& VulkanRenderAPI::getAPIInfo() const
    {
        static RenderAPIInfo info(0.0f, 0.0f, 0.0f, 1.0f, VET_COLOR_ABGR, false, true, true, true);
        return info;
    }

    GpuParamBlockDesc VulkanRenderAPI::generateParamBlockDesc(const String& name, Vector<GpuParamDataDesc>& params)
    {
        GpuParamBlockDesc block;
        block.blockSize = 0;
        block.isShareable = true;
        block.name = name;
        block.slot = 0;
        block.set = 0;

        for (auto& param : params)
        {
            const GpuParamDataTypeInfo& typeInfo = GpuParams::PARAM_SIZES.lookup[param.type];

            UINT32 size = typeInfo.size / 4;
            UINT32 alignment = typeInfo.alignment / 4;

            // Fix alignment if needed
            UINT32 alignOffset = block.blockSize % alignment;
            if (alignOffset != 0)
            {
                UINT32 padding = (alignment - alignOffset);
                block.blockSize += padding;
            }

            if (param.arraySize > 1)
            {
                // Array elements are always padded and aligned to vec4
                alignOffset = size % typeInfo.baseTypeSize;
                if (alignOffset != 0)
                {
                    UINT32 padding = (typeInfo.baseTypeSize - alignOffset);
                    size += padding;
                }

                alignOffset = block.blockSize % typeInfo.baseTypeSize;
                if (alignOffset != 0)
                {
                    UINT32 padding = (typeInfo.baseTypeSize - alignOffset);
                    block.blockSize += padding;
                }

                param.elementSize = size;
                param.arrayElementStride = size;
                param.cpuMemOffset = block.blockSize;
                param.gpuMemOffset = 0;

                block.blockSize += size * param.arraySize;
            }
            else
            {
                param.elementSize = size;
                param.arrayElementStride = size;
                param.cpuMemOffset = block.blockSize;
                param.gpuMemOffset = 0;

                block.blockSize += size;
            }

            param.paramBlockSlot = 0;
            param.paramBlockSet = 0;
        }
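        // Worked example of the logic above (all sizes in 32-bit words, assuming std140-style entries in PARAM_SIZES):
        // a float (size 1, alignment 1) lands at offset 0; a following float3 (size 3, alignment 4) is padded up to
        // offset 4, giving a raw size of 7 words, which the rounding below then pads to 8 words (32 bytes).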
        // Constant buffer sizes must always be a multiple of 16 bytes; blockSize is measured in 32-bit words, so
        // round it up to a multiple of 4 words
        if (block.blockSize % 4 != 0)
            block.blockSize += (4 - (block.blockSize % 4));

        return block;
    }

    void VulkanRenderAPI::initCapabilites()
    {
        mNumDevices = (UINT32)mDevices.size();
        mCurrentCapabilities = bs_newN<RenderAPICapabilities>(mNumDevices);

        UINT32 deviceIdx = 0;
        for (auto& device : mDevices)
        {
            RenderAPICapabilities& caps = mCurrentCapabilities[deviceIdx];
            const VkPhysicalDeviceProperties& deviceProps = device->getDeviceProperties();
            const VkPhysicalDeviceFeatures& deviceFeatures = device->getDeviceFeatures();
            const VkPhysicalDeviceLimits& deviceLimits = deviceProps.limits;
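            // The decode below matches the Vulkan version packing (VK_VERSION_MAJOR: bits 31-22,
            // VK_VERSION_MINOR: bits 21-12, VK_VERSION_PATCH: bits 11-0)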
            DriverVersion driverVersion;
            driverVersion.major = ((uint32_t)(deviceProps.apiVersion) >> 22);
            driverVersion.minor = ((uint32_t)(deviceProps.apiVersion) >> 12) & 0x3ff;
            driverVersion.release = (uint32_t)(deviceProps.apiVersion) & 0xfff;
            driverVersion.build = 0;

            caps.setDriverVersion(driverVersion);
            caps.setDeviceName(deviceProps.deviceName);

            // Determine vendor
            switch (deviceProps.vendorID)
            {
            case 0x10DE:
                caps.setVendor(GPU_NVIDIA);
                break;
            case 0x1002:
                caps.setVendor(GPU_AMD);
                break;
            case 0x163C:
            case 0x8086:
                caps.setVendor(GPU_INTEL);
                break;
            default:
                caps.setVendor(GPU_UNKNOWN);
                break;
            };

            caps.setRenderAPIName(getName());

            if(deviceFeatures.textureCompressionBC)
                caps.setCapability(RSC_TEXTURE_COMPRESSION_BC);

            if (deviceFeatures.textureCompressionETC2)
                caps.setCapability(RSC_TEXTURE_COMPRESSION_ETC2);

            if (deviceFeatures.textureCompressionASTC_LDR)
                caps.setCapability(RSC_TEXTURE_COMPRESSION_ASTC);

            caps.setMaxBoundVertexBuffers(deviceLimits.maxVertexInputBindings);
            caps.setNumMultiRenderTargets(deviceLimits.maxColorAttachments);

            caps.setCapability(RSC_COMPUTE_PROGRAM);

            caps.addShaderProfile("ps_5_0");
            caps.addShaderProfile("vs_5_0");
            caps.addShaderProfile("cs_5_0");

            caps.addGpuProgramProfile(GPP_FS_5_0, "ps_5_0");
            caps.addGpuProgramProfile(GPP_VS_5_0, "vs_5_0");
            caps.addGpuProgramProfile(GPP_CS_5_0, "cs_5_0");

            caps.setNumTextureUnits(GPT_FRAGMENT_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);
            caps.setNumTextureUnits(GPT_VERTEX_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);
            caps.setNumTextureUnits(GPT_COMPUTE_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);

            caps.setNumGpuParamBlockBuffers(GPT_FRAGMENT_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);
            caps.setNumGpuParamBlockBuffers(GPT_VERTEX_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);
            caps.setNumGpuParamBlockBuffers(GPT_COMPUTE_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);

            caps.setNumLoadStoreTextureUnits(GPT_FRAGMENT_PROGRAM, deviceLimits.maxPerStageDescriptorStorageImages);
            caps.setNumLoadStoreTextureUnits(GPT_COMPUTE_PROGRAM, deviceLimits.maxPerStageDescriptorStorageImages);

            if(deviceFeatures.geometryShader)
            {
                caps.setCapability(RSC_GEOMETRY_PROGRAM);
                caps.addShaderProfile("gs_5_0");
                caps.addGpuProgramProfile(GPP_GS_5_0, "gs_5_0");
                caps.setNumTextureUnits(GPT_GEOMETRY_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);
                caps.setNumGpuParamBlockBuffers(GPT_GEOMETRY_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);
                caps.setGeometryProgramNumOutputVertices(deviceLimits.maxGeometryOutputVertices);
            }

            if (deviceFeatures.tessellationShader)
            {
                caps.setCapability(RSC_TESSELLATION_PROGRAM);
                caps.addShaderProfile("hs_5_0");
                caps.addShaderProfile("ds_5_0");
                caps.addGpuProgramProfile(GPP_HS_5_0, "hs_5_0");
                caps.addGpuProgramProfile(GPP_DS_5_0, "ds_5_0");

                caps.setNumTextureUnits(GPT_HULL_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);
                caps.setNumTextureUnits(GPT_DOMAIN_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);

                caps.setNumGpuParamBlockBuffers(GPT_HULL_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);
                caps.setNumGpuParamBlockBuffers(GPT_DOMAIN_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);
            }

            caps.setNumCombinedTextureUnits(caps.getNumTextureUnits(GPT_FRAGMENT_PROGRAM)
                + caps.getNumTextureUnits(GPT_VERTEX_PROGRAM) + caps.getNumTextureUnits(GPT_GEOMETRY_PROGRAM)
                + caps.getNumTextureUnits(GPT_HULL_PROGRAM) + caps.getNumTextureUnits(GPT_DOMAIN_PROGRAM)
                + caps.getNumTextureUnits(GPT_COMPUTE_PROGRAM));

            caps.setNumCombinedGpuParamBlockBuffers(caps.getNumGpuParamBlockBuffers(GPT_FRAGMENT_PROGRAM)
                + caps.getNumGpuParamBlockBuffers(GPT_VERTEX_PROGRAM) + caps.getNumGpuParamBlockBuffers(GPT_GEOMETRY_PROGRAM)
                + caps.getNumGpuParamBlockBuffers(GPT_HULL_PROGRAM) + caps.getNumGpuParamBlockBuffers(GPT_DOMAIN_PROGRAM)
                + caps.getNumGpuParamBlockBuffers(GPT_COMPUTE_PROGRAM));

            caps.setNumCombinedLoadStoreTextureUnits(caps.getNumLoadStoreTextureUnits(GPT_FRAGMENT_PROGRAM)
                + caps.getNumLoadStoreTextureUnits(GPT_COMPUTE_PROGRAM));

            caps.addShaderProfile("glsl");

            deviceIdx++;
        }
    }

    VulkanCommandBuffer* VulkanRenderAPI::getCB(const SPtr<CommandBuffer>& buffer)
    {
        if (buffer != nullptr)
            return static_cast<VulkanCommandBuffer*>(buffer.get());

        return static_cast<VulkanCommandBuffer*>(mMainCommandBuffer.get());
    }
}