BsVulkanRenderAPI.cpp 21 KB

123456789101112131415161718192021222324252627282930313233343536373839404142434445464748495051525354555657585960616263646566676869707172737475767778798081828384858687888990919293949596979899100101102103104105106107108109110111112113114115116117118119120121122123124125126127128129130131132133134135136137138139140141142143144145146147148149150151152153154155156157158159160161162163164165166167168169170171172173174175176177178179180181182183184185186187188189190191192193194195196197198199200201202203204205206207208209210211212213214215216217218219220221222223224225226227228229230231232233234235236237238239240241242243244245246247248249250251252253254255256257258259260261262263264265266267268269270271272273274275276277278279280281282283284285286287288289290291292293294295296297298299300301302303304305306307308309310311312313314315316317318319320321322323324325326327328329330331332333334335336337338339340341342343344345346347348349350351352353354355356357358359360361362363364365366367368369370371372373374375376377378379380381382383384385386387388389390391392393394395396397398399400401402403404405406407408409410411412413414415416417418419420421422423424425426427428429430431432433434435436437438439440441442443444445446447448449450451452453454455456457458459460461462463464465466467468469470471472473474475476477478479480481482483484485486487488489490491492493494495496497498499500501502503504505506507508509510511512513514515516517518519520521522523524525526527528529530531532533534535536537538539540541542543544545546547548549550551552553554555556557558559560561562563564565566567568569570571572573574575576577578579580581582583584585586587588589590591592593594595596597598599600601602603604605606607608609610611612613614615616617618619620621622623624625626627628629630631632633634635636637638639640641642643644645646647648649650651652653654655656657658659660661662663664665666667668669670671672673674675676677678679680681682
  1. //********************************** Banshee Engine (www.banshee3d.com) **************************************************//
  2. //**************** Copyright (c) 2016 Marko Pintera ([email protected]). All rights reserved. **********************//
  3. #include "BsVulkanRenderAPI.h"
  4. #include "BsCoreThread.h"
  5. #include "BsRenderStats.h"
  6. #include "BsGpuParamDesc.h"
  7. #include "BsVulkanDevice.h"
  8. #include "BsVulkanTextureManager.h"
  9. #include "BsVulkanRenderWindowManager.h"
  10. #include "BsVulkanHardwareBufferManager.h"
  11. #include "BsVulkanRenderStateManager.h"
  12. #include "BsGpuProgramManager.h"
  13. #include "BsVulkanQueryManager.h"
  14. #include "BsVulkanGLSLProgramFactory.h"
  15. #include "BsVulkanCommandBufferManager.h"
  16. #include "BsVulkanCommandBuffer.h"
  17. #include "BsVulkanGpuParams.h"
  18. #include "BsVulkanVertexInputManager.h"
  19. #include "Win32/BsWin32VideoModeInfo.h"
  20. namespace BansheeEngine
  21. {
	// Entry points for the WSI (surface/swap chain) and debug report extensions. Vulkan does
	// not export extension functions directly; these pointers start out null and are fetched
	// at runtime in VulkanRenderAPI::initialize() via GET_INSTANCE_PROC_ADDR /
	// GET_DEVICE_PROC_ADDR.
	PFN_vkCreateDebugReportCallbackEXT vkCreateDebugReportCallbackEXT = nullptr;
	PFN_vkDestroyDebugReportCallbackEXT vkDestroyDebugReportCallbackEXT = nullptr;
	PFN_vkGetPhysicalDeviceSurfaceSupportKHR vkGetPhysicalDeviceSurfaceSupportKHR = nullptr;
	PFN_vkGetPhysicalDeviceSurfaceFormatsKHR vkGetPhysicalDeviceSurfaceFormatsKHR = nullptr;
	PFN_vkGetPhysicalDeviceSurfaceCapabilitiesKHR vkGetPhysicalDeviceSurfaceCapabilitiesKHR = nullptr;
	PFN_vkGetPhysicalDeviceSurfacePresentModesKHR vkGetPhysicalDeviceSurfacePresentModesKHR = nullptr;
	PFN_vkCreateSwapchainKHR vkCreateSwapchainKHR = nullptr;
	PFN_vkDestroySwapchainKHR vkDestroySwapchainKHR = nullptr;
	PFN_vkGetSwapchainImagesKHR vkGetSwapchainImagesKHR = nullptr;
	PFN_vkAcquireNextImageKHR vkAcquireNextImageKHR = nullptr;
	PFN_vkQueuePresentKHR vkQueuePresentKHR = nullptr;
	/**
	 * Callback hooked into the VK_EXT_debug_report extension. Formats the validation-layer
	 * report into a single message and routes it by severity: errors throw a
	 * RenderingAPIException, (performance) warnings go to the warning log, everything else
	 * to the debug log. Always returns VK_FALSE so the Vulkan call that triggered the
	 * report is not aborted.
	 */
	VkBool32 debugMsgCallback(VkDebugReportFlagsEXT flags, VkDebugReportObjectTypeEXT objType, uint64_t srcObject,
		size_t location, int32_t msgCode, const char* pLayerPrefix, const char* pMsg, void* pUserData)
	{
		StringStream message;

		// Determine prefix. Note: the checks are not exclusive, so if multiple severity
		// bits are set the prefixes concatenate (e.g. "WARNINGPERFORMANCE").
		if (flags & VK_DEBUG_REPORT_ERROR_BIT_EXT)
			message << "ERROR";

		if (flags & VK_DEBUG_REPORT_WARNING_BIT_EXT)
			message << "WARNING";

		if (flags & VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT)
			message << "PERFORMANCE";

		if (flags & VK_DEBUG_REPORT_INFORMATION_BIT_EXT)
			message << "INFO";

		if (flags & VK_DEBUG_REPORT_DEBUG_BIT_EXT)
			message << "DEBUG";

		message << ": [" << pLayerPrefix << "] Code " << msgCode << ": " << pMsg << std::endl;

		// Severity-based routing. The BS_EXCEPT/LOGWRN/LOGDBG macros are deliberately used
		// without trailing semicolons here; they expand to full statements.
		if (flags & VK_DEBUG_REPORT_ERROR_BIT_EXT)
			BS_EXCEPT(RenderingAPIException, message.str())
		else if (flags & VK_DEBUG_REPORT_WARNING_BIT_EXT || flags & VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT)
			LOGWRN(message.str())
		else
			LOGDBG(message.str())

		// Don't abort calls that caused a validation message
		return VK_FALSE;
	}
	/** Only nulls out the handles; the actual Vulkan setup happens in initialize(). */
	VulkanRenderAPI::VulkanRenderAPI()
		:mInstance(nullptr), mDebugCallback(nullptr)
	{ }
	/** Intentionally empty — all teardown is performed in destroyCore(). */
	VulkanRenderAPI::~VulkanRenderAPI()
	{

	}
  64. const StringID& VulkanRenderAPI::getName() const
  65. {
  66. static StringID strName("VulkanRenderAPI");
  67. return strName;
  68. }
  69. const String& VulkanRenderAPI::getShadingLanguageName() const
  70. {
  71. static String strName("glsl");
  72. return strName;
  73. }
  74. void VulkanRenderAPI::initialize()
  75. {
  76. THROW_IF_NOT_CORE_THREAD;
  77. // Create instance
  78. VkApplicationInfo appInfo;
  79. appInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
  80. appInfo.pNext = nullptr;
  81. appInfo.pApplicationName = "Banshee3D App";
  82. appInfo.applicationVersion = 1;
  83. appInfo.pEngineName = "Banshee3D";
  84. appInfo.engineVersion = (0 << 24) | (4 << 16) | 0;
  85. appInfo.apiVersion = VK_API_VERSION_1_0;
  86. #if BS_DEBUG_MODE
  87. const char* layers[] =
  88. {
  89. "VK_LAYER_LUNARG_standard_validation"
  90. };
  91. const char* extensions[] =
  92. {
  93. nullptr, /** Surface extension */
  94. nullptr, /** OS specific surface extension */
  95. VK_EXT_DEBUG_REPORT_EXTENSION_NAME
  96. };
  97. #else
  98. const char** layers = nullptr;
  99. const char* extensions[] =
  100. {
  101. nullptr, /** Surface extension */
  102. nullptr, /** OS specific surface extension */
  103. };
  104. #endif
  105. extensions[0] = VK_KHR_SURFACE_EXTENSION_NAME;
  106. #if BS_PLATFORM == BS_PLATFORM_WIN32
  107. extensions[1] = VK_KHR_WIN32_SURFACE_EXTENSION_NAME;
  108. #elif BS_PLATFORM == BS_PLATFORM_ANDROID
  109. extensions[1] = VK_KHR_ANDROID_SURFACE_EXTENSION_NAME;
  110. #else
  111. extensions[1] = VK_KHR_XCB_SURFACE_EXTENSION_NAME;
  112. #endif
  113. uint32_t numLayers = sizeof(layers) / sizeof(layers[0]);
  114. uint32_t numExtensions = sizeof(extensions) / sizeof(extensions[0]);
  115. VkInstanceCreateInfo instanceInfo;
  116. instanceInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
  117. instanceInfo.pNext = nullptr;
  118. instanceInfo.flags = 0;
  119. instanceInfo.pApplicationInfo = &appInfo;
  120. instanceInfo.enabledLayerCount = numLayers;
  121. instanceInfo.ppEnabledLayerNames = layers;
  122. instanceInfo.enabledExtensionCount = numExtensions;
  123. instanceInfo.ppEnabledExtensionNames = extensions;
  124. VkResult result = vkCreateInstance(&instanceInfo, gVulkanAllocator, &mInstance);
  125. assert(result == VK_SUCCESS);
  126. // Set up debugging
  127. #if BS_DEBUG_MODE
  128. VkDebugReportFlagsEXT debugFlags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT |
  129. VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT;
  130. GET_INSTANCE_PROC_ADDR(mInstance, CreateDebugReportCallbackEXT);
  131. GET_INSTANCE_PROC_ADDR(mInstance, DestroyDebugReportCallbackEXT);
  132. VkDebugReportCallbackCreateInfoEXT debugInfo;
  133. debugInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
  134. debugInfo.pNext = nullptr;
  135. debugInfo.flags = 0;
  136. debugInfo.pfnCallback = (PFN_vkDebugReportCallbackEXT)debugMsgCallback;
  137. debugInfo.flags = debugFlags;
  138. result = vkCreateDebugReportCallbackEXT(mInstance, &debugInfo, nullptr, &mDebugCallback);
  139. assert(result == VK_SUCCESS);
  140. #endif
  141. // Enumerate all devices
  142. uint32_t numDevices;
  143. result = vkEnumeratePhysicalDevices(mInstance, &numDevices, nullptr);
  144. assert(result == VK_SUCCESS);
  145. Vector<VkPhysicalDevice> physicalDevices(numDevices);
  146. result = vkEnumeratePhysicalDevices(mInstance, &numDevices, physicalDevices.data());
  147. assert(result == VK_SUCCESS);
  148. mDevices.resize(numDevices);
  149. for(uint32_t i = 0; i < numDevices; i++)
  150. mDevices[i] = bs_shared_ptr_new<VulkanDevice>(physicalDevices[i], i);
  151. // Find primary device
  152. // Note: MULTIGPU - Detect multiple similar devices here if supporting multi-GPU
  153. for (uint32_t i = 0; i < numDevices; i++)
  154. {
  155. bool isPrimary = mDevices[i]->getDeviceProperties().deviceType == VK_PHYSICAL_DEVICE_TYPE_DISCRETE_GPU;
  156. if (isPrimary)
  157. {
  158. mDevices[i]->setIsPrimary();
  159. mPrimaryDevices.push_back(mDevices[i]);
  160. break;
  161. }
  162. }
  163. if (mPrimaryDevices.size() == 0)
  164. mPrimaryDevices.push_back(mDevices[0]);
  165. #if BS_PLATFORM == BS_PLATFORM_WIN32
  166. mVideoModeInfo = bs_shared_ptr_new<Win32VideoModeInfo>();
  167. #else
  168. static_assert(false, "mVideoModeInfo needs to be created.")
  169. #endif
  170. // Get required extension functions
  171. GET_INSTANCE_PROC_ADDR(mInstance, GetPhysicalDeviceSurfaceSupportKHR);
  172. GET_INSTANCE_PROC_ADDR(mInstance, GetPhysicalDeviceSurfaceFormatsKHR);
  173. GET_INSTANCE_PROC_ADDR(mInstance, GetPhysicalDeviceSurfaceCapabilitiesKHR);
  174. GET_INSTANCE_PROC_ADDR(mInstance, GetPhysicalDeviceSurfacePresentModesKHR);
  175. VkDevice presentDevice = _getPresentDevice()->getLogical();
  176. GET_DEVICE_PROC_ADDR(presentDevice, CreateSwapchainKHR);
  177. GET_DEVICE_PROC_ADDR(presentDevice, DestroySwapchainKHR);
  178. GET_DEVICE_PROC_ADDR(presentDevice, GetSwapchainImagesKHR);
  179. GET_DEVICE_PROC_ADDR(presentDevice, AcquireNextImageKHR);
  180. GET_DEVICE_PROC_ADDR(presentDevice, QueuePresentKHR);
  181. // Create command buffer manager
  182. CommandBufferManager::startUp<VulkanCommandBufferManager>(*this);
  183. // Create main command buffer
  184. mMainCommandBuffer = std::static_pointer_cast<VulkanCommandBuffer>(CommandBuffer::create(GQT_GRAPHICS));
  185. // Create the texture manager for use by others
  186. TextureManager::startUp<VulkanTextureManager>();
  187. TextureCoreManager::startUp<VulkanTextureCoreManager>();
  188. // Create hardware buffer manager
  189. HardwareBufferManager::startUp();
  190. HardwareBufferCoreManager::startUp<VulkanHardwareBufferCoreManager>();
  191. // Create render window manager
  192. RenderWindowManager::startUp<VulkanRenderWindowManager>();
  193. RenderWindowCoreManager::startUp<VulkanRenderWindowCoreManager>(*this);
  194. // Create query manager
  195. QueryManager::startUp<VulkanQueryManager>();
  196. // Create vertex input manager
  197. VulkanVertexInputManager::startUp();
  198. // Create & register HLSL factory
  199. mGLSLFactory = bs_new<VulkanGLSLProgramFactory>();
  200. // Create render state manager
  201. RenderStateCoreManager::startUp<VulkanRenderStateCoreManager>();
  202. GpuProgramCoreManager::instance().addFactory(mGLSLFactory);
  203. initCapabilites();
  204. RenderAPICore::initialize();
  205. }
	/**
	 * Tears down everything created in initialize(), roughly in reverse order: the GLSL
	 * factory and the managers first, then the devices are drained (waitIdle) so no GPU
	 * work still references resources being freed, and finally the debug callback and the
	 * Vulkan instance are destroyed. Must be called on the core thread.
	 */
	void VulkanRenderAPI::destroyCore()
	{
		THROW_IF_NOT_CORE_THREAD;

		if (mGLSLFactory != nullptr)
		{
			bs_delete(mGLSLFactory);
			mGLSLFactory = nullptr;
		}

		VulkanVertexInputManager::shutDown();
		QueryManager::shutDown();
		RenderStateCoreManager::shutDown();
		RenderWindowCoreManager::shutDown();
		RenderWindowManager::shutDown();
		HardwareBufferCoreManager::shutDown();
		HardwareBufferManager::shutDown();
		TextureCoreManager::shutDown();
		TextureManager::shutDown();

		// Release the main command buffer before the command buffer manager shuts down
		mMainCommandBuffer = nullptr;

		// Make sure everything finishes and all resources get freed
		VulkanCommandBufferManager& cmdBufManager = static_cast<VulkanCommandBufferManager&>(CommandBufferManager::instance());
		for (UINT32 i = 0; i < (UINT32)mDevices.size(); i++)
		{
			mDevices[i]->waitIdle();
			cmdBufManager.refreshStates(i);
		}

		CommandBufferManager::shutDown();

		mPrimaryDevices.clear();
		mDevices.clear();

#if BS_DEBUG_MODE
		if (mDebugCallback != nullptr)
			vkDestroyDebugReportCallbackEXT(mInstance, mDebugCallback, gVulkanAllocator);
#endif

		vkDestroyInstance(mInstance, gVulkanAllocator);

		RenderAPICore::destroyCore();
	}
	/** Binds a graphics pipeline state. Vulkan implementation pending; currently only updates render statistics. */
	void VulkanRenderAPI::setGraphicsPipeline(const SPtr<GpuPipelineStateCore>& pipelineState,
		const SPtr<CommandBuffer>& commandBuffer)
	{
		// TODO
		BS_INC_RENDER_STAT(NumPipelineStateChanges);
	}
	/** Binds a compute pipeline. Vulkan implementation pending; currently only updates render statistics. */
	void VulkanRenderAPI::setComputePipeline(const SPtr<GpuProgramCore>& computeProgram,
		const SPtr<CommandBuffer>& commandBuffer)
	{
		// TODO
		BS_INC_RENDER_STAT(NumPipelineStateChanges);
	}
  253. void VulkanRenderAPI::setGpuParams(const SPtr<GpuParamsCore>& gpuParams, const SPtr<CommandBuffer>& commandBuffer)
  254. {
  255. VulkanCommandBuffer* cb = getCB(commandBuffer);
  256. SPtr<VulkanGpuParams> vulkanGpuParams = std::static_pointer_cast<VulkanGpuParams>(gpuParams);
  257. vulkanGpuParams->bind(*cb);
  258. BS_INC_RENDER_STAT(NumGpuParamBinds);
  259. }
	/** Marks the start of a frame. No-op for the Vulkan backend. */
	void VulkanRenderAPI::beginFrame(const SPtr<CommandBuffer>& commandBuffer)
	{
	}
	/** Marks the end of a frame. No-op for the Vulkan backend. */
	void VulkanRenderAPI::endFrame(const SPtr<CommandBuffer>& commandBuffer)
	{
	}
	/** Sets the active viewport. Vulkan implementation pending. */
	void VulkanRenderAPI::setViewport(const Rect2& vp, const SPtr<CommandBuffer>& commandBuffer)
	{
		// TODO
	}
	/** Binds a set of vertex buffers. Vulkan implementation pending; currently only updates render statistics. */
	void VulkanRenderAPI::setVertexBuffers(UINT32 index, SPtr<VertexBufferCore>* buffers, UINT32 numBuffers,
		const SPtr<CommandBuffer>& commandBuffer)
	{
		// TODO
		BS_INC_RENDER_STAT(NumVertexBufferBinds);
	}
	/** Binds an index buffer. Vulkan implementation pending; currently only updates render statistics. */
	void VulkanRenderAPI::setIndexBuffer(const SPtr<IndexBufferCore>& buffer, const SPtr<CommandBuffer>& commandBuffer)
	{
		// TODO
		BS_INC_RENDER_STAT(NumIndexBufferBinds);
	}
	/** Sets the vertex declaration describing the input layout. Vulkan implementation pending. */
	void VulkanRenderAPI::setVertexDeclaration(const SPtr<VertexDeclarationCore>& vertexDeclaration,
		const SPtr<CommandBuffer>& commandBuffer)
	{
		// TODO
	}
	/** Sets the primitive topology used by subsequent draws. Vulkan implementation pending. */
	void VulkanRenderAPI::setDrawOperation(DrawOperationType op, const SPtr<CommandBuffer>& commandBuffer)
	{
		// TODO
	}
	/** Executes a non-indexed draw. Vulkan implementation pending; currently only updates render statistics. */
	void VulkanRenderAPI::draw(UINT32 vertexOffset, UINT32 vertexCount, UINT32 instanceCount,
		const SPtr<CommandBuffer>& commandBuffer)
	{
		// Primitive count is not yet derived from vertexCount/topology, so it stays 0
		UINT32 primCount = 0;

		// TODO
		BS_INC_RENDER_STAT(NumDrawCalls);
		BS_ADD_RENDER_STAT(NumVertices, vertexCount);
		BS_ADD_RENDER_STAT(NumPrimitives, primCount);
	}
	/** Executes an indexed draw. Vulkan implementation pending; currently only updates render statistics. */
	void VulkanRenderAPI::drawIndexed(UINT32 startIndex, UINT32 indexCount, UINT32 vertexOffset, UINT32 vertexCount,
		UINT32 instanceCount, const SPtr<CommandBuffer>& commandBuffer)
	{
		// Primitive count is not yet derived from indexCount/topology, so it stays 0
		UINT32 primCount = 0;

		// TODO
		BS_INC_RENDER_STAT(NumDrawCalls);
		BS_ADD_RENDER_STAT(NumVertices, vertexCount);
		BS_ADD_RENDER_STAT(NumPrimitives, primCount);
	}
	/** Dispatches a compute workload. Vulkan implementation pending; currently only updates render statistics. */
	void VulkanRenderAPI::dispatchCompute(UINT32 numGroupsX, UINT32 numGroupsY, UINT32 numGroupsZ,
		const SPtr<CommandBuffer>& commandBuffer)
	{
		// TODO
		BS_INC_RENDER_STAT(NumComputeCalls);
	}
	/** Sets the scissor rectangle. Vulkan implementation pending. */
	void VulkanRenderAPI::setScissorRect(UINT32 left, UINT32 top, UINT32 right, UINT32 bottom,
		const SPtr<CommandBuffer>& commandBuffer)
	{
		// TODO
	}
	/** Sets the stencil reference value. Vulkan implementation pending. */
	void VulkanRenderAPI::setStencilRef(UINT32 value, const SPtr<CommandBuffer>& commandBuffer)
	{
		// TODO
	}
	/** Clears the current viewport area. Vulkan implementation pending; currently only updates render statistics. */
	void VulkanRenderAPI::clearViewport(UINT32 buffers, const Color& color, float depth, UINT16 stencil, UINT8 targetMask,
		const SPtr<CommandBuffer>& commandBuffer)
	{
		// TODO
		BS_INC_RENDER_STAT(NumClears);
	}
	/** Clears the entire render target. Vulkan implementation pending; currently only updates render statistics. */
	void VulkanRenderAPI::clearRenderTarget(UINT32 buffers, const Color& color, float depth, UINT16 stencil,
		UINT8 targetMask, const SPtr<CommandBuffer>& commandBuffer)
	{
		// TODO
		BS_INC_RENDER_STAT(NumClears);
	}
	/** Makes the provided render target active. Vulkan implementation pending; currently only updates render statistics. */
	void VulkanRenderAPI::setRenderTarget(const SPtr<RenderTargetCore>& target, bool readOnlyDepthStencil,
		const SPtr<CommandBuffer>& commandBuffer)
	{
		// TODO
		BS_INC_RENDER_STAT(NumRenderTargetChanges);
	}
  341. void VulkanRenderAPI::swapBuffers(const SPtr<RenderTargetCore>& target, UINT32 syncMask)
  342. {
  343. THROW_IF_NOT_CORE_THREAD;
  344. target->swapBuffers(syncMask);
  345. // See if any command buffers finished executing
  346. VulkanCommandBufferManager& cbm = static_cast<VulkanCommandBufferManager&>(CommandBufferManager::instance());
  347. for (UINT32 i = 0; i < (UINT32)mDevices.size(); i++)
  348. cbm.refreshStates(i);
  349. BS_INC_RENDER_STAT(NumPresents);
  350. }
	/**
	 * Appends commands from a secondary command buffer to a primary one.
	 * @throws NotImplementedException always — secondary command buffers are not supported yet.
	 */
	void VulkanRenderAPI::addCommands(const SPtr<CommandBuffer>& commandBuffer, const SPtr<CommandBuffer>& secondary)
	{
		BS_EXCEPT(NotImplementedException, "Secondary command buffers not implemented");
	}
  355. void VulkanRenderAPI::executeCommands(const SPtr<CommandBuffer>& commandBuffer, UINT32 syncMask)
  356. {
  357. THROW_IF_NOT_CORE_THREAD;
  358. if (commandBuffer == nullptr)
  359. return;
  360. VulkanCommandBuffer& cmdBuffer = static_cast<VulkanCommandBuffer&>(*commandBuffer);
  361. cmdBuffer.submit(syncMask);
  362. }
  363. void VulkanRenderAPI::convertProjectionMatrix(const Matrix4& matrix, Matrix4& dest)
  364. {
  365. dest = matrix;
  366. // Convert depth range from [-1,1] to [0,1]
  367. dest[2][0] = (dest[2][0] + dest[3][0]) / 2;
  368. dest[2][1] = (dest[2][1] + dest[3][1]) / 2;
  369. dest[2][2] = (dest[2][2] + dest[3][2]) / 2;
  370. dest[2][3] = (dest[2][3] + dest[3][3]) / 2;
  371. }
	/**
	 * Returns static information describing the Vulkan backend, built once on first call.
	 * NOTE(review): the constructor arguments (the three 0.0f/1.0f values, ABGR vertex
	 * color ordering and the four boolean flags) encode offsets/depth-range/capabilities —
	 * confirm their exact meaning against RenderAPIInfo's declaration.
	 */
	const RenderAPIInfo& VulkanRenderAPI::getAPIInfo() const
	{
		static RenderAPIInfo info(0.0f, 0.0f, 0.0f, 1.0f, VET_COLOR_ABGR, false, true, true, true);

		return info;
	}
	/**
	 * Generates a uniform-block layout for the provided parameters, assigning each
	 * parameter its size, stride and offset within the block. All sizes/offsets are
	 * tracked in 4-byte (32-bit word) units — note the divisions by 4 on the byte values
	 * coming from PARAM_SIZES.
	 *
	 * @param name		Name assigned to the generated block.
	 * @param params	Parameter descriptors; their layout fields are filled in in-place.
	 * @return			Block description with the total size (in words) and slot/set of 0.
	 */
	GpuParamBlockDesc VulkanRenderAPI::generateParamBlockDesc(const String& name, Vector<GpuParamDataDesc>& params)
	{
		GpuParamBlockDesc block;
		block.blockSize = 0;
		block.isShareable = true;
		block.name = name;
		block.slot = 0;
		block.set = 0;

		for (auto& param : params)
		{
			const GpuParamDataTypeInfo& typeInfo = GpuParams::PARAM_SIZES.lookup[param.type];

			UINT32 size = typeInfo.size / 4; // Bytes -> words
			UINT32 alignment = typeInfo.alignment / 4; // Bytes -> words

			// Fix alignment if needed
			UINT32 alignOffset = block.blockSize % alignment;
			if (alignOffset != 0)
			{
				UINT32 padding = (alignment - alignOffset);
				block.blockSize += padding;
			}

			if (param.arraySize > 1)
			{
				// Array elements are always padded and aligned to vec4
				// (assumes typeInfo.baseTypeSize is the vec4-aligned element stride in
				// words — TODO confirm against GpuParamDataTypeInfo)

				// Round the element size up to the array stride
				alignOffset = size % typeInfo.baseTypeSize;
				if (alignOffset != 0)
				{
					UINT32 padding = (typeInfo.baseTypeSize - alignOffset);
					size += padding;
				}

				// Align the array's start offset to the same stride
				alignOffset = block.blockSize % typeInfo.baseTypeSize;
				if (alignOffset != 0)
				{
					UINT32 padding = (typeInfo.baseTypeSize - alignOffset);
					block.blockSize += padding;
				}

				param.elementSize = size;
				param.arrayElementStride = size;
				param.cpuMemOffset = block.blockSize;
				param.gpuMemOffset = 0;

				block.blockSize += size * param.arraySize;
			}
			else
			{
				param.elementSize = size;
				param.arrayElementStride = size;
				param.cpuMemOffset = block.blockSize;
				param.gpuMemOffset = 0;

				block.blockSize += size;
			}

			param.paramBlockSlot = 0;
			param.paramBlockSet = 0;
		}

		// Constant buffer size must always be a multiple of 16 bytes
		// (blockSize is in 4-byte words, so a multiple of 4 words == 16 bytes)
		if (block.blockSize % 4 != 0)
			block.blockSize += (4 - (block.blockSize % 4));

		return block;
	}
	/**
	 * Queries the properties, features and limits of every enumerated Vulkan device and
	 * populates one RenderAPICapabilities entry per device in mCurrentCapabilities.
	 */
	void VulkanRenderAPI::initCapabilites()
	{
		mNumDevices = (UINT32)mDevices.size();
		mCurrentCapabilities = bs_newN<RenderAPICapabilities>(mNumDevices);

		UINT32 deviceIdx = 0;
		for (auto& device : mDevices)
		{
			RenderAPICapabilities& caps = mCurrentCapabilities[deviceIdx];

			const VkPhysicalDeviceProperties& deviceProps = device->getDeviceProperties();
			const VkPhysicalDeviceFeatures& deviceFeatures = device->getDeviceFeatures();
			const VkPhysicalDeviceLimits& deviceLimits = deviceProps.limits;

			// Decode the packed Vulkan version number: 10 bits major, 10 bits minor,
			// 12 bits patch (the VK_VERSION_MAJOR/MINOR/PATCH bit layout)
			DriverVersion driverVersion;
			driverVersion.major = ((uint32_t)(deviceProps.apiVersion) >> 22);
			driverVersion.minor = ((uint32_t)(deviceProps.apiVersion) >> 12) & 0x3ff;
			driverVersion.release = (uint32_t)(deviceProps.apiVersion) & 0xfff;
			driverVersion.build = 0;

			caps.setDriverVersion(driverVersion);
			caps.setDeviceName(deviceProps.deviceName);

			// Determine vendor from the PCI vendor ID
			switch (deviceProps.vendorID)
			{
			case 0x10DE: // NVIDIA
				caps.setVendor(GPU_NVIDIA);
				break;
			case 0x1002: // AMD
				caps.setVendor(GPU_AMD);
				break;
			case 0x163C: // Intel (secondary ID)
			case 0x8086: // Intel
				caps.setVendor(GPU_INTEL);
				break;
			default:
				caps.setVendor(GPU_UNKNOWN);
				break;
			};

			caps.setRenderAPIName(getName());

			// Compressed texture formats are optional in Vulkan; expose whatever the device reports
			if(deviceFeatures.textureCompressionBC)
				caps.setCapability(RSC_TEXTURE_COMPRESSION_BC);

			if (deviceFeatures.textureCompressionETC2)
				caps.setCapability(RSC_TEXTURE_COMPRESSION_ETC2);

			if (deviceFeatures.textureCompressionASTC_LDR)
				caps.setCapability(RSC_TEXTURE_COMPRESSION_ASTC);

			caps.setMaxBoundVertexBuffers(deviceLimits.maxVertexInputBindings);
			caps.setNumMultiRenderTargets(deviceLimits.maxColorAttachments);

			// Compute is mandatory in Vulkan
			caps.setCapability(RSC_COMPUTE_PROGRAM);

			// DX11-style shader model 5.0 profile names are registered as accepted aliases
			caps.addShaderProfile("ps_5_0");
			caps.addShaderProfile("vs_5_0");
			caps.addShaderProfile("cs_5_0");

			caps.addGpuProgramProfile(GPP_FS_5_0, "ps_5_0");
			caps.addGpuProgramProfile(GPP_VS_5_0, "vs_5_0");
			caps.addGpuProgramProfile(GPP_CS_5_0, "cs_5_0");

			// Vulkan limits are per-stage, so the same limit applies to each program type
			caps.setNumTextureUnits(GPT_FRAGMENT_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);
			caps.setNumTextureUnits(GPT_VERTEX_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);
			caps.setNumTextureUnits(GPT_COMPUTE_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);

			caps.setNumGpuParamBlockBuffers(GPT_FRAGMENT_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);
			caps.setNumGpuParamBlockBuffers(GPT_VERTEX_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);
			caps.setNumGpuParamBlockBuffers(GPT_COMPUTE_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);

			caps.setNumLoadStoreTextureUnits(GPT_FRAGMENT_PROGRAM, deviceLimits.maxPerStageDescriptorStorageImages);
			caps.setNumLoadStoreTextureUnits(GPT_COMPUTE_PROGRAM, deviceLimits.maxPerStageDescriptorStorageImages);

			// Geometry and tessellation are optional device features
			if(deviceFeatures.geometryShader)
			{
				caps.setCapability(RSC_GEOMETRY_PROGRAM);
				caps.addShaderProfile("gs_5_0");
				caps.addGpuProgramProfile(GPP_GS_5_0, "gs_5_0");
				caps.setNumTextureUnits(GPT_GEOMETRY_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);
				caps.setNumGpuParamBlockBuffers(GPT_GEOMETRY_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);
				caps.setGeometryProgramNumOutputVertices(deviceLimits.maxGeometryOutputVertices);
			}

			if (deviceFeatures.tessellationShader)
			{
				caps.setCapability(RSC_TESSELLATION_PROGRAM);
				caps.addShaderProfile("hs_5_0");
				caps.addShaderProfile("ds_5_0");
				caps.addGpuProgramProfile(GPP_HS_5_0, "hs_5_0");
				caps.addGpuProgramProfile(GPP_DS_5_0, "ds_5_0");
				caps.setNumTextureUnits(GPT_HULL_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);
				caps.setNumTextureUnits(GPT_DOMAIN_PROGRAM, deviceLimits.maxPerStageDescriptorSampledImages);
				caps.setNumGpuParamBlockBuffers(GPT_HULL_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);
				caps.setNumGpuParamBlockBuffers(GPT_DOMAIN_PROGRAM, deviceLimits.maxPerStageDescriptorUniformBuffers);
			}

			// Combined totals sum the per-stage values set above (unsupported stages contribute 0)
			caps.setNumCombinedTextureUnits(caps.getNumTextureUnits(GPT_FRAGMENT_PROGRAM)
				+ caps.getNumTextureUnits(GPT_VERTEX_PROGRAM) + caps.getNumTextureUnits(GPT_GEOMETRY_PROGRAM)
				+ caps.getNumTextureUnits(GPT_HULL_PROGRAM) + caps.getNumTextureUnits(GPT_DOMAIN_PROGRAM)
				+ caps.getNumTextureUnits(GPT_COMPUTE_PROGRAM));

			caps.setNumCombinedGpuParamBlockBuffers(caps.getNumGpuParamBlockBuffers(GPT_FRAGMENT_PROGRAM)
				+ caps.getNumGpuParamBlockBuffers(GPT_VERTEX_PROGRAM) + caps.getNumGpuParamBlockBuffers(GPT_GEOMETRY_PROGRAM)
				+ caps.getNumGpuParamBlockBuffers(GPT_HULL_PROGRAM) + caps.getNumGpuParamBlockBuffers(GPT_DOMAIN_PROGRAM)
				+ caps.getNumGpuParamBlockBuffers(GPT_COMPUTE_PROGRAM));

			caps.setNumCombinedLoadStoreTextureUnits(caps.getNumLoadStoreTextureUnits(GPT_FRAGMENT_PROGRAM)
				+ caps.getNumLoadStoreTextureUnits(GPT_COMPUTE_PROGRAM));

			caps.addShaderProfile("glsl");

			deviceIdx++;
		}
	}
  528. VulkanCommandBuffer* VulkanRenderAPI::getCB(const SPtr<CommandBuffer>& buffer)
  529. {
  530. if (buffer != nullptr)
  531. return static_cast<VulkanCommandBuffer*>(buffer.get());
  532. return static_cast<VulkanCommandBuffer*>(mMainCommandBuffer.get());
  533. }
  534. }