BsVulkanUtility.cpp

//********************************** Banshee Engine (www.banshee3d.com) **************************************************//
//**************** Copyright (c) 2016 Marko Pintera ([email protected]). All rights reserved. **********************//
#include "BsVulkanUtility.h"
#include "BsVulkanRenderAPI.h"
#include "BsVulkanDevice.h"
#include "Error/BsException.h"
#include "RenderAPI/BsGpuParams.h"

namespace bs { namespace ct
{
    PixelFormat VulkanUtility::getClosestSupportedPixelFormat(VulkanDevice& device, PixelFormat format, TextureType texType,
        int usage, bool optimalTiling, bool hwGamma)
    {
        // Check for any obvious issues first
        PixelUtil::checkFormat(format, texType, usage);

        // Check actual device for format support
        VkFormatFeatureFlags wantedFeatureFlags = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT;
        if ((usage & TU_RENDERTARGET) != 0)
            wantedFeatureFlags |= VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT;

        if ((usage & TU_DEPTHSTENCIL) != 0)
            wantedFeatureFlags |= VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT;

        if ((usage & TU_LOADSTORE) != 0)
            wantedFeatureFlags |= VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT;

        VkFormatProperties props;
        auto isSupported = [&](VkFormat vkFmt)
        {
            vkGetPhysicalDeviceFormatProperties(device.getPhysical(), vkFmt, &props);
            VkFormatFeatureFlags featureFlags = optimalTiling ? props.optimalTilingFeatures : props.linearTilingFeatures;

            return (featureFlags & wantedFeatureFlags) != 0;
        };

        VkFormat vkFormat = getPixelFormat(format, hwGamma);
        if(!isSupported(vkFormat))
        {
            if ((usage & TU_DEPTHSTENCIL) != 0)
            {
                bool hasStencil = format == PF_D24S8 || format == PF_D32_S8X24;

                // Spec guarantees at least one depth-only, and one depth-stencil format to be supported
                if(hasStencil)
                {
                    if (isSupported(VK_FORMAT_D32_SFLOAT_S8_UINT))
                        format = PF_D32_S8X24;
                    else
                        format = PF_D24S8;

                    // We ignore 8-bit stencil-only, and 16/8 depth/stencil combo buffers as the engine doesn't expose them,
                    // and the spec guarantees one of the above must be implemented.
                }
                else
                {
                    // The only format that could have failed is 32-bit depth, so we must use the alternative 16-bit.
                    // Spec guarantees it is always supported.
                    format = PF_D16;
                }
            }
            else
            {
                int bitDepths[4];
                PixelUtil::getBitDepths(format, bitDepths);

                if (bitDepths[0] == 16) // 16-bit format, fall back to 4-channel 16-bit, guaranteed to be supported
                    format = PF_RGBA16F;
                else if(format == PF_BC6H) // Fall back to uncompressed alternative
                    format = PF_RGBA16F;
                else // Must be an 8-bit per channel format, a compressed format or some other uneven format
                    format = PF_RGBA8;
            }
        }

        return format;
    }
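
    // Illustrative usage sketch (hypothetical caller code; assumes a VulkanDevice instance named
    // "device" and the engine's TEX_TYPE_2D texture type are in scope):
    //
    //   PixelFormat requested = PF_RGB8;
    //   PixelFormat actual = VulkanUtility::getClosestSupportedPixelFormat(
    //       device, requested, TEX_TYPE_2D, TU_RENDERTARGET, /*optimalTiling*/ true, /*hwGamma*/ false);
    //   // On devices that don't support 3-channel 8-bit render targets this falls back to PF_RGBA8.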

    VkFormat VulkanUtility::getPixelFormat(PixelFormat format, bool sRGB)
    {
        switch (format)
        {
        case PF_R8:
            if (sRGB)
                return VK_FORMAT_R8_SRGB;
            return VK_FORMAT_R8_UNORM;
        case PF_RG8:
            if (sRGB)
                return VK_FORMAT_R8G8_SRGB;
            return VK_FORMAT_R8G8_UNORM;
        case PF_RGB8:
            if (sRGB)
                return VK_FORMAT_R8G8B8_SRGB;
            return VK_FORMAT_R8G8B8_UNORM;
        case PF_RGBA8:
            if (sRGB)
                return VK_FORMAT_R8G8B8A8_SRGB;
            return VK_FORMAT_R8G8B8A8_UNORM;
        case PF_BGRA8:
            if (sRGB)
                return VK_FORMAT_B8G8R8A8_SRGB;
            return VK_FORMAT_B8G8R8A8_UNORM;
        case PF_R8I:
            return VK_FORMAT_R8_SINT;
        case PF_RG8I:
            return VK_FORMAT_R8G8_SINT;
        case PF_RGBA8I:
            return VK_FORMAT_R8G8B8A8_SINT;
        case PF_R8U:
            return VK_FORMAT_R8_UINT;
        case PF_RG8U:
            return VK_FORMAT_R8G8_UINT;
        case PF_RGBA8U:
            return VK_FORMAT_R8G8B8A8_UINT;
        case PF_R8S:
            return VK_FORMAT_R8_SNORM;
        case PF_RG8S:
            return VK_FORMAT_R8G8_SNORM;
        case PF_RGBA8S:
            return VK_FORMAT_R8G8B8A8_SNORM;
        case PF_R16F:
            return VK_FORMAT_R16_SFLOAT;
        case PF_RG16F:
            return VK_FORMAT_R16G16_SFLOAT;
        case PF_RGBA16F:
            return VK_FORMAT_R16G16B16A16_SFLOAT;
        case PF_R32F:
            return VK_FORMAT_R32_SFLOAT;
        case PF_RG32F:
            return VK_FORMAT_R32G32_SFLOAT;
        case PF_RGB32F:
            return VK_FORMAT_R32G32B32_SFLOAT;
        case PF_RGBA32F:
            return VK_FORMAT_R32G32B32A32_SFLOAT;
        case PF_R16I:
            return VK_FORMAT_R16_SINT;
        case PF_RG16I:
            return VK_FORMAT_R16G16_SINT;
        case PF_RGBA16I:
            return VK_FORMAT_R16G16B16A16_SINT;
        case PF_R16U:
            return VK_FORMAT_R16_UINT;
        case PF_RG16U:
            return VK_FORMAT_R16G16_UINT;
        case PF_RGBA16U:
            return VK_FORMAT_R16G16B16A16_UINT;
        case PF_R32I:
            return VK_FORMAT_R32_SINT;
        case PF_RG32I:
            return VK_FORMAT_R32G32_SINT;
        case PF_RGB32I:
            return VK_FORMAT_R32G32B32_SINT;
        case PF_RGBA32I:
            return VK_FORMAT_R32G32B32A32_SINT;
        case PF_R32U:
            return VK_FORMAT_R32_UINT;
        case PF_RG32U:
            return VK_FORMAT_R32G32_UINT;
        case PF_RGB32U:
            return VK_FORMAT_R32G32B32_UINT;
        case PF_RGBA32U:
            return VK_FORMAT_R32G32B32A32_UINT;
        case PF_R16S:
            return VK_FORMAT_R16_SNORM;
        case PF_RG16S:
            return VK_FORMAT_R16G16_SNORM;
        case PF_RGBA16S:
            return VK_FORMAT_R16G16B16A16_SNORM;
        case PF_R16:
            return VK_FORMAT_R16_UNORM;
        case PF_RG16:
            return VK_FORMAT_R16G16_UNORM;
        case PF_RGBA16:
            return VK_FORMAT_R16G16B16A16_UNORM;
        case PF_BC1:
        case PF_BC1a:
            if (sRGB)
                return VK_FORMAT_BC1_RGB_SRGB_BLOCK;
            return VK_FORMAT_BC1_RGB_UNORM_BLOCK;
        case PF_BC2:
            if (sRGB)
                return VK_FORMAT_BC2_SRGB_BLOCK;
            return VK_FORMAT_BC2_UNORM_BLOCK;
        case PF_BC3:
            if (sRGB)
                return VK_FORMAT_BC3_SRGB_BLOCK;
            return VK_FORMAT_BC3_UNORM_BLOCK;
        case PF_BC4:
            return VK_FORMAT_BC4_SNORM_BLOCK;
        case PF_BC5:
            return VK_FORMAT_BC5_UNORM_BLOCK;
        case PF_BC6H:
            return VK_FORMAT_BC6H_SFLOAT_BLOCK;
        case PF_BC7:
            if (sRGB)
                return VK_FORMAT_BC7_SRGB_BLOCK;
            return VK_FORMAT_BC7_UNORM_BLOCK;
        case PF_D32_S8X24:
            return VK_FORMAT_D32_SFLOAT_S8_UINT;
        case PF_D24S8:
            return VK_FORMAT_D24_UNORM_S8_UINT;
        case PF_D32:
            return VK_FORMAT_D32_SFLOAT;
        case PF_D16:
            return VK_FORMAT_D16_UNORM;
        case PF_RG11B10F:
            return VK_FORMAT_B10G11R11_UFLOAT_PACK32;
        case PF_RGB10A2:
            return VK_FORMAT_A2B10G10R10_UNORM_PACK32;
        case PF_UNKNOWN:
        default:
            return VK_FORMAT_UNDEFINED;
        }
    }

    VkFormat VulkanUtility::getBufferFormat(GpuBufferFormat format)
    {
        static bool lookupInitialized = false;

        static VkFormat lookup[BF_COUNT];
        if (!lookupInitialized)
        {
            lookup[BF_16X1F] = VK_FORMAT_R16_SFLOAT;
            lookup[BF_16X2F] = VK_FORMAT_R16G16_SFLOAT;
            lookup[BF_16X4F] = VK_FORMAT_R16G16B16A16_SFLOAT;
            lookup[BF_32X1F] = VK_FORMAT_R32_SFLOAT;
            lookup[BF_32X2F] = VK_FORMAT_R32G32_SFLOAT;
            lookup[BF_32X3F] = VK_FORMAT_R32G32B32_SFLOAT;
            lookup[BF_32X4F] = VK_FORMAT_R32G32B32A32_SFLOAT;
            lookup[BF_8X1] = VK_FORMAT_R8_UNORM;
            lookup[BF_8X2] = VK_FORMAT_R8G8_UNORM;
            lookup[BF_8X4] = VK_FORMAT_R8G8B8A8_UNORM;
            lookup[BF_16X1] = VK_FORMAT_R16_UNORM;
            lookup[BF_16X2] = VK_FORMAT_R16G16_UNORM;
            lookup[BF_16X4] = VK_FORMAT_R16G16B16A16_UNORM;
            lookup[BF_8X1S] = VK_FORMAT_R8_SINT;
            lookup[BF_8X2S] = VK_FORMAT_R8G8_SINT;
            lookup[BF_8X4S] = VK_FORMAT_R8G8B8A8_SINT;
            lookup[BF_16X1S] = VK_FORMAT_R16_SINT;
            lookup[BF_16X2S] = VK_FORMAT_R16G16_SINT;
            lookup[BF_16X4S] = VK_FORMAT_R16G16B16A16_SINT;
            lookup[BF_32X1S] = VK_FORMAT_R32_SINT;
            lookup[BF_32X2S] = VK_FORMAT_R32G32_SINT;
            lookup[BF_32X3S] = VK_FORMAT_R32G32B32_SINT;
            lookup[BF_32X4S] = VK_FORMAT_R32G32B32A32_SINT;
            lookup[BF_8X1U] = VK_FORMAT_R8_UINT;
            lookup[BF_8X2U] = VK_FORMAT_R8G8_UINT;
            lookup[BF_8X4U] = VK_FORMAT_R8G8B8A8_UINT;
            lookup[BF_16X1U] = VK_FORMAT_R16_UINT;
            lookup[BF_16X2U] = VK_FORMAT_R16G16_UINT;
            lookup[BF_16X4U] = VK_FORMAT_R16G16B16A16_UINT;
            lookup[BF_32X1U] = VK_FORMAT_R32_UINT;
            lookup[BF_32X2U] = VK_FORMAT_R32G32_UINT;
            lookup[BF_32X3U] = VK_FORMAT_R32G32B32_UINT;
            lookup[BF_32X4U] = VK_FORMAT_R32G32B32A32_UINT;

            lookupInitialized = true;
        }

        if (format >= BF_COUNT)
            return VK_FORMAT_UNDEFINED;

        return lookup[(UINT32)format];
    }

    VkFormat VulkanUtility::getVertexType(VertexElementType type)
    {
        static bool lookupInitialized = false;

        static VkFormat lookup[VET_COUNT];
        if (!lookupInitialized)
        {
            lookup[VET_COLOR] = VK_FORMAT_R8G8B8A8_UNORM;
            lookup[VET_COLOR_ABGR] = VK_FORMAT_R8G8B8A8_UNORM;
            lookup[VET_COLOR_ARGB] = VK_FORMAT_R8G8B8A8_UNORM;
            lookup[VET_UBYTE4_NORM] = VK_FORMAT_R8G8B8A8_UNORM;
            lookup[VET_FLOAT1] = VK_FORMAT_R32_SFLOAT;
            lookup[VET_FLOAT2] = VK_FORMAT_R32G32_SFLOAT;
            lookup[VET_FLOAT3] = VK_FORMAT_R32G32B32_SFLOAT;
            lookup[VET_FLOAT4] = VK_FORMAT_R32G32B32A32_SFLOAT;
            lookup[VET_USHORT1] = VK_FORMAT_R16_UINT;
            lookup[VET_USHORT2] = VK_FORMAT_R16G16_UINT;
            lookup[VET_USHORT4] = VK_FORMAT_R16G16B16A16_UINT;
            lookup[VET_SHORT1] = VK_FORMAT_R16_SINT;
            lookup[VET_SHORT2] = VK_FORMAT_R16G16_SINT;
            lookup[VET_SHORT4] = VK_FORMAT_R16G16B16A16_SINT;
            lookup[VET_UINT1] = VK_FORMAT_R32_UINT;
            lookup[VET_UINT2] = VK_FORMAT_R32G32_UINT;
            lookup[VET_UINT3] = VK_FORMAT_R32G32B32_UINT;
            lookup[VET_UINT4] = VK_FORMAT_R32G32B32A32_UINT;
            lookup[VET_INT1] = VK_FORMAT_R32_SINT;
            lookup[VET_INT2] = VK_FORMAT_R32G32_SINT;
            lookup[VET_INT3] = VK_FORMAT_R32G32B32_SINT;
            lookup[VET_INT4] = VK_FORMAT_R32G32B32A32_SINT;
            lookup[VET_UBYTE4] = VK_FORMAT_R8G8B8A8_UINT;

            lookupInitialized = true;
        }

        if (type >= VET_COUNT)
            return VK_FORMAT_UNDEFINED;

        return lookup[(UINT32)type];
    }

    VkSampleCountFlagBits VulkanUtility::getSampleFlags(UINT32 numSamples)
    {
        switch(numSamples)
        {
        case 0:
        case 1:
            return VK_SAMPLE_COUNT_1_BIT;
        case 2:
            return VK_SAMPLE_COUNT_2_BIT;
        case 4:
            return VK_SAMPLE_COUNT_4_BIT;
        case 8:
            return VK_SAMPLE_COUNT_8_BIT;
        case 16:
            return VK_SAMPLE_COUNT_16_BIT;
        case 32:
            return VK_SAMPLE_COUNT_32_BIT;
        case 64:
            return VK_SAMPLE_COUNT_64_BIT;
        }

        return VK_SAMPLE_COUNT_1_BIT;
    }

    VkShaderStageFlagBits VulkanUtility::getShaderStage(GpuProgramType type)
    {
        switch(type)
        {
        case GPT_FRAGMENT_PROGRAM:
            return VK_SHADER_STAGE_FRAGMENT_BIT;
        case GPT_HULL_PROGRAM:
            return VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT;
        case GPT_DOMAIN_PROGRAM:
            return VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
        case GPT_GEOMETRY_PROGRAM:
            return VK_SHADER_STAGE_GEOMETRY_BIT;
        case GPT_VERTEX_PROGRAM:
            return VK_SHADER_STAGE_VERTEX_BIT;
        case GPT_COMPUTE_PROGRAM:
            return VK_SHADER_STAGE_COMPUTE_BIT;
        default:
            break;
        }

        // Unsupported type
        return VK_SHADER_STAGE_VERTEX_BIT;
    }

    VkSamplerAddressMode VulkanUtility::getAddressingMode(TextureAddressingMode mode)
    {
        switch (mode)
        {
        case TAM_WRAP:
            return VK_SAMPLER_ADDRESS_MODE_REPEAT;
        case TAM_MIRROR:
            return VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT;
        case TAM_CLAMP:
            return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
        case TAM_BORDER:
            return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
        }

        return VK_SAMPLER_ADDRESS_MODE_REPEAT;
    }

    VkBlendFactor VulkanUtility::getBlendFactor(BlendFactor factor)
    {
        switch (factor)
        {
        case BF_ONE:
            return VK_BLEND_FACTOR_ONE;
        case BF_ZERO:
            return VK_BLEND_FACTOR_ZERO;
        case BF_DEST_COLOR:
            return VK_BLEND_FACTOR_DST_COLOR;
        case BF_SOURCE_COLOR:
            return VK_BLEND_FACTOR_SRC_COLOR;
        case BF_INV_DEST_COLOR:
            return VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR;
        case BF_INV_SOURCE_COLOR:
            return VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR;
        case BF_DEST_ALPHA:
            return VK_BLEND_FACTOR_DST_ALPHA;
        case BF_SOURCE_ALPHA:
            return VK_BLEND_FACTOR_SRC_ALPHA;
        case BF_INV_DEST_ALPHA:
            return VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA;
        case BF_INV_SOURCE_ALPHA:
            return VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
        }

        // Unsupported type
        return VK_BLEND_FACTOR_ZERO;
    }

    VkBlendOp VulkanUtility::getBlendOp(BlendOperation op)
    {
        switch (op)
        {
        case BO_ADD:
            return VK_BLEND_OP_ADD;
        case BO_SUBTRACT:
            return VK_BLEND_OP_SUBTRACT;
        case BO_REVERSE_SUBTRACT:
            return VK_BLEND_OP_REVERSE_SUBTRACT;
        case BO_MIN:
            return VK_BLEND_OP_MIN;
        case BO_MAX:
            return VK_BLEND_OP_MAX;
        }

        // Unsupported type
        return VK_BLEND_OP_ADD;
    }

    VkCompareOp VulkanUtility::getCompareOp(CompareFunction op)
    {
        switch (op)
        {
        case CMPF_ALWAYS_FAIL:
            return VK_COMPARE_OP_NEVER;
        case CMPF_ALWAYS_PASS:
            return VK_COMPARE_OP_ALWAYS;
        case CMPF_LESS:
            return VK_COMPARE_OP_LESS;
        case CMPF_LESS_EQUAL:
            return VK_COMPARE_OP_LESS_OR_EQUAL;
        case CMPF_EQUAL:
            return VK_COMPARE_OP_EQUAL;
        case CMPF_NOT_EQUAL:
            return VK_COMPARE_OP_NOT_EQUAL;
        case CMPF_GREATER_EQUAL:
            return VK_COMPARE_OP_GREATER_OR_EQUAL;
        case CMPF_GREATER:
            return VK_COMPARE_OP_GREATER;
        }

        // Unsupported type
        return VK_COMPARE_OP_ALWAYS;
    }

    VkCullModeFlagBits VulkanUtility::getCullMode(CullingMode mode)
    {
        switch (mode)
        {
        case CULL_NONE:
            return VK_CULL_MODE_NONE;
        case CULL_CLOCKWISE:
            return VK_CULL_MODE_FRONT_BIT;
        case CULL_COUNTERCLOCKWISE:
            return VK_CULL_MODE_BACK_BIT;
        }

        // Unsupported type
        return VK_CULL_MODE_NONE;
    }

    VkPolygonMode VulkanUtility::getPolygonMode(PolygonMode mode)
    {
        switch (mode)
        {
        case PM_WIREFRAME:
            return VK_POLYGON_MODE_LINE;
        case PM_SOLID:
            return VK_POLYGON_MODE_FILL;
        }

        return VK_POLYGON_MODE_FILL;
    }

    VkStencilOp VulkanUtility::getStencilOp(StencilOperation op)
    {
        switch (op)
        {
        case SOP_KEEP:
            return VK_STENCIL_OP_KEEP;
        case SOP_ZERO:
            return VK_STENCIL_OP_ZERO;
        case SOP_REPLACE:
            return VK_STENCIL_OP_REPLACE;
        case SOP_INCREMENT:
            return VK_STENCIL_OP_INCREMENT_AND_CLAMP;
        case SOP_DECREMENT:
            return VK_STENCIL_OP_DECREMENT_AND_CLAMP;
        case SOP_INCREMENT_WRAP:
            return VK_STENCIL_OP_INCREMENT_AND_WRAP;
        case SOP_DECREMENT_WRAP:
            return VK_STENCIL_OP_DECREMENT_AND_WRAP;
        case SOP_INVERT:
            return VK_STENCIL_OP_INVERT;
        }

        // Unsupported type
        return VK_STENCIL_OP_KEEP;
    }

    VkIndexType VulkanUtility::getIndexType(IndexType op)
    {
        switch(op)
        {
        case IT_16BIT:
            return VK_INDEX_TYPE_UINT16;
        case IT_32BIT:
            return VK_INDEX_TYPE_UINT32;
        }

        // Unsupported type
        return VK_INDEX_TYPE_UINT32;
    }

    VkPrimitiveTopology VulkanUtility::getDrawOp(DrawOperationType op)
    {
        switch (op)
        {
        case DOT_POINT_LIST:
            return VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
        case DOT_LINE_LIST:
            return VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
        case DOT_LINE_STRIP:
            return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
        case DOT_TRIANGLE_LIST:
            return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
        case DOT_TRIANGLE_STRIP:
            return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
        case DOT_TRIANGLE_FAN:
            return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN;
        }

        // Unsupported type
        return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
    }

    VkFilter VulkanUtility::getFilter(FilterOptions filter)
    {
        switch(filter)
        {
        case FO_LINEAR:
        case FO_ANISOTROPIC:
            return VK_FILTER_LINEAR;
        case FO_POINT:
        case FO_NONE:
            return VK_FILTER_NEAREST;
        }

        // Unsupported type
        return VK_FILTER_LINEAR;
    }

    VkSamplerMipmapMode VulkanUtility::getMipFilter(FilterOptions filter)
    {
        switch (filter)
        {
        case FO_LINEAR:
        case FO_ANISOTROPIC:
            return VK_SAMPLER_MIPMAP_MODE_LINEAR;
        case FO_POINT:
        case FO_NONE:
            return VK_SAMPLER_MIPMAP_MODE_NEAREST;
        }

        // Unsupported type
        return VK_SAMPLER_MIPMAP_MODE_LINEAR;
    }

    void VulkanUtility::getDevices(const VulkanRenderAPI& rapi, GpuDeviceFlags flags, VulkanDevice*(&devices)[BS_MAX_DEVICES])
    {
        UINT32 numDevices = rapi._getNumDevices();

        for (UINT32 i = 0; i < BS_MAX_DEVICES; i++)
        {
            if(i >= numDevices)
            {
                devices[i] = nullptr;
                continue;
            }

            VulkanDevice* device = rapi._getDevice(i).get();

            if (isDeviceIdxSet(rapi, i, flags))
                devices[i] = device;
            else
                devices[i] = nullptr;
        }
    }

    bool VulkanUtility::isDeviceIdxSet(const VulkanRenderAPI& rapi, UINT32 idx, GpuDeviceFlags flags)
    {
        VulkanDevice* device = rapi._getDevice(idx).get();

        return ((flags & (1 << idx)) != 0 || (flags == GDF_DEFAULT && device->isPrimary()));
    }

    void cutHorizontal(const VkImageSubresourceRange& toCut, const VkImageSubresourceRange& cutWith,
        VkImageSubresourceRange* output, UINT32& numAreas)
    {
        numAreas = 0;

        INT32 leftCut = cutWith.baseArrayLayer - toCut.baseArrayLayer;
        INT32 rightCut = (cutWith.baseArrayLayer + cutWith.layerCount) - toCut.baseArrayLayer;

        if (leftCut > 0 && leftCut < (INT32)(toCut.baseArrayLayer + toCut.layerCount))
        {
            output[numAreas] = toCut;

            VkImageSubresourceRange& range = output[numAreas];
            range.baseArrayLayer = toCut.baseArrayLayer;
            range.layerCount = leftCut;

            numAreas++;
        }

        if (rightCut > 0 && rightCut < (INT32)toCut.layerCount)
        {
            output[numAreas] = toCut;

            VkImageSubresourceRange& range = output[numAreas];
            range.baseArrayLayer = toCut.baseArrayLayer + rightCut;
            range.layerCount = toCut.layerCount - rightCut;

            numAreas++;
        }

        // If we made both left and right cuts, this means we need a middle one as well
        if (numAreas == 2)
        {
            output[numAreas] = toCut;

            VkImageSubresourceRange& range = output[numAreas];
            range.baseArrayLayer = toCut.baseArrayLayer + leftCut;
            range.layerCount = toCut.layerCount - (toCut.layerCount - rightCut) - leftCut;

            numAreas++;
        }

        // Nothing to cut
        if (numAreas == 0)
        {
            output[numAreas] = toCut;
            numAreas++;
        }
    }

    void cutVertical(const VkImageSubresourceRange& toCut, const VkImageSubresourceRange& cutWith,
        VkImageSubresourceRange* output, UINT32& numAreas)
    {
        numAreas = 0;

        INT32 topCut = cutWith.baseMipLevel - toCut.baseMipLevel;
        INT32 bottomCut = (cutWith.baseMipLevel + cutWith.levelCount) - toCut.baseMipLevel;

        if (topCut > 0 && topCut < (INT32)(toCut.baseMipLevel + toCut.levelCount))
        {
            output[numAreas] = toCut;

            VkImageSubresourceRange& range = output[numAreas];
            range.baseMipLevel = toCut.baseMipLevel;
            range.levelCount = topCut;

            numAreas++;
        }

        if (bottomCut > 0 && bottomCut < (INT32)toCut.levelCount)
        {
            output[numAreas] = toCut;

            VkImageSubresourceRange& range = output[numAreas];
            range.baseMipLevel = toCut.baseMipLevel + bottomCut;
            range.levelCount = toCut.levelCount - bottomCut;

            numAreas++;
        }

        // If we made both top and bottom cuts, this means we need a middle one as well
        if (numAreas == 2)
        {
            output[numAreas] = toCut;

            VkImageSubresourceRange& range = output[numAreas];
            range.baseMipLevel = toCut.baseMipLevel + topCut;
            range.levelCount = toCut.levelCount - (toCut.levelCount - bottomCut) - topCut;

            numAreas++;
        }

        // Nothing to cut
        if (numAreas == 0)
        {
            output[numAreas] = toCut;
            numAreas++;
        }
    }

    void VulkanUtility::cutRange(const VkImageSubresourceRange& toCut, const VkImageSubresourceRange& cutWith,
        std::array<VkImageSubresourceRange, 5>& output, UINT32& numAreas)
    {
        numAreas = 0;

        // Cut horizontally
        UINT32 numHorzCuts = 0;
        std::array<VkImageSubresourceRange, 3> horzCuts;
        cutHorizontal(toCut, cutWith, horzCuts.data(), numHorzCuts);

        // Cut vertically
        for (UINT32 i = 0; i < numHorzCuts; i++)
        {
            VkImageSubresourceRange& range = horzCuts[i];

            if (range.baseArrayLayer >= cutWith.baseArrayLayer &&
                (range.baseArrayLayer + range.layerCount) <= (cutWith.baseArrayLayer + cutWith.layerCount))
            {
                UINT32 numVertCuts = 0;
                cutVertical(range, cutWith, output.data() + numAreas, numVertCuts);

                numAreas += numVertCuts;
            }
            else
            {
                output[numAreas] = range;
                numAreas++;
            }
        }

        assert(numAreas <= 5);
    }
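
    // Illustrative sketch (hypothetical values): cutting a 4-layer, 4-mip range with a single-layer,
    // single-mip range in its interior splits it into five pieces, the last of which is exactly the
    // overlapping region.
    //
    //   VkImageSubresourceRange toCut = {};
    //   toCut.baseArrayLayer = 0; toCut.layerCount = 4;
    //   toCut.baseMipLevel = 0;   toCut.levelCount = 4;
    //
    //   VkImageSubresourceRange cutWith = toCut;
    //   cutWith.baseArrayLayer = 1; cutWith.layerCount = 1;
    //   cutWith.baseMipLevel = 1;   cutWith.levelCount = 1;
    //
    //   std::array<VkImageSubresourceRange, 5> pieces;
    //   UINT32 numPieces = 0;
    //   VulkanUtility::cutRange(toCut, cutWith, pieces, numPieces);
    //   // numPieces == 5: layer 0 (all mips), layers 2-3 (all mips),
    //   // layer 1 mip 0, layer 1 mips 2-3, and layer 1 mip 1 (the overlap itself).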

    bool VulkanUtility::rangeOverlaps(const VkImageSubresourceRange& a, const VkImageSubresourceRange& b)
    {
        INT32 aRight = a.baseArrayLayer + (INT32)a.layerCount;
        INT32 bRight = b.baseArrayLayer + (INT32)b.layerCount;

        INT32 aBottom = a.baseMipLevel + (INT32)a.levelCount;
        INT32 bBottom = b.baseMipLevel + (INT32)b.levelCount;

        if ((INT32)a.baseArrayLayer < bRight && aRight > (INT32)b.baseArrayLayer &&
            (INT32)a.baseMipLevel < bBottom && aBottom > (INT32)b.baseMipLevel)
            return true;

        return false;
    }

    UINT32 VulkanUtility::calcInterfaceBlockElementSizeAndOffset(GpuParamDataType type, UINT32 arraySize, UINT32& offset)
    {
        const GpuParamDataTypeInfo& typeInfo = bs::GpuParams::PARAM_SIZES.lookup[type];
        UINT32 size = (typeInfo.baseTypeSize * typeInfo.numColumns * typeInfo.numRows) / 4;
        UINT32 alignment = typeInfo.alignment / 4;

        // Fix alignment if needed
        UINT32 alignOffset = offset % alignment;
        if (alignOffset != 0)
        {
            UINT32 padding = (alignment - alignOffset);
            offset += padding;
        }

        if (arraySize > 1)
        {
            // Array elements are always padded and aligned to vec4
            alignOffset = size % 4;
            if (alignOffset != 0)
            {
                UINT32 padding = (4 - alignOffset);
                size += padding;
            }

            alignOffset = offset % 4;
            if (alignOffset != 0)
            {
                UINT32 padding = (4 - alignOffset);
                offset += padding;
            }

            return size;
        }
        else
            return size;
    }
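
    // Illustrative sketch (hypothetical values): laying out "float a; float3 b;" in an interface
    // block, with the caller advancing the offset by the returned size after each element. Offsets
    // and sizes are in 4-byte words, matching the function above, and this assumes the PARAM_SIZES
    // table gives float3 a 16-byte (4-word) alignment.
    //
    //   UINT32 offset = 0;
    //   UINT32 sizeA = VulkanUtility::calcInterfaceBlockElementSizeAndOffset(GPDT_FLOAT1, 1, offset);
    //   offset += sizeA; // offset stays at 0 before the add, sizeA == 1, offset becomes 1
    //
    //   UINT32 sizeB = VulkanUtility::calcInterfaceBlockElementSizeAndOffset(GPDT_FLOAT3, 1, offset);
    //   offset += sizeB; // offset padded from 1 to 4 for the float3, sizeB == 3, offset becomes 7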
}}