//********************************** Banshee Engine (www.banshee3d.com) **************************************************//
//**************** Copyright (c) 2016 Marko Pintera ([email protected]). All rights reserved. **********************//
#include "BsVulkanUtility.h"
#include "BsVulkanRenderAPI.h"
#include "BsVulkanDevice.h"
#include "Error/BsException.h"

namespace bs { namespace ct
{
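	// Picks the closest format the physical device reports as supported for the requested usage. For example
	// (illustrative), requesting PF_RGB8 for a render target will typically fall back to PF_RGBA8, since
	// three-channel 8-bit color formats are rarely renderable on Vulkan hardware.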
	PixelFormat VulkanUtility::getClosestSupportedPixelFormat(VulkanDevice& device, PixelFormat format, TextureType texType,
		int usage, bool optimalTiling, bool hwGamma)
	{
		// Check for any obvious issues first
		PixelUtil::checkFormat(format, texType, usage);

		// Check actual device for format support
		VkFormatFeatureFlags wantedFeatureFlags = VK_FORMAT_FEATURE_SAMPLED_IMAGE_BIT;
		if ((usage & TU_RENDERTARGET) != 0)
			wantedFeatureFlags |= VK_FORMAT_FEATURE_COLOR_ATTACHMENT_BIT;

		if ((usage & TU_DEPTHSTENCIL) != 0)
			wantedFeatureFlags |= VK_FORMAT_FEATURE_DEPTH_STENCIL_ATTACHMENT_BIT;

		if ((usage & TU_LOADSTORE) != 0)
			wantedFeatureFlags |= VK_FORMAT_FEATURE_STORAGE_IMAGE_BIT;

		VkFormatProperties props;
		auto isSupported = [&](VkFormat vkFmt)
		{
			vkGetPhysicalDeviceFormatProperties(device.getPhysical(), vkFmt, &props);
			VkFormatFeatureFlags featureFlags = optimalTiling ? props.optimalTilingFeatures : props.linearTilingFeatures;

			return (featureFlags & wantedFeatureFlags) != 0;
		};

		VkFormat vkFormat = getPixelFormat(format, hwGamma);
		if(!isSupported(vkFormat))
		{
			if ((usage & TU_DEPTHSTENCIL) != 0)
			{
				bool hasStencil = format == PF_D24S8 || format == PF_D32_S8X24;

				// Spec guarantees at least one depth-only, and one depth-stencil format to be supported
				if(hasStencil)
				{
					if (isSupported(VK_FORMAT_D32_SFLOAT_S8_UINT))
						format = PF_D32_S8X24;
					else
						format = PF_D24S8;

					// We ignore 8-bit stencil-only, and 16/8 depth/stencil combo buffers as the engine doesn't expose
					// them, and the spec guarantees one of the above must be implemented.
				}
				else
				{
					// The only format that could have failed is 32-bit depth, so we must use the alternative 16-bit.
					// Spec guarantees it is always supported.
					format = PF_D16;
				}
			}
			else
			{
				int bitDepths[4];
				PixelUtil::getBitDepths(format, bitDepths);

				if (bitDepths[0] == 16) // 16-bit format, fall back to 4-channel 16-bit, guaranteed to be supported
					format = PF_RGBA16F;
				else if(format == PF_BC6H) // Fall back to uncompressed alternative
					format = PF_RGBA16F;
				else // Must be an 8-bit per channel format, a compressed format or some uneven format
					format = PF_RGBA8;
			}
		}

		return format;
	}
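	// Converts an engine pixel format into the equivalent Vulkan format. When sRGB is requested, the non-linear
	// (_SRGB) variant is returned for formats that have one in the mapping below; unknown or unsupported formats
	// map to VK_FORMAT_UNDEFINED.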
	VkFormat VulkanUtility::getPixelFormat(PixelFormat format, bool sRGB)
	{
		switch (format)
		{
		case PF_R8:
			if(sRGB)
				return VK_FORMAT_R8_SRGB;

			return VK_FORMAT_R8_UNORM;
		case PF_RG8:
			if (sRGB)
				return VK_FORMAT_R8G8_SRGB;

			return VK_FORMAT_R8G8_UNORM;
		case PF_RGB8:
			if (sRGB)
				return VK_FORMAT_R8G8B8_SRGB;

			return VK_FORMAT_R8G8B8_UNORM;
		case PF_RGBA8:
			if (sRGB)
				return VK_FORMAT_R8G8B8A8_SRGB;

			return VK_FORMAT_R8G8B8A8_UNORM;
		case PF_BGRA8:
			if (sRGB)
				return VK_FORMAT_B8G8R8A8_SRGB;

			return VK_FORMAT_B8G8R8A8_UNORM;
		case PF_R8I:
			return VK_FORMAT_R8_SINT;
		case PF_RG8I:
			return VK_FORMAT_R8G8_SINT;
		case PF_RGBA8I:
			return VK_FORMAT_R8G8B8A8_SINT;
		case PF_R8U:
			return VK_FORMAT_R8_UINT;
		case PF_RG8U:
			return VK_FORMAT_R8G8_UINT;
		case PF_RGBA8U:
			return VK_FORMAT_R8G8B8A8_UINT;
		case PF_R8S:
			return VK_FORMAT_R8_SNORM;
		case PF_RG8S:
			return VK_FORMAT_R8G8_SNORM;
		case PF_RGBA8S:
			return VK_FORMAT_R8G8B8A8_SNORM;
		case PF_R16F:
			return VK_FORMAT_R16_SFLOAT;
		case PF_RG16F:
			return VK_FORMAT_R16G16_SFLOAT;
		case PF_RGBA16F:
			return VK_FORMAT_R16G16B16A16_SFLOAT;
		case PF_R32F:
			return VK_FORMAT_R32_SFLOAT;
		case PF_RG32F:
			return VK_FORMAT_R32G32_SFLOAT;
		case PF_RGB32F:
			return VK_FORMAT_R32G32B32_SFLOAT;
		case PF_RGBA32F:
			return VK_FORMAT_R32G32B32A32_SFLOAT;
		case PF_R16I:
			return VK_FORMAT_R16_SINT;
		case PF_RG16I:
			return VK_FORMAT_R16G16_SINT;
		case PF_RGBA16I:
			return VK_FORMAT_R16G16B16A16_SINT;
		case PF_R16U:
			return VK_FORMAT_R16_UINT;
		case PF_RG16U:
			return VK_FORMAT_R16G16_UINT;
		case PF_RGBA16U:
			return VK_FORMAT_R16G16B16A16_UINT;
		case PF_R32I:
			return VK_FORMAT_R32_SINT;
		case PF_RG32I:
			return VK_FORMAT_R32G32_SINT;
		case PF_RGB32I:
			return VK_FORMAT_R32G32B32_SINT;
		case PF_RGBA32I:
			return VK_FORMAT_R32G32B32A32_SINT;
		case PF_R32U:
			return VK_FORMAT_R32_UINT;
		case PF_RG32U:
			return VK_FORMAT_R32G32_UINT;
		case PF_RGB32U:
			return VK_FORMAT_R32G32B32_UINT;
		case PF_RGBA32U:
			return VK_FORMAT_R32G32B32A32_UINT;
		case PF_R16S:
			return VK_FORMAT_R16_SNORM;
		case PF_RG16S:
			return VK_FORMAT_R16G16_SNORM;
		case PF_RGBA16S:
			return VK_FORMAT_R16G16B16A16_SNORM;
		case PF_R16:
			return VK_FORMAT_R16_UNORM;
		case PF_RG16:
			return VK_FORMAT_R16G16_UNORM;
		case PF_RGBA16:
			return VK_FORMAT_R16G16B16A16_UNORM;
		case PF_BC1:
		case PF_BC1a:
			if (sRGB)
				return VK_FORMAT_BC1_RGB_SRGB_BLOCK;

			return VK_FORMAT_BC1_RGB_UNORM_BLOCK;
		case PF_BC2:
			if (sRGB)
				return VK_FORMAT_BC2_SRGB_BLOCK;

			return VK_FORMAT_BC2_UNORM_BLOCK;
		case PF_BC3:
			if (sRGB)
				return VK_FORMAT_BC3_SRGB_BLOCK;

			return VK_FORMAT_BC3_UNORM_BLOCK;
		case PF_BC4:
			return VK_FORMAT_BC4_SNORM_BLOCK;
		case PF_BC5:
			return VK_FORMAT_BC5_UNORM_BLOCK;
		case PF_BC6H:
			return VK_FORMAT_BC6H_SFLOAT_BLOCK;
		case PF_BC7:
			if (sRGB)
				return VK_FORMAT_BC7_SRGB_BLOCK;

			return VK_FORMAT_BC7_UNORM_BLOCK;
		case PF_D32_S8X24:
			return VK_FORMAT_D32_SFLOAT_S8_UINT;
		case PF_D24S8:
			return VK_FORMAT_D24_UNORM_S8_UINT;
		case PF_D32:
			return VK_FORMAT_D32_SFLOAT;
		case PF_D16:
			return VK_FORMAT_D16_UNORM;
		case PF_RG11B10F:
			return VK_FORMAT_B10G11R11_UFLOAT_PACK32;
		case PF_RGB10A2:
			return VK_FORMAT_A2B10G10R10_UNORM_PACK32;
		case PF_UNKNOWN:
		default:
			return VK_FORMAT_UNDEFINED;
		}
	}
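	// Converts an engine GPU buffer format into the equivalent Vulkan format. The lookup table is built lazily on
	// first use and is not synchronized, so the first call is assumed to happen before any concurrent access.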
	VkFormat VulkanUtility::getBufferFormat(GpuBufferFormat format)
	{
		static bool lookupInitialized = false;
		static VkFormat lookup[BF_COUNT];

		if (!lookupInitialized)
		{
			lookup[BF_16X1F] = VK_FORMAT_R16_SFLOAT;
			lookup[BF_16X2F] = VK_FORMAT_R16G16_SFLOAT;
			lookup[BF_16X4F] = VK_FORMAT_R16G16B16A16_SFLOAT;
			lookup[BF_32X1F] = VK_FORMAT_R32_SFLOAT;
			lookup[BF_32X2F] = VK_FORMAT_R32G32_SFLOAT;
			lookup[BF_32X3F] = VK_FORMAT_R32G32B32_SFLOAT;
			lookup[BF_32X4F] = VK_FORMAT_R32G32B32A32_SFLOAT;
			lookup[BF_8X1] = VK_FORMAT_R8_UNORM;
			lookup[BF_8X2] = VK_FORMAT_R8G8_UNORM;
			lookup[BF_8X4] = VK_FORMAT_R8G8B8A8_UNORM;
			lookup[BF_16X1] = VK_FORMAT_R16_UNORM;
			lookup[BF_16X2] = VK_FORMAT_R16G16_UNORM;
			lookup[BF_16X4] = VK_FORMAT_R16G16B16A16_UNORM;
			lookup[BF_8X1S] = VK_FORMAT_R8_SINT;
			lookup[BF_8X2S] = VK_FORMAT_R8G8_SINT;
			lookup[BF_8X4S] = VK_FORMAT_R8G8B8A8_SINT;
			lookup[BF_16X1S] = VK_FORMAT_R16_SINT;
			lookup[BF_16X2S] = VK_FORMAT_R16G16_SINT;
			lookup[BF_16X4S] = VK_FORMAT_R16G16B16A16_SINT;
			lookup[BF_32X1S] = VK_FORMAT_R32_SINT;
			lookup[BF_32X2S] = VK_FORMAT_R32G32_SINT;
			lookup[BF_32X3S] = VK_FORMAT_R32G32B32_SINT;
			lookup[BF_32X4S] = VK_FORMAT_R32G32B32A32_SINT;
			lookup[BF_8X1U] = VK_FORMAT_R8_UINT;
			lookup[BF_8X2U] = VK_FORMAT_R8G8_UINT;
			lookup[BF_8X4U] = VK_FORMAT_R8G8B8A8_UINT;
			lookup[BF_16X1U] = VK_FORMAT_R16_UINT;
			lookup[BF_16X2U] = VK_FORMAT_R16G16_UINT;
			lookup[BF_16X4U] = VK_FORMAT_R16G16B16A16_UINT;
			lookup[BF_32X1U] = VK_FORMAT_R32_UINT;
			lookup[BF_32X2U] = VK_FORMAT_R32G32_UINT;
			lookup[BF_32X3U] = VK_FORMAT_R32G32B32_UINT;
			lookup[BF_32X4U] = VK_FORMAT_R32G32B32A32_UINT;

			lookupInitialized = true;
		}

		if (format >= BF_COUNT)
			return VK_FORMAT_UNDEFINED;

		return lookup[(UINT32)format];
	}
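	// Converts an engine vertex element type into the equivalent Vulkan format, using a lazily built lookup table.
	// Note that all color variants (VET_COLOR, VET_COLOR_ABGR, VET_COLOR_ARGB) resolve to VK_FORMAT_R8G8B8A8_UNORM.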
	VkFormat VulkanUtility::getVertexType(VertexElementType type)
	{
		static bool lookupInitialized = false;
		static VkFormat lookup[VET_COUNT];

		if (!lookupInitialized)
		{
			lookup[VET_COLOR] = VK_FORMAT_R8G8B8A8_UNORM;
			lookup[VET_COLOR_ABGR] = VK_FORMAT_R8G8B8A8_UNORM;
			lookup[VET_COLOR_ARGB] = VK_FORMAT_R8G8B8A8_UNORM;
			lookup[VET_UBYTE4_NORM] = VK_FORMAT_R8G8B8A8_UNORM;
			lookup[VET_FLOAT1] = VK_FORMAT_R32_SFLOAT;
			lookup[VET_FLOAT2] = VK_FORMAT_R32G32_SFLOAT;
			lookup[VET_FLOAT3] = VK_FORMAT_R32G32B32_SFLOAT;
			lookup[VET_FLOAT4] = VK_FORMAT_R32G32B32A32_SFLOAT;
			lookup[VET_USHORT1] = VK_FORMAT_R16_UINT;
			lookup[VET_USHORT2] = VK_FORMAT_R16G16_UINT;
			lookup[VET_USHORT4] = VK_FORMAT_R16G16B16A16_UINT;
			lookup[VET_SHORT1] = VK_FORMAT_R16_SINT;
			lookup[VET_SHORT2] = VK_FORMAT_R16G16_SINT;
			lookup[VET_SHORT4] = VK_FORMAT_R16G16B16A16_SINT;
			lookup[VET_UINT1] = VK_FORMAT_R32_UINT;
			lookup[VET_UINT2] = VK_FORMAT_R32G32_UINT;
			lookup[VET_UINT3] = VK_FORMAT_R32G32B32_UINT;
			lookup[VET_UINT4] = VK_FORMAT_R32G32B32A32_UINT;
			lookup[VET_INT1] = VK_FORMAT_R32_SINT;
			lookup[VET_INT2] = VK_FORMAT_R32G32_SINT;
			lookup[VET_INT3] = VK_FORMAT_R32G32B32_SINT;
			lookup[VET_INT4] = VK_FORMAT_R32G32B32A32_SINT;
			lookup[VET_UBYTE4] = VK_FORMAT_R8G8B8A8_UINT;

			lookupInitialized = true;
		}

		if (type >= VET_COUNT)
			return VK_FORMAT_UNDEFINED;

		return lookup[(UINT32)type];
	}
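	// Translates a sample count into the corresponding Vulkan flag. Counts of zero or one, as well as any
	// unrecognized value, resolve to single-sampling.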
	VkSampleCountFlagBits VulkanUtility::getSampleFlags(UINT32 numSamples)
	{
		switch(numSamples)
		{
		case 0:
		case 1:
			return VK_SAMPLE_COUNT_1_BIT;
		case 2:
			return VK_SAMPLE_COUNT_2_BIT;
		case 4:
			return VK_SAMPLE_COUNT_4_BIT;
		case 8:
			return VK_SAMPLE_COUNT_8_BIT;
		case 16:
			return VK_SAMPLE_COUNT_16_BIT;
		case 32:
			return VK_SAMPLE_COUNT_32_BIT;
		case 64:
			return VK_SAMPLE_COUNT_64_BIT;
		}

		return VK_SAMPLE_COUNT_1_BIT;
	}
	VkShaderStageFlagBits VulkanUtility::getShaderStage(GpuProgramType type)
	{
		switch(type)
		{
		case GPT_FRAGMENT_PROGRAM:
			return VK_SHADER_STAGE_FRAGMENT_BIT;
		case GPT_HULL_PROGRAM:
			return VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT;
		case GPT_DOMAIN_PROGRAM:
			return VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
		case GPT_GEOMETRY_PROGRAM:
			return VK_SHADER_STAGE_GEOMETRY_BIT;
		case GPT_VERTEX_PROGRAM:
			return VK_SHADER_STAGE_VERTEX_BIT;
		case GPT_COMPUTE_PROGRAM:
			return VK_SHADER_STAGE_COMPUTE_BIT;
		}

		// Unsupported type
		return VK_SHADER_STAGE_VERTEX_BIT;
	}
	VkSamplerAddressMode VulkanUtility::getAddressingMode(TextureAddressingMode mode)
	{
		switch (mode)
		{
		case TAM_WRAP:
			return VK_SAMPLER_ADDRESS_MODE_REPEAT;
		case TAM_MIRROR:
			return VK_SAMPLER_ADDRESS_MODE_MIRRORED_REPEAT;
		case TAM_CLAMP:
			return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
		case TAM_BORDER:
			return VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
		}

		return VK_SAMPLER_ADDRESS_MODE_REPEAT;
	}
	VkBlendFactor VulkanUtility::getBlendFactor(BlendFactor factor)
	{
		switch (factor)
		{
		case BF_ONE:
			return VK_BLEND_FACTOR_ONE;
		case BF_ZERO:
			return VK_BLEND_FACTOR_ZERO;
		case BF_DEST_COLOR:
			return VK_BLEND_FACTOR_DST_COLOR;
		case BF_SOURCE_COLOR:
			return VK_BLEND_FACTOR_SRC_COLOR;
		case BF_INV_DEST_COLOR:
			return VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR;
		case BF_INV_SOURCE_COLOR:
			return VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR;
		case BF_DEST_ALPHA:
			return VK_BLEND_FACTOR_DST_ALPHA;
		case BF_SOURCE_ALPHA:
			return VK_BLEND_FACTOR_SRC_ALPHA;
		case BF_INV_DEST_ALPHA:
			return VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA;
		case BF_INV_SOURCE_ALPHA:
			return VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA;
		}

		// Unsupported type
		return VK_BLEND_FACTOR_ZERO;
	}
	VkBlendOp VulkanUtility::getBlendOp(BlendOperation op)
	{
		switch (op)
		{
		case BO_ADD:
			return VK_BLEND_OP_ADD;
		case BO_SUBTRACT:
			return VK_BLEND_OP_SUBTRACT;
		case BO_REVERSE_SUBTRACT:
			return VK_BLEND_OP_REVERSE_SUBTRACT;
		case BO_MIN:
			return VK_BLEND_OP_MIN;
		case BO_MAX:
			return VK_BLEND_OP_MAX;
		}

		// Unsupported type
		return VK_BLEND_OP_ADD;
	}
	VkCompareOp VulkanUtility::getCompareOp(CompareFunction op)
	{
		switch (op)
		{
		case CMPF_ALWAYS_FAIL:
			return VK_COMPARE_OP_NEVER;
		case CMPF_ALWAYS_PASS:
			return VK_COMPARE_OP_ALWAYS;
		case CMPF_LESS:
			return VK_COMPARE_OP_LESS;
		case CMPF_LESS_EQUAL:
			return VK_COMPARE_OP_LESS_OR_EQUAL;
		case CMPF_EQUAL:
			return VK_COMPARE_OP_EQUAL;
		case CMPF_NOT_EQUAL:
			return VK_COMPARE_OP_NOT_EQUAL;
		case CMPF_GREATER_EQUAL:
			return VK_COMPARE_OP_GREATER_OR_EQUAL;
		case CMPF_GREATER:
			return VK_COMPARE_OP_GREATER;
		}

		// Unsupported type
		return VK_COMPARE_OP_ALWAYS;
	}
	VkCullModeFlagBits VulkanUtility::getCullMode(CullingMode mode)
	{
		switch (mode)
		{
		case CULL_NONE:
			return VK_CULL_MODE_NONE;
		case CULL_CLOCKWISE:
			return VK_CULL_MODE_FRONT_BIT;
		case CULL_COUNTERCLOCKWISE:
			return VK_CULL_MODE_BACK_BIT;
		}

		// Unsupported type
		return VK_CULL_MODE_NONE;
	}
	VkPolygonMode VulkanUtility::getPolygonMode(PolygonMode mode)
	{
		switch (mode)
		{
		case PM_WIREFRAME:
			return VK_POLYGON_MODE_LINE;
		case PM_SOLID:
			return VK_POLYGON_MODE_FILL;
		}

		return VK_POLYGON_MODE_FILL;
	}
	VkStencilOp VulkanUtility::getStencilOp(StencilOperation op)
	{
		switch (op)
		{
		case SOP_KEEP:
			return VK_STENCIL_OP_KEEP;
		case SOP_ZERO:
			return VK_STENCIL_OP_ZERO;
		case SOP_REPLACE:
			return VK_STENCIL_OP_REPLACE;
		case SOP_INCREMENT:
			return VK_STENCIL_OP_INCREMENT_AND_CLAMP;
		case SOP_DECREMENT:
			return VK_STENCIL_OP_DECREMENT_AND_CLAMP;
		case SOP_INCREMENT_WRAP:
			return VK_STENCIL_OP_INCREMENT_AND_WRAP;
		case SOP_DECREMENT_WRAP:
			return VK_STENCIL_OP_DECREMENT_AND_WRAP;
		case SOP_INVERT:
			return VK_STENCIL_OP_INVERT;
		}

		// Unsupported type
		return VK_STENCIL_OP_KEEP;
	}
	VkIndexType VulkanUtility::getIndexType(IndexType op)
	{
		switch(op)
		{
		case IT_16BIT:
			return VK_INDEX_TYPE_UINT16;
		case IT_32BIT:
			return VK_INDEX_TYPE_UINT32;
		}

		// Unsupported type
		return VK_INDEX_TYPE_UINT32;
	}
	VkPrimitiveTopology VulkanUtility::getDrawOp(DrawOperationType op)
	{
		switch (op)
		{
		case DOT_POINT_LIST:
			return VK_PRIMITIVE_TOPOLOGY_POINT_LIST;
		case DOT_LINE_LIST:
			return VK_PRIMITIVE_TOPOLOGY_LINE_LIST;
		case DOT_LINE_STRIP:
			return VK_PRIMITIVE_TOPOLOGY_LINE_STRIP;
		case DOT_TRIANGLE_LIST:
			return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
		case DOT_TRIANGLE_STRIP:
			return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP;
		case DOT_TRIANGLE_FAN:
			return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_FAN;
		}

		// Unsupported type
		return VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST;
	}
	VkFilter VulkanUtility::getFilter(FilterOptions filter)
	{
		switch(filter)
		{
		case FO_LINEAR:
		case FO_ANISOTROPIC:
			return VK_FILTER_LINEAR;
		case FO_POINT:
		case FO_NONE:
			return VK_FILTER_NEAREST;
		}

		// Unsupported type
		return VK_FILTER_LINEAR;
	}
	VkSamplerMipmapMode VulkanUtility::getMipFilter(FilterOptions filter)
	{
		switch (filter)
		{
		case FO_LINEAR:
		case FO_ANISOTROPIC:
			return VK_SAMPLER_MIPMAP_MODE_LINEAR;
		case FO_POINT:
		case FO_NONE:
			return VK_SAMPLER_MIPMAP_MODE_NEAREST;
		}

		// Unsupported type
		return VK_SAMPLER_MIPMAP_MODE_LINEAR;
	}
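	// Fills the output array with one entry per potential device slot. Slots whose index exceeds the number of
	// available devices, or that are not selected by the provided flags, are set to null.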
	void VulkanUtility::getDevices(const VulkanRenderAPI& rapi, GpuDeviceFlags flags, VulkanDevice* (&devices)[BS_MAX_DEVICES])
	{
		UINT32 numDevices = rapi._getNumDevices();

		for (UINT32 i = 0; i < BS_MAX_DEVICES; i++)
		{
			if(i >= numDevices)
			{
				devices[i] = nullptr;
				continue;
			}

			VulkanDevice* device = rapi._getDevice(i).get();

			if (isDeviceIdxSet(rapi, i, flags))
				devices[i] = device;
			else
				devices[i] = nullptr;
		}
	}
	bool VulkanUtility::isDeviceIdxSet(const VulkanRenderAPI& rapi, UINT32 idx, GpuDeviceFlags flags)
	{
		VulkanDevice* device = rapi._getDevice(idx).get();

		return ((flags & (1 << idx)) != 0 || (flags == GDF_DEFAULT && device->isPrimary()));
	}
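	// Helper for cutRange(). Splits @p toCut along the array-layer axis around the layer span of @p cutWith,
	// writing up to three resulting pieces to @p output (which must have room for at least three entries).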
	void cutHorizontal(const VkImageSubresourceRange& toCut, const VkImageSubresourceRange& cutWith,
		VkImageSubresourceRange* output, UINT32& numAreas)
	{
		numAreas = 0;

		INT32 leftCut = cutWith.baseArrayLayer - toCut.baseArrayLayer;
		INT32 rightCut = (cutWith.baseArrayLayer + cutWith.layerCount) - toCut.baseArrayLayer;

		if (leftCut > 0 && leftCut < (INT32)(toCut.baseArrayLayer + toCut.layerCount))
		{
			output[numAreas] = toCut;

			VkImageSubresourceRange& range = output[numAreas];
			range.baseArrayLayer = toCut.baseArrayLayer;
			range.layerCount = leftCut;

			numAreas++;
		}

		if (rightCut > 0 && rightCut < (INT32)toCut.layerCount)
		{
			output[numAreas] = toCut;

			VkImageSubresourceRange& range = output[numAreas];
			range.baseArrayLayer = toCut.baseArrayLayer + rightCut;
			range.layerCount = toCut.layerCount - rightCut;

			numAreas++;
		}

		// If we made both left and right cuts, this means we need a middle one as well
		if (numAreas == 2)
		{
			output[numAreas] = toCut;

			VkImageSubresourceRange& range = output[numAreas];
			range.baseArrayLayer = toCut.baseArrayLayer + leftCut;
			range.layerCount = toCut.layerCount - (toCut.layerCount - rightCut) - leftCut;

			numAreas++;
		}

		// Nothing to cut
		if (numAreas == 0)
		{
			output[numAreas] = toCut;
			numAreas++;
		}
	}
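	// Helper for cutRange(). Splits @p toCut along the mip-level axis around the level span of @p cutWith,
	// writing up to three resulting pieces to @p output (which must have room for at least three entries).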
	void cutVertical(const VkImageSubresourceRange& toCut, const VkImageSubresourceRange& cutWith,
		VkImageSubresourceRange* output, UINT32& numAreas)
	{
		numAreas = 0;

		INT32 topCut = cutWith.baseMipLevel - toCut.baseMipLevel;
		INT32 bottomCut = (cutWith.baseMipLevel + cutWith.levelCount) - toCut.baseMipLevel;

		if (topCut > 0 && topCut < (INT32)(toCut.baseMipLevel + toCut.levelCount))
		{
			output[numAreas] = toCut;

			VkImageSubresourceRange& range = output[numAreas];
			range.baseMipLevel = toCut.baseMipLevel;
			range.levelCount = topCut;

			numAreas++;
		}

		if (bottomCut > 0 && bottomCut < (INT32)toCut.levelCount)
		{
			output[numAreas] = toCut;

			VkImageSubresourceRange& range = output[numAreas];
			range.baseMipLevel = toCut.baseMipLevel + bottomCut;
			range.levelCount = toCut.levelCount - bottomCut;

			numAreas++;
		}

		// If we made both top and bottom cuts, this means we need a middle one as well
		if (numAreas == 2)
		{
			output[numAreas] = toCut;

			VkImageSubresourceRange& range = output[numAreas];
			range.baseMipLevel = toCut.baseMipLevel + topCut;
			range.levelCount = toCut.levelCount - (toCut.levelCount - bottomCut) - topCut;

			numAreas++;
		}

		// Nothing to cut
		if (numAreas == 0)
		{
			output[numAreas] = toCut;
			numAreas++;
		}
	}
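	// Splits @p toCut into pieces based on how it overlaps @p cutWith, first along array layers and then along mip
	// levels, producing at most five pieces. For example (illustrative), cutting layers [0, 4) / mips [0, 4) with
	// layers [1, 2) / mips [1, 2) yields five ranges: layers [0, 1) and [2, 4) covering all mips, plus layers [1, 2)
	// split into mips [0, 1), [1, 2) and [2, 4).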
	void VulkanUtility::cutRange(const VkImageSubresourceRange& toCut, const VkImageSubresourceRange& cutWith,
		std::array<VkImageSubresourceRange, 5>& output, UINT32& numAreas)
	{
		numAreas = 0;

		// Cut horizontally
		UINT32 numHorzCuts = 0;
		std::array<VkImageSubresourceRange, 3> horzCuts;
		cutHorizontal(toCut, cutWith, horzCuts.data(), numHorzCuts);

		// Cut vertically
		for (UINT32 i = 0; i < numHorzCuts; i++)
		{
			VkImageSubresourceRange& range = horzCuts[i];

			if (range.baseArrayLayer >= cutWith.baseArrayLayer &&
				(range.baseArrayLayer + range.layerCount) <= (cutWith.baseArrayLayer + cutWith.layerCount))
			{
				UINT32 numVertCuts = 0;
				cutVertical(range, cutWith, output.data() + numAreas, numVertCuts);

				numAreas += numVertCuts;
			}
			else
			{
				output[numAreas] = range;
				numAreas++;
			}
		}

		assert(numAreas <= 5);
	}
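	// Treats the two subresource ranges as rectangles on the (array layer, mip level) plane and performs a standard
	// axis-aligned overlap test.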
	bool VulkanUtility::rangeOverlaps(const VkImageSubresourceRange& a, const VkImageSubresourceRange& b)
	{
		INT32 aRight = a.baseArrayLayer + (INT32)a.layerCount;
		INT32 bRight = b.baseArrayLayer + (INT32)b.layerCount;

		INT32 aBottom = a.baseMipLevel + (INT32)a.levelCount;
		INT32 bBottom = b.baseMipLevel + (INT32)b.levelCount;

		if ((INT32)a.baseArrayLayer < bRight && aRight > (INT32)b.baseArrayLayer &&
			(INT32)a.baseMipLevel < bBottom && aBottom > (INT32)b.baseMipLevel)
			return true;

		return false;
	}
}}