
- Update Vulkan headers and SPIRV-Cross
- Add mesh shader support to the graphics API

Panagiotis Christopoulos Charitos committed 2 years ago
commit 79377a4c80
81 changed files with 36037 additions and 3630 deletions:

  1. AnKi/Gr/CommandBuffer.h (+2, -0)
  2. AnKi/Gr/Common.h (+18, -9)
  3. AnKi/Gr/GrManager.h (+1, -0)
  4. AnKi/Gr/ShaderProgram.cpp (+18, -2)
  5. AnKi/Gr/Vulkan/CommandBuffer.cpp (+6, -0)
  6. AnKi/Gr/Vulkan/CommandBufferImpl.h (+7, -0)
  7. AnKi/Gr/Vulkan/Common.cpp (+10, -0)
  8. AnKi/Gr/Vulkan/Common.h (+3, -2)
  9. AnKi/Gr/Vulkan/GrManagerImpl.cpp (+110, -95)
  10. AnKi/Gr/Vulkan/GrManagerImpl.h (+9, -12)
  11. AnKi/Gr/Vulkan/ShaderProgramImpl.cpp (+5, -2)
  12. AnKi/Gr/Vulkan/ShaderProgramImpl.h (+1, -1)
  13. AnKi/Math/Vec.h (+2, -5)
  14. AnKi/ShaderCompiler/Dxc.cpp (+6, -0)
  15. AnKi/ShaderCompiler/ShaderProgramCompiler.h (+2, -2)
  16. AnKi/ShaderCompiler/ShaderProgramParser.cpp (+10, -2)
  17. Tests/Gr/GrCommon.h (+7, -2)
  18. Tests/Gr/GrMeshShaders.cpp (+273, -0)
  19. ThirdParty/Khronos/vk_video/vulkan_video_codec_h264std.h (+312, -0)
  20. ThirdParty/Khronos/vk_video/vulkan_video_codec_h264std_decode.h (+77, -0)
  21. ThirdParty/Khronos/vk_video/vulkan_video_codec_h264std_encode.h (+147, -0)
  22. ThirdParty/Khronos/vk_video/vulkan_video_codec_h265std.h (+446, -0)
  23. ThirdParty/Khronos/vk_video/vulkan_video_codec_h265std_decode.h (+67, -0)
  24. ThirdParty/Khronos/vk_video/vulkan_video_codec_h265std_encode.h (+157, -0)
  25. ThirdParty/Khronos/vk_video/vulkan_video_codecs_common.h (+36, -0)
  26. ThirdParty/Khronos/vulkan/vk_icd.h (+23, -24)
  27. ThirdParty/Khronos/vulkan/vk_layer.h (+6, -27)
  28. ThirdParty/Khronos/vulkan/vk_platform.h (+1, -1)
  29. ThirdParty/Khronos/vulkan/vulkan.cppm (+3199, -0)
  30. ThirdParty/Khronos/vulkan/vulkan.h (+9, -2)
  31. ThirdParty/Khronos/vulkan/vulkan.hpp (+385, -409)
  32. ThirdParty/Khronos/vulkan/vulkan_android.h (+29, -1)
  33. ThirdParty/Khronos/vulkan/vulkan_beta.h (+345, -594)
  34. ThirdParty/Khronos/vulkan/vulkan_core.h (+329, -144)
  35. ThirdParty/Khronos/vulkan/vulkan_directfb.h (+2, -1)
  36. ThirdParty/Khronos/vulkan/vulkan_enums.hpp (+653, -267)
  37. ThirdParty/Khronos/vulkan/vulkan_extension_inspection.hpp (+1654, -0)
  38. ThirdParty/Khronos/vulkan/vulkan_format_traits.hpp (+376, -583)
  39. ThirdParty/Khronos/vulkan/vulkan_fuchsia.h (+5, -1)
  40. ThirdParty/Khronos/vulkan/vulkan_funcs.hpp (+158, -153)
  41. ThirdParty/Khronos/vulkan/vulkan_ggp.h (+3, -1)
  42. ThirdParty/Khronos/vulkan/vulkan_handles.hpp (+920, -195)
  43. ThirdParty/Khronos/vulkan/vulkan_hash.hpp (+776, -22)
  44. ThirdParty/Khronos/vulkan/vulkan_hpp_macros.hpp (+270, -0)
  45. ThirdParty/Khronos/vulkan/vulkan_ios.h (+2, -1)
  46. ThirdParty/Khronos/vulkan/vulkan_macos.h (+2, -1)
  47. ThirdParty/Khronos/vulkan/vulkan_metal.h (+3, -1)
  48. ThirdParty/Khronos/vulkan/vulkan_raii.hpp (+482, -379)
  49. ThirdParty/Khronos/vulkan/vulkan_screen.h (+55, -1)
  50. ThirdParty/Khronos/vulkan/vulkan_shared.hpp (+988, -0)
  51. ThirdParty/Khronos/vulkan/vulkan_static_assertions.hpp (+7127, -0)
  52. ThirdParty/Khronos/vulkan/vulkan_structs.hpp (+777, -26)
  53. ThirdParty/Khronos/vulkan/vulkan_to_string.hpp (+365, -178)
  54. ThirdParty/Khronos/vulkan/vulkan_vi.h (+2, -1)
  55. ThirdParty/Khronos/vulkan/vulkan_video.hpp (+2699, -0)
  56. ThirdParty/Khronos/vulkan/vulkan_wayland.h (+2, -1)
  57. ThirdParty/Khronos/vulkan/vulkan_win32.h (+28, -1)
  58. ThirdParty/Khronos/vulkan/vulkan_xcb.h (+2, -1)
  59. ThirdParty/Khronos/vulkan/vulkan_xlib.h (+2, -1)
  60. ThirdParty/Khronos/vulkan/vulkan_xlib_xrandr.h (+2, -1)
  61. ThirdParty/SpirvCross/GLSL.std.450.h (+6, -23)
  62. ThirdParty/SpirvCross/spirv.hpp (+2579, -0)
  63. ThirdParty/SpirvCross/spirv_cfg.cpp (+36, -10)
  64. ThirdParty/SpirvCross/spirv_cfg.hpp (+6, -1)
  65. ThirdParty/SpirvCross/spirv_common.hpp (+133, -14)
  66. ThirdParty/SpirvCross/spirv_cross.cpp (+504, -102)
  67. ThirdParty/SpirvCross/spirv_cross.hpp (+97, -11)
  68. ThirdParty/SpirvCross/spirv_cross_containers.hpp (+17, -9)
  69. ThirdParty/SpirvCross/spirv_cross_error_handling.hpp (+1, -1)
  70. ThirdParty/SpirvCross/spirv_cross_parsed_ir.cpp (+39, -22)
  71. ThirdParty/SpirvCross/spirv_cross_parsed_ir.hpp (+10, -3)
  72. ThirdParty/SpirvCross/spirv_cross_util.cpp (+77, -0)
  73. ThirdParty/SpirvCross/spirv_cross_util.hpp (+37, -0)
  74. ThirdParty/SpirvCross/spirv_glsl.cpp (+462, -108)
  75. ThirdParty/SpirvCross/spirv_glsl.hpp (+168, -34)
  76. ThirdParty/SpirvCross/spirv_hlsl.cpp (+6771, -0)
  77. ThirdParty/SpirvCross/spirv_hlsl.hpp (+415, -0)
  78. ThirdParty/SpirvCross/spirv_parser.cpp (+147, -14)
  79. ThirdParty/SpirvCross/spirv_parser.hpp (+3, -1)
  80. ThirdParty/Volk/volk.c (+599, -72)
  81. ThirdParty/Volk/volk.h (+517, -53)

+ 2 - 0
AnKi/Gr/CommandBuffer.h

@@ -310,6 +310,8 @@ public:
 	void drawIndirectCount(PrimitiveTopology topology, Buffer* argBuffer, PtrSize argBufferOffset, U32 argBufferStride, Buffer* countBuffer,
 						   PtrSize countBufferOffset, U32 maxDrawCount);
 
+	void drawMeshTasks(U32 groupCountX, U32 groupCountY, U32 groupCountZ);
+
 	void dispatchCompute(U32 groupCountX, U32 groupCountY, U32 groupCountZ);
 
 	void dispatchComputeIndirect(Buffer* argBuffer, PtrSize argBufferOffset);
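
The new drawMeshTasks() entry point mirrors dispatchCompute(): it launches a 3D grid of task-shader workgroups instead of a vertex-driven draw. A minimal usage sketch, condensed from the Tests/Gr/GrMeshShaders.cpp test added later in this commit (createShader() is the test helper from Tests/Gr/GrCommon.h; buffers, render pass and error handling omitted):

#include <AnKi/Gr.h>
#include <Tests/Gr/GrCommon.h>

using namespace anki;

// Sketch: build a task+mesh+fragment program and record a mesh-shading drawcall.
void recordMeshDraw(GrManager& gr, CommandBuffer& cmdb, CString taskSrc, CString meshSrc, CString fragSrc)
{
	ShaderPtr taskShader = createShader(taskSrc, ShaderType::kTask, gr);
	ShaderPtr meshShader = createShader(meshSrc, ShaderType::kMesh, gr);
	ShaderPtr fragShader = createShader(fragSrc, ShaderType::kFragment, gr);

	ShaderProgramInitInfo progInit("MeshProgram");
	progInit.m_graphicsShaders[ShaderType::kTask] = taskShader.get();
	progInit.m_graphicsShaders[ShaderType::kMesh] = meshShader.get();
	progInit.m_graphicsShaders[ShaderType::kFragment] = fragShader.get();
	ShaderProgramPtr prog = gr.newShaderProgram(progInit);

	cmdb.bindShaderProgram(prog.get());
	cmdb.drawMeshTasks(1, 1, 1); // 1x1x1 task workgroups; the task shader then emits mesh workgroups
}

Note that the path is guarded: the "MeshShaders" CVar must be enabled and the device must expose VK_EXT_mesh_shader, otherwise GrManagerImpl never sets m_capabilities.m_meshShaders and the Vulkan backend asserts on the drawcall.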

+ 18 - 9
AnKi/Gr/Common.h

@@ -217,6 +217,9 @@ public:
 
 	/// DLSS.
 	Bool m_dlss = false;
+
+	/// Mesh shaders.
+	Bool m_meshShaders = false;
 };
 ANKI_END_PACKED_STRUCT
 
@@ -533,6 +536,8 @@ enum class ShaderType : U16
 	kTessellationControl,
 	kTessellationEvaluation,
 	kGeometry,
+	kTask,
+	kMesh,
 	kFragment,
 	kCompute,
 	kRayGen,
@@ -558,17 +563,21 @@ enum class ShaderTypeBit : U16
 	kTessellationControl = 1 << 1,
 	kTessellationEvaluation = 1 << 2,
 	kGeometry = 1 << 3,
-	kFragment = 1 << 4,
-	kCompute = 1 << 5,
-	kRayGen = 1 << 6,
-	kAnyHit = 1 << 7,
-	kClosestHit = 1 << 8,
-	kMiss = 1 << 9,
-	kIntersection = 1 << 10,
-	kCallable = 1 << 11,
+	kTask = 1 << 4,
+	kMesh = 1 << 5,
+	kFragment = 1 << 6,
+	kCompute = 1 << 7,
+	kRayGen = 1 << 8,
+	kAnyHit = 1 << 9,
+	kClosestHit = 1 << 10,
+	kMiss = 1 << 11,
+	kIntersection = 1 << 12,
+	kCallable = 1 << 13,
 
 	kNone = 0,
-	kAllGraphics = kVertex | kTessellationControl | kTessellationEvaluation | kGeometry | kFragment,
+	kAllGraphics = kVertex | kTessellationControl | kTessellationEvaluation | kGeometry | kTask | kMesh | kFragment,
+	kAllLegacyGeometry = kVertex | kTessellationControl | kTessellationEvaluation | kGeometry,
+	kAllModernGeometry = kTask | kMesh,
 	kAllRayTracing = kRayGen | kAnyHit | kClosestHit | kMiss | kIntersection | kCallable,
 	kAll = kAllGraphics | kCompute | kAllRayTracing,
 };
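
The renumbering above slots kTask/kMesh between kGeometry and kFragment, so every stage from kFragment onward moves to a new bit, and two convenience masks describe the split between vertex-driven ("legacy") and mesh-driven ("modern") geometry stages. Presumably this renumbering is also why the shader binary magic/version is bumped to 9 later in the commit. A standalone compile-time sketch of the resulting layout (values copied from the hunk above; this is illustrative, not AnKi code):

#include <cstdint>

namespace sketch {

// Mirrors the relevant ShaderTypeBit values after this commit.
enum StageBit : uint16_t
{
	kVertex = 1 << 0,
	kTessellationControl = 1 << 1,
	kTessellationEvaluation = 1 << 2,
	kGeometry = 1 << 3,
	kTask = 1 << 4,
	kMesh = 1 << 5,
	kFragment = 1 << 6, // was 1 << 4 before this commit

	kAllLegacyGeometry = kVertex | kTessellationControl | kTessellationEvaluation | kGeometry,
	kAllModernGeometry = kTask | kMesh,
};

// The two geometry groups are disjoint, which is exactly what the new
// ShaderProgramInitInfo::isValid() check below relies on.
static_assert((kAllLegacyGeometry & kAllModernGeometry) == 0, "legacy and modern geometry stages never overlap");

} // namespace sketch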

+ 1 - 0
AnKi/Gr/GrManager.h

@@ -17,6 +17,7 @@ class NativeWindow;
 extern BoolCVar g_vsyncCVar;
 extern BoolCVar g_validationCVar;
 extern BoolCVar g_debugMarkersCVar;
+extern BoolCVar g_meshShadersCVar;
 
 /// @addtogroup graphics
 /// @{

+ 18 - 2
AnKi/Gr/ShaderProgram.cpp

@@ -22,9 +22,25 @@ Bool ShaderProgramInitInfo::isValid() const
 		}
 	}
 
-	if(!!graphicsMask && (graphicsMask & (ShaderTypeBit::kVertex | ShaderTypeBit::kFragment)) != (ShaderTypeBit::kVertex | ShaderTypeBit::kFragment))
+	if(!!graphicsMask)
 	{
-		return false;
+		if(!(graphicsMask & ShaderTypeBit::kFragment))
+		{
+			return false;
+		}
+
+		const Bool hasMesh = !!(graphicsMask & ShaderTypeBit::kAllModernGeometry);
+		const Bool hasVert = !!(graphicsMask & ShaderTypeBit::kAllLegacyGeometry);
+
+		if(hasMesh && !!(graphicsMask & ShaderTypeBit::kAllLegacyGeometry))
+		{
+			return false;
+		}
+
+		if(hasVert && !!(graphicsMask & ShaderTypeBit::kAllModernGeometry))
+		{
+			return false;
+		}
 	}
 
 	Bool compute = false;
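
In words: a graphics program must always contain a fragment shader, and its remaining stages must be either all legacy (vertex/tessellation/geometry) or all modern (task/mesh), never a mix. A standalone sketch that restates the rule as a constexpr predicate for a non-empty graphics mask and spells out a few accepted and rejected combinations (the bit values mirror ShaderTypeBit; illustrative, not AnKi code):

#include <cstdint>

namespace sketch {

constexpr uint16_t kVertex = 1 << 0, kGeometry = 1 << 3, kTask = 1 << 4, kMesh = 1 << 5, kFragment = 1 << 6;
constexpr uint16_t kLegacy = kVertex | (1 << 1) | (1 << 2) | kGeometry; // vert, tess ctrl, tess eval, geom
constexpr uint16_t kModern = kTask | kMesh;

// Same decision the rewritten isValid() makes once the graphics mask is non-empty.
constexpr bool graphicsMaskValid(uint16_t mask)
{
	if((mask & kFragment) == 0)
	{
		return false; // a fragment shader is always required
	}
	const bool hasModern = (mask & kModern) != 0;
	const bool hasLegacy = (mask & kLegacy) != 0;
	return !(hasModern && hasLegacy); // vertex-driven and mesh-driven stages are mutually exclusive
}

static_assert(graphicsMaskValid(kVertex | kFragment)); // classic pipeline
static_assert(graphicsMaskValid(kTask | kMesh | kFragment)); // mesh-shading pipeline
static_assert(!graphicsMaskValid(kVertex | kMesh | kFragment)); // mixing is rejected
static_assert(!graphicsMaskValid(kTask | kMesh)); // missing fragment shader

} // namespace sketch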

+ 6 - 0
AnKi/Gr/Vulkan/CommandBuffer.cpp

@@ -294,6 +294,12 @@ void CommandBuffer::drawIndexedIndirect(PrimitiveTopology topology, U32 drawCoun
 	self.drawIndexedIndirectInternal(topology, drawCount, offset, buff);
 }
 
+void CommandBuffer::drawMeshTasks(U32 groupCountX, U32 groupCountY, U32 groupCountZ)
+{
+	ANKI_VK_SELF(CommandBufferImpl);
+	self.drawMeshTasksInternal(groupCountX, groupCountY, groupCountZ);
+}
+
 void CommandBuffer::dispatchCompute(U32 groupCountX, U32 groupCountY, U32 groupCountZ)
 {
 	ANKI_VK_SELF(CommandBufferImpl);

+ 7 - 0
AnKi/Gr/Vulkan/CommandBufferImpl.h

@@ -317,6 +317,13 @@ public:
 		vkCmdDrawIndexedIndirect(m_handle, impl.getHandle(), offset, drawCount, sizeof(DrawIndexedIndirectArgs));
 	}
 
+	ANKI_FORCE_INLINE void drawMeshTasksInternal(U32 groupCountX, U32 groupCountY, U32 groupCountZ)
+	{
+		ANKI_ASSERT(!!(getGrManagerImpl().getExtensions() & VulkanExtensions::kEXT_mesh_shader));
+		drawcallCommon();
+		vkCmdDrawMeshTasksEXT(m_handle, groupCountX, groupCountY, groupCountZ);
+	}
+
 	ANKI_FORCE_INLINE void drawIndexedIndirectCountInternal(PrimitiveTopology topology, Buffer* argBuffer, PtrSize argBufferOffset,
 															U32 argBufferStride, Buffer* countBuffer, PtrSize countBufferOffset, U32 maxDrawCount)
 	{

+ 10 - 0
AnKi/Gr/Vulkan/Common.cpp

@@ -505,6 +505,16 @@ VkShaderStageFlags convertShaderTypeBit(ShaderTypeBit bit)
 		out |= VK_SHADER_STAGE_GEOMETRY_BIT;
 	}
 
+	if(!!(bit & ShaderTypeBit::kTask))
+	{
+		out |= VK_SHADER_STAGE_TASK_BIT_EXT;
+	}
+
+	if(!!(bit & ShaderTypeBit::kMesh))
+	{
+		out |= VK_SHADER_STAGE_MESH_BIT_EXT;
+	}
+
 	if(!!(bit & ShaderTypeBit::kFragment))
 	{
 		out |= VK_SHADER_STAGE_FRAGMENT_BIT;

+ 3 - 2
AnKi/Gr/Vulkan/Common.h

@@ -63,7 +63,7 @@ enum class DescriptorType : U8
 	kCount
 };
 
-enum class VulkanExtensions : U32
+enum class VulkanExtensions : U64
 {
 	kNone = 0,
 	kKHR_xcb_surface = 1 << 1,
@@ -94,7 +94,8 @@ enum class VulkanExtensions : U32
 	kNVX_image_view_handle = 1 << 27,
 	kKHR_push_descriptor = 1 << 28,
 	kKHR_maintenance_4 = 1 << 29,
-	kKHR_draw_indirect_count = 1 << 30
+	kKHR_draw_indirect_count = 1 << 30,
+	kEXT_mesh_shader = 1 << 31
 };
 ANKI_ENUM_ALLOW_NUMERIC_OPERATIONS(VulkanExtensions)
 

+ 110 - 95
AnKi/Gr/Vulkan/GrManagerImpl.cpp

@@ -24,6 +24,7 @@ static BoolCVar g_rayTracingCVar(CVarSubsystem::kGr, "RayTracing", false, "Try e
 static BoolCVar g_64bitAtomicsCVar(CVarSubsystem::kGr, "64bitAtomics", true, "Enable or not 64bit atomics");
 static BoolCVar g_samplerFilterMinMaxCVar(CVarSubsystem::kGr, "SamplerFilterMinMax", true, "Enable or not min/max sample filtering");
 static BoolCVar g_vrsCVar(CVarSubsystem::kGr, "Vrs", false, "Enable or not VRS");
+BoolCVar g_meshShadersCVar(CVarSubsystem::kGr, "MeshShaders", false, "Enable or not mesh shaders");
 static BoolCVar g_asyncComputeCVar(CVarSubsystem::kGr, "AsyncCompute", true, "Enable or not async compute");
 static NumericCVar<U8> g_vkMinorCVar(CVarSubsystem::kGr, "VkMinor", 1, 1, 1, "Vulkan minor version");
 static NumericCVar<U8> g_vkMajorCVar(CVarSubsystem::kGr, "VkMajor", 1, 1, 1, "Vulkan major version");
@@ -802,6 +803,11 @@ Error GrManagerImpl::initDevice()
 				m_extensions |= VulkanExtensions::kKHR_draw_indirect_count;
 				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
 			}
+			else if(extensionName == VK_EXT_MESH_SHADER_EXTENSION_NAME && g_meshShadersCVar.get())
+			{
+				m_extensions |= VulkanExtensions::kEXT_mesh_shader;
+				extensionsToEnable[extensionsToEnableCount++] = extensionName.cstr();
+			}
 		}
 
 		ANKI_VK_LOGI("Will enable the following device extensions:");
@@ -815,15 +821,16 @@ Error GrManagerImpl::initDevice()
 	}
 
 	// Enable/disable generic features
+	VkPhysicalDeviceFeatures devFeatures = {};
 	{
-		VkPhysicalDeviceFeatures2 devFeatures = {};
-		devFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
-		vkGetPhysicalDeviceFeatures2(m_physicalDevice, &devFeatures);
-		m_devFeatures = devFeatures.features;
-		m_devFeatures.robustBufferAccess = (g_validationCVar.get() && m_devFeatures.robustBufferAccess) ? true : false;
-		ANKI_VK_LOGI("Robust buffer access is %s", (m_devFeatures.robustBufferAccess) ? "enabled" : "disabled");
+		VkPhysicalDeviceFeatures2 devFeatures2 = {};
+		devFeatures2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
+		vkGetPhysicalDeviceFeatures2(m_physicalDevice, &devFeatures2);
+		devFeatures = devFeatures2.features;
+		devFeatures.robustBufferAccess = (g_validationCVar.get() && devFeatures.robustBufferAccess) ? true : false;
+		ANKI_VK_LOGI("Robust buffer access is %s", (devFeatures.robustBufferAccess) ? "enabled" : "disabled");
 
-		ci.pEnabledFeatures = &m_devFeatures;
+		ci.pEnabledFeatures = &devFeatures;
 	}
 
 #if ANKI_PLATFORM_MOBILE
@@ -851,6 +858,7 @@ Error GrManagerImpl::initDevice()
 	}
 
 	// Descriptor indexing
+	VkPhysicalDeviceDescriptorIndexingFeatures descriptorIndexingFeatures = {};
 	if(!(m_extensions & VulkanExtensions::kEXT_descriptor_indexing))
 	{
 		ANKI_VK_LOGE(VK_EXT_DESCRIPTOR_INDEXING_EXTENSION_NAME " is not supported");
@@ -858,59 +866,53 @@ Error GrManagerImpl::initDevice()
 	}
 	else
 	{
-		m_descriptorIndexingFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT;
+		descriptorIndexingFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_DESCRIPTOR_INDEXING_FEATURES_EXT;
+		getPhysicalDevicaFeatures2(descriptorIndexingFeatures);
 
-		VkPhysicalDeviceFeatures2 features = {};
-		features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
-		features.pNext = &m_descriptorIndexingFeatures;
-		vkGetPhysicalDeviceFeatures2(m_physicalDevice, &features);
-
-		if(!m_descriptorIndexingFeatures.shaderSampledImageArrayNonUniformIndexing
-		   || !m_descriptorIndexingFeatures.shaderStorageImageArrayNonUniformIndexing)
+		if(!descriptorIndexingFeatures.shaderSampledImageArrayNonUniformIndexing
+		   || !descriptorIndexingFeatures.shaderStorageImageArrayNonUniformIndexing)
 		{
 			ANKI_VK_LOGE("Non uniform indexing is not supported by the device");
 			return Error::kFunctionFailed;
 		}
 
-		if(!m_descriptorIndexingFeatures.descriptorBindingSampledImageUpdateAfterBind
-		   || !m_descriptorIndexingFeatures.descriptorBindingStorageImageUpdateAfterBind)
+		if(!descriptorIndexingFeatures.descriptorBindingSampledImageUpdateAfterBind
+		   || !descriptorIndexingFeatures.descriptorBindingStorageImageUpdateAfterBind)
 		{
 			ANKI_VK_LOGE("Update descriptors after bind is not supported by the device");
 			return Error::kFunctionFailed;
 		}
 
-		if(!m_descriptorIndexingFeatures.descriptorBindingUpdateUnusedWhilePending)
+		if(!descriptorIndexingFeatures.descriptorBindingUpdateUnusedWhilePending)
 		{
 			ANKI_VK_LOGE("Update descriptors while cmd buffer is pending is not supported by the device");
 			return Error::kFunctionFailed;
 		}
 
-		m_descriptorIndexingFeatures.pNext = const_cast<void*>(ci.pNext);
-		ci.pNext = &m_descriptorIndexingFeatures;
+		descriptorIndexingFeatures.pNext = const_cast<void*>(ci.pNext);
+		ci.pNext = &descriptorIndexingFeatures;
 	}
 
 	// Buffer address
+	VkPhysicalDeviceBufferDeviceAddressFeaturesKHR deviceBufferFeatures = {};
 	if(!(m_extensions & VulkanExtensions::kKHR_buffer_device_address))
 	{
 		ANKI_VK_LOGW(VK_KHR_BUFFER_DEVICE_ADDRESS_EXTENSION_NAME " is not supported");
 	}
 	else
 	{
-		m_deviceBufferFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR;
-
-		VkPhysicalDeviceFeatures2 features = {};
-		features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
-		features.pNext = &m_deviceBufferFeatures;
-		vkGetPhysicalDeviceFeatures2(m_physicalDevice, &features);
+		deviceBufferFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_BUFFER_DEVICE_ADDRESS_FEATURES_KHR;
+		getPhysicalDevicaFeatures2(deviceBufferFeatures);
 
-		m_deviceBufferFeatures.bufferDeviceAddressCaptureReplay = m_deviceBufferFeatures.bufferDeviceAddressCaptureReplay && g_debugMarkersCVar.get();
-		m_deviceBufferFeatures.bufferDeviceAddressMultiDevice = false;
+		deviceBufferFeatures.bufferDeviceAddressCaptureReplay = deviceBufferFeatures.bufferDeviceAddressCaptureReplay && g_debugMarkersCVar.get();
+		deviceBufferFeatures.bufferDeviceAddressMultiDevice = false;
 
-		m_deviceBufferFeatures.pNext = const_cast<void*>(ci.pNext);
-		ci.pNext = &m_deviceBufferFeatures;
+		deviceBufferFeatures.pNext = const_cast<void*>(ci.pNext);
+		ci.pNext = &deviceBufferFeatures;
 	}
 
 	// Scalar block layout
+	VkPhysicalDeviceScalarBlockLayoutFeaturesEXT scalarBlockLayoutFeatures = {};
 	if(!(m_extensions & VulkanExtensions::kEXT_scalar_block_layout))
 	{
 		ANKI_VK_LOGE(VK_EXT_SCALAR_BLOCK_LAYOUT_EXTENSION_NAME " is not supported");
@@ -918,24 +920,21 @@ Error GrManagerImpl::initDevice()
 	}
 	else
 	{
-		m_scalarBlockLayout.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT;
+		scalarBlockLayoutFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SCALAR_BLOCK_LAYOUT_FEATURES_EXT;
+		getPhysicalDevicaFeatures2(scalarBlockLayoutFeatures);
 
-		VkPhysicalDeviceFeatures2 features = {};
-		features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
-		features.pNext = &m_scalarBlockLayout;
-		vkGetPhysicalDeviceFeatures2(m_physicalDevice, &features);
-
-		if(!m_scalarBlockLayout.scalarBlockLayout)
+		if(!scalarBlockLayoutFeatures.scalarBlockLayout)
 		{
 			ANKI_VK_LOGE("Scalar block layout is not supported by the device");
 			return Error::kFunctionFailed;
 		}
 
-		m_scalarBlockLayout.pNext = const_cast<void*>(ci.pNext);
-		ci.pNext = &m_scalarBlockLayout;
+		scalarBlockLayoutFeatures.pNext = const_cast<void*>(ci.pNext);
+		ci.pNext = &scalarBlockLayoutFeatures;
 	}
 
 	// Timeline semaphore
+	VkPhysicalDeviceTimelineSemaphoreFeaturesKHR timelineSemaphoreFeatures = {};
 	if(!(m_extensions & VulkanExtensions::kKHR_timeline_semaphore))
 	{
 		ANKI_VK_LOGE(VK_KHR_TIMELINE_SEMAPHORE_EXTENSION_NAME " is not supported");
@@ -943,54 +942,53 @@ Error GrManagerImpl::initDevice()
 	}
 	else
 	{
-		m_timelineSemaphoreFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR;
+		timelineSemaphoreFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_TIMELINE_SEMAPHORE_FEATURES_KHR;
+		getPhysicalDevicaFeatures2(timelineSemaphoreFeatures);
 
-		VkPhysicalDeviceFeatures2 features = {};
-		features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
-		features.pNext = &m_timelineSemaphoreFeatures;
-		vkGetPhysicalDeviceFeatures2(m_physicalDevice, &features);
-
-		if(!m_timelineSemaphoreFeatures.timelineSemaphore)
+		if(!timelineSemaphoreFeatures.timelineSemaphore)
 		{
 			ANKI_VK_LOGE("Timeline semaphores are not supported by the device");
 			return Error::kFunctionFailed;
 		}
 
-		m_timelineSemaphoreFeatures.pNext = const_cast<void*>(ci.pNext);
-		ci.pNext = &m_timelineSemaphoreFeatures;
+		timelineSemaphoreFeatures.pNext = const_cast<void*>(ci.pNext);
+		ci.pNext = &timelineSemaphoreFeatures;
 	}
 
 	// Set RT features
+	VkPhysicalDeviceRayTracingPipelineFeaturesKHR rtPipelineFeatures = {};
+	VkPhysicalDeviceRayQueryFeaturesKHR rayQueryFeatures = {};
+	VkPhysicalDeviceAccelerationStructureFeaturesKHR accelerationStructureFeatures = {};
 	if(!!(m_extensions & VulkanExtensions::kKHR_ray_tracing))
 	{
-		m_rtPipelineFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR;
-		m_rayQueryFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR;
-		m_accelerationStructureFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR;
+		rtPipelineFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_TRACING_PIPELINE_FEATURES_KHR;
+		rayQueryFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_RAY_QUERY_FEATURES_KHR;
+		accelerationStructureFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_ACCELERATION_STRUCTURE_FEATURES_KHR;
 
 		VkPhysicalDeviceFeatures2 features = {};
 		features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
-		features.pNext = &m_rtPipelineFeatures;
-		m_rtPipelineFeatures.pNext = &m_rayQueryFeatures;
-		m_rayQueryFeatures.pNext = &m_accelerationStructureFeatures;
+		features.pNext = &rtPipelineFeatures;
+		rtPipelineFeatures.pNext = &rayQueryFeatures;
+		rayQueryFeatures.pNext = &accelerationStructureFeatures;
 		vkGetPhysicalDeviceFeatures2(m_physicalDevice, &features);
 
-		if(!m_rtPipelineFeatures.rayTracingPipeline || !m_rayQueryFeatures.rayQuery || !m_accelerationStructureFeatures.accelerationStructure)
+		if(!rtPipelineFeatures.rayTracingPipeline || !rayQueryFeatures.rayQuery || !accelerationStructureFeatures.accelerationStructure)
 		{
 			ANKI_VK_LOGE("Ray tracing and ray query are both required");
 			return Error::kFunctionFailed;
 		}
 
 		// Only enable what's necessary
-		m_rtPipelineFeatures.rayTracingPipelineShaderGroupHandleCaptureReplay = false;
-		m_rtPipelineFeatures.rayTracingPipelineShaderGroupHandleCaptureReplayMixed = false;
-		m_rtPipelineFeatures.rayTraversalPrimitiveCulling = false;
-		m_accelerationStructureFeatures.accelerationStructureCaptureReplay = false;
-		m_accelerationStructureFeatures.accelerationStructureHostCommands = false;
-		m_accelerationStructureFeatures.descriptorBindingAccelerationStructureUpdateAfterBind = false;
+		rtPipelineFeatures.rayTracingPipelineShaderGroupHandleCaptureReplay = false;
+		rtPipelineFeatures.rayTracingPipelineShaderGroupHandleCaptureReplayMixed = false;
+		rtPipelineFeatures.rayTraversalPrimitiveCulling = false;
+		accelerationStructureFeatures.accelerationStructureCaptureReplay = false;
+		accelerationStructureFeatures.accelerationStructureHostCommands = false;
+		accelerationStructureFeatures.descriptorBindingAccelerationStructureUpdateAfterBind = false;
 
-		ANKI_ASSERT(m_accelerationStructureFeatures.pNext == nullptr);
-		m_accelerationStructureFeatures.pNext = const_cast<void*>(ci.pNext);
-		ci.pNext = &m_rtPipelineFeatures;
+		ANKI_ASSERT(accelerationStructureFeatures.pNext == nullptr);
+		accelerationStructureFeatures.pNext = const_cast<void*>(ci.pNext);
+		ci.pNext = &rtPipelineFeatures;
 
 		// Get some more stuff
 		VkPhysicalDeviceAccelerationStructurePropertiesKHR props = {};
@@ -1000,16 +998,18 @@ Error GrManagerImpl::initDevice()
 	}
 
 	// Pipeline features
+	VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR pplineExecutablePropertiesFeatures = {};
 	if(!!(m_extensions & VulkanExtensions::kKHR_pipeline_executable_properties))
 	{
-		m_pplineExecutablePropertiesFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR;
-		m_pplineExecutablePropertiesFeatures.pipelineExecutableInfo = true;
+		pplineExecutablePropertiesFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_PIPELINE_EXECUTABLE_PROPERTIES_FEATURES_KHR;
+		pplineExecutablePropertiesFeatures.pipelineExecutableInfo = true;
 
-		m_pplineExecutablePropertiesFeatures.pNext = const_cast<void*>(ci.pNext);
-		ci.pNext = &m_pplineExecutablePropertiesFeatures;
+		pplineExecutablePropertiesFeatures.pNext = const_cast<void*>(ci.pNext);
+		ci.pNext = &pplineExecutablePropertiesFeatures;
 	}
 
 	// F16 I8
+	VkPhysicalDeviceShaderFloat16Int8FeaturesKHR float16Int8Features = {};
 	if(!(m_extensions & VulkanExtensions::kKHR_shader_float16_int8))
 	{
 		ANKI_VK_LOGE(VK_KHR_SHADER_FLOAT16_INT8_EXTENSION_NAME " is not supported");
@@ -1017,18 +1017,15 @@ Error GrManagerImpl::initDevice()
 	}
 	else
 	{
-		m_float16Int8Features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR;
-
-		VkPhysicalDeviceFeatures2 features = {};
-		features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
-		features.pNext = &m_float16Int8Features;
-		vkGetPhysicalDeviceFeatures2(m_physicalDevice, &features);
+		float16Int8Features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FLOAT16_INT8_FEATURES_KHR;
+		getPhysicalDevicaFeatures2(float16Int8Features);
 
-		m_float16Int8Features.pNext = const_cast<void*>(ci.pNext);
-		ci.pNext = &m_float16Int8Features;
+		float16Int8Features.pNext = const_cast<void*>(ci.pNext);
+		ci.pNext = &float16Int8Features;
 	}
 
 	// 64bit atomics
+	VkPhysicalDeviceShaderAtomicInt64FeaturesKHR atomicInt64Features = {};
 	if(!(m_extensions & VulkanExtensions::kKHR_shader_atomic_int64))
 	{
 		ANKI_VK_LOGW(VK_KHR_SHADER_ATOMIC_INT64_EXTENSION_NAME " is not supported or disabled");
@@ -1038,18 +1035,15 @@ Error GrManagerImpl::initDevice()
 	{
 		m_capabilities.m_64bitAtomics = true;
 
-		m_atomicInt64Features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR;
-
-		VkPhysicalDeviceFeatures2 features = {};
-		features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
-		features.pNext = &m_atomicInt64Features;
-		vkGetPhysicalDeviceFeatures2(m_physicalDevice, &features);
+		atomicInt64Features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_SHADER_ATOMIC_INT64_FEATURES_KHR;
+		getPhysicalDevicaFeatures2(atomicInt64Features);
 
-		m_atomicInt64Features.pNext = const_cast<void*>(ci.pNext);
-		ci.pNext = &m_atomicInt64Features;
+		atomicInt64Features.pNext = const_cast<void*>(ci.pNext);
+		ci.pNext = &atomicInt64Features;
 	}
 
 	// VRS
+	VkPhysicalDeviceFragmentShadingRateFeaturesKHR fragmentShadingRateFeatures = {};
 	if(!(m_extensions & VulkanExtensions::kKHR_fragment_shading_rate))
 	{
 		ANKI_VK_LOGI(VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME " is not supported or disabled");
@@ -1059,15 +1053,11 @@ Error GrManagerImpl::initDevice()
 	{
 		m_capabilities.m_vrs = true;
 
-		m_fragmentShadingRateFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR;
-
-		VkPhysicalDeviceFeatures2 features = {};
-		features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
-		features.pNext = &m_fragmentShadingRateFeatures;
-		vkGetPhysicalDeviceFeatures2(m_physicalDevice, &features);
+		fragmentShadingRateFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FRAGMENT_SHADING_RATE_FEATURES_KHR;
+		getPhysicalDevicaFeatures2(fragmentShadingRateFeatures);
 
 		// Some checks
-		if(!m_fragmentShadingRateFeatures.attachmentFragmentShadingRate || !m_fragmentShadingRateFeatures.pipelineFragmentShadingRate)
+		if(!fragmentShadingRateFeatures.attachmentFragmentShadingRate || !fragmentShadingRateFeatures.pipelineFragmentShadingRate)
 		{
 			ANKI_VK_LOGW(VK_KHR_FRAGMENT_SHADING_RATE_EXTENSION_NAME " doesn't support attachment and/or pipeline rates. Will disable VRS");
 			m_capabilities.m_vrs = false;
@@ -1075,7 +1065,7 @@ Error GrManagerImpl::initDevice()
 		else
 		{
 			// Disable some things
-			m_fragmentShadingRateFeatures.primitiveFragmentShadingRate = false;
+			fragmentShadingRateFeatures.primitiveFragmentShadingRate = false;
 		}
 
 		if(m_capabilities.m_vrs)
@@ -1102,15 +1092,40 @@ Error GrManagerImpl::initDevice()
 
 		if(m_capabilities.m_vrs)
 		{
-			m_fragmentShadingRateFeatures.pNext = const_cast<void*>(ci.pNext);
-			ci.pNext = &m_fragmentShadingRateFeatures;
+			fragmentShadingRateFeatures.pNext = const_cast<void*>(ci.pNext);
+			ci.pNext = &fragmentShadingRateFeatures;
+		}
+	}
+
+	// Mesh shaders
+	VkPhysicalDeviceMeshShaderFeaturesEXT meshShadersFeatures = {};
+	if(!!(m_extensions & VulkanExtensions::kEXT_mesh_shader))
+	{
+		m_capabilities.m_meshShaders = true;
+
+		meshShadersFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT;
+		getPhysicalDevicaFeatures2(meshShadersFeatures);
+
+		if(meshShadersFeatures.taskShader == false)
+		{
+			ANKI_LOGE(VK_EXT_MESH_SHADER_EXTENSION_NAME " doesn't support task shaders");
+			return Error::kFunctionFailed;
 		}
+
+		meshShadersFeatures.pNext = const_cast<void*>(ci.pNext);
+		ci.pNext = &meshShadersFeatures;
+
+		ANKI_VK_LOGI(VK_EXT_MESH_SHADER_EXTENSION_NAME " is supported and enabled");
+	}
+	else
+	{
+		ANKI_VK_LOGI(VK_EXT_MESH_SHADER_EXTENSION_NAME " is not supported or disabled ");
 	}
 
 	VkPhysicalDeviceMaintenance4FeaturesKHR maintenance4Features = {};
-	maintenance4Features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES_KHR;
 	if(!!(m_extensions & VulkanExtensions::kKHR_maintenance_4))
 	{
+		maintenance4Features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MAINTENANCE_4_FEATURES_KHR;
 		maintenance4Features.maintenance4 = true;
 		maintenance4Features.pNext = const_cast<void*>(ci.pNext);
 		ci.pNext = &maintenance4Features;
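
The new "Mesh shaders" block follows the same pattern as the other optional features: query the EXT feature struct through the new getPhysicalDevicaFeatures2() helper, verify the bits the engine needs (task shaders), then prepend the struct to VkDeviceCreateInfo::pNext. For reference, the equivalent in plain Vulkan, outside AnKi's wrappers, looks roughly like this (error handling and the rest of the create info omitted):

#include <vulkan/vulkan.h>

// Sketch: enable VK_EXT_mesh_shader features at device creation. The feature struct is
// passed in by the caller so it outlives the eventual vkCreateDevice() call; this is also
// why the commit can keep these structs as locals inside initDevice() instead of members.
void chainMeshShaderFeatures(VkPhysicalDevice physicalDevice, VkDeviceCreateInfo& ci,
							 VkPhysicalDeviceMeshShaderFeaturesEXT& meshFeatures)
{
	meshFeatures = {};
	meshFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_MESH_SHADER_FEATURES_EXT;

	VkPhysicalDeviceFeatures2 features2 = {};
	features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
	features2.pNext = &meshFeatures;
	vkGetPhysicalDeviceFeatures2(physicalDevice, &features2); // fills taskShader/meshShader

	// Prepend to the existing pNext chain, exactly like the hunk above.
	meshFeatures.pNext = const_cast<void*>(ci.pNext);
	ci.pNext = &meshFeatures;

	// VK_EXT_MESH_SHADER_EXTENSION_NAME must also appear in ci.ppEnabledExtensionNames.
}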

+ 9 - 12
AnKi/Gr/Vulkan/GrManagerImpl.h

@@ -228,18 +228,6 @@ private:
 	VkPhysicalDeviceProperties2 m_devProps = {};
 	VkPhysicalDeviceAccelerationStructurePropertiesKHR m_accelerationStructureProps = {};
 	VkPhysicalDeviceRayTracingPipelinePropertiesKHR m_rtPipelineProps = {};
-	VkPhysicalDeviceFeatures m_devFeatures = {};
-	VkPhysicalDeviceAccelerationStructureFeaturesKHR m_accelerationStructureFeatures = {};
-	VkPhysicalDeviceRayTracingPipelineFeaturesKHR m_rtPipelineFeatures = {};
-	VkPhysicalDeviceRayQueryFeaturesKHR m_rayQueryFeatures = {};
-	VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR m_pplineExecutablePropertiesFeatures = {};
-	VkPhysicalDeviceDescriptorIndexingFeatures m_descriptorIndexingFeatures = {};
-	VkPhysicalDeviceBufferDeviceAddressFeaturesKHR m_deviceBufferFeatures = {};
-	VkPhysicalDeviceScalarBlockLayoutFeaturesEXT m_scalarBlockLayout = {};
-	VkPhysicalDeviceTimelineSemaphoreFeaturesKHR m_timelineSemaphoreFeatures = {};
-	VkPhysicalDeviceShaderFloat16Int8FeaturesKHR m_float16Int8Features = {};
-	VkPhysicalDeviceShaderAtomicInt64FeaturesKHR m_atomicInt64Features = {};
-	VkPhysicalDeviceFragmentShadingRateFeaturesKHR m_fragmentShadingRateFeatures = {};
 
 	VkDebugUtilsMessengerEXT m_debugUtilsMessager = VK_NULL_HANDLE;
 
@@ -333,6 +321,15 @@ private:
 		properties.pNext = &props;
 		vkGetPhysicalDeviceProperties2(m_physicalDevice, &properties);
 	}
+
+	template<typename TFeatures>
+	void getPhysicalDevicaFeatures2(TFeatures& features) const
+	{
+		VkPhysicalDeviceFeatures2 features2 = {};
+		features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
+		features2.pNext = &features;
+		vkGetPhysicalDeviceFeatures2(m_physicalDevice, &features2);
+	}
 };
 /// @}
 

+ 5 - 2
AnKi/Gr/Vulkan/ShaderProgramImpl.cpp

@@ -41,7 +41,7 @@ Error ShaderProgramImpl::init(const ShaderProgramInitInfo& inf)
 	{
 		m_shaders.emplaceBack(inf.m_computeShader);
 	}
-	else if(inf.m_graphicsShaders[ShaderType::kVertex])
+	else if(inf.m_graphicsShaders[ShaderType::kFragment])
 	{
 		for(Shader* s : inf.m_graphicsShaders)
 		{
@@ -178,7 +178,10 @@ Error ShaderProgramImpl::init(const ShaderProgramInitInfo& inf)
 	const Bool graphicsProg = !!(m_stages & ShaderTypeBit::kAllGraphics);
 	if(graphicsProg)
 	{
-		m_refl.m_attributeMask = static_cast<const ShaderImpl&>(*inf.m_graphicsShaders[ShaderType::kVertex]).m_attributeMask;
+		if(inf.m_graphicsShaders[ShaderType::kVertex])
+		{
+			m_refl.m_attributeMask = static_cast<const ShaderImpl&>(*inf.m_graphicsShaders[ShaderType::kVertex]).m_attributeMask;
+		}
 
 		m_refl.m_colorAttachmentWritemask = static_cast<const ShaderImpl&>(*inf.m_graphicsShaders[ShaderType::kFragment]).m_colorAttachmentWritemask;
 

+ 1 - 1
AnKi/Gr/Vulkan/ShaderProgramImpl.h

@@ -121,7 +121,7 @@ private:
 	class
 	{
 	public:
-		Array<VkPipelineShaderStageCreateInfo, U32(ShaderType::kFragment - ShaderType::kVertex) + 1> m_shaderCreateInfos;
+		Array<VkPipelineShaderStageCreateInfo, U32(ShaderType::kFragment - ShaderType::kVertex) + 1> m_shaderCreateInfos = {};
 		U32 m_shaderCreateInfoCount = 0;
 		PipelineFactory* m_pplineFactory = nullptr;
 	} m_graphics;

+ 2 - 5
AnKi/Math/Vec.h

@@ -145,12 +145,9 @@ public:
 	// Vec4 specific
 
 	ANKI_ENABLE_METHOD(kTComponentCount == 4 && !kHasVec4SIMD)
-	TVec(const T x_, const T y_, const T z_, const T w_)
+	constexpr TVec(const T x_, const T y_, const T z_, const T w_)
+		: m_arr{x_, y_, z_, w_}
 	{
-		x() = x_;
-		y() = y_;
-		z() = z_;
-		w() = w_;
 	}
 
 #if ANKI_ENABLE_SIMD
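
The Vec.h change replaces the component-wise assignments with a member initializer and marks the 4-component constructor constexpr (non-SIMD path only), so literal vectors can become compile-time constants. A one-line sketch, assuming a build where kHasVec4SIMD is false so this overload is selected and that Vec4 is the usual anki alias for the 4-component float TVec:

// Sketch: legal now that the non-SIMD 4-component ctor is constexpr.
constexpr anki::Vec4 kMagenta(1.0f, 0.0f, 1.0f, 1.0f); // evaluated at compile time, no static initializer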

+ 6 - 0
AnKi/ShaderCompiler/Dxc.cpp

@@ -32,6 +32,12 @@ static CString profile(ShaderType shaderType)
 	case ShaderType::kGeometry:
 		return "gs_6_7";
 		break;
+	case ShaderType::kTask:
+		return "as_6_7";
+		break;
+	case ShaderType::kMesh:
+		return "ms_6_7";
+		break;
 	case ShaderType::kCompute:
 		return "cs_6_7";
 		break;

+ 2 - 2
AnKi/ShaderCompiler/ShaderProgramCompiler.h

@@ -14,8 +14,8 @@ namespace anki {
 /// @addtogroup shader_compiler
 /// @{
 
-inline constexpr const char* kShaderBinaryMagic = "ANKISDR8"; //! WARNING: If changed change kShaderBinaryVersion
-constexpr U32 kShaderBinaryVersion = 8;
+inline constexpr const char* kShaderBinaryMagic = "ANKISDR9"; //! WARNING: If changed change kShaderBinaryVersion
+constexpr U32 kShaderBinaryVersion = 9;
 
 /// A wrapper over the POD ShaderProgramBinary class.
 /// @memberof ShaderProgramCompiler

+ 10 - 2
AnKi/ShaderCompiler/ShaderProgramParser.cpp

@@ -16,8 +16,8 @@ namespace anki {
 	return Error::kUserData
 
 inline constexpr Array<CString, U32(ShaderType::kCount)> kShaderStageNames = {{"VERTEX", "TESSELLATION_CONTROL", "TESSELLATION_EVALUATION",
-																			   "GEOMETRY", "FRAGMENT", "COMPUTE", "RAY_GEN", "ANY_HIT", "CLOSEST_HIT",
-																			   "MISS", "INTERSECTION", "CALLABLE"}};
+																			   "GEOMETRY", "TASK", "MESH", "FRAGMENT", "COMPUTE", "RAY_GEN",
+																			   "ANY_HIT", "CLOSEST_HIT", "MISS", "INTERSECTION", "CALLABLE"}};
 
 inline constexpr char kShaderHeader[] = R"(#define ANKI_%s_SHADER 1
 #define ANKI_PLATFORM_MOBILE %d
@@ -91,6 +91,14 @@ Error ShaderProgramParser::parsePragmaStart(const String* begin, const String* e
 	{
 		shaderType = ShaderType::kGeometry;
 	}
+	else if(*begin == "task")
+	{
+		shaderType = ShaderType::kTask;
+	}
+	else if(*begin == "mesh")
+	{
+		shaderType = ShaderType::kMesh;
+	}
 	else if(*begin == "frag")
 	{
 		shaderType = ShaderType::kFragment;

+ 7 - 2
Tests/Gr/GrCommon.h

@@ -6,6 +6,7 @@
 #include <AnKi/Gr.h>
 #include <AnKi/ShaderCompiler.h>
 #include <AnKi/ShaderCompiler/ShaderProgramParser.h>
+#include <AnKi/ShaderCompiler/Dxc.h>
 #include <Tests/Framework/Framework.h>
 
 namespace anki {
@@ -19,8 +20,12 @@ inline ShaderPtr createShader(CString src, ShaderType type, GrManager& gr, Const
 	DynamicArray<U8> spirv;
 	String errorLog;
 
-	ANKI_ASSERT(!"TODO");
-	// ANKI_TEST_EXPECT_NO_ERR(compileGlslToSpirv(header, type, pool, spirv, errorLog));
+	const Error err = compileHlslToSpirv(header, type, false, spirv, errorLog);
+	if(err)
+	{
+		ANKI_TEST_LOGE("Compile error:\n%s", errorLog.cstr());
+	}
+	ANKI_TEST_EXPECT_NO_ERR(err);
 
 	ShaderInitInfo initInf(type, spirv);
 	initInf.m_constValues = specVals;

+ 273 - 0
Tests/Gr/GrMeshShaders.cpp

@@ -0,0 +1,273 @@
+// Copyright (C) 2009-2023, Panagiotis Christopoulos Charitos and contributors.
+// All rights reserved.
+// Code licensed under the BSD License.
+// http://www.anki3d.org/LICENSE
+
+#include <Tests/Framework/Framework.h>
+#include <Tests/Gr/GrCommon.h>
+#include <AnKi/Gr.h>
+#include <AnKi/Util/MemoryPool.h>
+#include <AnKi/Util/HighRezTimer.h>
+
+ANKI_TEST(Gr, MeshShaders)
+{
+	constexpr U32 kTileCount = 4;
+	constexpr U32 kVertCount = 4;
+
+	g_validationCVar.set(true);
+	g_windowWidthCVar.set(64 * kTileCount);
+	g_windowHeightCVar.set(64);
+	g_meshShadersCVar.set(true);
+
+	DefaultMemoryPool::allocateSingleton(allocAligned, nullptr);
+	NativeWindow* win = createWindow();
+	GrManager* gr = createGrManager(win);
+
+	{
+		const CString taskShaderSrc = R"(
+struct Payload
+{
+	uint m_meshletIndices[64];
+};
+
+groupshared Payload s_payload;
+groupshared uint s_visibleCount;
+
+struct Meshlet
+{
+	uint m_firstIndex;
+	uint m_firstVertex;
+};
+
+[[vk::binding(3)]] StructuredBuffer<Meshlet> g_meshlets;
+
+[numthreads(64, 1, 1)] void main(uint svDispatchThreadId : SV_DISPATCHTHREADID)
+{
+	uint meshletCount, unused;
+	g_meshlets.GetDimensions(meshletCount, unused);
+
+	bool visible = ((svDispatchThreadId & 1u) == 0u) && (svDispatchThreadId < meshletCount);
+
+	s_visibleCount = 0;
+	GroupMemoryBarrierWithGroupSync();
+
+	if(visible)
+	{
+		uint index;
+		InterlockedAdd(s_visibleCount, 1u, index);
+
+		s_payload.m_meshletIndices[index] = svDispatchThreadId;
+	}
+
+	GroupMemoryBarrierWithGroupSync();
+	DispatchMesh(s_visibleCount, 1, 1, s_payload);
+})";
+
+		const CString meshShaderSrc = R"(
+struct Payload
+{
+	uint m_meshletIndices[64];
+};
+
+struct VertOut
+{
+	float4 m_svPosition : SV_POSITION;
+	float3 m_color : COLOR0;
+};
+
+struct Meshlet
+{
+	uint m_firstIndex;
+	uint m_firstVertex;
+};
+
+[[vk::binding(0)]] StructuredBuffer<uint> g_indices;
+[[vk::binding(1)]] StructuredBuffer<float4> g_positions;
+[[vk::binding(2)]] StructuredBuffer<float4> g_colors;
+[[vk::binding(3)]] StructuredBuffer<Meshlet> g_meshlets;
+
+[numthreads(6, 1, 1)] [outputtopology("triangle")] void main(in payload Payload payload, out vertices VertOut verts[4],
+                                                             out indices uint3 indices[6], uint svGroupId : SV_GROUPID,
+                                                             uint svGroupIndex : SV_GROUPINDEX)
+{
+	uint meshletIdx = payload.m_meshletIndices[svGroupId];
+	Meshlet meshlet = g_meshlets[meshletIdx];
+
+	SetMeshOutputCounts(4, 6);
+
+	if(svGroupIndex < 4)
+	{
+		verts[svGroupIndex].m_svPosition = g_positions[meshlet.m_firstVertex + svGroupIndex];
+		verts[svGroupIndex].m_color = g_colors[meshlet.m_firstVertex + svGroupIndex];
+	}
+
+	[unroll] for(uint i = 0; i < 6; ++i)
+	{
+		indices[svGroupIndex][i] = g_indices[meshlet.m_firstIndex + i];
+	}
+})";
+
+		const CString fragShaderSrc = R"(
+struct VertOut
+{
+	float4 m_svPosition : SV_POSITION;
+	float3 m_color : COLOR0;
+};
+
+float3 main(VertOut input) : SV_TARGET0
+{
+	return input.m_color;
+})";
+
+		ShaderProgramPtr prog;
+		{
+			ShaderPtr taskShader = createShader(taskShaderSrc, ShaderType::kTask, *gr);
+			ShaderPtr meshShader = createShader(meshShaderSrc, ShaderType::kMesh, *gr);
+			ShaderPtr fragShader = createShader(fragShaderSrc, ShaderType::kFragment, *gr);
+
+			ShaderProgramInitInfo progInit("Program");
+			progInit.m_graphicsShaders[ShaderType::kTask] = taskShader.get();
+			progInit.m_graphicsShaders[ShaderType::kMesh] = meshShader.get();
+			progInit.m_graphicsShaders[ShaderType::kFragment] = fragShader.get();
+			prog = gr->newShaderProgram(progInit);
+		}
+
+		BufferPtr indexBuff;
+		{
+			BufferInitInfo buffInit("Index");
+			buffInit.m_mapAccess = BufferMapAccessBit::kWrite;
+			buffInit.m_usage = BufferUsageBit::kStorageGeometryRead;
+			buffInit.m_size = sizeof(U32) * 6;
+			indexBuff = gr->newBuffer(buffInit);
+
+			void* mapped = indexBuff->map(0, kMaxPtrSize, BufferMapAccessBit::kWrite);
+			const U32 indices[] = {0, 1, 2, 2, 1, 3};
+			memcpy(mapped, indices, sizeof(indices));
+			indexBuff->unmap();
+		}
+
+		BufferPtr positionsBuff;
+		{
+			BufferInitInfo buffInit("Positions");
+			buffInit.m_mapAccess = BufferMapAccessBit::kWrite;
+			buffInit.m_usage = BufferUsageBit::kStorageGeometryRead;
+			buffInit.m_size = kVertCount * sizeof(Vec4) * kTileCount;
+			positionsBuff = gr->newBuffer(buffInit);
+
+			Vec4* mapped = static_cast<Vec4*>(positionsBuff->map(0, kMaxPtrSize, BufferMapAccessBit::kWrite));
+
+			for(U32 t = 0; t < kTileCount; t++)
+			{
+				const F32 left = (1.0f / F32(kTileCount) * F32(t)) * 2.0f - 1.0f;
+				const F32 right = (1.0f / F32(kTileCount) * F32(t + 1)) * 2.0f - 1.0f;
+				mapped[0] = Vec4(left, -1.0f, 1.0f, 1.0f);
+				mapped[1] = Vec4(right, -1.0f, 1.0f, 1.0f);
+				mapped[2] = Vec4(left, 1.0f, 1.0f, 1.0f);
+				mapped[3] = Vec4(right, 1.0f, 1.0f, 1.0f);
+
+				mapped += 4;
+			}
+
+			positionsBuff->unmap();
+		}
+
+		BufferPtr colorsBuff;
+		{
+			BufferInitInfo buffInit("Colors");
+			buffInit.m_mapAccess = BufferMapAccessBit::kWrite;
+			buffInit.m_usage = BufferUsageBit::kStorageGeometryRead;
+			buffInit.m_size = kVertCount * sizeof(Vec4) * kTileCount;
+			colorsBuff = gr->newBuffer(buffInit);
+
+			Vec4* mapped = static_cast<Vec4*>(colorsBuff->map(0, kMaxPtrSize, BufferMapAccessBit::kWrite));
+
+			const Array<Vec4, kTileCount> colors = {Vec4(1.0f, 0.0f, 0.0f, 0.0f), Vec4(0.0f, 1.0f, 0.0f, 0.0f), Vec4(0.0f, 0.0f, 1.0f, 0.0f),
+													Vec4(1.0f, 0.0f, 1.0f, 0.0f)};
+			for(U32 t = 0; t < kTileCount; t++)
+			{
+				mapped[0] = mapped[1] = mapped[2] = mapped[3] = colors[t];
+
+				mapped += 4;
+			}
+
+			colorsBuff->unmap();
+		}
+
+		BufferPtr meshletsBuff;
+		{
+			class Meshlet
+			{
+			public:
+				U32 m_firstIndex;
+				U32 m_firstVertex;
+			};
+
+			BufferInitInfo buffInit("Meshlets");
+			buffInit.m_mapAccess = BufferMapAccessBit::kWrite;
+			buffInit.m_usage = BufferUsageBit::kStorageGeometryRead;
+			buffInit.m_size = sizeof(Meshlet) * kTileCount;
+			meshletsBuff = gr->newBuffer(buffInit);
+
+			Meshlet* mapped = static_cast<Meshlet*>(meshletsBuff->map(0, kMaxPtrSize, BufferMapAccessBit::kWrite));
+
+			for(U32 t = 0; t < kTileCount; t++)
+			{
+				mapped[t].m_firstIndex = 0;
+				mapped[t].m_firstVertex = kVertCount * t;
+			}
+
+			meshletsBuff->unmap();
+		}
+
+		for(U32 i = 0; i < 100; ++i)
+		{
+			TexturePtr swapchainTex = gr->acquireNextPresentableTexture();
+			TextureViewInitInfo viewInit(swapchainTex.get(), "RTView");
+			TextureViewPtr swapchainView = gr->newTextureView(viewInit);
+
+			FramebufferInitInfo fbInit("FB");
+			fbInit.m_colorAttachmentCount = 1;
+			fbInit.m_colorAttachments[0].m_textureView = swapchainView;
+			fbInit.m_colorAttachments[0].m_clearValue.m_colorf = {1.0f, 0.0f, 1.0f, 0.0f};
+			FramebufferPtr fb = gr->newFramebuffer(fbInit);
+
+			CommandBufferInitInfo cmdbinit;
+			CommandBufferPtr cmdb = gr->newCommandBuffer(cmdbinit);
+
+			cmdb->setViewport(0, 0, g_windowWidthCVar.get(), g_windowHeightCVar.get());
+
+			TextureBarrierInfo barrier;
+			barrier.m_texture = swapchainTex.get();
+			barrier.m_previousUsage = TextureUsageBit::kNone;
+			barrier.m_nextUsage = TextureUsageBit::kFramebufferWrite;
+			cmdb->setPipelineBarrier({&barrier, 1}, {}, {});
+
+			cmdb->beginRenderPass(fb.get(), {TextureUsageBit::kFramebufferWrite}, TextureUsageBit::kNone);
+
+			cmdb->bindStorageBuffer(0, 0, indexBuff.get(), 0, kMaxPtrSize);
+			cmdb->bindStorageBuffer(0, 1, positionsBuff.get(), 0, kMaxPtrSize);
+			cmdb->bindStorageBuffer(0, 2, colorsBuff.get(), 0, kMaxPtrSize);
+			cmdb->bindStorageBuffer(0, 3, meshletsBuff.get(), 0, kMaxPtrSize);
+
+			cmdb->bindShaderProgram(prog.get());
+
+			cmdb->drawMeshTasks(1, 1, 1);
+
+			cmdb->endRenderPass();
+
+			barrier.m_previousUsage = TextureUsageBit::kFramebufferWrite;
+			barrier.m_nextUsage = TextureUsageBit::kPresent;
+			cmdb->setPipelineBarrier({&barrier, 1}, {}, {});
+
+			cmdb->flush();
+
+			gr->swapBuffers();
+
+			HighRezTimer::sleep(1.0_sec / 60.0);
+		}
+	}
+
+	GrManager::freeSingleton();
+	NativeWindow::freeSingleton();
+	DefaultMemoryPool::freeSingleton();
+}

+ 312 - 0
ThirdParty/Khronos/vk_video/vulkan_video_codec_h264std.h

@@ -0,0 +1,312 @@
+#ifndef VULKAN_VIDEO_CODEC_H264STD_H_
+#define VULKAN_VIDEO_CODEC_H264STD_H_ 1
+
+/*
+** Copyright 2015-2023 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+// vulkan_video_codec_h264std is a preprocessor guard. Do not pass it to API calls.
+#define vulkan_video_codec_h264std 1
+#include "vulkan_video_codecs_common.h"
+#define STD_VIDEO_H264_CPB_CNT_LIST_SIZE  32
+#define STD_VIDEO_H264_SCALING_LIST_4X4_NUM_LISTS 6
+#define STD_VIDEO_H264_SCALING_LIST_4X4_NUM_ELEMENTS 16
+#define STD_VIDEO_H264_SCALING_LIST_8X8_NUM_LISTS 6
+#define STD_VIDEO_H264_SCALING_LIST_8X8_NUM_ELEMENTS 64
+#define STD_VIDEO_H264_MAX_NUM_LIST_REF   32
+#define STD_VIDEO_H264_MAX_CHROMA_PLANES  2
+#define STD_VIDEO_H264_NO_REFERENCE_PICTURE 0xFF
+
+typedef enum StdVideoH264ChromaFormatIdc {
+    STD_VIDEO_H264_CHROMA_FORMAT_IDC_MONOCHROME = 0,
+    STD_VIDEO_H264_CHROMA_FORMAT_IDC_420 = 1,
+    STD_VIDEO_H264_CHROMA_FORMAT_IDC_422 = 2,
+    STD_VIDEO_H264_CHROMA_FORMAT_IDC_444 = 3,
+    STD_VIDEO_H264_CHROMA_FORMAT_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_CHROMA_FORMAT_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264ChromaFormatIdc;
+
+typedef enum StdVideoH264ProfileIdc {
+    STD_VIDEO_H264_PROFILE_IDC_BASELINE = 66,
+    STD_VIDEO_H264_PROFILE_IDC_MAIN = 77,
+    STD_VIDEO_H264_PROFILE_IDC_HIGH = 100,
+    STD_VIDEO_H264_PROFILE_IDC_HIGH_444_PREDICTIVE = 244,
+    STD_VIDEO_H264_PROFILE_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_PROFILE_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264ProfileIdc;
+
+typedef enum StdVideoH264LevelIdc {
+    STD_VIDEO_H264_LEVEL_IDC_1_0 = 0,
+    STD_VIDEO_H264_LEVEL_IDC_1_1 = 1,
+    STD_VIDEO_H264_LEVEL_IDC_1_2 = 2,
+    STD_VIDEO_H264_LEVEL_IDC_1_3 = 3,
+    STD_VIDEO_H264_LEVEL_IDC_2_0 = 4,
+    STD_VIDEO_H264_LEVEL_IDC_2_1 = 5,
+    STD_VIDEO_H264_LEVEL_IDC_2_2 = 6,
+    STD_VIDEO_H264_LEVEL_IDC_3_0 = 7,
+    STD_VIDEO_H264_LEVEL_IDC_3_1 = 8,
+    STD_VIDEO_H264_LEVEL_IDC_3_2 = 9,
+    STD_VIDEO_H264_LEVEL_IDC_4_0 = 10,
+    STD_VIDEO_H264_LEVEL_IDC_4_1 = 11,
+    STD_VIDEO_H264_LEVEL_IDC_4_2 = 12,
+    STD_VIDEO_H264_LEVEL_IDC_5_0 = 13,
+    STD_VIDEO_H264_LEVEL_IDC_5_1 = 14,
+    STD_VIDEO_H264_LEVEL_IDC_5_2 = 15,
+    STD_VIDEO_H264_LEVEL_IDC_6_0 = 16,
+    STD_VIDEO_H264_LEVEL_IDC_6_1 = 17,
+    STD_VIDEO_H264_LEVEL_IDC_6_2 = 18,
+    STD_VIDEO_H264_LEVEL_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_LEVEL_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264LevelIdc;
+
+typedef enum StdVideoH264PocType {
+    STD_VIDEO_H264_POC_TYPE_0 = 0,
+    STD_VIDEO_H264_POC_TYPE_1 = 1,
+    STD_VIDEO_H264_POC_TYPE_2 = 2,
+    STD_VIDEO_H264_POC_TYPE_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_POC_TYPE_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264PocType;
+
+typedef enum StdVideoH264AspectRatioIdc {
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_UNSPECIFIED = 0,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_SQUARE = 1,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_12_11 = 2,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_10_11 = 3,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_16_11 = 4,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_40_33 = 5,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_24_11 = 6,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_20_11 = 7,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_32_11 = 8,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_80_33 = 9,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_18_11 = 10,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_15_11 = 11,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_64_33 = 12,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_160_99 = 13,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_4_3 = 14,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_3_2 = 15,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_2_1 = 16,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_EXTENDED_SAR = 255,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_ASPECT_RATIO_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264AspectRatioIdc;
+
+typedef enum StdVideoH264WeightedBipredIdc {
+    STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_DEFAULT = 0,
+    STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_EXPLICIT = 1,
+    STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_IMPLICIT = 2,
+    STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264WeightedBipredIdc;
+
+typedef enum StdVideoH264ModificationOfPicNumsIdc {
+    STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_SUBTRACT = 0,
+    STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_ADD = 1,
+    STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_LONG_TERM = 2,
+    STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_END = 3,
+    STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264ModificationOfPicNumsIdc;
+
+typedef enum StdVideoH264MemMgmtControlOp {
+    STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_END = 0,
+    STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_SHORT_TERM = 1,
+    STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_LONG_TERM = 2,
+    STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_LONG_TERM = 3,
+    STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_SET_MAX_LONG_TERM_INDEX = 4,
+    STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_ALL = 5,
+    STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_CURRENT_AS_LONG_TERM = 6,
+    STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264MemMgmtControlOp;
+
+typedef enum StdVideoH264CabacInitIdc {
+    STD_VIDEO_H264_CABAC_INIT_IDC_0 = 0,
+    STD_VIDEO_H264_CABAC_INIT_IDC_1 = 1,
+    STD_VIDEO_H264_CABAC_INIT_IDC_2 = 2,
+    STD_VIDEO_H264_CABAC_INIT_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_CABAC_INIT_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264CabacInitIdc;
+
+typedef enum StdVideoH264DisableDeblockingFilterIdc {
+    STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_DISABLED = 0,
+    STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_ENABLED = 1,
+    STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_PARTIAL = 2,
+    STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264DisableDeblockingFilterIdc;
+
+typedef enum StdVideoH264SliceType {
+    STD_VIDEO_H264_SLICE_TYPE_P = 0,
+    STD_VIDEO_H264_SLICE_TYPE_B = 1,
+    STD_VIDEO_H264_SLICE_TYPE_I = 2,
+    STD_VIDEO_H264_SLICE_TYPE_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_SLICE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264SliceType;
+
+typedef enum StdVideoH264PictureType {
+    STD_VIDEO_H264_PICTURE_TYPE_P = 0,
+    STD_VIDEO_H264_PICTURE_TYPE_B = 1,
+    STD_VIDEO_H264_PICTURE_TYPE_I = 2,
+    STD_VIDEO_H264_PICTURE_TYPE_IDR = 5,
+    STD_VIDEO_H264_PICTURE_TYPE_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_PICTURE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264PictureType;
+
+typedef enum StdVideoH264NonVclNaluType {
+    STD_VIDEO_H264_NON_VCL_NALU_TYPE_SPS = 0,
+    STD_VIDEO_H264_NON_VCL_NALU_TYPE_PPS = 1,
+    STD_VIDEO_H264_NON_VCL_NALU_TYPE_AUD = 2,
+    STD_VIDEO_H264_NON_VCL_NALU_TYPE_PREFIX = 3,
+    STD_VIDEO_H264_NON_VCL_NALU_TYPE_END_OF_SEQUENCE = 4,
+    STD_VIDEO_H264_NON_VCL_NALU_TYPE_END_OF_STREAM = 5,
+    STD_VIDEO_H264_NON_VCL_NALU_TYPE_PRECODED = 6,
+    STD_VIDEO_H264_NON_VCL_NALU_TYPE_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H264_NON_VCL_NALU_TYPE_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH264NonVclNaluType;
+typedef struct StdVideoH264SpsVuiFlags {
+    uint32_t    aspect_ratio_info_present_flag : 1;
+    uint32_t    overscan_info_present_flag : 1;
+    uint32_t    overscan_appropriate_flag : 1;
+    uint32_t    video_signal_type_present_flag : 1;
+    uint32_t    video_full_range_flag : 1;
+    uint32_t    color_description_present_flag : 1;
+    uint32_t    chroma_loc_info_present_flag : 1;
+    uint32_t    timing_info_present_flag : 1;
+    uint32_t    fixed_frame_rate_flag : 1;
+    uint32_t    bitstream_restriction_flag : 1;
+    uint32_t    nal_hrd_parameters_present_flag : 1;
+    uint32_t    vcl_hrd_parameters_present_flag : 1;
+} StdVideoH264SpsVuiFlags;
+
+typedef struct StdVideoH264HrdParameters {
+    uint8_t     cpb_cnt_minus1;
+    uint8_t     bit_rate_scale;
+    uint8_t     cpb_size_scale;
+    uint8_t     reserved1;
+    uint32_t    bit_rate_value_minus1[STD_VIDEO_H264_CPB_CNT_LIST_SIZE];
+    uint32_t    cpb_size_value_minus1[STD_VIDEO_H264_CPB_CNT_LIST_SIZE];
+    uint8_t     cbr_flag[STD_VIDEO_H264_CPB_CNT_LIST_SIZE];
+    uint32_t    initial_cpb_removal_delay_length_minus1;
+    uint32_t    cpb_removal_delay_length_minus1;
+    uint32_t    dpb_output_delay_length_minus1;
+    uint32_t    time_offset_length;
+} StdVideoH264HrdParameters;
+
+typedef struct StdVideoH264SequenceParameterSetVui {
+    StdVideoH264SpsVuiFlags             flags;
+    StdVideoH264AspectRatioIdc          aspect_ratio_idc;
+    uint16_t                            sar_width;
+    uint16_t                            sar_height;
+    uint8_t                             video_format;
+    uint8_t                             colour_primaries;
+    uint8_t                             transfer_characteristics;
+    uint8_t                             matrix_coefficients;
+    uint32_t                            num_units_in_tick;
+    uint32_t                            time_scale;
+    uint8_t                             max_num_reorder_frames;
+    uint8_t                             max_dec_frame_buffering;
+    uint8_t                             chroma_sample_loc_type_top_field;
+    uint8_t                             chroma_sample_loc_type_bottom_field;
+    uint32_t                            reserved1;
+    const StdVideoH264HrdParameters*    pHrdParameters;
+} StdVideoH264SequenceParameterSetVui;
+
+typedef struct StdVideoH264SpsFlags {
+    uint32_t    constraint_set0_flag : 1;
+    uint32_t    constraint_set1_flag : 1;
+    uint32_t    constraint_set2_flag : 1;
+    uint32_t    constraint_set3_flag : 1;
+    uint32_t    constraint_set4_flag : 1;
+    uint32_t    constraint_set5_flag : 1;
+    uint32_t    direct_8x8_inference_flag : 1;
+    uint32_t    mb_adaptive_frame_field_flag : 1;
+    uint32_t    frame_mbs_only_flag : 1;
+    uint32_t    delta_pic_order_always_zero_flag : 1;
+    uint32_t    separate_colour_plane_flag : 1;
+    uint32_t    gaps_in_frame_num_value_allowed_flag : 1;
+    uint32_t    qpprime_y_zero_transform_bypass_flag : 1;
+    uint32_t    frame_cropping_flag : 1;
+    uint32_t    seq_scaling_matrix_present_flag : 1;
+    uint32_t    vui_parameters_present_flag : 1;
+} StdVideoH264SpsFlags;
+
+typedef struct StdVideoH264ScalingLists {
+    uint16_t    scaling_list_present_mask;
+    uint16_t    use_default_scaling_matrix_mask;
+    uint8_t     ScalingList4x4[STD_VIDEO_H264_SCALING_LIST_4X4_NUM_LISTS][STD_VIDEO_H264_SCALING_LIST_4X4_NUM_ELEMENTS];
+    uint8_t     ScalingList8x8[STD_VIDEO_H264_SCALING_LIST_8X8_NUM_LISTS][STD_VIDEO_H264_SCALING_LIST_8X8_NUM_ELEMENTS];
+} StdVideoH264ScalingLists;
+
+typedef struct StdVideoH264SequenceParameterSet {
+    StdVideoH264SpsFlags                          flags;
+    StdVideoH264ProfileIdc                        profile_idc;
+    StdVideoH264LevelIdc                          level_idc;
+    StdVideoH264ChromaFormatIdc                   chroma_format_idc;
+    uint8_t                                       seq_parameter_set_id;
+    uint8_t                                       bit_depth_luma_minus8;
+    uint8_t                                       bit_depth_chroma_minus8;
+    uint8_t                                       log2_max_frame_num_minus4;
+    StdVideoH264PocType                           pic_order_cnt_type;
+    int32_t                                       offset_for_non_ref_pic;
+    int32_t                                       offset_for_top_to_bottom_field;
+    uint8_t                                       log2_max_pic_order_cnt_lsb_minus4;
+    uint8_t                                       num_ref_frames_in_pic_order_cnt_cycle;
+    uint8_t                                       max_num_ref_frames;
+    uint8_t                                       reserved1;
+    uint32_t                                      pic_width_in_mbs_minus1;
+    uint32_t                                      pic_height_in_map_units_minus1;
+    uint32_t                                      frame_crop_left_offset;
+    uint32_t                                      frame_crop_right_offset;
+    uint32_t                                      frame_crop_top_offset;
+    uint32_t                                      frame_crop_bottom_offset;
+    uint32_t                                      reserved2;
+    const int32_t*                                pOffsetForRefFrame;
+    const StdVideoH264ScalingLists*               pScalingLists;
+    const StdVideoH264SequenceParameterSetVui*    pSequenceParameterSetVui;
+} StdVideoH264SequenceParameterSet;
+
+typedef struct StdVideoH264PpsFlags {
+    uint32_t    transform_8x8_mode_flag : 1;
+    uint32_t    redundant_pic_cnt_present_flag : 1;
+    uint32_t    constrained_intra_pred_flag : 1;
+    uint32_t    deblocking_filter_control_present_flag : 1;
+    uint32_t    weighted_pred_flag : 1;
+    uint32_t    bottom_field_pic_order_in_frame_present_flag : 1;
+    uint32_t    entropy_coding_mode_flag : 1;
+    uint32_t    pic_scaling_matrix_present_flag : 1;
+} StdVideoH264PpsFlags;
+
+typedef struct StdVideoH264PictureParameterSet {
+    StdVideoH264PpsFlags               flags;
+    uint8_t                            seq_parameter_set_id;
+    uint8_t                            pic_parameter_set_id;
+    uint8_t                            num_ref_idx_l0_default_active_minus1;
+    uint8_t                            num_ref_idx_l1_default_active_minus1;
+    StdVideoH264WeightedBipredIdc      weighted_bipred_idc;
+    int8_t                             pic_init_qp_minus26;
+    int8_t                             pic_init_qs_minus26;
+    int8_t                             chroma_qp_index_offset;
+    int8_t                             second_chroma_qp_index_offset;
+    const StdVideoH264ScalingLists*    pScalingLists;
+} StdVideoH264PictureParameterSet;
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif

+ 77 - 0
ThirdParty/Khronos/vk_video/vulkan_video_codec_h264std_decode.h

@@ -0,0 +1,77 @@
+#ifndef VULKAN_VIDEO_CODEC_H264STD_DECODE_H_
+#define VULKAN_VIDEO_CODEC_H264STD_DECODE_H_ 1
+
+/*
+** Copyright 2015-2023 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+// vulkan_video_codec_h264std_decode is a preprocessor guard. Do not pass it to API calls.
+#define vulkan_video_codec_h264std_decode 1
+#include "vulkan_video_codec_h264std.h"
+
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0)
+
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_API_VERSION_1_0_0
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_DECODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h264_decode"
+#define STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE 2
+
+typedef enum StdVideoDecodeH264FieldOrderCount {
+    STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_TOP = 0,
+    STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_BOTTOM = 1,
+    STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_MAX_ENUM = 0x7FFFFFFF
+} StdVideoDecodeH264FieldOrderCount;
+typedef struct StdVideoDecodeH264PictureInfoFlags {
+    uint32_t    field_pic_flag : 1;
+    uint32_t    is_intra : 1;
+    uint32_t    IdrPicFlag : 1;
+    uint32_t    bottom_field_flag : 1;
+    uint32_t    is_reference : 1;
+    uint32_t    complementary_field_pair : 1;
+} StdVideoDecodeH264PictureInfoFlags;
+
+typedef struct StdVideoDecodeH264PictureInfo {
+    StdVideoDecodeH264PictureInfoFlags    flags;
+    uint8_t                               seq_parameter_set_id;
+    uint8_t                               pic_parameter_set_id;
+    uint8_t                               reserved1;
+    uint8_t                               reserved2;
+    uint16_t                              frame_num;
+    uint16_t                              idr_pic_id;
+    int32_t                               PicOrderCnt[STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE];
+} StdVideoDecodeH264PictureInfo;
+
+typedef struct StdVideoDecodeH264ReferenceInfoFlags {
+    uint32_t    top_field_flag : 1;
+    uint32_t    bottom_field_flag : 1;
+    uint32_t    used_for_long_term_reference : 1;
+    uint32_t    is_non_existing : 1;
+} StdVideoDecodeH264ReferenceInfoFlags;
+
+typedef struct StdVideoDecodeH264ReferenceInfo {
+    StdVideoDecodeH264ReferenceInfoFlags    flags;
+    uint16_t                                FrameNum;
+    uint16_t                                reserved;
+    int32_t                                 PicOrderCnt[STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE];
+} StdVideoDecodeH264ReferenceInfo;
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
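A hedged usage sketch for the decode structs above: filling StdVideoDecodeH264PictureInfo for a progressive IDR frame. Only fields declared in this header are touched; the helper name is ours.

#include "vulkan_video_codec_h264std_decode.h"

static StdVideoDecodeH264PictureInfo makeIdrPictureInfo(uint8_t spsId, uint8_t ppsId)
{
    StdVideoDecodeH264PictureInfo info = {0};
    info.flags.IdrPicFlag = 1;     /* instantaneous decoder refresh */
    info.flags.is_reference = 1;   /* picture stays in the DPB */
    info.flags.field_pic_flag = 0; /* progressive frame, not a field */
    info.seq_parameter_set_id = spsId;
    info.pic_parameter_set_id = ppsId;
    info.frame_num = 0;
    info.idr_pic_id = 0;
    /* Both field order counts are provided even for frame pictures. */
    info.PicOrderCnt[STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_TOP] = 0;
    info.PicOrderCnt[STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_BOTTOM] = 0;
    return info;
}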

+ 147 - 0
ThirdParty/Khronos/vk_video/vulkan_video_codec_h264std_encode.h

@@ -0,0 +1,147 @@
+#ifndef VULKAN_VIDEO_CODEC_H264STD_ENCODE_H_
+#define VULKAN_VIDEO_CODEC_H264STD_ENCODE_H_ 1
+
+/*
+** Copyright 2015-2023 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+// vulkan_video_codec_h264std_encode is a preprocessor guard. Do not pass it to API calls.
+#define vulkan_video_codec_h264std_encode 1
+#include "vulkan_video_codec_h264std.h"
+// Vulkan 0.9 provisional Vulkan video H.264 encode std specification version number
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_API_VERSION_0_9_11 VK_MAKE_VIDEO_STD_VERSION(0, 9, 11)
+
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_API_VERSION_0_9_11
+#define VK_STD_VULKAN_VIDEO_CODEC_H264_ENCODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h264_encode"
+typedef struct StdVideoEncodeH264WeightTableFlags {
+    uint32_t    luma_weight_l0_flag;
+    uint32_t    chroma_weight_l0_flag;
+    uint32_t    luma_weight_l1_flag;
+    uint32_t    chroma_weight_l1_flag;
+} StdVideoEncodeH264WeightTableFlags;
+
+typedef struct StdVideoEncodeH264WeightTable {
+    StdVideoEncodeH264WeightTableFlags    flags;
+    uint8_t                               luma_log2_weight_denom;
+    uint8_t                               chroma_log2_weight_denom;
+    int8_t                                luma_weight_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF];
+    int8_t                                luma_offset_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF];
+    int8_t                                chroma_weight_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES];
+    int8_t                                chroma_offset_l0[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES];
+    int8_t                                luma_weight_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF];
+    int8_t                                luma_offset_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF];
+    int8_t                                chroma_weight_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES];
+    int8_t                                chroma_offset_l1[STD_VIDEO_H264_MAX_NUM_LIST_REF][STD_VIDEO_H264_MAX_CHROMA_PLANES];
+} StdVideoEncodeH264WeightTable;
+
+typedef struct StdVideoEncodeH264SliceHeaderFlags {
+    uint32_t    direct_spatial_mv_pred_flag : 1;
+    uint32_t    num_ref_idx_active_override_flag : 1;
+    uint32_t    reserved : 30;
+} StdVideoEncodeH264SliceHeaderFlags;
+
+typedef struct StdVideoEncodeH264PictureInfoFlags {
+    uint32_t    IdrPicFlag : 1;
+    uint32_t    is_reference : 1;
+    uint32_t    no_output_of_prior_pics_flag : 1;
+    uint32_t    long_term_reference_flag : 1;
+    uint32_t    adaptive_ref_pic_marking_mode_flag : 1;
+    uint32_t    reserved : 27;
+} StdVideoEncodeH264PictureInfoFlags;
+
+typedef struct StdVideoEncodeH264ReferenceInfoFlags {
+    uint32_t    used_for_long_term_reference : 1;
+    uint32_t    reserved : 31;
+} StdVideoEncodeH264ReferenceInfoFlags;
+
+typedef struct StdVideoEncodeH264ReferenceListsInfoFlags {
+    uint32_t    ref_pic_list_modification_flag_l0 : 1;
+    uint32_t    ref_pic_list_modification_flag_l1 : 1;
+    uint32_t    reserved : 30;
+} StdVideoEncodeH264ReferenceListsInfoFlags;
+
+typedef struct StdVideoEncodeH264RefListModEntry {
+    StdVideoH264ModificationOfPicNumsIdc    modification_of_pic_nums_idc;
+    uint16_t                                abs_diff_pic_num_minus1;
+    uint16_t                                long_term_pic_num;
+} StdVideoEncodeH264RefListModEntry;
+
+typedef struct StdVideoEncodeH264RefPicMarkingEntry {
+    StdVideoH264MemMgmtControlOp    memory_management_control_operation;
+    uint16_t                        difference_of_pic_nums_minus1;
+    uint16_t                        long_term_pic_num;
+    uint16_t                        long_term_frame_idx;
+    uint16_t                        max_long_term_frame_idx_plus1;
+} StdVideoEncodeH264RefPicMarkingEntry;
+
+typedef struct StdVideoEncodeH264ReferenceListsInfo {
+    StdVideoEncodeH264ReferenceListsInfoFlags      flags;
+    uint8_t                                        num_ref_idx_l0_active_minus1;
+    uint8_t                                        num_ref_idx_l1_active_minus1;
+    uint8_t                                        RefPicList0[STD_VIDEO_H264_MAX_NUM_LIST_REF];
+    uint8_t                                        RefPicList1[STD_VIDEO_H264_MAX_NUM_LIST_REF];
+    uint8_t                                        refList0ModOpCount;
+    uint8_t                                        refList1ModOpCount;
+    uint8_t                                        refPicMarkingOpCount;
+    uint8_t                                        reserved1[7];
+    const StdVideoEncodeH264RefListModEntry*       pRefList0ModOperations;
+    const StdVideoEncodeH264RefListModEntry*       pRefList1ModOperations;
+    const StdVideoEncodeH264RefPicMarkingEntry*    pRefPicMarkingOperations;
+} StdVideoEncodeH264ReferenceListsInfo;
+
+typedef struct StdVideoEncodeH264PictureInfo {
+    StdVideoEncodeH264PictureInfoFlags             flags;
+    uint8_t                                        seq_parameter_set_id;
+    uint8_t                                        pic_parameter_set_id;
+    uint16_t                                       idr_pic_id;
+    StdVideoH264PictureType                        primary_pic_type;
+    uint32_t                                       frame_num;
+    int32_t                                        PicOrderCnt;
+    uint8_t                                        temporal_id;
+    uint8_t                                        reserved1[3];
+    const StdVideoEncodeH264ReferenceListsInfo*    pRefLists;
+} StdVideoEncodeH264PictureInfo;
+
+typedef struct StdVideoEncodeH264ReferenceInfo {
+    StdVideoEncodeH264ReferenceInfoFlags    flags;
+    StdVideoH264PictureType                 primary_pic_type;
+    uint32_t                                FrameNum;
+    int32_t                                 PicOrderCnt;
+    uint16_t                                long_term_pic_num;
+    uint16_t                                long_term_frame_idx;
+    uint8_t                                 temporal_id;
+} StdVideoEncodeH264ReferenceInfo;
+
+typedef struct StdVideoEncodeH264SliceHeader {
+    StdVideoEncodeH264SliceHeaderFlags        flags;
+    uint32_t                                  first_mb_in_slice;
+    StdVideoH264SliceType                     slice_type;
+    int8_t                                    slice_alpha_c0_offset_div2;
+    int8_t                                    slice_beta_offset_div2;
+    int8_t                                    slice_qp_delta;
+    uint8_t                                   reserved1;
+    StdVideoH264CabacInitIdc                  cabac_init_idc;
+    StdVideoH264DisableDeblockingFilterIdc    disable_deblocking_filter_idc;
+    const StdVideoEncodeH264WeightTable*      pWeightTable;
+} StdVideoEncodeH264SliceHeader;
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
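For the encode side, a sketch along the same lines: a StdVideoEncodeH264PictureInfo for an IDR frame with no reference lists. STD_VIDEO_H264_PICTURE_TYPE_IDR is assumed to come from the StdVideoH264PictureType enum in the base vulkan_video_codec_h264std.h (not shown in this hunk).

#include <stddef.h>
#include "vulkan_video_codec_h264std_encode.h"

static StdVideoEncodeH264PictureInfo makeIdrEncodeInfo(uint8_t spsId, uint8_t ppsId)
{
    StdVideoEncodeH264PictureInfo info = {0};
    info.flags.IdrPicFlag = 1;
    info.flags.is_reference = 1;
    info.primary_pic_type = STD_VIDEO_H264_PICTURE_TYPE_IDR; /* enum from the base h264std header */
    info.seq_parameter_set_id = spsId;
    info.pic_parameter_set_id = ppsId;
    info.idr_pic_id = 0;
    info.frame_num = 0;
    info.PicOrderCnt = 0;
    info.pRefLists = NULL; /* an IDR frame references no other pictures */
    return info;
}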

+ 446 - 0
ThirdParty/Khronos/vk_video/vulkan_video_codec_h265std.h

@@ -0,0 +1,446 @@
+#ifndef VULKAN_VIDEO_CODEC_H265STD_H_
+#define VULKAN_VIDEO_CODEC_H265STD_H_ 1
+
+/*
+** Copyright 2015-2023 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+// vulkan_video_codec_h265std is a preprocessor guard. Do not pass it to API calls.
+#define vulkan_video_codec_h265std 1
+#include "vulkan_video_codecs_common.h"
+#define STD_VIDEO_H265_CPB_CNT_LIST_SIZE  32
+#define STD_VIDEO_H265_SUBLAYERS_LIST_SIZE 7
+#define STD_VIDEO_H265_SCALING_LIST_4X4_NUM_LISTS 6
+#define STD_VIDEO_H265_SCALING_LIST_4X4_NUM_ELEMENTS 16
+#define STD_VIDEO_H265_SCALING_LIST_8X8_NUM_LISTS 6
+#define STD_VIDEO_H265_SCALING_LIST_8X8_NUM_ELEMENTS 64
+#define STD_VIDEO_H265_SCALING_LIST_16X16_NUM_LISTS 6
+#define STD_VIDEO_H265_SCALING_LIST_16X16_NUM_ELEMENTS 64
+#define STD_VIDEO_H265_SCALING_LIST_32X32_NUM_LISTS 2
+#define STD_VIDEO_H265_SCALING_LIST_32X32_NUM_ELEMENTS 64
+#define STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE 6
+#define STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_COLS_LIST_SIZE 19
+#define STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_ROWS_LIST_SIZE 21
+#define STD_VIDEO_H265_PREDICTOR_PALETTE_COMPONENTS_LIST_SIZE 3
+#define STD_VIDEO_H265_PREDICTOR_PALETTE_COMP_ENTRIES_LIST_SIZE 128
+#define STD_VIDEO_H265_MAX_NUM_LIST_REF   15
+#define STD_VIDEO_H265_MAX_CHROMA_PLANES  2
+#define STD_VIDEO_H265_MAX_SHORT_TERM_REF_PIC_SETS 64
+#define STD_VIDEO_H265_MAX_DPB_SIZE       16
+#define STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS 32
+#define STD_VIDEO_H265_MAX_LONG_TERM_PICS 16
+#define STD_VIDEO_H265_MAX_DELTA_POC      48
+#define STD_VIDEO_H265_NO_REFERENCE_PICTURE 0xFF
+
+typedef enum StdVideoH265ChromaFormatIdc {
+    STD_VIDEO_H265_CHROMA_FORMAT_IDC_MONOCHROME = 0,
+    STD_VIDEO_H265_CHROMA_FORMAT_IDC_420 = 1,
+    STD_VIDEO_H265_CHROMA_FORMAT_IDC_422 = 2,
+    STD_VIDEO_H265_CHROMA_FORMAT_IDC_444 = 3,
+    STD_VIDEO_H265_CHROMA_FORMAT_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H265_CHROMA_FORMAT_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH265ChromaFormatIdc;
+
+typedef enum StdVideoH265ProfileIdc {
+    STD_VIDEO_H265_PROFILE_IDC_MAIN = 1,
+    STD_VIDEO_H265_PROFILE_IDC_MAIN_10 = 2,
+    STD_VIDEO_H265_PROFILE_IDC_MAIN_STILL_PICTURE = 3,
+    STD_VIDEO_H265_PROFILE_IDC_FORMAT_RANGE_EXTENSIONS = 4,
+    STD_VIDEO_H265_PROFILE_IDC_SCC_EXTENSIONS = 9,
+    STD_VIDEO_H265_PROFILE_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H265_PROFILE_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH265ProfileIdc;
+
+typedef enum StdVideoH265LevelIdc {
+    STD_VIDEO_H265_LEVEL_IDC_1_0 = 0,
+    STD_VIDEO_H265_LEVEL_IDC_2_0 = 1,
+    STD_VIDEO_H265_LEVEL_IDC_2_1 = 2,
+    STD_VIDEO_H265_LEVEL_IDC_3_0 = 3,
+    STD_VIDEO_H265_LEVEL_IDC_3_1 = 4,
+    STD_VIDEO_H265_LEVEL_IDC_4_0 = 5,
+    STD_VIDEO_H265_LEVEL_IDC_4_1 = 6,
+    STD_VIDEO_H265_LEVEL_IDC_5_0 = 7,
+    STD_VIDEO_H265_LEVEL_IDC_5_1 = 8,
+    STD_VIDEO_H265_LEVEL_IDC_5_2 = 9,
+    STD_VIDEO_H265_LEVEL_IDC_6_0 = 10,
+    STD_VIDEO_H265_LEVEL_IDC_6_1 = 11,
+    STD_VIDEO_H265_LEVEL_IDC_6_2 = 12,
+    STD_VIDEO_H265_LEVEL_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H265_LEVEL_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH265LevelIdc;
+
+typedef enum StdVideoH265SliceType {
+    STD_VIDEO_H265_SLICE_TYPE_B = 0,
+    STD_VIDEO_H265_SLICE_TYPE_P = 1,
+    STD_VIDEO_H265_SLICE_TYPE_I = 2,
+    STD_VIDEO_H265_SLICE_TYPE_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H265_SLICE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH265SliceType;
+
+typedef enum StdVideoH265PictureType {
+    STD_VIDEO_H265_PICTURE_TYPE_P = 0,
+    STD_VIDEO_H265_PICTURE_TYPE_B = 1,
+    STD_VIDEO_H265_PICTURE_TYPE_I = 2,
+    STD_VIDEO_H265_PICTURE_TYPE_IDR = 3,
+    STD_VIDEO_H265_PICTURE_TYPE_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H265_PICTURE_TYPE_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH265PictureType;
+
+typedef enum StdVideoH265AspectRatioIdc {
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_UNSPECIFIED = 0,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_SQUARE = 1,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_12_11 = 2,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_10_11 = 3,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_16_11 = 4,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_40_33 = 5,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_24_11 = 6,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_20_11 = 7,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_32_11 = 8,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_80_33 = 9,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_18_11 = 10,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_15_11 = 11,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_64_33 = 12,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_160_99 = 13,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_4_3 = 14,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_3_2 = 15,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_2_1 = 16,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_EXTENDED_SAR = 255,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_INVALID = 0x7FFFFFFF,
+    STD_VIDEO_H265_ASPECT_RATIO_IDC_MAX_ENUM = 0x7FFFFFFF
+} StdVideoH265AspectRatioIdc;
+typedef struct StdVideoH265DecPicBufMgr {
+    uint32_t    max_latency_increase_plus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE];
+    uint8_t     max_dec_pic_buffering_minus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE];
+    uint8_t     max_num_reorder_pics[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE];
+} StdVideoH265DecPicBufMgr;
+
+typedef struct StdVideoH265SubLayerHrdParameters {
+    uint32_t    bit_rate_value_minus1[STD_VIDEO_H265_CPB_CNT_LIST_SIZE];
+    uint32_t    cpb_size_value_minus1[STD_VIDEO_H265_CPB_CNT_LIST_SIZE];
+    uint32_t    cpb_size_du_value_minus1[STD_VIDEO_H265_CPB_CNT_LIST_SIZE];
+    uint32_t    bit_rate_du_value_minus1[STD_VIDEO_H265_CPB_CNT_LIST_SIZE];
+    uint32_t    cbr_flag;
+} StdVideoH265SubLayerHrdParameters;
+
+typedef struct StdVideoH265HrdFlags {
+    uint32_t    nal_hrd_parameters_present_flag : 1;
+    uint32_t    vcl_hrd_parameters_present_flag : 1;
+    uint32_t    sub_pic_hrd_params_present_flag : 1;
+    uint32_t    sub_pic_cpb_params_in_pic_timing_sei_flag : 1;
+    uint32_t    fixed_pic_rate_general_flag : 8;
+    uint32_t    fixed_pic_rate_within_cvs_flag : 8;
+    uint32_t    low_delay_hrd_flag : 8;
+} StdVideoH265HrdFlags;
+
+typedef struct StdVideoH265HrdParameters {
+    StdVideoH265HrdFlags                        flags;
+    uint8_t                                     tick_divisor_minus2;
+    uint8_t                                     du_cpb_removal_delay_increment_length_minus1;
+    uint8_t                                     dpb_output_delay_du_length_minus1;
+    uint8_t                                     bit_rate_scale;
+    uint8_t                                     cpb_size_scale;
+    uint8_t                                     cpb_size_du_scale;
+    uint8_t                                     initial_cpb_removal_delay_length_minus1;
+    uint8_t                                     au_cpb_removal_delay_length_minus1;
+    uint8_t                                     dpb_output_delay_length_minus1;
+    uint8_t                                     cpb_cnt_minus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE];
+    uint16_t                                    elemental_duration_in_tc_minus1[STD_VIDEO_H265_SUBLAYERS_LIST_SIZE];
+    uint16_t                                    reserved[3];
+    const StdVideoH265SubLayerHrdParameters*    pSubLayerHrdParametersNal;
+    const StdVideoH265SubLayerHrdParameters*    pSubLayerHrdParametersVcl;
+} StdVideoH265HrdParameters;
+
+typedef struct StdVideoH265VpsFlags {
+    uint32_t    vps_temporal_id_nesting_flag : 1;
+    uint32_t    vps_sub_layer_ordering_info_present_flag : 1;
+    uint32_t    vps_timing_info_present_flag : 1;
+    uint32_t    vps_poc_proportional_to_timing_flag : 1;
+} StdVideoH265VpsFlags;
+
+typedef struct StdVideoH265ProfileTierLevelFlags {
+    uint32_t    general_tier_flag : 1;
+    uint32_t    general_progressive_source_flag : 1;
+    uint32_t    general_interlaced_source_flag : 1;
+    uint32_t    general_non_packed_constraint_flag : 1;
+    uint32_t    general_frame_only_constraint_flag : 1;
+} StdVideoH265ProfileTierLevelFlags;
+
+typedef struct StdVideoH265ProfileTierLevel {
+    StdVideoH265ProfileTierLevelFlags    flags;
+    StdVideoH265ProfileIdc               general_profile_idc;
+    StdVideoH265LevelIdc                 general_level_idc;
+} StdVideoH265ProfileTierLevel;
+
+typedef struct StdVideoH265VideoParameterSet {
+    StdVideoH265VpsFlags                   flags;
+    uint8_t                                vps_video_parameter_set_id;
+    uint8_t                                vps_max_sub_layers_minus1;
+    uint8_t                                reserved1;
+    uint8_t                                reserved2;
+    uint32_t                               vps_num_units_in_tick;
+    uint32_t                               vps_time_scale;
+    uint32_t                               vps_num_ticks_poc_diff_one_minus1;
+    uint32_t                               reserved3;
+    const StdVideoH265DecPicBufMgr*        pDecPicBufMgr;
+    const StdVideoH265HrdParameters*       pHrdParameters;
+    const StdVideoH265ProfileTierLevel*    pProfileTierLevel;
+} StdVideoH265VideoParameterSet;
+
+typedef struct StdVideoH265ScalingLists {
+    uint8_t    ScalingList4x4[STD_VIDEO_H265_SCALING_LIST_4X4_NUM_LISTS][STD_VIDEO_H265_SCALING_LIST_4X4_NUM_ELEMENTS];
+    uint8_t    ScalingList8x8[STD_VIDEO_H265_SCALING_LIST_8X8_NUM_LISTS][STD_VIDEO_H265_SCALING_LIST_8X8_NUM_ELEMENTS];
+    uint8_t    ScalingList16x16[STD_VIDEO_H265_SCALING_LIST_16X16_NUM_LISTS][STD_VIDEO_H265_SCALING_LIST_16X16_NUM_ELEMENTS];
+    uint8_t    ScalingList32x32[STD_VIDEO_H265_SCALING_LIST_32X32_NUM_LISTS][STD_VIDEO_H265_SCALING_LIST_32X32_NUM_ELEMENTS];
+    uint8_t    ScalingListDCCoef16x16[STD_VIDEO_H265_SCALING_LIST_16X16_NUM_LISTS];
+    uint8_t    ScalingListDCCoef32x32[STD_VIDEO_H265_SCALING_LIST_32X32_NUM_LISTS];
+} StdVideoH265ScalingLists;
+
+typedef struct StdVideoH265SpsVuiFlags {
+    uint32_t    aspect_ratio_info_present_flag : 1;
+    uint32_t    overscan_info_present_flag : 1;
+    uint32_t    overscan_appropriate_flag : 1;
+    uint32_t    video_signal_type_present_flag : 1;
+    uint32_t    video_full_range_flag : 1;
+    uint32_t    colour_description_present_flag : 1;
+    uint32_t    chroma_loc_info_present_flag : 1;
+    uint32_t    neutral_chroma_indication_flag : 1;
+    uint32_t    field_seq_flag : 1;
+    uint32_t    frame_field_info_present_flag : 1;
+    uint32_t    default_display_window_flag : 1;
+    uint32_t    vui_timing_info_present_flag : 1;
+    uint32_t    vui_poc_proportional_to_timing_flag : 1;
+    uint32_t    vui_hrd_parameters_present_flag : 1;
+    uint32_t    bitstream_restriction_flag : 1;
+    uint32_t    tiles_fixed_structure_flag : 1;
+    uint32_t    motion_vectors_over_pic_boundaries_flag : 1;
+    uint32_t    restricted_ref_pic_lists_flag : 1;
+} StdVideoH265SpsVuiFlags;
+
+typedef struct StdVideoH265SequenceParameterSetVui {
+    StdVideoH265SpsVuiFlags             flags;
+    StdVideoH265AspectRatioIdc          aspect_ratio_idc;
+    uint16_t                            sar_width;
+    uint16_t                            sar_height;
+    uint8_t                             video_format;
+    uint8_t                             colour_primaries;
+    uint8_t                             transfer_characteristics;
+    uint8_t                             matrix_coeffs;
+    uint8_t                             chroma_sample_loc_type_top_field;
+    uint8_t                             chroma_sample_loc_type_bottom_field;
+    uint8_t                             reserved1;
+    uint8_t                             reserved2;
+    uint16_t                            def_disp_win_left_offset;
+    uint16_t                            def_disp_win_right_offset;
+    uint16_t                            def_disp_win_top_offset;
+    uint16_t                            def_disp_win_bottom_offset;
+    uint32_t                            vui_num_units_in_tick;
+    uint32_t                            vui_time_scale;
+    uint32_t                            vui_num_ticks_poc_diff_one_minus1;
+    uint16_t                            min_spatial_segmentation_idc;
+    uint16_t                            reserved3;
+    uint8_t                             max_bytes_per_pic_denom;
+    uint8_t                             max_bits_per_min_cu_denom;
+    uint8_t                             log2_max_mv_length_horizontal;
+    uint8_t                             log2_max_mv_length_vertical;
+    const StdVideoH265HrdParameters*    pHrdParameters;
+} StdVideoH265SequenceParameterSetVui;
+
+typedef struct StdVideoH265PredictorPaletteEntries {
+    uint16_t    PredictorPaletteEntries[STD_VIDEO_H265_PREDICTOR_PALETTE_COMPONENTS_LIST_SIZE][STD_VIDEO_H265_PREDICTOR_PALETTE_COMP_ENTRIES_LIST_SIZE];
+} StdVideoH265PredictorPaletteEntries;
+
+typedef struct StdVideoH265SpsFlags {
+    uint32_t    sps_temporal_id_nesting_flag : 1;
+    uint32_t    separate_colour_plane_flag : 1;
+    uint32_t    conformance_window_flag : 1;
+    uint32_t    sps_sub_layer_ordering_info_present_flag : 1;
+    uint32_t    scaling_list_enabled_flag : 1;
+    uint32_t    sps_scaling_list_data_present_flag : 1;
+    uint32_t    amp_enabled_flag : 1;
+    uint32_t    sample_adaptive_offset_enabled_flag : 1;
+    uint32_t    pcm_enabled_flag : 1;
+    uint32_t    pcm_loop_filter_disabled_flag : 1;
+    uint32_t    long_term_ref_pics_present_flag : 1;
+    uint32_t    sps_temporal_mvp_enabled_flag : 1;
+    uint32_t    strong_intra_smoothing_enabled_flag : 1;
+    uint32_t    vui_parameters_present_flag : 1;
+    uint32_t    sps_extension_present_flag : 1;
+    uint32_t    sps_range_extension_flag : 1;
+    uint32_t    transform_skip_rotation_enabled_flag : 1;
+    uint32_t    transform_skip_context_enabled_flag : 1;
+    uint32_t    implicit_rdpcm_enabled_flag : 1;
+    uint32_t    explicit_rdpcm_enabled_flag : 1;
+    uint32_t    extended_precision_processing_flag : 1;
+    uint32_t    intra_smoothing_disabled_flag : 1;
+    uint32_t    high_precision_offsets_enabled_flag : 1;
+    uint32_t    persistent_rice_adaptation_enabled_flag : 1;
+    uint32_t    cabac_bypass_alignment_enabled_flag : 1;
+    uint32_t    sps_scc_extension_flag : 1;
+    uint32_t    sps_curr_pic_ref_enabled_flag : 1;
+    uint32_t    palette_mode_enabled_flag : 1;
+    uint32_t    sps_palette_predictor_initializers_present_flag : 1;
+    uint32_t    intra_boundary_filtering_disabled_flag : 1;
+} StdVideoH265SpsFlags;
+
+typedef struct StdVideoH265ShortTermRefPicSetFlags {
+    uint32_t    inter_ref_pic_set_prediction_flag : 1;
+    uint32_t    delta_rps_sign : 1;
+} StdVideoH265ShortTermRefPicSetFlags;
+
+typedef struct StdVideoH265ShortTermRefPicSet {
+    StdVideoH265ShortTermRefPicSetFlags    flags;
+    uint32_t                               delta_idx_minus1;
+    uint16_t                               use_delta_flag;
+    uint16_t                               abs_delta_rps_minus1;
+    uint16_t                               used_by_curr_pic_flag;
+    uint16_t                               used_by_curr_pic_s0_flag;
+    uint16_t                               used_by_curr_pic_s1_flag;
+    uint16_t                               reserved1;
+    uint8_t                                reserved2;
+    uint8_t                                reserved3;
+    uint8_t                                num_negative_pics;
+    uint8_t                                num_positive_pics;
+    uint16_t                               delta_poc_s0_minus1[STD_VIDEO_H265_MAX_DPB_SIZE];
+    uint16_t                               delta_poc_s1_minus1[STD_VIDEO_H265_MAX_DPB_SIZE];
+} StdVideoH265ShortTermRefPicSet;
+
+typedef struct StdVideoH265LongTermRefPicsSps {
+    uint32_t    used_by_curr_pic_lt_sps_flag;
+    uint32_t    lt_ref_pic_poc_lsb_sps[STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS];
+} StdVideoH265LongTermRefPicsSps;
+
+typedef struct StdVideoH265SequenceParameterSet {
+    StdVideoH265SpsFlags                          flags;
+    StdVideoH265ChromaFormatIdc                   chroma_format_idc;
+    uint32_t                                      pic_width_in_luma_samples;
+    uint32_t                                      pic_height_in_luma_samples;
+    uint8_t                                       sps_video_parameter_set_id;
+    uint8_t                                       sps_max_sub_layers_minus1;
+    uint8_t                                       sps_seq_parameter_set_id;
+    uint8_t                                       bit_depth_luma_minus8;
+    uint8_t                                       bit_depth_chroma_minus8;
+    uint8_t                                       log2_max_pic_order_cnt_lsb_minus4;
+    uint8_t                                       log2_min_luma_coding_block_size_minus3;
+    uint8_t                                       log2_diff_max_min_luma_coding_block_size;
+    uint8_t                                       log2_min_luma_transform_block_size_minus2;
+    uint8_t                                       log2_diff_max_min_luma_transform_block_size;
+    uint8_t                                       max_transform_hierarchy_depth_inter;
+    uint8_t                                       max_transform_hierarchy_depth_intra;
+    uint8_t                                       num_short_term_ref_pic_sets;
+    uint8_t                                       num_long_term_ref_pics_sps;
+    uint8_t                                       pcm_sample_bit_depth_luma_minus1;
+    uint8_t                                       pcm_sample_bit_depth_chroma_minus1;
+    uint8_t                                       log2_min_pcm_luma_coding_block_size_minus3;
+    uint8_t                                       log2_diff_max_min_pcm_luma_coding_block_size;
+    uint8_t                                       reserved1;
+    uint8_t                                       reserved2;
+    uint8_t                                       palette_max_size;
+    uint8_t                                       delta_palette_max_predictor_size;
+    uint8_t                                       motion_vector_resolution_control_idc;
+    uint8_t                                       sps_num_palette_predictor_initializers_minus1;
+    uint32_t                                      conf_win_left_offset;
+    uint32_t                                      conf_win_right_offset;
+    uint32_t                                      conf_win_top_offset;
+    uint32_t                                      conf_win_bottom_offset;
+    const StdVideoH265ProfileTierLevel*           pProfileTierLevel;
+    const StdVideoH265DecPicBufMgr*               pDecPicBufMgr;
+    const StdVideoH265ScalingLists*               pScalingLists;
+    const StdVideoH265ShortTermRefPicSet*         pShortTermRefPicSet;
+    const StdVideoH265LongTermRefPicsSps*         pLongTermRefPicsSps;
+    const StdVideoH265SequenceParameterSetVui*    pSequenceParameterSetVui;
+    const StdVideoH265PredictorPaletteEntries*    pPredictorPaletteEntries;
+} StdVideoH265SequenceParameterSet;
+
+typedef struct StdVideoH265PpsFlags {
+    uint32_t    dependent_slice_segments_enabled_flag : 1;
+    uint32_t    output_flag_present_flag : 1;
+    uint32_t    sign_data_hiding_enabled_flag : 1;
+    uint32_t    cabac_init_present_flag : 1;
+    uint32_t    constrained_intra_pred_flag : 1;
+    uint32_t    transform_skip_enabled_flag : 1;
+    uint32_t    cu_qp_delta_enabled_flag : 1;
+    uint32_t    pps_slice_chroma_qp_offsets_present_flag : 1;
+    uint32_t    weighted_pred_flag : 1;
+    uint32_t    weighted_bipred_flag : 1;
+    uint32_t    transquant_bypass_enabled_flag : 1;
+    uint32_t    tiles_enabled_flag : 1;
+    uint32_t    entropy_coding_sync_enabled_flag : 1;
+    uint32_t    uniform_spacing_flag : 1;
+    uint32_t    loop_filter_across_tiles_enabled_flag : 1;
+    uint32_t    pps_loop_filter_across_slices_enabled_flag : 1;
+    uint32_t    deblocking_filter_control_present_flag : 1;
+    uint32_t    deblocking_filter_override_enabled_flag : 1;
+    uint32_t    pps_deblocking_filter_disabled_flag : 1;
+    uint32_t    pps_scaling_list_data_present_flag : 1;
+    uint32_t    lists_modification_present_flag : 1;
+    uint32_t    slice_segment_header_extension_present_flag : 1;
+    uint32_t    pps_extension_present_flag : 1;
+    uint32_t    cross_component_prediction_enabled_flag : 1;
+    uint32_t    chroma_qp_offset_list_enabled_flag : 1;
+    uint32_t    pps_curr_pic_ref_enabled_flag : 1;
+    uint32_t    residual_adaptive_colour_transform_enabled_flag : 1;
+    uint32_t    pps_slice_act_qp_offsets_present_flag : 1;
+    uint32_t    pps_palette_predictor_initializers_present_flag : 1;
+    uint32_t    monochrome_palette_flag : 1;
+    uint32_t    pps_range_extension_flag : 1;
+} StdVideoH265PpsFlags;
+
+typedef struct StdVideoH265PictureParameterSet {
+    StdVideoH265PpsFlags                          flags;
+    uint8_t                                       pps_pic_parameter_set_id;
+    uint8_t                                       pps_seq_parameter_set_id;
+    uint8_t                                       sps_video_parameter_set_id;
+    uint8_t                                       num_extra_slice_header_bits;
+    uint8_t                                       num_ref_idx_l0_default_active_minus1;
+    uint8_t                                       num_ref_idx_l1_default_active_minus1;
+    int8_t                                        init_qp_minus26;
+    uint8_t                                       diff_cu_qp_delta_depth;
+    int8_t                                        pps_cb_qp_offset;
+    int8_t                                        pps_cr_qp_offset;
+    int8_t                                        pps_beta_offset_div2;
+    int8_t                                        pps_tc_offset_div2;
+    uint8_t                                       log2_parallel_merge_level_minus2;
+    uint8_t                                       log2_max_transform_skip_block_size_minus2;
+    uint8_t                                       diff_cu_chroma_qp_offset_depth;
+    uint8_t                                       chroma_qp_offset_list_len_minus1;
+    int8_t                                        cb_qp_offset_list[STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE];
+    int8_t                                        cr_qp_offset_list[STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE];
+    uint8_t                                       log2_sao_offset_scale_luma;
+    uint8_t                                       log2_sao_offset_scale_chroma;
+    int8_t                                        pps_act_y_qp_offset_plus5;
+    int8_t                                        pps_act_cb_qp_offset_plus5;
+    int8_t                                        pps_act_cr_qp_offset_plus3;
+    uint8_t                                       pps_num_palette_predictor_initializers;
+    uint8_t                                       luma_bit_depth_entry_minus8;
+    uint8_t                                       chroma_bit_depth_entry_minus8;
+    uint8_t                                       num_tile_columns_minus1;
+    uint8_t                                       num_tile_rows_minus1;
+    uint8_t                                       reserved1;
+    uint8_t                                       reserved2;
+    uint16_t                                      column_width_minus1[STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_COLS_LIST_SIZE];
+    uint16_t                                      row_height_minus1[STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_ROWS_LIST_SIZE];
+    uint32_t                                      reserved3;
+    const StdVideoH265ScalingLists*               pScalingLists;
+    const StdVideoH265PredictorPaletteEntries*    pPredictorPaletteEntries;
+} StdVideoH265PictureParameterSet;
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
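Again illustrative only: filling the two helper structs that StdVideoH265VideoParameterSet points to, for a simple single-sub-layer Main-profile stream, using only definitions from this header.

#include <string.h>
#include "vulkan_video_codec_h265std.h"

static void fillVpsHelpers(StdVideoH265ProfileTierLevel* ptl, StdVideoH265DecPicBufMgr* dpb)
{
    memset(ptl, 0, sizeof(*ptl));
    ptl->flags.general_progressive_source_flag = 1;
    ptl->flags.general_frame_only_constraint_flag = 1;
    ptl->general_profile_idc = STD_VIDEO_H265_PROFILE_IDC_MAIN;
    ptl->general_level_idc = STD_VIDEO_H265_LEVEL_IDC_5_1;

    memset(dpb, 0, sizeof(*dpb));
    for (int i = 0; i < STD_VIDEO_H265_SUBLAYERS_LIST_SIZE; ++i)
    {
        dpb->max_dec_pic_buffering_minus1[i] = 3; /* a 4-picture DPB */
        dpb->max_num_reorder_pics[i] = 0;         /* no reordering */
        dpb->max_latency_increase_plus1[i] = 0;   /* no latency limit signalled */
    }
}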

+ 67 - 0
ThirdParty/Khronos/vk_video/vulkan_video_codec_h265std_decode.h

@@ -0,0 +1,67 @@
+#ifndef VULKAN_VIDEO_CODEC_H265STD_DECODE_H_
+#define VULKAN_VIDEO_CODEC_H265STD_DECODE_H_ 1
+
+/*
+** Copyright 2015-2023 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+// vulkan_video_codec_h265std_decode is a preprocessor guard. Do not pass it to API calls.
+#define vulkan_video_codec_h265std_decode 1
+#include "vulkan_video_codec_h265std.h"
+
+#define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_API_VERSION_1_0_0 VK_MAKE_VIDEO_STD_VERSION(1, 0, 0)
+
+#define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_API_VERSION_1_0_0
+#define VK_STD_VULKAN_VIDEO_CODEC_H265_DECODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h265_decode"
+#define STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE 8
+typedef struct StdVideoDecodeH265PictureInfoFlags {
+    uint32_t    IrapPicFlag : 1;
+    uint32_t    IdrPicFlag  : 1;
+    uint32_t    IsReference : 1;
+    uint32_t    short_term_ref_pic_set_sps_flag : 1;
+} StdVideoDecodeH265PictureInfoFlags;
+
+typedef struct StdVideoDecodeH265PictureInfo {
+    StdVideoDecodeH265PictureInfoFlags    flags;
+    uint8_t                               sps_video_parameter_set_id;
+    uint8_t                               pps_seq_parameter_set_id;
+    uint8_t                               pps_pic_parameter_set_id;
+    uint8_t                               NumDeltaPocsOfRefRpsIdx;
+    int32_t                               PicOrderCntVal;
+    uint16_t                              NumBitsForSTRefPicSetInSlice;
+    uint16_t                              reserved;
+    uint8_t                               RefPicSetStCurrBefore[STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE];
+    uint8_t                               RefPicSetStCurrAfter[STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE];
+    uint8_t                               RefPicSetLtCurr[STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE];
+} StdVideoDecodeH265PictureInfo;
+
+typedef struct StdVideoDecodeH265ReferenceInfoFlags {
+    uint32_t    used_for_long_term_reference : 1;
+    uint32_t    unused_for_reference : 1;
+} StdVideoDecodeH265ReferenceInfoFlags;
+
+typedef struct StdVideoDecodeH265ReferenceInfo {
+    StdVideoDecodeH265ReferenceInfoFlags    flags;
+    int32_t                                 PicOrderCntVal;
+} StdVideoDecodeH265ReferenceInfo;
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
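Sketch for the decode structs above: the three RefPicSet arrays carry DPB slot indices, so a common first step is to mark every entry unused with STD_VIDEO_H265_NO_REFERENCE_PICTURE (defined in the base h265std header) before filling the active slots. The helper name is ours.

#include "vulkan_video_codec_h265std_decode.h"

static void clearRefPicSets(StdVideoDecodeH265PictureInfo* info)
{
    for (int i = 0; i < STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE; ++i)
    {
        info->RefPicSetStCurrBefore[i] = STD_VIDEO_H265_NO_REFERENCE_PICTURE;
        info->RefPicSetStCurrAfter[i] = STD_VIDEO_H265_NO_REFERENCE_PICTURE;
        info->RefPicSetLtCurr[i] = STD_VIDEO_H265_NO_REFERENCE_PICTURE;
    }
}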

+ 157 - 0
ThirdParty/Khronos/vk_video/vulkan_video_codec_h265std_encode.h

@@ -0,0 +1,157 @@
+#ifndef VULKAN_VIDEO_CODEC_H265STD_ENCODE_H_
+#define VULKAN_VIDEO_CODEC_H265STD_ENCODE_H_ 1
+
+/*
+** Copyright 2015-2023 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+// vulkan_video_codec_h265std_encode is a preprocessor guard. Do not pass it to API calls.
+#define vulkan_video_codec_h265std_encode 1
+#include "vulkan_video_codec_h265std.h"
+// Vulkan 0.9 provisional Vulkan video H.265 encode std specification version number
+#define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_API_VERSION_0_9_12 VK_MAKE_VIDEO_STD_VERSION(0, 9, 12)
+
+#define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_SPEC_VERSION VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_API_VERSION_0_9_12
+#define VK_STD_VULKAN_VIDEO_CODEC_H265_ENCODE_EXTENSION_NAME "VK_STD_vulkan_video_codec_h265_encode"
+typedef struct StdVideoEncodeH265WeightTableFlags {
+    uint16_t    luma_weight_l0_flag;
+    uint16_t    chroma_weight_l0_flag;
+    uint16_t    luma_weight_l1_flag;
+    uint16_t    chroma_weight_l1_flag;
+} StdVideoEncodeH265WeightTableFlags;
+
+typedef struct StdVideoEncodeH265WeightTable {
+    StdVideoEncodeH265WeightTableFlags    flags;
+    uint8_t                               luma_log2_weight_denom;
+    int8_t                                delta_chroma_log2_weight_denom;
+    int8_t                                delta_luma_weight_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF];
+    int8_t                                luma_offset_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF];
+    int8_t                                delta_chroma_weight_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES];
+    int8_t                                delta_chroma_offset_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES];
+    int8_t                                delta_luma_weight_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF];
+    int8_t                                luma_offset_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF];
+    int8_t                                delta_chroma_weight_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES];
+    int8_t                                delta_chroma_offset_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF][STD_VIDEO_H265_MAX_CHROMA_PLANES];
+} StdVideoEncodeH265WeightTable;
+
+typedef struct StdVideoEncodeH265SliceSegmentHeaderFlags {
+    uint32_t    first_slice_segment_in_pic_flag : 1;
+    uint32_t    dependent_slice_segment_flag : 1;
+    uint32_t    slice_sao_luma_flag : 1;
+    uint32_t    slice_sao_chroma_flag : 1;
+    uint32_t    num_ref_idx_active_override_flag : 1;
+    uint32_t    mvd_l1_zero_flag : 1;
+    uint32_t    cabac_init_flag : 1;
+    uint32_t    cu_chroma_qp_offset_enabled_flag : 1;
+    uint32_t    deblocking_filter_override_flag : 1;
+    uint32_t    slice_deblocking_filter_disabled_flag : 1;
+    uint32_t    collocated_from_l0_flag : 1;
+    uint32_t    slice_loop_filter_across_slices_enabled_flag : 1;
+    uint32_t    reserved : 20;
+} StdVideoEncodeH265SliceSegmentHeaderFlags;
+
+typedef struct StdVideoEncodeH265SliceSegmentHeader {
+    StdVideoEncodeH265SliceSegmentHeaderFlags    flags;
+    StdVideoH265SliceType                        slice_type;
+    uint32_t                                     slice_segment_address;
+    uint8_t                                      collocated_ref_idx;
+    uint8_t                                      MaxNumMergeCand;
+    int8_t                                       slice_cb_qp_offset;
+    int8_t                                       slice_cr_qp_offset;
+    int8_t                                       slice_beta_offset_div2;
+    int8_t                                       slice_tc_offset_div2;
+    int8_t                                       slice_act_y_qp_offset;
+    int8_t                                       slice_act_cb_qp_offset;
+    int8_t                                       slice_act_cr_qp_offset;
+    int8_t                                       slice_qp_delta;
+    uint16_t                                     reserved1;
+    const StdVideoEncodeH265WeightTable*         pWeightTable;
+} StdVideoEncodeH265SliceSegmentHeader;
+
+typedef struct StdVideoEncodeH265ReferenceListsInfoFlags {
+    uint32_t    ref_pic_list_modification_flag_l0 : 1;
+    uint32_t    ref_pic_list_modification_flag_l1 : 1;
+    uint32_t    reserved : 30;
+} StdVideoEncodeH265ReferenceListsInfoFlags;
+
+typedef struct StdVideoEncodeH265ReferenceListsInfo {
+    StdVideoEncodeH265ReferenceListsInfoFlags    flags;
+    uint8_t                                      num_ref_idx_l0_active_minus1;
+    uint8_t                                      num_ref_idx_l1_active_minus1;
+    uint8_t                                      RefPicList0[STD_VIDEO_H265_MAX_NUM_LIST_REF];
+    uint8_t                                      RefPicList1[STD_VIDEO_H265_MAX_NUM_LIST_REF];
+    uint8_t                                      list_entry_l0[STD_VIDEO_H265_MAX_NUM_LIST_REF];
+    uint8_t                                      list_entry_l1[STD_VIDEO_H265_MAX_NUM_LIST_REF];
+} StdVideoEncodeH265ReferenceListsInfo;
+
+typedef struct StdVideoEncodeH265PictureInfoFlags {
+    uint32_t    is_reference : 1;
+    uint32_t    IrapPicFlag : 1;
+    uint32_t    used_for_long_term_reference : 1;
+    uint32_t    discardable_flag : 1;
+    uint32_t    cross_layer_bla_flag : 1;
+    uint32_t    pic_output_flag : 1;
+    uint32_t    no_output_of_prior_pics_flag : 1;
+    uint32_t    short_term_ref_pic_set_sps_flag : 1;
+    uint32_t    slice_temporal_mvp_enabled_flag : 1;
+    uint32_t    reserved : 23;
+} StdVideoEncodeH265PictureInfoFlags;
+
+typedef struct StdVideoEncodeH265LongTermRefPics {
+    uint8_t     num_long_term_sps;
+    uint8_t     num_long_term_pics;
+    uint8_t     lt_idx_sps[STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS];
+    uint8_t     poc_lsb_lt[STD_VIDEO_H265_MAX_LONG_TERM_PICS];
+    uint16_t    used_by_curr_pic_lt_flag;
+    uint8_t     delta_poc_msb_present_flag[STD_VIDEO_H265_MAX_DELTA_POC];
+    uint8_t     delta_poc_msb_cycle_lt[STD_VIDEO_H265_MAX_DELTA_POC];
+} StdVideoEncodeH265LongTermRefPics;
+
+typedef struct StdVideoEncodeH265PictureInfo {
+    StdVideoEncodeH265PictureInfoFlags             flags;
+    StdVideoH265PictureType                        pic_type;
+    uint8_t                                        sps_video_parameter_set_id;
+    uint8_t                                        pps_seq_parameter_set_id;
+    uint8_t                                        pps_pic_parameter_set_id;
+    uint8_t                                        short_term_ref_pic_set_idx;
+    int32_t                                        PicOrderCntVal;
+    uint8_t                                        TemporalId;
+    uint8_t                                        reserved1[7];
+    const StdVideoEncodeH265ReferenceListsInfo*    pRefLists;
+    const StdVideoH265ShortTermRefPicSet*          pShortTermRefPicSet;
+    const StdVideoEncodeH265LongTermRefPics*       pLongTermRefPics;
+} StdVideoEncodeH265PictureInfo;
+
+typedef struct StdVideoEncodeH265ReferenceInfoFlags {
+    uint32_t    used_for_long_term_reference : 1;
+    uint32_t    unused_for_reference : 1;
+    uint32_t    reserved : 30;
+} StdVideoEncodeH265ReferenceInfoFlags;
+
+typedef struct StdVideoEncodeH265ReferenceInfo {
+    StdVideoEncodeH265ReferenceInfoFlags    flags;
+    StdVideoH265PictureType                 pic_type;
+    int32_t                                 PicOrderCntVal;
+    uint8_t                                 TemporalId;
+} StdVideoEncodeH265ReferenceInfo;
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif

+ 36 - 0
ThirdParty/Khronos/vk_video/vulkan_video_codecs_common.h

@@ -0,0 +1,36 @@
+#ifndef VULKAN_VIDEO_CODECS_COMMON_H_
+#define VULKAN_VIDEO_CODECS_COMMON_H_ 1
+
+/*
+** Copyright 2015-2023 The Khronos Group Inc.
+**
+** SPDX-License-Identifier: Apache-2.0
+*/
+
+/*
+** This header is generated from the Khronos Vulkan XML API Registry.
+**
+*/
+
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
+
+// vulkan_video_codecs_common is a preprocessor guard. Do not pass it to API calls.
+#define vulkan_video_codecs_common 1
+#if !defined(VK_NO_STDINT_H)
+    #include <stdint.h>
+#endif
+
+#define VK_MAKE_VIDEO_STD_VERSION(major, minor, patch) \
+    ((((uint32_t)(major)) << 22) | (((uint32_t)(minor)) << 12) | ((uint32_t)(patch)))
+
+
+#ifdef __cplusplus
+}
+#endif
+
+#endif
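The VK_MAKE_VIDEO_STD_VERSION macro above packs major into bits 22..31, minor into bits 12..21 and patch into bits 0..11. A small self-check in C; the unpack macros are ours, the header does not provide them.

#include <assert.h>
#include <stdint.h>
#include "vulkan_video_codecs_common.h"

#define STD_VIDEO_VERSION_MAJOR(v) ((uint32_t)(v) >> 22)
#define STD_VIDEO_VERSION_MINOR(v) (((uint32_t)(v) >> 12) & 0x3FFu)
#define STD_VIDEO_VERSION_PATCH(v) ((uint32_t)(v) & 0xFFFu)

int main(void)
{
    const uint32_t v = VK_MAKE_VIDEO_STD_VERSION(0, 9, 12); /* == 0x0000900C */
    assert(STD_VIDEO_VERSION_MAJOR(v) == 0);
    assert(STD_VIDEO_VERSION_MINOR(v) == 9);
    assert(STD_VIDEO_VERSION_PATCH(v) == 12);
    return 0;
}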

+ 23 - 24
ThirdParty/Khronos/vulkan/vk_icd.h

@@ -1,27 +1,11 @@
-//
-// File: vk_icd.h
-//
 /*
- * Copyright (c) 2015-2016 The Khronos Group Inc.
- * Copyright (c) 2015-2016 Valve Corporation
- * Copyright (c) 2015-2016 LunarG, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Copyright 2015-2023 The Khronos Group Inc.
+ * Copyright 2015-2023 Valve Corporation
+ * Copyright 2015-2023 LunarG, Inc.
  *
+ * SPDX-License-Identifier: Apache-2.0
  */
-
-#ifndef VKICD_H
-#define VKICD_H
+#pragma once
 
 #include "vulkan.h"
 #include <stdbool.h>
@@ -42,7 +26,17 @@
 //               call for any API version > 1.0.  Otherwise, the loader will
 //               manually determine if it can support the expected version.
 //   Version 6 - Add support for vk_icdEnumerateAdapterPhysicalDevices.
-#define CURRENT_LOADER_ICD_INTERFACE_VERSION 6
+//   Version 7 - If an ICD supports any of the following functions, they must be
+//               queryable with vk_icdGetInstanceProcAddr:
+//                   vk_icdNegotiateLoaderICDInterfaceVersion
+//                   vk_icdGetPhysicalDeviceProcAddr
+//                   vk_icdEnumerateAdapterPhysicalDevices (Windows only)
+//               In addition, these functions no longer need to be exported directly.
+//               This version allows drivers provided through the extension
+//               VK_LUNARG_direct_driver_loading be able to support the entire
+//               Driver-Loader interface.
+
+#define CURRENT_LOADER_ICD_INTERFACE_VERSION 7
 #define MIN_SUPPORTED_LOADER_ICD_INTERFACE_VERSION 0
 #define MIN_PHYS_DEV_EXTENSION_ICD_INTERFACE_VERSION 4
 
@@ -70,7 +64,7 @@ extern "C" {
 #endif
     VKAPI_ATTR VkResult VKAPI_CALL vk_icdNegotiateLoaderICDInterfaceVersion(uint32_t* pVersion);
     VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance, const char* pName);
-    VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetPhysicalDeviceProcAddr(VkInstance isntance, const char* pName);
+    VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetPhysicalDeviceProcAddr(VkInstance instance, const char* pName);
 #if defined(VK_USE_PLATFORM_WIN32_KHR)
     VKAPI_ATTR VkResult VKAPI_CALL vk_icdEnumerateAdapterPhysicalDevices(VkInstance instance, LUID adapterLUID,
         uint32_t* pPhysicalDeviceCount, VkPhysicalDevice* pPhysicalDevices);
@@ -123,6 +117,7 @@ typedef enum {
     VK_ICD_WSI_PLATFORM_VI,
     VK_ICD_WSI_PLATFORM_GGP,
     VK_ICD_WSI_PLATFORM_SCREEN,
+    VK_ICD_WSI_PLATFORM_FUCHSIA,
 } VkIcdWsiPlatform;
 
 typedef struct {
@@ -242,4 +237,8 @@ typedef struct {
 } VkIcdSurfaceScreen;
 #endif  // VK_USE_PLATFORM_SCREEN_QNX
 
-#endif  // VKICD_H
+#ifdef VK_USE_PLATFORM_FUCHSIA
+typedef struct {
+  VkIcdSurfaceBase base;
+} VkIcdSurfaceImagePipe;
+#endif // VK_USE_PLATFORM_FUCHSIA
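The interface-version-7 note above means a driver can route the loader entry points through vk_icdGetInstanceProcAddr instead of exporting them. A rough sketch under those assumptions; the my* functions are placeholders for the driver's own implementations, and the Windows-only adapter enumeration entry point is omitted.

#include <string.h>
#include "vk_icd.h"

/* Placeholders for the driver's own implementations. */
VKAPI_ATTR VkResult VKAPI_CALL myNegotiateLoaderVersion(uint32_t* pVersion);
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL myGetPhysicalDeviceProcAddr(VkInstance instance, const char* pName);
VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL myGetInstanceProcAddr(VkInstance instance, const char* pName);

VKAPI_ATTR PFN_vkVoidFunction VKAPI_CALL vk_icdGetInstanceProcAddr(VkInstance instance, const char* pName)
{
    /* With version 7 these only have to be reachable through this query,
     * not exported directly from the shared library. */
    if (strcmp(pName, "vk_icdNegotiateLoaderICDInterfaceVersion") == 0)
        return (PFN_vkVoidFunction)myNegotiateLoaderVersion;
    if (strcmp(pName, "vk_icdGetPhysicalDeviceProcAddr") == 0)
        return (PFN_vkVoidFunction)myGetPhysicalDeviceProcAddr;
    return myGetInstanceProcAddr(instance, pName); /* regular Vulkan entry points */
}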

+ 6 - 27
ThirdParty/Khronos/vulkan/vk_layer.h

@@ -1,39 +1,18 @@
-//
-// File: vk_layer.h
-//
 /*
- * Copyright (c) 2015-2017 The Khronos Group Inc.
- * Copyright (c) 2015-2017 Valve Corporation
- * Copyright (c) 2015-2017 LunarG, Inc.
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *     http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
+ * Copyright 2015-2023 The Khronos Group Inc.
+ * Copyright 2015-2023 Valve Corporation
+ * Copyright 2015-2023 LunarG, Inc.
  *
+ * SPDX-License-Identifier: Apache-2.0
  */
+#pragma once
 
 /* Need to define dispatch table
  * Core struct can then have ptr to dispatch table at the top
  * Along with object ptrs for current and next OBJ
  */
-#pragma once
 
-#include "vulkan.h"
-#if defined(__GNUC__) && __GNUC__ >= 4
-#define VK_LAYER_EXPORT __attribute__((visibility("default")))
-#elif defined(__SUNPRO_C) && (__SUNPRO_C >= 0x590)
-#define VK_LAYER_EXPORT __attribute__((visibility("default")))
-#else
-#define VK_LAYER_EXPORT
-#endif
+#include "vulkan_core.h"
 
 #define MAX_NUM_UNKNOWN_EXTS 250
 

+ 1 - 1
ThirdParty/Khronos/vulkan/vk_platform.h

@@ -2,7 +2,7 @@
 // File: vk_platform.h
 //
 /*
-** Copyright 2014-2022 The Khronos Group Inc.
+** Copyright 2014-2023 The Khronos Group Inc.
 **
 ** SPDX-License-Identifier: Apache-2.0
 */

+ 3199 - 0
ThirdParty/Khronos/vulkan/vulkan.cppm

@@ -0,0 +1,3199 @@
+// Copyright 2015-2023 The Khronos Group Inc.
+//
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+// Note: This module is still in an experimental state.
+// Any feedback is welcome on https://github.com/KhronosGroup/Vulkan-Hpp/issues.
+
+module;
+
+#include <vulkan/vulkan.hpp>
+#include <vulkan/vulkan_extension_inspection.hpp>
+#include <vulkan/vulkan_format_traits.hpp>
+#include <vulkan/vulkan_hash.hpp>
+#include <vulkan/vulkan_raii.hpp>
+#include <vulkan/vulkan_shared.hpp>
+
+export module vulkan_hpp;
+
+export namespace VULKAN_HPP_NAMESPACE
+{
+  //=====================================
+  //=== HARDCODED TYPEs AND FUNCTIONs ===
+  //=====================================
+  using VULKAN_HPP_NAMESPACE::ArrayWrapper1D;
+  using VULKAN_HPP_NAMESPACE::ArrayWrapper2D;
+  using VULKAN_HPP_NAMESPACE::DispatchLoaderBase;
+  using VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic;
+  using VULKAN_HPP_NAMESPACE::Flags;
+  using VULKAN_HPP_NAMESPACE::FlagTraits;
+
+#if !defined( VK_NO_PROTOTYPES )
+  using VULKAN_HPP_NAMESPACE::DispatchLoaderStatic;
+#endif /*VK_NO_PROTOTYPES*/
+
+  using VULKAN_HPP_NAMESPACE::operator&;
+  using VULKAN_HPP_NAMESPACE::operator|;
+  using VULKAN_HPP_NAMESPACE::operator^;
+  using VULKAN_HPP_NAMESPACE::operator~;
+  using VULKAN_HPP_DEFAULT_DISPATCHER_TYPE;
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+  using VULKAN_HPP_NAMESPACE::ArrayProxy;
+  using VULKAN_HPP_NAMESPACE::ArrayProxyNoTemporaries;
+  using VULKAN_HPP_NAMESPACE::Optional;
+  using VULKAN_HPP_NAMESPACE::SharedHandle;
+  using VULKAN_HPP_NAMESPACE::StridedArrayProxy;
+  using VULKAN_HPP_NAMESPACE::StructureChain;
+  using VULKAN_HPP_NAMESPACE::UniqueHandle;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#if !defined( VULKAN_HPP_NO_SMART_HANDLE )
+  using VULKAN_HPP_NAMESPACE::ObjectDestroy;
+  using VULKAN_HPP_NAMESPACE::ObjectDestroyShared;
+  using VULKAN_HPP_NAMESPACE::ObjectFree;
+  using VULKAN_HPP_NAMESPACE::ObjectFreeShared;
+  using VULKAN_HPP_NAMESPACE::ObjectRelease;
+  using VULKAN_HPP_NAMESPACE::ObjectReleaseShared;
+  using VULKAN_HPP_NAMESPACE::PoolFree;
+  using VULKAN_HPP_NAMESPACE::PoolFreeShared;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
+  //==================
+  //=== BASE TYPEs ===
+  //==================
+  using VULKAN_HPP_NAMESPACE::Bool32;
+  using VULKAN_HPP_NAMESPACE::DeviceAddress;
+  using VULKAN_HPP_NAMESPACE::DeviceSize;
+  using VULKAN_HPP_NAMESPACE::RemoteAddressNV;
+  using VULKAN_HPP_NAMESPACE::SampleMask;
+
+  //=============
+  //=== ENUMs ===
+  //=============
+  using VULKAN_HPP_NAMESPACE::CppType;
+
+  //=== VK_VERSION_1_0 ===
+  using VULKAN_HPP_NAMESPACE::AccessFlagBits;
+  using VULKAN_HPP_NAMESPACE::AccessFlags;
+  using VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlagBits;
+  using VULKAN_HPP_NAMESPACE::AttachmentDescriptionFlags;
+  using VULKAN_HPP_NAMESPACE::AttachmentLoadOp;
+  using VULKAN_HPP_NAMESPACE::AttachmentStoreOp;
+  using VULKAN_HPP_NAMESPACE::BlendFactor;
+  using VULKAN_HPP_NAMESPACE::BlendOp;
+  using VULKAN_HPP_NAMESPACE::BorderColor;
+  using VULKAN_HPP_NAMESPACE::BufferCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::BufferCreateFlags;
+  using VULKAN_HPP_NAMESPACE::BufferUsageFlagBits;
+  using VULKAN_HPP_NAMESPACE::BufferUsageFlags;
+  using VULKAN_HPP_NAMESPACE::BufferViewCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::BufferViewCreateFlags;
+  using VULKAN_HPP_NAMESPACE::ColorComponentFlagBits;
+  using VULKAN_HPP_NAMESPACE::ColorComponentFlags;
+  using VULKAN_HPP_NAMESPACE::CommandBufferLevel;
+  using VULKAN_HPP_NAMESPACE::CommandBufferResetFlagBits;
+  using VULKAN_HPP_NAMESPACE::CommandBufferResetFlags;
+  using VULKAN_HPP_NAMESPACE::CommandBufferUsageFlagBits;
+  using VULKAN_HPP_NAMESPACE::CommandBufferUsageFlags;
+  using VULKAN_HPP_NAMESPACE::CommandPoolCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::CommandPoolCreateFlags;
+  using VULKAN_HPP_NAMESPACE::CommandPoolResetFlagBits;
+  using VULKAN_HPP_NAMESPACE::CommandPoolResetFlags;
+  using VULKAN_HPP_NAMESPACE::CompareOp;
+  using VULKAN_HPP_NAMESPACE::ComponentSwizzle;
+  using VULKAN_HPP_NAMESPACE::CullModeFlagBits;
+  using VULKAN_HPP_NAMESPACE::CullModeFlags;
+  using VULKAN_HPP_NAMESPACE::DependencyFlagBits;
+  using VULKAN_HPP_NAMESPACE::DependencyFlags;
+  using VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::DescriptorPoolCreateFlags;
+  using VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlagBits;
+  using VULKAN_HPP_NAMESPACE::DescriptorPoolResetFlags;
+  using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateFlags;
+  using VULKAN_HPP_NAMESPACE::DescriptorType;
+  using VULKAN_HPP_NAMESPACE::DeviceCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::DeviceCreateFlags;
+  using VULKAN_HPP_NAMESPACE::DynamicState;
+  using VULKAN_HPP_NAMESPACE::EventCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::EventCreateFlags;
+  using VULKAN_HPP_NAMESPACE::FenceCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::FenceCreateFlags;
+  using VULKAN_HPP_NAMESPACE::Filter;
+  using VULKAN_HPP_NAMESPACE::Format;
+  using VULKAN_HPP_NAMESPACE::FormatFeatureFlagBits;
+  using VULKAN_HPP_NAMESPACE::FormatFeatureFlags;
+  using VULKAN_HPP_NAMESPACE::FramebufferCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::FramebufferCreateFlags;
+  using VULKAN_HPP_NAMESPACE::FrontFace;
+  using VULKAN_HPP_NAMESPACE::ImageAspectFlagBits;
+  using VULKAN_HPP_NAMESPACE::ImageAspectFlags;
+  using VULKAN_HPP_NAMESPACE::ImageCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::ImageCreateFlags;
+  using VULKAN_HPP_NAMESPACE::ImageLayout;
+  using VULKAN_HPP_NAMESPACE::ImageTiling;
+  using VULKAN_HPP_NAMESPACE::ImageType;
+  using VULKAN_HPP_NAMESPACE::ImageUsageFlagBits;
+  using VULKAN_HPP_NAMESPACE::ImageUsageFlags;
+  using VULKAN_HPP_NAMESPACE::ImageViewCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::ImageViewCreateFlags;
+  using VULKAN_HPP_NAMESPACE::ImageViewType;
+  using VULKAN_HPP_NAMESPACE::IndexType;
+  using VULKAN_HPP_NAMESPACE::InstanceCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::InstanceCreateFlags;
+  using VULKAN_HPP_NAMESPACE::InternalAllocationType;
+  using VULKAN_HPP_NAMESPACE::LogicOp;
+  using VULKAN_HPP_NAMESPACE::MemoryHeapFlagBits;
+  using VULKAN_HPP_NAMESPACE::MemoryHeapFlags;
+  using VULKAN_HPP_NAMESPACE::MemoryMapFlagBits;
+  using VULKAN_HPP_NAMESPACE::MemoryMapFlags;
+  using VULKAN_HPP_NAMESPACE::MemoryPropertyFlagBits;
+  using VULKAN_HPP_NAMESPACE::MemoryPropertyFlags;
+  using VULKAN_HPP_NAMESPACE::ObjectType;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceType;
+  using VULKAN_HPP_NAMESPACE::PipelineBindPoint;
+  using VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersion;
+  using VULKAN_HPP_NAMESPACE::PipelineCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::PipelineCreateFlags;
+  using VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateFlags;
+  using VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateFlags;
+  using VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateFlags;
+  using VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateFlags;
+  using VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateFlags;
+  using VULKAN_HPP_NAMESPACE::PipelineStageFlagBits;
+  using VULKAN_HPP_NAMESPACE::PipelineStageFlags;
+  using VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateFlags;
+  using VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateFlags;
+  using VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateFlags;
+  using VULKAN_HPP_NAMESPACE::PolygonMode;
+  using VULKAN_HPP_NAMESPACE::PrimitiveTopology;
+  using VULKAN_HPP_NAMESPACE::QueryControlFlagBits;
+  using VULKAN_HPP_NAMESPACE::QueryControlFlags;
+  using VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlagBits;
+  using VULKAN_HPP_NAMESPACE::QueryPipelineStatisticFlags;
+  using VULKAN_HPP_NAMESPACE::QueryPoolCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::QueryPoolCreateFlags;
+  using VULKAN_HPP_NAMESPACE::QueryResultFlagBits;
+  using VULKAN_HPP_NAMESPACE::QueryResultFlags;
+  using VULKAN_HPP_NAMESPACE::QueryType;
+  using VULKAN_HPP_NAMESPACE::QueueFlagBits;
+  using VULKAN_HPP_NAMESPACE::QueueFlags;
+  using VULKAN_HPP_NAMESPACE::RenderPassCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::RenderPassCreateFlags;
+  using VULKAN_HPP_NAMESPACE::Result;
+  using VULKAN_HPP_NAMESPACE::SampleCountFlagBits;
+  using VULKAN_HPP_NAMESPACE::SampleCountFlags;
+  using VULKAN_HPP_NAMESPACE::SamplerAddressMode;
+  using VULKAN_HPP_NAMESPACE::SamplerCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::SamplerCreateFlags;
+  using VULKAN_HPP_NAMESPACE::SamplerMipmapMode;
+  using VULKAN_HPP_NAMESPACE::SemaphoreCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::SemaphoreCreateFlags;
+  using VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::ShaderModuleCreateFlags;
+  using VULKAN_HPP_NAMESPACE::ShaderStageFlagBits;
+  using VULKAN_HPP_NAMESPACE::ShaderStageFlags;
+  using VULKAN_HPP_NAMESPACE::SharingMode;
+  using VULKAN_HPP_NAMESPACE::SparseImageFormatFlagBits;
+  using VULKAN_HPP_NAMESPACE::SparseImageFormatFlags;
+  using VULKAN_HPP_NAMESPACE::SparseMemoryBindFlagBits;
+  using VULKAN_HPP_NAMESPACE::SparseMemoryBindFlags;
+  using VULKAN_HPP_NAMESPACE::StencilFaceFlagBits;
+  using VULKAN_HPP_NAMESPACE::StencilFaceFlags;
+  using VULKAN_HPP_NAMESPACE::StencilOp;
+  using VULKAN_HPP_NAMESPACE::StructureType;
+  using VULKAN_HPP_NAMESPACE::SubpassContents;
+  using VULKAN_HPP_NAMESPACE::SubpassDescriptionFlagBits;
+  using VULKAN_HPP_NAMESPACE::SubpassDescriptionFlags;
+  using VULKAN_HPP_NAMESPACE::SystemAllocationScope;
+  using VULKAN_HPP_NAMESPACE::VendorId;
+  using VULKAN_HPP_NAMESPACE::VertexInputRate;
+
+  //=== VK_VERSION_1_1 ===
+  using VULKAN_HPP_NAMESPACE::ChromaLocation;
+  using VULKAN_HPP_NAMESPACE::ChromaLocationKHR;
+  using VULKAN_HPP_NAMESPACE::CommandPoolTrimFlagBits;
+  using VULKAN_HPP_NAMESPACE::CommandPoolTrimFlags;
+  using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateFlags;
+  using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateType;
+  using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateTypeKHR;
+  using VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::DeviceQueueCreateFlags;
+  using VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlagBits;
+  using VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::ExternalFenceFeatureFlags;
+  using VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBits;
+  using VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::ExternalFenceHandleTypeFlags;
+  using VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagBits;
+  using VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlags;
+  using VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBits;
+  using VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlags;
+  using VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlagBits;
+  using VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::ExternalSemaphoreFeatureFlags;
+  using VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBits;
+  using VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::ExternalSemaphoreHandleTypeFlags;
+  using VULKAN_HPP_NAMESPACE::FenceImportFlagBits;
+  using VULKAN_HPP_NAMESPACE::FenceImportFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::FenceImportFlags;
+  using VULKAN_HPP_NAMESPACE::MemoryAllocateFlagBits;
+  using VULKAN_HPP_NAMESPACE::MemoryAllocateFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::MemoryAllocateFlags;
+  using VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlagBits;
+  using VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::PeerMemoryFeatureFlags;
+  using VULKAN_HPP_NAMESPACE::PointClippingBehavior;
+  using VULKAN_HPP_NAMESPACE::PointClippingBehaviorKHR;
+  using VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversion;
+  using VULKAN_HPP_NAMESPACE::SamplerYcbcrModelConversionKHR;
+  using VULKAN_HPP_NAMESPACE::SamplerYcbcrRange;
+  using VULKAN_HPP_NAMESPACE::SamplerYcbcrRangeKHR;
+  using VULKAN_HPP_NAMESPACE::SemaphoreImportFlagBits;
+  using VULKAN_HPP_NAMESPACE::SemaphoreImportFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::SemaphoreImportFlags;
+  using VULKAN_HPP_NAMESPACE::SubgroupFeatureFlagBits;
+  using VULKAN_HPP_NAMESPACE::SubgroupFeatureFlags;
+  using VULKAN_HPP_NAMESPACE::TessellationDomainOrigin;
+  using VULKAN_HPP_NAMESPACE::TessellationDomainOriginKHR;
+
+  //=== VK_VERSION_1_2 ===
+  using VULKAN_HPP_NAMESPACE::DescriptorBindingFlagBits;
+  using VULKAN_HPP_NAMESPACE::DescriptorBindingFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::DescriptorBindingFlags;
+  using VULKAN_HPP_NAMESPACE::DriverId;
+  using VULKAN_HPP_NAMESPACE::DriverIdKHR;
+  using VULKAN_HPP_NAMESPACE::ResolveModeFlagBits;
+  using VULKAN_HPP_NAMESPACE::ResolveModeFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::ResolveModeFlags;
+  using VULKAN_HPP_NAMESPACE::SamplerReductionMode;
+  using VULKAN_HPP_NAMESPACE::SamplerReductionModeEXT;
+  using VULKAN_HPP_NAMESPACE::SemaphoreType;
+  using VULKAN_HPP_NAMESPACE::SemaphoreTypeKHR;
+  using VULKAN_HPP_NAMESPACE::SemaphoreWaitFlagBits;
+  using VULKAN_HPP_NAMESPACE::SemaphoreWaitFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::SemaphoreWaitFlags;
+  using VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependence;
+  using VULKAN_HPP_NAMESPACE::ShaderFloatControlsIndependenceKHR;
+
+  //=== VK_VERSION_1_3 ===
+  using VULKAN_HPP_NAMESPACE::AccessFlagBits2;
+  using VULKAN_HPP_NAMESPACE::AccessFlagBits2KHR;
+  using VULKAN_HPP_NAMESPACE::AccessFlags2;
+  using VULKAN_HPP_NAMESPACE::FormatFeatureFlagBits2;
+  using VULKAN_HPP_NAMESPACE::FormatFeatureFlagBits2KHR;
+  using VULKAN_HPP_NAMESPACE::FormatFeatureFlags2;
+  using VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagBits;
+  using VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackFlags;
+  using VULKAN_HPP_NAMESPACE::PipelineStageFlagBits2;
+  using VULKAN_HPP_NAMESPACE::PipelineStageFlagBits2KHR;
+  using VULKAN_HPP_NAMESPACE::PipelineStageFlags2;
+  using VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateFlags;
+  using VULKAN_HPP_NAMESPACE::RenderingFlagBits;
+  using VULKAN_HPP_NAMESPACE::RenderingFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::RenderingFlags;
+  using VULKAN_HPP_NAMESPACE::SubmitFlagBits;
+  using VULKAN_HPP_NAMESPACE::SubmitFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::SubmitFlags;
+  using VULKAN_HPP_NAMESPACE::ToolPurposeFlagBits;
+  using VULKAN_HPP_NAMESPACE::ToolPurposeFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::ToolPurposeFlags;
+
+  //=== VK_KHR_surface ===
+  using VULKAN_HPP_NAMESPACE::ColorSpaceKHR;
+  using VULKAN_HPP_NAMESPACE::CompositeAlphaFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::CompositeAlphaFlagsKHR;
+  using VULKAN_HPP_NAMESPACE::PresentModeKHR;
+  using VULKAN_HPP_NAMESPACE::SurfaceTransformFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::SurfaceTransformFlagsKHR;
+
+  //=== VK_KHR_swapchain ===
+  using VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::DeviceGroupPresentModeFlagsKHR;
+  using VULKAN_HPP_NAMESPACE::SwapchainCreateFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::SwapchainCreateFlagsKHR;
+
+  //=== VK_KHR_display ===
+  using VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::DisplayModeCreateFlagsKHR;
+  using VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::DisplayPlaneAlphaFlagsKHR;
+  using VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateFlagsKHR;
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+  //=== VK_KHR_xlib_surface ===
+  using VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::XlibSurfaceCreateFlagsKHR;
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+  //=== VK_KHR_xcb_surface ===
+  using VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::XcbSurfaceCreateFlagsKHR;
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  //=== VK_KHR_wayland_surface ===
+  using VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateFlagsKHR;
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_KHR_android_surface ===
+  using VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateFlagsKHR;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_surface ===
+  using VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::Win32SurfaceCreateFlagsKHR;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_debug_report ===
+  using VULKAN_HPP_NAMESPACE::DebugReportFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::DebugReportFlagsEXT;
+  using VULKAN_HPP_NAMESPACE::DebugReportObjectTypeEXT;
+
+  //=== VK_AMD_rasterization_order ===
+  using VULKAN_HPP_NAMESPACE::RasterizationOrderAMD;
+
+  //=== VK_KHR_video_queue ===
+  using VULKAN_HPP_NAMESPACE::QueryResultStatusKHR;
+  using VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoBeginCodingFlagsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoCapabilityFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoCapabilityFlagsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoChromaSubsamplingFlagsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoCodecOperationFlagsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoCodingControlFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoCodingControlFlagsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoComponentBitDepthFlagsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEndCodingFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEndCodingFlagsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoSessionCreateFlagsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateFlagsKHR;
+
+  //=== VK_KHR_video_decode_queue ===
+  using VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeCapabilityFlagsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeFlagsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeUsageFlagsKHR;
+
+  //=== VK_EXT_transform_feedback ===
+  using VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateFlagsEXT;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_EXT_video_encode_h264 ===
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilityFlagsEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlFlagsEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264StdFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264StdFlagsEXT;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_EXT_video_encode_h265 ===
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilityFlagsEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265CtbSizeFlagsEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlFlagsEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265StdFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265StdFlagsEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265TransformBlockSizeFlagsEXT;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_KHR_video_decode_h264 ===
+  using VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureLayoutFlagsKHR;
+
+  //=== VK_AMD_shader_info ===
+  using VULKAN_HPP_NAMESPACE::ShaderInfoTypeAMD;
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_stream_descriptor_surface ===
+  using VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagBitsGGP;
+  using VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateFlagsGGP;
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  //=== VK_NV_external_memory_capabilities ===
+  using VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagBitsNV;
+  using VULKAN_HPP_NAMESPACE::ExternalMemoryFeatureFlagsNV;
+  using VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagBitsNV;
+  using VULKAN_HPP_NAMESPACE::ExternalMemoryHandleTypeFlagsNV;
+
+  //=== VK_EXT_validation_flags ===
+  using VULKAN_HPP_NAMESPACE::ValidationCheckEXT;
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+  //=== VK_NN_vi_surface ===
+  using VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagBitsNN;
+  using VULKAN_HPP_NAMESPACE::ViSurfaceCreateFlagsNN;
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+  //=== VK_EXT_pipeline_robustness ===
+  using VULKAN_HPP_NAMESPACE::PipelineRobustnessBufferBehaviorEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineRobustnessImageBehaviorEXT;
+
+  //=== VK_EXT_conditional_rendering ===
+  using VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::ConditionalRenderingFlagsEXT;
+
+  //=== VK_EXT_display_surface_counter ===
+  using VULKAN_HPP_NAMESPACE::SurfaceCounterFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::SurfaceCounterFlagsEXT;
+
+  //=== VK_EXT_display_control ===
+  using VULKAN_HPP_NAMESPACE::DeviceEventTypeEXT;
+  using VULKAN_HPP_NAMESPACE::DisplayEventTypeEXT;
+  using VULKAN_HPP_NAMESPACE::DisplayPowerStateEXT;
+
+  //=== VK_NV_viewport_swizzle ===
+  using VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagBitsNV;
+  using VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateFlagsNV;
+  using VULKAN_HPP_NAMESPACE::ViewportCoordinateSwizzleNV;
+
+  //=== VK_EXT_discard_rectangles ===
+  using VULKAN_HPP_NAMESPACE::DiscardRectangleModeEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateFlagsEXT;
+
+  //=== VK_EXT_conservative_rasterization ===
+  using VULKAN_HPP_NAMESPACE::ConservativeRasterizationModeEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateFlagsEXT;
+
+  //=== VK_EXT_depth_clip_enable ===
+  using VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateFlagsEXT;
+
+  //=== VK_KHR_performance_query ===
+  using VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::AcquireProfilingLockFlagsKHR;
+  using VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionFlagsKHR;
+  using VULKAN_HPP_NAMESPACE::PerformanceCounterScopeKHR;
+  using VULKAN_HPP_NAMESPACE::PerformanceCounterStorageKHR;
+  using VULKAN_HPP_NAMESPACE::PerformanceCounterUnitKHR;
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+  //=== VK_MVK_ios_surface ===
+  using VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagBitsMVK;
+  using VULKAN_HPP_NAMESPACE::IOSSurfaceCreateFlagsMVK;
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+  //=== VK_MVK_macos_surface ===
+  using VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagBitsMVK;
+  using VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateFlagsMVK;
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  //=== VK_EXT_debug_utils ===
+  using VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::DebugUtilsMessageSeverityFlagsEXT;
+  using VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::DebugUtilsMessageTypeFlagsEXT;
+  using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataFlagsEXT;
+  using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateFlagsEXT;
+
+  //=== VK_EXT_blend_operation_advanced ===
+  using VULKAN_HPP_NAMESPACE::BlendOverlapEXT;
+
+  //=== VK_NV_fragment_coverage_to_color ===
+  using VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagBitsNV;
+  using VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateFlagsNV;
+
+  //=== VK_KHR_acceleration_structure ===
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureBuildTypeKHR;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureCompatibilityKHR;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureCreateFlagsKHR;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureTypeKHR;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureTypeNV;
+  using VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagBitsNV;
+  using VULKAN_HPP_NAMESPACE::BuildAccelerationStructureFlagsKHR;
+  using VULKAN_HPP_NAMESPACE::BuildAccelerationStructureModeKHR;
+  using VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeKHR;
+  using VULKAN_HPP_NAMESPACE::CopyAccelerationStructureModeNV;
+  using VULKAN_HPP_NAMESPACE::GeometryFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::GeometryFlagBitsNV;
+  using VULKAN_HPP_NAMESPACE::GeometryFlagsKHR;
+  using VULKAN_HPP_NAMESPACE::GeometryInstanceFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::GeometryInstanceFlagBitsNV;
+  using VULKAN_HPP_NAMESPACE::GeometryInstanceFlagsKHR;
+  using VULKAN_HPP_NAMESPACE::GeometryTypeKHR;
+  using VULKAN_HPP_NAMESPACE::GeometryTypeNV;
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+  using VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeKHR;
+  using VULKAN_HPP_NAMESPACE::RayTracingShaderGroupTypeNV;
+  using VULKAN_HPP_NAMESPACE::ShaderGroupShaderKHR;
+
+  //=== VK_NV_framebuffer_mixed_samples ===
+  using VULKAN_HPP_NAMESPACE::CoverageModulationModeNV;
+  using VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagBitsNV;
+  using VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateFlagsNV;
+
+  //=== VK_EXT_validation_cache ===
+  using VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::ValidationCacheCreateFlagsEXT;
+  using VULKAN_HPP_NAMESPACE::ValidationCacheHeaderVersionEXT;
+
+  //=== VK_NV_shading_rate_image ===
+  using VULKAN_HPP_NAMESPACE::CoarseSampleOrderTypeNV;
+  using VULKAN_HPP_NAMESPACE::ShadingRatePaletteEntryNV;
+
+  //=== VK_NV_ray_tracing ===
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsTypeNV;
+
+  //=== VK_AMD_pipeline_compiler_control ===
+  using VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagBitsAMD;
+  using VULKAN_HPP_NAMESPACE::PipelineCompilerControlFlagsAMD;
+
+  //=== VK_EXT_calibrated_timestamps ===
+  using VULKAN_HPP_NAMESPACE::TimeDomainEXT;
+
+  //=== VK_KHR_global_priority ===
+  using VULKAN_HPP_NAMESPACE::QueueGlobalPriorityEXT;
+  using VULKAN_HPP_NAMESPACE::QueueGlobalPriorityKHR;
+
+  //=== VK_AMD_memory_overallocation_behavior ===
+  using VULKAN_HPP_NAMESPACE::MemoryOverallocationBehaviorAMD;
+
+  //=== VK_INTEL_performance_query ===
+  using VULKAN_HPP_NAMESPACE::PerformanceConfigurationTypeINTEL;
+  using VULKAN_HPP_NAMESPACE::PerformanceOverrideTypeINTEL;
+  using VULKAN_HPP_NAMESPACE::PerformanceParameterTypeINTEL;
+  using VULKAN_HPP_NAMESPACE::PerformanceValueTypeINTEL;
+  using VULKAN_HPP_NAMESPACE::QueryPoolSamplingModeINTEL;
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_imagepipe_surface ===
+  using VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagBitsFUCHSIA;
+  using VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateFlagsFUCHSIA;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_surface ===
+  using VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::MetalSurfaceCreateFlagsEXT;
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_KHR_fragment_shading_rate ===
+  using VULKAN_HPP_NAMESPACE::FragmentShadingRateCombinerOpKHR;
+
+  //=== VK_AMD_shader_core_properties2 ===
+  using VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagBitsAMD;
+  using VULKAN_HPP_NAMESPACE::ShaderCorePropertiesFlagsAMD;
+
+  //=== VK_EXT_validation_features ===
+  using VULKAN_HPP_NAMESPACE::ValidationFeatureDisableEXT;
+  using VULKAN_HPP_NAMESPACE::ValidationFeatureEnableEXT;
+
+  //=== VK_NV_coverage_reduction_mode ===
+  using VULKAN_HPP_NAMESPACE::CoverageReductionModeNV;
+  using VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagBitsNV;
+  using VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateFlagsNV;
+
+  //=== VK_EXT_provoking_vertex ===
+  using VULKAN_HPP_NAMESPACE::ProvokingVertexModeEXT;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+  using VULKAN_HPP_NAMESPACE::FullScreenExclusiveEXT;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_headless_surface ===
+  using VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateFlagsEXT;
+
+  //=== VK_EXT_line_rasterization ===
+  using VULKAN_HPP_NAMESPACE::LineRasterizationModeEXT;
+
+  //=== VK_KHR_pipeline_executable_properties ===
+  using VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticFormatKHR;
+
+  //=== VK_EXT_host_image_copy ===
+  using VULKAN_HPP_NAMESPACE::HostImageCopyFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::HostImageCopyFlagsEXT;
+
+  //=== VK_KHR_map_memory2 ===
+  using VULKAN_HPP_NAMESPACE::MemoryUnmapFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::MemoryUnmapFlagsKHR;
+
+  //=== VK_EXT_surface_maintenance1 ===
+  using VULKAN_HPP_NAMESPACE::PresentGravityFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::PresentGravityFlagsEXT;
+  using VULKAN_HPP_NAMESPACE::PresentScalingFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::PresentScalingFlagsEXT;
+
+  //=== VK_NV_device_generated_commands ===
+  using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagBitsNV;
+  using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutUsageFlagsNV;
+  using VULKAN_HPP_NAMESPACE::IndirectCommandsTokenTypeNV;
+  using VULKAN_HPP_NAMESPACE::IndirectStateFlagBitsNV;
+  using VULKAN_HPP_NAMESPACE::IndirectStateFlagsNV;
+
+  //=== VK_EXT_depth_bias_control ===
+  using VULKAN_HPP_NAMESPACE::DepthBiasRepresentationEXT;
+
+  //=== VK_EXT_device_memory_report ===
+  using VULKAN_HPP_NAMESPACE::DeviceMemoryReportEventTypeEXT;
+  using VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::DeviceMemoryReportFlagsEXT;
+
+  //=== VK_EXT_pipeline_creation_cache_control ===
+  using VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::PipelineCacheCreateFlags;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_video_encode_queue ===
+  using VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeCapabilityFlagsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeContentFlagsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeFeedbackFlagsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeFlagsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeRateControlFlagsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeRateControlModeFlagsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeTuningModeKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeUsageFlagBitsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeUsageFlagsKHR;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_NV_device_diagnostics_config ===
+  using VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagBitsNV;
+  using VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigFlagsNV;
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_objects ===
+  using VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::ExportMetalObjectTypeFlagsEXT;
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_EXT_graphics_pipeline_library ===
+  using VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryFlagsEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::PipelineLayoutCreateFlags;
+
+  //=== VK_NV_fragment_shading_rate_enums ===
+  using VULKAN_HPP_NAMESPACE::FragmentShadingRateNV;
+  using VULKAN_HPP_NAMESPACE::FragmentShadingRateTypeNV;
+
+  //=== VK_NV_ray_tracing_motion_blur ===
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagBitsNV;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoFlagsNV;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagBitsNV;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceFlagsNV;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceTypeNV;
+
+  //=== VK_EXT_image_compression_control ===
+  using VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::ImageCompressionFixedRateFlagsEXT;
+  using VULKAN_HPP_NAMESPACE::ImageCompressionFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::ImageCompressionFlagsEXT;
+
+  //=== VK_EXT_device_fault ===
+  using VULKAN_HPP_NAMESPACE::DeviceFaultAddressTypeEXT;
+  using VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionEXT;
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+  //=== VK_EXT_directfb_surface ===
+  using VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateFlagsEXT;
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+  //=== VK_EXT_device_address_binding_report ===
+  using VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::DeviceAddressBindingFlagsEXT;
+  using VULKAN_HPP_NAMESPACE::DeviceAddressBindingTypeEXT;
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+  using VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagBitsFUCHSIA;
+  using VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFlagsFUCHSIA;
+  using VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagBitsFUCHSIA;
+  using VULKAN_HPP_NAMESPACE::ImageFormatConstraintsFlagsFUCHSIA;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  //=== VK_EXT_frame_boundary ===
+  using VULKAN_HPP_NAMESPACE::FrameBoundaryFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::FrameBoundaryFlagsEXT;
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  //=== VK_QNX_screen_surface ===
+  using VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagBitsQNX;
+  using VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateFlagsQNX;
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+  //=== VK_EXT_opacity_micromap ===
+  using VULKAN_HPP_NAMESPACE::BuildMicromapFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::BuildMicromapFlagsEXT;
+  using VULKAN_HPP_NAMESPACE::BuildMicromapModeEXT;
+  using VULKAN_HPP_NAMESPACE::CopyMicromapModeEXT;
+  using VULKAN_HPP_NAMESPACE::MicromapCreateFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::MicromapCreateFlagsEXT;
+  using VULKAN_HPP_NAMESPACE::MicromapTypeEXT;
+  using VULKAN_HPP_NAMESPACE::OpacityMicromapFormatEXT;
+  using VULKAN_HPP_NAMESPACE::OpacityMicromapSpecialIndexEXT;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_NV_displacement_micromap ===
+  using VULKAN_HPP_NAMESPACE::DisplacementMicromapFormatNV;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_NV_memory_decompression ===
+  using VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagBitsNV;
+  using VULKAN_HPP_NAMESPACE::MemoryDecompressionMethodFlagsNV;
+
+  //=== VK_EXT_subpass_merge_feedback ===
+  using VULKAN_HPP_NAMESPACE::SubpassMergeStatusEXT;
+
+  //=== VK_LUNARG_direct_driver_loading ===
+  using VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagBitsLUNARG;
+  using VULKAN_HPP_NAMESPACE::DirectDriverLoadingFlagsLUNARG;
+  using VULKAN_HPP_NAMESPACE::DirectDriverLoadingModeLUNARG;
+
+  //=== VK_EXT_rasterization_order_attachment_access ===
+  using VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateFlags;
+  using VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlagBits;
+  using VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateFlags;
+
+  //=== VK_NV_optical_flow ===
+  using VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagBitsNV;
+  using VULKAN_HPP_NAMESPACE::OpticalFlowExecuteFlagsNV;
+  using VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagBitsNV;
+  using VULKAN_HPP_NAMESPACE::OpticalFlowGridSizeFlagsNV;
+  using VULKAN_HPP_NAMESPACE::OpticalFlowPerformanceLevelNV;
+  using VULKAN_HPP_NAMESPACE::OpticalFlowSessionBindingPointNV;
+  using VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagBitsNV;
+  using VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateFlagsNV;
+  using VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagBitsNV;
+  using VULKAN_HPP_NAMESPACE::OpticalFlowUsageFlagsNV;
+
+  //=== VK_KHR_maintenance5 ===
+  using VULKAN_HPP_NAMESPACE::BufferUsageFlagBits2KHR;
+  using VULKAN_HPP_NAMESPACE::BufferUsageFlags2KHR;
+  using VULKAN_HPP_NAMESPACE::PipelineCreateFlagBits2KHR;
+  using VULKAN_HPP_NAMESPACE::PipelineCreateFlags2KHR;
+
+  //=== VK_EXT_shader_object ===
+  using VULKAN_HPP_NAMESPACE::ShaderCodeTypeEXT;
+  using VULKAN_HPP_NAMESPACE::ShaderCreateFlagBitsEXT;
+  using VULKAN_HPP_NAMESPACE::ShaderCreateFlagsEXT;
+
+  //=== VK_NV_ray_tracing_invocation_reorder ===
+  using VULKAN_HPP_NAMESPACE::RayTracingInvocationReorderModeNV;
+
+  //=== VK_NV_low_latency2 ===
+  using VULKAN_HPP_NAMESPACE::LatencyMarkerNV;
+  using VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeNV;
+
+  //=== VK_KHR_cooperative_matrix ===
+  using VULKAN_HPP_NAMESPACE::ComponentTypeKHR;
+  using VULKAN_HPP_NAMESPACE::ComponentTypeNV;
+  using VULKAN_HPP_NAMESPACE::ScopeKHR;
+  using VULKAN_HPP_NAMESPACE::ScopeNV;
+
+  //=== VK_QCOM_image_processing2 ===
+  using VULKAN_HPP_NAMESPACE::BlockMatchWindowCompareModeQCOM;
+
+  //=== VK_QCOM_filter_cubic_weights ===
+  using VULKAN_HPP_NAMESPACE::CubicFilterWeightsQCOM;
+
+  //=== VK_MSFT_layered_driver ===
+  using VULKAN_HPP_NAMESPACE::LayeredDriverUnderlyingApiMSFT;
+
+  //=========================
+  //=== Index Type Traits ===
+  //=========================
+  using VULKAN_HPP_NAMESPACE::IndexTypeValue;
+
+  //======================
+  //=== ENUM to_string ===
+  //======================
+#if !defined( VULKAN_HPP_NO_TO_STRING )
+  using VULKAN_HPP_NAMESPACE::to_string;
+  using VULKAN_HPP_NAMESPACE::toHexString;
+#endif /*VULKAN_HPP_NO_TO_STRING*/
+
+  //=============================
+  //=== EXCEPTIONs AND ERRORs ===
+  //=============================
+#if !defined( VULKAN_HPP_NO_EXCEPTIONS )
+  using VULKAN_HPP_NAMESPACE::DeviceLostError;
+  using VULKAN_HPP_NAMESPACE::Error;
+  using VULKAN_HPP_NAMESPACE::errorCategory;
+  using VULKAN_HPP_NAMESPACE::ErrorCategoryImpl;
+  using VULKAN_HPP_NAMESPACE::ExtensionNotPresentError;
+  using VULKAN_HPP_NAMESPACE::FeatureNotPresentError;
+  using VULKAN_HPP_NAMESPACE::FormatNotSupportedError;
+  using VULKAN_HPP_NAMESPACE::FragmentationError;
+  using VULKAN_HPP_NAMESPACE::FragmentedPoolError;
+  using VULKAN_HPP_NAMESPACE::ImageUsageNotSupportedKHRError;
+  using VULKAN_HPP_NAMESPACE::IncompatibleDisplayKHRError;
+  using VULKAN_HPP_NAMESPACE::IncompatibleDriverError;
+  using VULKAN_HPP_NAMESPACE::InitializationFailedError;
+  using VULKAN_HPP_NAMESPACE::InvalidDrmFormatModifierPlaneLayoutEXTError;
+  using VULKAN_HPP_NAMESPACE::InvalidExternalHandleError;
+  using VULKAN_HPP_NAMESPACE::InvalidOpaqueCaptureAddressError;
+  using VULKAN_HPP_NAMESPACE::InvalidShaderNVError;
+  using VULKAN_HPP_NAMESPACE::LayerNotPresentError;
+  using VULKAN_HPP_NAMESPACE::LogicError;
+  using VULKAN_HPP_NAMESPACE::make_error_code;
+  using VULKAN_HPP_NAMESPACE::make_error_condition;
+  using VULKAN_HPP_NAMESPACE::MemoryMapFailedError;
+  using VULKAN_HPP_NAMESPACE::NativeWindowInUseKHRError;
+  using VULKAN_HPP_NAMESPACE::NotPermittedKHRError;
+  using VULKAN_HPP_NAMESPACE::OutOfDateKHRError;
+  using VULKAN_HPP_NAMESPACE::OutOfDeviceMemoryError;
+  using VULKAN_HPP_NAMESPACE::OutOfHostMemoryError;
+  using VULKAN_HPP_NAMESPACE::OutOfPoolMemoryError;
+  using VULKAN_HPP_NAMESPACE::SurfaceLostKHRError;
+  using VULKAN_HPP_NAMESPACE::SystemError;
+  using VULKAN_HPP_NAMESPACE::TooManyObjectsError;
+  using VULKAN_HPP_NAMESPACE::UnknownError;
+  using VULKAN_HPP_NAMESPACE::ValidationFailedEXTError;
+  using VULKAN_HPP_NAMESPACE::VideoPictureLayoutNotSupportedKHRError;
+  using VULKAN_HPP_NAMESPACE::VideoProfileCodecNotSupportedKHRError;
+  using VULKAN_HPP_NAMESPACE::VideoProfileFormatNotSupportedKHRError;
+  using VULKAN_HPP_NAMESPACE::VideoProfileOperationNotSupportedKHRError;
+  using VULKAN_HPP_NAMESPACE::VideoStdVersionNotSupportedKHRError;
+
+#  if defined( VK_USE_PLATFORM_WIN32_KHR )
+  using VULKAN_HPP_NAMESPACE::FullScreenExclusiveModeLostEXTError;
+#  endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#  if defined( VK_ENABLE_BETA_EXTENSIONS )
+  using VULKAN_HPP_NAMESPACE::InvalidVideoStdParametersKHRError;
+#  endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  using VULKAN_HPP_NAMESPACE::CompressionExhaustedEXTError;
+  using VULKAN_HPP_NAMESPACE::IncompatibleShaderBinaryEXTError;
+#endif /*VULKAN_HPP_NO_EXCEPTIONS*/
+
+  using VULKAN_HPP_NAMESPACE::createResultValueType;
+  using VULKAN_HPP_NAMESPACE::ignore;
+  using VULKAN_HPP_NAMESPACE::resultCheck;
+  using VULKAN_HPP_NAMESPACE::ResultValue;
+  using VULKAN_HPP_NAMESPACE::ResultValueType;
+
+  //===========================
+  //=== CONSTEXPR CONSTANTs ===
+  //===========================
+
+  //=== VK_VERSION_1_0 ===
+  using VULKAN_HPP_NAMESPACE::AttachmentUnused;
+  using VULKAN_HPP_NAMESPACE::False;
+  using VULKAN_HPP_NAMESPACE::LodClampNone;
+  using VULKAN_HPP_NAMESPACE::MaxDescriptionSize;
+  using VULKAN_HPP_NAMESPACE::MaxExtensionNameSize;
+  using VULKAN_HPP_NAMESPACE::MaxMemoryHeaps;
+  using VULKAN_HPP_NAMESPACE::MaxMemoryTypes;
+  using VULKAN_HPP_NAMESPACE::MaxPhysicalDeviceNameSize;
+  using VULKAN_HPP_NAMESPACE::QueueFamilyIgnored;
+  using VULKAN_HPP_NAMESPACE::RemainingArrayLayers;
+  using VULKAN_HPP_NAMESPACE::RemainingMipLevels;
+  using VULKAN_HPP_NAMESPACE::SubpassExternal;
+  using VULKAN_HPP_NAMESPACE::True;
+  using VULKAN_HPP_NAMESPACE::UuidSize;
+  using VULKAN_HPP_NAMESPACE::WholeSize;
+
+  //=== VK_VERSION_1_1 ===
+  using VULKAN_HPP_NAMESPACE::LuidSize;
+  using VULKAN_HPP_NAMESPACE::MaxDeviceGroupSize;
+  using VULKAN_HPP_NAMESPACE::QueueFamilyExternal;
+
+  //=== VK_VERSION_1_2 ===
+  using VULKAN_HPP_NAMESPACE::MaxDriverInfoSize;
+  using VULKAN_HPP_NAMESPACE::MaxDriverNameSize;
+
+  //=== VK_KHR_device_group_creation ===
+  using VULKAN_HPP_NAMESPACE::MaxDeviceGroupSizeKHR;
+
+  //=== VK_KHR_external_memory_capabilities ===
+  using VULKAN_HPP_NAMESPACE::LuidSizeKHR;
+
+  //=== VK_KHR_external_memory ===
+  using VULKAN_HPP_NAMESPACE::QueueFamilyExternalKHR;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_AMDX_shader_enqueue ===
+  using VULKAN_HPP_NAMESPACE::ShaderIndexUnusedAMDX;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+  using VULKAN_HPP_NAMESPACE::ShaderUnusedKHR;
+
+  //=== VK_NV_ray_tracing ===
+  using VULKAN_HPP_NAMESPACE::ShaderUnusedNV;
+
+  //=== VK_KHR_global_priority ===
+  using VULKAN_HPP_NAMESPACE::MaxGlobalPrioritySizeKHR;
+
+  //=== VK_KHR_driver_properties ===
+  using VULKAN_HPP_NAMESPACE::MaxDriverInfoSizeKHR;
+  using VULKAN_HPP_NAMESPACE::MaxDriverNameSizeKHR;
+
+  //=== VK_EXT_global_priority_query ===
+  using VULKAN_HPP_NAMESPACE::MaxGlobalPrioritySizeEXT;
+
+  //=== VK_EXT_image_sliced_view_of_3d ===
+  using VULKAN_HPP_NAMESPACE::Remaining3DSlicesEXT;
+
+  //=== VK_EXT_shader_module_identifier ===
+  using VULKAN_HPP_NAMESPACE::MaxShaderModuleIdentifierSizeEXT;
+
+  //========================
+  //=== CONSTEXPR VALUEs ===
+  //========================
+  using VULKAN_HPP_NAMESPACE::HeaderVersion;
+
+  //=========================
+  //=== CONSTEXPR CALLEEs ===
+  //=========================
+  using VULKAN_HPP_NAMESPACE::apiVersionMajor;
+  using VULKAN_HPP_NAMESPACE::apiVersionMinor;
+  using VULKAN_HPP_NAMESPACE::apiVersionPatch;
+  using VULKAN_HPP_NAMESPACE::apiVersionVariant;
+  using VULKAN_HPP_NAMESPACE::makeApiVersion;
+  using VULKAN_HPP_NAMESPACE::makeVersion;
+  using VULKAN_HPP_NAMESPACE::versionMajor;
+  using VULKAN_HPP_NAMESPACE::versionMinor;
+  using VULKAN_HPP_NAMESPACE::versionPatch;
+
+  //==========================
+  //=== CONSTEXPR CALLERSs ===
+  //==========================
+  using VULKAN_HPP_NAMESPACE::ApiVersion;
+  using VULKAN_HPP_NAMESPACE::ApiVersion10;
+  using VULKAN_HPP_NAMESPACE::ApiVersion11;
+  using VULKAN_HPP_NAMESPACE::ApiVersion12;
+  using VULKAN_HPP_NAMESPACE::ApiVersion13;
+  using VULKAN_HPP_NAMESPACE::HeaderVersionComplete;
+
+  //===============
+  //=== STRUCTs ===
+  //===============
+
+  //=== VK_VERSION_1_0 ===
+  using VULKAN_HPP_NAMESPACE::AllocationCallbacks;
+  using VULKAN_HPP_NAMESPACE::ApplicationInfo;
+  using VULKAN_HPP_NAMESPACE::AttachmentDescription;
+  using VULKAN_HPP_NAMESPACE::AttachmentReference;
+  using VULKAN_HPP_NAMESPACE::BaseInStructure;
+  using VULKAN_HPP_NAMESPACE::BaseOutStructure;
+  using VULKAN_HPP_NAMESPACE::BindSparseInfo;
+  using VULKAN_HPP_NAMESPACE::BufferCopy;
+  using VULKAN_HPP_NAMESPACE::BufferCreateInfo;
+  using VULKAN_HPP_NAMESPACE::BufferImageCopy;
+  using VULKAN_HPP_NAMESPACE::BufferMemoryBarrier;
+  using VULKAN_HPP_NAMESPACE::BufferViewCreateInfo;
+  using VULKAN_HPP_NAMESPACE::ClearAttachment;
+  using VULKAN_HPP_NAMESPACE::ClearColorValue;
+  using VULKAN_HPP_NAMESPACE::ClearDepthStencilValue;
+  using VULKAN_HPP_NAMESPACE::ClearRect;
+  using VULKAN_HPP_NAMESPACE::ClearValue;
+  using VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo;
+  using VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo;
+  using VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo;
+  using VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo;
+  using VULKAN_HPP_NAMESPACE::ComponentMapping;
+  using VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo;
+  using VULKAN_HPP_NAMESPACE::CopyDescriptorSet;
+  using VULKAN_HPP_NAMESPACE::DescriptorBufferInfo;
+  using VULKAN_HPP_NAMESPACE::DescriptorImageInfo;
+  using VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo;
+  using VULKAN_HPP_NAMESPACE::DescriptorPoolSize;
+  using VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo;
+  using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding;
+  using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo;
+  using VULKAN_HPP_NAMESPACE::DeviceCreateInfo;
+  using VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo;
+  using VULKAN_HPP_NAMESPACE::DispatchIndirectCommand;
+  using VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand;
+  using VULKAN_HPP_NAMESPACE::DrawIndirectCommand;
+  using VULKAN_HPP_NAMESPACE::EventCreateInfo;
+  using VULKAN_HPP_NAMESPACE::ExtensionProperties;
+  using VULKAN_HPP_NAMESPACE::Extent2D;
+  using VULKAN_HPP_NAMESPACE::Extent3D;
+  using VULKAN_HPP_NAMESPACE::FenceCreateInfo;
+  using VULKAN_HPP_NAMESPACE::FormatProperties;
+  using VULKAN_HPP_NAMESPACE::FramebufferCreateInfo;
+  using VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo;
+  using VULKAN_HPP_NAMESPACE::ImageBlit;
+  using VULKAN_HPP_NAMESPACE::ImageCopy;
+  using VULKAN_HPP_NAMESPACE::ImageCreateInfo;
+  using VULKAN_HPP_NAMESPACE::ImageFormatProperties;
+  using VULKAN_HPP_NAMESPACE::ImageMemoryBarrier;
+  using VULKAN_HPP_NAMESPACE::ImageResolve;
+  using VULKAN_HPP_NAMESPACE::ImageSubresource;
+  using VULKAN_HPP_NAMESPACE::ImageSubresourceLayers;
+  using VULKAN_HPP_NAMESPACE::ImageSubresourceRange;
+  using VULKAN_HPP_NAMESPACE::ImageViewCreateInfo;
+  using VULKAN_HPP_NAMESPACE::InstanceCreateInfo;
+  using VULKAN_HPP_NAMESPACE::LayerProperties;
+  using VULKAN_HPP_NAMESPACE::MappedMemoryRange;
+  using VULKAN_HPP_NAMESPACE::MemoryAllocateInfo;
+  using VULKAN_HPP_NAMESPACE::MemoryBarrier;
+  using VULKAN_HPP_NAMESPACE::MemoryHeap;
+  using VULKAN_HPP_NAMESPACE::MemoryRequirements;
+  using VULKAN_HPP_NAMESPACE::MemoryType;
+  using VULKAN_HPP_NAMESPACE::Offset2D;
+  using VULKAN_HPP_NAMESPACE::Offset3D;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties;
+  using VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo;
+  using VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne;
+  using VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState;
+  using VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo;
+  using VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo;
+  using VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo;
+  using VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo;
+  using VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo;
+  using VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo;
+  using VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo;
+  using VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo;
+  using VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo;
+  using VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo;
+  using VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo;
+  using VULKAN_HPP_NAMESPACE::PushConstantRange;
+  using VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo;
+  using VULKAN_HPP_NAMESPACE::QueueFamilyProperties;
+  using VULKAN_HPP_NAMESPACE::Rect2D;
+  using VULKAN_HPP_NAMESPACE::RenderPassBeginInfo;
+  using VULKAN_HPP_NAMESPACE::RenderPassCreateInfo;
+  using VULKAN_HPP_NAMESPACE::SamplerCreateInfo;
+  using VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo;
+  using VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo;
+  using VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo;
+  using VULKAN_HPP_NAMESPACE::SparseImageFormatProperties;
+  using VULKAN_HPP_NAMESPACE::SparseImageMemoryBind;
+  using VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo;
+  using VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements;
+  using VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo;
+  using VULKAN_HPP_NAMESPACE::SparseMemoryBind;
+  using VULKAN_HPP_NAMESPACE::SpecializationInfo;
+  using VULKAN_HPP_NAMESPACE::SpecializationMapEntry;
+  using VULKAN_HPP_NAMESPACE::StencilOpState;
+  using VULKAN_HPP_NAMESPACE::SubmitInfo;
+  using VULKAN_HPP_NAMESPACE::SubpassDependency;
+  using VULKAN_HPP_NAMESPACE::SubpassDescription;
+  using VULKAN_HPP_NAMESPACE::SubresourceLayout;
+  using VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription;
+  using VULKAN_HPP_NAMESPACE::VertexInputBindingDescription;
+  using VULKAN_HPP_NAMESPACE::Viewport;
+  using VULKAN_HPP_NAMESPACE::WriteDescriptorSet;
+
+  //=== VK_VERSION_1_1 ===
+  using VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo;
+  using VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfoKHR;
+  using VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo;
+  using VULKAN_HPP_NAMESPACE::BindBufferMemoryInfoKHR;
+  using VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo;
+  using VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfoKHR;
+  using VULKAN_HPP_NAMESPACE::BindImageMemoryInfo;
+  using VULKAN_HPP_NAMESPACE::BindImageMemoryInfoKHR;
+  using VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo;
+  using VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfoKHR;
+  using VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2;
+  using VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2KHR;
+  using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport;
+  using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupportKHR;
+  using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo;
+  using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry;
+  using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntryKHR;
+  using VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo;
+  using VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfoKHR;
+  using VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo;
+  using VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfoKHR;
+  using VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo;
+  using VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo;
+  using VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfoKHR;
+  using VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo;
+  using VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfoKHR;
+  using VULKAN_HPP_NAMESPACE::DeviceQueueInfo2;
+  using VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo;
+  using VULKAN_HPP_NAMESPACE::ExportFenceCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo;
+  using VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo;
+  using VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ExternalBufferProperties;
+  using VULKAN_HPP_NAMESPACE::ExternalBufferPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::ExternalFenceProperties;
+  using VULKAN_HPP_NAMESPACE::ExternalFencePropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties;
+  using VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo;
+  using VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo;
+  using VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ExternalMemoryProperties;
+  using VULKAN_HPP_NAMESPACE::ExternalMemoryPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties;
+  using VULKAN_HPP_NAMESPACE::ExternalSemaphorePropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::FormatProperties2;
+  using VULKAN_HPP_NAMESPACE::FormatProperties2KHR;
+  using VULKAN_HPP_NAMESPACE::ImageFormatProperties2;
+  using VULKAN_HPP_NAMESPACE::ImageFormatProperties2KHR;
+  using VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2;
+  using VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2KHR;
+  using VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo;
+  using VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2;
+  using VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2KHR;
+  using VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo;
+  using VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference;
+  using VULKAN_HPP_NAMESPACE::InputAttachmentAspectReferenceKHR;
+  using VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo;
+  using VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfoKHR;
+  using VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo;
+  using VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements;
+  using VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirementsKHR;
+  using VULKAN_HPP_NAMESPACE::MemoryRequirements2;
+  using VULKAN_HPP_NAMESPACE::MemoryRequirements2KHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfoKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfoKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfoKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfoKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2KHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceIDPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2KHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3PropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2KHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2KHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParameterFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2KHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointerFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointerFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo;
+  using VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo;
+  using VULKAN_HPP_NAMESPACE::QueueFamilyProperties2;
+  using VULKAN_HPP_NAMESPACE::QueueFamilyProperties2KHR;
+  using VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo;
+  using VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo;
+  using VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo;
+  using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties;
+  using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo;
+  using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfoKHR;
+  using VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2;
+  using VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2KHR;
+  using VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2;
+  using VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2KHR;
+
+  //=== VK_VERSION_1_2 ===
+  using VULKAN_HPP_NAMESPACE::AttachmentDescription2;
+  using VULKAN_HPP_NAMESPACE::AttachmentDescription2KHR;
+  using VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout;
+  using VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayoutKHR;
+  using VULKAN_HPP_NAMESPACE::AttachmentReference2;
+  using VULKAN_HPP_NAMESPACE::AttachmentReference2KHR;
+  using VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout;
+  using VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayoutKHR;
+  using VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo;
+  using VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfoEXT;
+  using VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfoKHR;
+  using VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo;
+  using VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ConformanceVersion;
+  using VULKAN_HPP_NAMESPACE::ConformanceVersionKHR;
+  using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo;
+  using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo;
+  using VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport;
+  using VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupportEXT;
+  using VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo;
+  using VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfoKHR;
+  using VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo;
+  using VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfoKHR;
+  using VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo;
+  using VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo;
+  using VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo;
+  using VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo;
+  using VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolvePropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFloat16Int8FeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64FeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8FeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphorePropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo;
+  using VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfoKHR;
+  using VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2;
+  using VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2KHR;
+  using VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo;
+  using VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo;
+  using VULKAN_HPP_NAMESPACE::SemaphoreSignalInfoKHR;
+  using VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo;
+  using VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo;
+  using VULKAN_HPP_NAMESPACE::SemaphoreWaitInfoKHR;
+  using VULKAN_HPP_NAMESPACE::SubpassBeginInfo;
+  using VULKAN_HPP_NAMESPACE::SubpassBeginInfoKHR;
+  using VULKAN_HPP_NAMESPACE::SubpassDependency2;
+  using VULKAN_HPP_NAMESPACE::SubpassDependency2KHR;
+  using VULKAN_HPP_NAMESPACE::SubpassDescription2;
+  using VULKAN_HPP_NAMESPACE::SubpassDescription2KHR;
+  using VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve;
+  using VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolveKHR;
+  using VULKAN_HPP_NAMESPACE::SubpassEndInfo;
+  using VULKAN_HPP_NAMESPACE::SubpassEndInfoKHR;
+  using VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo;
+  using VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfoKHR;
+
+  //=== VK_VERSION_1_3 ===
+  using VULKAN_HPP_NAMESPACE::BlitImageInfo2;
+  using VULKAN_HPP_NAMESPACE::BlitImageInfo2KHR;
+  using VULKAN_HPP_NAMESPACE::BufferCopy2;
+  using VULKAN_HPP_NAMESPACE::BufferCopy2KHR;
+  using VULKAN_HPP_NAMESPACE::BufferImageCopy2;
+  using VULKAN_HPP_NAMESPACE::BufferImageCopy2KHR;
+  using VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2;
+  using VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2KHR;
+  using VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo;
+  using VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfoKHR;
+  using VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo;
+  using VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfoKHR;
+  using VULKAN_HPP_NAMESPACE::CopyBufferInfo2;
+  using VULKAN_HPP_NAMESPACE::CopyBufferInfo2KHR;
+  using VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2;
+  using VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2KHR;
+  using VULKAN_HPP_NAMESPACE::CopyImageInfo2;
+  using VULKAN_HPP_NAMESPACE::CopyImageInfo2KHR;
+  using VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2;
+  using VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2KHR;
+  using VULKAN_HPP_NAMESPACE::DependencyInfo;
+  using VULKAN_HPP_NAMESPACE::DependencyInfoKHR;
+  using VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo;
+  using VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements;
+  using VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirementsKHR;
+  using VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements;
+  using VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirementsKHR;
+  using VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo;
+  using VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::FormatProperties3;
+  using VULKAN_HPP_NAMESPACE::FormatProperties3KHR;
+  using VULKAN_HPP_NAMESPACE::ImageBlit2;
+  using VULKAN_HPP_NAMESPACE::ImageBlit2KHR;
+  using VULKAN_HPP_NAMESPACE::ImageCopy2;
+  using VULKAN_HPP_NAMESPACE::ImageCopy2KHR;
+  using VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2;
+  using VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2KHR;
+  using VULKAN_HPP_NAMESPACE::ImageResolve2;
+  using VULKAN_HPP_NAMESPACE::ImageResolve2KHR;
+  using VULKAN_HPP_NAMESPACE::MemoryBarrier2;
+  using VULKAN_HPP_NAMESPACE::MemoryBarrier2KHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4FeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4PropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2FeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceToolPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PipelineCreationFeedback;
+  using VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo;
+  using VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo;
+  using VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo;
+  using VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo;
+  using VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo;
+  using VULKAN_HPP_NAMESPACE::RenderingAttachmentInfoKHR;
+  using VULKAN_HPP_NAMESPACE::RenderingInfo;
+  using VULKAN_HPP_NAMESPACE::RenderingInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ResolveImageInfo2;
+  using VULKAN_HPP_NAMESPACE::ResolveImageInfo2KHR;
+  using VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo;
+  using VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ShaderRequiredSubgroupSizeCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::SubmitInfo2;
+  using VULKAN_HPP_NAMESPACE::SubmitInfo2KHR;
+  using VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock;
+  using VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlockEXT;
+
+  //=== VK_KHR_surface ===
+  using VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR;
+  using VULKAN_HPP_NAMESPACE::SurfaceFormatKHR;
+
+  //=== VK_KHR_swapchain ===
+  using VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR;
+  using VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR;
+  using VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR;
+  using VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR;
+  using VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::PresentInfoKHR;
+  using VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR;
+
+  //=== VK_KHR_display ===
+  using VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR;
+  using VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR;
+  using VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR;
+
+  //=== VK_KHR_display_swapchain ===
+  using VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR;
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+  //=== VK_KHR_xlib_surface ===
+  using VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR;
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+  //=== VK_KHR_xcb_surface ===
+  using VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR;
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+  //=== VK_KHR_wayland_surface ===
+  using VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR;
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_KHR_android_surface ===
+  using VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_surface ===
+  using VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_debug_report ===
+  using VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT;
+
+  //=== VK_AMD_rasterization_order ===
+  using VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD;
+
+  //=== VK_EXT_debug_marker ===
+  using VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT;
+
+  //=== VK_KHR_video_queue ===
+  using VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR;
+  using VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::QueueFamilyVideoPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR;
+  using VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR;
+  using VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR;
+
+  //=== VK_KHR_video_decode_queue ===
+  using VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR;
+
+  //=== VK_NV_dedicated_allocation ===
+  using VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV;
+  using VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV;
+  using VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV;
+
+  //=== VK_EXT_transform_feedback ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT;
+
+  //=== VK_NVX_binary_import ===
+  using VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX;
+  using VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX;
+  using VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX;
+
+  //=== VK_NVX_image_view_handle ===
+  using VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX;
+  using VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_EXT_video_encode_h264 ===
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264GopRemainingFrameInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264PictureInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264QualityLevelPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersFeedbackInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersGetInfoEXT;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_EXT_video_encode_h265 ===
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265GopRemainingFrameInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265PictureInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265QualityLevelPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersFeedbackInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersGetInfoEXT;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_KHR_video_decode_h264 ===
+  using VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoKHR;
+
+  //=== VK_AMD_texture_gather_bias_lod ===
+  using VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD;
+
+  //=== VK_AMD_shader_info ===
+  using VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD;
+  using VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD;
+
+  //=== VK_KHR_dynamic_rendering ===
+  using VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD;
+  using VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoNV;
+  using VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX;
+  using VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT;
+  using VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR;
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_stream_descriptor_surface ===
+  using VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP;
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  //=== VK_NV_corner_sampled_image ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV;
+
+  //=== VK_NV_external_memory_capabilities ===
+  using VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV;
+
+  //=== VK_NV_external_memory ===
+  using VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV;
+  using VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_external_memory_win32 ===
+  using VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV;
+  using VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_NV_win32_keyed_mutex ===
+  using VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_validation_flags ===
+  using VULKAN_HPP_NAMESPACE::ValidationFlagsEXT;
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+  //=== VK_NN_vi_surface ===
+  using VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN;
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+  //=== VK_EXT_astc_decode_mode ===
+  using VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT;
+
+  //=== VK_EXT_pipeline_robustness ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_memory_win32 ===
+  using VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR;
+  using VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR;
+  using VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_memory_fd ===
+  using VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR;
+  using VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_win32_keyed_mutex ===
+  using VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_semaphore_win32 ===
+  using VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR;
+  using VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_semaphore_fd ===
+  using VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR;
+  using VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR;
+
+  //=== VK_KHR_push_descriptor ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR;
+
+  //=== VK_EXT_conditional_rendering ===
+  using VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT;
+
+  //=== VK_KHR_incremental_present ===
+  using VULKAN_HPP_NAMESPACE::PresentRegionKHR;
+  using VULKAN_HPP_NAMESPACE::PresentRegionsKHR;
+  using VULKAN_HPP_NAMESPACE::RectLayerKHR;
+
+  //=== VK_NV_clip_space_w_scaling ===
+  using VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV;
+  using VULKAN_HPP_NAMESPACE::ViewportWScalingNV;
+
+  //=== VK_EXT_display_surface_counter ===
+  using VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT;
+
+  //=== VK_EXT_display_control ===
+  using VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT;
+  using VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT;
+
+  //=== VK_GOOGLE_display_timing ===
+  using VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE;
+  using VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE;
+  using VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE;
+  using VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE;
+
+  //=== VK_NVX_multiview_per_view_attributes ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX;
+
+  //=== VK_NV_viewport_swizzle ===
+  using VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV;
+  using VULKAN_HPP_NAMESPACE::ViewportSwizzleNV;
+
+  //=== VK_EXT_discard_rectangles ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT;
+
+  //=== VK_EXT_conservative_rasterization ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT;
+
+  //=== VK_EXT_depth_clip_enable ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT;
+
+  //=== VK_EXT_hdr_metadata ===
+  using VULKAN_HPP_NAMESPACE::HdrMetadataEXT;
+  using VULKAN_HPP_NAMESPACE::XYColorEXT;
+
+  //=== VK_KHR_shared_presentable_image ===
+  using VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_KHR_external_fence_win32 ===
+  using VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR;
+  using VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_KHR_external_fence_fd ===
+  using VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR;
+
+  //=== VK_KHR_performance_query ===
+  using VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR;
+  using VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR;
+  using VULKAN_HPP_NAMESPACE::PerformanceCounterKHR;
+  using VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR;
+  using VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR;
+
+  //=== VK_KHR_get_surface_capabilities2 ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR;
+  using VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR;
+  using VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR;
+
+  //=== VK_KHR_get_display_properties2 ===
+  using VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR;
+  using VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR;
+  using VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR;
+  using VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR;
+  using VULKAN_HPP_NAMESPACE::DisplayProperties2KHR;
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+  //=== VK_MVK_ios_surface ===
+  using VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK;
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+  //=== VK_MVK_macos_surface ===
+  using VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK;
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+  //=== VK_EXT_debug_utils ===
+  using VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT;
+  using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT;
+  using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT;
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_ANDROID_external_memory_android_hardware_buffer ===
+  using VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID;
+  using VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID;
+  using VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID;
+  using VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID;
+  using VULKAN_HPP_NAMESPACE::ExternalFormatANDROID;
+  using VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID;
+  using VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_AMDX_shader_enqueue ===
+  using VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX;
+  using VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX;
+  using VULKAN_HPP_NAMESPACE::DispatchGraphInfoAMDX;
+  using VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX;
+  using VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX;
+  using VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_EXT_sample_locations ===
+  using VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT;
+  using VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT;
+  using VULKAN_HPP_NAMESPACE::SampleLocationEXT;
+  using VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT;
+  using VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT;
+
+  //=== VK_EXT_blend_operation_advanced ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT;
+
+  //=== VK_NV_fragment_coverage_to_color ===
+  using VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV;
+
+  //=== VK_KHR_acceleration_structure ===
+  using VULKAN_HPP_NAMESPACE::AabbPositionsKHR;
+  using VULKAN_HPP_NAMESPACE::AabbPositionsNV;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceNV;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR;
+  using VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR;
+  using VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR;
+  using VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR;
+  using VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR;
+  using VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::TransformMatrixKHR;
+  using VULKAN_HPP_NAMESPACE::TransformMatrixNV;
+  using VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR;
+
+  //=== VK_KHR_ray_tracing_pipeline ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR;
+  using VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR;
+
+  //=== VK_KHR_ray_query ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR;
+
+  //=== VK_NV_framebuffer_mixed_samples ===
+  using VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV;
+
+  //=== VK_NV_shader_sm_builtins ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV;
+
+  //=== VK_EXT_image_drm_format_modifier ===
+  using VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT;
+  using VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT;
+  using VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT;
+  using VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT;
+
+  //=== VK_EXT_validation_cache ===
+  using VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_portability_subset ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_NV_shading_rate_image ===
+  using VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV;
+  using VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV;
+  using VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV;
+  using VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV;
+  using VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV;
+
+  //=== VK_NV_ray_tracing ===
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV;
+  using VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV;
+  using VULKAN_HPP_NAMESPACE::GeometryAABBNV;
+  using VULKAN_HPP_NAMESPACE::GeometryDataNV;
+  using VULKAN_HPP_NAMESPACE::GeometryNV;
+  using VULKAN_HPP_NAMESPACE::GeometryTrianglesNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV;
+  using VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV;
+  using VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV;
+  using VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV;
+
+  //=== VK_NV_representative_fragment_test ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV;
+
+  //=== VK_EXT_filter_cubic ===
+  using VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT;
+
+  //=== VK_EXT_external_memory_host ===
+  using VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT;
+  using VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT;
+
+  //=== VK_KHR_shader_clock ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR;
+
+  //=== VK_AMD_pipeline_compiler_control ===
+  using VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD;
+
+  //=== VK_EXT_calibrated_timestamps ===
+  using VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT;
+
+  //=== VK_AMD_shader_core_properties ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD;
+
+  //=== VK_KHR_video_decode_h265 ===
+  using VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoKHR;
+
+  //=== VK_KHR_global_priority ===
+  using VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR;
+
+  //=== VK_AMD_memory_overallocation_behavior ===
+  using VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD;
+
+  //=== VK_EXT_vertex_attribute_divisor ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT;
+
+#if defined( VK_USE_PLATFORM_GGP )
+  //=== VK_GGP_frame_token ===
+  using VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP;
+#endif /*VK_USE_PLATFORM_GGP*/
+
+  //=== VK_NV_compute_shader_derivatives ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV;
+
+  //=== VK_NV_mesh_shader ===
+  using VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV;
+
+  //=== VK_NV_shader_image_footprint ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV;
+
+  //=== VK_NV_scissor_exclusive ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV;
+
+  //=== VK_NV_device_diagnostic_checkpoints ===
+  using VULKAN_HPP_NAMESPACE::CheckpointDataNV;
+  using VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV;
+
+  //=== VK_INTEL_shader_integer_functions2 ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL;
+
+  //=== VK_INTEL_performance_query ===
+  using VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL;
+  using VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL;
+  using VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL;
+  using VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL;
+  using VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL;
+  using VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL;
+  using VULKAN_HPP_NAMESPACE::PerformanceValueINTEL;
+  using VULKAN_HPP_NAMESPACE::QueryPoolCreateInfoINTEL;
+  using VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL;
+
+  //=== VK_EXT_pci_bus_info ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT;
+
+  //=== VK_AMD_display_native_hdr ===
+  using VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD;
+  using VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD;
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_imagepipe_surface ===
+  using VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_surface ===
+  using VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT;
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_EXT_fragment_density_map ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT;
+
+  //=== VK_KHR_fragment_shading_rate ===
+  using VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR;
+
+  //=== VK_AMD_shader_core_properties2 ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD;
+
+  //=== VK_AMD_device_coherent_memory ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD;
+
+  //=== VK_EXT_shader_image_atomic_int64 ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT;
+
+  //=== VK_EXT_memory_budget ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT;
+
+  //=== VK_EXT_memory_priority ===
+  using VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT;
+
+  //=== VK_KHR_surface_protected_capabilities ===
+  using VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR;
+
+  //=== VK_NV_dedicated_allocation_image_aliasing ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV;
+
+  //=== VK_EXT_buffer_device_address ===
+  using VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferAddressFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT;
+
+  //=== VK_EXT_validation_features ===
+  using VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT;
+
+  //=== VK_KHR_present_wait ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR;
+
+  //=== VK_NV_cooperative_matrix ===
+  using VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV;
+
+  //=== VK_NV_coverage_reduction_mode ===
+  using VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV;
+
+  //=== VK_EXT_fragment_shader_interlock ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT;
+
+  //=== VK_EXT_ycbcr_image_arrays ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT;
+
+  //=== VK_EXT_provoking_vertex ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT;
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+  //=== VK_EXT_full_screen_exclusive ===
+  using VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT;
+  using VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT;
+  using VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT;
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+  //=== VK_EXT_headless_surface ===
+  using VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT;
+
+  //=== VK_EXT_line_rasterization ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT;
+
+  //=== VK_EXT_shader_atomic_float ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT;
+
+  //=== VK_EXT_index_type_uint8 ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT;
+
+  //=== VK_EXT_extended_dynamic_state ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT;
+
+  //=== VK_KHR_pipeline_executable_properties ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR;
+  using VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR;
+  using VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR;
+  using VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR;
+  using VULKAN_HPP_NAMESPACE::PipelineInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineInfoKHR;
+
+  //=== VK_EXT_host_image_copy ===
+  using VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT;
+  using VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT;
+  using VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT;
+  using VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT;
+  using VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT;
+  using VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySizeEXT;
+
+  //=== VK_KHR_map_memory2 ===
+  using VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR;
+  using VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR;
+
+  //=== VK_EXT_shader_atomic_float2 ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT;
+
+  //=== VK_EXT_surface_maintenance1 ===
+  using VULKAN_HPP_NAMESPACE::SurfacePresentModeCompatibilityEXT;
+  using VULKAN_HPP_NAMESPACE::SurfacePresentModeEXT;
+  using VULKAN_HPP_NAMESPACE::SurfacePresentScalingCapabilitiesEXT;
+
+  //=== VK_EXT_swapchain_maintenance1 ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSwapchainMaintenance1FeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT;
+  using VULKAN_HPP_NAMESPACE::SwapchainPresentFenceInfoEXT;
+  using VULKAN_HPP_NAMESPACE::SwapchainPresentModeInfoEXT;
+  using VULKAN_HPP_NAMESPACE::SwapchainPresentModesCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::SwapchainPresentScalingCreateInfoEXT;
+
+  //=== VK_NV_device_generated_commands ===
+  using VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV;
+  using VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV;
+  using VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV;
+  using VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV;
+  using VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV;
+  using VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV;
+  using VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV;
+  using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV;
+  using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV;
+  using VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV;
+  using VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV;
+
+  //=== VK_NV_inherited_viewport_scissor ===
+  using VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV;
+
+  //=== VK_EXT_texel_buffer_alignment ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT;
+
+  //=== VK_QCOM_render_pass_transform ===
+  using VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM;
+  using VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM;
+
+  //=== VK_EXT_depth_bias_control ===
+  using VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DepthBiasRepresentationInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthBiasControlFeaturesEXT;
+
+  //=== VK_EXT_device_memory_report ===
+  using VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT;
+
+  //=== VK_EXT_robustness2 ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT;
+
+  //=== VK_EXT_custom_border_color ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT;
+
+  //=== VK_KHR_pipeline_library ===
+  using VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR;
+
+  //=== VK_NV_present_barrier ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV;
+  using VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV;
+
+  //=== VK_KHR_present_id ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PresentIdKHR;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_KHR_video_encode_queue ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR;
+  using VULKAN_HPP_NAMESPACE::QueryPoolVideoEncodeFeedbackCreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR;
+  using VULKAN_HPP_NAMESPACE::VideoEncodeUsageInfoKHR;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_NV_device_diagnostics_config ===
+  using VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV;
+
+  //=== VK_NV_low_latency ===
+  using VULKAN_HPP_NAMESPACE::QueryLowLatencySupportNV;
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+  //=== VK_EXT_metal_objects ===
+  using VULKAN_HPP_NAMESPACE::ExportMetalBufferInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ExportMetalCommandQueueInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ExportMetalDeviceInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ExportMetalIOSurfaceInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ExportMetalObjectCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ExportMetalSharedEventInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ExportMetalTextureInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ImportMetalBufferInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ImportMetalIOSurfaceInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ImportMetalSharedEventInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ImportMetalTextureInfoEXT;
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+  //=== VK_KHR_synchronization2 ===
+  using VULKAN_HPP_NAMESPACE::CheckpointData2NV;
+  using VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV;
+
+  //=== VK_EXT_descriptor_buffer ===
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT;
+  using VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DescriptorBufferBindingPushDescriptorBufferHandleEXT;
+  using VULKAN_HPP_NAMESPACE::DescriptorDataEXT;
+  using VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT;
+  using VULKAN_HPP_NAMESPACE::OpaqueCaptureDescriptorDataCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT;
+
+  //=== VK_EXT_graphics_pipeline_library ===
+  using VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT;
+
+  //=== VK_AMD_shader_early_and_late_fragment_tests ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD;
+
+  //=== VK_KHR_fragment_shader_barycentric ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricPropertiesKHR;
+
+  //=== VK_KHR_shader_subgroup_uniform_control_flow ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR;
+
+  //=== VK_NV_fragment_shading_rate_enums ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV;
+  using VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV;
+
+  //=== VK_NV_ray_tracing_motion_blur ===
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryMotionTrianglesDataNV;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV;
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::SRTDataNV;
+
+  //=== VK_EXT_mesh_shader ===
+  using VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesEXT;
+
+  //=== VK_EXT_ycbcr_2plane_444_formats ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT;
+
+  //=== VK_EXT_fragment_density_map2 ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT;
+
+  //=== VK_QCOM_rotated_copy_commands ===
+  using VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM;
+
+  //=== VK_KHR_workgroup_memory_explicit_layout ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR;
+
+  //=== VK_EXT_image_compression_control ===
+  using VULKAN_HPP_NAMESPACE::ImageCompressionControlEXT;
+  using VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlFeaturesEXT;
+
+  //=== VK_EXT_attachment_feedback_loop_layout ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT;
+
+  //=== VK_EXT_4444_formats ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT;
+
+  //=== VK_EXT_device_fault ===
+  using VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT;
+  using VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT;
+  using VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT;
+  using VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT;
+
+  //=== VK_EXT_rgba10x6_formats ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT;
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+  //=== VK_EXT_directfb_surface ===
+  using VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT;
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+  //=== VK_EXT_vertex_input_dynamic_state ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT;
+  using VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT;
+
+  //=== VK_EXT_physical_device_drm ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT;
+
+  //=== VK_EXT_device_address_binding_report ===
+  using VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT;
+
+  //=== VK_EXT_depth_clip_control ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT;
+
+  //=== VK_EXT_primitive_topology_list_restart ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT;
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_memory ===
+  using VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA;
+  using VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA;
+  using VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_external_semaphore ===
+  using VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA;
+  using VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+  using VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA;
+  using VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA;
+  using VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA;
+  using VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA;
+  using VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA;
+  using VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA;
+  using VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA;
+  using VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA;
+  using VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA;
+  using VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  //=== VK_HUAWEI_subpass_shading ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI;
+  using VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI;
+
+  //=== VK_HUAWEI_invocation_mask ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI;
+
+  //=== VK_NV_external_memory_rdma ===
+  using VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV;
+
+  //=== VK_EXT_pipeline_properties ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT;
+
+  //=== VK_EXT_frame_boundary ===
+  using VULKAN_HPP_NAMESPACE::FrameBoundaryEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFrameBoundaryFeaturesEXT;
+
+  //=== VK_EXT_multisampled_render_to_single_sampled ===
+  using VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT;
+
+  //=== VK_EXT_extended_dynamic_state2 ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT;
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  //=== VK_QNX_screen_surface ===
+  using VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX;
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+  //=== VK_EXT_color_write_enable ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT;
+
+  //=== VK_EXT_primitives_generated_query ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT;
+
+  //=== VK_KHR_ray_tracing_maintenance1 ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMaintenance1FeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR;
+
+  //=== VK_EXT_image_view_min_lod ===
+  using VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT;
+
+  //=== VK_EXT_multi_draw ===
+  using VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT;
+  using VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT;
+
+  //=== VK_EXT_image_2d_view_of_3d ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT;
+
+  //=== VK_EXT_shader_tile_image ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT;
+
+  //=== VK_EXT_opacity_micromap ===
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT;
+  using VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT;
+  using VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT;
+  using VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT;
+  using VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT;
+  using VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT;
+  using VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::MicromapTriangleEXT;
+  using VULKAN_HPP_NAMESPACE::MicromapUsageEXT;
+  using VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT;
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+  //=== VK_NV_displacement_micromap ===
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesDisplacementMicromapNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapPropertiesNV;
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+  //=== VK_HUAWEI_cluster_culling_shader ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderFeaturesHUAWEI;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderPropertiesHUAWEI;
+
+  //=== VK_EXT_border_color_swizzle ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT;
+
+  //=== VK_EXT_pageable_device_local_memory ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT;
+
+  //=== VK_ARM_shader_core_properties ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesARM;
+
+  //=== VK_EXT_image_sliced_view_of_3d ===
+  using VULKAN_HPP_NAMESPACE::ImageViewSlicedCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageSlicedViewOf3DFeaturesEXT;
+
+  //=== VK_VALVE_descriptor_set_host_mapping ===
+  using VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE;
+  using VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE;
+
+  //=== VK_EXT_depth_clamp_zero_one ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesEXT;
+
+  //=== VK_EXT_non_seamless_cube_map ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT;
+
+  //=== VK_QCOM_fragment_density_map_offset ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM;
+  using VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM;
+
+  //=== VK_NV_copy_memory_indirect ===
+  using VULKAN_HPP_NAMESPACE::CopyMemoryIndirectCommandNV;
+  using VULKAN_HPP_NAMESPACE::CopyMemoryToImageIndirectCommandNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectPropertiesNV;
+
+  //=== VK_NV_memory_decompression ===
+  using VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionPropertiesNV;
+
+  //=== VK_NV_device_generated_commands_compute ===
+  using VULKAN_HPP_NAMESPACE::BindPipelineIndirectCommandNV;
+  using VULKAN_HPP_NAMESPACE::ComputePipelineIndirectBufferInfoNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV;
+
+  //=== VK_NV_linear_color_attachment ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV;
+
+  //=== VK_EXT_image_compression_control_swapchain ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT;
+
+  //=== VK_QCOM_image_processing ===
+  using VULKAN_HPP_NAMESPACE::ImageViewSampleWeightCreateInfoQCOM;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingFeaturesQCOM;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingPropertiesQCOM;
+
+  //=== VK_EXT_nested_command_buffer ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceNestedCommandBufferFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceNestedCommandBufferPropertiesEXT;
+
+  //=== VK_EXT_external_memory_acquire_unmodified ===
+  using VULKAN_HPP_NAMESPACE::ExternalMemoryAcquireUnmodifiedEXT;
+
+  //=== VK_EXT_extended_dynamic_state3 ===
+  using VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT;
+  using VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT;
+
+  //=== VK_EXT_subpass_merge_feedback ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassMergeFeedbackFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::RenderPassCreationControlEXT;
+  using VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT;
+  using VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT;
+
+  //=== VK_LUNARG_direct_driver_loading ===
+  using VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG;
+  using VULKAN_HPP_NAMESPACE::DirectDriverLoadingListLUNARG;
+
+  //=== VK_EXT_shader_module_identifier ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::PipelineShaderStageModuleIdentifierCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT;
+
+  //=== VK_EXT_rasterization_order_attachment_access ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesARM;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT;
+
+  //=== VK_NV_optical_flow ===
+  using VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV;
+  using VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV;
+  using VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV;
+  using VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV;
+  using VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV;
+
+  //=== VK_EXT_legacy_dithering ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT;
+
+  //=== VK_EXT_pipeline_protected_access ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT;
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+  //=== VK_ANDROID_external_format_resolve ===
+  using VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatResolvePropertiesANDROID;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolveFeaturesANDROID;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolvePropertiesANDROID;
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+  //=== VK_KHR_maintenance5 ===
+  using VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR;
+  using VULKAN_HPP_NAMESPACE::ImageSubresource2EXT;
+  using VULKAN_HPP_NAMESPACE::ImageSubresource2KHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5FeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5PropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR;
+  using VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR;
+  using VULKAN_HPP_NAMESPACE::SubresourceLayout2EXT;
+  using VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR;
+
+  //=== VK_KHR_ray_tracing_position_fetch ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPositionFetchFeaturesKHR;
+
+  //=== VK_EXT_shader_object ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT;
+  using VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT;
+
+  //=== VK_QCOM_tile_properties ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM;
+  using VULKAN_HPP_NAMESPACE::TilePropertiesQCOM;
+
+  //=== VK_SEC_amigo_profiling ===
+  using VULKAN_HPP_NAMESPACE::AmigoProfilingSubmitInfoSEC;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceAmigoProfilingFeaturesSEC;
+
+  //=== VK_QCOM_multiview_per_view_viewports ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM;
+
+  //=== VK_NV_ray_tracing_invocation_reorder ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderPropertiesNV;
+
+  //=== VK_NV_extended_sparse_address_space ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpacePropertiesNV;
+
+  //=== VK_EXT_mutable_descriptor_type ===
+  using VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoEXT;
+  using VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoVALVE;
+  using VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT;
+  using VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListVALVE;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesEXT;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesVALVE;
+
+  //=== VK_ARM_shader_core_builtins ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsFeaturesARM;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsPropertiesARM;
+
+  //=== VK_EXT_pipeline_library_group_handles ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT;
+
+  //=== VK_EXT_dynamic_rendering_unused_attachments ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT;
+
+  //=== VK_NV_low_latency2 ===
+  using VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV;
+  using VULKAN_HPP_NAMESPACE::LatencySleepInfoNV;
+  using VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV;
+  using VULKAN_HPP_NAMESPACE::LatencySubmissionPresentIdNV;
+  using VULKAN_HPP_NAMESPACE::LatencySurfaceCapabilitiesNV;
+  using VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV;
+  using VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV;
+  using VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV;
+  using VULKAN_HPP_NAMESPACE::SwapchainLatencyCreateInfoNV;
+
+  //=== VK_KHR_cooperative_matrix ===
+  using VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesKHR;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesKHR;
+
+  //=== VK_QCOM_multiview_per_view_render_areas ===
+  using VULKAN_HPP_NAMESPACE::MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM;
+
+  //=== VK_QCOM_image_processing2 ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2FeaturesQCOM;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2PropertiesQCOM;
+  using VULKAN_HPP_NAMESPACE::SamplerBlockMatchWindowCreateInfoQCOM;
+
+  //=== VK_QCOM_filter_cubic_weights ===
+  using VULKAN_HPP_NAMESPACE::BlitImageCubicWeightsInfoQCOM;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicWeightsFeaturesQCOM;
+  using VULKAN_HPP_NAMESPACE::SamplerCubicWeightsCreateInfoQCOM;
+
+  //=== VK_QCOM_ycbcr_degamma ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrDegammaFeaturesQCOM;
+  using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM;
+
+  //=== VK_QCOM_filter_cubic_clamp ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicClampFeaturesQCOM;
+
+  //=== VK_EXT_attachment_feedback_loop_dynamic_state ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT;
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+  //=== VK_QNX_external_memory_screen_buffer ===
+  using VULKAN_HPP_NAMESPACE::ExternalFormatQNX;
+  using VULKAN_HPP_NAMESPACE::ImportScreenBufferInfoQNX;
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX;
+  using VULKAN_HPP_NAMESPACE::ScreenBufferFormatPropertiesQNX;
+  using VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX;
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+  //=== VK_MSFT_layered_driver ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredDriverPropertiesMSFT;
+
+  //=== VK_NV_descriptor_pool_overallocation ===
+  using VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorPoolOverallocationFeaturesNV;
+
+  //===============
+  //=== HANDLEs ===
+  //===============
+
+  using VULKAN_HPP_NAMESPACE::isVulkanHandleType;
+
+  //=== VK_VERSION_1_0 ===
+  using VULKAN_HPP_NAMESPACE::Buffer;
+  using VULKAN_HPP_NAMESPACE::BufferView;
+  using VULKAN_HPP_NAMESPACE::CommandBuffer;
+  using VULKAN_HPP_NAMESPACE::CommandPool;
+  using VULKAN_HPP_NAMESPACE::DescriptorPool;
+  using VULKAN_HPP_NAMESPACE::DescriptorSet;
+  using VULKAN_HPP_NAMESPACE::DescriptorSetLayout;
+  using VULKAN_HPP_NAMESPACE::Device;
+  using VULKAN_HPP_NAMESPACE::DeviceMemory;
+  using VULKAN_HPP_NAMESPACE::Event;
+  using VULKAN_HPP_NAMESPACE::Fence;
+  using VULKAN_HPP_NAMESPACE::Framebuffer;
+  using VULKAN_HPP_NAMESPACE::Image;
+  using VULKAN_HPP_NAMESPACE::ImageView;
+  using VULKAN_HPP_NAMESPACE::Instance;
+  using VULKAN_HPP_NAMESPACE::PhysicalDevice;
+  using VULKAN_HPP_NAMESPACE::Pipeline;
+  using VULKAN_HPP_NAMESPACE::PipelineCache;
+  using VULKAN_HPP_NAMESPACE::PipelineLayout;
+  using VULKAN_HPP_NAMESPACE::QueryPool;
+  using VULKAN_HPP_NAMESPACE::Queue;
+  using VULKAN_HPP_NAMESPACE::RenderPass;
+  using VULKAN_HPP_NAMESPACE::Sampler;
+  using VULKAN_HPP_NAMESPACE::Semaphore;
+  using VULKAN_HPP_NAMESPACE::ShaderModule;
+
+  //=== VK_VERSION_1_1 ===
+  using VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate;
+  using VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion;
+
+  //=== VK_VERSION_1_3 ===
+  using VULKAN_HPP_NAMESPACE::PrivateDataSlot;
+
+  //=== VK_KHR_surface ===
+  using VULKAN_HPP_NAMESPACE::SurfaceKHR;
+
+  //=== VK_KHR_swapchain ===
+  using VULKAN_HPP_NAMESPACE::SwapchainKHR;
+
+  //=== VK_KHR_display ===
+  using VULKAN_HPP_NAMESPACE::DisplayKHR;
+  using VULKAN_HPP_NAMESPACE::DisplayModeKHR;
+
+  //=== VK_EXT_debug_report ===
+  using VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT;
+
+  //=== VK_KHR_video_queue ===
+  using VULKAN_HPP_NAMESPACE::VideoSessionKHR;
+  using VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR;
+
+  //=== VK_NVX_binary_import ===
+  using VULKAN_HPP_NAMESPACE::CuFunctionNVX;
+  using VULKAN_HPP_NAMESPACE::CuModuleNVX;
+
+  //=== VK_EXT_debug_utils ===
+  using VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT;
+
+  //=== VK_KHR_acceleration_structure ===
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureKHR;
+
+  //=== VK_EXT_validation_cache ===
+  using VULKAN_HPP_NAMESPACE::ValidationCacheEXT;
+
+  //=== VK_NV_ray_tracing ===
+  using VULKAN_HPP_NAMESPACE::AccelerationStructureNV;
+
+  //=== VK_INTEL_performance_query ===
+  using VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL;
+
+  //=== VK_KHR_deferred_host_operations ===
+  using VULKAN_HPP_NAMESPACE::DeferredOperationKHR;
+
+  //=== VK_NV_device_generated_commands ===
+  using VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV;
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+  using VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA;
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  //=== VK_EXT_opacity_micromap ===
+  using VULKAN_HPP_NAMESPACE::MicromapEXT;
+
+  //=== VK_NV_optical_flow ===
+  using VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV;
+
+  //=== VK_EXT_shader_object ===
+  using VULKAN_HPP_NAMESPACE::ShaderEXT;
+
+  //======================
+  //=== UNIQUE HANDLEs ===
+  //======================
+
+#if !defined( VULKAN_HPP_NO_SMART_HANDLE )
+
+  //=== VK_VERSION_1_0 ===
+  using VULKAN_HPP_NAMESPACE::UniqueBuffer;
+  using VULKAN_HPP_NAMESPACE::UniqueBufferView;
+  using VULKAN_HPP_NAMESPACE::UniqueCommandBuffer;
+  using VULKAN_HPP_NAMESPACE::UniqueCommandPool;
+  using VULKAN_HPP_NAMESPACE::UniqueDescriptorPool;
+  using VULKAN_HPP_NAMESPACE::UniqueDescriptorSet;
+  using VULKAN_HPP_NAMESPACE::UniqueDescriptorSetLayout;
+  using VULKAN_HPP_NAMESPACE::UniqueDevice;
+  using VULKAN_HPP_NAMESPACE::UniqueDeviceMemory;
+  using VULKAN_HPP_NAMESPACE::UniqueEvent;
+  using VULKAN_HPP_NAMESPACE::UniqueFence;
+  using VULKAN_HPP_NAMESPACE::UniqueFramebuffer;
+  using VULKAN_HPP_NAMESPACE::UniqueImage;
+  using VULKAN_HPP_NAMESPACE::UniqueImageView;
+  using VULKAN_HPP_NAMESPACE::UniqueInstance;
+  using VULKAN_HPP_NAMESPACE::UniquePipeline;
+  using VULKAN_HPP_NAMESPACE::UniquePipelineCache;
+  using VULKAN_HPP_NAMESPACE::UniquePipelineLayout;
+  using VULKAN_HPP_NAMESPACE::UniqueQueryPool;
+  using VULKAN_HPP_NAMESPACE::UniqueRenderPass;
+  using VULKAN_HPP_NAMESPACE::UniqueSampler;
+  using VULKAN_HPP_NAMESPACE::UniqueSemaphore;
+  using VULKAN_HPP_NAMESPACE::UniqueShaderModule;
+
+  //=== VK_VERSION_1_1 ===
+  using VULKAN_HPP_NAMESPACE::UniqueDescriptorUpdateTemplate;
+  using VULKAN_HPP_NAMESPACE::UniqueSamplerYcbcrConversion;
+
+  //=== VK_VERSION_1_3 ===
+  using VULKAN_HPP_NAMESPACE::UniquePrivateDataSlot;
+
+  //=== VK_KHR_surface ===
+  using VULKAN_HPP_NAMESPACE::UniqueSurfaceKHR;
+
+  //=== VK_KHR_swapchain ===
+  using VULKAN_HPP_NAMESPACE::UniqueSwapchainKHR;
+
+  //=== VK_EXT_debug_report ===
+  using VULKAN_HPP_NAMESPACE::UniqueDebugReportCallbackEXT;
+
+  //=== VK_KHR_video_queue ===
+  using VULKAN_HPP_NAMESPACE::UniqueVideoSessionKHR;
+  using VULKAN_HPP_NAMESPACE::UniqueVideoSessionParametersKHR;
+
+  //=== VK_NVX_binary_import ===
+  using VULKAN_HPP_NAMESPACE::UniqueCuFunctionNVX;
+  using VULKAN_HPP_NAMESPACE::UniqueCuModuleNVX;
+
+  //=== VK_EXT_debug_utils ===
+  using VULKAN_HPP_NAMESPACE::UniqueDebugUtilsMessengerEXT;
+
+  //=== VK_KHR_acceleration_structure ===
+  using VULKAN_HPP_NAMESPACE::UniqueAccelerationStructureKHR;
+
+  //=== VK_EXT_validation_cache ===
+  using VULKAN_HPP_NAMESPACE::UniqueValidationCacheEXT;
+
+  //=== VK_NV_ray_tracing ===
+  using VULKAN_HPP_NAMESPACE::UniqueAccelerationStructureNV;
+
+  //=== VK_KHR_deferred_host_operations ===
+  using VULKAN_HPP_NAMESPACE::UniqueDeferredOperationKHR;
+
+  //=== VK_NV_device_generated_commands ===
+  using VULKAN_HPP_NAMESPACE::UniqueIndirectCommandsLayoutNV;
+
+#  if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+  using VULKAN_HPP_NAMESPACE::UniqueBufferCollectionFUCHSIA;
+#  endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  //=== VK_EXT_opacity_micromap ===
+  using VULKAN_HPP_NAMESPACE::UniqueMicromapEXT;
+
+  //=== VK_NV_optical_flow ===
+  using VULKAN_HPP_NAMESPACE::UniqueOpticalFlowSessionNV;
+
+  //=== VK_EXT_shader_object ===
+  using VULKAN_HPP_NAMESPACE::UniqueHandleTraits;
+  using VULKAN_HPP_NAMESPACE::UniqueShaderEXT;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
+  //======================
+  //=== SHARED HANDLEs ===
+  //======================
+
+#if !defined( VULKAN_HPP_NO_SMART_HANDLE )
+
+  //=== VK_VERSION_1_0 ===
+  using VULKAN_HPP_NAMESPACE::SharedBuffer;
+  using VULKAN_HPP_NAMESPACE::SharedBufferView;
+  using VULKAN_HPP_NAMESPACE::SharedCommandBuffer;
+  using VULKAN_HPP_NAMESPACE::SharedCommandPool;
+  using VULKAN_HPP_NAMESPACE::SharedDescriptorPool;
+  using VULKAN_HPP_NAMESPACE::SharedDescriptorSet;
+  using VULKAN_HPP_NAMESPACE::SharedDescriptorSetLayout;
+  using VULKAN_HPP_NAMESPACE::SharedDevice;
+  using VULKAN_HPP_NAMESPACE::SharedDeviceMemory;
+  using VULKAN_HPP_NAMESPACE::SharedEvent;
+  using VULKAN_HPP_NAMESPACE::SharedFence;
+  using VULKAN_HPP_NAMESPACE::SharedFramebuffer;
+  using VULKAN_HPP_NAMESPACE::SharedImage;
+  using VULKAN_HPP_NAMESPACE::SharedImageView;
+  using VULKAN_HPP_NAMESPACE::SharedInstance;
+  using VULKAN_HPP_NAMESPACE::SharedPhysicalDevice;
+  using VULKAN_HPP_NAMESPACE::SharedPipeline;
+  using VULKAN_HPP_NAMESPACE::SharedPipelineCache;
+  using VULKAN_HPP_NAMESPACE::SharedPipelineLayout;
+  using VULKAN_HPP_NAMESPACE::SharedQueryPool;
+  using VULKAN_HPP_NAMESPACE::SharedQueue;
+  using VULKAN_HPP_NAMESPACE::SharedRenderPass;
+  using VULKAN_HPP_NAMESPACE::SharedSampler;
+  using VULKAN_HPP_NAMESPACE::SharedSemaphore;
+  using VULKAN_HPP_NAMESPACE::SharedShaderModule;
+
+  //=== VK_VERSION_1_1 ===
+  using VULKAN_HPP_NAMESPACE::SharedDescriptorUpdateTemplate;
+  using VULKAN_HPP_NAMESPACE::SharedSamplerYcbcrConversion;
+
+  //=== VK_VERSION_1_3 ===
+  using VULKAN_HPP_NAMESPACE::SharedPrivateDataSlot;
+
+  //=== VK_KHR_surface ===
+  using VULKAN_HPP_NAMESPACE::SharedSurfaceKHR;
+
+  //=== VK_KHR_swapchain ===
+  using VULKAN_HPP_NAMESPACE::SharedSwapchainKHR;
+
+  //=== VK_KHR_display ===
+  using VULKAN_HPP_NAMESPACE::SharedDisplayKHR;
+  using VULKAN_HPP_NAMESPACE::SharedDisplayModeKHR;
+
+  //=== VK_EXT_debug_report ===
+  using VULKAN_HPP_NAMESPACE::SharedDebugReportCallbackEXT;
+
+  //=== VK_KHR_video_queue ===
+  using VULKAN_HPP_NAMESPACE::SharedVideoSessionKHR;
+  using VULKAN_HPP_NAMESPACE::SharedVideoSessionParametersKHR;
+
+  //=== VK_NVX_binary_import ===
+  using VULKAN_HPP_NAMESPACE::SharedCuFunctionNVX;
+  using VULKAN_HPP_NAMESPACE::SharedCuModuleNVX;
+
+  //=== VK_EXT_debug_utils ===
+  using VULKAN_HPP_NAMESPACE::SharedDebugUtilsMessengerEXT;
+
+  //=== VK_KHR_acceleration_structure ===
+  using VULKAN_HPP_NAMESPACE::SharedAccelerationStructureKHR;
+
+  //=== VK_EXT_validation_cache ===
+  using VULKAN_HPP_NAMESPACE::SharedValidationCacheEXT;
+
+  //=== VK_NV_ray_tracing ===
+  using VULKAN_HPP_NAMESPACE::SharedAccelerationStructureNV;
+
+  //=== VK_INTEL_performance_query ===
+  using VULKAN_HPP_NAMESPACE::SharedPerformanceConfigurationINTEL;
+
+  //=== VK_KHR_deferred_host_operations ===
+  using VULKAN_HPP_NAMESPACE::SharedDeferredOperationKHR;
+
+  //=== VK_NV_device_generated_commands ===
+  using VULKAN_HPP_NAMESPACE::SharedIndirectCommandsLayoutNV;
+
+#  if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+  using VULKAN_HPP_NAMESPACE::SharedBufferCollectionFUCHSIA;
+#  endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  //=== VK_EXT_opacity_micromap ===
+  using VULKAN_HPP_NAMESPACE::SharedMicromapEXT;
+
+  //=== VK_NV_optical_flow ===
+  using VULKAN_HPP_NAMESPACE::SharedOpticalFlowSessionNV;
+
+  //=== VK_EXT_shader_object ===
+  using VULKAN_HPP_NAMESPACE::SharedHandleTraits;
+  using VULKAN_HPP_NAMESPACE::SharedShaderEXT;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
+  //===========================
+  //=== COMMAND Definitions ===
+  //===========================
+  using VULKAN_HPP_NAMESPACE::createInstance;
+  using VULKAN_HPP_NAMESPACE::enumerateInstanceExtensionProperties;
+  using VULKAN_HPP_NAMESPACE::enumerateInstanceLayerProperties;
+  using VULKAN_HPP_NAMESPACE::enumerateInstanceVersion;
+
+#if !defined( VULKAN_HPP_NO_SMART_HANDLE )
+  using VULKAN_HPP_NAMESPACE::createInstanceUnique;
+#endif /*VULKAN_HPP_NO_SMART_HANDLE*/
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+  using VULKAN_HPP_NAMESPACE::StructExtends;
+#endif /*VULKAN_HPP_DISABLE_ENHANCED_MODE*/
+
+#if defined( VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL )
+  using VULKAN_HPP_NAMESPACE::DynamicLoader;
+#endif /*VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL*/
+
+  //=====================
+  //=== Format Traits ===
+  //=====================
+  using VULKAN_HPP_NAMESPACE::blockExtent;
+  using VULKAN_HPP_NAMESPACE::blockSize;
+  using VULKAN_HPP_NAMESPACE::compatibilityClass;
+  using VULKAN_HPP_NAMESPACE::componentBits;
+  using VULKAN_HPP_NAMESPACE::componentCount;
+  using VULKAN_HPP_NAMESPACE::componentName;
+  using VULKAN_HPP_NAMESPACE::componentNumericFormat;
+  using VULKAN_HPP_NAMESPACE::componentPlaneIndex;
+  using VULKAN_HPP_NAMESPACE::componentsAreCompressed;
+  using VULKAN_HPP_NAMESPACE::compressionScheme;
+  using VULKAN_HPP_NAMESPACE::isCompressed;
+  using VULKAN_HPP_NAMESPACE::packed;
+  using VULKAN_HPP_NAMESPACE::planeCompatibleFormat;
+  using VULKAN_HPP_NAMESPACE::planeCount;
+  using VULKAN_HPP_NAMESPACE::planeHeightDivisor;
+  using VULKAN_HPP_NAMESPACE::planeWidthDivisor;
+  using VULKAN_HPP_NAMESPACE::texelsPerBlock;
+
+  //======================================
+  //=== Extension inspection functions ===
+  //======================================
+  using VULKAN_HPP_NAMESPACE::getDeprecatedExtensions;
+  using VULKAN_HPP_NAMESPACE::getDeviceExtensions;
+  using VULKAN_HPP_NAMESPACE::getExtensionDepends;
+  using VULKAN_HPP_NAMESPACE::getExtensionDeprecatedBy;
+  using VULKAN_HPP_NAMESPACE::getExtensionObsoletedBy;
+  using VULKAN_HPP_NAMESPACE::getExtensionPromotedTo;
+  using VULKAN_HPP_NAMESPACE::getInstanceExtensions;
+  using VULKAN_HPP_NAMESPACE::getObsoletedExtensions;
+  using VULKAN_HPP_NAMESPACE::getPromotedExtensions;
+  using VULKAN_HPP_NAMESPACE::isDeprecatedExtension;
+  using VULKAN_HPP_NAMESPACE::isDeviceExtension;
+  using VULKAN_HPP_NAMESPACE::isInstanceExtension;
+  using VULKAN_HPP_NAMESPACE::isObsoletedExtension;
+  using VULKAN_HPP_NAMESPACE::isPromotedExtension;
+
+#if !defined( VULKAN_HPP_DISABLE_ENHANCED_MODE ) && !defined( VULKAN_HPP_NO_EXCEPTIONS )
+  namespace VULKAN_HPP_RAII_NAMESPACE
+  {
+    //======================
+    //=== RAII HARDCODED ===
+    //======================
+
+    using VULKAN_HPP_RAII_NAMESPACE::Context;
+    using VULKAN_HPP_RAII_NAMESPACE::ContextDispatcher;
+    using VULKAN_HPP_RAII_NAMESPACE::DeviceDispatcher;
+    using VULKAN_HPP_RAII_NAMESPACE::exchange;
+    using VULKAN_HPP_RAII_NAMESPACE::InstanceDispatcher;
+
+    //====================
+    //=== RAII HANDLEs ===
+    //====================
+
+    //=== VK_VERSION_1_0 ===
+    using VULKAN_HPP_RAII_NAMESPACE::Buffer;
+    using VULKAN_HPP_RAII_NAMESPACE::BufferView;
+    using VULKAN_HPP_RAII_NAMESPACE::CommandBuffer;
+    using VULKAN_HPP_RAII_NAMESPACE::CommandBuffers;
+    using VULKAN_HPP_RAII_NAMESPACE::CommandPool;
+    using VULKAN_HPP_RAII_NAMESPACE::DescriptorPool;
+    using VULKAN_HPP_RAII_NAMESPACE::DescriptorSet;
+    using VULKAN_HPP_RAII_NAMESPACE::DescriptorSetLayout;
+    using VULKAN_HPP_RAII_NAMESPACE::DescriptorSets;
+    using VULKAN_HPP_RAII_NAMESPACE::Device;
+    using VULKAN_HPP_RAII_NAMESPACE::DeviceMemory;
+    using VULKAN_HPP_RAII_NAMESPACE::Event;
+    using VULKAN_HPP_RAII_NAMESPACE::Fence;
+    using VULKAN_HPP_RAII_NAMESPACE::Framebuffer;
+    using VULKAN_HPP_RAII_NAMESPACE::Image;
+    using VULKAN_HPP_RAII_NAMESPACE::ImageView;
+    using VULKAN_HPP_RAII_NAMESPACE::Instance;
+    using VULKAN_HPP_RAII_NAMESPACE::PhysicalDevice;
+    using VULKAN_HPP_RAII_NAMESPACE::PhysicalDevices;
+    using VULKAN_HPP_RAII_NAMESPACE::Pipeline;
+    using VULKAN_HPP_RAII_NAMESPACE::PipelineCache;
+    using VULKAN_HPP_RAII_NAMESPACE::PipelineLayout;
+    using VULKAN_HPP_RAII_NAMESPACE::Pipelines;
+    using VULKAN_HPP_RAII_NAMESPACE::QueryPool;
+    using VULKAN_HPP_RAII_NAMESPACE::Queue;
+    using VULKAN_HPP_RAII_NAMESPACE::RenderPass;
+    using VULKAN_HPP_RAII_NAMESPACE::Sampler;
+    using VULKAN_HPP_RAII_NAMESPACE::Semaphore;
+    using VULKAN_HPP_RAII_NAMESPACE::ShaderModule;
+
+    //=== VK_VERSION_1_1 ===
+    using VULKAN_HPP_RAII_NAMESPACE::DescriptorUpdateTemplate;
+    using VULKAN_HPP_RAII_NAMESPACE::SamplerYcbcrConversion;
+
+    //=== VK_VERSION_1_3 ===
+    using VULKAN_HPP_RAII_NAMESPACE::PrivateDataSlot;
+
+    //=== VK_KHR_surface ===
+    using VULKAN_HPP_RAII_NAMESPACE::SurfaceKHR;
+
+    //=== VK_KHR_swapchain ===
+    using VULKAN_HPP_RAII_NAMESPACE::SwapchainKHR;
+    using VULKAN_HPP_RAII_NAMESPACE::SwapchainKHRs;
+
+    //=== VK_KHR_display ===
+    using VULKAN_HPP_RAII_NAMESPACE::DisplayKHR;
+    using VULKAN_HPP_RAII_NAMESPACE::DisplayKHRs;
+    using VULKAN_HPP_RAII_NAMESPACE::DisplayModeKHR;
+
+    //=== VK_EXT_debug_report ===
+    using VULKAN_HPP_RAII_NAMESPACE::DebugReportCallbackEXT;
+
+    //=== VK_KHR_video_queue ===
+    using VULKAN_HPP_RAII_NAMESPACE::VideoSessionKHR;
+    using VULKAN_HPP_RAII_NAMESPACE::VideoSessionParametersKHR;
+
+    //=== VK_NVX_binary_import ===
+    using VULKAN_HPP_RAII_NAMESPACE::CuFunctionNVX;
+    using VULKAN_HPP_RAII_NAMESPACE::CuModuleNVX;
+
+    //=== VK_EXT_debug_utils ===
+    using VULKAN_HPP_RAII_NAMESPACE::DebugUtilsMessengerEXT;
+
+    //=== VK_KHR_acceleration_structure ===
+    using VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureKHR;
+
+    //=== VK_EXT_validation_cache ===
+    using VULKAN_HPP_RAII_NAMESPACE::ValidationCacheEXT;
+
+    //=== VK_NV_ray_tracing ===
+    using VULKAN_HPP_RAII_NAMESPACE::AccelerationStructureNV;
+
+    //=== VK_INTEL_performance_query ===
+    using VULKAN_HPP_RAII_NAMESPACE::PerformanceConfigurationINTEL;
+
+    //=== VK_KHR_deferred_host_operations ===
+    using VULKAN_HPP_RAII_NAMESPACE::DeferredOperationKHR;
+
+    //=== VK_NV_device_generated_commands ===
+    using VULKAN_HPP_RAII_NAMESPACE::IndirectCommandsLayoutNV;
+
+#  if defined( VK_USE_PLATFORM_FUCHSIA )
+    //=== VK_FUCHSIA_buffer_collection ===
+    using VULKAN_HPP_RAII_NAMESPACE::BufferCollectionFUCHSIA;
+#  endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+    //=== VK_EXT_opacity_micromap ===
+    using VULKAN_HPP_RAII_NAMESPACE::MicromapEXT;
+
+    //=== VK_NV_optical_flow ===
+    using VULKAN_HPP_RAII_NAMESPACE::OpticalFlowSessionNV;
+
+    //=== VK_EXT_shader_object ===
+    using VULKAN_HPP_RAII_NAMESPACE::ShaderEXT;
+    using VULKAN_HPP_RAII_NAMESPACE::ShaderEXTs;
+
+  }  // namespace VULKAN_HPP_RAII_NAMESPACE
+#endif
+}  // namespace VULKAN_HPP_NAMESPACE
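The block above closes the new vulkan.cppm C++ module interface, which re-exports the vulkan.hpp structs, handles, unique/shared/RAII wrappers, format traits and extension-inspection helpers listed in this diff. As a minimal sketch of consuming it, assuming the module compiled from vulkan.cppm keeps its upstream Vulkan-Hpp name vulkan_hpp, the default vk namespace and the default static dispatcher:

    // Hedged sketch, not part of this commit: use the C++20 module instead of #include <vulkan/vulkan.hpp>.
    import vulkan_hpp;  // module name assumed from upstream Vulkan-Hpp's vulkan.cppm

    int main()
    {
        vk::InstanceCreateInfo createInfo{};                                    // STRUCT export
        vk::UniqueInstance instance = vk::createInstanceUnique(createInfo);     // COMMAND + UNIQUE HANDLE exports
        vk::PhysicalDevice gpu = instance->enumeratePhysicalDevices().front();  // HANDLE export
        return 0;
    }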

+ 9 - 2
ThirdParty/Khronos/vulkan/vulkan.h

@@ -2,7 +2,7 @@
 #define VULKAN_H_ 1
 
 /*
-** Copyright 2015-2022 The Khronos Group Inc.
+** Copyright 2015-2023 The Khronos Group Inc.
 **
 ** SPDX-License-Identifier: Apache-2.0
 */
@@ -38,7 +38,6 @@
 
 
 #ifdef VK_USE_PLATFORM_WAYLAND_KHR
-#include <wayland-client.h>
 #include "vulkan_wayland.h"
 #endif
 
@@ -85,6 +84,14 @@
 #include "vulkan_screen.h"
 #endif
 
+
+#ifdef VK_USE_PLATFORM_SCI
+#include <nvscisync.h>
+#include <nvscibuf.h>
+#include "vulkan_sci.h"
+#endif
+
+
 #ifdef VK_ENABLE_BETA_EXTENSIONS
 #include "vulkan_beta.h"
 #endif
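Two things change in this vulkan.h hunk: the Wayland path no longer includes <wayland-client.h> (vulkan_wayland.h only needs incomplete wl_display/wl_surface types), and a new VK_USE_PLATFORM_SCI block pulls in NVIDIA's NvSciSync/NvSciBuf headers for the SCI interop extensions. A minimal sketch of a Wayland WSI translation unit after this update, assuming the application still uses the wl_* client API directly and previously relied on the transitive include:

    // Sketch, not from this commit: include wayland-client.h explicitly now that
    // vulkan.h no longer pulls it in transitively.
    #define VK_USE_PLATFORM_WAYLAND_KHR
    #include <wayland-client.h>
    #include <vulkan/vulkan.h>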

File diff suppressed because it is too large
+ 385 - 409
ThirdParty/Khronos/vulkan/vulkan.hpp


+ 29 - 1
ThirdParty/Khronos/vulkan/vulkan_android.h

@@ -2,7 +2,7 @@
 #define VULKAN_ANDROID_H_ 1
 
 /*
-** Copyright 2015-2022 The Khronos Group Inc.
+** Copyright 2015-2023 The Khronos Group Inc.
 **
 ** SPDX-License-Identifier: Apache-2.0
 */
@@ -19,6 +19,7 @@ extern "C" {
 
 
 
+// VK_KHR_android_surface is a preprocessor guard. Do not pass it to API calls.
 #define VK_KHR_android_surface 1
 struct ANativeWindow;
 #define VK_KHR_ANDROID_SURFACE_SPEC_VERSION 6
@@ -42,6 +43,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCreateAndroidSurfaceKHR(
 #endif
 
 
+// VK_ANDROID_external_memory_android_hardware_buffer is a preprocessor guard. Do not pass it to API calls.
 #define VK_ANDROID_external_memory_android_hardware_buffer 1
 struct AHardwareBuffer;
 #define VK_ANDROID_EXTERNAL_MEMORY_ANDROID_HARDWARE_BUFFER_SPEC_VERSION 5
@@ -118,6 +120,32 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryAndroidHardwareBufferANDROID(
     struct AHardwareBuffer**                    pBuffer);
 #endif
 
+
+// VK_ANDROID_external_format_resolve is a preprocessor guard. Do not pass it to API calls.
+#define VK_ANDROID_external_format_resolve 1
+#define VK_ANDROID_EXTERNAL_FORMAT_RESOLVE_SPEC_VERSION 1
+#define VK_ANDROID_EXTERNAL_FORMAT_RESOLVE_EXTENSION_NAME "VK_ANDROID_external_format_resolve"
+typedef struct VkPhysicalDeviceExternalFormatResolveFeaturesANDROID {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           externalFormatResolve;
+} VkPhysicalDeviceExternalFormatResolveFeaturesANDROID;
+
+typedef struct VkPhysicalDeviceExternalFormatResolvePropertiesANDROID {
+    VkStructureType     sType;
+    void*               pNext;
+    VkBool32            nullColorAttachmentWithExternalFormatResolve;
+    VkChromaLocation    externalFormatResolveChromaOffsetX;
+    VkChromaLocation    externalFormatResolveChromaOffsetY;
+} VkPhysicalDeviceExternalFormatResolvePropertiesANDROID;
+
+typedef struct VkAndroidHardwareBufferFormatResolvePropertiesANDROID {
+    VkStructureType    sType;
+    void*              pNext;
+    VkFormat           colorAttachmentFormat;
+} VkAndroidHardwareBufferFormatResolvePropertiesANDROID;
+
+
 #ifdef __cplusplus
 }
 #endif
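The new VK_ANDROID_external_format_resolve structures above follow the standard feature/property query pattern, and the repeated "is a preprocessor guard" comments stress that macros such as VK_ANDROID_external_format_resolve exist only for compile-time detection. A minimal sketch of querying the new feature through the usual pNext chain (the VK_STRUCTURE_TYPE_* token is assumed to follow the standard naming convention):

    // Sketch, not part of the header: detect support for external format resolve.
    VkPhysicalDeviceExternalFormatResolveFeaturesANDROID resolveFeatures = {};
    resolveFeatures.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTERNAL_FORMAT_RESOLVE_FEATURES_ANDROID; // assumed name

    VkPhysicalDeviceFeatures2 features2 = {};
    features2.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
    features2.pNext = &resolveFeatures;

    vkGetPhysicalDeviceFeatures2(physicalDevice, &features2); // core since Vulkan 1.1

    if (resolveFeatures.externalFormatResolve) {
        // The device can resolve external-format attachments; enable the extension at device creation.
    }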

File diff suppressed because it is too large
+ 345 - 594
ThirdParty/Khronos/vulkan/vulkan_beta.h


File diff suppressed because it is too large
+ 329 - 144
ThirdParty/Khronos/vulkan/vulkan_core.h


+ 2 - 1
ThirdParty/Khronos/vulkan/vulkan_directfb.h

@@ -2,7 +2,7 @@
 #define VULKAN_DIRECTFB_H_ 1
 
 /*
-** Copyright 2015-2022 The Khronos Group Inc.
+** Copyright 2015-2023 The Khronos Group Inc.
 **
 ** SPDX-License-Identifier: Apache-2.0
 */
@@ -19,6 +19,7 @@ extern "C" {
 
 
 
+// VK_EXT_directfb_surface is a preprocessor guard. Do not pass it to API calls.
 #define VK_EXT_directfb_surface 1
 #define VK_EXT_DIRECTFB_SURFACE_SPEC_VERSION 1
 #define VK_EXT_DIRECTFB_SURFACE_EXTENSION_NAME "VK_EXT_directfb_surface"
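The comment added in this hunk ("is a preprocessor guard. Do not pass it to API calls.") addresses a common mix-up: the macro VK_EXT_directfb_surface expands to the integer 1, while the API expects the extension's string name, available as VK_EXT_DIRECTFB_SURFACE_EXTENSION_NAME. A minimal sketch of enabling the extension correctly, assuming a DirectFB build where the guard is defined:

    // Sketch, not from the header: pass the *_EXTENSION_NAME strings, never the guard macros.
    const char* instanceExtensions[] = {
        VK_KHR_SURFACE_EXTENSION_NAME,           // "VK_KHR_surface" (required by the surface extension)
        VK_EXT_DIRECTFB_SURFACE_EXTENSION_NAME,  // "VK_EXT_directfb_surface"
    };

    VkInstanceCreateInfo createInfo = {};
    createInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
    createInfo.enabledExtensionCount = 2;
    createInfo.ppEnabledExtensionNames = instanceExtensions;
    // VK_EXT_directfb_surface (the guard) is just `1` and would not be accepted here.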

File diff suppressed because it is too large
+ 653 - 267
ThirdParty/Khronos/vulkan/vulkan_enums.hpp


+ 1654 - 0
ThirdParty/Khronos/vulkan/vulkan_extension_inspection.hpp

@@ -0,0 +1,1654 @@
+// Copyright 2015-2023 The Khronos Group Inc.
+//
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_EXTENSION_INSPECTION_HPP
+#define VULKAN_EXTENSION_INSPECTION_HPP
+
+#include <map>
+#include <set>
+#include <vulkan/vulkan.hpp>
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  //======================================
+  //=== Extension inspection functions ===
+  //======================================
+
+  std::set<std::string> const &                                        getDeviceExtensions();
+  std::set<std::string> const &                                        getInstanceExtensions();
+  std::map<std::string, std::string> const &                           getDeprecatedExtensions();
+  std::map<std::string, std::vector<std::vector<std::string>>> const & getExtensionDepends( std::string const & extension );
+  std::pair<bool, std::vector<std::vector<std::string>> const &>       getExtensionDepends( std::string const & version, std::string const & extension );
+  std::map<std::string, std::string> const &                           getObsoletedExtensions();
+  std::map<std::string, std::string> const &                           getPromotedExtensions();
+  VULKAN_HPP_CONSTEXPR_20 std::string getExtensionDeprecatedBy( std::string const & extension );
+  VULKAN_HPP_CONSTEXPR_20 std::string getExtensionObsoletedBy( std::string const & extension );
+  VULKAN_HPP_CONSTEXPR_20 std::string getExtensionPromotedTo( std::string const & extension );
+  VULKAN_HPP_CONSTEXPR_20 bool        isDeprecatedExtension( std::string const & extension );
+  VULKAN_HPP_CONSTEXPR_20 bool        isDeviceExtension( std::string const & extension );
+  VULKAN_HPP_CONSTEXPR_20 bool        isInstanceExtension( std::string const & extension );
+  VULKAN_HPP_CONSTEXPR_20 bool        isObsoletedExtension( std::string const & extension );
+  VULKAN_HPP_CONSTEXPR_20 bool        isPromotedExtension( std::string const & extension );
+
+  //=====================================================
+  //=== Extension inspection function implementations ===
+  //=====================================================
+
+  VULKAN_HPP_INLINE std::map<std::string, std::string> const & getDeprecatedExtensions()
+  {
+    static std::map<std::string, std::string> deprecatedExtensions = { 
+{ "VK_EXT_debug_report", "VK_EXT_debug_utils"}, 
+{ "VK_NV_glsl_shader", ""}, 
+{ "VK_NV_dedicated_allocation", "VK_KHR_dedicated_allocation"}, 
+{ "VK_AMD_gpu_shader_half_float", "VK_KHR_shader_float16_int8"}, 
+{ "VK_IMG_format_pvrtc", ""}, 
+{ "VK_NV_external_memory_capabilities", "VK_KHR_external_memory_capabilities"}, 
+{ "VK_NV_external_memory", "VK_KHR_external_memory"},
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+{ "VK_NV_external_memory_win32", "VK_KHR_external_memory_win32"},
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+{ "VK_EXT_validation_flags", "VK_EXT_validation_features"}, 
+{ "VK_EXT_shader_subgroup_ballot", "VK_VERSION_1_2"}, 
+{ "VK_EXT_shader_subgroup_vote", "VK_VERSION_1_1"},
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+{ "VK_MVK_ios_surface", "VK_EXT_metal_surface"},
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+{ "VK_MVK_macos_surface", "VK_EXT_metal_surface"},
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+{ "VK_AMD_gpu_shader_int16", "VK_KHR_shader_float16_int8"}, 
+{ "VK_EXT_buffer_device_address", "VK_KHR_buffer_device_address"} };
+    return deprecatedExtensions;
+  }
+
+  VULKAN_HPP_INLINE std::set<std::string> const & getDeviceExtensions()
+  {
+    static std::set<std::string> deviceExtensions = { 
+"VK_KHR_swapchain", 
+"VK_KHR_display_swapchain", 
+"VK_NV_glsl_shader", 
+"VK_EXT_depth_range_unrestricted", 
+"VK_KHR_sampler_mirror_clamp_to_edge", 
+"VK_IMG_filter_cubic", 
+"VK_AMD_rasterization_order", 
+"VK_AMD_shader_trinary_minmax", 
+"VK_AMD_shader_explicit_vertex_parameter", 
+"VK_EXT_debug_marker", 
+"VK_KHR_video_queue", 
+"VK_KHR_video_decode_queue", 
+"VK_AMD_gcn_shader", 
+"VK_NV_dedicated_allocation", 
+"VK_EXT_transform_feedback", 
+"VK_NVX_binary_import", 
+"VK_NVX_image_view_handle", 
+"VK_AMD_draw_indirect_count", 
+"VK_AMD_negative_viewport_height", 
+"VK_AMD_gpu_shader_half_float", 
+"VK_AMD_shader_ballot",
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+"VK_EXT_video_encode_h264", 
+"VK_EXT_video_encode_h265",
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+"VK_KHR_video_decode_h264", 
+"VK_AMD_texture_gather_bias_lod", 
+"VK_AMD_shader_info", 
+"VK_KHR_dynamic_rendering", 
+"VK_AMD_shader_image_load_store_lod", 
+"VK_NV_corner_sampled_image", 
+"VK_KHR_multiview", 
+"VK_IMG_format_pvrtc", 
+"VK_NV_external_memory",
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+"VK_NV_external_memory_win32", 
+"VK_NV_win32_keyed_mutex",
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+"VK_KHR_device_group", 
+"VK_KHR_shader_draw_parameters", 
+"VK_EXT_shader_subgroup_ballot", 
+"VK_EXT_shader_subgroup_vote", 
+"VK_EXT_texture_compression_astc_hdr", 
+"VK_EXT_astc_decode_mode", 
+"VK_EXT_pipeline_robustness", 
+"VK_KHR_maintenance1", 
+"VK_KHR_external_memory",
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+"VK_KHR_external_memory_win32",
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+"VK_KHR_external_memory_fd",
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+"VK_KHR_win32_keyed_mutex",
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+"VK_KHR_external_semaphore",
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+"VK_KHR_external_semaphore_win32",
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+"VK_KHR_external_semaphore_fd", 
+"VK_KHR_push_descriptor", 
+"VK_EXT_conditional_rendering", 
+"VK_KHR_shader_float16_int8", 
+"VK_KHR_16bit_storage", 
+"VK_KHR_incremental_present", 
+"VK_KHR_descriptor_update_template", 
+"VK_NV_clip_space_w_scaling", 
+"VK_EXT_display_control", 
+"VK_GOOGLE_display_timing", 
+"VK_NV_sample_mask_override_coverage", 
+"VK_NV_geometry_shader_passthrough", 
+"VK_NV_viewport_array2", 
+"VK_NVX_multiview_per_view_attributes", 
+"VK_NV_viewport_swizzle", 
+"VK_EXT_discard_rectangles", 
+"VK_EXT_conservative_rasterization", 
+"VK_EXT_depth_clip_enable", 
+"VK_EXT_hdr_metadata", 
+"VK_KHR_imageless_framebuffer", 
+"VK_KHR_create_renderpass2", 
+"VK_KHR_shared_presentable_image", 
+"VK_KHR_external_fence",
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+"VK_KHR_external_fence_win32",
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+"VK_KHR_external_fence_fd", 
+"VK_KHR_performance_query", 
+"VK_KHR_maintenance2", 
+"VK_KHR_variable_pointers", 
+"VK_EXT_external_memory_dma_buf", 
+"VK_EXT_queue_family_foreign", 
+"VK_KHR_dedicated_allocation",
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+"VK_ANDROID_external_memory_android_hardware_buffer",
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+"VK_EXT_sampler_filter_minmax", 
+"VK_KHR_storage_buffer_storage_class", 
+"VK_AMD_gpu_shader_int16",
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+"VK_AMDX_shader_enqueue",
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+"VK_AMD_mixed_attachment_samples", 
+"VK_AMD_shader_fragment_mask", 
+"VK_EXT_inline_uniform_block", 
+"VK_EXT_shader_stencil_export", 
+"VK_EXT_sample_locations", 
+"VK_KHR_relaxed_block_layout", 
+"VK_KHR_get_memory_requirements2", 
+"VK_KHR_image_format_list", 
+"VK_EXT_blend_operation_advanced", 
+"VK_NV_fragment_coverage_to_color", 
+"VK_KHR_acceleration_structure", 
+"VK_KHR_ray_tracing_pipeline", 
+"VK_KHR_ray_query", 
+"VK_NV_framebuffer_mixed_samples", 
+"VK_NV_fill_rectangle", 
+"VK_NV_shader_sm_builtins", 
+"VK_EXT_post_depth_coverage", 
+"VK_KHR_sampler_ycbcr_conversion", 
+"VK_KHR_bind_memory2", 
+"VK_EXT_image_drm_format_modifier", 
+"VK_EXT_validation_cache", 
+"VK_EXT_descriptor_indexing", 
+"VK_EXT_shader_viewport_index_layer",
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+"VK_KHR_portability_subset",
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+"VK_NV_shading_rate_image", 
+"VK_NV_ray_tracing", 
+"VK_NV_representative_fragment_test", 
+"VK_KHR_maintenance3", 
+"VK_KHR_draw_indirect_count", 
+"VK_EXT_filter_cubic", 
+"VK_QCOM_render_pass_shader_resolve", 
+"VK_EXT_global_priority", 
+"VK_KHR_shader_subgroup_extended_types", 
+"VK_KHR_8bit_storage", 
+"VK_EXT_external_memory_host", 
+"VK_AMD_buffer_marker", 
+"VK_KHR_shader_atomic_int64", 
+"VK_KHR_shader_clock", 
+"VK_AMD_pipeline_compiler_control", 
+"VK_EXT_calibrated_timestamps", 
+"VK_AMD_shader_core_properties", 
+"VK_KHR_video_decode_h265", 
+"VK_KHR_global_priority", 
+"VK_AMD_memory_overallocation_behavior", 
+"VK_EXT_vertex_attribute_divisor",
+#if defined( VK_USE_PLATFORM_GGP )
+"VK_GGP_frame_token",
+#endif /*VK_USE_PLATFORM_GGP*/
+"VK_EXT_pipeline_creation_feedback", 
+"VK_KHR_driver_properties", 
+"VK_KHR_shader_float_controls", 
+"VK_NV_shader_subgroup_partitioned", 
+"VK_KHR_depth_stencil_resolve", 
+"VK_KHR_swapchain_mutable_format", 
+"VK_NV_compute_shader_derivatives", 
+"VK_NV_mesh_shader", 
+"VK_NV_fragment_shader_barycentric", 
+"VK_NV_shader_image_footprint", 
+"VK_NV_scissor_exclusive", 
+"VK_NV_device_diagnostic_checkpoints", 
+"VK_KHR_timeline_semaphore", 
+"VK_INTEL_shader_integer_functions2", 
+"VK_INTEL_performance_query", 
+"VK_KHR_vulkan_memory_model", 
+"VK_EXT_pci_bus_info", 
+"VK_AMD_display_native_hdr", 
+"VK_KHR_shader_terminate_invocation", 
+"VK_EXT_fragment_density_map", 
+"VK_EXT_scalar_block_layout", 
+"VK_GOOGLE_hlsl_functionality1", 
+"VK_GOOGLE_decorate_string", 
+"VK_EXT_subgroup_size_control", 
+"VK_KHR_fragment_shading_rate", 
+"VK_AMD_shader_core_properties2", 
+"VK_AMD_device_coherent_memory", 
+"VK_EXT_shader_image_atomic_int64", 
+"VK_KHR_spirv_1_4", 
+"VK_EXT_memory_budget", 
+"VK_EXT_memory_priority", 
+"VK_NV_dedicated_allocation_image_aliasing", 
+"VK_KHR_separate_depth_stencil_layouts", 
+"VK_EXT_buffer_device_address", 
+"VK_EXT_tooling_info", 
+"VK_EXT_separate_stencil_usage", 
+"VK_KHR_present_wait", 
+"VK_NV_cooperative_matrix", 
+"VK_NV_coverage_reduction_mode", 
+"VK_EXT_fragment_shader_interlock", 
+"VK_EXT_ycbcr_image_arrays", 
+"VK_KHR_uniform_buffer_standard_layout", 
+"VK_EXT_provoking_vertex",
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+"VK_EXT_full_screen_exclusive",
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+"VK_KHR_buffer_device_address", 
+"VK_EXT_line_rasterization", 
+"VK_EXT_shader_atomic_float", 
+"VK_EXT_host_query_reset", 
+"VK_EXT_index_type_uint8", 
+"VK_EXT_extended_dynamic_state", 
+"VK_KHR_deferred_host_operations", 
+"VK_KHR_pipeline_executable_properties", 
+"VK_EXT_host_image_copy", 
+"VK_KHR_map_memory2", 
+"VK_EXT_shader_atomic_float2", 
+"VK_EXT_swapchain_maintenance1", 
+"VK_EXT_shader_demote_to_helper_invocation", 
+"VK_NV_device_generated_commands", 
+"VK_NV_inherited_viewport_scissor", 
+"VK_KHR_shader_integer_dot_product", 
+"VK_EXT_texel_buffer_alignment", 
+"VK_QCOM_render_pass_transform", 
+"VK_EXT_depth_bias_control", 
+"VK_EXT_device_memory_report", 
+"VK_EXT_robustness2", 
+"VK_EXT_custom_border_color", 
+"VK_GOOGLE_user_type", 
+"VK_KHR_pipeline_library", 
+"VK_NV_present_barrier", 
+"VK_KHR_shader_non_semantic_info", 
+"VK_KHR_present_id", 
+"VK_EXT_private_data", 
+"VK_EXT_pipeline_creation_cache_control",
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+"VK_KHR_video_encode_queue",
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+"VK_NV_device_diagnostics_config", 
+"VK_QCOM_render_pass_store_ops", 
+"VK_NV_low_latency",
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+"VK_EXT_metal_objects",
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+"VK_KHR_synchronization2", 
+"VK_EXT_descriptor_buffer", 
+"VK_EXT_graphics_pipeline_library", 
+"VK_AMD_shader_early_and_late_fragment_tests", 
+"VK_KHR_fragment_shader_barycentric", 
+"VK_KHR_shader_subgroup_uniform_control_flow", 
+"VK_KHR_zero_initialize_workgroup_memory", 
+"VK_NV_fragment_shading_rate_enums", 
+"VK_NV_ray_tracing_motion_blur", 
+"VK_EXT_mesh_shader", 
+"VK_EXT_ycbcr_2plane_444_formats", 
+"VK_EXT_fragment_density_map2", 
+"VK_QCOM_rotated_copy_commands", 
+"VK_EXT_image_robustness", 
+"VK_KHR_workgroup_memory_explicit_layout", 
+"VK_KHR_copy_commands2", 
+"VK_EXT_image_compression_control", 
+"VK_EXT_attachment_feedback_loop_layout", 
+"VK_EXT_4444_formats", 
+"VK_EXT_device_fault", 
+"VK_ARM_rasterization_order_attachment_access", 
+"VK_EXT_rgba10x6_formats",
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+"VK_NV_acquire_winrt_display",
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+"VK_VALVE_mutable_descriptor_type", 
+"VK_EXT_vertex_input_dynamic_state", 
+"VK_EXT_physical_device_drm", 
+"VK_EXT_device_address_binding_report", 
+"VK_EXT_depth_clip_control", 
+"VK_EXT_primitive_topology_list_restart", 
+"VK_KHR_format_feature_flags2",
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+"VK_FUCHSIA_external_memory", 
+"VK_FUCHSIA_external_semaphore", 
+"VK_FUCHSIA_buffer_collection",
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+"VK_HUAWEI_subpass_shading", 
+"VK_HUAWEI_invocation_mask", 
+"VK_NV_external_memory_rdma", 
+"VK_EXT_pipeline_properties", 
+"VK_EXT_frame_boundary", 
+"VK_EXT_multisampled_render_to_single_sampled", 
+"VK_EXT_extended_dynamic_state2", 
+"VK_EXT_color_write_enable", 
+"VK_EXT_primitives_generated_query", 
+"VK_KHR_ray_tracing_maintenance1", 
+"VK_EXT_global_priority_query", 
+"VK_EXT_image_view_min_lod", 
+"VK_EXT_multi_draw", 
+"VK_EXT_image_2d_view_of_3d", 
+"VK_EXT_shader_tile_image", 
+"VK_EXT_opacity_micromap",
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+"VK_NV_displacement_micromap",
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+"VK_EXT_load_store_op_none", 
+"VK_HUAWEI_cluster_culling_shader", 
+"VK_EXT_border_color_swizzle", 
+"VK_EXT_pageable_device_local_memory", 
+"VK_KHR_maintenance4", 
+"VK_ARM_shader_core_properties", 
+"VK_EXT_image_sliced_view_of_3d", 
+"VK_VALVE_descriptor_set_host_mapping", 
+"VK_EXT_depth_clamp_zero_one", 
+"VK_EXT_non_seamless_cube_map", 
+"VK_QCOM_fragment_density_map_offset", 
+"VK_NV_copy_memory_indirect", 
+"VK_NV_memory_decompression", 
+"VK_NV_device_generated_commands_compute", 
+"VK_NV_linear_color_attachment", 
+"VK_EXT_image_compression_control_swapchain", 
+"VK_QCOM_image_processing", 
+"VK_EXT_nested_command_buffer", 
+"VK_EXT_external_memory_acquire_unmodified", 
+"VK_EXT_extended_dynamic_state3", 
+"VK_EXT_subpass_merge_feedback", 
+"VK_EXT_shader_module_identifier", 
+"VK_EXT_rasterization_order_attachment_access", 
+"VK_NV_optical_flow", 
+"VK_EXT_legacy_dithering", 
+"VK_EXT_pipeline_protected_access",
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+"VK_ANDROID_external_format_resolve",
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+"VK_KHR_maintenance5", 
+"VK_KHR_ray_tracing_position_fetch", 
+"VK_EXT_shader_object", 
+"VK_QCOM_tile_properties", 
+"VK_SEC_amigo_profiling", 
+"VK_QCOM_multiview_per_view_viewports", 
+"VK_NV_ray_tracing_invocation_reorder", 
+"VK_NV_extended_sparse_address_space", 
+"VK_EXT_mutable_descriptor_type", 
+"VK_ARM_shader_core_builtins", 
+"VK_EXT_pipeline_library_group_handles", 
+"VK_EXT_dynamic_rendering_unused_attachments", 
+"VK_NV_low_latency2", 
+"VK_KHR_cooperative_matrix", 
+"VK_QCOM_multiview_per_view_render_areas", 
+"VK_QCOM_image_processing2", 
+"VK_QCOM_filter_cubic_weights", 
+"VK_QCOM_ycbcr_degamma", 
+"VK_QCOM_filter_cubic_clamp", 
+"VK_EXT_attachment_feedback_loop_dynamic_state",
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+"VK_QNX_external_memory_screen_buffer",
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+"VK_MSFT_layered_driver", 
+"VK_NV_descriptor_pool_overallocation" };
+    return deviceExtensions;
+  }
+
+  VULKAN_HPP_INLINE std::set<std::string> const & getInstanceExtensions()
+  {
+    static std::set<std::string> instanceExtensions = { 
+"VK_KHR_surface", 
+"VK_KHR_display",
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+"VK_KHR_xlib_surface",
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+"VK_KHR_xcb_surface",
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+"VK_KHR_wayland_surface",
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+"VK_KHR_android_surface",
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+"VK_KHR_win32_surface",
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+"VK_EXT_debug_report",
+#if defined( VK_USE_PLATFORM_GGP )
+"VK_GGP_stream_descriptor_surface",
+#endif /*VK_USE_PLATFORM_GGP*/
+"VK_NV_external_memory_capabilities", 
+"VK_KHR_get_physical_device_properties2", 
+"VK_EXT_validation_flags",
+#if defined( VK_USE_PLATFORM_VI_NN )
+"VK_NN_vi_surface",
+#endif /*VK_USE_PLATFORM_VI_NN*/
+"VK_KHR_device_group_creation", 
+"VK_KHR_external_memory_capabilities", 
+"VK_KHR_external_semaphore_capabilities", 
+"VK_EXT_direct_mode_display",
+#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
+"VK_EXT_acquire_xlib_display",
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+"VK_EXT_display_surface_counter", 
+"VK_EXT_swapchain_colorspace", 
+"VK_KHR_external_fence_capabilities", 
+"VK_KHR_get_surface_capabilities2", 
+"VK_KHR_get_display_properties2",
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+"VK_MVK_ios_surface",
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+"VK_MVK_macos_surface",
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+"VK_EXT_debug_utils",
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+"VK_FUCHSIA_imagepipe_surface",
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+"VK_EXT_metal_surface",
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+"VK_KHR_surface_protected_capabilities", 
+"VK_EXT_validation_features", 
+"VK_EXT_headless_surface", 
+"VK_EXT_surface_maintenance1", 
+"VK_EXT_acquire_drm_display",
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+"VK_EXT_directfb_surface",
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+"VK_QNX_screen_surface",
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+"VK_KHR_portability_enumeration", 
+"VK_GOOGLE_surfaceless_query", 
+"VK_LUNARG_direct_driver_loading" };
+    return instanceExtensions;
+  }
+
+  VULKAN_HPP_INLINE std::map<std::string, std::vector<std::vector<std::string>>> const & getExtensionDepends( std::string const & extension )
+  {
+    static std::map<std::string, std::vector<std::vector<std::string>>> noDependencies;
+    static std::map<std::string, std::map<std::string, std::vector<std::vector<std::string>>>> dependencies = { 
+{ "VK_KHR_swapchain", { { "VK_VERSION_1_0", {  { "VK_KHR_surface",  } } } } }, 
+{ "VK_KHR_display", { { "VK_VERSION_1_0", {  { "VK_KHR_surface",  } } } } }, 
+{ "VK_KHR_display_swapchain", { { "VK_VERSION_1_0", {  { "VK_KHR_swapchain", "VK_KHR_display",  } } } } },
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+{ "VK_KHR_xlib_surface", { { "VK_VERSION_1_0", {  { "VK_KHR_surface",  } } } } },
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+{ "VK_KHR_xcb_surface", { { "VK_VERSION_1_0", {  { "VK_KHR_surface",  } } } } },
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+{ "VK_KHR_wayland_surface", { { "VK_VERSION_1_0", {  { "VK_KHR_surface",  } } } } },
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+{ "VK_KHR_android_surface", { { "VK_VERSION_1_0", {  { "VK_KHR_surface",  } } } } },
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+{ "VK_KHR_win32_surface", { { "VK_VERSION_1_0", {  { "VK_KHR_surface",  } } } } },
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+{ "VK_EXT_debug_marker", { { "VK_VERSION_1_0", {  { "VK_EXT_debug_report",  } } } } }, 
+{ "VK_KHR_video_queue", { { "VK_VERSION_1_1", {  { "VK_KHR_synchronization2",  } } } } }, 
+{ "VK_KHR_video_decode_queue", { { "VK_VERSION_1_0", {  { "VK_KHR_video_queue", "VK_KHR_synchronization2",  } } } } }, 
+{ "VK_EXT_transform_feedback", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } },
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+{ "VK_EXT_video_encode_h264", { { "VK_VERSION_1_0", {  { "VK_KHR_video_encode_queue",  } } } } }, 
+{ "VK_EXT_video_encode_h265", { { "VK_VERSION_1_0", {  { "VK_KHR_video_encode_queue",  } } } } },
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+{ "VK_KHR_video_decode_h264", { { "VK_VERSION_1_0", {  { "VK_KHR_video_decode_queue",  } } } } }, 
+{ "VK_AMD_texture_gather_bias_lod", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_KHR_dynamic_rendering", { { "VK_VERSION_1_0", {  { "VK_KHR_depth_stencil_resolve", "VK_KHR_get_physical_device_properties2",  } } } } },
+#if defined( VK_USE_PLATFORM_GGP )
+{ "VK_GGP_stream_descriptor_surface", { { "VK_VERSION_1_0", {  { "VK_KHR_surface",  } } } } },
+#endif /*VK_USE_PLATFORM_GGP*/
+{ "VK_NV_corner_sampled_image", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_KHR_multiview", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_NV_external_memory", { { "VK_VERSION_1_0", {  { "VK_NV_external_memory_capabilities",  } } } } },
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+{ "VK_NV_external_memory_win32", { { "VK_VERSION_1_0", {  { "VK_NV_external_memory",  } } } } }, 
+{ "VK_NV_win32_keyed_mutex", { { "VK_VERSION_1_0", {  { "VK_NV_external_memory_win32",  } } } } },
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+{ "VK_KHR_device_group", { { "VK_VERSION_1_0", {  { "VK_KHR_device_group_creation",  } } } } },
+#if defined( VK_USE_PLATFORM_VI_NN )
+{ "VK_NN_vi_surface", { { "VK_VERSION_1_0", {  { "VK_KHR_surface",  } } } } },
+#endif /*VK_USE_PLATFORM_VI_NN*/
+{ "VK_EXT_texture_compression_astc_hdr", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_EXT_astc_decode_mode", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_EXT_pipeline_robustness", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_KHR_external_memory_capabilities", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_KHR_external_memory", { { "VK_VERSION_1_0", {  { "VK_KHR_external_memory_capabilities",  } } } } },
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+{ "VK_KHR_external_memory_win32", { { "VK_VERSION_1_0", {  { "VK_KHR_external_memory",  } } } } },
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+{ "VK_KHR_external_memory_fd", { { "VK_VERSION_1_0", {  { "VK_KHR_external_memory",  } } }, { "VK_VERSION_1_1", {  {  } } } } },
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+{ "VK_KHR_win32_keyed_mutex", { { "VK_VERSION_1_0", {  { "VK_KHR_external_memory_win32",  } } } } },
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+{ "VK_KHR_external_semaphore_capabilities", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_KHR_external_semaphore", { { "VK_VERSION_1_0", {  { "VK_KHR_external_semaphore_capabilities",  } } } } },
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+{ "VK_KHR_external_semaphore_win32", { { "VK_VERSION_1_0", {  { "VK_KHR_external_semaphore",  } } } } },
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+{ "VK_KHR_external_semaphore_fd", { { "VK_VERSION_1_0", {  { "VK_KHR_external_semaphore",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_KHR_push_descriptor", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_conditional_rendering", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_KHR_shader_float16_int8", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_KHR_16bit_storage", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2", "VK_KHR_storage_buffer_storage_class",  } } } } }, 
+{ "VK_KHR_incremental_present", { { "VK_VERSION_1_0", {  { "VK_KHR_swapchain",  } } } } }, 
+{ "VK_EXT_direct_mode_display", { { "VK_VERSION_1_0", {  { "VK_KHR_display",  } } } } },
+#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
+{ "VK_EXT_acquire_xlib_display", { { "VK_VERSION_1_0", {  { "VK_EXT_direct_mode_display",  } } } } },
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+{ "VK_EXT_display_surface_counter", { { "VK_VERSION_1_0", {  { "VK_KHR_display",  } } } } }, 
+{ "VK_EXT_display_control", { { "VK_VERSION_1_0", {  { "VK_EXT_display_surface_counter", "VK_KHR_swapchain",  } } } } }, 
+{ "VK_GOOGLE_display_timing", { { "VK_VERSION_1_0", {  { "VK_KHR_swapchain",  } } } } }, 
+{ "VK_NVX_multiview_per_view_attributes", { { "VK_VERSION_1_0", {  { "VK_KHR_multiview",  } } } } }, 
+{ "VK_EXT_discard_rectangles", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_EXT_conservative_rasterization", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_EXT_depth_clip_enable", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_EXT_swapchain_colorspace", { { "VK_VERSION_1_0", {  { "VK_KHR_surface",  } } } } }, 
+{ "VK_EXT_hdr_metadata", { { "VK_VERSION_1_0", {  { "VK_KHR_swapchain",  } } } } }, 
+{ "VK_KHR_imageless_framebuffer", { { "VK_VERSION_1_0", {  { "VK_KHR_maintenance2", "VK_KHR_image_format_list", "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_KHR_create_renderpass2", { { "VK_VERSION_1_0", {  { "VK_KHR_multiview", "VK_KHR_maintenance2",  } } } } }, 
+{ "VK_KHR_shared_presentable_image", { { "VK_VERSION_1_0", {  { "VK_KHR_swapchain", "VK_KHR_get_surface_capabilities2", "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  { "VK_KHR_swapchain", "VK_KHR_get_surface_capabilities2",  } } } } }, 
+{ "VK_KHR_external_fence_capabilities", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_KHR_external_fence", { { "VK_VERSION_1_0", {  { "VK_KHR_external_fence_capabilities",  } } } } },
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+{ "VK_KHR_external_fence_win32", { { "VK_VERSION_1_0", {  { "VK_KHR_external_fence",  } } } } },
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+{ "VK_KHR_external_fence_fd", { { "VK_VERSION_1_0", {  { "VK_KHR_external_fence",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_KHR_performance_query", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_KHR_get_surface_capabilities2", { { "VK_VERSION_1_0", {  { "VK_KHR_surface",  } } } } }, 
+{ "VK_KHR_variable_pointers", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2", "VK_KHR_storage_buffer_storage_class",  } } } } }, 
+{ "VK_KHR_get_display_properties2", { { "VK_VERSION_1_0", {  { "VK_KHR_display",  } } } } },
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+{ "VK_MVK_ios_surface", { { "VK_VERSION_1_0", {  { "VK_KHR_surface",  } } } } },
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+{ "VK_MVK_macos_surface", { { "VK_VERSION_1_0", {  { "VK_KHR_surface",  } } } } },
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+{ "VK_EXT_external_memory_dma_buf", { { "VK_VERSION_1_0", {  { "VK_KHR_external_memory_fd",  } } } } }, 
+{ "VK_EXT_queue_family_foreign", { { "VK_VERSION_1_0", {  { "VK_KHR_external_memory",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_KHR_dedicated_allocation", { { "VK_VERSION_1_0", {  { "VK_KHR_get_memory_requirements2",  } } } } },
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+{ "VK_ANDROID_external_memory_android_hardware_buffer", { { "VK_VERSION_1_0", {  { "VK_KHR_sampler_ycbcr_conversion", "VK_KHR_external_memory", "VK_EXT_queue_family_foreign", "VK_KHR_dedicated_allocation",  } } } } },
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+{ "VK_EXT_sampler_filter_minmax", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } },
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+{ "VK_AMDX_shader_enqueue", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2", "VK_KHR_synchronization2", "VK_KHR_pipeline_library", "VK_KHR_spirv_1_4",  } } } } },
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+{ "VK_EXT_inline_uniform_block", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2", "VK_KHR_maintenance1",  } } } } }, 
+{ "VK_EXT_sample_locations", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_EXT_blend_operation_advanced", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_KHR_acceleration_structure", { { "VK_VERSION_1_1", {  { "VK_EXT_descriptor_indexing", "VK_KHR_buffer_device_address", "VK_KHR_deferred_host_operations",  } } } } }, 
+{ "VK_KHR_ray_tracing_pipeline", { { "VK_VERSION_1_0", {  { "VK_KHR_spirv_1_4", "VK_KHR_acceleration_structure",  } } } } }, 
+{ "VK_KHR_ray_query", { { "VK_VERSION_1_0", {  { "VK_KHR_spirv_1_4", "VK_KHR_acceleration_structure",  } } } } }, 
+{ "VK_NV_shader_sm_builtins", { { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_KHR_sampler_ycbcr_conversion", { { "VK_VERSION_1_0", {  { "VK_KHR_maintenance1", "VK_KHR_bind_memory2", "VK_KHR_get_memory_requirements2", "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_image_drm_format_modifier", { { "VK_VERSION_1_0", {  { "VK_KHR_bind_memory2", "VK_KHR_get_physical_device_properties2", "VK_KHR_sampler_ycbcr_conversion", "VK_KHR_image_format_list",  } } }, { "VK_VERSION_1_1", {  { "VK_KHR_image_format_list",  } } }, { "VK_VERSION_1_2", {  {  } } } } }, 
+{ "VK_EXT_descriptor_indexing", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2", "VK_KHR_maintenance3",  } } } } },
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+{ "VK_KHR_portability_subset", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } },
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+{ "VK_NV_shading_rate_image", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_NV_ray_tracing", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2", "VK_KHR_get_memory_requirements2",  } } } } }, 
+{ "VK_NV_representative_fragment_test", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_KHR_maintenance3", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_KHR_shader_subgroup_extended_types", { { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_KHR_8bit_storage", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2", "VK_KHR_storage_buffer_storage_class",  } } } } }, 
+{ "VK_EXT_external_memory_host", { { "VK_VERSION_1_0", {  { "VK_KHR_external_memory",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_KHR_shader_atomic_int64", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_KHR_shader_clock", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_EXT_calibrated_timestamps", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_AMD_shader_core_properties", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_KHR_video_decode_h265", { { "VK_VERSION_1_0", {  { "VK_KHR_video_decode_queue",  } } } } }, 
+{ "VK_KHR_global_priority", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_vertex_attribute_divisor", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } },
+#if defined( VK_USE_PLATFORM_GGP )
+{ "VK_GGP_frame_token", { { "VK_VERSION_1_0", {  { "VK_KHR_swapchain", "VK_GGP_stream_descriptor_surface",  } } } } },
+#endif /*VK_USE_PLATFORM_GGP*/
+{ "VK_KHR_driver_properties", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_KHR_shader_float_controls", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_NV_shader_subgroup_partitioned", { { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_KHR_depth_stencil_resolve", { { "VK_VERSION_1_0", {  { "VK_KHR_create_renderpass2",  } } } } }, 
+{ "VK_KHR_swapchain_mutable_format", { { "VK_VERSION_1_0", {  { "VK_KHR_swapchain", "VK_KHR_maintenance2", "VK_KHR_image_format_list",  } } }, { "VK_VERSION_1_1", {  { "VK_KHR_swapchain", "VK_KHR_image_format_list",  } } }, { "VK_VERSION_1_2", {  { "VK_KHR_swapchain",  } } } } }, 
+{ "VK_NV_compute_shader_derivatives", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_NV_mesh_shader", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_NV_fragment_shader_barycentric", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_NV_shader_image_footprint", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_NV_scissor_exclusive", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_NV_device_diagnostic_checkpoints", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_KHR_timeline_semaphore", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_INTEL_shader_integer_functions2", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_KHR_vulkan_memory_model", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_pci_bus_info", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_AMD_display_native_hdr", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2", "VK_KHR_get_surface_capabilities2", "VK_KHR_swapchain",  } } } } },
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+{ "VK_FUCHSIA_imagepipe_surface", { { "VK_VERSION_1_0", {  { "VK_KHR_surface",  } } } } },
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+{ "VK_KHR_shader_terminate_invocation", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } },
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+{ "VK_EXT_metal_surface", { { "VK_VERSION_1_0", {  { "VK_KHR_surface",  } } } } },
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+{ "VK_EXT_fragment_density_map", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_scalar_block_layout", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_subgroup_size_control", { { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_KHR_fragment_shading_rate", { { "VK_VERSION_1_0", {  { "VK_KHR_create_renderpass2", "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  { "VK_KHR_create_renderpass2",  } } }, { "VK_VERSION_1_2", {  {  } } } } }, 
+{ "VK_AMD_shader_core_properties2", { { "VK_VERSION_1_0", {  { "VK_AMD_shader_core_properties",  } } } } }, 
+{ "VK_AMD_device_coherent_memory", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_shader_image_atomic_int64", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_KHR_spirv_1_4", { { "VK_VERSION_1_1", {  { "VK_KHR_shader_float_controls",  } } } } }, 
+{ "VK_EXT_memory_budget", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_EXT_memory_priority", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_KHR_surface_protected_capabilities", { { "VK_VERSION_1_1", {  { "VK_KHR_get_surface_capabilities2",  } } } } }, 
+{ "VK_NV_dedicated_allocation_image_aliasing", { { "VK_VERSION_1_0", {  { "VK_KHR_dedicated_allocation", "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_KHR_separate_depth_stencil_layouts", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2", "VK_KHR_create_renderpass2",  } } } } }, 
+{ "VK_EXT_buffer_device_address", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_KHR_present_wait", { { "VK_VERSION_1_0", {  { "VK_KHR_swapchain", "VK_KHR_present_id",  } } } } }, 
+{ "VK_NV_cooperative_matrix", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_NV_coverage_reduction_mode", { { "VK_VERSION_1_0", {  { "VK_NV_framebuffer_mixed_samples", "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_fragment_shader_interlock", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_EXT_ycbcr_image_arrays", { { "VK_VERSION_1_0", {  { "VK_KHR_sampler_ycbcr_conversion",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_KHR_uniform_buffer_standard_layout", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_provoking_vertex", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } },
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+{ "VK_EXT_full_screen_exclusive", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2", "VK_KHR_surface", "VK_KHR_get_surface_capabilities2", "VK_KHR_swapchain",  } } } } },
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+{ "VK_EXT_headless_surface", { { "VK_VERSION_1_0", {  { "VK_KHR_surface",  } } } } }, 
+{ "VK_KHR_buffer_device_address", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2", "VK_KHR_device_group",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_EXT_line_rasterization", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_EXT_shader_atomic_float", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_EXT_host_query_reset", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_index_type_uint8", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_EXT_extended_dynamic_state", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_KHR_pipeline_executable_properties", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_host_image_copy", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2", "VK_KHR_copy_commands2", "VK_KHR_format_feature_flags2",  } } } } }, 
+{ "VK_EXT_shader_atomic_float2", { { "VK_VERSION_1_0", {  { "VK_EXT_shader_atomic_float",  } } } } }, 
+{ "VK_EXT_surface_maintenance1", { { "VK_VERSION_1_0", {  { "VK_KHR_surface", "VK_KHR_get_surface_capabilities2",  } } } } }, 
+{ "VK_EXT_swapchain_maintenance1", { { "VK_VERSION_1_0", {  { "VK_KHR_swapchain", "VK_EXT_surface_maintenance1", "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_shader_demote_to_helper_invocation", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_NV_device_generated_commands", { { "VK_VERSION_1_1", {  { "VK_KHR_buffer_device_address",  } } } } }, 
+{ "VK_NV_inherited_viewport_scissor", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_KHR_shader_integer_dot_product", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_texel_buffer_alignment", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_QCOM_render_pass_transform", { { "VK_VERSION_1_0", {  { "VK_KHR_swapchain", "VK_KHR_surface",  } } } } }, 
+{ "VK_EXT_depth_bias_control", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_device_memory_report", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_acquire_drm_display", { { "VK_VERSION_1_0", {  { "VK_EXT_direct_mode_display",  } } } } }, 
+{ "VK_EXT_robustness2", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_EXT_custom_border_color", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_NV_present_barrier", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2", "VK_KHR_surface", "VK_KHR_get_surface_capabilities2", "VK_KHR_swapchain",  } } } } }, 
+{ "VK_KHR_present_id", { { "VK_VERSION_1_0", {  { "VK_KHR_swapchain", "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_private_data", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_pipeline_creation_cache_control", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } },
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+{ "VK_KHR_video_encode_queue", { { "VK_VERSION_1_0", {  { "VK_KHR_video_queue", "VK_KHR_synchronization2",  } } } } },
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+{ "VK_NV_device_diagnostics_config", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_KHR_synchronization2", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_EXT_descriptor_buffer", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2", "VK_KHR_buffer_device_address", "VK_KHR_synchronization2", "VK_EXT_descriptor_indexing",  } } } } }, 
+{ "VK_EXT_graphics_pipeline_library", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2", "VK_KHR_pipeline_library",  } } } } }, 
+{ "VK_AMD_shader_early_and_late_fragment_tests", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_KHR_fragment_shader_barycentric", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_KHR_shader_subgroup_uniform_control_flow", { { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_KHR_zero_initialize_workgroup_memory", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_NV_fragment_shading_rate_enums", { { "VK_VERSION_1_0", {  { "VK_KHR_fragment_shading_rate",  } } } } }, 
+{ "VK_NV_ray_tracing_motion_blur", { { "VK_VERSION_1_0", {  { "VK_KHR_ray_tracing_pipeline",  } } } } }, 
+{ "VK_EXT_mesh_shader", { { "VK_VERSION_1_0", {  { "VK_KHR_spirv_1_4",  } } } } }, 
+{ "VK_EXT_ycbcr_2plane_444_formats", { { "VK_VERSION_1_0", {  { "VK_KHR_sampler_ycbcr_conversion",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_EXT_fragment_density_map2", { { "VK_VERSION_1_0", {  { "VK_EXT_fragment_density_map",  } } } } }, 
+{ "VK_QCOM_rotated_copy_commands", { { "VK_VERSION_1_0", {  { "VK_KHR_swapchain", "VK_KHR_copy_commands2",  } } } } }, 
+{ "VK_EXT_image_robustness", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_KHR_workgroup_memory_explicit_layout", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_KHR_copy_commands2", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_EXT_image_compression_control", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_attachment_feedback_loop_layout", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_4444_formats", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_EXT_device_fault", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_ARM_rasterization_order_attachment_access", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_rgba10x6_formats", { { "VK_VERSION_1_0", {  { "VK_KHR_sampler_ycbcr_conversion",  } } } } },
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+{ "VK_NV_acquire_winrt_display", { { "VK_VERSION_1_0", {  { "VK_EXT_direct_mode_display",  } } } } },
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+{ "VK_EXT_directfb_surface", { { "VK_VERSION_1_0", {  { "VK_KHR_surface",  } } } } },
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+{ "VK_VALVE_mutable_descriptor_type", { { "VK_VERSION_1_0", {  { "VK_KHR_maintenance3",  } } } } }, 
+{ "VK_EXT_vertex_input_dynamic_state", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_EXT_physical_device_drm", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_device_address_binding_report", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2", "VK_EXT_debug_utils",  } } } } }, 
+{ "VK_EXT_depth_clip_control", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_primitive_topology_list_restart", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_KHR_format_feature_flags2", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } },
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+{ "VK_FUCHSIA_external_memory", { { "VK_VERSION_1_0", {  { "VK_KHR_external_memory_capabilities", "VK_KHR_external_memory",  } } } } }, 
+{ "VK_FUCHSIA_external_semaphore", { { "VK_VERSION_1_0", {  { "VK_KHR_external_semaphore_capabilities", "VK_KHR_external_semaphore",  } } } } }, 
+{ "VK_FUCHSIA_buffer_collection", { { "VK_VERSION_1_0", {  { "VK_FUCHSIA_external_memory", "VK_KHR_sampler_ycbcr_conversion",  } } } } },
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+{ "VK_HUAWEI_subpass_shading", { { "VK_VERSION_1_0", {  { "VK_KHR_create_renderpass2", "VK_KHR_synchronization2",  } } } } }, 
+{ "VK_HUAWEI_invocation_mask", { { "VK_VERSION_1_0", {  { "VK_KHR_ray_tracing_pipeline", "VK_KHR_synchronization2",  } } } } }, 
+{ "VK_NV_external_memory_rdma", { { "VK_VERSION_1_0", {  { "VK_KHR_external_memory",  } } } } }, 
+{ "VK_EXT_pipeline_properties", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_multisampled_render_to_single_sampled", { { "VK_VERSION_1_0", {  { "VK_KHR_create_renderpass2", "VK_KHR_depth_stencil_resolve",  } } } } }, 
+{ "VK_EXT_extended_dynamic_state2", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } },
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+{ "VK_QNX_screen_surface", { { "VK_VERSION_1_0", {  { "VK_KHR_surface",  } } } } },
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+{ "VK_EXT_color_write_enable", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } }, { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_EXT_primitives_generated_query", { { "VK_VERSION_1_0", {  { "VK_EXT_transform_feedback",  } } } } }, 
+{ "VK_KHR_ray_tracing_maintenance1", { { "VK_VERSION_1_0", {  { "VK_KHR_acceleration_structure",  } } } } }, 
+{ "VK_EXT_global_priority_query", { { "VK_VERSION_1_0", {  { "VK_EXT_global_priority", "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_image_view_min_lod", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_multi_draw", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_image_2d_view_of_3d", { { "VK_VERSION_1_0", {  { "VK_KHR_maintenance1", "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_shader_tile_image", { { "VK_VERSION_1_3", {  {  } } } } }, 
+{ "VK_EXT_opacity_micromap", { { "VK_VERSION_1_0", {  { "VK_KHR_acceleration_structure", "VK_KHR_synchronization2",  } } } } },
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+{ "VK_NV_displacement_micromap", { { "VK_VERSION_1_0", {  { "VK_EXT_opacity_micromap",  } } } } },
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+{ "VK_HUAWEI_cluster_culling_shader", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_border_color_swizzle", { { "VK_VERSION_1_0", {  { "VK_EXT_custom_border_color",  } } } } }, 
+{ "VK_EXT_pageable_device_local_memory", { { "VK_VERSION_1_0", {  { "VK_EXT_memory_priority",  } } } } }, 
+{ "VK_KHR_maintenance4", { { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_ARM_shader_core_properties", { { "VK_VERSION_1_1", {  {  } } } } }, 
+{ "VK_EXT_image_sliced_view_of_3d", { { "VK_VERSION_1_0", {  { "VK_KHR_maintenance1", "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_VALVE_descriptor_set_host_mapping", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_depth_clamp_zero_one", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_non_seamless_cube_map", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_QCOM_fragment_density_map_offset", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2", "VK_EXT_fragment_density_map",  } } } } }, 
+{ "VK_NV_copy_memory_indirect", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2", "VK_KHR_buffer_device_address",  } } } } }, 
+{ "VK_NV_memory_decompression", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2", "VK_KHR_buffer_device_address",  } } } } }, 
+{ "VK_NV_device_generated_commands_compute", { { "VK_VERSION_1_0", {  { "VK_NV_device_generated_commands",  } } } } }, 
+{ "VK_NV_linear_color_attachment", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_GOOGLE_surfaceless_query", { { "VK_VERSION_1_0", {  { "VK_KHR_surface",  } } } } }, 
+{ "VK_EXT_image_compression_control_swapchain", { { "VK_VERSION_1_0", {  { "VK_EXT_image_compression_control",  } } } } }, 
+{ "VK_QCOM_image_processing", { { "VK_VERSION_1_0", {  { "VK_KHR_format_feature_flags2",  } } } } }, 
+{ "VK_EXT_nested_command_buffer", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_external_memory_acquire_unmodified", { { "VK_VERSION_1_0", {  { "VK_KHR_external_memory",  } } } } }, 
+{ "VK_EXT_extended_dynamic_state3", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_subpass_merge_feedback", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_shader_module_identifier", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2", "VK_EXT_pipeline_creation_cache_control",  } } } } }, 
+{ "VK_EXT_rasterization_order_attachment_access", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_NV_optical_flow", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2", "VK_KHR_format_feature_flags2", "VK_KHR_synchronization2",  } } } } }, 
+{ "VK_EXT_legacy_dithering", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_pipeline_protected_access", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } },
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+{ "VK_ANDROID_external_format_resolve", { { "VK_VERSION_1_0", {  { "VK_ANDROID_external_memory_android_hardware_buffer",  } } } } },
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+{ "VK_KHR_maintenance5", { { "VK_VERSION_1_1", {  { "VK_KHR_dynamic_rendering",  } } } } }, 
+{ "VK_KHR_ray_tracing_position_fetch", { { "VK_VERSION_1_0", {  { "VK_KHR_acceleration_structure",  } } } } }, 
+{ "VK_EXT_shader_object", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2", "VK_KHR_dynamic_rendering",  } } }, { "VK_VERSION_1_1", {  { "VK_KHR_dynamic_rendering",  } } }, { "VK_VERSION_1_3", {  {  } } } } }, 
+{ "VK_QCOM_tile_properties", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_SEC_amigo_profiling", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_QCOM_multiview_per_view_viewports", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_NV_ray_tracing_invocation_reorder", { { "VK_VERSION_1_0", {  { "VK_KHR_ray_tracing_pipeline",  } } } } }, 
+{ "VK_EXT_mutable_descriptor_type", { { "VK_VERSION_1_0", {  { "VK_KHR_maintenance3",  } } } } }, 
+{ "VK_ARM_shader_core_builtins", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_EXT_pipeline_library_group_handles", { { "VK_VERSION_1_0", {  { "VK_KHR_ray_tracing_pipeline", "VK_KHR_pipeline_library",  } } } } }, 
+{ "VK_EXT_dynamic_rendering_unused_attachments", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2", "VK_KHR_dynamic_rendering",  } } }, { "VK_VERSION_1_1", {  { "VK_KHR_dynamic_rendering",  } } }, { "VK_VERSION_1_3", {  {  } } } } }, 
+{ "VK_KHR_cooperative_matrix", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_QCOM_image_processing2", { { "VK_VERSION_1_0", {  { "VK_QCOM_image_processing",  } } } } }, 
+{ "VK_QCOM_filter_cubic_weights", { { "VK_VERSION_1_0", {  { "VK_EXT_filter_cubic",  } } } } }, 
+{ "VK_QCOM_filter_cubic_clamp", { { "VK_VERSION_1_0", {  { "VK_EXT_filter_cubic", "VK_EXT_sampler_filter_minmax",  } } }, { "VK_VERSION_1_2", {  { "VK_EXT_filter_cubic",  } } } } }, 
+{ "VK_EXT_attachment_feedback_loop_dynamic_state", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2", "VK_EXT_attachment_feedback_loop_layout",  } } } } },
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+{ "VK_QNX_external_memory_screen_buffer", { { "VK_VERSION_1_0", {  { "VK_KHR_sampler_ycbcr_conversion", "VK_KHR_external_memory", "VK_KHR_dedicated_allocation",  } } }, { "VK_VERSION_1_1", {  { "VK_EXT_queue_family_foreign",  } } } } },
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+{ "VK_MSFT_layered_driver", { { "VK_VERSION_1_0", {  { "VK_KHR_get_physical_device_properties2",  } } } } }, 
+{ "VK_NV_descriptor_pool_overallocation", { { "VK_VERSION_1_1", {  {  } } } } } };
+    auto depIt = dependencies.find( extension );
+    return ( depIt != dependencies.end() ) ? depIt->second : noDependencies;
+  }
+
+  VULKAN_HPP_INLINE std::pair<bool, std::vector<std::vector<std::string>> const &> getExtensionDepends( std::string const & version,
+                                                                                                        std::string const & extension )
+  {
+#if !defined( NDEBUG )
+    static std::set<std::string> versions = { "VK_VERSION_1_0", "VK_VERSION_1_1", "VK_VERSION_1_2", "VK_VERSION_1_3" };
+    assert( versions.find( version ) != versions.end() );
+#endif
+    static std::vector<std::vector<std::string>> noDependencies;
+
+    std::map<std::string, std::vector<std::vector<std::string>>> const & dependencies = getExtensionDepends( extension );
+    if ( dependencies.empty() )
+    {
+      return { true, noDependencies };
+    }
+    auto depIt = dependencies.lower_bound( version );
+    if ( ( depIt == dependencies.end() ) || ( depIt->first != version ) )
+    {
+      depIt = std::prev( depIt );
+    }
+    if ( depIt == dependencies.end() )
+    {
+      return { false, noDependencies };
+    }
+    else
+    {
+      return { true, depIt->second };
+    }
+  }
+
+  VULKAN_HPP_INLINE std::map<std::string, std::string> const & getObsoletedExtensions()
+  {
+    static std::map<std::string, std::string> obsoletedExtensions = { { "VK_AMD_negative_viewport_height", "VK_KHR_maintenance1" } };
+    return obsoletedExtensions;
+  }
+
+  VULKAN_HPP_INLINE std::map<std::string, std::string> const & getPromotedExtensions()
+  {
+    static std::map<std::string, std::string> promotedExtensions = { 
+{ "VK_KHR_sampler_mirror_clamp_to_edge", "VK_VERSION_1_2"}, 
+{ "VK_EXT_debug_marker", "VK_EXT_debug_utils"}, 
+{ "VK_AMD_draw_indirect_count", "VK_KHR_draw_indirect_count"}, 
+{ "VK_KHR_dynamic_rendering", "VK_VERSION_1_3"}, 
+{ "VK_KHR_multiview", "VK_VERSION_1_1"},
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+{ "VK_NV_win32_keyed_mutex", "VK_KHR_win32_keyed_mutex"},
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+{ "VK_KHR_get_physical_device_properties2", "VK_VERSION_1_1"}, 
+{ "VK_KHR_device_group", "VK_VERSION_1_1"}, 
+{ "VK_KHR_shader_draw_parameters", "VK_VERSION_1_1"}, 
+{ "VK_EXT_texture_compression_astc_hdr", "VK_VERSION_1_3"}, 
+{ "VK_KHR_maintenance1", "VK_VERSION_1_1"}, 
+{ "VK_KHR_device_group_creation", "VK_VERSION_1_1"}, 
+{ "VK_KHR_external_memory_capabilities", "VK_VERSION_1_1"}, 
+{ "VK_KHR_external_memory", "VK_VERSION_1_1"}, 
+{ "VK_KHR_external_semaphore_capabilities", "VK_VERSION_1_1"}, 
+{ "VK_KHR_external_semaphore", "VK_VERSION_1_1"}, 
+{ "VK_KHR_shader_float16_int8", "VK_VERSION_1_2"}, 
+{ "VK_KHR_16bit_storage", "VK_VERSION_1_1"}, 
+{ "VK_KHR_descriptor_update_template", "VK_VERSION_1_1"}, 
+{ "VK_KHR_imageless_framebuffer", "VK_VERSION_1_2"}, 
+{ "VK_KHR_create_renderpass2", "VK_VERSION_1_2"}, 
+{ "VK_KHR_external_fence_capabilities", "VK_VERSION_1_1"}, 
+{ "VK_KHR_external_fence", "VK_VERSION_1_1"}, 
+{ "VK_KHR_maintenance2", "VK_VERSION_1_1"}, 
+{ "VK_KHR_variable_pointers", "VK_VERSION_1_1"}, 
+{ "VK_KHR_dedicated_allocation", "VK_VERSION_1_1"}, 
+{ "VK_EXT_sampler_filter_minmax", "VK_VERSION_1_2"}, 
+{ "VK_KHR_storage_buffer_storage_class", "VK_VERSION_1_1"}, 
+{ "VK_EXT_inline_uniform_block", "VK_VERSION_1_3"}, 
+{ "VK_KHR_relaxed_block_layout", "VK_VERSION_1_1"}, 
+{ "VK_KHR_get_memory_requirements2", "VK_VERSION_1_1"}, 
+{ "VK_KHR_image_format_list", "VK_VERSION_1_2"}, 
+{ "VK_KHR_sampler_ycbcr_conversion", "VK_VERSION_1_1"}, 
+{ "VK_KHR_bind_memory2", "VK_VERSION_1_1"}, 
+{ "VK_EXT_descriptor_indexing", "VK_VERSION_1_2"}, 
+{ "VK_EXT_shader_viewport_index_layer", "VK_VERSION_1_2"}, 
+{ "VK_KHR_maintenance3", "VK_VERSION_1_1"}, 
+{ "VK_KHR_draw_indirect_count", "VK_VERSION_1_2"}, 
+{ "VK_EXT_global_priority", "VK_KHR_global_priority"}, 
+{ "VK_KHR_shader_subgroup_extended_types", "VK_VERSION_1_2"}, 
+{ "VK_KHR_8bit_storage", "VK_VERSION_1_2"}, 
+{ "VK_KHR_shader_atomic_int64", "VK_VERSION_1_2"}, 
+{ "VK_EXT_pipeline_creation_feedback", "VK_VERSION_1_3"}, 
+{ "VK_KHR_driver_properties", "VK_VERSION_1_2"}, 
+{ "VK_KHR_shader_float_controls", "VK_VERSION_1_2"}, 
+{ "VK_KHR_depth_stencil_resolve", "VK_VERSION_1_2"}, 
+{ "VK_NV_fragment_shader_barycentric", "VK_KHR_fragment_shader_barycentric"}, 
+{ "VK_KHR_timeline_semaphore", "VK_VERSION_1_2"}, 
+{ "VK_KHR_vulkan_memory_model", "VK_VERSION_1_2"}, 
+{ "VK_KHR_shader_terminate_invocation", "VK_VERSION_1_3"}, 
+{ "VK_EXT_scalar_block_layout", "VK_VERSION_1_2"}, 
+{ "VK_EXT_subgroup_size_control", "VK_VERSION_1_3"}, 
+{ "VK_KHR_spirv_1_4", "VK_VERSION_1_2"}, 
+{ "VK_KHR_separate_depth_stencil_layouts", "VK_VERSION_1_2"}, 
+{ "VK_EXT_tooling_info", "VK_VERSION_1_3"}, 
+{ "VK_EXT_separate_stencil_usage", "VK_VERSION_1_2"}, 
+{ "VK_KHR_uniform_buffer_standard_layout", "VK_VERSION_1_2"}, 
+{ "VK_KHR_buffer_device_address", "VK_VERSION_1_2"}, 
+{ "VK_EXT_host_query_reset", "VK_VERSION_1_2"}, 
+{ "VK_EXT_extended_dynamic_state", "VK_VERSION_1_3"}, 
+{ "VK_EXT_shader_demote_to_helper_invocation", "VK_VERSION_1_3"}, 
+{ "VK_KHR_shader_integer_dot_product", "VK_VERSION_1_3"}, 
+{ "VK_EXT_texel_buffer_alignment", "VK_VERSION_1_3"}, 
+{ "VK_KHR_shader_non_semantic_info", "VK_VERSION_1_3"}, 
+{ "VK_EXT_private_data", "VK_VERSION_1_3"}, 
+{ "VK_EXT_pipeline_creation_cache_control", "VK_VERSION_1_3"}, 
+{ "VK_KHR_synchronization2", "VK_VERSION_1_3"}, 
+{ "VK_KHR_zero_initialize_workgroup_memory", "VK_VERSION_1_3"}, 
+{ "VK_EXT_ycbcr_2plane_444_formats", "VK_VERSION_1_3"}, 
+{ "VK_EXT_image_robustness", "VK_VERSION_1_3"}, 
+{ "VK_KHR_copy_commands2", "VK_VERSION_1_3"}, 
+{ "VK_EXT_4444_formats", "VK_VERSION_1_3"}, 
+{ "VK_ARM_rasterization_order_attachment_access", "VK_EXT_rasterization_order_attachment_access"}, 
+{ "VK_VALVE_mutable_descriptor_type", "VK_EXT_mutable_descriptor_type"}, 
+{ "VK_KHR_format_feature_flags2", "VK_VERSION_1_3"}, 
+{ "VK_EXT_extended_dynamic_state2", "VK_VERSION_1_3"}, 
+{ "VK_EXT_global_priority_query", "VK_KHR_global_priority"}, 
+{ "VK_KHR_maintenance4", "VK_VERSION_1_3"} };
+    return promotedExtensions;
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string getExtensionDeprecatedBy( std::string const & extension )
+  {
+    if ( extension == "VK_EXT_debug_report" )
+    {
+      return "VK_EXT_debug_utils";
+    }
+    if ( extension == "VK_NV_glsl_shader" )
+    {
+      return "";
+    }
+    if ( extension == "VK_NV_dedicated_allocation" )
+    {
+      return "VK_KHR_dedicated_allocation";
+    }
+    if ( extension == "VK_AMD_gpu_shader_half_float" )
+    {
+      return "VK_KHR_shader_float16_int8";
+    }
+    if ( extension == "VK_IMG_format_pvrtc" )
+    {
+      return "";
+    }
+    if ( extension == "VK_NV_external_memory_capabilities" )
+    {
+      return "VK_KHR_external_memory_capabilities";
+    }
+    if ( extension == "VK_NV_external_memory" )
+    {
+      return "VK_KHR_external_memory";
+    }
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+    if ( extension == "VK_NV_external_memory_win32" )
+    {
+      return "VK_KHR_external_memory_win32";
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    if ( extension == "VK_EXT_validation_flags" )
+    {
+      return "VK_EXT_validation_features";
+    }
+    if ( extension == "VK_EXT_shader_subgroup_ballot" )
+    {
+      return "VK_VERSION_1_2";
+    }
+    if ( extension == "VK_EXT_shader_subgroup_vote" )
+    {
+      return "VK_VERSION_1_1";
+    }
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+    if ( extension == "VK_MVK_ios_surface" )
+    {
+      return "VK_EXT_metal_surface";
+    }
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+    if ( extension == "VK_MVK_macos_surface" )
+    {
+      return "VK_EXT_metal_surface";
+    }
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+    if ( extension == "VK_AMD_gpu_shader_int16" )
+    {
+      return "VK_KHR_shader_float16_int8";
+    }
+    if ( extension == "VK_EXT_buffer_device_address" )
+    {
+      return "VK_KHR_buffer_device_address";
+    }
+    return "";
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string getExtensionObsoletedBy( std::string const & extension )
+  {
+    if ( extension == "VK_AMD_negative_viewport_height" )
+    {
+      return "VK_KHR_maintenance1";
+    }
+    return "";
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 std::string getExtensionPromotedTo( std::string const & extension )
+  {
+    if ( extension == "VK_KHR_sampler_mirror_clamp_to_edge" )
+    {
+      return "VK_VERSION_1_2";
+    }
+    if ( extension == "VK_EXT_debug_marker" )
+    {
+      return "VK_EXT_debug_utils";
+    }
+    if ( extension == "VK_AMD_draw_indirect_count" )
+    {
+      return "VK_KHR_draw_indirect_count";
+    }
+    if ( extension == "VK_KHR_dynamic_rendering" )
+    {
+      return "VK_VERSION_1_3";
+    }
+    if ( extension == "VK_KHR_multiview" )
+    {
+      return "VK_VERSION_1_1";
+    }
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+    if ( extension == "VK_NV_win32_keyed_mutex" )
+    {
+      return "VK_KHR_win32_keyed_mutex";
+    }
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+    if ( extension == "VK_KHR_get_physical_device_properties2" )
+    {
+      return "VK_VERSION_1_1";
+    }
+    if ( extension == "VK_KHR_device_group" )
+    {
+      return "VK_VERSION_1_1";
+    }
+    if ( extension == "VK_KHR_shader_draw_parameters" )
+    {
+      return "VK_VERSION_1_1";
+    }
+    if ( extension == "VK_EXT_texture_compression_astc_hdr" )
+    {
+      return "VK_VERSION_1_3";
+    }
+    if ( extension == "VK_KHR_maintenance1" )
+    {
+      return "VK_VERSION_1_1";
+    }
+    if ( extension == "VK_KHR_device_group_creation" )
+    {
+      return "VK_VERSION_1_1";
+    }
+    if ( extension == "VK_KHR_external_memory_capabilities" )
+    {
+      return "VK_VERSION_1_1";
+    }
+    if ( extension == "VK_KHR_external_memory" )
+    {
+      return "VK_VERSION_1_1";
+    }
+    if ( extension == "VK_KHR_external_semaphore_capabilities" )
+    {
+      return "VK_VERSION_1_1";
+    }
+    if ( extension == "VK_KHR_external_semaphore" )
+    {
+      return "VK_VERSION_1_1";
+    }
+    if ( extension == "VK_KHR_shader_float16_int8" )
+    {
+      return "VK_VERSION_1_2";
+    }
+    if ( extension == "VK_KHR_16bit_storage" )
+    {
+      return "VK_VERSION_1_1";
+    }
+    if ( extension == "VK_KHR_descriptor_update_template" )
+    {
+      return "VK_VERSION_1_1";
+    }
+    if ( extension == "VK_KHR_imageless_framebuffer" )
+    {
+      return "VK_VERSION_1_2";
+    }
+    if ( extension == "VK_KHR_create_renderpass2" )
+    {
+      return "VK_VERSION_1_2";
+    }
+    if ( extension == "VK_KHR_external_fence_capabilities" )
+    {
+      return "VK_VERSION_1_1";
+    }
+    if ( extension == "VK_KHR_external_fence" )
+    {
+      return "VK_VERSION_1_1";
+    }
+    if ( extension == "VK_KHR_maintenance2" )
+    {
+      return "VK_VERSION_1_1";
+    }
+    if ( extension == "VK_KHR_variable_pointers" )
+    {
+      return "VK_VERSION_1_1";
+    }
+    if ( extension == "VK_KHR_dedicated_allocation" )
+    {
+      return "VK_VERSION_1_1";
+    }
+    if ( extension == "VK_EXT_sampler_filter_minmax" )
+    {
+      return "VK_VERSION_1_2";
+    }
+    if ( extension == "VK_KHR_storage_buffer_storage_class" )
+    {
+      return "VK_VERSION_1_1";
+    }
+    if ( extension == "VK_EXT_inline_uniform_block" )
+    {
+      return "VK_VERSION_1_3";
+    }
+    if ( extension == "VK_KHR_relaxed_block_layout" )
+    {
+      return "VK_VERSION_1_1";
+    }
+    if ( extension == "VK_KHR_get_memory_requirements2" )
+    {
+      return "VK_VERSION_1_1";
+    }
+    if ( extension == "VK_KHR_image_format_list" )
+    {
+      return "VK_VERSION_1_2";
+    }
+    if ( extension == "VK_KHR_sampler_ycbcr_conversion" )
+    {
+      return "VK_VERSION_1_1";
+    }
+    if ( extension == "VK_KHR_bind_memory2" )
+    {
+      return "VK_VERSION_1_1";
+    }
+    if ( extension == "VK_EXT_descriptor_indexing" )
+    {
+      return "VK_VERSION_1_2";
+    }
+    if ( extension == "VK_EXT_shader_viewport_index_layer" )
+    {
+      return "VK_VERSION_1_2";
+    }
+    if ( extension == "VK_KHR_maintenance3" )
+    {
+      return "VK_VERSION_1_1";
+    }
+    if ( extension == "VK_KHR_draw_indirect_count" )
+    {
+      return "VK_VERSION_1_2";
+    }
+    if ( extension == "VK_EXT_global_priority" )
+    {
+      return "VK_KHR_global_priority";
+    }
+    if ( extension == "VK_KHR_shader_subgroup_extended_types" )
+    {
+      return "VK_VERSION_1_2";
+    }
+    if ( extension == "VK_KHR_8bit_storage" )
+    {
+      return "VK_VERSION_1_2";
+    }
+    if ( extension == "VK_KHR_shader_atomic_int64" )
+    {
+      return "VK_VERSION_1_2";
+    }
+    if ( extension == "VK_EXT_pipeline_creation_feedback" )
+    {
+      return "VK_VERSION_1_3";
+    }
+    if ( extension == "VK_KHR_driver_properties" )
+    {
+      return "VK_VERSION_1_2";
+    }
+    if ( extension == "VK_KHR_shader_float_controls" )
+    {
+      return "VK_VERSION_1_2";
+    }
+    if ( extension == "VK_KHR_depth_stencil_resolve" )
+    {
+      return "VK_VERSION_1_2";
+    }
+    if ( extension == "VK_NV_fragment_shader_barycentric" )
+    {
+      return "VK_KHR_fragment_shader_barycentric";
+    }
+    if ( extension == "VK_KHR_timeline_semaphore" )
+    {
+      return "VK_VERSION_1_2";
+    }
+    if ( extension == "VK_KHR_vulkan_memory_model" )
+    {
+      return "VK_VERSION_1_2";
+    }
+    if ( extension == "VK_KHR_shader_terminate_invocation" )
+    {
+      return "VK_VERSION_1_3";
+    }
+    if ( extension == "VK_EXT_scalar_block_layout" )
+    {
+      return "VK_VERSION_1_2";
+    }
+    if ( extension == "VK_EXT_subgroup_size_control" )
+    {
+      return "VK_VERSION_1_3";
+    }
+    if ( extension == "VK_KHR_spirv_1_4" )
+    {
+      return "VK_VERSION_1_2";
+    }
+    if ( extension == "VK_KHR_separate_depth_stencil_layouts" )
+    {
+      return "VK_VERSION_1_2";
+    }
+    if ( extension == "VK_EXT_tooling_info" )
+    {
+      return "VK_VERSION_1_3";
+    }
+    if ( extension == "VK_EXT_separate_stencil_usage" )
+    {
+      return "VK_VERSION_1_2";
+    }
+    if ( extension == "VK_KHR_uniform_buffer_standard_layout" )
+    {
+      return "VK_VERSION_1_2";
+    }
+    if ( extension == "VK_KHR_buffer_device_address" )
+    {
+      return "VK_VERSION_1_2";
+    }
+    if ( extension == "VK_EXT_host_query_reset" )
+    {
+      return "VK_VERSION_1_2";
+    }
+    if ( extension == "VK_EXT_extended_dynamic_state" )
+    {
+      return "VK_VERSION_1_3";
+    }
+    if ( extension == "VK_EXT_shader_demote_to_helper_invocation" )
+    {
+      return "VK_VERSION_1_3";
+    }
+    if ( extension == "VK_KHR_shader_integer_dot_product" )
+    {
+      return "VK_VERSION_1_3";
+    }
+    if ( extension == "VK_EXT_texel_buffer_alignment" )
+    {
+      return "VK_VERSION_1_3";
+    }
+    if ( extension == "VK_KHR_shader_non_semantic_info" )
+    {
+      return "VK_VERSION_1_3";
+    }
+    if ( extension == "VK_EXT_private_data" )
+    {
+      return "VK_VERSION_1_3";
+    }
+    if ( extension == "VK_EXT_pipeline_creation_cache_control" )
+    {
+      return "VK_VERSION_1_3";
+    }
+    if ( extension == "VK_KHR_synchronization2" )
+    {
+      return "VK_VERSION_1_3";
+    }
+    if ( extension == "VK_KHR_zero_initialize_workgroup_memory" )
+    {
+      return "VK_VERSION_1_3";
+    }
+    if ( extension == "VK_EXT_ycbcr_2plane_444_formats" )
+    {
+      return "VK_VERSION_1_3";
+    }
+    if ( extension == "VK_EXT_image_robustness" )
+    {
+      return "VK_VERSION_1_3";
+    }
+    if ( extension == "VK_KHR_copy_commands2" )
+    {
+      return "VK_VERSION_1_3";
+    }
+    if ( extension == "VK_EXT_4444_formats" )
+    {
+      return "VK_VERSION_1_3";
+    }
+    if ( extension == "VK_ARM_rasterization_order_attachment_access" )
+    {
+      return "VK_EXT_rasterization_order_attachment_access";
+    }
+    if ( extension == "VK_VALVE_mutable_descriptor_type" )
+    {
+      return "VK_EXT_mutable_descriptor_type";
+    }
+    if ( extension == "VK_KHR_format_feature_flags2" )
+    {
+      return "VK_VERSION_1_3";
+    }
+    if ( extension == "VK_EXT_extended_dynamic_state2" )
+    {
+      return "VK_VERSION_1_3";
+    }
+    if ( extension == "VK_EXT_global_priority_query" )
+    {
+      return "VK_KHR_global_priority";
+    }
+    if ( extension == "VK_KHR_maintenance4" )
+    {
+      return "VK_VERSION_1_3";
+    }
+    return "";
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isDeprecatedExtension( std::string const & extension )
+  {
+    return ( extension == "VK_EXT_debug_report" ) || ( extension == "VK_NV_glsl_shader" ) || ( extension == "VK_NV_dedicated_allocation" ) ||
+           ( extension == "VK_AMD_gpu_shader_half_float" ) || ( extension == "VK_IMG_format_pvrtc" ) || ( extension == "VK_NV_external_memory_capabilities" ) ||
+           ( extension == "VK_NV_external_memory" ) ||
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+           ( extension == "VK_NV_external_memory_win32" ) ||
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+           ( extension == "VK_EXT_validation_flags" ) || ( extension == "VK_EXT_shader_subgroup_ballot" ) || ( extension == "VK_EXT_shader_subgroup_vote" ) ||
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+           ( extension == "VK_MVK_ios_surface" ) ||
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+           ( extension == "VK_MVK_macos_surface" ) ||
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+           ( extension == "VK_AMD_gpu_shader_int16" ) || ( extension == "VK_EXT_buffer_device_address" );
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isDeviceExtension( std::string const & extension )
+  {
+    return ( extension == "VK_KHR_swapchain" ) || ( extension == "VK_KHR_display_swapchain" ) || ( extension == "VK_NV_glsl_shader" ) ||
+           ( extension == "VK_EXT_depth_range_unrestricted" ) || ( extension == "VK_KHR_sampler_mirror_clamp_to_edge" ) ||
+           ( extension == "VK_IMG_filter_cubic" ) || ( extension == "VK_AMD_rasterization_order" ) || ( extension == "VK_AMD_shader_trinary_minmax" ) ||
+           ( extension == "VK_AMD_shader_explicit_vertex_parameter" ) || ( extension == "VK_EXT_debug_marker" ) || ( extension == "VK_KHR_video_queue" ) ||
+           ( extension == "VK_KHR_video_decode_queue" ) || ( extension == "VK_AMD_gcn_shader" ) || ( extension == "VK_NV_dedicated_allocation" ) ||
+           ( extension == "VK_EXT_transform_feedback" ) || ( extension == "VK_NVX_binary_import" ) || ( extension == "VK_NVX_image_view_handle" ) ||
+           ( extension == "VK_AMD_draw_indirect_count" ) || ( extension == "VK_AMD_negative_viewport_height" ) ||
+           ( extension == "VK_AMD_gpu_shader_half_float" ) || ( extension == "VK_AMD_shader_ballot" )
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+           || ( extension == "VK_EXT_video_encode_h264" ) || ( extension == "VK_EXT_video_encode_h265" )
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+           || ( extension == "VK_KHR_video_decode_h264" ) || ( extension == "VK_AMD_texture_gather_bias_lod" ) || ( extension == "VK_AMD_shader_info" ) ||
+           ( extension == "VK_KHR_dynamic_rendering" ) || ( extension == "VK_AMD_shader_image_load_store_lod" ) ||
+           ( extension == "VK_NV_corner_sampled_image" ) || ( extension == "VK_KHR_multiview" ) || ( extension == "VK_IMG_format_pvrtc" ) ||
+           ( extension == "VK_NV_external_memory" )
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+           || ( extension == "VK_NV_external_memory_win32" ) || ( extension == "VK_NV_win32_keyed_mutex" )
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+           || ( extension == "VK_KHR_device_group" ) || ( extension == "VK_KHR_shader_draw_parameters" ) || ( extension == "VK_EXT_shader_subgroup_ballot" ) ||
+           ( extension == "VK_EXT_shader_subgroup_vote" ) || ( extension == "VK_EXT_texture_compression_astc_hdr" ) ||
+           ( extension == "VK_EXT_astc_decode_mode" ) || ( extension == "VK_EXT_pipeline_robustness" ) || ( extension == "VK_KHR_maintenance1" ) ||
+           ( extension == "VK_KHR_external_memory" )
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+           || ( extension == "VK_KHR_external_memory_win32" )
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+           || ( extension == "VK_KHR_external_memory_fd" )
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+           || ( extension == "VK_KHR_win32_keyed_mutex" )
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+           || ( extension == "VK_KHR_external_semaphore" )
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+           || ( extension == "VK_KHR_external_semaphore_win32" )
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+           || ( extension == "VK_KHR_external_semaphore_fd" ) || ( extension == "VK_KHR_push_descriptor" ) || ( extension == "VK_EXT_conditional_rendering" ) ||
+           ( extension == "VK_KHR_shader_float16_int8" ) || ( extension == "VK_KHR_16bit_storage" ) || ( extension == "VK_KHR_incremental_present" ) ||
+           ( extension == "VK_KHR_descriptor_update_template" ) || ( extension == "VK_NV_clip_space_w_scaling" ) || ( extension == "VK_EXT_display_control" ) ||
+           ( extension == "VK_GOOGLE_display_timing" ) || ( extension == "VK_NV_sample_mask_override_coverage" ) ||
+           ( extension == "VK_NV_geometry_shader_passthrough" ) || ( extension == "VK_NV_viewport_array2" ) ||
+           ( extension == "VK_NVX_multiview_per_view_attributes" ) || ( extension == "VK_NV_viewport_swizzle" ) ||
+           ( extension == "VK_EXT_discard_rectangles" ) || ( extension == "VK_EXT_conservative_rasterization" ) ||
+           ( extension == "VK_EXT_depth_clip_enable" ) || ( extension == "VK_EXT_hdr_metadata" ) || ( extension == "VK_KHR_imageless_framebuffer" ) ||
+           ( extension == "VK_KHR_create_renderpass2" ) || ( extension == "VK_KHR_shared_presentable_image" ) || ( extension == "VK_KHR_external_fence" )
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+           || ( extension == "VK_KHR_external_fence_win32" )
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+           || ( extension == "VK_KHR_external_fence_fd" ) || ( extension == "VK_KHR_performance_query" ) || ( extension == "VK_KHR_maintenance2" ) ||
+           ( extension == "VK_KHR_variable_pointers" ) || ( extension == "VK_EXT_external_memory_dma_buf" ) || ( extension == "VK_EXT_queue_family_foreign" ) ||
+           ( extension == "VK_KHR_dedicated_allocation" )
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+           || ( extension == "VK_ANDROID_external_memory_android_hardware_buffer" )
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+           || ( extension == "VK_EXT_sampler_filter_minmax" ) || ( extension == "VK_KHR_storage_buffer_storage_class" ) ||
+           ( extension == "VK_AMD_gpu_shader_int16" )
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+           || ( extension == "VK_AMDX_shader_enqueue" )
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+           || ( extension == "VK_AMD_mixed_attachment_samples" ) || ( extension == "VK_AMD_shader_fragment_mask" ) ||
+           ( extension == "VK_EXT_inline_uniform_block" ) || ( extension == "VK_EXT_shader_stencil_export" ) || ( extension == "VK_EXT_sample_locations" ) ||
+           ( extension == "VK_KHR_relaxed_block_layout" ) || ( extension == "VK_KHR_get_memory_requirements2" ) ||
+           ( extension == "VK_KHR_image_format_list" ) || ( extension == "VK_EXT_blend_operation_advanced" ) ||
+           ( extension == "VK_NV_fragment_coverage_to_color" ) || ( extension == "VK_KHR_acceleration_structure" ) ||
+           ( extension == "VK_KHR_ray_tracing_pipeline" ) || ( extension == "VK_KHR_ray_query" ) || ( extension == "VK_NV_framebuffer_mixed_samples" ) ||
+           ( extension == "VK_NV_fill_rectangle" ) || ( extension == "VK_NV_shader_sm_builtins" ) || ( extension == "VK_EXT_post_depth_coverage" ) ||
+           ( extension == "VK_KHR_sampler_ycbcr_conversion" ) || ( extension == "VK_KHR_bind_memory2" ) ||
+           ( extension == "VK_EXT_image_drm_format_modifier" ) || ( extension == "VK_EXT_validation_cache" ) || ( extension == "VK_EXT_descriptor_indexing" ) ||
+           ( extension == "VK_EXT_shader_viewport_index_layer" )
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+           || ( extension == "VK_KHR_portability_subset" )
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+           || ( extension == "VK_NV_shading_rate_image" ) || ( extension == "VK_NV_ray_tracing" ) || ( extension == "VK_NV_representative_fragment_test" ) ||
+           ( extension == "VK_KHR_maintenance3" ) || ( extension == "VK_KHR_draw_indirect_count" ) || ( extension == "VK_EXT_filter_cubic" ) ||
+           ( extension == "VK_QCOM_render_pass_shader_resolve" ) || ( extension == "VK_EXT_global_priority" ) ||
+           ( extension == "VK_KHR_shader_subgroup_extended_types" ) || ( extension == "VK_KHR_8bit_storage" ) ||
+           ( extension == "VK_EXT_external_memory_host" ) || ( extension == "VK_AMD_buffer_marker" ) || ( extension == "VK_KHR_shader_atomic_int64" ) ||
+           ( extension == "VK_KHR_shader_clock" ) || ( extension == "VK_AMD_pipeline_compiler_control" ) || ( extension == "VK_EXT_calibrated_timestamps" ) ||
+           ( extension == "VK_AMD_shader_core_properties" ) || ( extension == "VK_KHR_video_decode_h265" ) || ( extension == "VK_KHR_global_priority" ) ||
+           ( extension == "VK_AMD_memory_overallocation_behavior" ) || ( extension == "VK_EXT_vertex_attribute_divisor" )
+#if defined( VK_USE_PLATFORM_GGP )
+           || ( extension == "VK_GGP_frame_token" )
+#endif /*VK_USE_PLATFORM_GGP*/
+           || ( extension == "VK_EXT_pipeline_creation_feedback" ) || ( extension == "VK_KHR_driver_properties" ) ||
+           ( extension == "VK_KHR_shader_float_controls" ) || ( extension == "VK_NV_shader_subgroup_partitioned" ) ||
+           ( extension == "VK_KHR_depth_stencil_resolve" ) || ( extension == "VK_KHR_swapchain_mutable_format" ) ||
+           ( extension == "VK_NV_compute_shader_derivatives" ) || ( extension == "VK_NV_mesh_shader" ) ||
+           ( extension == "VK_NV_fragment_shader_barycentric" ) || ( extension == "VK_NV_shader_image_footprint" ) ||
+           ( extension == "VK_NV_scissor_exclusive" ) || ( extension == "VK_NV_device_diagnostic_checkpoints" ) ||
+           ( extension == "VK_KHR_timeline_semaphore" ) || ( extension == "VK_INTEL_shader_integer_functions2" ) ||
+           ( extension == "VK_INTEL_performance_query" ) || ( extension == "VK_KHR_vulkan_memory_model" ) || ( extension == "VK_EXT_pci_bus_info" ) ||
+           ( extension == "VK_AMD_display_native_hdr" ) || ( extension == "VK_KHR_shader_terminate_invocation" ) ||
+           ( extension == "VK_EXT_fragment_density_map" ) || ( extension == "VK_EXT_scalar_block_layout" ) ||
+           ( extension == "VK_GOOGLE_hlsl_functionality1" ) || ( extension == "VK_GOOGLE_decorate_string" ) ||
+           ( extension == "VK_EXT_subgroup_size_control" ) || ( extension == "VK_KHR_fragment_shading_rate" ) ||
+           ( extension == "VK_AMD_shader_core_properties2" ) || ( extension == "VK_AMD_device_coherent_memory" ) ||
+           ( extension == "VK_EXT_shader_image_atomic_int64" ) || ( extension == "VK_KHR_spirv_1_4" ) || ( extension == "VK_EXT_memory_budget" ) ||
+           ( extension == "VK_EXT_memory_priority" ) || ( extension == "VK_NV_dedicated_allocation_image_aliasing" ) ||
+           ( extension == "VK_KHR_separate_depth_stencil_layouts" ) || ( extension == "VK_EXT_buffer_device_address" ) ||
+           ( extension == "VK_EXT_tooling_info" ) || ( extension == "VK_EXT_separate_stencil_usage" ) || ( extension == "VK_KHR_present_wait" ) ||
+           ( extension == "VK_NV_cooperative_matrix" ) || ( extension == "VK_NV_coverage_reduction_mode" ) ||
+           ( extension == "VK_EXT_fragment_shader_interlock" ) || ( extension == "VK_EXT_ycbcr_image_arrays" ) ||
+           ( extension == "VK_KHR_uniform_buffer_standard_layout" ) || ( extension == "VK_EXT_provoking_vertex" )
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+           || ( extension == "VK_EXT_full_screen_exclusive" )
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+           || ( extension == "VK_KHR_buffer_device_address" ) || ( extension == "VK_EXT_line_rasterization" ) ||
+           ( extension == "VK_EXT_shader_atomic_float" ) || ( extension == "VK_EXT_host_query_reset" ) || ( extension == "VK_EXT_index_type_uint8" ) ||
+           ( extension == "VK_EXT_extended_dynamic_state" ) || ( extension == "VK_KHR_deferred_host_operations" ) ||
+           ( extension == "VK_KHR_pipeline_executable_properties" ) || ( extension == "VK_EXT_host_image_copy" ) || ( extension == "VK_KHR_map_memory2" ) ||
+           ( extension == "VK_EXT_shader_atomic_float2" ) || ( extension == "VK_EXT_swapchain_maintenance1" ) ||
+           ( extension == "VK_EXT_shader_demote_to_helper_invocation" ) || ( extension == "VK_NV_device_generated_commands" ) ||
+           ( extension == "VK_NV_inherited_viewport_scissor" ) || ( extension == "VK_KHR_shader_integer_dot_product" ) ||
+           ( extension == "VK_EXT_texel_buffer_alignment" ) || ( extension == "VK_QCOM_render_pass_transform" ) ||
+           ( extension == "VK_EXT_depth_bias_control" ) || ( extension == "VK_EXT_device_memory_report" ) || ( extension == "VK_EXT_robustness2" ) ||
+           ( extension == "VK_EXT_custom_border_color" ) || ( extension == "VK_GOOGLE_user_type" ) || ( extension == "VK_KHR_pipeline_library" ) ||
+           ( extension == "VK_NV_present_barrier" ) || ( extension == "VK_KHR_shader_non_semantic_info" ) || ( extension == "VK_KHR_present_id" ) ||
+           ( extension == "VK_EXT_private_data" ) || ( extension == "VK_EXT_pipeline_creation_cache_control" )
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+           || ( extension == "VK_KHR_video_encode_queue" )
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+           || ( extension == "VK_NV_device_diagnostics_config" ) || ( extension == "VK_QCOM_render_pass_store_ops" ) || ( extension == "VK_NV_low_latency" )
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+           || ( extension == "VK_EXT_metal_objects" )
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+           || ( extension == "VK_KHR_synchronization2" ) || ( extension == "VK_EXT_descriptor_buffer" ) ||
+           ( extension == "VK_EXT_graphics_pipeline_library" ) || ( extension == "VK_AMD_shader_early_and_late_fragment_tests" ) ||
+           ( extension == "VK_KHR_fragment_shader_barycentric" ) || ( extension == "VK_KHR_shader_subgroup_uniform_control_flow" ) ||
+           ( extension == "VK_KHR_zero_initialize_workgroup_memory" ) || ( extension == "VK_NV_fragment_shading_rate_enums" ) ||
+           ( extension == "VK_NV_ray_tracing_motion_blur" ) || ( extension == "VK_EXT_mesh_shader" ) || ( extension == "VK_EXT_ycbcr_2plane_444_formats" ) ||
+           ( extension == "VK_EXT_fragment_density_map2" ) || ( extension == "VK_QCOM_rotated_copy_commands" ) || ( extension == "VK_EXT_image_robustness" ) ||
+           ( extension == "VK_KHR_workgroup_memory_explicit_layout" ) || ( extension == "VK_KHR_copy_commands2" ) ||
+           ( extension == "VK_EXT_image_compression_control" ) || ( extension == "VK_EXT_attachment_feedback_loop_layout" ) ||
+           ( extension == "VK_EXT_4444_formats" ) || ( extension == "VK_EXT_device_fault" ) ||
+           ( extension == "VK_ARM_rasterization_order_attachment_access" ) || ( extension == "VK_EXT_rgba10x6_formats" )
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+           || ( extension == "VK_NV_acquire_winrt_display" )
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+           || ( extension == "VK_VALVE_mutable_descriptor_type" ) || ( extension == "VK_EXT_vertex_input_dynamic_state" ) ||
+           ( extension == "VK_EXT_physical_device_drm" ) || ( extension == "VK_EXT_device_address_binding_report" ) ||
+           ( extension == "VK_EXT_depth_clip_control" ) || ( extension == "VK_EXT_primitive_topology_list_restart" ) ||
+           ( extension == "VK_KHR_format_feature_flags2" )
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+           || ( extension == "VK_FUCHSIA_external_memory" ) || ( extension == "VK_FUCHSIA_external_semaphore" ) ||
+           ( extension == "VK_FUCHSIA_buffer_collection" )
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+           || ( extension == "VK_HUAWEI_subpass_shading" ) || ( extension == "VK_HUAWEI_invocation_mask" ) || ( extension == "VK_NV_external_memory_rdma" ) ||
+           ( extension == "VK_EXT_pipeline_properties" ) || ( extension == "VK_EXT_frame_boundary" ) ||
+           ( extension == "VK_EXT_multisampled_render_to_single_sampled" ) || ( extension == "VK_EXT_extended_dynamic_state2" ) ||
+           ( extension == "VK_EXT_color_write_enable" ) || ( extension == "VK_EXT_primitives_generated_query" ) ||
+           ( extension == "VK_KHR_ray_tracing_maintenance1" ) || ( extension == "VK_EXT_global_priority_query" ) ||
+           ( extension == "VK_EXT_image_view_min_lod" ) || ( extension == "VK_EXT_multi_draw" ) || ( extension == "VK_EXT_image_2d_view_of_3d" ) ||
+           ( extension == "VK_EXT_shader_tile_image" ) || ( extension == "VK_EXT_opacity_micromap" )
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+           || ( extension == "VK_NV_displacement_micromap" )
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+           || ( extension == "VK_EXT_load_store_op_none" ) || ( extension == "VK_HUAWEI_cluster_culling_shader" ) ||
+           ( extension == "VK_EXT_border_color_swizzle" ) || ( extension == "VK_EXT_pageable_device_local_memory" ) || ( extension == "VK_KHR_maintenance4" ) ||
+           ( extension == "VK_ARM_shader_core_properties" ) || ( extension == "VK_EXT_image_sliced_view_of_3d" ) ||
+           ( extension == "VK_VALVE_descriptor_set_host_mapping" ) || ( extension == "VK_EXT_depth_clamp_zero_one" ) ||
+           ( extension == "VK_EXT_non_seamless_cube_map" ) || ( extension == "VK_QCOM_fragment_density_map_offset" ) ||
+           ( extension == "VK_NV_copy_memory_indirect" ) || ( extension == "VK_NV_memory_decompression" ) ||
+           ( extension == "VK_NV_device_generated_commands_compute" ) || ( extension == "VK_NV_linear_color_attachment" ) ||
+           ( extension == "VK_EXT_image_compression_control_swapchain" ) || ( extension == "VK_QCOM_image_processing" ) ||
+           ( extension == "VK_EXT_nested_command_buffer" ) || ( extension == "VK_EXT_external_memory_acquire_unmodified" ) ||
+           ( extension == "VK_EXT_extended_dynamic_state3" ) || ( extension == "VK_EXT_subpass_merge_feedback" ) ||
+           ( extension == "VK_EXT_shader_module_identifier" ) || ( extension == "VK_EXT_rasterization_order_attachment_access" ) ||
+           ( extension == "VK_NV_optical_flow" ) || ( extension == "VK_EXT_legacy_dithering" ) || ( extension == "VK_EXT_pipeline_protected_access" )
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+           || ( extension == "VK_ANDROID_external_format_resolve" )
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+           || ( extension == "VK_KHR_maintenance5" ) || ( extension == "VK_KHR_ray_tracing_position_fetch" ) || ( extension == "VK_EXT_shader_object" ) ||
+           ( extension == "VK_QCOM_tile_properties" ) || ( extension == "VK_SEC_amigo_profiling" ) || ( extension == "VK_QCOM_multiview_per_view_viewports" ) ||
+           ( extension == "VK_NV_ray_tracing_invocation_reorder" ) || ( extension == "VK_NV_extended_sparse_address_space" ) ||
+           ( extension == "VK_EXT_mutable_descriptor_type" ) || ( extension == "VK_ARM_shader_core_builtins" ) ||
+           ( extension == "VK_EXT_pipeline_library_group_handles" ) || ( extension == "VK_EXT_dynamic_rendering_unused_attachments" ) ||
+           ( extension == "VK_NV_low_latency2" ) || ( extension == "VK_KHR_cooperative_matrix" ) ||
+           ( extension == "VK_QCOM_multiview_per_view_render_areas" ) || ( extension == "VK_QCOM_image_processing2" ) ||
+           ( extension == "VK_QCOM_filter_cubic_weights" ) || ( extension == "VK_QCOM_ycbcr_degamma" ) || ( extension == "VK_QCOM_filter_cubic_clamp" ) ||
+           ( extension == "VK_EXT_attachment_feedback_loop_dynamic_state" )
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+           || ( extension == "VK_QNX_external_memory_screen_buffer" )
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+           || ( extension == "VK_MSFT_layered_driver" ) || ( extension == "VK_NV_descriptor_pool_overallocation" );
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isInstanceExtension( std::string const & extension )
+  {
+    return ( extension == "VK_KHR_surface" ) || ( extension == "VK_KHR_display" )
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+           || ( extension == "VK_KHR_xlib_surface" )
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+           || ( extension == "VK_KHR_xcb_surface" )
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+           || ( extension == "VK_KHR_wayland_surface" )
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+           || ( extension == "VK_KHR_android_surface" )
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+           || ( extension == "VK_KHR_win32_surface" )
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+           || ( extension == "VK_EXT_debug_report" )
+#if defined( VK_USE_PLATFORM_GGP )
+           || ( extension == "VK_GGP_stream_descriptor_surface" )
+#endif /*VK_USE_PLATFORM_GGP*/
+           || ( extension == "VK_NV_external_memory_capabilities" ) || ( extension == "VK_KHR_get_physical_device_properties2" ) ||
+           ( extension == "VK_EXT_validation_flags" )
+#if defined( VK_USE_PLATFORM_VI_NN )
+           || ( extension == "VK_NN_vi_surface" )
+#endif /*VK_USE_PLATFORM_VI_NN*/
+           || ( extension == "VK_KHR_device_group_creation" ) || ( extension == "VK_KHR_external_memory_capabilities" ) ||
+           ( extension == "VK_KHR_external_semaphore_capabilities" ) || ( extension == "VK_EXT_direct_mode_display" )
+#if defined( VK_USE_PLATFORM_XLIB_XRANDR_EXT )
+           || ( extension == "VK_EXT_acquire_xlib_display" )
+#endif /*VK_USE_PLATFORM_XLIB_XRANDR_EXT*/
+           || ( extension == "VK_EXT_display_surface_counter" ) || ( extension == "VK_EXT_swapchain_colorspace" ) ||
+           ( extension == "VK_KHR_external_fence_capabilities" ) || ( extension == "VK_KHR_get_surface_capabilities2" ) ||
+           ( extension == "VK_KHR_get_display_properties2" )
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+           || ( extension == "VK_MVK_ios_surface" )
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+           || ( extension == "VK_MVK_macos_surface" )
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+           || ( extension == "VK_EXT_debug_utils" )
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+           || ( extension == "VK_FUCHSIA_imagepipe_surface" )
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+           || ( extension == "VK_EXT_metal_surface" )
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+           || ( extension == "VK_KHR_surface_protected_capabilities" ) || ( extension == "VK_EXT_validation_features" ) ||
+           ( extension == "VK_EXT_headless_surface" ) || ( extension == "VK_EXT_surface_maintenance1" ) || ( extension == "VK_EXT_acquire_drm_display" )
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+           || ( extension == "VK_EXT_directfb_surface" )
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+           || ( extension == "VK_QNX_screen_surface" )
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+           || ( extension == "VK_KHR_portability_enumeration" ) || ( extension == "VK_GOOGLE_surfaceless_query" ) ||
+           ( extension == "VK_LUNARG_direct_driver_loading" );
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isObsoletedExtension( std::string const & extension )
+  {
+    return ( extension == "VK_AMD_negative_viewport_height" );
+  }
+
+  VULKAN_HPP_INLINE VULKAN_HPP_CONSTEXPR_20 bool isPromotedExtension( std::string const & extension )
+  {
+    return ( extension == "VK_KHR_sampler_mirror_clamp_to_edge" ) || ( extension == "VK_EXT_debug_marker" ) || ( extension == "VK_AMD_draw_indirect_count" ) ||
+           ( extension == "VK_KHR_dynamic_rendering" ) || ( extension == "VK_KHR_multiview" ) ||
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+           ( extension == "VK_NV_win32_keyed_mutex" ) ||
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+           ( extension == "VK_KHR_get_physical_device_properties2" ) || ( extension == "VK_KHR_device_group" ) ||
+           ( extension == "VK_KHR_shader_draw_parameters" ) || ( extension == "VK_EXT_texture_compression_astc_hdr" ) ||
+           ( extension == "VK_KHR_maintenance1" ) || ( extension == "VK_KHR_device_group_creation" ) ||
+           ( extension == "VK_KHR_external_memory_capabilities" ) || ( extension == "VK_KHR_external_memory" ) ||
+           ( extension == "VK_KHR_external_semaphore_capabilities" ) || ( extension == "VK_KHR_external_semaphore" ) ||
+           ( extension == "VK_KHR_shader_float16_int8" ) || ( extension == "VK_KHR_16bit_storage" ) || ( extension == "VK_KHR_descriptor_update_template" ) ||
+           ( extension == "VK_KHR_imageless_framebuffer" ) || ( extension == "VK_KHR_create_renderpass2" ) ||
+           ( extension == "VK_KHR_external_fence_capabilities" ) || ( extension == "VK_KHR_external_fence" ) || ( extension == "VK_KHR_maintenance2" ) ||
+           ( extension == "VK_KHR_variable_pointers" ) || ( extension == "VK_KHR_dedicated_allocation" ) || ( extension == "VK_EXT_sampler_filter_minmax" ) ||
+           ( extension == "VK_KHR_storage_buffer_storage_class" ) || ( extension == "VK_EXT_inline_uniform_block" ) ||
+           ( extension == "VK_KHR_relaxed_block_layout" ) || ( extension == "VK_KHR_get_memory_requirements2" ) ||
+           ( extension == "VK_KHR_image_format_list" ) || ( extension == "VK_KHR_sampler_ycbcr_conversion" ) || ( extension == "VK_KHR_bind_memory2" ) ||
+           ( extension == "VK_EXT_descriptor_indexing" ) || ( extension == "VK_EXT_shader_viewport_index_layer" ) || ( extension == "VK_KHR_maintenance3" ) ||
+           ( extension == "VK_KHR_draw_indirect_count" ) || ( extension == "VK_EXT_global_priority" ) ||
+           ( extension == "VK_KHR_shader_subgroup_extended_types" ) || ( extension == "VK_KHR_8bit_storage" ) ||
+           ( extension == "VK_KHR_shader_atomic_int64" ) || ( extension == "VK_EXT_pipeline_creation_feedback" ) ||
+           ( extension == "VK_KHR_driver_properties" ) || ( extension == "VK_KHR_shader_float_controls" ) || ( extension == "VK_KHR_depth_stencil_resolve" ) ||
+           ( extension == "VK_NV_fragment_shader_barycentric" ) || ( extension == "VK_KHR_timeline_semaphore" ) ||
+           ( extension == "VK_KHR_vulkan_memory_model" ) || ( extension == "VK_KHR_shader_terminate_invocation" ) ||
+           ( extension == "VK_EXT_scalar_block_layout" ) || ( extension == "VK_EXT_subgroup_size_control" ) || ( extension == "VK_KHR_spirv_1_4" ) ||
+           ( extension == "VK_KHR_separate_depth_stencil_layouts" ) || ( extension == "VK_EXT_tooling_info" ) ||
+           ( extension == "VK_EXT_separate_stencil_usage" ) || ( extension == "VK_KHR_uniform_buffer_standard_layout" ) ||
+           ( extension == "VK_KHR_buffer_device_address" ) || ( extension == "VK_EXT_host_query_reset" ) || ( extension == "VK_EXT_extended_dynamic_state" ) ||
+           ( extension == "VK_EXT_shader_demote_to_helper_invocation" ) || ( extension == "VK_KHR_shader_integer_dot_product" ) ||
+           ( extension == "VK_EXT_texel_buffer_alignment" ) || ( extension == "VK_KHR_shader_non_semantic_info" ) || ( extension == "VK_EXT_private_data" ) ||
+           ( extension == "VK_EXT_pipeline_creation_cache_control" ) || ( extension == "VK_KHR_synchronization2" ) ||
+           ( extension == "VK_KHR_zero_initialize_workgroup_memory" ) || ( extension == "VK_EXT_ycbcr_2plane_444_formats" ) ||
+           ( extension == "VK_EXT_image_robustness" ) || ( extension == "VK_KHR_copy_commands2" ) || ( extension == "VK_EXT_4444_formats" ) ||
+           ( extension == "VK_ARM_rasterization_order_attachment_access" ) || ( extension == "VK_VALVE_mutable_descriptor_type" ) ||
+           ( extension == "VK_KHR_format_feature_flags2" ) || ( extension == "VK_EXT_extended_dynamic_state2" ) ||
+           ( extension == "VK_EXT_global_priority_query" ) || ( extension == "VK_KHR_maintenance4" );
+  }
+}  // namespace VULKAN_HPP_NAMESPACE
+
+#endif
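The new extension-inspection helpers are plain string comparisons, so they can be used directly while filtering enumerated extensions. A minimal application-side sketch (not part of this commit; the function and variable names are illustrative), assuming vulkan.hpp and vulkan_extension_inspection.hpp are on the include path:

#include <vulkan/vulkan.hpp>
#include <vulkan/vulkan_extension_inspection.hpp>

// Keep only recognized device extensions that have not been promoted to core.
std::vector<std::string> pickDeviceExtensions( vk::PhysicalDevice gpu )
{
  std::vector<std::string> picked;
  for ( vk::ExtensionProperties const & p : gpu.enumerateDeviceExtensionProperties() )
  {
    std::string name = p.extensionName;
    if ( vk::isDeviceExtension( name ) && !vk::isPromotedExtension( name ) )
      picked.push_back( name );
  }
  return picked;
}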

File diff suppressed because it is too large
+ 376 - 583
ThirdParty/Khronos/vulkan/vulkan_format_traits.hpp


+ 5 - 1
ThirdParty/Khronos/vulkan/vulkan_fuchsia.h

@@ -2,7 +2,7 @@
 #define VULKAN_FUCHSIA_H_ 1
 
 /*
-** Copyright 2015-2022 The Khronos Group Inc.
+** Copyright 2015-2023 The Khronos Group Inc.
 **
 ** SPDX-License-Identifier: Apache-2.0
 */
@@ -19,6 +19,7 @@ extern "C" {
 
 
 
+// VK_FUCHSIA_imagepipe_surface is a preprocessor guard. Do not pass it to API calls.
 #define VK_FUCHSIA_imagepipe_surface 1
 #define VK_FUCHSIA_IMAGEPIPE_SURFACE_SPEC_VERSION 1
 #define VK_FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME "VK_FUCHSIA_imagepipe_surface"
@@ -41,6 +42,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCreateImagePipeSurfaceFUCHSIA(
 #endif
 
 
+// VK_FUCHSIA_external_memory is a preprocessor guard. Do not pass it to API calls.
 #define VK_FUCHSIA_external_memory 1
 #define VK_FUCHSIA_EXTERNAL_MEMORY_SPEC_VERSION 1
 #define VK_FUCHSIA_EXTERNAL_MEMORY_EXTENSION_NAME "VK_FUCHSIA_external_memory"
@@ -81,6 +83,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryZirconHandlePropertiesFUCHSIA(
 #endif
 
 
+// VK_FUCHSIA_external_semaphore is a preprocessor guard. Do not pass it to API calls.
 #define VK_FUCHSIA_external_semaphore 1
 #define VK_FUCHSIA_EXTERNAL_SEMAPHORE_SPEC_VERSION 1
 #define VK_FUCHSIA_EXTERNAL_SEMAPHORE_EXTENSION_NAME "VK_FUCHSIA_external_semaphore"
@@ -115,6 +118,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreZirconHandleFUCHSIA(
 #endif
 
 
+// VK_FUCHSIA_buffer_collection is a preprocessor guard. Do not pass it to API calls.
 #define VK_FUCHSIA_buffer_collection 1
 VK_DEFINE_NON_DISPATCHABLE_HANDLE(VkBufferCollectionFUCHSIA)
 #define VK_FUCHSIA_BUFFER_COLLECTION_SPEC_VERSION 2
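The "preprocessor guard" comments added throughout the platform headers all make the same point: the unprefixed macro (here VK_FUCHSIA_imagepipe_surface) only gates compilation, while the corresponding *_EXTENSION_NAME string is what the API expects. A small illustrative sketch using the names defined above:

#if defined( VK_FUCHSIA_imagepipe_surface )  // guard macro: compile-time check only
// Pass the extension name string to vkCreateInstance, never the guard macro itself.
const char * enabledInstanceExtensions[] = { VK_FUCHSIA_IMAGEPIPE_SURFACE_EXTENSION_NAME };  // "VK_FUCHSIA_imagepipe_surface"
#endif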

File diff suppressed because it is too large
+ 158 - 153
ThirdParty/Khronos/vulkan/vulkan_funcs.hpp


+ 3 - 1
ThirdParty/Khronos/vulkan/vulkan_ggp.h

@@ -2,7 +2,7 @@
 #define VULKAN_GGP_H_ 1
 
 /*
-** Copyright 2015-2022 The Khronos Group Inc.
+** Copyright 2015-2023 The Khronos Group Inc.
 **
 ** SPDX-License-Identifier: Apache-2.0
 */
@@ -19,6 +19,7 @@ extern "C" {
 
 
 
+// VK_GGP_stream_descriptor_surface is a preprocessor guard. Do not pass it to API calls.
 #define VK_GGP_stream_descriptor_surface 1
 #define VK_GGP_STREAM_DESCRIPTOR_SURFACE_SPEC_VERSION 1
 #define VK_GGP_STREAM_DESCRIPTOR_SURFACE_EXTENSION_NAME "VK_GGP_stream_descriptor_surface"
@@ -41,6 +42,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCreateStreamDescriptorSurfaceGGP(
 #endif
 
 
+// VK_GGP_frame_token is a preprocessor guard. Do not pass it to API calls.
 #define VK_GGP_frame_token 1
 #define VK_GGP_FRAME_TOKEN_SPEC_VERSION   1
 #define VK_GGP_FRAME_TOKEN_EXTENSION_NAME "VK_GGP_frame_token"

File diff suppressed because it is too large
+ 920 - 195
ThirdParty/Khronos/vulkan/vulkan_handles.hpp


File diff suppressed because it is too large
+ 776 - 22
ThirdParty/Khronos/vulkan/vulkan_hash.hpp


+ 270 - 0
ThirdParty/Khronos/vulkan/vulkan_hpp_macros.hpp

@@ -0,0 +1,270 @@
+// Copyright 2015-2023 The Khronos Group Inc.
+//
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_HPP_MACROS_HPP
+#define VULKAN_HPP_MACROS_HPP
+
+#if defined( _MSVC_LANG )
+#  define VULKAN_HPP_CPLUSPLUS _MSVC_LANG
+#else
+#  define VULKAN_HPP_CPLUSPLUS __cplusplus
+#endif
+
+#if 201703L < VULKAN_HPP_CPLUSPLUS
+#  define VULKAN_HPP_CPP_VERSION 20
+#elif 201402L < VULKAN_HPP_CPLUSPLUS
+#  define VULKAN_HPP_CPP_VERSION 17
+#elif 201103L < VULKAN_HPP_CPLUSPLUS
+#  define VULKAN_HPP_CPP_VERSION 14
+#elif 199711L < VULKAN_HPP_CPLUSPLUS
+#  define VULKAN_HPP_CPP_VERSION 11
+#else
+#  error "vulkan.hpp needs at least c++ standard version 11"
+#endif
+
+#if defined( VULKAN_HPP_DISABLE_ENHANCED_MODE )
+#  if !defined( VULKAN_HPP_NO_SMART_HANDLE )
+#    define VULKAN_HPP_NO_SMART_HANDLE
+#  endif
+#endif
+
+#if defined( VULKAN_HPP_NO_CONSTRUCTORS )
+#  if !defined( VULKAN_HPP_NO_STRUCT_CONSTRUCTORS )
+#    define VULKAN_HPP_NO_STRUCT_CONSTRUCTORS
+#  endif
+#  if !defined( VULKAN_HPP_NO_UNION_CONSTRUCTORS )
+#    define VULKAN_HPP_NO_UNION_CONSTRUCTORS
+#  endif
+#endif
+
+#if defined( VULKAN_HPP_NO_SETTERS )
+#  if !defined( VULKAN_HPP_NO_STRUCT_SETTERS )
+#    define VULKAN_HPP_NO_STRUCT_SETTERS
+#  endif
+#  if !defined( VULKAN_HPP_NO_UNION_SETTERS )
+#    define VULKAN_HPP_NO_UNION_SETTERS
+#  endif
+#endif
+
+#if !defined( VULKAN_HPP_ASSERT )
+#  define VULKAN_HPP_ASSERT assert
+#endif
+
+#if !defined( VULKAN_HPP_ASSERT_ON_RESULT )
+#  define VULKAN_HPP_ASSERT_ON_RESULT VULKAN_HPP_ASSERT
+#endif
+
+#if !defined( VULKAN_HPP_STATIC_ASSERT )
+#  define VULKAN_HPP_STATIC_ASSERT static_assert
+#endif
+
+#if !defined( VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL )
+#  define VULKAN_HPP_ENABLE_DYNAMIC_LOADER_TOOL 1
+#endif
+
+#if !defined( __has_include )
+#  define __has_include( x ) false
+#endif
+
+#if ( 201907 <= __cpp_lib_three_way_comparison ) && __has_include( <compare> ) && !defined( VULKAN_HPP_NO_SPACESHIP_OPERATOR )
+#  define VULKAN_HPP_HAS_SPACESHIP_OPERATOR
+#endif
+
+#if ( 201803 <= __cpp_lib_span )
+#  define VULKAN_HPP_SUPPORT_SPAN
+#endif
+
+// 32-bit vulkan is not typesafe for non-dispatchable handles, so don't allow copy constructors on this platform by default.
+// To enable this feature on 32-bit platforms please define VULKAN_HPP_TYPESAFE_CONVERSION
+#if ( VK_USE_64_BIT_PTR_DEFINES == 1 )
+#  if !defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+#    define VULKAN_HPP_TYPESAFE_CONVERSION
+#  endif
+#endif
+
+#if defined( __GNUC__ )
+#  define GCC_VERSION ( __GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__ )
+#endif
+
+#if !defined( VULKAN_HPP_HAS_UNRESTRICTED_UNIONS )
+#  if defined( __clang__ )
+#    if __has_feature( cxx_unrestricted_unions )
+#      define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+#    endif
+#  elif defined( __GNUC__ )
+#    if 40600 <= GCC_VERSION
+#      define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+#    endif
+#  elif defined( _MSC_VER )
+#    if 1900 <= _MSC_VER
+#      define VULKAN_HPP_HAS_UNRESTRICTED_UNIONS
+#    endif
+#  endif
+#endif
+
+#if !defined( VULKAN_HPP_INLINE )
+#  if defined( __clang__ )
+#    if __has_attribute( always_inline )
+#      define VULKAN_HPP_INLINE __attribute__( ( always_inline ) ) __inline__
+#    else
+#      define VULKAN_HPP_INLINE inline
+#    endif
+#  elif defined( __GNUC__ )
+#    define VULKAN_HPP_INLINE __attribute__( ( always_inline ) ) __inline__
+#  elif defined( _MSC_VER )
+#    define VULKAN_HPP_INLINE inline
+#  else
+#    define VULKAN_HPP_INLINE inline
+#  endif
+#endif
+
+#if defined( VULKAN_HPP_TYPESAFE_CONVERSION )
+#  define VULKAN_HPP_TYPESAFE_EXPLICIT
+#else
+#  define VULKAN_HPP_TYPESAFE_EXPLICIT explicit
+#endif
+
+#if defined( __cpp_constexpr )
+#  define VULKAN_HPP_CONSTEXPR constexpr
+#  if 201304 <= __cpp_constexpr
+#    define VULKAN_HPP_CONSTEXPR_14 constexpr
+#  else
+#    define VULKAN_HPP_CONSTEXPR_14
+#  endif
+#  if ( 201907 <= __cpp_constexpr ) && ( !defined( __GNUC__ ) || ( 110400 < GCC_VERSION ) )
+#    define VULKAN_HPP_CONSTEXPR_20 constexpr
+#  else
+#    define VULKAN_HPP_CONSTEXPR_20
+#  endif
+#  define VULKAN_HPP_CONST_OR_CONSTEXPR constexpr
+#else
+#  define VULKAN_HPP_CONSTEXPR
+#  define VULKAN_HPP_CONSTEXPR_14
+#  define VULKAN_HPP_CONST_OR_CONSTEXPR const
+#endif
+
+#if !defined( VULKAN_HPP_CONSTEXPR_INLINE )
+#  if 201606L <= __cpp_inline_variables
+#    define VULKAN_HPP_CONSTEXPR_INLINE VULKAN_HPP_CONSTEXPR inline
+#  else
+#    define VULKAN_HPP_CONSTEXPR_INLINE VULKAN_HPP_CONSTEXPR
+#  endif
+#endif
+
+#if !defined( VULKAN_HPP_NOEXCEPT )
+#  if defined( _MSC_VER ) && ( _MSC_VER <= 1800 )
+#    define VULKAN_HPP_NOEXCEPT
+#  else
+#    define VULKAN_HPP_NOEXCEPT     noexcept
+#    define VULKAN_HPP_HAS_NOEXCEPT 1
+#    if defined( VULKAN_HPP_NO_EXCEPTIONS )
+#      define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS noexcept
+#    else
+#      define VULKAN_HPP_NOEXCEPT_WHEN_NO_EXCEPTIONS
+#    endif
+#  endif
+#endif
+
+#if 14 <= VULKAN_HPP_CPP_VERSION
+#  define VULKAN_HPP_DEPRECATED( msg ) [[deprecated( msg )]]
+#else
+#  define VULKAN_HPP_DEPRECATED( msg )
+#endif
+
+#if ( 17 <= VULKAN_HPP_CPP_VERSION ) && !defined( VULKAN_HPP_NO_NODISCARD_WARNINGS )
+#  define VULKAN_HPP_NODISCARD [[nodiscard]]
+#  if defined( VULKAN_HPP_NO_EXCEPTIONS )
+#    define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS [[nodiscard]]
+#  else
+#    define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+#  endif
+#else
+#  define VULKAN_HPP_NODISCARD
+#  define VULKAN_HPP_NODISCARD_WHEN_NO_EXCEPTIONS
+#endif
+
+#if !defined( VULKAN_HPP_NAMESPACE )
+#  define VULKAN_HPP_NAMESPACE vk
+#endif
+
+#define VULKAN_HPP_STRINGIFY2( text ) #text
+#define VULKAN_HPP_STRINGIFY( text )  VULKAN_HPP_STRINGIFY2( text )
+#define VULKAN_HPP_NAMESPACE_STRING   VULKAN_HPP_STRINGIFY( VULKAN_HPP_NAMESPACE )
+
+#if !defined( VULKAN_HPP_DISPATCH_LOADER_DYNAMIC )
+#  if defined( VK_NO_PROTOTYPES )
+#    define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 1
+#  else
+#    define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 0
+#  endif
+#endif
+
+#if !defined( VULKAN_HPP_STORAGE_API )
+#  if defined( VULKAN_HPP_STORAGE_SHARED )
+#    if defined( _MSC_VER )
+#      if defined( VULKAN_HPP_STORAGE_SHARED_EXPORT )
+#        define VULKAN_HPP_STORAGE_API __declspec( dllexport )
+#      else
+#        define VULKAN_HPP_STORAGE_API __declspec( dllimport )
+#      endif
+#    elif defined( __clang__ ) || defined( __GNUC__ )
+#      if defined( VULKAN_HPP_STORAGE_SHARED_EXPORT )
+#        define VULKAN_HPP_STORAGE_API __attribute__( ( visibility( "default" ) ) )
+#      else
+#        define VULKAN_HPP_STORAGE_API
+#      endif
+#    else
+#      define VULKAN_HPP_STORAGE_API
+#      pragma warning Unknown import / export semantics
+#    endif
+#  else
+#    define VULKAN_HPP_STORAGE_API
+#  endif
+#endif
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  class DispatchLoaderDynamic;
+}  // namespace VULKAN_HPP_NAMESPACE
+
+#if !defined( VULKAN_HPP_DEFAULT_DISPATCHER )
+#  if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1
+#    define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::defaultDispatchLoaderDynamic
+#    define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE                                             \
+      namespace VULKAN_HPP_NAMESPACE                                                                       \
+      {                                                                                                    \
+        VULKAN_HPP_STORAGE_API ::VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic defaultDispatchLoaderDynamic; \
+      }
+namespace VULKAN_HPP_NAMESPACE
+{
+  extern VULKAN_HPP_STORAGE_API VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic defaultDispatchLoaderDynamic;
+}  // namespace VULKAN_HPP_NAMESPACE
+#  else
+#    define VULKAN_HPP_DEFAULT_DISPATCHER ::VULKAN_HPP_NAMESPACE::getDispatchLoaderStatic()
+#    define VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE
+#  endif
+#endif
+
+#if !defined( VULKAN_HPP_DEFAULT_DISPATCHER_TYPE )
+#  if VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1
+#    define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderDynamic
+#  else
+#    define VULKAN_HPP_DEFAULT_DISPATCHER_TYPE ::VULKAN_HPP_NAMESPACE::DispatchLoaderStatic
+#  endif
+#endif
+
+#if defined( VULKAN_HPP_NO_DEFAULT_DISPATCHER )
+#  define VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT
+#  define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT
+#  define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT
+#else
+#  define VULKAN_HPP_DEFAULT_ARGUMENT_ASSIGNMENT         = {}
+#  define VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT = nullptr
+#  define VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT       = VULKAN_HPP_DEFAULT_DISPATCHER
+#endif
+
+#endif
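vulkan_hpp_macros.hpp is a new standalone home for the configuration macros that previously lived at the top of vulkan.hpp. The dynamic-dispatch pieces at the bottom are consumed the same way as before; a minimal application-side sketch of the usual pattern (not part of this commit), assuming the dynamic dispatcher is wanted:

#define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 1
#include <vulkan/vulkan.hpp>

// Exactly one translation unit must provide the default dispatcher storage.
VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE

int main()
{
  vk::DynamicLoader loader;
  auto getInstanceProcAddr = loader.getProcAddress<PFN_vkGetInstanceProcAddr>( "vkGetInstanceProcAddr" );
  VULKAN_HPP_DEFAULT_DISPATCHER.init( getInstanceProcAddr );  // loader-level entry points

  vk::Instance instance = vk::createInstance( vk::InstanceCreateInfo{} );
  VULKAN_HPP_DEFAULT_DISPATCHER.init( instance );  // instance-level entry points
  return 0;
}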

+ 2 - 1
ThirdParty/Khronos/vulkan/vulkan_ios.h

@@ -2,7 +2,7 @@
 #define VULKAN_IOS_H_ 1
 
 /*
-** Copyright 2015-2022 The Khronos Group Inc.
+** Copyright 2015-2023 The Khronos Group Inc.
 **
 ** SPDX-License-Identifier: Apache-2.0
 */
@@ -19,6 +19,7 @@ extern "C" {
 
 
 
+// VK_MVK_ios_surface is a preprocessor guard. Do not pass it to API calls.
 #define VK_MVK_ios_surface 1
 #define VK_MVK_IOS_SURFACE_SPEC_VERSION   3
 #define VK_MVK_IOS_SURFACE_EXTENSION_NAME "VK_MVK_ios_surface"

+ 2 - 1
ThirdParty/Khronos/vulkan/vulkan_macos.h

@@ -2,7 +2,7 @@
 #define VULKAN_MACOS_H_ 1
 
 /*
-** Copyright 2015-2022 The Khronos Group Inc.
+** Copyright 2015-2023 The Khronos Group Inc.
 **
 ** SPDX-License-Identifier: Apache-2.0
 */
@@ -19,6 +19,7 @@ extern "C" {
 
 
 
+// VK_MVK_macos_surface is a preprocessor guard. Do not pass it to API calls.
 #define VK_MVK_macos_surface 1
 #define VK_MVK_MACOS_SURFACE_SPEC_VERSION 3
 #define VK_MVK_MACOS_SURFACE_EXTENSION_NAME "VK_MVK_macos_surface"

+ 3 - 1
ThirdParty/Khronos/vulkan/vulkan_metal.h

@@ -2,7 +2,7 @@
 #define VULKAN_METAL_H_ 1
 
 /*
-** Copyright 2015-2022 The Khronos Group Inc.
+** Copyright 2015-2023 The Khronos Group Inc.
 **
 ** SPDX-License-Identifier: Apache-2.0
 */
@@ -19,6 +19,7 @@ extern "C" {
 
 
 
+// VK_EXT_metal_surface is a preprocessor guard. Do not pass it to API calls.
 #define VK_EXT_metal_surface 1
 #ifdef __OBJC__
 @class CAMetalLayer;
@@ -47,6 +48,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkCreateMetalSurfaceEXT(
 #endif
 
 
+// VK_EXT_metal_objects is a preprocessor guard. Do not pass it to API calls.
 #define VK_EXT_metal_objects 1
 #ifdef __OBJC__
 @protocol MTLDevice;

File diff suppressed because it is too large
+ 482 - 379
ThirdParty/Khronos/vulkan/vulkan_raii.hpp


+ 55 - 1
ThirdParty/Khronos/vulkan/vulkan_screen.h

@@ -2,7 +2,7 @@
 #define VULKAN_SCREEN_H_ 1
 
 /*
-** Copyright 2015-2022 The Khronos Group Inc.
+** Copyright 2015-2023 The Khronos Group Inc.
 **
 ** SPDX-License-Identifier: Apache-2.0
 */
@@ -19,6 +19,7 @@ extern "C" {
 
 
 
+// VK_QNX_screen_surface is a preprocessor guard. Do not pass it to API calls.
 #define VK_QNX_screen_surface 1
 #define VK_QNX_SCREEN_SURFACE_SPEC_VERSION 1
 #define VK_QNX_SCREEN_SURFACE_EXTENSION_NAME "VK_QNX_screen_surface"
@@ -47,6 +48,59 @@ VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceScreenPresentationSupportQNX(
     struct _screen_window*                      window);
 #endif
 
+
+// VK_QNX_external_memory_screen_buffer is a preprocessor guard. Do not pass it to API calls.
+#define VK_QNX_external_memory_screen_buffer 1
+#define VK_QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_SPEC_VERSION 1
+#define VK_QNX_EXTERNAL_MEMORY_SCREEN_BUFFER_EXTENSION_NAME "VK_QNX_external_memory_screen_buffer"
+typedef struct VkScreenBufferPropertiesQNX {
+    VkStructureType    sType;
+    void*              pNext;
+    VkDeviceSize       allocationSize;
+    uint32_t           memoryTypeBits;
+} VkScreenBufferPropertiesQNX;
+
+typedef struct VkScreenBufferFormatPropertiesQNX {
+    VkStructureType                  sType;
+    void*                            pNext;
+    VkFormat                         format;
+    uint64_t                         externalFormat;
+    uint64_t                         screenUsage;
+    VkFormatFeatureFlags             formatFeatures;
+    VkComponentMapping               samplerYcbcrConversionComponents;
+    VkSamplerYcbcrModelConversion    suggestedYcbcrModel;
+    VkSamplerYcbcrRange              suggestedYcbcrRange;
+    VkChromaLocation                 suggestedXChromaOffset;
+    VkChromaLocation                 suggestedYChromaOffset;
+} VkScreenBufferFormatPropertiesQNX;
+
+typedef struct VkImportScreenBufferInfoQNX {
+    VkStructureType           sType;
+    const void*               pNext;
+    struct _screen_buffer*    buffer;
+} VkImportScreenBufferInfoQNX;
+
+typedef struct VkExternalFormatQNX {
+    VkStructureType    sType;
+    void*              pNext;
+    uint64_t           externalFormat;
+} VkExternalFormatQNX;
+
+typedef struct VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX {
+    VkStructureType    sType;
+    void*              pNext;
+    VkBool32           screenBufferImport;
+} VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX;
+
+typedef VkResult (VKAPI_PTR *PFN_vkGetScreenBufferPropertiesQNX)(VkDevice device, const struct _screen_buffer* buffer, VkScreenBufferPropertiesQNX* pProperties);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkGetScreenBufferPropertiesQNX(
+    VkDevice                                    device,
+    const struct _screen_buffer*                buffer,
+    VkScreenBufferPropertiesQNX*                pProperties);
+#endif
+
 #ifdef __cplusplus
 }
 #endif
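vulkan_screen.h additionally picks up VK_QNX_external_memory_screen_buffer. A hypothetical usage sketch (application code, only meaningful on QNX with VK_USE_PLATFORM_SCREEN_QNX defined and the extension enabled; the `device` and `screenBuffer` variables are assumed to exist):

// Query the memory requirements of a _screen_buffer before importing it.
VkScreenBufferPropertiesQNX props = {};
props.sType = VK_STRUCTURE_TYPE_SCREEN_BUFFER_PROPERTIES_QNX;  // sType token assumed from the usual naming scheme
if ( vkGetScreenBufferPropertiesQNX( device, screenBuffer, &props ) == VK_SUCCESS )
{
  // props.allocationSize and props.memoryTypeBits feed a VkMemoryAllocateInfo whose
  // pNext chain carries a VkImportScreenBufferInfoQNX referencing the same buffer.
}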

+ 988 - 0
ThirdParty/Khronos/vulkan/vulkan_shared.hpp

@@ -0,0 +1,988 @@
+// Copyright 2015-2023 The Khronos Group Inc.
+//
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_SHARED_HPP
+#define VULKAN_SHARED_HPP
+
+#include <atomic>  // std::atomic_size_t
+#include <vulkan/vulkan.hpp>
+
+namespace VULKAN_HPP_NAMESPACE
+{
+#if !defined( VULKAN_HPP_NO_SMART_HANDLE )
+
+  template <typename HandleType>
+  class SharedHandleTraits;
+
+  class NoDestructor
+  {
+  };
+
+  template <typename HandleType, typename = void>
+  struct HasDestructorType : std::false_type
+  {
+  };
+
+  template <typename HandleType>
+  struct HasDestructorType<HandleType, decltype( (void)typename SharedHandleTraits<HandleType>::DestructorType() )> : std::true_type
+  {
+  };
+
+  template <typename HandleType, typename Enable = void>
+  struct GetDestructorType
+  {
+    using type = NoDestructor;
+  };
+
+  template <typename HandleType>
+  struct GetDestructorType<HandleType, typename std::enable_if<HasDestructorType<HandleType>::value>::type>
+  {
+    using type = typename SharedHandleTraits<HandleType>::DestructorType;
+  };
+
+  template <class HandleType>
+  using DestructorTypeOf = typename GetDestructorType<HandleType>::type;
+
+  template <class HandleType>
+  struct HasDestructor : std::integral_constant<bool, !std::is_same<DestructorTypeOf<HandleType>, NoDestructor>::value>
+  {
+  };
+
+  //=====================================================================================================================
+
+  template <typename HandleType>
+  class SharedHandle;
+
+  template <typename DestructorType, typename Deleter>
+  struct SharedHeader
+  {
+    SharedHeader( SharedHandle<DestructorType> parent, Deleter deleter = Deleter() ) VULKAN_HPP_NOEXCEPT
+      : parent( std::move( parent ) )
+      , deleter( std::move( deleter ) )
+    {
+    }
+
+    SharedHandle<DestructorType> parent;
+    Deleter                      deleter;
+  };
+
+  template <typename Deleter>
+  struct SharedHeader<NoDestructor, Deleter>
+  {
+    SharedHeader( Deleter deleter = Deleter() ) VULKAN_HPP_NOEXCEPT : deleter( std::move( deleter ) ) {}
+
+    Deleter deleter;
+  };
+
+  //=====================================================================================================================
+
+  template <typename HeaderType>
+  class ReferenceCounter
+  {
+  public:
+    template <typename... Args>
+    ReferenceCounter( Args &&... control_args ) : m_header( std::forward<Args>( control_args )... )
+    {
+    }
+    ReferenceCounter( const ReferenceCounter & ) = delete;
+    ReferenceCounter & operator=( const ReferenceCounter & ) = delete;
+
+  public:
+    size_t addRef() VULKAN_HPP_NOEXCEPT
+    {
+      // Relaxed memory order is sufficient since this does not impose any ordering on other operations
+      return m_ref_cnt.fetch_add( 1, std::memory_order_relaxed );
+    }
+
+    size_t release() VULKAN_HPP_NOEXCEPT
+    {
+      // A release memory order to ensure that all releases are ordered
+      return m_ref_cnt.fetch_sub( 1, std::memory_order_release );
+    }
+
+  public:
+    std::atomic_size_t m_ref_cnt{ 1 };
+    HeaderType         m_header{};
+  };
+
+  //=====================================================================================================================
+
+  template <typename HandleType, typename HeaderType, typename ForwardType = SharedHandle<HandleType>>
+  class SharedHandleBase
+  {
+  public:
+    SharedHandleBase() = default;
+
+    template <typename... Args>
+    SharedHandleBase( HandleType handle, Args &&... control_args )
+      : m_control( new ReferenceCounter<HeaderType>( std::forward<Args>( control_args )... ) ), m_handle( handle )
+    {
+    }
+
+    SharedHandleBase( const SharedHandleBase & o ) VULKAN_HPP_NOEXCEPT
+    {
+      o.addRef();
+      m_handle  = o.m_handle;
+      m_control = o.m_control;
+    }
+
+    SharedHandleBase( SharedHandleBase && o ) VULKAN_HPP_NOEXCEPT
+      : m_control( o.m_control )
+      , m_handle( o.m_handle )
+    {
+      o.m_handle  = nullptr;
+      o.m_control = nullptr;
+    }
+
+    SharedHandleBase & operator=( const SharedHandleBase & o ) VULKAN_HPP_NOEXCEPT
+    {
+      SharedHandleBase( o ).swap( *this );
+      return *this;
+    }
+
+    SharedHandleBase & operator=( SharedHandleBase && o ) VULKAN_HPP_NOEXCEPT
+    {
+      SharedHandleBase( std::move( o ) ).swap( *this );
+      return *this;
+    }
+
+    ~SharedHandleBase()
+    {
+      // only this function owns the last reference to the control block
+      // the same principle is used in the default deleter of std::shared_ptr
+      if ( m_control && ( m_control->release() == 1 ) )
+      {
+        // noop in x86, but does thread synchronization in ARM
+        // it is required to ensure that last thread is getting to destroy the control block
+        // by ordering all atomic operations before this fence
+        std::atomic_thread_fence( std::memory_order_acquire );
+        ForwardType::internalDestroy( getHeader(), m_handle );
+        delete m_control;
+      }
+    }
+
+  public:
+    HandleType get() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_handle;
+    }
+
+    HandleType operator*() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_handle;
+    }
+
+    explicit operator bool() const VULKAN_HPP_NOEXCEPT
+    {
+      return bool( m_handle );
+    }
+
+    const HandleType * operator->() const VULKAN_HPP_NOEXCEPT
+    {
+      return &m_handle;
+    }
+
+    HandleType * operator->() VULKAN_HPP_NOEXCEPT
+    {
+      return &m_handle;
+    }
+
+    void reset() VULKAN_HPP_NOEXCEPT
+    {
+      SharedHandleBase().swap( *this );
+    }
+
+    void swap( SharedHandleBase & o ) VULKAN_HPP_NOEXCEPT
+    {
+      std::swap( m_handle, o.m_handle );
+      std::swap( m_control, o.m_control );
+    }
+
+    template <typename T = HandleType>
+    typename std::enable_if<HasDestructor<T>::value, const SharedHandle<DestructorTypeOf<HandleType>> &>::type getDestructorType() const VULKAN_HPP_NOEXCEPT
+    {
+      return getHeader().parent;
+    }
+
+  protected:
+    template <typename T = HandleType>
+    static typename std::enable_if<!HasDestructor<T>::value, void>::type internalDestroy( const HeaderType & control, HandleType handle ) VULKAN_HPP_NOEXCEPT
+    {
+      control.deleter.destroy( handle );
+    }
+
+    template <typename T = HandleType>
+    static typename std::enable_if<HasDestructor<T>::value, void>::type internalDestroy( const HeaderType & control, HandleType handle ) VULKAN_HPP_NOEXCEPT
+    {
+      control.deleter.destroy( control.parent.get(), handle );
+    }
+
+    const HeaderType & getHeader() const VULKAN_HPP_NOEXCEPT
+    {
+      return m_control->m_header;
+    }
+
+  private:
+    void addRef() const VULKAN_HPP_NOEXCEPT
+    {
+      if ( m_control )
+        m_control->addRef();
+    }
+
+  protected:
+    ReferenceCounter<HeaderType> * m_control = nullptr;
+    HandleType                     m_handle{};
+  };
+
+  template <typename HandleType>
+  class SharedHandle : public SharedHandleBase<HandleType, SharedHeader<DestructorTypeOf<HandleType>, typename SharedHandleTraits<HandleType>::deleter>>
+  {
+  private:
+    using BaseType    = SharedHandleBase<HandleType, SharedHeader<DestructorTypeOf<HandleType>, typename SharedHandleTraits<HandleType>::deleter>>;
+    using DeleterType = typename SharedHandleTraits<HandleType>::deleter;
+    friend BaseType;
+
+  public:
+    SharedHandle() = default;
+
+    template <typename T = HandleType, typename = typename std::enable_if<HasDestructor<T>::value>::type>
+    explicit SharedHandle( HandleType handle, SharedHandle<DestructorTypeOf<HandleType>> parent, DeleterType deleter = DeleterType() ) VULKAN_HPP_NOEXCEPT
+      : BaseType( handle, std::move( parent ), std::move( deleter ) )
+    {
+    }
+
+    template <typename T = HandleType, typename = typename std::enable_if<!HasDestructor<T>::value>::type>
+    explicit SharedHandle( HandleType handle, DeleterType deleter = DeleterType() ) VULKAN_HPP_NOEXCEPT : BaseType( handle, std::move( deleter ) )
+    {
+    }
+
+  protected:
+    using BaseType::internalDestroy;
+  };
+
+  template <typename HandleType>
+  class SharedHandleTraits;
+
+// Silence the function cast warnings.
+#  if defined( __GNUC__ ) && !defined( __clang__ ) && !defined( __INTEL_COMPILER )
+#    pragma GCC diagnostic push
+#    pragma GCC diagnostic ignored "-Wcast-function-type"
+#  endif
+
+  template <typename HandleType>
+  class ObjectDestroyShared
+  {
+  public:
+    using DestructorType = typename SharedHandleTraits<HandleType>::DestructorType;
+
+    template <class Dispatcher>
+    using DestroyFunctionPointerType =
+      typename std::conditional<HasDestructor<HandleType>::value,
+                                void ( DestructorType::* )( HandleType, const AllocationCallbacks *, const Dispatcher & ) const,
+                                void ( HandleType::* )( const AllocationCallbacks *, const Dispatcher & ) const>::type;
+
+    using SelectorType = typename std::conditional<HasDestructor<HandleType>::value, DestructorType, HandleType>::type;
+
+    template <typename Dispatcher = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ObjectDestroyShared( Optional<const AllocationCallbacks> allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                         const Dispatcher & dispatch                             VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT )
+      : m_destroy( reinterpret_cast<decltype( m_destroy )>( static_cast<DestroyFunctionPointerType<Dispatcher>>( &SelectorType::destroy ) ) )
+      , m_dispatch( &dispatch )
+      , m_allocationCallbacks( allocationCallbacks )
+    {
+    }
+
+  public:
+    template <typename T = HandleType>
+    typename std::enable_if<HasDestructor<T>::value, void>::type destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( m_destroy && m_dispatch );
+      ( parent.*m_destroy )( handle, m_allocationCallbacks, *m_dispatch );
+    }
+
+    template <typename T = HandleType>
+    typename std::enable_if<!HasDestructor<T>::value, void>::type destroy( HandleType handle ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( m_destroy && m_dispatch );
+      ( handle.*m_destroy )( m_allocationCallbacks, *m_dispatch );
+    }
+
+  private:
+    DestroyFunctionPointerType<DispatchLoaderBase> m_destroy             = nullptr;
+    const DispatchLoaderBase *                     m_dispatch            = nullptr;
+    Optional<const AllocationCallbacks>            m_allocationCallbacks = nullptr;
+  };
+
+  template <typename HandleType>
+  class ObjectFreeShared
+  {
+  public:
+    using DestructorType = typename SharedHandleTraits<HandleType>::DestructorType;
+
+    template <class Dispatcher>
+    using DestroyFunctionPointerType = void ( DestructorType::* )( HandleType, const AllocationCallbacks *, const Dispatcher & ) const;
+
+    template <class Dispatcher = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ObjectFreeShared( Optional<const AllocationCallbacks> allocationCallbacks VULKAN_HPP_DEFAULT_ARGUMENT_NULLPTR_ASSIGNMENT,
+                      const Dispatcher & dispatch                             VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT )
+      : m_destroy( reinterpret_cast<decltype( m_destroy )>( static_cast<DestroyFunctionPointerType<Dispatcher>>( &DestructorType::free ) ) )
+      , m_dispatch( &dispatch )
+      , m_allocationCallbacks( allocationCallbacks )
+    {
+    }
+
+  public:
+    void destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( m_destroy && m_dispatch );
+      ( parent.*m_destroy )( handle, m_allocationCallbacks, *m_dispatch );
+    }
+
+  private:
+    DestroyFunctionPointerType<DispatchLoaderBase> m_destroy             = nullptr;
+    const DispatchLoaderBase *                     m_dispatch            = nullptr;
+    Optional<const AllocationCallbacks>            m_allocationCallbacks = nullptr;
+  };
+
+  template <typename HandleType>
+  class ObjectReleaseShared
+  {
+  public:
+    using DestructorType = typename SharedHandleTraits<HandleType>::DestructorType;
+
+    template <class Dispatcher>
+    using DestroyFunctionPointerType = void ( DestructorType::* )( HandleType, const Dispatcher & ) const;
+
+    template <class Dispatcher = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    ObjectReleaseShared( const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT )
+      : m_destroy( reinterpret_cast<decltype( m_destroy )>( static_cast<DestroyFunctionPointerType<Dispatcher>>( &DestructorType::release ) ) )
+      , m_dispatch( &dispatch )
+    {
+    }
+
+  public:
+    void destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( m_destroy && m_dispatch );
+      ( parent.*m_destroy )( handle, *m_dispatch );
+    }
+
+  private:
+    DestroyFunctionPointerType<DispatchLoaderBase> m_destroy  = nullptr;
+    const DispatchLoaderBase *                     m_dispatch = nullptr;
+  };
+
+  template <typename HandleType, typename PoolType>
+  class PoolFreeShared
+  {
+  public:
+    using DestructorType = typename SharedHandleTraits<HandleType>::DestructorType;
+
+    template <class Dispatcher>
+    using ReturnType = decltype( std::declval<DestructorType>().free( PoolType(), 0u, nullptr, Dispatcher() ) );
+
+    template <class Dispatcher>
+    using DestroyFunctionPointerType = ReturnType<Dispatcher> ( DestructorType::* )( PoolType, uint32_t, const HandleType *, const Dispatcher & ) const;
+
+    PoolFreeShared() = default;
+
+    template <class Dispatcher = VULKAN_HPP_DEFAULT_DISPATCHER_TYPE>
+    PoolFreeShared( SharedHandle<PoolType> pool, const Dispatcher & dispatch VULKAN_HPP_DEFAULT_DISPATCHER_ASSIGNMENT )
+      : m_destroy( reinterpret_cast<decltype( m_destroy )>( static_cast<DestroyFunctionPointerType<Dispatcher>>( &DestructorType::free ) ) )
+      , m_dispatch( &dispatch )
+      , m_pool( std::move( pool ) )
+    {
+    }
+
+  public:
+    void destroy( DestructorType parent, HandleType handle ) const VULKAN_HPP_NOEXCEPT
+    {
+      VULKAN_HPP_ASSERT( m_destroy && m_dispatch );
+      ( parent.*m_destroy )( m_pool.get(), 1u, &handle, *m_dispatch );
+    }
+
+  private:
+    DestroyFunctionPointerType<DispatchLoaderBase> m_destroy  = nullptr;
+    const DispatchLoaderBase *                     m_dispatch = nullptr;
+    SharedHandle<PoolType>                         m_pool{};
+  };
+
+#  if defined( __GNUC__ ) && !defined( __clang__ ) && !defined( __INTEL_COMPILER )
+#    pragma GCC diagnostic pop
+#  endif
+
+  //======================
+  //=== SHARED HANDLEs ===
+  //======================
+
+  //=== VK_VERSION_1_0 ===
+  template <>
+  class SharedHandleTraits<Instance>
+  {
+  public:
+    using DestructorType = NoDestructor;
+    using deleter        = ObjectDestroyShared<Instance>;
+  };
+  using SharedInstance = SharedHandle<Instance>;
+  template <>
+  class SharedHandleTraits<Device>
+  {
+  public:
+    using DestructorType = NoDestructor;
+    using deleter        = ObjectDestroyShared<Device>;
+  };
+  using SharedDevice = SharedHandle<Device>;
+  template <>
+  class SharedHandleTraits<DeviceMemory>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectFreeShared<DeviceMemory>;
+  };
+  using SharedDeviceMemory = SharedHandle<DeviceMemory>;
+  template <>
+  class SharedHandleTraits<Fence>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<Fence>;
+  };
+  using SharedFence = SharedHandle<Fence>;
+  template <>
+  class SharedHandleTraits<Semaphore>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<Semaphore>;
+  };
+  using SharedSemaphore = SharedHandle<Semaphore>;
+  template <>
+  class SharedHandleTraits<Event>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<Event>;
+  };
+  using SharedEvent = SharedHandle<Event>;
+  template <>
+  class SharedHandleTraits<QueryPool>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<QueryPool>;
+  };
+  using SharedQueryPool = SharedHandle<QueryPool>;
+  template <>
+  class SharedHandleTraits<Buffer>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<Buffer>;
+  };
+  using SharedBuffer = SharedHandle<Buffer>;
+  template <>
+  class SharedHandleTraits<BufferView>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<BufferView>;
+  };
+  using SharedBufferView = SharedHandle<BufferView>;
+  template <>
+  class SharedHandleTraits<Image>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<Image>;
+  };
+  using SharedImage = SharedHandle<Image>;
+  template <>
+  class SharedHandleTraits<ImageView>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<ImageView>;
+  };
+  using SharedImageView = SharedHandle<ImageView>;
+  template <>
+  class SharedHandleTraits<ShaderModule>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<ShaderModule>;
+  };
+  using SharedShaderModule = SharedHandle<ShaderModule>;
+  template <>
+  class SharedHandleTraits<PipelineCache>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<PipelineCache>;
+  };
+  using SharedPipelineCache = SharedHandle<PipelineCache>;
+  template <>
+  class SharedHandleTraits<Pipeline>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<Pipeline>;
+  };
+  using SharedPipeline = SharedHandle<Pipeline>;
+  template <>
+  class SharedHandleTraits<PipelineLayout>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<PipelineLayout>;
+  };
+  using SharedPipelineLayout = SharedHandle<PipelineLayout>;
+  template <>
+  class SharedHandleTraits<Sampler>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<Sampler>;
+  };
+  using SharedSampler = SharedHandle<Sampler>;
+  template <>
+  class SharedHandleTraits<DescriptorPool>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<DescriptorPool>;
+  };
+  using SharedDescriptorPool = SharedHandle<DescriptorPool>;
+  template <>
+  class SharedHandleTraits<DescriptorSet>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = PoolFreeShared<DescriptorSet, DescriptorPool>;
+  };
+  using SharedDescriptorSet = SharedHandle<DescriptorSet>;
+  template <>
+  class SharedHandleTraits<DescriptorSetLayout>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<DescriptorSetLayout>;
+  };
+  using SharedDescriptorSetLayout = SharedHandle<DescriptorSetLayout>;
+  template <>
+  class SharedHandleTraits<Framebuffer>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<Framebuffer>;
+  };
+  using SharedFramebuffer = SharedHandle<Framebuffer>;
+  template <>
+  class SharedHandleTraits<RenderPass>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<RenderPass>;
+  };
+  using SharedRenderPass = SharedHandle<RenderPass>;
+  template <>
+  class SharedHandleTraits<CommandPool>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<CommandPool>;
+  };
+  using SharedCommandPool = SharedHandle<CommandPool>;
+  template <>
+  class SharedHandleTraits<CommandBuffer>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = PoolFreeShared<CommandBuffer, CommandPool>;
+  };
+  using SharedCommandBuffer = SharedHandle<CommandBuffer>;
+
+  //=== VK_VERSION_1_1 ===
+  template <>
+  class SharedHandleTraits<SamplerYcbcrConversion>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<SamplerYcbcrConversion>;
+  };
+  using SharedSamplerYcbcrConversion    = SharedHandle<SamplerYcbcrConversion>;
+  using SharedSamplerYcbcrConversionKHR = SharedHandle<SamplerYcbcrConversion>;
+  template <>
+  class SharedHandleTraits<DescriptorUpdateTemplate>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<DescriptorUpdateTemplate>;
+  };
+  using SharedDescriptorUpdateTemplate    = SharedHandle<DescriptorUpdateTemplate>;
+  using SharedDescriptorUpdateTemplateKHR = SharedHandle<DescriptorUpdateTemplate>;
+  //=== VK_VERSION_1_3 ===
+  template <>
+  class SharedHandleTraits<PrivateDataSlot>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<PrivateDataSlot>;
+  };
+  using SharedPrivateDataSlot    = SharedHandle<PrivateDataSlot>;
+  using SharedPrivateDataSlotEXT = SharedHandle<PrivateDataSlot>;
+  //=== VK_KHR_surface ===
+  template <>
+  class SharedHandleTraits<SurfaceKHR>
+  {
+  public:
+    using DestructorType = Instance;
+    using deleter        = ObjectDestroyShared<SurfaceKHR>;
+  };
+  using SharedSurfaceKHR = SharedHandle<SurfaceKHR>;
+
+  //=== VK_KHR_swapchain ===
+  template <>
+  class SharedHandleTraits<SwapchainKHR>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<SwapchainKHR>;
+  };
+  using SharedSwapchainKHR = SharedHandle<SwapchainKHR>;
+
+  //=== VK_EXT_debug_report ===
+  template <>
+  class SharedHandleTraits<DebugReportCallbackEXT>
+  {
+  public:
+    using DestructorType = Instance;
+    using deleter        = ObjectDestroyShared<DebugReportCallbackEXT>;
+  };
+  using SharedDebugReportCallbackEXT = SharedHandle<DebugReportCallbackEXT>;
+
+  //=== VK_KHR_video_queue ===
+  template <>
+  class SharedHandleTraits<VideoSessionKHR>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<VideoSessionKHR>;
+  };
+  using SharedVideoSessionKHR = SharedHandle<VideoSessionKHR>;
+  template <>
+  class SharedHandleTraits<VideoSessionParametersKHR>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<VideoSessionParametersKHR>;
+  };
+  using SharedVideoSessionParametersKHR = SharedHandle<VideoSessionParametersKHR>;
+
+  //=== VK_NVX_binary_import ===
+  template <>
+  class SharedHandleTraits<CuModuleNVX>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<CuModuleNVX>;
+  };
+  using SharedCuModuleNVX = SharedHandle<CuModuleNVX>;
+  template <>
+  class SharedHandleTraits<CuFunctionNVX>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<CuFunctionNVX>;
+  };
+  using SharedCuFunctionNVX = SharedHandle<CuFunctionNVX>;
+
+  //=== VK_EXT_debug_utils ===
+  template <>
+  class SharedHandleTraits<DebugUtilsMessengerEXT>
+  {
+  public:
+    using DestructorType = Instance;
+    using deleter        = ObjectDestroyShared<DebugUtilsMessengerEXT>;
+  };
+  using SharedDebugUtilsMessengerEXT = SharedHandle<DebugUtilsMessengerEXT>;
+
+  //=== VK_KHR_acceleration_structure ===
+  template <>
+  class SharedHandleTraits<AccelerationStructureKHR>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<AccelerationStructureKHR>;
+  };
+  using SharedAccelerationStructureKHR = SharedHandle<AccelerationStructureKHR>;
+
+  //=== VK_EXT_validation_cache ===
+  template <>
+  class SharedHandleTraits<ValidationCacheEXT>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<ValidationCacheEXT>;
+  };
+  using SharedValidationCacheEXT = SharedHandle<ValidationCacheEXT>;
+
+  //=== VK_NV_ray_tracing ===
+  template <>
+  class SharedHandleTraits<AccelerationStructureNV>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<AccelerationStructureNV>;
+  };
+  using SharedAccelerationStructureNV = SharedHandle<AccelerationStructureNV>;
+
+  //=== VK_KHR_deferred_host_operations ===
+  template <>
+  class SharedHandleTraits<DeferredOperationKHR>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<DeferredOperationKHR>;
+  };
+  using SharedDeferredOperationKHR = SharedHandle<DeferredOperationKHR>;
+
+  //=== VK_NV_device_generated_commands ===
+  template <>
+  class SharedHandleTraits<IndirectCommandsLayoutNV>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<IndirectCommandsLayoutNV>;
+  };
+  using SharedIndirectCommandsLayoutNV = SharedHandle<IndirectCommandsLayoutNV>;
+
+#  if defined( VK_USE_PLATFORM_FUCHSIA )
+  //=== VK_FUCHSIA_buffer_collection ===
+  template <>
+  class SharedHandleTraits<BufferCollectionFUCHSIA>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<BufferCollectionFUCHSIA>;
+  };
+  using SharedBufferCollectionFUCHSIA = SharedHandle<BufferCollectionFUCHSIA>;
+#  endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+  //=== VK_EXT_opacity_micromap ===
+  template <>
+  class SharedHandleTraits<MicromapEXT>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<MicromapEXT>;
+  };
+  using SharedMicromapEXT = SharedHandle<MicromapEXT>;
+
+  //=== VK_NV_optical_flow ===
+  template <>
+  class SharedHandleTraits<OpticalFlowSessionNV>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<OpticalFlowSessionNV>;
+  };
+  using SharedOpticalFlowSessionNV = SharedHandle<OpticalFlowSessionNV>;
+
+  //=== VK_EXT_shader_object ===
+  template <>
+  class SharedHandleTraits<ShaderEXT>
+  {
+  public:
+    using DestructorType = Device;
+    using deleter        = ObjectDestroyShared<ShaderEXT>;
+  };
+  using SharedShaderEXT = SharedHandle<ShaderEXT>;
+
+  enum class SwapchainOwns
+  {
+    no,
+    yes,
+  };
+
+  struct ImageHeader : SharedHeader<DestructorTypeOf<VULKAN_HPP_NAMESPACE::Image>, typename SharedHandleTraits<VULKAN_HPP_NAMESPACE::Image>::deleter>
+  {
+    ImageHeader(
+      SharedHandle<DestructorTypeOf<VULKAN_HPP_NAMESPACE::Image>>       parent,
+      typename SharedHandleTraits<VULKAN_HPP_NAMESPACE::Image>::deleter deleter        = typename SharedHandleTraits<VULKAN_HPP_NAMESPACE::Image>::deleter(),
+      SwapchainOwns                                                     swapchainOwned = SwapchainOwns::no ) VULKAN_HPP_NOEXCEPT
+      : SharedHeader<DestructorTypeOf<VULKAN_HPP_NAMESPACE::Image>, typename SharedHandleTraits<VULKAN_HPP_NAMESPACE::Image>::deleter>( std::move( parent ),
+                                                                                                                                        std::move( deleter ) )
+      , swapchainOwned( swapchainOwned )
+    {
+    }
+
+    SwapchainOwns swapchainOwned = SwapchainOwns::no;
+  };
+
+  template <>
+  class SharedHandle<VULKAN_HPP_NAMESPACE::Image> : public SharedHandleBase<VULKAN_HPP_NAMESPACE::Image, ImageHeader>
+  {
+    using BaseType    = SharedHandleBase<VULKAN_HPP_NAMESPACE::Image, ImageHeader>;
+    using DeleterType = typename SharedHandleTraits<VULKAN_HPP_NAMESPACE::Image>::deleter;
+    friend BaseType;
+
+  public:
+    SharedHandle() = default;
+
+    explicit SharedHandle( VULKAN_HPP_NAMESPACE::Image                                 handle,
+                           SharedHandle<DestructorTypeOf<VULKAN_HPP_NAMESPACE::Image>> parent,
+                           SwapchainOwns                                               swapchain_owned = SwapchainOwns::no,
+                           DeleterType                                                 deleter         = DeleterType() ) VULKAN_HPP_NOEXCEPT
+      : BaseType( handle, std::move( parent ), std::move( deleter ), swapchain_owned )
+    {
+    }
+
+  protected:
+    static void internalDestroy( const ImageHeader & control, VULKAN_HPP_NAMESPACE::Image handle ) VULKAN_HPP_NOEXCEPT
+    {
+      if ( control.swapchainOwned == SwapchainOwns::no )
+      {
+        control.deleter.destroy( control.parent.get(), handle );
+      }
+    }
+  };
+
+  struct SwapchainHeader
+  {
+    SwapchainHeader( SharedHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR>                           surface,
+                     SharedHandle<DestructorTypeOf<VULKAN_HPP_NAMESPACE::SwapchainKHR>>       parent,
+                     typename SharedHandleTraits<VULKAN_HPP_NAMESPACE::SwapchainKHR>::deleter deleter =
+                       typename SharedHandleTraits<VULKAN_HPP_NAMESPACE::SwapchainKHR>::deleter() ) VULKAN_HPP_NOEXCEPT
+      : surface( std::move( surface ) )
+      , parent( std::move( parent ) )
+      , deleter( std::move( deleter ) )
+    {
+    }
+
+    SharedHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR>                           surface{};
+    SharedHandle<DestructorTypeOf<VULKAN_HPP_NAMESPACE::SwapchainKHR>>       parent{};
+    typename SharedHandleTraits<VULKAN_HPP_NAMESPACE::SwapchainKHR>::deleter deleter{};
+  };
+
+  template <>
+  class SharedHandle<VULKAN_HPP_NAMESPACE::SwapchainKHR> : public SharedHandleBase<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainHeader>
+  {
+    using BaseType    = SharedHandleBase<VULKAN_HPP_NAMESPACE::SwapchainKHR, SwapchainHeader>;
+    using DeleterType = typename SharedHandleTraits<VULKAN_HPP_NAMESPACE::SwapchainKHR>::deleter;
+    friend BaseType;
+
+  public:
+    SharedHandle() = default;
+
+    explicit SharedHandle( VULKAN_HPP_NAMESPACE::SwapchainKHR                                 handle,
+                           SharedHandle<DestructorTypeOf<VULKAN_HPP_NAMESPACE::SwapchainKHR>> parent,
+                           SharedHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR>                     surface,
+                           DeleterType                                                        deleter = DeleterType() ) VULKAN_HPP_NOEXCEPT
+      : BaseType( handle, std::move( surface ), std::move( parent ), std::move( deleter ) )
+    {
+    }
+
+  public:
+    const SharedHandle<VULKAN_HPP_NAMESPACE::SurfaceKHR> & getSurface() const VULKAN_HPP_NOEXCEPT
+    {
+      return getHeader().surface;
+    }
+
+  protected:
+    using BaseType::internalDestroy;
+  };
+
+  template <typename HandleType, typename DestructorType>
+  class SharedHandleBaseNoDestroy : public SharedHandleBase<HandleType, DestructorType>
+  {
+  public:
+    using SharedHandleBase<HandleType, DestructorType>::SharedHandleBase;
+
+    const DestructorType & getDestructorType() const VULKAN_HPP_NOEXCEPT
+    {
+      return SharedHandleBase<HandleType, DestructorType>::getHeader();
+    }
+
+  protected:
+    static void internalDestroy( const DestructorType &, HandleType ) VULKAN_HPP_NOEXCEPT {}
+  };
+
+  //=== VK_VERSION_1_0 ===
+
+  template <>
+  class SharedHandle<PhysicalDevice> : public SharedHandleBaseNoDestroy<PhysicalDevice, SharedInstance>
+  {
+    friend SharedHandleBase<PhysicalDevice, SharedInstance>;
+
+  public:
+    SharedHandle() = default;
+    explicit SharedHandle( PhysicalDevice handle, SharedInstance parent ) noexcept
+      : SharedHandleBaseNoDestroy<PhysicalDevice, SharedInstance>( handle, std::move( parent ) )
+    {
+    }
+  };
+  using SharedPhysicalDevice = SharedHandle<PhysicalDevice>;
+
+  template <>
+  class SharedHandle<Queue> : public SharedHandleBaseNoDestroy<Queue, SharedDevice>
+  {
+    friend SharedHandleBase<Queue, SharedDevice>;
+
+  public:
+    SharedHandle() = default;
+    explicit SharedHandle( Queue handle, SharedDevice parent ) noexcept : SharedHandleBaseNoDestroy<Queue, SharedDevice>( handle, std::move( parent ) ) {}
+  };
+  using SharedQueue = SharedHandle<Queue>;
+
+  //=== VK_KHR_display ===
+
+  template <>
+  class SharedHandle<DisplayKHR> : public SharedHandleBaseNoDestroy<DisplayKHR, SharedPhysicalDevice>
+  {
+    friend SharedHandleBase<DisplayKHR, SharedPhysicalDevice>;
+
+  public:
+    SharedHandle() = default;
+    explicit SharedHandle( DisplayKHR handle, SharedPhysicalDevice parent ) noexcept
+      : SharedHandleBaseNoDestroy<DisplayKHR, SharedPhysicalDevice>( handle, std::move( parent ) )
+    {
+    }
+  };
+  using SharedDisplayKHR = SharedHandle<DisplayKHR>;
+
+  template <>
+  class SharedHandle<DisplayModeKHR> : public SharedHandleBaseNoDestroy<DisplayModeKHR, SharedDisplayKHR>
+  {
+    friend SharedHandleBase<DisplayModeKHR, SharedDisplayKHR>;
+
+  public:
+    SharedHandle() = default;
+    explicit SharedHandle( DisplayModeKHR handle, SharedDisplayKHR parent ) noexcept
+      : SharedHandleBaseNoDestroy<DisplayModeKHR, SharedDisplayKHR>( handle, std::move( parent ) )
+    {
+    }
+  };
+  using SharedDisplayModeKHR = SharedHandle<DisplayModeKHR>;
+
+  //=== VK_INTEL_performance_query ===
+
+  template <>
+  class SharedHandle<PerformanceConfigurationINTEL> : public SharedHandleBaseNoDestroy<PerformanceConfigurationINTEL, SharedDevice>
+  {
+    friend SharedHandleBase<PerformanceConfigurationINTEL, SharedDevice>;
+
+  public:
+    SharedHandle() = default;
+    explicit SharedHandle( PerformanceConfigurationINTEL handle, SharedDevice parent ) noexcept
+      : SharedHandleBaseNoDestroy<PerformanceConfigurationINTEL, SharedDevice>( handle, std::move( parent ) )
+    {
+    }
+  };
+  using SharedPerformanceConfigurationINTEL = SharedHandle<PerformanceConfigurationINTEL>;
+#endif  // !VULKAN_HPP_NO_SMART_HANDLE
+}  // namespace VULKAN_HPP_NAMESPACE
+#endif  // VULKAN_SHARED_HPP
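
The new vulkan_shared.hpp above adds reference-counted wrappers around Vulkan handles, including the swapchain-aware Image and SwapchainKHR specializations shown in this hunk. A minimal usage sketch follows; it is not part of the generated header. The bundle struct, the helper name, the pre-existing SharedDevice/SharedSurfaceKHR wrappers, the default `vk` namespace and the usual `<vulkan/...>` include path are assumptions made for illustration, and only the SharedSwapchainKHR and SharedImage constructors visible above are used.

// Illustrative sketch only -- not part of the generated header.
#include <vulkan/vulkan_shared.hpp>
#include <vector>

struct SharedSwapchainBundle
{
  vk::SharedSwapchainKHR       swapchain;
  std::vector<vk::SharedImage> images;
};

SharedSwapchainBundle wrapSwapchain( vk::SharedDevice               device,        // created elsewhere
                                     vk::SharedSurfaceKHR           surface,       // created elsewhere
                                     vk::SwapchainKHR               rawSwapchain,
                                     const std::vector<vk::Image> & rawImages )
{
  SharedSwapchainBundle bundle;

  // The swapchain wrapper keeps its parent device and the surface it was created from
  // alive for as long as any copy of the handle exists.
  bundle.swapchain = vk::SharedSwapchainKHR( rawSwapchain, device, surface );

  bundle.images.reserve( rawImages.size() );
  for ( vk::Image image : rawImages )
  {
    // SwapchainOwns::yes marks the image as swapchain-owned, so internalDestroy() above
    // skips the destroy call: presentable images are released together with their swapchain.
    bundle.images.emplace_back( image, device, vk::SwapchainOwns::yes );
  }
  return bundle;
}

The design point the sketch illustrates is that ownership flows through the headers: destroying the last SharedSwapchainKHR copy releases the swapchain (and, transitively, its images), while the SharedImage wrappers only pin the parent device and never call the destroy function themselves when SwapchainOwns::yes is set.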

+ 7127 - 0
ThirdParty/Khronos/vulkan/vulkan_static_assertions.hpp

@@ -0,0 +1,7127 @@
+// Copyright 2015-2023 The Khronos Group Inc.
+//
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#ifndef VULKAN_STATIC_ASSERTIONS_HPP
+#define VULKAN_STATIC_ASSERTIONS_HPP
+
+#include <vulkan/vulkan.hpp>
+
+//=========================
+//=== static_assertions ===
+//=========================
+
+//=== VK_VERSION_1_0 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Extent2D ) == sizeof( VkExtent2D ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Extent2D>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Extent2D>::value, "Extent2D is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Extent3D ) == sizeof( VkExtent3D ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Extent3D>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Extent3D>::value, "Extent3D is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Offset2D ) == sizeof( VkOffset2D ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Offset2D>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Offset2D>::value, "Offset2D is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Offset3D ) == sizeof( VkOffset3D ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Offset3D>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Offset3D>::value, "Offset3D is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Rect2D ) == sizeof( VkRect2D ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Rect2D>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Rect2D>::value, "Rect2D is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BaseInStructure ) == sizeof( VkBaseInStructure ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BaseInStructure>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BaseInStructure>::value,
+                          "BaseInStructure is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BaseOutStructure ) == sizeof( VkBaseOutStructure ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BaseOutStructure>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BaseOutStructure>::value,
+                          "BaseOutStructure is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier ) == sizeof( VkBufferMemoryBarrier ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier>::value,
+                          "BufferMemoryBarrier is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DispatchIndirectCommand ) == sizeof( VkDispatchIndirectCommand ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DispatchIndirectCommand>::value,
+                          "DispatchIndirectCommand is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand ) == sizeof( VkDrawIndexedIndirectCommand ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrawIndexedIndirectCommand>::value,
+                          "DrawIndexedIndirectCommand is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawIndirectCommand ) == sizeof( VkDrawIndirectCommand ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrawIndirectCommand>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrawIndirectCommand>::value,
+                          "DrawIndirectCommand is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier ) == sizeof( VkImageMemoryBarrier ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier>::value,
+                          "ImageMemoryBarrier is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryBarrier ) == sizeof( VkMemoryBarrier ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryBarrier>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryBarrier>::value, "MemoryBarrier is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne ) == sizeof( VkPipelineCacheHeaderVersionOne ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCacheHeaderVersionOne>::value,
+                          "PipelineCacheHeaderVersionOne is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AllocationCallbacks ) == sizeof( VkAllocationCallbacks ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AllocationCallbacks>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AllocationCallbacks>::value,
+                          "AllocationCallbacks is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ApplicationInfo ) == sizeof( VkApplicationInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ApplicationInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ApplicationInfo>::value,
+                          "ApplicationInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FormatProperties ) == sizeof( VkFormatProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FormatProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FormatProperties>::value,
+                          "FormatProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatProperties ) == sizeof( VkImageFormatProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageFormatProperties>::value,
+                          "ImageFormatProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Instance ) == sizeof( VkInstance ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Instance>::value, "Instance is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::InstanceCreateInfo ) == sizeof( VkInstanceCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::InstanceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::InstanceCreateInfo>::value,
+                          "InstanceCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryHeap ) == sizeof( VkMemoryHeap ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryHeap>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryHeap>::value, "MemoryHeap is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryType ) == sizeof( VkMemoryType ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryType>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryType>::value, "MemoryType is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice ) == sizeof( VkPhysicalDevice ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevice>::value,
+                          "PhysicalDevice is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures ) == sizeof( VkPhysicalDeviceFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures>::value,
+                          "PhysicalDeviceFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits ) == sizeof( VkPhysicalDeviceLimits ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLimits>::value,
+                          "PhysicalDeviceLimits is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties ) == sizeof( VkPhysicalDeviceMemoryProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties>::value,
+                          "PhysicalDeviceMemoryProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties ) == sizeof( VkPhysicalDeviceProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties>::value,
+                          "PhysicalDeviceProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties ) == sizeof( VkPhysicalDeviceSparseProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseProperties>::value,
+                          "PhysicalDeviceSparseProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyProperties ) == sizeof( VkQueueFamilyProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyProperties>::value,
+                          "QueueFamilyProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Device ) == sizeof( VkDevice ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Device>::value, "Device is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceCreateInfo ) == sizeof( VkDeviceCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceCreateInfo>::value,
+                          "DeviceCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo ) == sizeof( VkDeviceQueueCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceQueueCreateInfo>::value,
+                          "DeviceQueueCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExtensionProperties ) == sizeof( VkExtensionProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExtensionProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExtensionProperties>::value,
+                          "ExtensionProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LayerProperties ) == sizeof( VkLayerProperties ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::LayerProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::LayerProperties>::value,
+                          "LayerProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Queue ) == sizeof( VkQueue ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Queue>::value, "Queue is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubmitInfo ) == sizeof( VkSubmitInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubmitInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubmitInfo>::value, "SubmitInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MappedMemoryRange ) == sizeof( VkMappedMemoryRange ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MappedMemoryRange>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MappedMemoryRange>::value,
+                          "MappedMemoryRange is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryAllocateInfo ) == sizeof( VkMemoryAllocateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryAllocateInfo>::value,
+                          "MemoryAllocateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemory ) == sizeof( VkDeviceMemory ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceMemory>::value, "DeviceMemory is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryRequirements ) == sizeof( VkMemoryRequirements ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryRequirements>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryRequirements>::value,
+                          "MemoryRequirements is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindSparseInfo ) == sizeof( VkBindSparseInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindSparseInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindSparseInfo>::value,
+                          "BindSparseInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresource ) == sizeof( VkImageSubresource ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresource>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresource>::value,
+                          "ImageSubresource is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo ) == sizeof( VkSparseBufferMemoryBindInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseBufferMemoryBindInfo>::value,
+                          "SparseBufferMemoryBindInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties ) == sizeof( VkSparseImageFormatProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties>::value,
+                          "SparseImageFormatProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryBind ) == sizeof( VkSparseImageMemoryBind ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageMemoryBind>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageMemoryBind>::value,
+                          "SparseImageMemoryBind is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo ) == sizeof( VkSparseImageMemoryBindInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageMemoryBindInfo>::value,
+                          "SparseImageMemoryBindInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements ) == sizeof( VkSparseImageMemoryRequirements ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements>::value,
+                          "SparseImageMemoryRequirements is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo ) == sizeof( VkSparseImageOpaqueMemoryBindInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageOpaqueMemoryBindInfo>::value,
+                          "SparseImageOpaqueMemoryBindInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseMemoryBind ) == sizeof( VkSparseMemoryBind ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseMemoryBind>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseMemoryBind>::value,
+                          "SparseMemoryBind is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Fence ) == sizeof( VkFence ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Fence>::value, "Fence is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FenceCreateInfo ) == sizeof( VkFenceCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FenceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FenceCreateInfo>::value,
+                          "FenceCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Semaphore ) == sizeof( VkSemaphore ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Semaphore>::value, "Semaphore is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo ) == sizeof( VkSemaphoreCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreCreateInfo>::value,
+                          "SemaphoreCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Event ) == sizeof( VkEvent ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Event>::value, "Event is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::EventCreateInfo ) == sizeof( VkEventCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::EventCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::EventCreateInfo>::value,
+                          "EventCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPool ) == sizeof( VkQueryPool ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueryPool>::value, "QueryPool is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo ) == sizeof( VkQueryPoolCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueryPoolCreateInfo>::value,
+                          "QueryPoolCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Buffer ) == sizeof( VkBuffer ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Buffer>::value, "Buffer is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCreateInfo ) == sizeof( VkBufferCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCreateInfo>::value,
+                          "BufferCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferView ) == sizeof( VkBufferView ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferView>::value, "BufferView is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferViewCreateInfo ) == sizeof( VkBufferViewCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferViewCreateInfo>::value,
+                          "BufferViewCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Image ) == sizeof( VkImage ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Image>::value, "Image is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCreateInfo ) == sizeof( VkImageCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageCreateInfo>::value,
+                          "ImageCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceLayout ) == sizeof( VkSubresourceLayout ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubresourceLayout>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubresourceLayout>::value,
+                          "SubresourceLayout is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ComponentMapping ) == sizeof( VkComponentMapping ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ComponentMapping>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ComponentMapping>::value,
+                          "ComponentMapping is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresourceRange ) == sizeof( VkImageSubresourceRange ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresourceRange>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresourceRange>::value,
+                          "ImageSubresourceRange is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageView ) == sizeof( VkImageView ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageView>::value, "ImageView is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewCreateInfo ) == sizeof( VkImageViewCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewCreateInfo>::value,
+                          "ImageViewCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModule ) == sizeof( VkShaderModule ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderModule>::value, "ShaderModule is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo ) == sizeof( VkShaderModuleCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderModuleCreateInfo>::value,
+                          "ShaderModuleCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCache ) == sizeof( VkPipelineCache ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCache>::value, "PipelineCache is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo ) == sizeof( VkPipelineCacheCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCacheCreateInfo>::value,
+                          "PipelineCacheCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo ) == sizeof( VkComputePipelineCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ComputePipelineCreateInfo>::value,
+                          "ComputePipelineCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo ) == sizeof( VkGraphicsPipelineCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GraphicsPipelineCreateInfo>::value,
+                          "GraphicsPipelineCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Pipeline ) == sizeof( VkPipeline ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Pipeline>::value, "Pipeline is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState ) == sizeof( VkPipelineColorBlendAttachmentState ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineColorBlendAttachmentState>::value,
+                          "PipelineColorBlendAttachmentState is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo ) == sizeof( VkPipelineColorBlendStateCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineColorBlendStateCreateInfo>::value,
+                          "PipelineColorBlendStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo ) == sizeof( VkPipelineDepthStencilStateCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineDepthStencilStateCreateInfo>::value,
+                          "PipelineDepthStencilStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo ) == sizeof( VkPipelineDynamicStateCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineDynamicStateCreateInfo>::value,
+                          "PipelineDynamicStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo ) == sizeof( VkPipelineInputAssemblyStateCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineInputAssemblyStateCreateInfo>::value,
+                          "PipelineInputAssemblyStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo ) == sizeof( VkPipelineMultisampleStateCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineMultisampleStateCreateInfo>::value,
+                          "PipelineMultisampleStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo ) == sizeof( VkPipelineRasterizationStateCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateCreateInfo>::value,
+                          "PipelineRasterizationStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo ) == sizeof( VkPipelineShaderStageCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineShaderStageCreateInfo>::value,
+                          "PipelineShaderStageCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo ) == sizeof( VkPipelineTessellationStateCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineTessellationStateCreateInfo>::value,
+                          "PipelineTessellationStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo ) == sizeof( VkPipelineVertexInputStateCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineVertexInputStateCreateInfo>::value,
+                          "PipelineVertexInputStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo ) == sizeof( VkPipelineViewportStateCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportStateCreateInfo>::value,
+                          "PipelineViewportStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SpecializationInfo ) == sizeof( VkSpecializationInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SpecializationInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SpecializationInfo>::value,
+                          "SpecializationInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SpecializationMapEntry ) == sizeof( VkSpecializationMapEntry ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SpecializationMapEntry>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SpecializationMapEntry>::value,
+                          "SpecializationMapEntry is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::StencilOpState ) == sizeof( VkStencilOpState ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::StencilOpState>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::StencilOpState>::value,
+                          "StencilOpState is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription ) == sizeof( VkVertexInputAttributeDescription ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription>::value,
+                          "VertexInputAttributeDescription is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputBindingDescription ) == sizeof( VkVertexInputBindingDescription ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription>::value,
+                          "VertexInputBindingDescription is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Viewport ) == sizeof( VkViewport ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Viewport>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Viewport>::value, "Viewport is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineLayout ) == sizeof( VkPipelineLayout ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineLayout>::value,
+                          "PipelineLayout is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo ) == sizeof( VkPipelineLayoutCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineLayoutCreateInfo>::value,
+                          "PipelineLayoutCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PushConstantRange ) == sizeof( VkPushConstantRange ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PushConstantRange>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PushConstantRange>::value,
+                          "PushConstantRange is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Sampler ) == sizeof( VkSampler ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Sampler>::value, "Sampler is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerCreateInfo ) == sizeof( VkSamplerCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerCreateInfo>::value,
+                          "SamplerCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyDescriptorSet ) == sizeof( VkCopyDescriptorSet ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyDescriptorSet>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyDescriptorSet>::value,
+                          "CopyDescriptorSet is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorBufferInfo ) == sizeof( VkDescriptorBufferInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorBufferInfo>::value,
+                          "DescriptorBufferInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorImageInfo ) == sizeof( VkDescriptorImageInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorImageInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorImageInfo>::value,
+                          "DescriptorImageInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPool ) == sizeof( VkDescriptorPool ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorPool>::value,
+                          "DescriptorPool is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo ) == sizeof( VkDescriptorPoolCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorPoolCreateInfo>::value,
+                          "DescriptorPoolCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolSize ) == sizeof( VkDescriptorPoolSize ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorPoolSize>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorPoolSize>::value,
+                          "DescriptorPoolSize is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSet ) == sizeof( VkDescriptorSet ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSet>::value, "DescriptorSet is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo ) == sizeof( VkDescriptorSetAllocateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetAllocateInfo>::value,
+                          "DescriptorSetAllocateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayout ) == sizeof( VkDescriptorSetLayout ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayout>::value,
+                          "DescriptorSetLayout is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding ) == sizeof( VkDescriptorSetLayoutBinding ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBinding>::value,
+                          "DescriptorSetLayoutBinding is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo ) == sizeof( VkDescriptorSetLayoutCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutCreateInfo>::value,
+                          "DescriptorSetLayoutCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSet ) == sizeof( VkWriteDescriptorSet ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::WriteDescriptorSet>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WriteDescriptorSet>::value,
+                          "WriteDescriptorSet is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescription ) == sizeof( VkAttachmentDescription ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentDescription>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentDescription>::value,
+                          "AttachmentDescription is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentReference ) == sizeof( VkAttachmentReference ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentReference>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentReference>::value,
+                          "AttachmentReference is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Framebuffer ) == sizeof( VkFramebuffer ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Framebuffer>::value, "Framebuffer is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferCreateInfo ) == sizeof( VkFramebufferCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FramebufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FramebufferCreateInfo>::value,
+                          "FramebufferCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPass ) == sizeof( VkRenderPass ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPass>::value, "RenderPass is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo ) == sizeof( VkRenderPassCreateInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo>::value,
+                          "RenderPassCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDependency ) == sizeof( VkSubpassDependency ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDependency>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDependency>::value,
+                          "SubpassDependency is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDescription ) == sizeof( VkSubpassDescription ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDescription>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDescription>::value,
+                          "SubpassDescription is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandPool ) == sizeof( VkCommandPool ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandPool>::value, "CommandPool is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo ) == sizeof( VkCommandPoolCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandPoolCreateInfo>::value,
+                          "CommandPoolCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBuffer ) == sizeof( VkCommandBuffer ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBuffer>::value, "CommandBuffer is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo ) == sizeof( VkCommandBufferAllocateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferAllocateInfo>::value,
+                          "CommandBufferAllocateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo ) == sizeof( VkCommandBufferBeginInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferBeginInfo>::value,
+                          "CommandBufferBeginInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo ) == sizeof( VkCommandBufferInheritanceInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceInfo>::value,
+                          "CommandBufferInheritanceInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCopy ) == sizeof( VkBufferCopy ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCopy>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCopy>::value, "BufferCopy is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferImageCopy ) == sizeof( VkBufferImageCopy ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferImageCopy>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferImageCopy>::value,
+                          "BufferImageCopy is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearAttachment ) == sizeof( VkClearAttachment ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ClearAttachment>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ClearAttachment>::value,
+                          "ClearAttachment is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearColorValue ) == sizeof( VkClearColorValue ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ClearColorValue>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ClearColorValue>::value,
+                          "ClearColorValue is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearDepthStencilValue ) == sizeof( VkClearDepthStencilValue ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ClearDepthStencilValue>::value,
+                          "ClearDepthStencilValue is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearRect ) == sizeof( VkClearRect ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ClearRect>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ClearRect>::value, "ClearRect is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ClearValue ) == sizeof( VkClearValue ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ClearValue>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ClearValue>::value, "ClearValue is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageBlit ) == sizeof( VkImageBlit ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageBlit>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageBlit>::value, "ImageBlit is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCopy ) == sizeof( VkImageCopy ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCopy>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageCopy>::value, "ImageCopy is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageResolve ) == sizeof( VkImageResolve ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageResolve>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageResolve>::value, "ImageResolve is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresourceLayers ) == sizeof( VkImageSubresourceLayers ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresourceLayers>::value,
+                          "ImageSubresourceLayers is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassBeginInfo ) == sizeof( VkRenderPassBeginInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassBeginInfo>::value,
+                          "RenderPassBeginInfo is not nothrow_move_constructible!" );
+
+//=== VK_VERSION_1_1 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties ) == sizeof( VkPhysicalDeviceSubgroupProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupProperties>::value,
+                          "PhysicalDeviceSubgroupProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo ) == sizeof( VkBindBufferMemoryInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindBufferMemoryInfo>::value,
+                          "BindBufferMemoryInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImageMemoryInfo ) == sizeof( VkBindImageMemoryInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindImageMemoryInfo>::value,
+                          "BindImageMemoryInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures ) == sizeof( VkPhysicalDevice16BitStorageFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevice16BitStorageFeatures>::value,
+                          "PhysicalDevice16BitStorageFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements ) == sizeof( VkMemoryDedicatedRequirements ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryDedicatedRequirements>::value,
+                          "MemoryDedicatedRequirements is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo ) == sizeof( VkMemoryDedicatedAllocateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryDedicatedAllocateInfo>::value,
+                          "MemoryDedicatedAllocateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo ) == sizeof( VkMemoryAllocateFlagsInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryAllocateFlagsInfo>::value,
+                          "MemoryAllocateFlagsInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo ) == sizeof( VkDeviceGroupRenderPassBeginInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupRenderPassBeginInfo>::value,
+                          "DeviceGroupRenderPassBeginInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo ) == sizeof( VkDeviceGroupCommandBufferBeginInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupCommandBufferBeginInfo>::value,
+                          "DeviceGroupCommandBufferBeginInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo ) == sizeof( VkDeviceGroupSubmitInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupSubmitInfo>::value,
+                          "DeviceGroupSubmitInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo ) == sizeof( VkDeviceGroupBindSparseInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupBindSparseInfo>::value,
+                          "DeviceGroupBindSparseInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo ) == sizeof( VkBindBufferMemoryDeviceGroupInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindBufferMemoryDeviceGroupInfo>::value,
+                          "BindBufferMemoryDeviceGroupInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo ) == sizeof( VkBindImageMemoryDeviceGroupInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindImageMemoryDeviceGroupInfo>::value,
+                          "BindImageMemoryDeviceGroupInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties ) == sizeof( VkPhysicalDeviceGroupProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceGroupProperties>::value,
+                          "PhysicalDeviceGroupProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo ) == sizeof( VkDeviceGroupDeviceCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupDeviceCreateInfo>::value,
+                          "DeviceGroupDeviceCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2 ) == sizeof( VkBufferMemoryRequirementsInfo2 ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferMemoryRequirementsInfo2>::value,
+                          "BufferMemoryRequirementsInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2 ) == sizeof( VkImageMemoryRequirementsInfo2 ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageMemoryRequirementsInfo2>::value,
+                          "ImageMemoryRequirementsInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2 ) == sizeof( VkImageSparseMemoryRequirementsInfo2 ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSparseMemoryRequirementsInfo2>::value,
+                          "ImageSparseMemoryRequirementsInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryRequirements2 ) == sizeof( VkMemoryRequirements2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryRequirements2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryRequirements2>::value,
+                          "MemoryRequirements2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2 ) == sizeof( VkSparseImageMemoryRequirements2 ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageMemoryRequirements2>::value,
+                          "SparseImageMemoryRequirements2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2 ) == sizeof( VkPhysicalDeviceFeatures2 ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFeatures2>::value,
+                          "PhysicalDeviceFeatures2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2 ) == sizeof( VkPhysicalDeviceProperties2 ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProperties2>::value,
+                          "PhysicalDeviceProperties2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FormatProperties2 ) == sizeof( VkFormatProperties2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FormatProperties2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FormatProperties2>::value,
+                          "FormatProperties2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatProperties2 ) == sizeof( VkImageFormatProperties2 ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageFormatProperties2>::value,
+                          "ImageFormatProperties2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2 ) == sizeof( VkPhysicalDeviceImageFormatInfo2 ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageFormatInfo2>::value,
+                          "PhysicalDeviceImageFormatInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyProperties2 ) == sizeof( VkQueueFamilyProperties2 ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyProperties2>::value,
+                          "QueueFamilyProperties2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2 ) == sizeof( VkPhysicalDeviceMemoryProperties2 ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryProperties2>::value,
+                          "PhysicalDeviceMemoryProperties2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2 ) == sizeof( VkSparseImageFormatProperties2 ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SparseImageFormatProperties2>::value,
+                          "SparseImageFormatProperties2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2 ) == sizeof( VkPhysicalDeviceSparseImageFormatInfo2 ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSparseImageFormatInfo2>::value,
+                          "PhysicalDeviceSparseImageFormatInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties ) == sizeof( VkPhysicalDevicePointClippingProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePointClippingProperties>::value,
+                          "PhysicalDevicePointClippingProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo ) == sizeof( VkRenderPassInputAttachmentAspectCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassInputAttachmentAspectCreateInfo>::value,
+                          "RenderPassInputAttachmentAspectCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference ) == sizeof( VkInputAttachmentAspectReference ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::InputAttachmentAspectReference>::value,
+                          "InputAttachmentAspectReference is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo ) == sizeof( VkImageViewUsageCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewUsageCreateInfo>::value,
+                          "ImageViewUsageCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo ) ==
+                            sizeof( VkPipelineTessellationDomainOriginStateCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineTessellationDomainOriginStateCreateInfo>::value,
+                          "PipelineTessellationDomainOriginStateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo ) == sizeof( VkRenderPassMultiviewCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassMultiviewCreateInfo>::value,
+                          "RenderPassMultiviewCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures ) == sizeof( VkPhysicalDeviceMultiviewFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewFeatures>::value,
+                          "PhysicalDeviceMultiviewFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties ) == sizeof( VkPhysicalDeviceMultiviewProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewProperties>::value,
+                          "PhysicalDeviceMultiviewProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures ) == sizeof( VkPhysicalDeviceVariablePointersFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVariablePointersFeatures>::value,
+                          "PhysicalDeviceVariablePointersFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures ) == sizeof( VkPhysicalDeviceProtectedMemoryFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryFeatures>::value,
+                          "PhysicalDeviceProtectedMemoryFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties ) == sizeof( VkPhysicalDeviceProtectedMemoryProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProtectedMemoryProperties>::value,
+                          "PhysicalDeviceProtectedMemoryProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueInfo2 ) == sizeof( VkDeviceQueueInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceQueueInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceQueueInfo2>::value,
+                          "DeviceQueueInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo ) == sizeof( VkProtectedSubmitInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ProtectedSubmitInfo>::value,
+                          "ProtectedSubmitInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo ) == sizeof( VkSamplerYcbcrConversionCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionCreateInfo>::value,
+                          "SamplerYcbcrConversionCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo ) == sizeof( VkSamplerYcbcrConversionInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionInfo>::value,
+                          "SamplerYcbcrConversionInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo ) == sizeof( VkBindImagePlaneMemoryInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindImagePlaneMemoryInfo>::value,
+                          "BindImagePlaneMemoryInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo ) == sizeof( VkImagePlaneMemoryRequirementsInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImagePlaneMemoryRequirementsInfo>::value,
+                          "ImagePlaneMemoryRequirementsInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures ) ==
+                            sizeof( VkPhysicalDeviceSamplerYcbcrConversionFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerYcbcrConversionFeatures>::value,
+                          "PhysicalDeviceSamplerYcbcrConversionFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties ) ==
+                            sizeof( VkSamplerYcbcrConversionImageFormatProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionImageFormatProperties>::value,
+                          "SamplerYcbcrConversionImageFormatProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion ) == sizeof( VkSamplerYcbcrConversion ),
+                          "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversion>::value,
+                          "SamplerYcbcrConversion is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate ) == sizeof( VkDescriptorUpdateTemplate ),
+                          "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplate>::value,
+                          "DescriptorUpdateTemplate is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry ) == sizeof( VkDescriptorUpdateTemplateEntry ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateEntry>::value,
+                          "DescriptorUpdateTemplateEntry is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo ) == sizeof( VkDescriptorUpdateTemplateCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorUpdateTemplateCreateInfo>::value,
+                          "DescriptorUpdateTemplateCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryProperties ) == sizeof( VkExternalMemoryProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalMemoryProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalMemoryProperties>::value,
+                          "ExternalMemoryProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo ) == sizeof( VkPhysicalDeviceExternalImageFormatInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalImageFormatInfo>::value,
+                          "PhysicalDeviceExternalImageFormatInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties ) == sizeof( VkExternalImageFormatProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalImageFormatProperties>::value,
+                          "ExternalImageFormatProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo ) == sizeof( VkPhysicalDeviceExternalBufferInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalBufferInfo>::value,
+                          "PhysicalDeviceExternalBufferInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalBufferProperties ) == sizeof( VkExternalBufferProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalBufferProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalBufferProperties>::value,
+                          "ExternalBufferProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties ) == sizeof( VkPhysicalDeviceIDProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceIDProperties>::value,
+                          "PhysicalDeviceIDProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo ) == sizeof( VkExternalMemoryImageCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfo>::value,
+                          "ExternalMemoryImageCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo ) == sizeof( VkExternalMemoryBufferCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalMemoryBufferCreateInfo>::value,
+                          "ExternalMemoryBufferCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo ) == sizeof( VkExportMemoryAllocateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfo>::value,
+                          "ExportMemoryAllocateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo ) == sizeof( VkPhysicalDeviceExternalFenceInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFenceInfo>::value,
+                          "PhysicalDeviceExternalFenceInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalFenceProperties ) == sizeof( VkExternalFenceProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalFenceProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalFenceProperties>::value,
+                          "ExternalFenceProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo ) == sizeof( VkExportFenceCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportFenceCreateInfo>::value,
+                          "ExportFenceCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo ) == sizeof( VkExportSemaphoreCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportSemaphoreCreateInfo>::value,
+                          "ExportSemaphoreCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo ) == sizeof( VkPhysicalDeviceExternalSemaphoreInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalSemaphoreInfo>::value,
+                          "PhysicalDeviceExternalSemaphoreInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties ) == sizeof( VkExternalSemaphoreProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalSemaphoreProperties>::value,
+                          "ExternalSemaphoreProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties ) == sizeof( VkPhysicalDeviceMaintenance3Properties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance3Properties>::value,
+                          "PhysicalDeviceMaintenance3Properties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport ) == sizeof( VkDescriptorSetLayoutSupport ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutSupport>::value,
+                          "DescriptorSetLayoutSupport is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures ) == sizeof( VkPhysicalDeviceShaderDrawParametersFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDrawParametersFeatures>::value,
+                          "PhysicalDeviceShaderDrawParametersFeatures is not nothrow_move_constructible!" );
+
+//=== VK_VERSION_1_2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features ) == sizeof( VkPhysicalDeviceVulkan11Features ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Features>::value,
+                          "PhysicalDeviceVulkan11Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties ) == sizeof( VkPhysicalDeviceVulkan11Properties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan11Properties>::value,
+                          "PhysicalDeviceVulkan11Properties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features ) == sizeof( VkPhysicalDeviceVulkan12Features ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Features>::value,
+                          "PhysicalDeviceVulkan12Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties ) == sizeof( VkPhysicalDeviceVulkan12Properties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan12Properties>::value,
+                          "PhysicalDeviceVulkan12Properties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo ) == sizeof( VkImageFormatListCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageFormatListCreateInfo>::value,
+                          "ImageFormatListCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2 ) == sizeof( VkRenderPassCreateInfo2 ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassCreateInfo2>::value,
+                          "RenderPassCreateInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescription2 ) == sizeof( VkAttachmentDescription2 ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentDescription2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentDescription2>::value,
+                          "AttachmentDescription2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentReference2 ) == sizeof( VkAttachmentReference2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentReference2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentReference2>::value,
+                          "AttachmentReference2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDescription2 ) == sizeof( VkSubpassDescription2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDescription2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDescription2>::value,
+                          "SubpassDescription2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDependency2 ) == sizeof( VkSubpassDependency2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDependency2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDependency2>::value,
+                          "SubpassDependency2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassBeginInfo ) == sizeof( VkSubpassBeginInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassBeginInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassBeginInfo>::value,
+                          "SubpassBeginInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassEndInfo ) == sizeof( VkSubpassEndInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassEndInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassEndInfo>::value,
+                          "SubpassEndInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures ) == sizeof( VkPhysicalDevice8BitStorageFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevice8BitStorageFeatures>::value,
+                          "PhysicalDevice8BitStorageFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ConformanceVersion ) == sizeof( VkConformanceVersion ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ConformanceVersion>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ConformanceVersion>::value,
+                          "ConformanceVersion is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties ) == sizeof( VkPhysicalDeviceDriverProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDriverProperties>::value,
+                          "PhysicalDeviceDriverProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features ) == sizeof( VkPhysicalDeviceShaderAtomicInt64Features ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicInt64Features>::value,
+                          "PhysicalDeviceShaderAtomicInt64Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features ) == sizeof( VkPhysicalDeviceShaderFloat16Int8Features ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderFloat16Int8Features>::value,
+                          "PhysicalDeviceShaderFloat16Int8Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties ) == sizeof( VkPhysicalDeviceFloatControlsProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFloatControlsProperties>::value,
+                          "PhysicalDeviceFloatControlsProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo ) == sizeof( VkDescriptorSetLayoutBindingFlagsCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutBindingFlagsCreateInfo>::value,
+                          "DescriptorSetLayoutBindingFlagsCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures ) == sizeof( VkPhysicalDeviceDescriptorIndexingFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingFeatures>::value,
+                          "PhysicalDeviceDescriptorIndexingFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties ) == sizeof( VkPhysicalDeviceDescriptorIndexingProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorIndexingProperties>::value,
+                          "PhysicalDeviceDescriptorIndexingProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo ) ==
+                            sizeof( VkDescriptorSetVariableDescriptorCountAllocateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountAllocateInfo>::value,
+                          "DescriptorSetVariableDescriptorCountAllocateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport ) ==
+                            sizeof( VkDescriptorSetVariableDescriptorCountLayoutSupport ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetVariableDescriptorCountLayoutSupport>::value,
+                          "DescriptorSetVariableDescriptorCountLayoutSupport is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve ) == sizeof( VkSubpassDescriptionDepthStencilResolve ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassDescriptionDepthStencilResolve>::value,
+                          "SubpassDescriptionDepthStencilResolve is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties ) ==
+                            sizeof( VkPhysicalDeviceDepthStencilResolveProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthStencilResolveProperties>::value,
+                          "PhysicalDeviceDepthStencilResolveProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures ) == sizeof( VkPhysicalDeviceScalarBlockLayoutFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceScalarBlockLayoutFeatures>::value,
+                          "PhysicalDeviceScalarBlockLayoutFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo ) == sizeof( VkImageStencilUsageCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageStencilUsageCreateInfo>::value,
+                          "ImageStencilUsageCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo ) == sizeof( VkSamplerReductionModeCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerReductionModeCreateInfo>::value,
+                          "SamplerReductionModeCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties ) ==
+                            sizeof( VkPhysicalDeviceSamplerFilterMinmaxProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSamplerFilterMinmaxProperties>::value,
+                          "PhysicalDeviceSamplerFilterMinmaxProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures ) == sizeof( VkPhysicalDeviceVulkanMemoryModelFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkanMemoryModelFeatures>::value,
+                          "PhysicalDeviceVulkanMemoryModelFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures ) == sizeof( VkPhysicalDeviceImagelessFramebufferFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImagelessFramebufferFeatures>::value,
+                          "PhysicalDeviceImagelessFramebufferFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo ) == sizeof( VkFramebufferAttachmentsCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FramebufferAttachmentsCreateInfo>::value,
+                          "FramebufferAttachmentsCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo ) == sizeof( VkFramebufferAttachmentImageInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FramebufferAttachmentImageInfo>::value,
+                          "FramebufferAttachmentImageInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo ) == sizeof( VkRenderPassAttachmentBeginInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassAttachmentBeginInfo>::value,
+                          "RenderPassAttachmentBeginInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures ) ==
+                            sizeof( VkPhysicalDeviceUniformBufferStandardLayoutFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceUniformBufferStandardLayoutFeatures>::value,
+                          "PhysicalDeviceUniformBufferStandardLayoutFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures ) ==
+                            sizeof( VkPhysicalDeviceShaderSubgroupExtendedTypesFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupExtendedTypesFeatures>::value,
+                          "PhysicalDeviceShaderSubgroupExtendedTypesFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures ) ==
+                            sizeof( VkPhysicalDeviceSeparateDepthStencilLayoutsFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSeparateDepthStencilLayoutsFeatures>::value,
+                          "PhysicalDeviceSeparateDepthStencilLayoutsFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout ) == sizeof( VkAttachmentReferenceStencilLayout ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentReferenceStencilLayout>::value,
+                          "AttachmentReferenceStencilLayout is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout ) == sizeof( VkAttachmentDescriptionStencilLayout ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentDescriptionStencilLayout>::value,
+                          "AttachmentDescriptionStencilLayout is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures ) == sizeof( VkPhysicalDeviceHostQueryResetFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostQueryResetFeatures>::value,
+                          "PhysicalDeviceHostQueryResetFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures ) == sizeof( VkPhysicalDeviceTimelineSemaphoreFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreFeatures>::value,
+                          "PhysicalDeviceTimelineSemaphoreFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties ) == sizeof( VkPhysicalDeviceTimelineSemaphoreProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTimelineSemaphoreProperties>::value,
+                          "PhysicalDeviceTimelineSemaphoreProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo ) == sizeof( VkSemaphoreTypeCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreTypeCreateInfo>::value,
+                          "SemaphoreTypeCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo ) == sizeof( VkTimelineSemaphoreSubmitInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TimelineSemaphoreSubmitInfo>::value,
+                          "TimelineSemaphoreSubmitInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo ) == sizeof( VkSemaphoreWaitInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreWaitInfo>::value,
+                          "SemaphoreWaitInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo ) == sizeof( VkSemaphoreSignalInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreSignalInfo>::value,
+                          "SemaphoreSignalInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures ) == sizeof( VkPhysicalDeviceBufferDeviceAddressFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeatures>::value,
+                          "PhysicalDeviceBufferDeviceAddressFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo ) == sizeof( VkBufferDeviceAddressInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferDeviceAddressInfo>::value,
+                          "BufferDeviceAddressInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo ) == sizeof( VkBufferOpaqueCaptureAddressCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferOpaqueCaptureAddressCreateInfo>::value,
+                          "BufferOpaqueCaptureAddressCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo ) == sizeof( VkMemoryOpaqueCaptureAddressAllocateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryOpaqueCaptureAddressAllocateInfo>::value,
+                          "MemoryOpaqueCaptureAddressAllocateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo ) == sizeof( VkDeviceMemoryOpaqueCaptureAddressInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceMemoryOpaqueCaptureAddressInfo>::value,
+                          "DeviceMemoryOpaqueCaptureAddressInfo is not nothrow_move_constructible!" );
+
+//=== VK_VERSION_1_3 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features ) == sizeof( VkPhysicalDeviceVulkan13Features ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Features>::value,
+                          "PhysicalDeviceVulkan13Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties ) == sizeof( VkPhysicalDeviceVulkan13Properties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVulkan13Properties>::value,
+                          "PhysicalDeviceVulkan13Properties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo ) == sizeof( VkPipelineCreationFeedbackCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCreationFeedbackCreateInfo>::value,
+                          "PipelineCreationFeedbackCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCreationFeedback ) == sizeof( VkPipelineCreationFeedback ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCreationFeedback>::value,
+                          "PipelineCreationFeedback is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures ) ==
+                            sizeof( VkPhysicalDeviceShaderTerminateInvocationFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTerminateInvocationFeatures>::value,
+                          "PhysicalDeviceShaderTerminateInvocationFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties ) == sizeof( VkPhysicalDeviceToolProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceToolProperties>::value,
+                          "PhysicalDeviceToolProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures ) ==
+                            sizeof( VkPhysicalDeviceShaderDemoteToHelperInvocationFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderDemoteToHelperInvocationFeatures>::value,
+                          "PhysicalDeviceShaderDemoteToHelperInvocationFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures ) == sizeof( VkPhysicalDevicePrivateDataFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePrivateDataFeatures>::value,
+                          "PhysicalDevicePrivateDataFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo ) == sizeof( VkDevicePrivateDataCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DevicePrivateDataCreateInfo>::value,
+                          "DevicePrivateDataCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo ) == sizeof( VkPrivateDataSlotCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PrivateDataSlotCreateInfo>::value,
+                          "PrivateDataSlotCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PrivateDataSlot ) == sizeof( VkPrivateDataSlot ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PrivateDataSlot>::value,
+                          "PrivateDataSlot is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures ) ==
+                            sizeof( VkPhysicalDevicePipelineCreationCacheControlFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineCreationCacheControlFeatures>::value,
+                          "PhysicalDevicePipelineCreationCacheControlFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryBarrier2 ) == sizeof( VkMemoryBarrier2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryBarrier2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryBarrier2>::value,
+                          "MemoryBarrier2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2 ) == sizeof( VkBufferMemoryBarrier2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferMemoryBarrier2>::value,
+                          "BufferMemoryBarrier2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2 ) == sizeof( VkImageMemoryBarrier2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageMemoryBarrier2>::value,
+                          "ImageMemoryBarrier2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DependencyInfo ) == sizeof( VkDependencyInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DependencyInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DependencyInfo>::value,
+                          "DependencyInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubmitInfo2 ) == sizeof( VkSubmitInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubmitInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubmitInfo2>::value, "SubmitInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo ) == sizeof( VkSemaphoreSubmitInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreSubmitInfo>::value,
+                          "SemaphoreSubmitInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo ) == sizeof( VkCommandBufferSubmitInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferSubmitInfo>::value,
+                          "CommandBufferSubmitInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features ) == sizeof( VkPhysicalDeviceSynchronization2Features ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSynchronization2Features>::value,
+                          "PhysicalDeviceSynchronization2Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures ) ==
+                            sizeof( VkPhysicalDeviceZeroInitializeWorkgroupMemoryFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures>::value,
+                          "PhysicalDeviceZeroInitializeWorkgroupMemoryFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures ) == sizeof( VkPhysicalDeviceImageRobustnessFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageRobustnessFeatures>::value,
+                          "PhysicalDeviceImageRobustnessFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyBufferInfo2 ) == sizeof( VkCopyBufferInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyBufferInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyBufferInfo2>::value,
+                          "CopyBufferInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageInfo2 ) == sizeof( VkCopyImageInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyImageInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyImageInfo2>::value,
+                          "CopyImageInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2 ) == sizeof( VkCopyBufferToImageInfo2 ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyBufferToImageInfo2>::value,
+                          "CopyBufferToImageInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2 ) == sizeof( VkCopyImageToBufferInfo2 ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyImageToBufferInfo2>::value,
+                          "CopyImageToBufferInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BlitImageInfo2 ) == sizeof( VkBlitImageInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BlitImageInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BlitImageInfo2>::value,
+                          "BlitImageInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ResolveImageInfo2 ) == sizeof( VkResolveImageInfo2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ResolveImageInfo2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ResolveImageInfo2>::value,
+                          "ResolveImageInfo2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCopy2 ) == sizeof( VkBufferCopy2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCopy2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCopy2>::value, "BufferCopy2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCopy2 ) == sizeof( VkImageCopy2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCopy2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageCopy2>::value, "ImageCopy2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageBlit2 ) == sizeof( VkImageBlit2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageBlit2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageBlit2>::value, "ImageBlit2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferImageCopy2 ) == sizeof( VkBufferImageCopy2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferImageCopy2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferImageCopy2>::value,
+                          "BufferImageCopy2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageResolve2 ) == sizeof( VkImageResolve2 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageResolve2>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageResolve2>::value, "ImageResolve2 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures ) == sizeof( VkPhysicalDeviceSubgroupSizeControlFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlFeatures>::value,
+                          "PhysicalDeviceSubgroupSizeControlFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties ) ==
+                            sizeof( VkPhysicalDeviceSubgroupSizeControlProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubgroupSizeControlProperties>::value,
+                          "PhysicalDeviceSubgroupSizeControlProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo ) ==
+                            sizeof( VkPipelineShaderStageRequiredSubgroupSizeCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineShaderStageRequiredSubgroupSizeCreateInfo>::value,
+                          "PipelineShaderStageRequiredSubgroupSizeCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures ) == sizeof( VkPhysicalDeviceInlineUniformBlockFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockFeatures>::value,
+                          "PhysicalDeviceInlineUniformBlockFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties ) == sizeof( VkPhysicalDeviceInlineUniformBlockProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceInlineUniformBlockProperties>::value,
+                          "PhysicalDeviceInlineUniformBlockProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock ) == sizeof( VkWriteDescriptorSetInlineUniformBlock ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WriteDescriptorSetInlineUniformBlock>::value,
+                          "WriteDescriptorSetInlineUniformBlock is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo ) == sizeof( VkDescriptorPoolInlineUniformBlockCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorPoolInlineUniformBlockCreateInfo>::value,
+                          "DescriptorPoolInlineUniformBlockCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures ) ==
+                            sizeof( VkPhysicalDeviceTextureCompressionASTCHDRFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTextureCompressionASTCHDRFeatures>::value,
+                          "PhysicalDeviceTextureCompressionASTCHDRFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingInfo ) == sizeof( VkRenderingInfo ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingInfo>::value, "RenderingInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo ) == sizeof( VkRenderingAttachmentInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingAttachmentInfo>::value,
+                          "RenderingAttachmentInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo ) == sizeof( VkPipelineRenderingCreateInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRenderingCreateInfo>::value,
+                          "PipelineRenderingCreateInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures ) == sizeof( VkPhysicalDeviceDynamicRenderingFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingFeatures>::value,
+                          "PhysicalDeviceDynamicRenderingFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo ) == sizeof( VkCommandBufferInheritanceRenderingInfo ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderingInfo>::value,
+                          "CommandBufferInheritanceRenderingInfo is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures ) ==
+                            sizeof( VkPhysicalDeviceShaderIntegerDotProductFeatures ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductFeatures>::value,
+                          "PhysicalDeviceShaderIntegerDotProductFeatures is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties ) ==
+                            sizeof( VkPhysicalDeviceShaderIntegerDotProductProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerDotProductProperties>::value,
+                          "PhysicalDeviceShaderIntegerDotProductProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties ) ==
+                            sizeof( VkPhysicalDeviceTexelBufferAlignmentProperties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentProperties>::value,
+                          "PhysicalDeviceTexelBufferAlignmentProperties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FormatProperties3 ) == sizeof( VkFormatProperties3 ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FormatProperties3>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FormatProperties3>::value,
+                          "FormatProperties3 is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features ) == sizeof( VkPhysicalDeviceMaintenance4Features ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Features>::value,
+                          "PhysicalDeviceMaintenance4Features is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties ) == sizeof( VkPhysicalDeviceMaintenance4Properties ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance4Properties>::value,
+                          "PhysicalDeviceMaintenance4Properties is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements ) == sizeof( VkDeviceBufferMemoryRequirements ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceBufferMemoryRequirements>::value,
+                          "DeviceBufferMemoryRequirements is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements ) == sizeof( VkDeviceImageMemoryRequirements ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceImageMemoryRequirements>::value,
+                          "DeviceImageMemoryRequirements is not nothrow_move_constructible!" );
+
+//=== VK_KHR_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceKHR ) == sizeof( VkSurfaceKHR ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceKHR>::value, "SurfaceKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR ) == sizeof( VkSurfaceCapabilitiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesKHR>::value,
+                          "SurfaceCapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFormatKHR ) == sizeof( VkSurfaceFormatKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceFormatKHR>::value,
+                          "SurfaceFormatKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_swapchain ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR ) == sizeof( VkSwapchainCreateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainCreateInfoKHR>::value,
+                          "SwapchainCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainKHR ) == sizeof( VkSwapchainKHR ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainKHR>::value, "SwapchainKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentInfoKHR ) == sizeof( VkPresentInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentInfoKHR>::value,
+                          "PresentInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR ) == sizeof( VkImageSwapchainCreateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSwapchainCreateInfoKHR>::value,
+                          "ImageSwapchainCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR ) == sizeof( VkBindImageMemorySwapchainInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindImageMemorySwapchainInfoKHR>::value,
+                          "BindImageMemorySwapchainInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR ) == sizeof( VkAcquireNextImageInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AcquireNextImageInfoKHR>::value,
+                          "AcquireNextImageInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR ) == sizeof( VkDeviceGroupPresentCapabilitiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupPresentCapabilitiesKHR>::value,
+                          "DeviceGroupPresentCapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR ) == sizeof( VkDeviceGroupPresentInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupPresentInfoKHR>::value,
+                          "DeviceGroupPresentInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR ) == sizeof( VkDeviceGroupSwapchainCreateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceGroupSwapchainCreateInfoKHR>::value,
+                          "DeviceGroupSwapchainCreateInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_display ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayKHR ) == sizeof( VkDisplayKHR ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayKHR>::value, "DisplayKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR ) == sizeof( VkDisplayModeCreateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModeCreateInfoKHR>::value,
+                          "DisplayModeCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeKHR ) == sizeof( VkDisplayModeKHR ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModeKHR>::value,
+                          "DisplayModeKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR ) == sizeof( VkDisplayModeParametersKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModeParametersKHR>::value,
+                          "DisplayModeParametersKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR ) == sizeof( VkDisplayModePropertiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModePropertiesKHR>::value,
+                          "DisplayModePropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR ) == sizeof( VkDisplayPlaneCapabilitiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilitiesKHR>::value,
+                          "DisplayPlaneCapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR ) == sizeof( VkDisplayPlanePropertiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlanePropertiesKHR>::value,
+                          "DisplayPlanePropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR ) == sizeof( VkDisplayPropertiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPropertiesKHR>::value,
+                          "DisplayPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR ) == sizeof( VkDisplaySurfaceCreateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplaySurfaceCreateInfoKHR>::value,
+                          "DisplaySurfaceCreateInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_display_swapchain ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR ) == sizeof( VkDisplayPresentInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPresentInfoKHR>::value,
+                          "DisplayPresentInfoKHR is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_XLIB_KHR )
+//=== VK_KHR_xlib_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR ) == sizeof( VkXlibSurfaceCreateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::XlibSurfaceCreateInfoKHR>::value,
+                          "XlibSurfaceCreateInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_XLIB_KHR*/
+
+#if defined( VK_USE_PLATFORM_XCB_KHR )
+//=== VK_KHR_xcb_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR ) == sizeof( VkXcbSurfaceCreateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::XcbSurfaceCreateInfoKHR>::value,
+                          "XcbSurfaceCreateInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_XCB_KHR*/
+
+#if defined( VK_USE_PLATFORM_WAYLAND_KHR )
+//=== VK_KHR_wayland_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR ) == sizeof( VkWaylandSurfaceCreateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WaylandSurfaceCreateInfoKHR>::value,
+                          "WaylandSurfaceCreateInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WAYLAND_KHR*/
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+//=== VK_KHR_android_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR ) == sizeof( VkAndroidSurfaceCreateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidSurfaceCreateInfoKHR>::value,
+                          "AndroidSurfaceCreateInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+//=== VK_KHR_win32_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR ) == sizeof( VkWin32SurfaceCreateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Win32SurfaceCreateInfoKHR>::value,
+                          "Win32SurfaceCreateInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+//=== VK_EXT_debug_report ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT ) == sizeof( VkDebugReportCallbackEXT ),
+                          "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugReportCallbackEXT>::value,
+                          "DebugReportCallbackEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT ) == sizeof( VkDebugReportCallbackCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugReportCallbackCreateInfoEXT>::value,
+                          "DebugReportCallbackCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_AMD_rasterization_order ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD ) ==
+                            sizeof( VkPipelineRasterizationStateRasterizationOrderAMD ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateRasterizationOrderAMD>::value,
+                          "PipelineRasterizationStateRasterizationOrderAMD is not nothrow_move_constructible!" );
+
+//=== VK_EXT_debug_marker ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT ) == sizeof( VkDebugMarkerObjectNameInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugMarkerObjectNameInfoEXT>::value,
+                          "DebugMarkerObjectNameInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT ) == sizeof( VkDebugMarkerObjectTagInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugMarkerObjectTagInfoEXT>::value,
+                          "DebugMarkerObjectTagInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT ) == sizeof( VkDebugMarkerMarkerInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugMarkerMarkerInfoEXT>::value,
+                          "DebugMarkerMarkerInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_video_queue ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionKHR ) == sizeof( VkVideoSessionKHR ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionKHR>::value,
+                          "VideoSessionKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR ) == sizeof( VkVideoSessionParametersKHR ),
+                          "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionParametersKHR>::value,
+                          "VideoSessionParametersKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusPropertiesKHR ) == sizeof( VkQueueFamilyQueryResultStatusPropertiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusPropertiesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyQueryResultStatusPropertiesKHR>::value,
+                          "QueueFamilyQueryResultStatusPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyVideoPropertiesKHR ) == sizeof( VkQueueFamilyVideoPropertiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyVideoPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyVideoPropertiesKHR>::value,
+                          "QueueFamilyVideoPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR ) == sizeof( VkVideoProfileInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoProfileInfoKHR>::value,
+                          "VideoProfileInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR ) == sizeof( VkVideoProfileListInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoProfileListInfoKHR>::value,
+                          "VideoProfileListInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR ) == sizeof( VkVideoCapabilitiesKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoCapabilitiesKHR>::value,
+                          "VideoCapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR ) == sizeof( VkPhysicalDeviceVideoFormatInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoFormatInfoKHR>::value,
+                          "PhysicalDeviceVideoFormatInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR ) == sizeof( VkVideoFormatPropertiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoFormatPropertiesKHR>::value,
+                          "VideoFormatPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR ) == sizeof( VkVideoPictureResourceInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoPictureResourceInfoKHR>::value,
+                          "VideoPictureResourceInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR ) == sizeof( VkVideoReferenceSlotInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoReferenceSlotInfoKHR>::value,
+                          "VideoReferenceSlotInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR ) == sizeof( VkVideoSessionMemoryRequirementsKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionMemoryRequirementsKHR>::value,
+                          "VideoSessionMemoryRequirementsKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR ) == sizeof( VkBindVideoSessionMemoryInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindVideoSessionMemoryInfoKHR>::value,
+                          "BindVideoSessionMemoryInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR ) == sizeof( VkVideoSessionCreateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionCreateInfoKHR>::value,
+                          "VideoSessionCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR ) == sizeof( VkVideoSessionParametersCreateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionParametersCreateInfoKHR>::value,
+                          "VideoSessionParametersCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR ) == sizeof( VkVideoSessionParametersUpdateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoSessionParametersUpdateInfoKHR>::value,
+                          "VideoSessionParametersUpdateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR ) == sizeof( VkVideoBeginCodingInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoBeginCodingInfoKHR>::value,
+                          "VideoBeginCodingInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR ) == sizeof( VkVideoEndCodingInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEndCodingInfoKHR>::value,
+                          "VideoEndCodingInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR ) == sizeof( VkVideoCodingControlInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoCodingControlInfoKHR>::value,
+                          "VideoCodingControlInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_video_decode_queue ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR ) == sizeof( VkVideoDecodeCapabilitiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeCapabilitiesKHR>::value,
+                          "VideoDecodeCapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR ) == sizeof( VkVideoDecodeUsageInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeUsageInfoKHR>::value,
+                          "VideoDecodeUsageInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR ) == sizeof( VkVideoDecodeInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeInfoKHR>::value,
+                          "VideoDecodeInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_NV_dedicated_allocation ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV ) == sizeof( VkDedicatedAllocationImageCreateInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DedicatedAllocationImageCreateInfoNV>::value,
+                          "DedicatedAllocationImageCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV ) == sizeof( VkDedicatedAllocationBufferCreateInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DedicatedAllocationBufferCreateInfoNV>::value,
+                          "DedicatedAllocationBufferCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV ) == sizeof( VkDedicatedAllocationMemoryAllocateInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DedicatedAllocationMemoryAllocateInfoNV>::value,
+                          "DedicatedAllocationMemoryAllocateInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_transform_feedback ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT ) == sizeof( VkPhysicalDeviceTransformFeedbackFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackFeaturesEXT>::value,
+                          "PhysicalDeviceTransformFeedbackFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT ) ==
+                            sizeof( VkPhysicalDeviceTransformFeedbackPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTransformFeedbackPropertiesEXT>::value,
+                          "PhysicalDeviceTransformFeedbackPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT ) ==
+                            sizeof( VkPipelineRasterizationStateStreamCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationStateStreamCreateInfoEXT>::value,
+                          "PipelineRasterizationStateStreamCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_NVX_binary_import ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuModuleNVX ) == sizeof( VkCuModuleNVX ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuModuleNVX>::value, "CuModuleNVX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuFunctionNVX ) == sizeof( VkCuFunctionNVX ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuFunctionNVX>::value, "CuFunctionNVX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX ) == sizeof( VkCuModuleCreateInfoNVX ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuModuleCreateInfoNVX>::value,
+                          "CuModuleCreateInfoNVX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX ) == sizeof( VkCuFunctionCreateInfoNVX ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuFunctionCreateInfoNVX>::value,
+                          "CuFunctionCreateInfoNVX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX ) == sizeof( VkCuLaunchInfoNVX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CuLaunchInfoNVX>::value,
+                          "CuLaunchInfoNVX is not nothrow_move_constructible!" );
+
+//=== VK_NVX_image_view_handle ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX ) == sizeof( VkImageViewHandleInfoNVX ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewHandleInfoNVX>::value,
+                          "ImageViewHandleInfoNVX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX ) == sizeof( VkImageViewAddressPropertiesNVX ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewAddressPropertiesNVX>::value,
+                          "ImageViewAddressPropertiesNVX is not nothrow_move_constructible!" );
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+//=== VK_EXT_video_encode_h264 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT ) == sizeof( VkVideoEncodeH264CapabilitiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264CapabilitiesEXT>::value,
+                          "VideoEncodeH264CapabilitiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264QualityLevelPropertiesEXT ) == sizeof( VkVideoEncodeH264QualityLevelPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264QualityLevelPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264QualityLevelPropertiesEXT>::value,
+                          "VideoEncodeH264QualityLevelPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoEXT ) == sizeof( VkVideoEncodeH264SessionCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionCreateInfoEXT>::value,
+                          "VideoEncodeH264SessionCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT ) ==
+                            sizeof( VkVideoEncodeH264SessionParametersCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersCreateInfoEXT>::value,
+                          "VideoEncodeH264SessionParametersCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT ) == sizeof( VkVideoEncodeH264SessionParametersAddInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersAddInfoEXT>::value,
+                          "VideoEncodeH264SessionParametersAddInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersGetInfoEXT ) == sizeof( VkVideoEncodeH264SessionParametersGetInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersGetInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersGetInfoEXT>::value,
+                          "VideoEncodeH264SessionParametersGetInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersFeedbackInfoEXT ) ==
+                            sizeof( VkVideoEncodeH264SessionParametersFeedbackInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersFeedbackInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264SessionParametersFeedbackInfoEXT>::value,
+                          "VideoEncodeH264SessionParametersFeedbackInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264PictureInfoEXT ) == sizeof( VkVideoEncodeH264PictureInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264PictureInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264PictureInfoEXT>::value,
+                          "VideoEncodeH264PictureInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT ) == sizeof( VkVideoEncodeH264DpbSlotInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264DpbSlotInfoEXT>::value,
+                          "VideoEncodeH264DpbSlotInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT ) == sizeof( VkVideoEncodeH264NaluSliceInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264NaluSliceInfoEXT>::value,
+                          "VideoEncodeH264NaluSliceInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoEXT ) == sizeof( VkVideoEncodeH264ProfileInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264ProfileInfoEXT>::value,
+                          "VideoEncodeH264ProfileInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoEXT ) == sizeof( VkVideoEncodeH264RateControlInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlInfoEXT>::value,
+                          "VideoEncodeH264RateControlInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT ) == sizeof( VkVideoEncodeH264RateControlLayerInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264RateControlLayerInfoEXT>::value,
+                          "VideoEncodeH264RateControlLayerInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT ) == sizeof( VkVideoEncodeH264QpEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264QpEXT>::value,
+                          "VideoEncodeH264QpEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT ) == sizeof( VkVideoEncodeH264FrameSizeEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264FrameSizeEXT>::value,
+                          "VideoEncodeH264FrameSizeEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH264GopRemainingFrameInfoEXT ) == sizeof( VkVideoEncodeH264GopRemainingFrameInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH264GopRemainingFrameInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH264GopRemainingFrameInfoEXT>::value,
+                          "VideoEncodeH264GopRemainingFrameInfoEXT is not nothrow_move_constructible!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+//=== VK_EXT_video_encode_h265 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT ) == sizeof( VkVideoEncodeH265CapabilitiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265CapabilitiesEXT>::value,
+                          "VideoEncodeH265CapabilitiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionCreateInfoEXT ) == sizeof( VkVideoEncodeH265SessionCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionCreateInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionCreateInfoEXT>::value,
+                          "VideoEncodeH265SessionCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265QualityLevelPropertiesEXT ) == sizeof( VkVideoEncodeH265QualityLevelPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265QualityLevelPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265QualityLevelPropertiesEXT>::value,
+                          "VideoEncodeH265QualityLevelPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT ) ==
+                            sizeof( VkVideoEncodeH265SessionParametersCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersCreateInfoEXT>::value,
+                          "VideoEncodeH265SessionParametersCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT ) == sizeof( VkVideoEncodeH265SessionParametersAddInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersAddInfoEXT>::value,
+                          "VideoEncodeH265SessionParametersAddInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersGetInfoEXT ) == sizeof( VkVideoEncodeH265SessionParametersGetInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersGetInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersGetInfoEXT>::value,
+                          "VideoEncodeH265SessionParametersGetInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersFeedbackInfoEXT ) ==
+                            sizeof( VkVideoEncodeH265SessionParametersFeedbackInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersFeedbackInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265SessionParametersFeedbackInfoEXT>::value,
+                          "VideoEncodeH265SessionParametersFeedbackInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265PictureInfoEXT ) == sizeof( VkVideoEncodeH265PictureInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265PictureInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265PictureInfoEXT>::value,
+                          "VideoEncodeH265PictureInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT ) == sizeof( VkVideoEncodeH265DpbSlotInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265DpbSlotInfoEXT>::value,
+                          "VideoEncodeH265DpbSlotInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT ) == sizeof( VkVideoEncodeH265NaluSliceSegmentInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265NaluSliceSegmentInfoEXT>::value,
+                          "VideoEncodeH265NaluSliceSegmentInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoEXT ) == sizeof( VkVideoEncodeH265ProfileInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265ProfileInfoEXT>::value,
+                          "VideoEncodeH265ProfileInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT ) == sizeof( VkVideoEncodeH265RateControlInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlInfoEXT>::value,
+                          "VideoEncodeH265RateControlInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT ) == sizeof( VkVideoEncodeH265RateControlLayerInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265RateControlLayerInfoEXT>::value,
+                          "VideoEncodeH265RateControlLayerInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT ) == sizeof( VkVideoEncodeH265QpEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265QpEXT>::value,
+                          "VideoEncodeH265QpEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT ) == sizeof( VkVideoEncodeH265FrameSizeEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265FrameSizeEXT>::value,
+                          "VideoEncodeH265FrameSizeEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeH265GopRemainingFrameInfoEXT ) == sizeof( VkVideoEncodeH265GopRemainingFrameInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeH265GopRemainingFrameInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeH265GopRemainingFrameInfoEXT>::value,
+                          "VideoEncodeH265GopRemainingFrameInfoEXT is not nothrow_move_constructible!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+//=== VK_KHR_video_decode_h264 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoKHR ) == sizeof( VkVideoDecodeH264ProfileInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264ProfileInfoKHR>::value,
+                          "VideoDecodeH264ProfileInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesKHR ) == sizeof( VkVideoDecodeH264CapabilitiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264CapabilitiesKHR>::value,
+                          "VideoDecodeH264CapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoKHR ) ==
+                            sizeof( VkVideoDecodeH264SessionParametersCreateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersCreateInfoKHR>::value,
+                          "VideoDecodeH264SessionParametersCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR ) == sizeof( VkVideoDecodeH264SessionParametersAddInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264SessionParametersAddInfoKHR>::value,
+                          "VideoDecodeH264SessionParametersAddInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoKHR ) == sizeof( VkVideoDecodeH264PictureInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264PictureInfoKHR>::value,
+                          "VideoDecodeH264PictureInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoKHR ) == sizeof( VkVideoDecodeH264DpbSlotInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH264DpbSlotInfoKHR>::value,
+                          "VideoDecodeH264DpbSlotInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_AMD_texture_gather_bias_lod ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD ) == sizeof( VkTextureLODGatherFormatPropertiesAMD ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TextureLODGatherFormatPropertiesAMD>::value,
+                          "TextureLODGatherFormatPropertiesAMD is not nothrow_move_constructible!" );
+
+//=== VK_AMD_shader_info ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD ) == sizeof( VkShaderResourceUsageAMD ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderResourceUsageAMD>::value,
+                          "ShaderResourceUsageAMD is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD ) == sizeof( VkShaderStatisticsInfoAMD ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderStatisticsInfoAMD>::value,
+                          "ShaderStatisticsInfoAMD is not nothrow_move_constructible!" );
+
+//=== VK_KHR_dynamic_rendering ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR ) ==
+                            sizeof( VkRenderingFragmentShadingRateAttachmentInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingFragmentShadingRateAttachmentInfoKHR>::value,
+                          "RenderingFragmentShadingRateAttachmentInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT ) ==
+                            sizeof( VkRenderingFragmentDensityMapAttachmentInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingFragmentDensityMapAttachmentInfoEXT>::value,
+                          "RenderingFragmentDensityMapAttachmentInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD ) == sizeof( VkAttachmentSampleCountInfoAMD ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentSampleCountInfoAMD>::value,
+                          "AttachmentSampleCountInfoAMD is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX ) == sizeof( VkMultiviewPerViewAttributesInfoNVX ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultiviewPerViewAttributesInfoNVX>::value,
+                          "MultiviewPerViewAttributesInfoNVX is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_GGP )
+//=== VK_GGP_stream_descriptor_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP ) == sizeof( VkStreamDescriptorSurfaceCreateInfoGGP ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::StreamDescriptorSurfaceCreateInfoGGP>::value,
+                          "StreamDescriptorSurfaceCreateInfoGGP is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_GGP*/
+
+//=== VK_NV_corner_sampled_image ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV ) == sizeof( VkPhysicalDeviceCornerSampledImageFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCornerSampledImageFeaturesNV>::value,
+                          "PhysicalDeviceCornerSampledImageFeaturesNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_external_memory_capabilities ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV ) == sizeof( VkExternalImageFormatPropertiesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalImageFormatPropertiesNV>::value,
+                          "ExternalImageFormatPropertiesNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_external_memory ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV ) == sizeof( VkExternalMemoryImageCreateInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalMemoryImageCreateInfoNV>::value,
+                          "ExternalMemoryImageCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV ) == sizeof( VkExportMemoryAllocateInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMemoryAllocateInfoNV>::value,
+                          "ExportMemoryAllocateInfoNV is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+//=== VK_NV_external_memory_win32 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV ) == sizeof( VkImportMemoryWin32HandleInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoNV>::value,
+                          "ImportMemoryWin32HandleInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV ) == sizeof( VkExportMemoryWin32HandleInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoNV>::value,
+                          "ExportMemoryWin32HandleInfoNV is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+//=== VK_NV_win32_keyed_mutex ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoNV>::value,
+                          "Win32KeyedMutexAcquireReleaseInfoNV is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+//=== VK_EXT_validation_flags ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationFlagsEXT ) == sizeof( VkValidationFlagsEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ValidationFlagsEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ValidationFlagsEXT>::value,
+                          "ValidationFlagsEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_VI_NN )
+//=== VK_NN_vi_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN ) == sizeof( VkViSurfaceCreateInfoNN ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ViSurfaceCreateInfoNN>::value,
+                          "ViSurfaceCreateInfoNN is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_VI_NN*/
+
+//=== VK_EXT_astc_decode_mode ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT ) == sizeof( VkImageViewASTCDecodeModeEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewASTCDecodeModeEXT>::value,
+                          "ImageViewASTCDecodeModeEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT ) == sizeof( VkPhysicalDeviceASTCDecodeFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceASTCDecodeFeaturesEXT>::value,
+                          "PhysicalDeviceASTCDecodeFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_pipeline_robustness ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT ) ==
+                            sizeof( VkPhysicalDevicePipelineRobustnessFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessFeaturesEXT>::value,
+                          "PhysicalDevicePipelineRobustnessFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT ) ==
+                            sizeof( VkPhysicalDevicePipelineRobustnessPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineRobustnessPropertiesEXT>::value,
+                          "PhysicalDevicePipelineRobustnessPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT ) == sizeof( VkPipelineRobustnessCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRobustnessCreateInfoEXT>::value,
+                          "PipelineRobustnessCreateInfoEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+//=== VK_KHR_external_memory_win32 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR ) == sizeof( VkImportMemoryWin32HandleInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryWin32HandleInfoKHR>::value,
+                          "ImportMemoryWin32HandleInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR ) == sizeof( VkExportMemoryWin32HandleInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMemoryWin32HandleInfoKHR>::value,
+                          "ExportMemoryWin32HandleInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR ) == sizeof( VkMemoryWin32HandlePropertiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryWin32HandlePropertiesKHR>::value,
+                          "MemoryWin32HandlePropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR ) == sizeof( VkMemoryGetWin32HandleInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetWin32HandleInfoKHR>::value,
+                          "MemoryGetWin32HandleInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+//=== VK_KHR_external_memory_fd ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR ) == sizeof( VkImportMemoryFdInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryFdInfoKHR>::value,
+                          "ImportMemoryFdInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR ) == sizeof( VkMemoryFdPropertiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryFdPropertiesKHR>::value,
+                          "MemoryFdPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR ) == sizeof( VkMemoryGetFdInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetFdInfoKHR>::value,
+                          "MemoryGetFdInfoKHR is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+//=== VK_KHR_win32_keyed_mutex ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR ) == sizeof( VkWin32KeyedMutexAcquireReleaseInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::Win32KeyedMutexAcquireReleaseInfoKHR>::value,
+                          "Win32KeyedMutexAcquireReleaseInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+//=== VK_KHR_external_semaphore_win32 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR ) == sizeof( VkImportSemaphoreWin32HandleInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportSemaphoreWin32HandleInfoKHR>::value,
+                          "ImportSemaphoreWin32HandleInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR ) == sizeof( VkExportSemaphoreWin32HandleInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportSemaphoreWin32HandleInfoKHR>::value,
+                          "ExportSemaphoreWin32HandleInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR ) == sizeof( VkD3D12FenceSubmitInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::D3D12FenceSubmitInfoKHR>::value,
+                          "D3D12FenceSubmitInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR ) == sizeof( VkSemaphoreGetWin32HandleInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreGetWin32HandleInfoKHR>::value,
+                          "SemaphoreGetWin32HandleInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+//=== VK_KHR_external_semaphore_fd ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR ) == sizeof( VkImportSemaphoreFdInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportSemaphoreFdInfoKHR>::value,
+                          "ImportSemaphoreFdInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR ) == sizeof( VkSemaphoreGetFdInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreGetFdInfoKHR>::value,
+                          "SemaphoreGetFdInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_push_descriptor ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR ) == sizeof( VkPhysicalDevicePushDescriptorPropertiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePushDescriptorPropertiesKHR>::value,
+                          "PhysicalDevicePushDescriptorPropertiesKHR is not nothrow_move_constructible!" );
+
+//=== VK_EXT_conditional_rendering ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT ) == sizeof( VkConditionalRenderingBeginInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ConditionalRenderingBeginInfoEXT>::value,
+                          "ConditionalRenderingBeginInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceConditionalRenderingFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceConditionalRenderingFeaturesEXT>::value,
+                          "PhysicalDeviceConditionalRenderingFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT ) ==
+                            sizeof( VkCommandBufferInheritanceConditionalRenderingInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceConditionalRenderingInfoEXT>::value,
+                          "CommandBufferInheritanceConditionalRenderingInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_incremental_present ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentRegionsKHR ) == sizeof( VkPresentRegionsKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentRegionsKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentRegionsKHR>::value,
+                          "PresentRegionsKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentRegionKHR ) == sizeof( VkPresentRegionKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentRegionKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentRegionKHR>::value,
+                          "PresentRegionKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RectLayerKHR ) == sizeof( VkRectLayerKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RectLayerKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RectLayerKHR>::value, "RectLayerKHR is not nothrow_move_constructible!" );
+
+//=== VK_NV_clip_space_w_scaling ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ViewportWScalingNV ) == sizeof( VkViewportWScalingNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ViewportWScalingNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ViewportWScalingNV>::value,
+                          "ViewportWScalingNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV ) == sizeof( VkPipelineViewportWScalingStateCreateInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportWScalingStateCreateInfoNV>::value,
+                          "PipelineViewportWScalingStateCreateInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_display_surface_counter ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT ) == sizeof( VkSurfaceCapabilities2EXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2EXT>::value,
+                          "SurfaceCapabilities2EXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_display_control ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT ) == sizeof( VkDisplayPowerInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPowerInfoEXT>::value,
+                          "DisplayPowerInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT ) == sizeof( VkDeviceEventInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceEventInfoEXT>::value,
+                          "DeviceEventInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT ) == sizeof( VkDisplayEventInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayEventInfoEXT>::value,
+                          "DisplayEventInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT ) == sizeof( VkSwapchainCounterCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainCounterCreateInfoEXT>::value,
+                          "SwapchainCounterCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_GOOGLE_display_timing ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE ) == sizeof( VkRefreshCycleDurationGOOGLE ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RefreshCycleDurationGOOGLE>::value,
+                          "RefreshCycleDurationGOOGLE is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE ) == sizeof( VkPastPresentationTimingGOOGLE ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PastPresentationTimingGOOGLE>::value,
+                          "PastPresentationTimingGOOGLE is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE ) == sizeof( VkPresentTimesInfoGOOGLE ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentTimesInfoGOOGLE>::value,
+                          "PresentTimesInfoGOOGLE is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE ) == sizeof( VkPresentTimeGOOGLE ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentTimeGOOGLE>::value,
+                          "PresentTimeGOOGLE is not nothrow_move_constructible!" );
+
+//=== VK_NVX_multiview_per_view_attributes ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ) ==
+                            sizeof( VkPhysicalDeviceMultiviewPerViewAttributesPropertiesNVX ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX>::value,
+                          "PhysicalDeviceMultiviewPerViewAttributesPropertiesNVX is not nothrow_move_constructible!" );
+
+//=== VK_NV_viewport_swizzle ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ViewportSwizzleNV ) == sizeof( VkViewportSwizzleNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ViewportSwizzleNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ViewportSwizzleNV>::value,
+                          "ViewportSwizzleNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV ) == sizeof( VkPipelineViewportSwizzleStateCreateInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportSwizzleStateCreateInfoNV>::value,
+                          "PipelineViewportSwizzleStateCreateInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_discard_rectangles ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT ) ==
+                            sizeof( VkPhysicalDeviceDiscardRectanglePropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiscardRectanglePropertiesEXT>::value,
+                          "PhysicalDeviceDiscardRectanglePropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT ) == sizeof( VkPipelineDiscardRectangleStateCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineDiscardRectangleStateCreateInfoEXT>::value,
+                          "PipelineDiscardRectangleStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_conservative_rasterization ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT ) ==
+                            sizeof( VkPhysicalDeviceConservativeRasterizationPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceConservativeRasterizationPropertiesEXT>::value,
+                          "PhysicalDeviceConservativeRasterizationPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT ) ==
+                            sizeof( VkPipelineRasterizationConservativeStateCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationConservativeStateCreateInfoEXT>::value,
+                          "PipelineRasterizationConservativeStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_depth_clip_enable ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClipEnableFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipEnableFeaturesEXT>::value,
+                          "PhysicalDeviceDepthClipEnableFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT ) ==
+                            sizeof( VkPipelineRasterizationDepthClipStateCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationDepthClipStateCreateInfoEXT>::value,
+                          "PipelineRasterizationDepthClipStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_hdr_metadata ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HdrMetadataEXT ) == sizeof( VkHdrMetadataEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::HdrMetadataEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::HdrMetadataEXT>::value,
+                          "HdrMetadataEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::XYColorEXT ) == sizeof( VkXYColorEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::XYColorEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::XYColorEXT>::value, "XYColorEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_shared_presentable_image ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR ) == sizeof( VkSharedPresentSurfaceCapabilitiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SharedPresentSurfaceCapabilitiesKHR>::value,
+                          "SharedPresentSurfaceCapabilitiesKHR is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+//=== VK_KHR_external_fence_win32 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR ) == sizeof( VkImportFenceWin32HandleInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportFenceWin32HandleInfoKHR>::value,
+                          "ImportFenceWin32HandleInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR ) == sizeof( VkExportFenceWin32HandleInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportFenceWin32HandleInfoKHR>::value,
+                          "ExportFenceWin32HandleInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR ) == sizeof( VkFenceGetWin32HandleInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FenceGetWin32HandleInfoKHR>::value,
+                          "FenceGetWin32HandleInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+//=== VK_KHR_external_fence_fd ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR ) == sizeof( VkImportFenceFdInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportFenceFdInfoKHR>::value,
+                          "ImportFenceFdInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR ) == sizeof( VkFenceGetFdInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FenceGetFdInfoKHR>::value,
+                          "FenceGetFdInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_performance_query ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR ) == sizeof( VkPhysicalDevicePerformanceQueryFeaturesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryFeaturesKHR>::value,
+                          "PhysicalDevicePerformanceQueryFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR ) ==
+                            sizeof( VkPhysicalDevicePerformanceQueryPropertiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePerformanceQueryPropertiesKHR>::value,
+                          "PhysicalDevicePerformanceQueryPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterKHR ) == sizeof( VkPerformanceCounterKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceCounterKHR>::value,
+                          "PerformanceCounterKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR ) == sizeof( VkPerformanceCounterDescriptionKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceCounterDescriptionKHR>::value,
+                          "PerformanceCounterDescriptionKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR ) == sizeof( VkQueryPoolPerformanceCreateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceCreateInfoKHR>::value,
+                          "QueryPoolPerformanceCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR ) == sizeof( VkPerformanceCounterResultKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceCounterResultKHR>::value,
+                          "PerformanceCounterResultKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR ) == sizeof( VkAcquireProfilingLockInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AcquireProfilingLockInfoKHR>::value,
+                          "AcquireProfilingLockInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR ) == sizeof( VkPerformanceQuerySubmitInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceQuerySubmitInfoKHR>::value,
+                          "PerformanceQuerySubmitInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_get_surface_capabilities2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR ) == sizeof( VkPhysicalDeviceSurfaceInfo2KHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSurfaceInfo2KHR>::value,
+                          "PhysicalDeviceSurfaceInfo2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR ) == sizeof( VkSurfaceCapabilities2KHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilities2KHR>::value,
+                          "SurfaceCapabilities2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR ) == sizeof( VkSurfaceFormat2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceFormat2KHR>::value,
+                          "SurfaceFormat2KHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_get_display_properties2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayProperties2KHR ) == sizeof( VkDisplayProperties2KHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayProperties2KHR>::value,
+                          "DisplayProperties2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR ) == sizeof( VkDisplayPlaneProperties2KHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlaneProperties2KHR>::value,
+                          "DisplayPlaneProperties2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR ) == sizeof( VkDisplayModeProperties2KHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayModeProperties2KHR>::value,
+                          "DisplayModeProperties2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR ) == sizeof( VkDisplayPlaneInfo2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlaneInfo2KHR>::value,
+                          "DisplayPlaneInfo2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR ) == sizeof( VkDisplayPlaneCapabilities2KHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayPlaneCapabilities2KHR>::value,
+                          "DisplayPlaneCapabilities2KHR is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_IOS_MVK )
+//=== VK_MVK_ios_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK ) == sizeof( VkIOSSurfaceCreateInfoMVK ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IOSSurfaceCreateInfoMVK>::value,
+                          "IOSSurfaceCreateInfoMVK is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_IOS_MVK*/
+
+#if defined( VK_USE_PLATFORM_MACOS_MVK )
+//=== VK_MVK_macos_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK ) == sizeof( VkMacOSSurfaceCreateInfoMVK ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MacOSSurfaceCreateInfoMVK>::value,
+                          "MacOSSurfaceCreateInfoMVK is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_MACOS_MVK*/
+
+//=== VK_EXT_debug_utils ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT ) == sizeof( VkDebugUtilsLabelEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsLabelEXT>::value,
+                          "DebugUtilsLabelEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT ) == sizeof( VkDebugUtilsMessengerCallbackDataEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCallbackDataEXT>::value,
+                          "DebugUtilsMessengerCallbackDataEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT ) == sizeof( VkDebugUtilsMessengerCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerCreateInfoEXT>::value,
+                          "DebugUtilsMessengerCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT ) == sizeof( VkDebugUtilsMessengerEXT ),
+                          "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsMessengerEXT>::value,
+                          "DebugUtilsMessengerEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT ) == sizeof( VkDebugUtilsObjectNameInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsObjectNameInfoEXT>::value,
+                          "DebugUtilsObjectNameInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT ) == sizeof( VkDebugUtilsObjectTagInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DebugUtilsObjectTagInfoEXT>::value,
+                          "DebugUtilsObjectTagInfoEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+//=== VK_ANDROID_external_memory_android_hardware_buffer ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID ) == sizeof( VkAndroidHardwareBufferUsageANDROID ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferUsageANDROID>::value,
+                          "AndroidHardwareBufferUsageANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID ) == sizeof( VkAndroidHardwareBufferPropertiesANDROID ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferPropertiesANDROID>::value,
+                          "AndroidHardwareBufferPropertiesANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID ) ==
+                            sizeof( VkAndroidHardwareBufferFormatPropertiesANDROID ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatPropertiesANDROID>::value,
+                          "AndroidHardwareBufferFormatPropertiesANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID ) == sizeof( VkImportAndroidHardwareBufferInfoANDROID ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportAndroidHardwareBufferInfoANDROID>::value,
+                          "ImportAndroidHardwareBufferInfoANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID ) == sizeof( VkMemoryGetAndroidHardwareBufferInfoANDROID ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetAndroidHardwareBufferInfoANDROID>::value,
+                          "MemoryGetAndroidHardwareBufferInfoANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalFormatANDROID ) == sizeof( VkExternalFormatANDROID ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalFormatANDROID>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalFormatANDROID>::value,
+                          "ExternalFormatANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID ) ==
+                            sizeof( VkAndroidHardwareBufferFormatProperties2ANDROID ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatProperties2ANDROID>::value,
+                          "AndroidHardwareBufferFormatProperties2ANDROID is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+//=== VK_AMDX_shader_enqueue ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX ) == sizeof( VkPhysicalDeviceShaderEnqueueFeaturesAMDX ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueueFeaturesAMDX>::value,
+                          "PhysicalDeviceShaderEnqueueFeaturesAMDX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX ) == sizeof( VkPhysicalDeviceShaderEnqueuePropertiesAMDX ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEnqueuePropertiesAMDX>::value,
+                          "PhysicalDeviceShaderEnqueuePropertiesAMDX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX ) == sizeof( VkExecutionGraphPipelineScratchSizeAMDX ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineScratchSizeAMDX>::value,
+                          "ExecutionGraphPipelineScratchSizeAMDX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX ) == sizeof( VkExecutionGraphPipelineCreateInfoAMDX ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExecutionGraphPipelineCreateInfoAMDX>::value,
+                          "ExecutionGraphPipelineCreateInfoAMDX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DispatchGraphInfoAMDX ) == sizeof( VkDispatchGraphInfoAMDX ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DispatchGraphInfoAMDX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DispatchGraphInfoAMDX>::value,
+                          "DispatchGraphInfoAMDX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX ) == sizeof( VkDispatchGraphCountInfoAMDX ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DispatchGraphCountInfoAMDX>::value,
+                          "DispatchGraphCountInfoAMDX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX ) == sizeof( VkPipelineShaderStageNodeCreateInfoAMDX ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineShaderStageNodeCreateInfoAMDX>::value,
+                          "PipelineShaderStageNodeCreateInfoAMDX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX ) == sizeof( VkDeviceOrHostAddressConstAMDX ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstAMDX>::value,
+                          "DeviceOrHostAddressConstAMDX is not nothrow_move_constructible!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+//=== VK_EXT_sample_locations ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SampleLocationEXT ) == sizeof( VkSampleLocationEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SampleLocationEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SampleLocationEXT>::value,
+                          "SampleLocationEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT ) == sizeof( VkSampleLocationsInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SampleLocationsInfoEXT>::value,
+                          "SampleLocationsInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT ) == sizeof( VkAttachmentSampleLocationsEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AttachmentSampleLocationsEXT>::value,
+                          "AttachmentSampleLocationsEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT ) == sizeof( VkSubpassSampleLocationsEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassSampleLocationsEXT>::value,
+                          "SubpassSampleLocationsEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT ) == sizeof( VkRenderPassSampleLocationsBeginInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassSampleLocationsBeginInfoEXT>::value,
+                          "RenderPassSampleLocationsBeginInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT ) == sizeof( VkPipelineSampleLocationsStateCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineSampleLocationsStateCreateInfoEXT>::value,
+                          "PipelineSampleLocationsStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT ) == sizeof( VkPhysicalDeviceSampleLocationsPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSampleLocationsPropertiesEXT>::value,
+                          "PhysicalDeviceSampleLocationsPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT ) == sizeof( VkMultisamplePropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultisamplePropertiesEXT>::value,
+                          "MultisamplePropertiesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_blend_operation_advanced ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceBlendOperationAdvancedFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedFeaturesEXT>::value,
+                          "PhysicalDeviceBlendOperationAdvancedFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT ) ==
+                            sizeof( VkPhysicalDeviceBlendOperationAdvancedPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBlendOperationAdvancedPropertiesEXT>::value,
+                          "PhysicalDeviceBlendOperationAdvancedPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT ) ==
+                            sizeof( VkPipelineColorBlendAdvancedStateCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineColorBlendAdvancedStateCreateInfoEXT>::value,
+                          "PipelineColorBlendAdvancedStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_NV_fragment_coverage_to_color ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV ) == sizeof( VkPipelineCoverageToColorStateCreateInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCoverageToColorStateCreateInfoNV>::value,
+                          "PipelineCoverageToColorStateCreateInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_KHR_acceleration_structure ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR ) == sizeof( VkDeviceOrHostAddressKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceOrHostAddressKHR>::value,
+                          "DeviceOrHostAddressKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR ) == sizeof( VkDeviceOrHostAddressConstKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceOrHostAddressConstKHR>::value,
+                          "DeviceOrHostAddressConstKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR ) == sizeof( VkAccelerationStructureBuildRangeInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildRangeInfoKHR>::value,
+                          "AccelerationStructureBuildRangeInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AabbPositionsKHR ) == sizeof( VkAabbPositionsKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AabbPositionsKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AabbPositionsKHR>::value,
+                          "AabbPositionsKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR ) ==
+                            sizeof( VkAccelerationStructureGeometryTrianglesDataKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryTrianglesDataKHR>::value,
+                          "AccelerationStructureGeometryTrianglesDataKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TransformMatrixKHR ) == sizeof( VkTransformMatrixKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TransformMatrixKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TransformMatrixKHR>::value,
+                          "TransformMatrixKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR ) == sizeof( VkAccelerationStructureBuildGeometryInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildGeometryInfoKHR>::value,
+                          "AccelerationStructureBuildGeometryInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR ) == sizeof( VkAccelerationStructureGeometryAabbsDataKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryAabbsDataKHR>::value,
+                          "AccelerationStructureGeometryAabbsDataKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR ) == sizeof( VkAccelerationStructureInstanceKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureInstanceKHR>::value,
+                          "AccelerationStructureInstanceKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR ) ==
+                            sizeof( VkAccelerationStructureGeometryInstancesDataKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryInstancesDataKHR>::value,
+                          "AccelerationStructureGeometryInstancesDataKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR ) == sizeof( VkAccelerationStructureGeometryDataKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryDataKHR>::value,
+                          "AccelerationStructureGeometryDataKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR ) == sizeof( VkAccelerationStructureGeometryKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryKHR>::value,
+                          "AccelerationStructureGeometryKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR ) == sizeof( VkAccelerationStructureCreateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoKHR>::value,
+                          "AccelerationStructureCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureKHR ) == sizeof( VkAccelerationStructureKHR ),
+                          "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureKHR>::value,
+                          "AccelerationStructureKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR ) == sizeof( VkWriteDescriptorSetAccelerationStructureKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureKHR>::value,
+                          "WriteDescriptorSetAccelerationStructureKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR ) ==
+                            sizeof( VkPhysicalDeviceAccelerationStructureFeaturesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructureFeaturesKHR>::value,
+                          "PhysicalDeviceAccelerationStructureFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR ) ==
+                            sizeof( VkPhysicalDeviceAccelerationStructurePropertiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceAccelerationStructurePropertiesKHR>::value,
+                          "PhysicalDeviceAccelerationStructurePropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR ) == sizeof( VkAccelerationStructureDeviceAddressInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureDeviceAddressInfoKHR>::value,
+                          "AccelerationStructureDeviceAddressInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR ) == sizeof( VkAccelerationStructureVersionInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureVersionInfoKHR>::value,
+                          "AccelerationStructureVersionInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR ) == sizeof( VkCopyAccelerationStructureToMemoryInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureToMemoryInfoKHR>::value,
+                          "CopyAccelerationStructureToMemoryInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR ) == sizeof( VkCopyMemoryToAccelerationStructureInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMemoryToAccelerationStructureInfoKHR>::value,
+                          "CopyMemoryToAccelerationStructureInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR ) == sizeof( VkCopyAccelerationStructureInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyAccelerationStructureInfoKHR>::value,
+                          "CopyAccelerationStructureInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR ) == sizeof( VkAccelerationStructureBuildSizesInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureBuildSizesInfoKHR>::value,
+                          "AccelerationStructureBuildSizesInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_ray_tracing_pipeline ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR ) == sizeof( VkRayTracingShaderGroupCreateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoKHR>::value,
+                          "RayTracingShaderGroupCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR ) == sizeof( VkRayTracingPipelineCreateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoKHR>::value,
+                          "RayTracingPipelineCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR ) ==
+                            sizeof( VkPhysicalDeviceRayTracingPipelineFeaturesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelineFeaturesKHR>::value,
+                          "PhysicalDeviceRayTracingPipelineFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR ) ==
+                            sizeof( VkPhysicalDeviceRayTracingPipelinePropertiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPipelinePropertiesKHR>::value,
+                          "PhysicalDeviceRayTracingPipelinePropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR ) == sizeof( VkStridedDeviceAddressRegionKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::StridedDeviceAddressRegionKHR>::value,
+                          "StridedDeviceAddressRegionKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR ) == sizeof( VkTraceRaysIndirectCommandKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommandKHR>::value,
+                          "TraceRaysIndirectCommandKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR ) == sizeof( VkRayTracingPipelineInterfaceCreateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingPipelineInterfaceCreateInfoKHR>::value,
+                          "RayTracingPipelineInterfaceCreateInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_ray_query ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR ) == sizeof( VkPhysicalDeviceRayQueryFeaturesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayQueryFeaturesKHR>::value,
+                          "PhysicalDeviceRayQueryFeaturesKHR is not nothrow_move_constructible!" );
+
+//=== VK_NV_framebuffer_mixed_samples ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV ) ==
+                            sizeof( VkPipelineCoverageModulationStateCreateInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCoverageModulationStateCreateInfoNV>::value,
+                          "PipelineCoverageModulationStateCreateInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_shader_sm_builtins ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsPropertiesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsPropertiesNV>::value,
+                          "PhysicalDeviceShaderSMBuiltinsPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV ) == sizeof( VkPhysicalDeviceShaderSMBuiltinsFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSMBuiltinsFeaturesNV>::value,
+                          "PhysicalDeviceShaderSMBuiltinsFeaturesNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_image_drm_format_modifier ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT ) == sizeof( VkDrmFormatModifierPropertiesListEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesListEXT>::value,
+                          "DrmFormatModifierPropertiesListEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT ) == sizeof( VkDrmFormatModifierPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesEXT>::value,
+                          "DrmFormatModifierPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT ) ==
+                            sizeof( VkPhysicalDeviceImageDrmFormatModifierInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageDrmFormatModifierInfoEXT>::value,
+                          "PhysicalDeviceImageDrmFormatModifierInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT ) == sizeof( VkImageDrmFormatModifierListCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierListCreateInfoEXT>::value,
+                          "ImageDrmFormatModifierListCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT ) ==
+                            sizeof( VkImageDrmFormatModifierExplicitCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierExplicitCreateInfoEXT>::value,
+                          "ImageDrmFormatModifierExplicitCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT ) == sizeof( VkImageDrmFormatModifierPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageDrmFormatModifierPropertiesEXT>::value,
+                          "ImageDrmFormatModifierPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT ) == sizeof( VkDrmFormatModifierPropertiesList2EXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrmFormatModifierPropertiesList2EXT>::value,
+                          "DrmFormatModifierPropertiesList2EXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT ) == sizeof( VkDrmFormatModifierProperties2EXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrmFormatModifierProperties2EXT>::value,
+                          "DrmFormatModifierProperties2EXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_validation_cache ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationCacheEXT ) == sizeof( VkValidationCacheEXT ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ValidationCacheEXT>::value,
+                          "ValidationCacheEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT ) == sizeof( VkValidationCacheCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ValidationCacheCreateInfoEXT>::value,
+                          "ValidationCacheCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT ) == sizeof( VkShaderModuleValidationCacheCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderModuleValidationCacheCreateInfoEXT>::value,
+                          "ShaderModuleValidationCacheCreateInfoEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+//=== VK_KHR_portability_subset ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR ) == sizeof( VkPhysicalDevicePortabilitySubsetFeaturesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetFeaturesKHR>::value,
+                          "PhysicalDevicePortabilitySubsetFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR ) ==
+                            sizeof( VkPhysicalDevicePortabilitySubsetPropertiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePortabilitySubsetPropertiesKHR>::value,
+                          "PhysicalDevicePortabilitySubsetPropertiesKHR is not nothrow_move_constructible!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+//=== VK_NV_shading_rate_image ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV ) == sizeof( VkShadingRatePaletteNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShadingRatePaletteNV>::value,
+                          "ShadingRatePaletteNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV ) ==
+                            sizeof( VkPipelineViewportShadingRateImageStateCreateInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportShadingRateImageStateCreateInfoNV>::value,
+                          "PipelineViewportShadingRateImageStateCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV ) == sizeof( VkPhysicalDeviceShadingRateImageFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImageFeaturesNV>::value,
+                          "PhysicalDeviceShadingRateImageFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV ) == sizeof( VkPhysicalDeviceShadingRateImagePropertiesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShadingRateImagePropertiesNV>::value,
+                          "PhysicalDeviceShadingRateImagePropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV ) == sizeof( VkCoarseSampleLocationNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CoarseSampleLocationNV>::value,
+                          "CoarseSampleLocationNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV ) == sizeof( VkCoarseSampleOrderCustomNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CoarseSampleOrderCustomNV>::value,
+                          "CoarseSampleOrderCustomNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV ) ==
+                            sizeof( VkPipelineViewportCoarseSampleOrderStateCreateInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportCoarseSampleOrderStateCreateInfoNV>::value,
+                          "PipelineViewportCoarseSampleOrderStateCreateInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_ray_tracing ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV ) == sizeof( VkRayTracingShaderGroupCreateInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingShaderGroupCreateInfoNV>::value,
+                          "RayTracingShaderGroupCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV ) == sizeof( VkRayTracingPipelineCreateInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RayTracingPipelineCreateInfoNV>::value,
+                          "RayTracingPipelineCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryTrianglesNV ) == sizeof( VkGeometryTrianglesNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeometryTrianglesNV>::value,
+                          "GeometryTrianglesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryAABBNV ) == sizeof( VkGeometryAABBNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeometryAABBNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeometryAABBNV>::value,
+                          "GeometryAABBNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryDataNV ) == sizeof( VkGeometryDataNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeometryDataNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeometryDataNV>::value,
+                          "GeometryDataNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeometryNV ) == sizeof( VkGeometryNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeometryNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeometryNV>::value, "GeometryNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV ) == sizeof( VkAccelerationStructureInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureInfoNV>::value,
+                          "AccelerationStructureInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV ) == sizeof( VkAccelerationStructureCreateInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureCreateInfoNV>::value,
+                          "AccelerationStructureCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureNV ) == sizeof( VkAccelerationStructureNV ),
+                          "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureNV>::value,
+                          "AccelerationStructureNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV ) == sizeof( VkBindAccelerationStructureMemoryInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindAccelerationStructureMemoryInfoNV>::value,
+                          "BindAccelerationStructureMemoryInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV ) == sizeof( VkWriteDescriptorSetAccelerationStructureNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::WriteDescriptorSetAccelerationStructureNV>::value,
+                          "WriteDescriptorSetAccelerationStructureNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV ) ==
+                            sizeof( VkAccelerationStructureMemoryRequirementsInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureMemoryRequirementsInfoNV>::value,
+                          "AccelerationStructureMemoryRequirementsInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV ) == sizeof( VkPhysicalDeviceRayTracingPropertiesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPropertiesNV>::value,
+                          "PhysicalDeviceRayTracingPropertiesNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_representative_fragment_test ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV ) ==
+                            sizeof( VkPhysicalDeviceRepresentativeFragmentTestFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRepresentativeFragmentTestFeaturesNV>::value,
+                          "PhysicalDeviceRepresentativeFragmentTestFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV ) ==
+                            sizeof( VkPipelineRepresentativeFragmentTestStateCreateInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRepresentativeFragmentTestStateCreateInfoNV>::value,
+                          "PipelineRepresentativeFragmentTestStateCreateInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_filter_cubic ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT ) == sizeof( VkPhysicalDeviceImageViewImageFormatInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewImageFormatInfoEXT>::value,
+                          "PhysicalDeviceImageViewImageFormatInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT ) ==
+                            sizeof( VkFilterCubicImageViewImageFormatPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FilterCubicImageViewImageFormatPropertiesEXT>::value,
+                          "FilterCubicImageViewImageFormatPropertiesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_external_memory_host ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT ) == sizeof( VkImportMemoryHostPointerInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryHostPointerInfoEXT>::value,
+                          "ImportMemoryHostPointerInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT ) == sizeof( VkMemoryHostPointerPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryHostPointerPropertiesEXT>::value,
+                          "MemoryHostPointerPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT ) ==
+                            sizeof( VkPhysicalDeviceExternalMemoryHostPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryHostPropertiesEXT>::value,
+                          "PhysicalDeviceExternalMemoryHostPropertiesEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_shader_clock ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR ) == sizeof( VkPhysicalDeviceShaderClockFeaturesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderClockFeaturesKHR>::value,
+                          "PhysicalDeviceShaderClockFeaturesKHR is not nothrow_move_constructible!" );
+
+//=== VK_AMD_pipeline_compiler_control ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD ) == sizeof( VkPipelineCompilerControlCreateInfoAMD ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCompilerControlCreateInfoAMD>::value,
+                          "PipelineCompilerControlCreateInfoAMD is not nothrow_move_constructible!" );
+
+//=== VK_EXT_calibrated_timestamps ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT ) == sizeof( VkCalibratedTimestampInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CalibratedTimestampInfoEXT>::value,
+                          "CalibratedTimestampInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_AMD_shader_core_properties ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD ) == sizeof( VkPhysicalDeviceShaderCorePropertiesAMD ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesAMD>::value,
+                          "PhysicalDeviceShaderCorePropertiesAMD is not nothrow_move_constructible!" );
+
+//=== VK_KHR_video_decode_h265 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoKHR ) == sizeof( VkVideoDecodeH265ProfileInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265ProfileInfoKHR>::value,
+                          "VideoDecodeH265ProfileInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesKHR ) == sizeof( VkVideoDecodeH265CapabilitiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265CapabilitiesKHR>::value,
+                          "VideoDecodeH265CapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoKHR ) ==
+                            sizeof( VkVideoDecodeH265SessionParametersCreateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersCreateInfoKHR>::value,
+                          "VideoDecodeH265SessionParametersCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR ) == sizeof( VkVideoDecodeH265SessionParametersAddInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265SessionParametersAddInfoKHR>::value,
+                          "VideoDecodeH265SessionParametersAddInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoKHR ) == sizeof( VkVideoDecodeH265PictureInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265PictureInfoKHR>::value,
+                          "VideoDecodeH265PictureInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoKHR ) == sizeof( VkVideoDecodeH265DpbSlotInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoDecodeH265DpbSlotInfoKHR>::value,
+                          "VideoDecodeH265DpbSlotInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_global_priority ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR ) == sizeof( VkDeviceQueueGlobalPriorityCreateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceQueueGlobalPriorityCreateInfoKHR>::value,
+                          "DeviceQueueGlobalPriorityCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR ) ==
+                            sizeof( VkPhysicalDeviceGlobalPriorityQueryFeaturesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceGlobalPriorityQueryFeaturesKHR>::value,
+                          "PhysicalDeviceGlobalPriorityQueryFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR ) == sizeof( VkQueueFamilyGlobalPriorityPropertiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyGlobalPriorityPropertiesKHR>::value,
+                          "QueueFamilyGlobalPriorityPropertiesKHR is not nothrow_move_constructible!" );
+
+//=== VK_AMD_memory_overallocation_behavior ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD ) == sizeof( VkDeviceMemoryOverallocationCreateInfoAMD ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceMemoryOverallocationCreateInfoAMD>::value,
+                          "DeviceMemoryOverallocationCreateInfoAMD is not nothrow_move_constructible!" );
+
+//=== VK_EXT_vertex_attribute_divisor ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT ) ==
+                            sizeof( VkPhysicalDeviceVertexAttributeDivisorPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorPropertiesEXT>::value,
+                          "PhysicalDeviceVertexAttributeDivisorPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT ) == sizeof( VkVertexInputBindingDivisorDescriptionEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VertexInputBindingDivisorDescriptionEXT>::value,
+                          "VertexInputBindingDivisorDescriptionEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT ) ==
+                            sizeof( VkPipelineVertexInputDivisorStateCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineVertexInputDivisorStateCreateInfoEXT>::value,
+                          "PipelineVertexInputDivisorStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceVertexAttributeDivisorFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexAttributeDivisorFeaturesEXT>::value,
+                          "PhysicalDeviceVertexAttributeDivisorFeaturesEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_GGP )
+//=== VK_GGP_frame_token ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP ) == sizeof( VkPresentFrameTokenGGP ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentFrameTokenGGP>::value,
+                          "PresentFrameTokenGGP is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_GGP*/
+
+//=== VK_NV_compute_shader_derivatives ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV ) ==
+                            sizeof( VkPhysicalDeviceComputeShaderDerivativesFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceComputeShaderDerivativesFeaturesNV>::value,
+                          "PhysicalDeviceComputeShaderDerivativesFeaturesNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_mesh_shader ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesNV>::value,
+                          "PhysicalDeviceMeshShaderFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesNV>::value,
+                          "PhysicalDeviceMeshShaderPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV ) == sizeof( VkDrawMeshTasksIndirectCommandNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandNV>::value,
+                          "DrawMeshTasksIndirectCommandNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_shader_image_footprint ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV ) ==
+                            sizeof( VkPhysicalDeviceShaderImageFootprintFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageFootprintFeaturesNV>::value,
+                          "PhysicalDeviceShaderImageFootprintFeaturesNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_scissor_exclusive ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV ) ==
+                            sizeof( VkPipelineViewportExclusiveScissorStateCreateInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportExclusiveScissorStateCreateInfoNV>::value,
+                          "PipelineViewportExclusiveScissorStateCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV ) == sizeof( VkPhysicalDeviceExclusiveScissorFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExclusiveScissorFeaturesNV>::value,
+                          "PhysicalDeviceExclusiveScissorFeaturesNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_device_diagnostic_checkpoints ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV ) == sizeof( VkQueueFamilyCheckpointPropertiesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointPropertiesNV>::value,
+                          "QueueFamilyCheckpointPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CheckpointDataNV ) == sizeof( VkCheckpointDataNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CheckpointDataNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CheckpointDataNV>::value,
+                          "CheckpointDataNV is not nothrow_move_constructible!" );
+
+//=== VK_INTEL_shader_integer_functions2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ) ==
+                            sizeof( VkPhysicalDeviceShaderIntegerFunctions2FeaturesINTEL ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL>::value,
+                          "PhysicalDeviceShaderIntegerFunctions2FeaturesINTEL is not nothrow_move_constructible!" );
+
+//=== VK_INTEL_performance_query ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL ) == sizeof( VkPerformanceValueDataINTEL ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceValueDataINTEL>::value,
+                          "PerformanceValueDataINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceValueINTEL ) == sizeof( VkPerformanceValueINTEL ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceValueINTEL>::value,
+                          "PerformanceValueINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL ) == sizeof( VkInitializePerformanceApiInfoINTEL ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::InitializePerformanceApiInfoINTEL>::value,
+                          "InitializePerformanceApiInfoINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL ) == sizeof( VkQueryPoolPerformanceQueryCreateInfoINTEL ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueryPoolPerformanceQueryCreateInfoINTEL>::value,
+                          "QueryPoolPerformanceQueryCreateInfoINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL ) == sizeof( VkPerformanceMarkerInfoINTEL ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceMarkerInfoINTEL>::value,
+                          "PerformanceMarkerInfoINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL ) == sizeof( VkPerformanceStreamMarkerInfoINTEL ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceStreamMarkerInfoINTEL>::value,
+                          "PerformanceStreamMarkerInfoINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL ) == sizeof( VkPerformanceOverrideInfoINTEL ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceOverrideInfoINTEL>::value,
+                          "PerformanceOverrideInfoINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL ) == sizeof( VkPerformanceConfigurationAcquireInfoINTEL ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceConfigurationAcquireInfoINTEL>::value,
+                          "PerformanceConfigurationAcquireInfoINTEL is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL ) == sizeof( VkPerformanceConfigurationINTEL ),
+                          "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PerformanceConfigurationINTEL>::value,
+                          "PerformanceConfigurationINTEL is not nothrow_move_constructible!" );
+
+//=== VK_EXT_pci_bus_info ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT ) == sizeof( VkPhysicalDevicePCIBusInfoPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePCIBusInfoPropertiesEXT>::value,
+                          "PhysicalDevicePCIBusInfoPropertiesEXT is not nothrow_move_constructible!" );
+
+//=== VK_AMD_display_native_hdr ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD ) == sizeof( VkDisplayNativeHdrSurfaceCapabilitiesAMD ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DisplayNativeHdrSurfaceCapabilitiesAMD>::value,
+                          "DisplayNativeHdrSurfaceCapabilitiesAMD is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD ) == sizeof( VkSwapchainDisplayNativeHdrCreateInfoAMD ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainDisplayNativeHdrCreateInfoAMD>::value,
+                          "SwapchainDisplayNativeHdrCreateInfoAMD is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+//=== VK_FUCHSIA_imagepipe_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA ) == sizeof( VkImagePipeSurfaceCreateInfoFUCHSIA ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImagePipeSurfaceCreateInfoFUCHSIA>::value,
+                          "ImagePipeSurfaceCreateInfoFUCHSIA is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+//=== VK_EXT_metal_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT ) == sizeof( VkMetalSurfaceCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MetalSurfaceCreateInfoEXT>::value,
+                          "MetalSurfaceCreateInfoEXT is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+//=== VK_EXT_fragment_density_map ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceFragmentDensityMapFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapFeaturesEXT>::value,
+                          "PhysicalDeviceFragmentDensityMapFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT ) ==
+                            sizeof( VkPhysicalDeviceFragmentDensityMapPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapPropertiesEXT>::value,
+                          "PhysicalDeviceFragmentDensityMapPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT ) == sizeof( VkRenderPassFragmentDensityMapCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassFragmentDensityMapCreateInfoEXT>::value,
+                          "RenderPassFragmentDensityMapCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_fragment_shading_rate ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR ) == sizeof( VkFragmentShadingRateAttachmentInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FragmentShadingRateAttachmentInfoKHR>::value,
+                          "FragmentShadingRateAttachmentInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR ) ==
+                            sizeof( VkPipelineFragmentShadingRateStateCreateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateStateCreateInfoKHR>::value,
+                          "PipelineFragmentShadingRateStateCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR ) ==
+                            sizeof( VkPhysicalDeviceFragmentShadingRateFeaturesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateFeaturesKHR>::value,
+                          "PhysicalDeviceFragmentShadingRateFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR ) ==
+                            sizeof( VkPhysicalDeviceFragmentShadingRatePropertiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRatePropertiesKHR>::value,
+                          "PhysicalDeviceFragmentShadingRatePropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR ) == sizeof( VkPhysicalDeviceFragmentShadingRateKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateKHR>::value,
+                          "PhysicalDeviceFragmentShadingRateKHR is not nothrow_move_constructible!" );
+
+//=== VK_AMD_shader_core_properties2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD ) == sizeof( VkPhysicalDeviceShaderCoreProperties2AMD ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreProperties2AMD>::value,
+                          "PhysicalDeviceShaderCoreProperties2AMD is not nothrow_move_constructible!" );
+
+//=== VK_AMD_device_coherent_memory ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD ) == sizeof( VkPhysicalDeviceCoherentMemoryFeaturesAMD ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoherentMemoryFeaturesAMD>::value,
+                          "PhysicalDeviceCoherentMemoryFeaturesAMD is not nothrow_move_constructible!" );
+
+//=== VK_EXT_shader_image_atomic_int64 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceShaderImageAtomicInt64FeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderImageAtomicInt64FeaturesEXT>::value,
+                          "PhysicalDeviceShaderImageAtomicInt64FeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_memory_budget ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT ) == sizeof( VkPhysicalDeviceMemoryBudgetPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryBudgetPropertiesEXT>::value,
+                          "PhysicalDeviceMemoryBudgetPropertiesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_memory_priority ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT ) == sizeof( VkPhysicalDeviceMemoryPriorityFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryPriorityFeaturesEXT>::value,
+                          "PhysicalDeviceMemoryPriorityFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT ) == sizeof( VkMemoryPriorityAllocateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryPriorityAllocateInfoEXT>::value,
+                          "MemoryPriorityAllocateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_surface_protected_capabilities ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR ) == sizeof( VkSurfaceProtectedCapabilitiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceProtectedCapabilitiesKHR>::value,
+                          "SurfaceProtectedCapabilitiesKHR is not nothrow_move_constructible!" );
+
+//=== VK_NV_dedicated_allocation_image_aliasing ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ) ==
+                            sizeof( VkPhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV>::value,
+                          "PhysicalDeviceDedicatedAllocationImageAliasingFeaturesNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_buffer_device_address ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceBufferDeviceAddressFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBufferDeviceAddressFeaturesEXT>::value,
+                          "PhysicalDeviceBufferDeviceAddressFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT ) == sizeof( VkBufferDeviceAddressCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferDeviceAddressCreateInfoEXT>::value,
+                          "BufferDeviceAddressCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_validation_features ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT ) == sizeof( VkValidationFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ValidationFeaturesEXT>::value,
+                          "ValidationFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_present_wait ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR ) == sizeof( VkPhysicalDevicePresentWaitFeaturesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentWaitFeaturesKHR>::value,
+                          "PhysicalDevicePresentWaitFeaturesKHR is not nothrow_move_constructible!" );
+
+//=== VK_NV_cooperative_matrix ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV ) == sizeof( VkCooperativeMatrixPropertiesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesNV>::value,
+                          "CooperativeMatrixPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV ) == sizeof( VkPhysicalDeviceCooperativeMatrixFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesNV>::value,
+                          "PhysicalDeviceCooperativeMatrixFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV ) ==
+                            sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesNV>::value,
+                          "PhysicalDeviceCooperativeMatrixPropertiesNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_coverage_reduction_mode ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV ) ==
+                            sizeof( VkPhysicalDeviceCoverageReductionModeFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCoverageReductionModeFeaturesNV>::value,
+                          "PhysicalDeviceCoverageReductionModeFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV ) == sizeof( VkPipelineCoverageReductionStateCreateInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCoverageReductionStateCreateInfoNV>::value,
+                          "PipelineCoverageReductionStateCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV ) == sizeof( VkFramebufferMixedSamplesCombinationNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FramebufferMixedSamplesCombinationNV>::value,
+                          "FramebufferMixedSamplesCombinationNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_fragment_shader_interlock ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceFragmentShaderInterlockFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderInterlockFeaturesEXT>::value,
+                          "PhysicalDeviceFragmentShaderInterlockFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_ycbcr_image_arrays ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT ) == sizeof( VkPhysicalDeviceYcbcrImageArraysFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrImageArraysFeaturesEXT>::value,
+                          "PhysicalDeviceYcbcrImageArraysFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_provoking_vertex ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT ) == sizeof( VkPhysicalDeviceProvokingVertexFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexFeaturesEXT>::value,
+                          "PhysicalDeviceProvokingVertexFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT ) == sizeof( VkPhysicalDeviceProvokingVertexPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceProvokingVertexPropertiesEXT>::value,
+                          "PhysicalDeviceProvokingVertexPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT ) ==
+                            sizeof( VkPipelineRasterizationProvokingVertexStateCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationProvokingVertexStateCreateInfoEXT>::value,
+                          "PipelineRasterizationProvokingVertexStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_WIN32_KHR )
+//=== VK_EXT_full_screen_exclusive ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveInfoEXT>::value,
+                          "SurfaceFullScreenExclusiveInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT ) == sizeof( VkSurfaceCapabilitiesFullScreenExclusiveEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesFullScreenExclusiveEXT>::value,
+                          "SurfaceCapabilitiesFullScreenExclusiveEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT ) == sizeof( VkSurfaceFullScreenExclusiveWin32InfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceFullScreenExclusiveWin32InfoEXT>::value,
+                          "SurfaceFullScreenExclusiveWin32InfoEXT is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_WIN32_KHR*/
+
+//=== VK_EXT_headless_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT ) == sizeof( VkHeadlessSurfaceCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::HeadlessSurfaceCreateInfoEXT>::value,
+                          "HeadlessSurfaceCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_line_rasterization ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT ) == sizeof( VkPhysicalDeviceLineRasterizationFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationFeaturesEXT>::value,
+                          "PhysicalDeviceLineRasterizationFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT ) ==
+                            sizeof( VkPhysicalDeviceLineRasterizationPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLineRasterizationPropertiesEXT>::value,
+                          "PhysicalDeviceLineRasterizationPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT ) ==
+                            sizeof( VkPipelineRasterizationLineStateCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineRasterizationLineStateCreateInfoEXT>::value,
+                          "PipelineRasterizationLineStateCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_shader_atomic_float ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderAtomicFloatFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloatFeaturesEXT>::value,
+                          "PhysicalDeviceShaderAtomicFloatFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_index_type_uint8 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT ) == sizeof( VkPhysicalDeviceIndexTypeUint8FeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceIndexTypeUint8FeaturesEXT>::value,
+                          "PhysicalDeviceIndexTypeUint8FeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_extended_dynamic_state ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceExtendedDynamicStateFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicStateFeaturesEXT>::value,
+                          "PhysicalDeviceExtendedDynamicStateFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_deferred_host_operations ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeferredOperationKHR ) == sizeof( VkDeferredOperationKHR ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeferredOperationKHR>::value,
+                          "DeferredOperationKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_pipeline_executable_properties ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR ) ==
+                            sizeof( VkPhysicalDevicePipelineExecutablePropertiesFeaturesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineExecutablePropertiesFeaturesKHR>::value,
+                          "PhysicalDevicePipelineExecutablePropertiesFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineInfoKHR ) == sizeof( VkPipelineInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineInfoKHR>::value,
+                          "PipelineInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR ) == sizeof( VkPipelineExecutablePropertiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineExecutablePropertiesKHR>::value,
+                          "PipelineExecutablePropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR ) == sizeof( VkPipelineExecutableInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineExecutableInfoKHR>::value,
+                          "PipelineExecutableInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR ) == sizeof( VkPipelineExecutableStatisticValueKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticValueKHR>::value,
+                          "PipelineExecutableStatisticValueKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR ) == sizeof( VkPipelineExecutableStatisticKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineExecutableStatisticKHR>::value,
+                          "PipelineExecutableStatisticKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR ) ==
+                            sizeof( VkPipelineExecutableInternalRepresentationKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineExecutableInternalRepresentationKHR>::value,
+                          "PipelineExecutableInternalRepresentationKHR is not nothrow_move_constructible!" );
+
+//=== VK_EXT_host_image_copy ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeaturesEXT ) == sizeof( VkPhysicalDeviceHostImageCopyFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyFeaturesEXT>::value,
+                          "PhysicalDeviceHostImageCopyFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyPropertiesEXT ) == sizeof( VkPhysicalDeviceHostImageCopyPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceHostImageCopyPropertiesEXT>::value,
+                          "PhysicalDeviceHostImageCopyPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT ) == sizeof( VkMemoryToImageCopyEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryToImageCopyEXT>::value,
+                          "MemoryToImageCopyEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT ) == sizeof( VkImageToMemoryCopyEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageToMemoryCopyEXT>::value,
+                          "ImageToMemoryCopyEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT ) == sizeof( VkCopyMemoryToImageInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMemoryToImageInfoEXT>::value,
+                          "CopyMemoryToImageInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT ) == sizeof( VkCopyImageToMemoryInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyImageToMemoryInfoEXT>::value,
+                          "CopyImageToMemoryInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT ) == sizeof( VkCopyImageToImageInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyImageToImageInfoEXT>::value,
+                          "CopyImageToImageInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT ) == sizeof( VkHostImageLayoutTransitionInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::HostImageLayoutTransitionInfoEXT>::value,
+                          "HostImageLayoutTransitionInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySizeEXT ) == sizeof( VkSubresourceHostMemcpySizeEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySizeEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubresourceHostMemcpySizeEXT>::value,
+                          "SubresourceHostMemcpySizeEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT ) == sizeof( VkHostImageCopyDevicePerformanceQueryEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::HostImageCopyDevicePerformanceQueryEXT>::value,
+                          "HostImageCopyDevicePerformanceQueryEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_map_memory2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR ) == sizeof( VkMemoryMapInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryMapInfoKHR>::value,
+                          "MemoryMapInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR ) == sizeof( VkMemoryUnmapInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryUnmapInfoKHR>::value,
+                          "MemoryUnmapInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_EXT_shader_atomic_float2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceShaderAtomicFloat2FeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderAtomicFloat2FeaturesEXT>::value,
+                          "PhysicalDeviceShaderAtomicFloat2FeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_surface_maintenance1 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfacePresentModeEXT ) == sizeof( VkSurfacePresentModeEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfacePresentModeEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfacePresentModeEXT>::value,
+                          "SurfacePresentModeEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfacePresentScalingCapabilitiesEXT ) == sizeof( VkSurfacePresentScalingCapabilitiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfacePresentScalingCapabilitiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfacePresentScalingCapabilitiesEXT>::value,
+                          "SurfacePresentScalingCapabilitiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfacePresentModeCompatibilityEXT ) == sizeof( VkSurfacePresentModeCompatibilityEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfacePresentModeCompatibilityEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfacePresentModeCompatibilityEXT>::value,
+                          "SurfacePresentModeCompatibilityEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_swapchain_maintenance1 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSwapchainMaintenance1FeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceSwapchainMaintenance1FeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSwapchainMaintenance1FeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSwapchainMaintenance1FeaturesEXT>::value,
+                          "PhysicalDeviceSwapchainMaintenance1FeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainPresentFenceInfoEXT ) == sizeof( VkSwapchainPresentFenceInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainPresentFenceInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainPresentFenceInfoEXT>::value,
+                          "SwapchainPresentFenceInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainPresentModesCreateInfoEXT ) == sizeof( VkSwapchainPresentModesCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainPresentModesCreateInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainPresentModesCreateInfoEXT>::value,
+                          "SwapchainPresentModesCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainPresentModeInfoEXT ) == sizeof( VkSwapchainPresentModeInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainPresentModeInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainPresentModeInfoEXT>::value,
+                          "SwapchainPresentModeInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainPresentScalingCreateInfoEXT ) == sizeof( VkSwapchainPresentScalingCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainPresentScalingCreateInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainPresentScalingCreateInfoEXT>::value,
+                          "SwapchainPresentScalingCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT ) == sizeof( VkReleaseSwapchainImagesInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ReleaseSwapchainImagesInfoEXT>::value,
+                          "ReleaseSwapchainImagesInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_NV_device_generated_commands ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV ) ==
+                            sizeof( VkPhysicalDeviceDeviceGeneratedCommandsPropertiesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsPropertiesNV>::value,
+                          "PhysicalDeviceDeviceGeneratedCommandsPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV ) ==
+                            sizeof( VkPhysicalDeviceDeviceGeneratedCommandsFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsFeaturesNV>::value,
+                          "PhysicalDeviceDeviceGeneratedCommandsFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV ) == sizeof( VkGraphicsShaderGroupCreateInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GraphicsShaderGroupCreateInfoNV>::value,
+                          "GraphicsShaderGroupCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV ) == sizeof( VkGraphicsPipelineShaderGroupsCreateInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GraphicsPipelineShaderGroupsCreateInfoNV>::value,
+                          "GraphicsPipelineShaderGroupsCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV ) == sizeof( VkBindShaderGroupIndirectCommandNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindShaderGroupIndirectCommandNV>::value,
+                          "BindShaderGroupIndirectCommandNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV ) == sizeof( VkBindIndexBufferIndirectCommandNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindIndexBufferIndirectCommandNV>::value,
+                          "BindIndexBufferIndirectCommandNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV ) == sizeof( VkBindVertexBufferIndirectCommandNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindVertexBufferIndirectCommandNV>::value,
+                          "BindVertexBufferIndirectCommandNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV ) == sizeof( VkSetStateFlagsIndirectCommandNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SetStateFlagsIndirectCommandNV>::value,
+                          "SetStateFlagsIndirectCommandNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV ) == sizeof( VkIndirectCommandsLayoutNV ),
+                          "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutNV>::value,
+                          "IndirectCommandsLayoutNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV ) == sizeof( VkIndirectCommandsStreamNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IndirectCommandsStreamNV>::value,
+                          "IndirectCommandsStreamNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV ) == sizeof( VkIndirectCommandsLayoutTokenNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutTokenNV>::value,
+                          "IndirectCommandsLayoutTokenNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV ) == sizeof( VkIndirectCommandsLayoutCreateInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::IndirectCommandsLayoutCreateInfoNV>::value,
+                          "IndirectCommandsLayoutCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV ) == sizeof( VkGeneratedCommandsInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeneratedCommandsInfoNV>::value,
+                          "GeneratedCommandsInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV ) == sizeof( VkGeneratedCommandsMemoryRequirementsInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GeneratedCommandsMemoryRequirementsInfoNV>::value,
+                          "GeneratedCommandsMemoryRequirementsInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_inherited_viewport_scissor ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV ) ==
+                            sizeof( VkPhysicalDeviceInheritedViewportScissorFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceInheritedViewportScissorFeaturesNV>::value,
+                          "PhysicalDeviceInheritedViewportScissorFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV ) ==
+                            sizeof( VkCommandBufferInheritanceViewportScissorInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceViewportScissorInfoNV>::value,
+                          "CommandBufferInheritanceViewportScissorInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_texel_buffer_alignment ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceTexelBufferAlignmentFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTexelBufferAlignmentFeaturesEXT>::value,
+                          "PhysicalDeviceTexelBufferAlignmentFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_QCOM_render_pass_transform ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM ) == sizeof( VkRenderPassTransformBeginInfoQCOM ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassTransformBeginInfoQCOM>::value,
+                          "RenderPassTransformBeginInfoQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM ) ==
+                            sizeof( VkCommandBufferInheritanceRenderPassTransformInfoQCOM ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CommandBufferInheritanceRenderPassTransformInfoQCOM>::value,
+                          "CommandBufferInheritanceRenderPassTransformInfoQCOM is not nothrow_move_constructible!" );
+
+//=== VK_EXT_depth_bias_control ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthBiasControlFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthBiasControlFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthBiasControlFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthBiasControlFeaturesEXT>::value,
+                          "PhysicalDeviceDepthBiasControlFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT ) == sizeof( VkDepthBiasInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DepthBiasInfoEXT>::value,
+                          "DepthBiasInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DepthBiasRepresentationInfoEXT ) == sizeof( VkDepthBiasRepresentationInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DepthBiasRepresentationInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DepthBiasRepresentationInfoEXT>::value,
+                          "DepthBiasRepresentationInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_device_memory_report ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceDeviceMemoryReportFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceMemoryReportFeaturesEXT>::value,
+                          "PhysicalDeviceDeviceMemoryReportFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT ) == sizeof( VkDeviceDeviceMemoryReportCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceDeviceMemoryReportCreateInfoEXT>::value,
+                          "DeviceDeviceMemoryReportCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT ) == sizeof( VkDeviceMemoryReportCallbackDataEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceMemoryReportCallbackDataEXT>::value,
+                          "DeviceMemoryReportCallbackDataEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_robustness2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT ) == sizeof( VkPhysicalDeviceRobustness2FeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2FeaturesEXT>::value,
+                          "PhysicalDeviceRobustness2FeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT ) == sizeof( VkPhysicalDeviceRobustness2PropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRobustness2PropertiesEXT>::value,
+                          "PhysicalDeviceRobustness2PropertiesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_custom_border_color ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT ) == sizeof( VkSamplerCustomBorderColorCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerCustomBorderColorCreateInfoEXT>::value,
+                          "SamplerCustomBorderColorCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT ) ==
+                            sizeof( VkPhysicalDeviceCustomBorderColorPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorPropertiesEXT>::value,
+                          "PhysicalDeviceCustomBorderColorPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT ) == sizeof( VkPhysicalDeviceCustomBorderColorFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCustomBorderColorFeaturesEXT>::value,
+                          "PhysicalDeviceCustomBorderColorFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_pipeline_library ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR ) == sizeof( VkPipelineLibraryCreateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineLibraryCreateInfoKHR>::value,
+                          "PipelineLibraryCreateInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_NV_present_barrier ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV ) == sizeof( VkPhysicalDevicePresentBarrierFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentBarrierFeaturesNV>::value,
+                          "PhysicalDevicePresentBarrierFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV ) == sizeof( VkSurfaceCapabilitiesPresentBarrierNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SurfaceCapabilitiesPresentBarrierNV>::value,
+                          "SurfaceCapabilitiesPresentBarrierNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV ) == sizeof( VkSwapchainPresentBarrierCreateInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainPresentBarrierCreateInfoNV>::value,
+                          "SwapchainPresentBarrierCreateInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_KHR_present_id ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PresentIdKHR ) == sizeof( VkPresentIdKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PresentIdKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PresentIdKHR>::value, "PresentIdKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR ) == sizeof( VkPhysicalDevicePresentIdFeaturesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePresentIdFeaturesKHR>::value,
+                          "PhysicalDevicePresentIdFeaturesKHR is not nothrow_move_constructible!" );
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+//=== VK_KHR_video_encode_queue ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR ) == sizeof( VkVideoEncodeInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeInfoKHR>::value,
+                          "VideoEncodeInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR ) == sizeof( VkVideoEncodeCapabilitiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeCapabilitiesKHR>::value,
+                          "VideoEncodeCapabilitiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryPoolVideoEncodeFeedbackCreateInfoKHR ) == sizeof( VkQueryPoolVideoEncodeFeedbackCreateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueryPoolVideoEncodeFeedbackCreateInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueryPoolVideoEncodeFeedbackCreateInfoKHR>::value,
+                          "QueryPoolVideoEncodeFeedbackCreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeUsageInfoKHR ) == sizeof( VkVideoEncodeUsageInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeUsageInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeUsageInfoKHR>::value,
+                          "VideoEncodeUsageInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR ) == sizeof( VkVideoEncodeRateControlInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlInfoKHR>::value,
+                          "VideoEncodeRateControlInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR ) == sizeof( VkVideoEncodeRateControlLayerInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeRateControlLayerInfoKHR>::value,
+                          "VideoEncodeRateControlLayerInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR ) ==
+                            sizeof( VkPhysicalDeviceVideoEncodeQualityLevelInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVideoEncodeQualityLevelInfoKHR>::value,
+                          "PhysicalDeviceVideoEncodeQualityLevelInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR ) == sizeof( VkVideoEncodeQualityLevelPropertiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelPropertiesKHR>::value,
+                          "VideoEncodeQualityLevelPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelInfoKHR ) == sizeof( VkVideoEncodeQualityLevelInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeQualityLevelInfoKHR>::value,
+                          "VideoEncodeQualityLevelInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR ) == sizeof( VkVideoEncodeSessionParametersGetInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersGetInfoKHR>::value,
+                          "VideoEncodeSessionParametersGetInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR ) ==
+                            sizeof( VkVideoEncodeSessionParametersFeedbackInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VideoEncodeSessionParametersFeedbackInfoKHR>::value,
+                          "VideoEncodeSessionParametersFeedbackInfoKHR is not nothrow_move_constructible!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+//=== VK_NV_device_diagnostics_config ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV ) == sizeof( VkPhysicalDeviceDiagnosticsConfigFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDiagnosticsConfigFeaturesNV>::value,
+                          "PhysicalDeviceDiagnosticsConfigFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV ) == sizeof( VkDeviceDiagnosticsConfigCreateInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceDiagnosticsConfigCreateInfoNV>::value,
+                          "DeviceDiagnosticsConfigCreateInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_low_latency ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueryLowLatencySupportNV ) == sizeof( VkQueryLowLatencySupportNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueryLowLatencySupportNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueryLowLatencySupportNV>::value,
+                          "QueryLowLatencySupportNV is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_METAL_EXT )
+//=== VK_EXT_metal_objects ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalObjectCreateInfoEXT ) == sizeof( VkExportMetalObjectCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalObjectCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalObjectCreateInfoEXT>::value,
+                          "ExportMetalObjectCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT ) == sizeof( VkExportMetalObjectsInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalObjectsInfoEXT>::value,
+                          "ExportMetalObjectsInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalDeviceInfoEXT ) == sizeof( VkExportMetalDeviceInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalDeviceInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalDeviceInfoEXT>::value,
+                          "ExportMetalDeviceInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalCommandQueueInfoEXT ) == sizeof( VkExportMetalCommandQueueInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalCommandQueueInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalCommandQueueInfoEXT>::value,
+                          "ExportMetalCommandQueueInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalBufferInfoEXT ) == sizeof( VkExportMetalBufferInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalBufferInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalBufferInfoEXT>::value,
+                          "ExportMetalBufferInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMetalBufferInfoEXT ) == sizeof( VkImportMetalBufferInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMetalBufferInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMetalBufferInfoEXT>::value,
+                          "ImportMetalBufferInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalTextureInfoEXT ) == sizeof( VkExportMetalTextureInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalTextureInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalTextureInfoEXT>::value,
+                          "ExportMetalTextureInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMetalTextureInfoEXT ) == sizeof( VkImportMetalTextureInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMetalTextureInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMetalTextureInfoEXT>::value,
+                          "ImportMetalTextureInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalIOSurfaceInfoEXT ) == sizeof( VkExportMetalIOSurfaceInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalIOSurfaceInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalIOSurfaceInfoEXT>::value,
+                          "ExportMetalIOSurfaceInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMetalIOSurfaceInfoEXT ) == sizeof( VkImportMetalIOSurfaceInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMetalIOSurfaceInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMetalIOSurfaceInfoEXT>::value,
+                          "ImportMetalIOSurfaceInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExportMetalSharedEventInfoEXT ) == sizeof( VkExportMetalSharedEventInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExportMetalSharedEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExportMetalSharedEventInfoEXT>::value,
+                          "ExportMetalSharedEventInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMetalSharedEventInfoEXT ) == sizeof( VkImportMetalSharedEventInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMetalSharedEventInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMetalSharedEventInfoEXT>::value,
+                          "ImportMetalSharedEventInfoEXT is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_METAL_EXT*/
+
+//=== VK_KHR_synchronization2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV ) == sizeof( VkQueueFamilyCheckpointProperties2NV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::QueueFamilyCheckpointProperties2NV>::value,
+                          "QueueFamilyCheckpointProperties2NV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CheckpointData2NV ) == sizeof( VkCheckpointData2NV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CheckpointData2NV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CheckpointData2NV>::value,
+                          "CheckpointData2NV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_descriptor_buffer ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferPropertiesEXT ) ==
+                            sizeof( VkPhysicalDeviceDescriptorBufferPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferPropertiesEXT>::value,
+                          "PhysicalDeviceDescriptorBufferPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT ) ==
+                            sizeof( VkPhysicalDeviceDescriptorBufferDensityMapPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT>::value,
+                          "PhysicalDeviceDescriptorBufferDensityMapPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferFeaturesEXT ) == sizeof( VkPhysicalDeviceDescriptorBufferFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorBufferFeaturesEXT>::value,
+                          "PhysicalDeviceDescriptorBufferFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT ) == sizeof( VkDescriptorAddressInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorAddressInfoEXT>::value,
+                          "DescriptorAddressInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT ) == sizeof( VkDescriptorBufferBindingInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorBufferBindingInfoEXT>::value,
+                          "DescriptorBufferBindingInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorBufferBindingPushDescriptorBufferHandleEXT ) ==
+                            sizeof( VkDescriptorBufferBindingPushDescriptorBufferHandleEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorBufferBindingPushDescriptorBufferHandleEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorBufferBindingPushDescriptorBufferHandleEXT>::value,
+                          "DescriptorBufferBindingPushDescriptorBufferHandleEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorDataEXT ) == sizeof( VkDescriptorDataEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorDataEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorDataEXT>::value,
+                          "DescriptorDataEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT ) == sizeof( VkDescriptorGetInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorGetInfoEXT>::value,
+                          "DescriptorGetInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT ) == sizeof( VkBufferCaptureDescriptorDataInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCaptureDescriptorDataInfoEXT>::value,
+                          "BufferCaptureDescriptorDataInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT ) == sizeof( VkImageCaptureDescriptorDataInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageCaptureDescriptorDataInfoEXT>::value,
+                          "ImageCaptureDescriptorDataInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT ) == sizeof( VkImageViewCaptureDescriptorDataInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewCaptureDescriptorDataInfoEXT>::value,
+                          "ImageViewCaptureDescriptorDataInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT ) == sizeof( VkSamplerCaptureDescriptorDataInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerCaptureDescriptorDataInfoEXT>::value,
+                          "SamplerCaptureDescriptorDataInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpaqueCaptureDescriptorDataCreateInfoEXT ) == sizeof( VkOpaqueCaptureDescriptorDataCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::OpaqueCaptureDescriptorDataCreateInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::OpaqueCaptureDescriptorDataCreateInfoEXT>::value,
+                          "OpaqueCaptureDescriptorDataCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT ) ==
+                            sizeof( VkAccelerationStructureCaptureDescriptorDataInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureCaptureDescriptorDataInfoEXT>::value,
+                          "AccelerationStructureCaptureDescriptorDataInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_graphics_pipeline_library ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceGraphicsPipelineLibraryFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT>::value,
+                          "PhysicalDeviceGraphicsPipelineLibraryFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT ) ==
+                            sizeof( VkPhysicalDeviceGraphicsPipelineLibraryPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT>::value,
+                          "PhysicalDeviceGraphicsPipelineLibraryPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT ) == sizeof( VkGraphicsPipelineLibraryCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GraphicsPipelineLibraryCreateInfoEXT>::value,
+                          "GraphicsPipelineLibraryCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_AMD_shader_early_and_late_fragment_tests ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD ) ==
+                            sizeof( VkPhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD>::value,
+                          "PhysicalDeviceShaderEarlyAndLateFragmentTestsFeaturesAMD is not nothrow_move_constructible!" );
+
+//=== VK_KHR_fragment_shader_barycentric ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR ) ==
+                            sizeof( VkPhysicalDeviceFragmentShaderBarycentricFeaturesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricFeaturesKHR>::value,
+                          "PhysicalDeviceFragmentShaderBarycentricFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricPropertiesKHR ) ==
+                            sizeof( VkPhysicalDeviceFragmentShaderBarycentricPropertiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricPropertiesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShaderBarycentricPropertiesKHR>::value,
+                          "PhysicalDeviceFragmentShaderBarycentricPropertiesKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_shader_subgroup_uniform_control_flow ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR ) ==
+                            sizeof( VkPhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR>::value,
+                          "PhysicalDeviceShaderSubgroupUniformControlFlowFeaturesKHR is not nothrow_move_constructible!" );
+
+//=== VK_NV_fragment_shading_rate_enums ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV ) ==
+                            sizeof( VkPhysicalDeviceFragmentShadingRateEnumsFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsFeaturesNV>::value,
+                          "PhysicalDeviceFragmentShadingRateEnumsFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV ) ==
+                            sizeof( VkPhysicalDeviceFragmentShadingRateEnumsPropertiesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentShadingRateEnumsPropertiesNV>::value,
+                          "PhysicalDeviceFragmentShadingRateEnumsPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV ) ==
+                            sizeof( VkPipelineFragmentShadingRateEnumStateCreateInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineFragmentShadingRateEnumStateCreateInfoNV>::value,
+                          "PipelineFragmentShadingRateEnumStateCreateInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_ray_tracing_motion_blur ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryMotionTrianglesDataNV ) ==
+                            sizeof( VkAccelerationStructureGeometryMotionTrianglesDataNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryMotionTrianglesDataNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureGeometryMotionTrianglesDataNV>::value,
+                          "AccelerationStructureGeometryMotionTrianglesDataNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV ) == sizeof( VkAccelerationStructureMotionInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInfoNV>::value,
+                          "AccelerationStructureMotionInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV ) == sizeof( VkAccelerationStructureMotionInstanceNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceNV>::value,
+                          "AccelerationStructureMotionInstanceNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV ) == sizeof( VkAccelerationStructureMotionInstanceDataNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureMotionInstanceDataNV>::value,
+                          "AccelerationStructureMotionInstanceDataNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV ) ==
+                            sizeof( VkAccelerationStructureMatrixMotionInstanceNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureMatrixMotionInstanceNV>::value,
+                          "AccelerationStructureMatrixMotionInstanceNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV ) == sizeof( VkAccelerationStructureSRTMotionInstanceNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureSRTMotionInstanceNV>::value,
+                          "AccelerationStructureSRTMotionInstanceNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SRTDataNV ) == sizeof( VkSRTDataNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SRTDataNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SRTDataNV>::value, "SRTDataNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV ) ==
+                            sizeof( VkPhysicalDeviceRayTracingMotionBlurFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMotionBlurFeaturesNV>::value,
+                          "PhysicalDeviceRayTracingMotionBlurFeaturesNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_mesh_shader ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesEXT ) == sizeof( VkPhysicalDeviceMeshShaderFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderFeaturesEXT>::value,
+                          "PhysicalDeviceMeshShaderFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesEXT ) == sizeof( VkPhysicalDeviceMeshShaderPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMeshShaderPropertiesEXT>::value,
+                          "PhysicalDeviceMeshShaderPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandEXT ) == sizeof( VkDrawMeshTasksIndirectCommandEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DrawMeshTasksIndirectCommandEXT>::value,
+                          "DrawMeshTasksIndirectCommandEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_ycbcr_2plane_444_formats ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT>::value,
+                          "PhysicalDeviceYcbcr2Plane444FormatsFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_fragment_density_map2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceFragmentDensityMap2FeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2FeaturesEXT>::value,
+                          "PhysicalDeviceFragmentDensityMap2FeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT ) ==
+                            sizeof( VkPhysicalDeviceFragmentDensityMap2PropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMap2PropertiesEXT>::value,
+                          "PhysicalDeviceFragmentDensityMap2PropertiesEXT is not nothrow_move_constructible!" );
+
+//=== VK_QCOM_rotated_copy_commands ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM ) == sizeof( VkCopyCommandTransformInfoQCOM ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyCommandTransformInfoQCOM>::value,
+                          "CopyCommandTransformInfoQCOM is not nothrow_move_constructible!" );
+
+//=== VK_KHR_workgroup_memory_explicit_layout ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR ) ==
+                            sizeof( VkPhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR>::value,
+                          "PhysicalDeviceWorkgroupMemoryExplicitLayoutFeaturesKHR is not nothrow_move_constructible!" );
+
+//=== VK_EXT_image_compression_control ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlFeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceImageCompressionControlFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlFeaturesEXT>::value,
+                          "PhysicalDeviceImageCompressionControlFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCompressionControlEXT ) == sizeof( VkImageCompressionControlEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCompressionControlEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageCompressionControlEXT>::value,
+                          "ImageCompressionControlEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT ) == sizeof( VkImageCompressionPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageCompressionPropertiesEXT>::value,
+                          "ImageCompressionPropertiesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_attachment_feedback_loop_layout ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT>::value,
+                          "PhysicalDeviceAttachmentFeedbackLoopLayoutFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_4444_formats ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT ) == sizeof( VkPhysicalDevice4444FormatsFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevice4444FormatsFeaturesEXT>::value,
+                          "PhysicalDevice4444FormatsFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_device_fault ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT ) == sizeof( VkPhysicalDeviceFaultFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFaultFeaturesEXT>::value,
+                          "PhysicalDeviceFaultFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT ) == sizeof( VkDeviceFaultCountsEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceFaultCountsEXT>::value,
+                          "DeviceFaultCountsEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT ) == sizeof( VkDeviceFaultInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceFaultInfoEXT>::value,
+                          "DeviceFaultInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT ) == sizeof( VkDeviceFaultAddressInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceFaultAddressInfoEXT>::value,
+                          "DeviceFaultAddressInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT ) == sizeof( VkDeviceFaultVendorInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceFaultVendorInfoEXT>::value,
+                          "DeviceFaultVendorInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT ) == sizeof( VkDeviceFaultVendorBinaryHeaderVersionOneEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceFaultVendorBinaryHeaderVersionOneEXT>::value,
+                          "DeviceFaultVendorBinaryHeaderVersionOneEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_rgba10x6_formats ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT ) == sizeof( VkPhysicalDeviceRGBA10X6FormatsFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRGBA10X6FormatsFeaturesEXT>::value,
+                          "PhysicalDeviceRGBA10X6FormatsFeaturesEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_DIRECTFB_EXT )
+//=== VK_EXT_directfb_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT ) == sizeof( VkDirectFBSurfaceCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DirectFBSurfaceCreateInfoEXT>::value,
+                          "DirectFBSurfaceCreateInfoEXT is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_DIRECTFB_EXT*/
+
+//=== VK_EXT_vertex_input_dynamic_state ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceVertexInputDynamicStateFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceVertexInputDynamicStateFeaturesEXT>::value,
+                          "PhysicalDeviceVertexInputDynamicStateFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT ) == sizeof( VkVertexInputBindingDescription2EXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VertexInputBindingDescription2EXT>::value,
+                          "VertexInputBindingDescription2EXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT ) == sizeof( VkVertexInputAttributeDescription2EXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::VertexInputAttributeDescription2EXT>::value,
+                          "VertexInputAttributeDescription2EXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_physical_device_drm ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT ) == sizeof( VkPhysicalDeviceDrmPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDrmPropertiesEXT>::value,
+                          "PhysicalDeviceDrmPropertiesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_device_address_binding_report ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceAddressBindingReportFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceAddressBindingReportFeaturesEXT>::value,
+                          "PhysicalDeviceAddressBindingReportFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT ) == sizeof( VkDeviceAddressBindingCallbackDataEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceAddressBindingCallbackDataEXT>::value,
+                          "DeviceAddressBindingCallbackDataEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_depth_clip_control ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClipControlFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClipControlFeaturesEXT>::value,
+                          "PhysicalDeviceDepthClipControlFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT ) ==
+                            sizeof( VkPipelineViewportDepthClipControlCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineViewportDepthClipControlCreateInfoEXT>::value,
+                          "PipelineViewportDepthClipControlCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_primitive_topology_list_restart ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT ) ==
+                            sizeof( VkPhysicalDevicePrimitiveTopologyListRestartFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT>::value,
+                          "PhysicalDevicePrimitiveTopologyListRestartFeaturesEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+//=== VK_FUCHSIA_external_memory ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA ) == sizeof( VkImportMemoryZirconHandleInfoFUCHSIA ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryZirconHandleInfoFUCHSIA>::value,
+                          "ImportMemoryZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA ) == sizeof( VkMemoryZirconHandlePropertiesFUCHSIA ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryZirconHandlePropertiesFUCHSIA>::value,
+                          "MemoryZirconHandlePropertiesFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA ) == sizeof( VkMemoryGetZirconHandleInfoFUCHSIA ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetZirconHandleInfoFUCHSIA>::value,
+                          "MemoryGetZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+//=== VK_FUCHSIA_external_semaphore ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA ) == sizeof( VkImportSemaphoreZirconHandleInfoFUCHSIA ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportSemaphoreZirconHandleInfoFUCHSIA>::value,
+                          "ImportSemaphoreZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA ) == sizeof( VkSemaphoreGetZirconHandleInfoFUCHSIA ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SemaphoreGetZirconHandleInfoFUCHSIA>::value,
+                          "SemaphoreGetZirconHandleInfoFUCHSIA is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+#if defined( VK_USE_PLATFORM_FUCHSIA )
+//=== VK_FUCHSIA_buffer_collection ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA ) == sizeof( VkBufferCollectionFUCHSIA ),
+                          "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionFUCHSIA>::value,
+                          "BufferCollectionFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA ) == sizeof( VkBufferCollectionCreateInfoFUCHSIA ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionCreateInfoFUCHSIA>::value,
+                          "BufferCollectionCreateInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA ) == sizeof( VkImportMemoryBufferCollectionFUCHSIA ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportMemoryBufferCollectionFUCHSIA>::value,
+                          "ImportMemoryBufferCollectionFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA ) == sizeof( VkBufferCollectionImageCreateInfoFUCHSIA ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionImageCreateInfoFUCHSIA>::value,
+                          "BufferCollectionImageCreateInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA ) == sizeof( VkBufferConstraintsInfoFUCHSIA ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferConstraintsInfoFUCHSIA>::value,
+                          "BufferConstraintsInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA ) == sizeof( VkBufferCollectionBufferCreateInfoFUCHSIA ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionBufferCreateInfoFUCHSIA>::value,
+                          "BufferCollectionBufferCreateInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA ) == sizeof( VkBufferCollectionPropertiesFUCHSIA ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionPropertiesFUCHSIA>::value,
+                          "BufferCollectionPropertiesFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA ) == sizeof( VkSysmemColorSpaceFUCHSIA ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SysmemColorSpaceFUCHSIA>::value,
+                          "SysmemColorSpaceFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA ) == sizeof( VkImageConstraintsInfoFUCHSIA ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageConstraintsInfoFUCHSIA>::value,
+                          "ImageConstraintsInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA ) == sizeof( VkImageFormatConstraintsInfoFUCHSIA ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageFormatConstraintsInfoFUCHSIA>::value,
+                          "ImageFormatConstraintsInfoFUCHSIA is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA ) == sizeof( VkBufferCollectionConstraintsInfoFUCHSIA ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferCollectionConstraintsInfoFUCHSIA>::value,
+                          "BufferCollectionConstraintsInfoFUCHSIA is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_FUCHSIA*/
+
+//=== VK_HUAWEI_subpass_shading ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI ) == sizeof( VkSubpassShadingPipelineCreateInfoHUAWEI ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassShadingPipelineCreateInfoHUAWEI>::value,
+                          "SubpassShadingPipelineCreateInfoHUAWEI is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI ) == sizeof( VkPhysicalDeviceSubpassShadingFeaturesHUAWEI ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingFeaturesHUAWEI>::value,
+                          "PhysicalDeviceSubpassShadingFeaturesHUAWEI is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI ) ==
+                            sizeof( VkPhysicalDeviceSubpassShadingPropertiesHUAWEI ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassShadingPropertiesHUAWEI>::value,
+                          "PhysicalDeviceSubpassShadingPropertiesHUAWEI is not nothrow_move_constructible!" );
+
+//=== VK_HUAWEI_invocation_mask ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI ) == sizeof( VkPhysicalDeviceInvocationMaskFeaturesHUAWEI ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceInvocationMaskFeaturesHUAWEI>::value,
+                          "PhysicalDeviceInvocationMaskFeaturesHUAWEI is not nothrow_move_constructible!" );
+
+//=== VK_NV_external_memory_rdma ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV ) == sizeof( VkMemoryGetRemoteAddressInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MemoryGetRemoteAddressInfoNV>::value,
+                          "MemoryGetRemoteAddressInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV ) == sizeof( VkPhysicalDeviceExternalMemoryRDMAFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryRDMAFeaturesNV>::value,
+                          "PhysicalDeviceExternalMemoryRDMAFeaturesNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_pipeline_properties ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT ) == sizeof( VkPipelinePropertiesIdentifierEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelinePropertiesIdentifierEXT>::value,
+                          "PipelinePropertiesIdentifierEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT ) ==
+                            sizeof( VkPhysicalDevicePipelinePropertiesFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelinePropertiesFeaturesEXT>::value,
+                          "PhysicalDevicePipelinePropertiesFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_frame_boundary ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFrameBoundaryFeaturesEXT ) == sizeof( VkPhysicalDeviceFrameBoundaryFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFrameBoundaryFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFrameBoundaryFeaturesEXT>::value,
+                          "PhysicalDeviceFrameBoundaryFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::FrameBoundaryEXT ) == sizeof( VkFrameBoundaryEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::FrameBoundaryEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::FrameBoundaryEXT>::value,
+                          "FrameBoundaryEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_multisampled_render_to_single_sampled ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT>::value,
+                          "PhysicalDeviceMultisampledRenderToSingleSampledFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT ) == sizeof( VkSubpassResolvePerformanceQueryEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassResolvePerformanceQueryEXT>::value,
+                          "SubpassResolvePerformanceQueryEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT ) == sizeof( VkMultisampledRenderToSingleSampledInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultisampledRenderToSingleSampledInfoEXT>::value,
+                          "MultisampledRenderToSingleSampledInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_extended_dynamic_state2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceExtendedDynamicState2FeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState2FeaturesEXT>::value,
+                          "PhysicalDeviceExtendedDynamicState2FeaturesEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+//=== VK_QNX_screen_surface ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX ) == sizeof( VkScreenSurfaceCreateInfoQNX ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ScreenSurfaceCreateInfoQNX>::value,
+                          "ScreenSurfaceCreateInfoQNX is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+//=== VK_EXT_color_write_enable ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT ) == sizeof( VkPhysicalDeviceColorWriteEnableFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceColorWriteEnableFeaturesEXT>::value,
+                          "PhysicalDeviceColorWriteEnableFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT ) == sizeof( VkPipelineColorWriteCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineColorWriteCreateInfoEXT>::value,
+                          "PipelineColorWriteCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_primitives_generated_query ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT ) ==
+                            sizeof( VkPhysicalDevicePrimitivesGeneratedQueryFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT>::value,
+                          "PhysicalDevicePrimitivesGeneratedQueryFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_KHR_ray_tracing_maintenance1 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMaintenance1FeaturesKHR ) ==
+                            sizeof( VkPhysicalDeviceRayTracingMaintenance1FeaturesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMaintenance1FeaturesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingMaintenance1FeaturesKHR>::value,
+                          "PhysicalDeviceRayTracingMaintenance1FeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR ) == sizeof( VkTraceRaysIndirectCommand2KHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TraceRaysIndirectCommand2KHR>::value,
+                          "TraceRaysIndirectCommand2KHR is not nothrow_move_constructible!" );
+
+//=== VK_EXT_image_view_min_lod ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT ) == sizeof( VkPhysicalDeviceImageViewMinLodFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageViewMinLodFeaturesEXT>::value,
+                          "PhysicalDeviceImageViewMinLodFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT ) == sizeof( VkImageViewMinLodCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewMinLodCreateInfoEXT>::value,
+                          "ImageViewMinLodCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_multi_draw ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT ) == sizeof( VkPhysicalDeviceMultiDrawFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawFeaturesEXT>::value,
+                          "PhysicalDeviceMultiDrawFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT ) == sizeof( VkPhysicalDeviceMultiDrawPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiDrawPropertiesEXT>::value,
+                          "PhysicalDeviceMultiDrawPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT ) == sizeof( VkMultiDrawInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultiDrawInfoEXT>::value,
+                          "MultiDrawInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT ) == sizeof( VkMultiDrawIndexedInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultiDrawIndexedInfoEXT>::value,
+                          "MultiDrawIndexedInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_image_2d_view_of_3d ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT ) == sizeof( VkPhysicalDeviceImage2DViewOf3DFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImage2DViewOf3DFeaturesEXT>::value,
+                          "PhysicalDeviceImage2DViewOf3DFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_shader_tile_image ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderTileImageFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImageFeaturesEXT>::value,
+                          "PhysicalDeviceShaderTileImageFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT ) == sizeof( VkPhysicalDeviceShaderTileImagePropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderTileImagePropertiesEXT>::value,
+                          "PhysicalDeviceShaderTileImagePropertiesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_opacity_micromap ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT ) == sizeof( VkMicromapBuildInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapBuildInfoEXT>::value,
+                          "MicromapBuildInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapUsageEXT ) == sizeof( VkMicromapUsageEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapUsageEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapUsageEXT>::value,
+                          "MicromapUsageEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT ) == sizeof( VkMicromapCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapCreateInfoEXT>::value,
+                          "MicromapCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapEXT ) == sizeof( VkMicromapEXT ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapEXT>::value, "MicromapEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT ) == sizeof( VkPhysicalDeviceOpacityMicromapFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapFeaturesEXT>::value,
+                          "PhysicalDeviceOpacityMicromapFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT ) == sizeof( VkPhysicalDeviceOpacityMicromapPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpacityMicromapPropertiesEXT>::value,
+                          "PhysicalDeviceOpacityMicromapPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT ) == sizeof( VkMicromapVersionInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapVersionInfoEXT>::value,
+                          "MicromapVersionInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT ) == sizeof( VkCopyMicromapToMemoryInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMicromapToMemoryInfoEXT>::value,
+                          "CopyMicromapToMemoryInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT ) == sizeof( VkCopyMemoryToMicromapInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMemoryToMicromapInfoEXT>::value,
+                          "CopyMemoryToMicromapInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT ) == sizeof( VkCopyMicromapInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMicromapInfoEXT>::value,
+                          "CopyMicromapInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT ) == sizeof( VkMicromapBuildSizesInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapBuildSizesInfoEXT>::value,
+                          "MicromapBuildSizesInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT ) ==
+                            sizeof( VkAccelerationStructureTrianglesOpacityMicromapEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesOpacityMicromapEXT>::value,
+                          "AccelerationStructureTrianglesOpacityMicromapEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MicromapTriangleEXT ) == sizeof( VkMicromapTriangleEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MicromapTriangleEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MicromapTriangleEXT>::value,
+                          "MicromapTriangleEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_ENABLE_BETA_EXTENSIONS )
+//=== VK_NV_displacement_micromap ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapFeaturesNV ) ==
+                            sizeof( VkPhysicalDeviceDisplacementMicromapFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapFeaturesNV>::value,
+                          "PhysicalDeviceDisplacementMicromapFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapPropertiesNV ) ==
+                            sizeof( VkPhysicalDeviceDisplacementMicromapPropertiesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapPropertiesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDisplacementMicromapPropertiesNV>::value,
+                          "PhysicalDeviceDisplacementMicromapPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesDisplacementMicromapNV ) ==
+                            sizeof( VkAccelerationStructureTrianglesDisplacementMicromapNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesDisplacementMicromapNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AccelerationStructureTrianglesDisplacementMicromapNV>::value,
+                          "AccelerationStructureTrianglesDisplacementMicromapNV is not nothrow_move_constructible!" );
+#endif /*VK_ENABLE_BETA_EXTENSIONS*/
+
+//=== VK_HUAWEI_cluster_culling_shader ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderFeaturesHUAWEI ) ==
+                            sizeof( VkPhysicalDeviceClusterCullingShaderFeaturesHUAWEI ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderFeaturesHUAWEI>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderFeaturesHUAWEI>::value,
+                          "PhysicalDeviceClusterCullingShaderFeaturesHUAWEI is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderPropertiesHUAWEI ) ==
+                            sizeof( VkPhysicalDeviceClusterCullingShaderPropertiesHUAWEI ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderPropertiesHUAWEI>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceClusterCullingShaderPropertiesHUAWEI>::value,
+                          "PhysicalDeviceClusterCullingShaderPropertiesHUAWEI is not nothrow_move_constructible!" );
+
+//=== VK_EXT_border_color_swizzle ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceBorderColorSwizzleFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceBorderColorSwizzleFeaturesEXT>::value,
+                          "PhysicalDeviceBorderColorSwizzleFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT ) ==
+                            sizeof( VkSamplerBorderColorComponentMappingCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerBorderColorComponentMappingCreateInfoEXT>::value,
+                          "SamplerBorderColorComponentMappingCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_pageable_device_local_memory ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT ) ==
+                            sizeof( VkPhysicalDevicePageableDeviceLocalMemoryFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT>::value,
+                          "PhysicalDevicePageableDeviceLocalMemoryFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_ARM_shader_core_properties ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesARM ) == sizeof( VkPhysicalDeviceShaderCorePropertiesARM ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesARM>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCorePropertiesARM>::value,
+                          "PhysicalDeviceShaderCorePropertiesARM is not nothrow_move_constructible!" );
+
+//=== VK_EXT_image_sliced_view_of_3d ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageSlicedViewOf3DFeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceImageSlicedViewOf3DFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageSlicedViewOf3DFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageSlicedViewOf3DFeaturesEXT>::value,
+                          "PhysicalDeviceImageSlicedViewOf3DFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewSlicedCreateInfoEXT ) == sizeof( VkImageViewSlicedCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewSlicedCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewSlicedCreateInfoEXT>::value,
+                          "ImageViewSlicedCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_VALVE_descriptor_set_host_mapping ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE ) ==
+                            sizeof( VkPhysicalDeviceDescriptorSetHostMappingFeaturesVALVE ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE>::value,
+                          "PhysicalDeviceDescriptorSetHostMappingFeaturesVALVE is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE ) == sizeof( VkDescriptorSetBindingReferenceVALVE ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetBindingReferenceVALVE>::value,
+                          "DescriptorSetBindingReferenceVALVE is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE ) == sizeof( VkDescriptorSetLayoutHostMappingInfoVALVE ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DescriptorSetLayoutHostMappingInfoVALVE>::value,
+                          "DescriptorSetLayoutHostMappingInfoVALVE is not nothrow_move_constructible!" );
+
+//=== VK_EXT_depth_clamp_zero_one ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesEXT ) == sizeof( VkPhysicalDeviceDepthClampZeroOneFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDepthClampZeroOneFeaturesEXT>::value,
+                          "PhysicalDeviceDepthClampZeroOneFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_non_seamless_cube_map ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceNonSeamlessCubeMapFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceNonSeamlessCubeMapFeaturesEXT>::value,
+                          "PhysicalDeviceNonSeamlessCubeMapFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_QCOM_fragment_density_map_offset ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM ) ==
+                            sizeof( VkPhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM>::value,
+                          "PhysicalDeviceFragmentDensityMapOffsetFeaturesQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM ) ==
+                            sizeof( VkPhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM>::value,
+                          "PhysicalDeviceFragmentDensityMapOffsetPropertiesQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM ) == sizeof( VkSubpassFragmentDensityMapOffsetEndInfoQCOM ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubpassFragmentDensityMapOffsetEndInfoQCOM>::value,
+                          "SubpassFragmentDensityMapOffsetEndInfoQCOM is not nothrow_move_constructible!" );
+
+//=== VK_NV_copy_memory_indirect ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryIndirectCommandNV ) == sizeof( VkCopyMemoryIndirectCommandNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMemoryIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMemoryIndirectCommandNV>::value,
+                          "CopyMemoryIndirectCommandNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CopyMemoryToImageIndirectCommandNV ) == sizeof( VkCopyMemoryToImageIndirectCommandNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CopyMemoryToImageIndirectCommandNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CopyMemoryToImageIndirectCommandNV>::value,
+                          "CopyMemoryToImageIndirectCommandNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectFeaturesNV ) == sizeof( VkPhysicalDeviceCopyMemoryIndirectFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectFeaturesNV>::value,
+                          "PhysicalDeviceCopyMemoryIndirectFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectPropertiesNV ) ==
+                            sizeof( VkPhysicalDeviceCopyMemoryIndirectPropertiesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectPropertiesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCopyMemoryIndirectPropertiesNV>::value,
+                          "PhysicalDeviceCopyMemoryIndirectPropertiesNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_memory_decompression ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV ) == sizeof( VkDecompressMemoryRegionNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DecompressMemoryRegionNV>::value,
+                          "DecompressMemoryRegionNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionFeaturesNV ) ==
+                            sizeof( VkPhysicalDeviceMemoryDecompressionFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionFeaturesNV>::value,
+                          "PhysicalDeviceMemoryDecompressionFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionPropertiesNV ) ==
+                            sizeof( VkPhysicalDeviceMemoryDecompressionPropertiesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionPropertiesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMemoryDecompressionPropertiesNV>::value,
+                          "PhysicalDeviceMemoryDecompressionPropertiesNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_device_generated_commands_compute ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV ) ==
+                            sizeof( VkPhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV>::value,
+                          "PhysicalDeviceDeviceGeneratedCommandsComputeFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ComputePipelineIndirectBufferInfoNV ) == sizeof( VkComputePipelineIndirectBufferInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ComputePipelineIndirectBufferInfoNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ComputePipelineIndirectBufferInfoNV>::value,
+                          "ComputePipelineIndirectBufferInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV ) == sizeof( VkPipelineIndirectDeviceAddressInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineIndirectDeviceAddressInfoNV>::value,
+                          "PipelineIndirectDeviceAddressInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BindPipelineIndirectCommandNV ) == sizeof( VkBindPipelineIndirectCommandNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BindPipelineIndirectCommandNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BindPipelineIndirectCommandNV>::value,
+                          "BindPipelineIndirectCommandNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_linear_color_attachment ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV ) ==
+                            sizeof( VkPhysicalDeviceLinearColorAttachmentFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLinearColorAttachmentFeaturesNV>::value,
+                          "PhysicalDeviceLinearColorAttachmentFeaturesNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_image_compression_control_swapchain ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceImageCompressionControlSwapchainFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT>::value,
+                          "PhysicalDeviceImageCompressionControlSwapchainFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_QCOM_image_processing ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageViewSampleWeightCreateInfoQCOM ) == sizeof( VkImageViewSampleWeightCreateInfoQCOM ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageViewSampleWeightCreateInfoQCOM>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageViewSampleWeightCreateInfoQCOM>::value,
+                          "ImageViewSampleWeightCreateInfoQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingFeaturesQCOM ) == sizeof( VkPhysicalDeviceImageProcessingFeaturesQCOM ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingFeaturesQCOM>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingFeaturesQCOM>::value,
+                          "PhysicalDeviceImageProcessingFeaturesQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingPropertiesQCOM ) ==
+                            sizeof( VkPhysicalDeviceImageProcessingPropertiesQCOM ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingPropertiesQCOM>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessingPropertiesQCOM>::value,
+                          "PhysicalDeviceImageProcessingPropertiesQCOM is not nothrow_move_constructible!" );
+
+//=== VK_EXT_nested_command_buffer ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceNestedCommandBufferFeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceNestedCommandBufferFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceNestedCommandBufferFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceNestedCommandBufferFeaturesEXT>::value,
+                          "PhysicalDeviceNestedCommandBufferFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceNestedCommandBufferPropertiesEXT ) ==
+                            sizeof( VkPhysicalDeviceNestedCommandBufferPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceNestedCommandBufferPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceNestedCommandBufferPropertiesEXT>::value,
+                          "PhysicalDeviceNestedCommandBufferPropertiesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_external_memory_acquire_unmodified ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalMemoryAcquireUnmodifiedEXT ) == sizeof( VkExternalMemoryAcquireUnmodifiedEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalMemoryAcquireUnmodifiedEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalMemoryAcquireUnmodifiedEXT>::value,
+                          "ExternalMemoryAcquireUnmodifiedEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_extended_dynamic_state3 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceExtendedDynamicState3FeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3FeaturesEXT>::value,
+                          "PhysicalDeviceExtendedDynamicState3FeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT ) ==
+                            sizeof( VkPhysicalDeviceExtendedDynamicState3PropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedDynamicState3PropertiesEXT>::value,
+                          "PhysicalDeviceExtendedDynamicState3PropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT ) == sizeof( VkColorBlendEquationEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ColorBlendEquationEXT>::value,
+                          "ColorBlendEquationEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT ) == sizeof( VkColorBlendAdvancedEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ColorBlendAdvancedEXT>::value,
+                          "ColorBlendAdvancedEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_subpass_merge_feedback ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassMergeFeedbackFeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceSubpassMergeFeedbackFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassMergeFeedbackFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceSubpassMergeFeedbackFeaturesEXT>::value,
+                          "PhysicalDeviceSubpassMergeFeedbackFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreationControlEXT ) == sizeof( VkRenderPassCreationControlEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassCreationControlEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassCreationControlEXT>::value,
+                          "RenderPassCreationControlEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT ) == sizeof( VkRenderPassCreationFeedbackInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackInfoEXT>::value,
+                          "RenderPassCreationFeedbackInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackCreateInfoEXT ) == sizeof( VkRenderPassCreationFeedbackCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackCreateInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassCreationFeedbackCreateInfoEXT>::value,
+                          "RenderPassCreationFeedbackCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT ) == sizeof( VkRenderPassSubpassFeedbackInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackInfoEXT>::value,
+                          "RenderPassSubpassFeedbackInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackCreateInfoEXT ) == sizeof( VkRenderPassSubpassFeedbackCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackCreateInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderPassSubpassFeedbackCreateInfoEXT>::value,
+                          "RenderPassSubpassFeedbackCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_LUNARG_direct_driver_loading ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG ) == sizeof( VkDirectDriverLoadingInfoLUNARG ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DirectDriverLoadingInfoLUNARG>::value,
+                          "DirectDriverLoadingInfoLUNARG is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DirectDriverLoadingListLUNARG ) == sizeof( VkDirectDriverLoadingListLUNARG ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DirectDriverLoadingListLUNARG>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DirectDriverLoadingListLUNARG>::value,
+                          "DirectDriverLoadingListLUNARG is not nothrow_move_constructible!" );
+
+//=== VK_EXT_shader_module_identifier ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierFeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceShaderModuleIdentifierFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierFeaturesEXT>::value,
+                          "PhysicalDeviceShaderModuleIdentifierFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierPropertiesEXT ) ==
+                            sizeof( VkPhysicalDeviceShaderModuleIdentifierPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderModuleIdentifierPropertiesEXT>::value,
+                          "PhysicalDeviceShaderModuleIdentifierPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineShaderStageModuleIdentifierCreateInfoEXT ) ==
+                            sizeof( VkPipelineShaderStageModuleIdentifierCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineShaderStageModuleIdentifierCreateInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineShaderStageModuleIdentifierCreateInfoEXT>::value,
+                          "PipelineShaderStageModuleIdentifierCreateInfoEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT ) == sizeof( VkShaderModuleIdentifierEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderModuleIdentifierEXT>::value,
+                          "ShaderModuleIdentifierEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_rasterization_order_attachment_access ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT>::value,
+                          "PhysicalDeviceRasterizationOrderAttachmentAccessFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_NV_optical_flow ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV ) == sizeof( VkPhysicalDeviceOpticalFlowFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowFeaturesNV>::value,
+                          "PhysicalDeviceOpticalFlowFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV ) == sizeof( VkPhysicalDeviceOpticalFlowPropertiesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceOpticalFlowPropertiesNV>::value,
+                          "PhysicalDeviceOpticalFlowPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV ) == sizeof( VkOpticalFlowImageFormatInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatInfoNV>::value,
+                          "OpticalFlowImageFormatInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV ) == sizeof( VkOpticalFlowImageFormatPropertiesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::OpticalFlowImageFormatPropertiesNV>::value,
+                          "OpticalFlowImageFormatPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV ) == sizeof( VkOpticalFlowSessionNV ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::OpticalFlowSessionNV>::value,
+                          "OpticalFlowSessionNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV ) == sizeof( VkOpticalFlowSessionCreateInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreateInfoNV>::value,
+                          "OpticalFlowSessionCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV ) == sizeof( VkOpticalFlowSessionCreatePrivateDataInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::OpticalFlowSessionCreatePrivateDataInfoNV>::value,
+                          "OpticalFlowSessionCreatePrivateDataInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV ) == sizeof( VkOpticalFlowExecuteInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::OpticalFlowExecuteInfoNV>::value,
+                          "OpticalFlowExecuteInfoNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_legacy_dithering ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT ) == sizeof( VkPhysicalDeviceLegacyDitheringFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLegacyDitheringFeaturesEXT>::value,
+                          "PhysicalDeviceLegacyDitheringFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_pipeline_protected_access ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT ) ==
+                            sizeof( VkPhysicalDevicePipelineProtectedAccessFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineProtectedAccessFeaturesEXT>::value,
+                          "PhysicalDevicePipelineProtectedAccessFeaturesEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_ANDROID_KHR )
+//=== VK_ANDROID_external_format_resolve ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolveFeaturesANDROID ) ==
+                            sizeof( VkPhysicalDeviceExternalFormatResolveFeaturesANDROID ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolveFeaturesANDROID>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolveFeaturesANDROID>::value,
+                          "PhysicalDeviceExternalFormatResolveFeaturesANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolvePropertiesANDROID ) ==
+                            sizeof( VkPhysicalDeviceExternalFormatResolvePropertiesANDROID ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolvePropertiesANDROID>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalFormatResolvePropertiesANDROID>::value,
+                          "PhysicalDeviceExternalFormatResolvePropertiesANDROID is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatResolvePropertiesANDROID ) ==
+                            sizeof( VkAndroidHardwareBufferFormatResolvePropertiesANDROID ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatResolvePropertiesANDROID>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AndroidHardwareBufferFormatResolvePropertiesANDROID>::value,
+                          "AndroidHardwareBufferFormatResolvePropertiesANDROID is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_ANDROID_KHR*/
+
+//=== VK_KHR_maintenance5 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5FeaturesKHR ) == sizeof( VkPhysicalDeviceMaintenance5FeaturesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5FeaturesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5FeaturesKHR>::value,
+                          "PhysicalDeviceMaintenance5FeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5PropertiesKHR ) == sizeof( VkPhysicalDeviceMaintenance5PropertiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5PropertiesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMaintenance5PropertiesKHR>::value,
+                          "PhysicalDeviceMaintenance5PropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR ) == sizeof( VkRenderingAreaInfoKHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::RenderingAreaInfoKHR>::value,
+                          "RenderingAreaInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR ) == sizeof( VkDeviceImageSubresourceInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::DeviceImageSubresourceInfoKHR>::value,
+                          "DeviceImageSubresourceInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImageSubresource2KHR ) == sizeof( VkImageSubresource2KHR ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImageSubresource2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImageSubresource2KHR>::value,
+                          "ImageSubresource2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR ) == sizeof( VkSubresourceLayout2KHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SubresourceLayout2KHR>::value,
+                          "SubresourceLayout2KHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR ) == sizeof( VkPipelineCreateFlags2CreateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PipelineCreateFlags2CreateInfoKHR>::value,
+                          "PipelineCreateFlags2CreateInfoKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR ) == sizeof( VkBufferUsageFlags2CreateInfoKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BufferUsageFlags2CreateInfoKHR>::value,
+                          "BufferUsageFlags2CreateInfoKHR is not nothrow_move_constructible!" );
+
+//=== VK_KHR_ray_tracing_position_fetch ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPositionFetchFeaturesKHR ) ==
+                            sizeof( VkPhysicalDeviceRayTracingPositionFetchFeaturesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPositionFetchFeaturesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingPositionFetchFeaturesKHR>::value,
+                          "PhysicalDeviceRayTracingPositionFetchFeaturesKHR is not nothrow_move_constructible!" );
+
+//=== VK_EXT_shader_object ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderEXT ) == sizeof( VkShaderEXT ), "handle and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderEXT>::value, "ShaderEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT ) == sizeof( VkPhysicalDeviceShaderObjectFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectFeaturesEXT>::value,
+                          "PhysicalDeviceShaderObjectFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT ) == sizeof( VkPhysicalDeviceShaderObjectPropertiesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderObjectPropertiesEXT>::value,
+                          "PhysicalDeviceShaderObjectPropertiesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT ) == sizeof( VkShaderCreateInfoEXT ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ShaderCreateInfoEXT>::value,
+                          "ShaderCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_QCOM_tile_properties ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM ) == sizeof( VkPhysicalDeviceTilePropertiesFeaturesQCOM ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceTilePropertiesFeaturesQCOM>::value,
+                          "PhysicalDeviceTilePropertiesFeaturesQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::TilePropertiesQCOM ) == sizeof( VkTilePropertiesQCOM ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::TilePropertiesQCOM>::value,
+                          "TilePropertiesQCOM is not nothrow_move_constructible!" );
+
+//=== VK_SEC_amigo_profiling ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAmigoProfilingFeaturesSEC ) == sizeof( VkPhysicalDeviceAmigoProfilingFeaturesSEC ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceAmigoProfilingFeaturesSEC>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceAmigoProfilingFeaturesSEC>::value,
+                          "PhysicalDeviceAmigoProfilingFeaturesSEC is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::AmigoProfilingSubmitInfoSEC ) == sizeof( VkAmigoProfilingSubmitInfoSEC ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::AmigoProfilingSubmitInfoSEC>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::AmigoProfilingSubmitInfoSEC>::value,
+                          "AmigoProfilingSubmitInfoSEC is not nothrow_move_constructible!" );
+
+//=== VK_QCOM_multiview_per_view_viewports ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM ) ==
+                            sizeof( VkPhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM>::value,
+                          "PhysicalDeviceMultiviewPerViewViewportsFeaturesQCOM is not nothrow_move_constructible!" );
+
+//=== VK_NV_ray_tracing_invocation_reorder ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderPropertiesNV ) ==
+                            sizeof( VkPhysicalDeviceRayTracingInvocationReorderPropertiesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderPropertiesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderPropertiesNV>::value,
+                          "PhysicalDeviceRayTracingInvocationReorderPropertiesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderFeaturesNV ) ==
+                            sizeof( VkPhysicalDeviceRayTracingInvocationReorderFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceRayTracingInvocationReorderFeaturesNV>::value,
+                          "PhysicalDeviceRayTracingInvocationReorderFeaturesNV is not nothrow_move_constructible!" );
+
+//=== VK_NV_extended_sparse_address_space ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV ) ==
+                            sizeof( VkPhysicalDeviceExtendedSparseAddressSpaceFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV>::value,
+                          "PhysicalDeviceExtendedSparseAddressSpaceFeaturesNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpacePropertiesNV ) ==
+                            sizeof( VkPhysicalDeviceExtendedSparseAddressSpacePropertiesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpacePropertiesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExtendedSparseAddressSpacePropertiesNV>::value,
+                          "PhysicalDeviceExtendedSparseAddressSpacePropertiesNV is not nothrow_move_constructible!" );
+
+//=== VK_EXT_mutable_descriptor_type ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceMutableDescriptorTypeFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMutableDescriptorTypeFeaturesEXT>::value,
+                          "PhysicalDeviceMutableDescriptorTypeFeaturesEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT ) == sizeof( VkMutableDescriptorTypeListEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeListEXT>::value,
+                          "MutableDescriptorTypeListEXT is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoEXT ) == sizeof( VkMutableDescriptorTypeCreateInfoEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MutableDescriptorTypeCreateInfoEXT>::value,
+                          "MutableDescriptorTypeCreateInfoEXT is not nothrow_move_constructible!" );
+
+//=== VK_ARM_shader_core_builtins ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsFeaturesARM ) ==
+                            sizeof( VkPhysicalDeviceShaderCoreBuiltinsFeaturesARM ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsFeaturesARM>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsFeaturesARM>::value,
+                          "PhysicalDeviceShaderCoreBuiltinsFeaturesARM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsPropertiesARM ) ==
+                            sizeof( VkPhysicalDeviceShaderCoreBuiltinsPropertiesARM ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsPropertiesARM>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceShaderCoreBuiltinsPropertiesARM>::value,
+                          "PhysicalDeviceShaderCoreBuiltinsPropertiesARM is not nothrow_move_constructible!" );
+
+//=== VK_EXT_pipeline_library_group_handles ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT ) ==
+                            sizeof( VkPhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT>::value,
+                          "PhysicalDevicePipelineLibraryGroupHandlesFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_EXT_dynamic_rendering_unused_attachments ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT>::value,
+                          "PhysicalDeviceDynamicRenderingUnusedAttachmentsFeaturesEXT is not nothrow_move_constructible!" );
+
+//=== VK_NV_low_latency2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV ) == sizeof( VkLatencySleepModeInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::LatencySleepModeInfoNV>::value,
+                          "LatencySleepModeInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LatencySleepInfoNV ) == sizeof( VkLatencySleepInfoNV ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::LatencySleepInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::LatencySleepInfoNV>::value,
+                          "LatencySleepInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV ) == sizeof( VkSetLatencyMarkerInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SetLatencyMarkerInfoNV>::value,
+                          "SetLatencyMarkerInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV ) == sizeof( VkGetLatencyMarkerInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::GetLatencyMarkerInfoNV>::value,
+                          "GetLatencyMarkerInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV ) == sizeof( VkLatencyTimingsFrameReportNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::LatencyTimingsFrameReportNV>::value,
+                          "LatencyTimingsFrameReportNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LatencySubmissionPresentIdNV ) == sizeof( VkLatencySubmissionPresentIdNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::LatencySubmissionPresentIdNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::LatencySubmissionPresentIdNV>::value,
+                          "LatencySubmissionPresentIdNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SwapchainLatencyCreateInfoNV ) == sizeof( VkSwapchainLatencyCreateInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SwapchainLatencyCreateInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SwapchainLatencyCreateInfoNV>::value,
+                          "SwapchainLatencyCreateInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV ) == sizeof( VkOutOfBandQueueTypeInfoNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::OutOfBandQueueTypeInfoNV>::value,
+                          "OutOfBandQueueTypeInfoNV is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::LatencySurfaceCapabilitiesNV ) == sizeof( VkLatencySurfaceCapabilitiesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::LatencySurfaceCapabilitiesNV>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::LatencySurfaceCapabilitiesNV>::value,
+                          "LatencySurfaceCapabilitiesNV is not nothrow_move_constructible!" );
+
+//=== VK_KHR_cooperative_matrix ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR ) == sizeof( VkCooperativeMatrixPropertiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::CooperativeMatrixPropertiesKHR>::value,
+                          "CooperativeMatrixPropertiesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesKHR ) == sizeof( VkPhysicalDeviceCooperativeMatrixFeaturesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixFeaturesKHR>::value,
+                          "PhysicalDeviceCooperativeMatrixFeaturesKHR is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesKHR ) ==
+                            sizeof( VkPhysicalDeviceCooperativeMatrixPropertiesKHR ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesKHR>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCooperativeMatrixPropertiesKHR>::value,
+                          "PhysicalDeviceCooperativeMatrixPropertiesKHR is not nothrow_move_constructible!" );
+
+//=== VK_QCOM_multiview_per_view_render_areas ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM ) ==
+                            sizeof( VkPhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM>::value,
+                          "PhysicalDeviceMultiviewPerViewRenderAreasFeaturesQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM ) ==
+                            sizeof( VkMultiviewPerViewRenderAreasRenderPassBeginInfoQCOM ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM>::value,
+                          "MultiviewPerViewRenderAreasRenderPassBeginInfoQCOM is not nothrow_move_constructible!" );
+
+//=== VK_QCOM_image_processing2 ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2FeaturesQCOM ) == sizeof( VkPhysicalDeviceImageProcessing2FeaturesQCOM ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2FeaturesQCOM>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2FeaturesQCOM>::value,
+                          "PhysicalDeviceImageProcessing2FeaturesQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2PropertiesQCOM ) ==
+                            sizeof( VkPhysicalDeviceImageProcessing2PropertiesQCOM ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2PropertiesQCOM>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceImageProcessing2PropertiesQCOM>::value,
+                          "PhysicalDeviceImageProcessing2PropertiesQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerBlockMatchWindowCreateInfoQCOM ) == sizeof( VkSamplerBlockMatchWindowCreateInfoQCOM ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerBlockMatchWindowCreateInfoQCOM>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerBlockMatchWindowCreateInfoQCOM>::value,
+                          "SamplerBlockMatchWindowCreateInfoQCOM is not nothrow_move_constructible!" );
+
+//=== VK_QCOM_filter_cubic_weights ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicWeightsFeaturesQCOM ) == sizeof( VkPhysicalDeviceCubicWeightsFeaturesQCOM ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicWeightsFeaturesQCOM>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicWeightsFeaturesQCOM>::value,
+                          "PhysicalDeviceCubicWeightsFeaturesQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerCubicWeightsCreateInfoQCOM ) == sizeof( VkSamplerCubicWeightsCreateInfoQCOM ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerCubicWeightsCreateInfoQCOM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerCubicWeightsCreateInfoQCOM>::value,
+                          "SamplerCubicWeightsCreateInfoQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::BlitImageCubicWeightsInfoQCOM ) == sizeof( VkBlitImageCubicWeightsInfoQCOM ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::BlitImageCubicWeightsInfoQCOM>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::BlitImageCubicWeightsInfoQCOM>::value,
+                          "BlitImageCubicWeightsInfoQCOM is not nothrow_move_constructible!" );
+
+//=== VK_QCOM_ycbcr_degamma ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrDegammaFeaturesQCOM ) == sizeof( VkPhysicalDeviceYcbcrDegammaFeaturesQCOM ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrDegammaFeaturesQCOM>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceYcbcrDegammaFeaturesQCOM>::value,
+                          "PhysicalDeviceYcbcrDegammaFeaturesQCOM is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM ) ==
+                            sizeof( VkSamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM>::value,
+                          "SamplerYcbcrConversionYcbcrDegammaCreateInfoQCOM is not nothrow_move_constructible!" );
+
+//=== VK_QCOM_filter_cubic_clamp ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicClampFeaturesQCOM ) == sizeof( VkPhysicalDeviceCubicClampFeaturesQCOM ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicClampFeaturesQCOM>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceCubicClampFeaturesQCOM>::value,
+                          "PhysicalDeviceCubicClampFeaturesQCOM is not nothrow_move_constructible!" );
+
+//=== VK_EXT_attachment_feedback_loop_dynamic_state ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT ) ==
+                            sizeof( VkPhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT>::value,
+                          "PhysicalDeviceAttachmentFeedbackLoopDynamicStateFeaturesEXT is not nothrow_move_constructible!" );
+
+#if defined( VK_USE_PLATFORM_SCREEN_QNX )
+//=== VK_QNX_external_memory_screen_buffer ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX ) == sizeof( VkScreenBufferPropertiesQNX ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ScreenBufferPropertiesQNX>::value,
+                          "ScreenBufferPropertiesQNX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ScreenBufferFormatPropertiesQNX ) == sizeof( VkScreenBufferFormatPropertiesQNX ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ScreenBufferFormatPropertiesQNX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ScreenBufferFormatPropertiesQNX>::value,
+                          "ScreenBufferFormatPropertiesQNX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ImportScreenBufferInfoQNX ) == sizeof( VkImportScreenBufferInfoQNX ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ImportScreenBufferInfoQNX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ImportScreenBufferInfoQNX>::value,
+                          "ImportScreenBufferInfoQNX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::ExternalFormatQNX ) == sizeof( VkExternalFormatQNX ), "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::ExternalFormatQNX>::value, "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::ExternalFormatQNX>::value,
+                          "ExternalFormatQNX is not nothrow_move_constructible!" );
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX ) ==
+                            sizeof( VkPhysicalDeviceExternalMemoryScreenBufferFeaturesQNX ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX>::value,
+                          "PhysicalDeviceExternalMemoryScreenBufferFeaturesQNX is not nothrow_move_constructible!" );
+#endif /*VK_USE_PLATFORM_SCREEN_QNX*/
+
+//=== VK_MSFT_layered_driver ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredDriverPropertiesMSFT ) == sizeof( VkPhysicalDeviceLayeredDriverPropertiesMSFT ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredDriverPropertiesMSFT>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceLayeredDriverPropertiesMSFT>::value,
+                          "PhysicalDeviceLayeredDriverPropertiesMSFT is not nothrow_move_constructible!" );
+
+//=== VK_NV_descriptor_pool_overallocation ===
+
+VULKAN_HPP_STATIC_ASSERT( sizeof( VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorPoolOverallocationFeaturesNV ) ==
+                            sizeof( VkPhysicalDeviceDescriptorPoolOverallocationFeaturesNV ),
+                          "struct and wrapper have different size!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_standard_layout<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorPoolOverallocationFeaturesNV>::value,
+                          "struct wrapper is not a standard layout!" );
+VULKAN_HPP_STATIC_ASSERT( std::is_nothrow_move_constructible<VULKAN_HPP_NAMESPACE::PhysicalDeviceDescriptorPoolOverallocationFeaturesNV>::value,
+                          "PhysicalDeviceDescriptorPoolOverallocationFeaturesNV is not nothrow_move_constructible!" );
+
+#endif
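
The assertion block above is generated boilerplate: for each extension struct it verifies that the vulkan.hpp wrapper has the same size as the corresponding C struct, is standard-layout, and is nothrow-move-constructible, so the wrappers can safely be passed to the C API via reinterpret_cast. A minimal self-contained sketch of the same pattern, using hypothetical MyWrapper/VkMyStruct types that are illustrative only and not part of the header:

#include <cstdint>
#include <type_traits>

struct VkMyStruct { uint32_t sType; const void * pNext; uint32_t value; };  // C-style struct
struct MyWrapper  { uint32_t sType; const void * pNext; uint32_t value; };  // wrapper with identical layout

static_assert( sizeof( MyWrapper ) == sizeof( VkMyStruct ), "struct and wrapper have different size!" );
static_assert( std::is_standard_layout<MyWrapper>::value, "struct wrapper is not a standard layout!" );
static_assert( std::is_nothrow_move_constructible<MyWrapper>::value, "MyWrapper is not nothrow_move_constructible!" );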

File diff suppressed because it is too large
+ 777 - 26
ThirdParty/Khronos/vulkan/vulkan_structs.hpp


File diff suppressed because it is too large
+ 365 - 178
ThirdParty/Khronos/vulkan/vulkan_to_string.hpp


+ 2 - 1
ThirdParty/Khronos/vulkan/vulkan_vi.h

@@ -2,7 +2,7 @@
 #define VULKAN_VI_H_ 1
 
 /*
-** Copyright 2015-2022 The Khronos Group Inc.
+** Copyright 2015-2023 The Khronos Group Inc.
 **
 ** SPDX-License-Identifier: Apache-2.0
 */
@@ -19,6 +19,7 @@ extern "C" {
 
 
 
+// VK_NN_vi_surface is a preprocessor guard. Do not pass it to API calls.
 #define VK_NN_vi_surface 1
 #define VK_NN_VI_SURFACE_SPEC_VERSION     1
 #define VK_NN_VI_SURFACE_EXTENSION_NAME   "VK_NN_vi_surface"
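
The newly added comment spells out a common pitfall: VK_NN_vi_surface is only a compile-time guard indicating the header declares the extension, while the string to request at instance creation is VK_NN_VI_SURFACE_EXTENSION_NAME. A minimal sketch of the intended usage, assuming a VkInstanceCreateInfo is being filled elsewhere (that setup is not part of this header):

#ifdef VK_NN_vi_surface                              // compile-time: the declarations are available
static const char * const kInstanceExtensions[] = {
    VK_NN_VI_SURFACE_EXTENSION_NAME                  // run-time: pass the name string, not the guard macro
};
// instanceCreateInfo.enabledExtensionCount   = 1;
// instanceCreateInfo.ppEnabledExtensionNames = kInstanceExtensions;
#endif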

+ 2699 - 0
ThirdParty/Khronos/vulkan/vulkan_video.hpp

@@ -0,0 +1,2699 @@
+// Copyright 2021-2023 The Khronos Group Inc.
+// SPDX-License-Identifier: Apache-2.0 OR MIT
+//
+
+// This header is generated from the Khronos Vulkan XML API Registry.
+
+#include <vk_video/vulkan_video_codec_h264std.h>
+#include <vk_video/vulkan_video_codec_h264std_decode.h>
+#include <vk_video/vulkan_video_codec_h264std_encode.h>
+#include <vk_video/vulkan_video_codec_h265std.h>
+#include <vk_video/vulkan_video_codec_h265std_decode.h>
+#include <vk_video/vulkan_video_codec_h265std_encode.h>
+#include <vk_video/vulkan_video_codecs_common.h>
+#include <vulkan/vulkan.hpp>
+
+#if !defined( VULKAN_HPP_VIDEO_NAMESPACE )
+#  define VULKAN_HPP_VIDEO_NAMESPACE video
+#endif
+
+namespace VULKAN_HPP_NAMESPACE
+{
+  namespace VULKAN_HPP_VIDEO_NAMESPACE
+  {
+
+    //=============
+    //=== ENUMs ===
+    //=============
+
+    //=== vulkan_video_codec_h264std ===
+
+    enum class H264ChromaFormatIdc
+    {
+      eMonochrome = STD_VIDEO_H264_CHROMA_FORMAT_IDC_MONOCHROME,
+      e420        = STD_VIDEO_H264_CHROMA_FORMAT_IDC_420,
+      e422        = STD_VIDEO_H264_CHROMA_FORMAT_IDC_422,
+      e444        = STD_VIDEO_H264_CHROMA_FORMAT_IDC_444,
+      eInvalid    = STD_VIDEO_H264_CHROMA_FORMAT_IDC_INVALID
+    };
+
+    enum class H264ProfileIdc
+    {
+      eBaseline          = STD_VIDEO_H264_PROFILE_IDC_BASELINE,
+      eMain              = STD_VIDEO_H264_PROFILE_IDC_MAIN,
+      eHigh              = STD_VIDEO_H264_PROFILE_IDC_HIGH,
+      eHigh444Predictive = STD_VIDEO_H264_PROFILE_IDC_HIGH_444_PREDICTIVE,
+      eInvalid           = STD_VIDEO_H264_PROFILE_IDC_INVALID
+    };
+
+    enum class H264LevelIdc
+    {
+      e1_0     = STD_VIDEO_H264_LEVEL_IDC_1_0,
+      e1_1     = STD_VIDEO_H264_LEVEL_IDC_1_1,
+      e1_2     = STD_VIDEO_H264_LEVEL_IDC_1_2,
+      e1_3     = STD_VIDEO_H264_LEVEL_IDC_1_3,
+      e2_0     = STD_VIDEO_H264_LEVEL_IDC_2_0,
+      e2_1     = STD_VIDEO_H264_LEVEL_IDC_2_1,
+      e2_2     = STD_VIDEO_H264_LEVEL_IDC_2_2,
+      e3_0     = STD_VIDEO_H264_LEVEL_IDC_3_0,
+      e3_1     = STD_VIDEO_H264_LEVEL_IDC_3_1,
+      e3_2     = STD_VIDEO_H264_LEVEL_IDC_3_2,
+      e4_0     = STD_VIDEO_H264_LEVEL_IDC_4_0,
+      e4_1     = STD_VIDEO_H264_LEVEL_IDC_4_1,
+      e4_2     = STD_VIDEO_H264_LEVEL_IDC_4_2,
+      e5_0     = STD_VIDEO_H264_LEVEL_IDC_5_0,
+      e5_1     = STD_VIDEO_H264_LEVEL_IDC_5_1,
+      e5_2     = STD_VIDEO_H264_LEVEL_IDC_5_2,
+      e6_0     = STD_VIDEO_H264_LEVEL_IDC_6_0,
+      e6_1     = STD_VIDEO_H264_LEVEL_IDC_6_1,
+      e6_2     = STD_VIDEO_H264_LEVEL_IDC_6_2,
+      eInvalid = STD_VIDEO_H264_LEVEL_IDC_INVALID
+    };
+
+    enum class H264PocType
+    {
+      e0       = STD_VIDEO_H264_POC_TYPE_0,
+      e1       = STD_VIDEO_H264_POC_TYPE_1,
+      e2       = STD_VIDEO_H264_POC_TYPE_2,
+      eInvalid = STD_VIDEO_H264_POC_TYPE_INVALID
+    };
+
+    enum class H264AspectRatioIdc
+    {
+      eUnspecified = STD_VIDEO_H264_ASPECT_RATIO_IDC_UNSPECIFIED,
+      eSquare      = STD_VIDEO_H264_ASPECT_RATIO_IDC_SQUARE,
+      e12_11       = STD_VIDEO_H264_ASPECT_RATIO_IDC_12_11,
+      e10_11       = STD_VIDEO_H264_ASPECT_RATIO_IDC_10_11,
+      e16_11       = STD_VIDEO_H264_ASPECT_RATIO_IDC_16_11,
+      e40_33       = STD_VIDEO_H264_ASPECT_RATIO_IDC_40_33,
+      e24_11       = STD_VIDEO_H264_ASPECT_RATIO_IDC_24_11,
+      e20_11       = STD_VIDEO_H264_ASPECT_RATIO_IDC_20_11,
+      e32_11       = STD_VIDEO_H264_ASPECT_RATIO_IDC_32_11,
+      e80_33       = STD_VIDEO_H264_ASPECT_RATIO_IDC_80_33,
+      e18_11       = STD_VIDEO_H264_ASPECT_RATIO_IDC_18_11,
+      e15_11       = STD_VIDEO_H264_ASPECT_RATIO_IDC_15_11,
+      e64_33       = STD_VIDEO_H264_ASPECT_RATIO_IDC_64_33,
+      e160_99      = STD_VIDEO_H264_ASPECT_RATIO_IDC_160_99,
+      e4_3         = STD_VIDEO_H264_ASPECT_RATIO_IDC_4_3,
+      e3_2         = STD_VIDEO_H264_ASPECT_RATIO_IDC_3_2,
+      e2_1         = STD_VIDEO_H264_ASPECT_RATIO_IDC_2_1,
+      eExtendedSar = STD_VIDEO_H264_ASPECT_RATIO_IDC_EXTENDED_SAR,
+      eInvalid     = STD_VIDEO_H264_ASPECT_RATIO_IDC_INVALID
+    };
+
+    enum class H264WeightedBipredIdc
+    {
+      eDefault  = STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_DEFAULT,
+      eExplicit = STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_EXPLICIT,
+      eImplicit = STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_IMPLICIT,
+      eInvalid  = STD_VIDEO_H264_WEIGHTED_BIPRED_IDC_INVALID
+    };
+
+    enum class H264ModificationOfPicNumsIdc
+    {
+      eShortTermSubtract = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_SUBTRACT,
+      eShortTermAdd      = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_SHORT_TERM_ADD,
+      eLongTerm          = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_LONG_TERM,
+      eEnd               = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_END,
+      eInvalid           = STD_VIDEO_H264_MODIFICATION_OF_PIC_NUMS_IDC_INVALID
+    };
+
+    enum class H264MemMgmtControlOp
+    {
+      eEnd                   = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_END,
+      eUnmarkShortTerm       = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_SHORT_TERM,
+      eUnmarkLongTerm        = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_LONG_TERM,
+      eMarkLongTerm          = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_LONG_TERM,
+      eSetMaxLongTermIndex   = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_SET_MAX_LONG_TERM_INDEX,
+      eUnmarkAll             = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_UNMARK_ALL,
+      eMarkCurrentAsLongTerm = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_MARK_CURRENT_AS_LONG_TERM,
+      eInvalid               = STD_VIDEO_H264_MEM_MGMT_CONTROL_OP_INVALID
+    };
+
+    enum class H264CabacInitIdc
+    {
+      e0       = STD_VIDEO_H264_CABAC_INIT_IDC_0,
+      e1       = STD_VIDEO_H264_CABAC_INIT_IDC_1,
+      e2       = STD_VIDEO_H264_CABAC_INIT_IDC_2,
+      eInvalid = STD_VIDEO_H264_CABAC_INIT_IDC_INVALID
+    };
+
+    enum class H264DisableDeblockingFilterIdc
+    {
+      eDisabled = STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_DISABLED,
+      eEnabled  = STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_ENABLED,
+      ePartial  = STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_PARTIAL,
+      eInvalid  = STD_VIDEO_H264_DISABLE_DEBLOCKING_FILTER_IDC_INVALID
+    };
+
+    enum class H264SliceType
+    {
+      eP       = STD_VIDEO_H264_SLICE_TYPE_P,
+      eB       = STD_VIDEO_H264_SLICE_TYPE_B,
+      eI       = STD_VIDEO_H264_SLICE_TYPE_I,
+      eInvalid = STD_VIDEO_H264_SLICE_TYPE_INVALID
+    };
+
+    enum class H264PictureType
+    {
+      eP       = STD_VIDEO_H264_PICTURE_TYPE_P,
+      eB       = STD_VIDEO_H264_PICTURE_TYPE_B,
+      eI       = STD_VIDEO_H264_PICTURE_TYPE_I,
+      eIdr     = STD_VIDEO_H264_PICTURE_TYPE_IDR,
+      eInvalid = STD_VIDEO_H264_PICTURE_TYPE_INVALID
+    };
+
+    enum class H264NonVclNaluType
+    {
+      eSps           = STD_VIDEO_H264_NON_VCL_NALU_TYPE_SPS,
+      ePps           = STD_VIDEO_H264_NON_VCL_NALU_TYPE_PPS,
+      eAud           = STD_VIDEO_H264_NON_VCL_NALU_TYPE_AUD,
+      ePrefix        = STD_VIDEO_H264_NON_VCL_NALU_TYPE_PREFIX,
+      eEndOfSequence = STD_VIDEO_H264_NON_VCL_NALU_TYPE_END_OF_SEQUENCE,
+      eEndOfStream   = STD_VIDEO_H264_NON_VCL_NALU_TYPE_END_OF_STREAM,
+      ePrecoded      = STD_VIDEO_H264_NON_VCL_NALU_TYPE_PRECODED,
+      eInvalid       = STD_VIDEO_H264_NON_VCL_NALU_TYPE_INVALID
+    };
+
+    //=== vulkan_video_codec_h264std_decode ===
+
+    enum class DecodeH264FieldOrderCount
+    {
+      eTop     = STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_TOP,
+      eBottom  = STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_BOTTOM,
+      eInvalid = STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_INVALID
+    };
+
+    //=== vulkan_video_codec_h265std ===
+
+    enum class H265ChromaFormatIdc
+    {
+      eMonochrome = STD_VIDEO_H265_CHROMA_FORMAT_IDC_MONOCHROME,
+      e420        = STD_VIDEO_H265_CHROMA_FORMAT_IDC_420,
+      e422        = STD_VIDEO_H265_CHROMA_FORMAT_IDC_422,
+      e444        = STD_VIDEO_H265_CHROMA_FORMAT_IDC_444,
+      eInvalid    = STD_VIDEO_H265_CHROMA_FORMAT_IDC_INVALID
+    };
+
+    enum class H265ProfileIdc
+    {
+      eMain                  = STD_VIDEO_H265_PROFILE_IDC_MAIN,
+      eMain10                = STD_VIDEO_H265_PROFILE_IDC_MAIN_10,
+      eMainStillPicture      = STD_VIDEO_H265_PROFILE_IDC_MAIN_STILL_PICTURE,
+      eFormatRangeExtensions = STD_VIDEO_H265_PROFILE_IDC_FORMAT_RANGE_EXTENSIONS,
+      eSccExtensions         = STD_VIDEO_H265_PROFILE_IDC_SCC_EXTENSIONS,
+      eInvalid               = STD_VIDEO_H265_PROFILE_IDC_INVALID
+    };
+
+    enum class H265LevelIdc
+    {
+      e1_0     = STD_VIDEO_H265_LEVEL_IDC_1_0,
+      e2_0     = STD_VIDEO_H265_LEVEL_IDC_2_0,
+      e2_1     = STD_VIDEO_H265_LEVEL_IDC_2_1,
+      e3_0     = STD_VIDEO_H265_LEVEL_IDC_3_0,
+      e3_1     = STD_VIDEO_H265_LEVEL_IDC_3_1,
+      e4_0     = STD_VIDEO_H265_LEVEL_IDC_4_0,
+      e4_1     = STD_VIDEO_H265_LEVEL_IDC_4_1,
+      e5_0     = STD_VIDEO_H265_LEVEL_IDC_5_0,
+      e5_1     = STD_VIDEO_H265_LEVEL_IDC_5_1,
+      e5_2     = STD_VIDEO_H265_LEVEL_IDC_5_2,
+      e6_0     = STD_VIDEO_H265_LEVEL_IDC_6_0,
+      e6_1     = STD_VIDEO_H265_LEVEL_IDC_6_1,
+      e6_2     = STD_VIDEO_H265_LEVEL_IDC_6_2,
+      eInvalid = STD_VIDEO_H265_LEVEL_IDC_INVALID
+    };
+
+    enum class H265SliceType
+    {
+      eB       = STD_VIDEO_H265_SLICE_TYPE_B,
+      eP       = STD_VIDEO_H265_SLICE_TYPE_P,
+      eI       = STD_VIDEO_H265_SLICE_TYPE_I,
+      eInvalid = STD_VIDEO_H265_SLICE_TYPE_INVALID
+    };
+
+    enum class H265PictureType
+    {
+      eP       = STD_VIDEO_H265_PICTURE_TYPE_P,
+      eB       = STD_VIDEO_H265_PICTURE_TYPE_B,
+      eI       = STD_VIDEO_H265_PICTURE_TYPE_I,
+      eIdr     = STD_VIDEO_H265_PICTURE_TYPE_IDR,
+      eInvalid = STD_VIDEO_H265_PICTURE_TYPE_INVALID
+    };
+
+    enum class H265AspectRatioIdc
+    {
+      eUnspecified = STD_VIDEO_H265_ASPECT_RATIO_IDC_UNSPECIFIED,
+      eSquare      = STD_VIDEO_H265_ASPECT_RATIO_IDC_SQUARE,
+      e12_11       = STD_VIDEO_H265_ASPECT_RATIO_IDC_12_11,
+      e10_11       = STD_VIDEO_H265_ASPECT_RATIO_IDC_10_11,
+      e16_11       = STD_VIDEO_H265_ASPECT_RATIO_IDC_16_11,
+      e40_33       = STD_VIDEO_H265_ASPECT_RATIO_IDC_40_33,
+      e24_11       = STD_VIDEO_H265_ASPECT_RATIO_IDC_24_11,
+      e20_11       = STD_VIDEO_H265_ASPECT_RATIO_IDC_20_11,
+      e32_11       = STD_VIDEO_H265_ASPECT_RATIO_IDC_32_11,
+      e80_33       = STD_VIDEO_H265_ASPECT_RATIO_IDC_80_33,
+      e18_11       = STD_VIDEO_H265_ASPECT_RATIO_IDC_18_11,
+      e15_11       = STD_VIDEO_H265_ASPECT_RATIO_IDC_15_11,
+      e64_33       = STD_VIDEO_H265_ASPECT_RATIO_IDC_64_33,
+      e160_99      = STD_VIDEO_H265_ASPECT_RATIO_IDC_160_99,
+      e4_3         = STD_VIDEO_H265_ASPECT_RATIO_IDC_4_3,
+      e3_2         = STD_VIDEO_H265_ASPECT_RATIO_IDC_3_2,
+      e2_1         = STD_VIDEO_H265_ASPECT_RATIO_IDC_2_1,
+      eExtendedSar = STD_VIDEO_H265_ASPECT_RATIO_IDC_EXTENDED_SAR,
+      eInvalid     = STD_VIDEO_H265_ASPECT_RATIO_IDC_INVALID
+    };
+
+    //===============
+    //=== STRUCTS ===
+    //===============
+
+    //=== vulkan_video_codec_h264std ===
+
+    struct H264SpsVuiFlags
+    {
+      using NativeType = StdVideoH264SpsVuiFlags;
+
+      operator StdVideoH264SpsVuiFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH264SpsVuiFlags *>( this );
+      }
+
+      operator StdVideoH264SpsVuiFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH264SpsVuiFlags *>( this );
+      }
+
+      bool operator==( H264SpsVuiFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( aspect_ratio_info_present_flag == rhs.aspect_ratio_info_present_flag ) && ( overscan_info_present_flag == rhs.overscan_info_present_flag ) &&
+               ( overscan_appropriate_flag == rhs.overscan_appropriate_flag ) && ( video_signal_type_present_flag == rhs.video_signal_type_present_flag ) &&
+               ( video_full_range_flag == rhs.video_full_range_flag ) && ( color_description_present_flag == rhs.color_description_present_flag ) &&
+               ( chroma_loc_info_present_flag == rhs.chroma_loc_info_present_flag ) && ( timing_info_present_flag == rhs.timing_info_present_flag ) &&
+               ( fixed_frame_rate_flag == rhs.fixed_frame_rate_flag ) && ( bitstream_restriction_flag == rhs.bitstream_restriction_flag ) &&
+               ( nal_hrd_parameters_present_flag == rhs.nal_hrd_parameters_present_flag ) &&
+               ( vcl_hrd_parameters_present_flag == rhs.vcl_hrd_parameters_present_flag );
+      }
+
+      bool operator!=( H264SpsVuiFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t aspect_ratio_info_present_flag  : 1;
+      uint32_t overscan_info_present_flag      : 1;
+      uint32_t overscan_appropriate_flag       : 1;
+      uint32_t video_signal_type_present_flag  : 1;
+      uint32_t video_full_range_flag           : 1;
+      uint32_t color_description_present_flag  : 1;
+      uint32_t chroma_loc_info_present_flag    : 1;
+      uint32_t timing_info_present_flag        : 1;
+      uint32_t fixed_frame_rate_flag           : 1;
+      uint32_t bitstream_restriction_flag      : 1;
+      uint32_t nal_hrd_parameters_present_flag : 1;
+      uint32_t vcl_hrd_parameters_present_flag : 1;
+    };
+
+    struct H264HrdParameters
+    {
+      using NativeType = StdVideoH264HrdParameters;
+
+      operator StdVideoH264HrdParameters const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH264HrdParameters *>( this );
+      }
+
+      operator StdVideoH264HrdParameters &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH264HrdParameters *>( this );
+      }
+
+      bool operator==( H264HrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( cpb_cnt_minus1 == rhs.cpb_cnt_minus1 ) && ( bit_rate_scale == rhs.bit_rate_scale ) && ( cpb_size_scale == rhs.cpb_size_scale ) &&
+               ( reserved1 == rhs.reserved1 ) && ( bit_rate_value_minus1 == rhs.bit_rate_value_minus1 ) &&
+               ( cpb_size_value_minus1 == rhs.cpb_size_value_minus1 ) && ( cbr_flag == rhs.cbr_flag ) &&
+               ( initial_cpb_removal_delay_length_minus1 == rhs.initial_cpb_removal_delay_length_minus1 ) &&
+               ( cpb_removal_delay_length_minus1 == rhs.cpb_removal_delay_length_minus1 ) &&
+               ( dpb_output_delay_length_minus1 == rhs.dpb_output_delay_length_minus1 ) && ( time_offset_length == rhs.time_offset_length );
+      }
+
+      bool operator!=( H264HrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint8_t                                                                          cpb_cnt_minus1                          = {};
+      uint8_t                                                                          bit_rate_scale                          = {};
+      uint8_t                                                                          cpb_size_scale                          = {};
+      uint8_t                                                                          reserved1                               = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, STD_VIDEO_H264_CPB_CNT_LIST_SIZE> bit_rate_value_minus1                   = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, STD_VIDEO_H264_CPB_CNT_LIST_SIZE> cpb_size_value_minus1                   = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H264_CPB_CNT_LIST_SIZE>  cbr_flag                                = {};
+      uint32_t                                                                         initial_cpb_removal_delay_length_minus1 = {};
+      uint32_t                                                                         cpb_removal_delay_length_minus1         = {};
+      uint32_t                                                                         dpb_output_delay_length_minus1          = {};
+      uint32_t                                                                         time_offset_length                      = {};
+    };
+
+    struct H264SequenceParameterSetVui
+    {
+      using NativeType = StdVideoH264SequenceParameterSetVui;
+
+      operator StdVideoH264SequenceParameterSetVui const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH264SequenceParameterSetVui *>( this );
+      }
+
+      operator StdVideoH264SequenceParameterSetVui &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH264SequenceParameterSetVui *>( this );
+      }
+
+      bool operator==( H264SequenceParameterSetVui const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( aspect_ratio_idc == rhs.aspect_ratio_idc ) && ( sar_width == rhs.sar_width ) && ( sar_height == rhs.sar_height ) &&
+               ( video_format == rhs.video_format ) && ( colour_primaries == rhs.colour_primaries ) &&
+               ( transfer_characteristics == rhs.transfer_characteristics ) && ( matrix_coefficients == rhs.matrix_coefficients ) &&
+               ( num_units_in_tick == rhs.num_units_in_tick ) && ( time_scale == rhs.time_scale ) && ( max_num_reorder_frames == rhs.max_num_reorder_frames ) &&
+               ( max_dec_frame_buffering == rhs.max_dec_frame_buffering ) && ( chroma_sample_loc_type_top_field == rhs.chroma_sample_loc_type_top_field ) &&
+               ( chroma_sample_loc_type_bottom_field == rhs.chroma_sample_loc_type_bottom_field ) && ( reserved1 == rhs.reserved1 ) &&
+               ( pHrdParameters == rhs.pHrdParameters );
+      }
+
+      bool operator!=( H264SequenceParameterSetVui const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SpsVuiFlags    flags = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264AspectRatioIdc aspect_ratio_idc =
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264AspectRatioIdc::eUnspecified;
+      uint16_t                                                                    sar_width                           = {};
+      uint16_t                                                                    sar_height                          = {};
+      uint8_t                                                                     video_format                        = {};
+      uint8_t                                                                     colour_primaries                    = {};
+      uint8_t                                                                     transfer_characteristics            = {};
+      uint8_t                                                                     matrix_coefficients                 = {};
+      uint32_t                                                                    num_units_in_tick                   = {};
+      uint32_t                                                                    time_scale                          = {};
+      uint8_t                                                                     max_num_reorder_frames              = {};
+      uint8_t                                                                     max_dec_frame_buffering             = {};
+      uint8_t                                                                     chroma_sample_loc_type_top_field    = {};
+      uint8_t                                                                     chroma_sample_loc_type_bottom_field = {};
+      uint32_t                                                                    reserved1                           = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264HrdParameters * pHrdParameters                      = {};
+    };
+
+    struct H264SpsFlags
+    {
+      using NativeType = StdVideoH264SpsFlags;
+
+      operator StdVideoH264SpsFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH264SpsFlags *>( this );
+      }
+
+      operator StdVideoH264SpsFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH264SpsFlags *>( this );
+      }
+
+      bool operator==( H264SpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( constraint_set0_flag == rhs.constraint_set0_flag ) && ( constraint_set1_flag == rhs.constraint_set1_flag ) &&
+               ( constraint_set2_flag == rhs.constraint_set2_flag ) && ( constraint_set3_flag == rhs.constraint_set3_flag ) &&
+               ( constraint_set4_flag == rhs.constraint_set4_flag ) && ( constraint_set5_flag == rhs.constraint_set5_flag ) &&
+               ( direct_8x8_inference_flag == rhs.direct_8x8_inference_flag ) && ( mb_adaptive_frame_field_flag == rhs.mb_adaptive_frame_field_flag ) &&
+               ( frame_mbs_only_flag == rhs.frame_mbs_only_flag ) && ( delta_pic_order_always_zero_flag == rhs.delta_pic_order_always_zero_flag ) &&
+               ( separate_colour_plane_flag == rhs.separate_colour_plane_flag ) &&
+               ( gaps_in_frame_num_value_allowed_flag == rhs.gaps_in_frame_num_value_allowed_flag ) &&
+               ( qpprime_y_zero_transform_bypass_flag == rhs.qpprime_y_zero_transform_bypass_flag ) && ( frame_cropping_flag == rhs.frame_cropping_flag ) &&
+               ( seq_scaling_matrix_present_flag == rhs.seq_scaling_matrix_present_flag ) && ( vui_parameters_present_flag == rhs.vui_parameters_present_flag );
+      }
+
+      bool operator!=( H264SpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t constraint_set0_flag                 : 1;
+      uint32_t constraint_set1_flag                 : 1;
+      uint32_t constraint_set2_flag                 : 1;
+      uint32_t constraint_set3_flag                 : 1;
+      uint32_t constraint_set4_flag                 : 1;
+      uint32_t constraint_set5_flag                 : 1;
+      uint32_t direct_8x8_inference_flag            : 1;
+      uint32_t mb_adaptive_frame_field_flag         : 1;
+      uint32_t frame_mbs_only_flag                  : 1;
+      uint32_t delta_pic_order_always_zero_flag     : 1;
+      uint32_t separate_colour_plane_flag           : 1;
+      uint32_t gaps_in_frame_num_value_allowed_flag : 1;
+      uint32_t qpprime_y_zero_transform_bypass_flag : 1;
+      uint32_t frame_cropping_flag                  : 1;
+      uint32_t seq_scaling_matrix_present_flag      : 1;
+      uint32_t vui_parameters_present_flag          : 1;
+    };
+
+    struct H264ScalingLists
+    {
+      using NativeType = StdVideoH264ScalingLists;
+
+      operator StdVideoH264ScalingLists const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH264ScalingLists *>( this );
+      }
+
+      operator StdVideoH264ScalingLists &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH264ScalingLists *>( this );
+      }
+
+      bool operator==( H264ScalingLists const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( scaling_list_present_mask == rhs.scaling_list_present_mask ) && ( use_default_scaling_matrix_mask == rhs.use_default_scaling_matrix_mask ) &&
+               ( ScalingList4x4 == rhs.ScalingList4x4 ) && ( ScalingList8x8 == rhs.ScalingList8x8 );
+      }
+
+      bool operator!=( H264ScalingLists const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint16_t scaling_list_present_mask       = {};
+      uint16_t use_default_scaling_matrix_mask = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper2D<uint8_t, STD_VIDEO_H264_SCALING_LIST_4X4_NUM_LISTS, STD_VIDEO_H264_SCALING_LIST_4X4_NUM_ELEMENTS>
+        ScalingList4x4 = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper2D<uint8_t, STD_VIDEO_H264_SCALING_LIST_8X8_NUM_LISTS, STD_VIDEO_H264_SCALING_LIST_8X8_NUM_ELEMENTS>
+        ScalingList8x8 = {};
+    };
+
+    struct H264SequenceParameterSet
+    {
+      using NativeType = StdVideoH264SequenceParameterSet;
+
+      operator StdVideoH264SequenceParameterSet const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH264SequenceParameterSet *>( this );
+      }
+
+      operator StdVideoH264SequenceParameterSet &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH264SequenceParameterSet *>( this );
+      }
+
+      bool operator==( H264SequenceParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( profile_idc == rhs.profile_idc ) && ( level_idc == rhs.level_idc ) &&
+               ( chroma_format_idc == rhs.chroma_format_idc ) && ( seq_parameter_set_id == rhs.seq_parameter_set_id ) &&
+               ( bit_depth_luma_minus8 == rhs.bit_depth_luma_minus8 ) && ( bit_depth_chroma_minus8 == rhs.bit_depth_chroma_minus8 ) &&
+               ( log2_max_frame_num_minus4 == rhs.log2_max_frame_num_minus4 ) && ( pic_order_cnt_type == rhs.pic_order_cnt_type ) &&
+               ( offset_for_non_ref_pic == rhs.offset_for_non_ref_pic ) && ( offset_for_top_to_bottom_field == rhs.offset_for_top_to_bottom_field ) &&
+               ( log2_max_pic_order_cnt_lsb_minus4 == rhs.log2_max_pic_order_cnt_lsb_minus4 ) &&
+               ( num_ref_frames_in_pic_order_cnt_cycle == rhs.num_ref_frames_in_pic_order_cnt_cycle ) && ( max_num_ref_frames == rhs.max_num_ref_frames ) &&
+               ( reserved1 == rhs.reserved1 ) && ( pic_width_in_mbs_minus1 == rhs.pic_width_in_mbs_minus1 ) &&
+               ( pic_height_in_map_units_minus1 == rhs.pic_height_in_map_units_minus1 ) && ( frame_crop_left_offset == rhs.frame_crop_left_offset ) &&
+               ( frame_crop_right_offset == rhs.frame_crop_right_offset ) && ( frame_crop_top_offset == rhs.frame_crop_top_offset ) &&
+               ( frame_crop_bottom_offset == rhs.frame_crop_bottom_offset ) && ( reserved2 == rhs.reserved2 ) &&
+               ( pOffsetForRefFrame == rhs.pOffsetForRefFrame ) && ( pScalingLists == rhs.pScalingLists ) &&
+               ( pSequenceParameterSetVui == rhs.pSequenceParameterSetVui );
+      }
+
+      bool operator!=( H264SequenceParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SpsFlags   flags = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ProfileIdc profile_idc =
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ProfileIdc::eBaseline;
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264LevelIdc        level_idc = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264LevelIdc::e1_0;
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ChromaFormatIdc chroma_format_idc =
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ChromaFormatIdc::eMonochrome;
+      uint8_t                                                       seq_parameter_set_id      = {};
+      uint8_t                                                       bit_depth_luma_minus8     = {};
+      uint8_t                                                       bit_depth_chroma_minus8   = {};
+      uint8_t                                                       log2_max_frame_num_minus4 = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PocType pic_order_cnt_type     = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PocType::e0;
+      int32_t                                                       offset_for_non_ref_pic = {};
+      int32_t                                                       offset_for_top_to_bottom_field                   = {};
+      uint8_t                                                       log2_max_pic_order_cnt_lsb_minus4                = {};
+      uint8_t                                                       num_ref_frames_in_pic_order_cnt_cycle            = {};
+      uint8_t                                                       max_num_ref_frames                               = {};
+      uint8_t                                                       reserved1                                        = {};
+      uint32_t                                                      pic_width_in_mbs_minus1                          = {};
+      uint32_t                                                      pic_height_in_map_units_minus1                   = {};
+      uint32_t                                                      frame_crop_left_offset                           = {};
+      uint32_t                                                      frame_crop_right_offset                          = {};
+      uint32_t                                                      frame_crop_top_offset                            = {};
+      uint32_t                                                      frame_crop_bottom_offset                         = {};
+      uint32_t                                                      reserved2                                        = {};
+      const int32_t *                                               pOffsetForRefFrame                               = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ScalingLists *            pScalingLists            = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SequenceParameterSetVui * pSequenceParameterSetVui = {};
+    };
+
+    struct H264PpsFlags
+    {
+      using NativeType = StdVideoH264PpsFlags;
+
+      operator StdVideoH264PpsFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH264PpsFlags *>( this );
+      }
+
+      operator StdVideoH264PpsFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH264PpsFlags *>( this );
+      }
+
+      bool operator==( H264PpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( transform_8x8_mode_flag == rhs.transform_8x8_mode_flag ) && ( redundant_pic_cnt_present_flag == rhs.redundant_pic_cnt_present_flag ) &&
+               ( constrained_intra_pred_flag == rhs.constrained_intra_pred_flag ) &&
+               ( deblocking_filter_control_present_flag == rhs.deblocking_filter_control_present_flag ) && ( weighted_pred_flag == rhs.weighted_pred_flag ) &&
+               ( bottom_field_pic_order_in_frame_present_flag == rhs.bottom_field_pic_order_in_frame_present_flag ) &&
+               ( entropy_coding_mode_flag == rhs.entropy_coding_mode_flag ) && ( pic_scaling_matrix_present_flag == rhs.pic_scaling_matrix_present_flag );
+      }
+
+      bool operator!=( H264PpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t transform_8x8_mode_flag                      : 1;
+      uint32_t redundant_pic_cnt_present_flag               : 1;
+      uint32_t constrained_intra_pred_flag                  : 1;
+      uint32_t deblocking_filter_control_present_flag       : 1;
+      uint32_t weighted_pred_flag                           : 1;
+      uint32_t bottom_field_pic_order_in_frame_present_flag : 1;
+      uint32_t entropy_coding_mode_flag                     : 1;
+      uint32_t pic_scaling_matrix_present_flag              : 1;
+    };
+
+    struct H264PictureParameterSet
+    {
+      using NativeType = StdVideoH264PictureParameterSet;
+
+      operator StdVideoH264PictureParameterSet const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH264PictureParameterSet *>( this );
+      }
+
+      operator StdVideoH264PictureParameterSet &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH264PictureParameterSet *>( this );
+      }
+
+      bool operator==( H264PictureParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( seq_parameter_set_id == rhs.seq_parameter_set_id ) && ( pic_parameter_set_id == rhs.pic_parameter_set_id ) &&
+               ( num_ref_idx_l0_default_active_minus1 == rhs.num_ref_idx_l0_default_active_minus1 ) &&
+               ( num_ref_idx_l1_default_active_minus1 == rhs.num_ref_idx_l1_default_active_minus1 ) && ( weighted_bipred_idc == rhs.weighted_bipred_idc ) &&
+               ( pic_init_qp_minus26 == rhs.pic_init_qp_minus26 ) && ( pic_init_qs_minus26 == rhs.pic_init_qs_minus26 ) &&
+               ( chroma_qp_index_offset == rhs.chroma_qp_index_offset ) && ( second_chroma_qp_index_offset == rhs.second_chroma_qp_index_offset ) &&
+               ( pScalingLists == rhs.pScalingLists );
+      }
+
+      bool operator!=( H264PictureParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PpsFlags          flags                                = {};
+      uint8_t                                                                 seq_parameter_set_id                 = {};
+      uint8_t                                                                 pic_parameter_set_id                 = {};
+      uint8_t                                                                 num_ref_idx_l0_default_active_minus1 = {};
+      uint8_t                                                                 num_ref_idx_l1_default_active_minus1 = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264WeightedBipredIdc weighted_bipred_idc =
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264WeightedBipredIdc::eDefault;
+      int8_t                                                                     pic_init_qp_minus26           = {};
+      int8_t                                                                     pic_init_qs_minus26           = {};
+      int8_t                                                                     chroma_qp_index_offset        = {};
+      int8_t                                                                     second_chroma_qp_index_offset = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ScalingLists * pScalingLists                 = {};
+    };
+
+    //=== vulkan_video_codec_h264std_decode ===
+
+    struct DecodeH264PictureInfoFlags
+    {
+      using NativeType = StdVideoDecodeH264PictureInfoFlags;
+
+      operator StdVideoDecodeH264PictureInfoFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoDecodeH264PictureInfoFlags *>( this );
+      }
+
+      operator StdVideoDecodeH264PictureInfoFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoDecodeH264PictureInfoFlags *>( this );
+      }
+
+      bool operator==( DecodeH264PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( field_pic_flag == rhs.field_pic_flag ) && ( is_intra == rhs.is_intra ) && ( IdrPicFlag == rhs.IdrPicFlag ) &&
+               ( bottom_field_flag == rhs.bottom_field_flag ) && ( is_reference == rhs.is_reference ) &&
+               ( complementary_field_pair == rhs.complementary_field_pair );
+      }
+
+      bool operator!=( DecodeH264PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t field_pic_flag           : 1;
+      uint32_t is_intra                 : 1;
+      uint32_t IdrPicFlag               : 1;
+      uint32_t bottom_field_flag        : 1;
+      uint32_t is_reference             : 1;
+      uint32_t complementary_field_pair : 1;
+    };
+
+    struct DecodeH264PictureInfo
+    {
+      using NativeType = StdVideoDecodeH264PictureInfo;
+
+      operator StdVideoDecodeH264PictureInfo const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoDecodeH264PictureInfo *>( this );
+      }
+
+      operator StdVideoDecodeH264PictureInfo &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoDecodeH264PictureInfo *>( this );
+      }
+
+      bool operator==( DecodeH264PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( seq_parameter_set_id == rhs.seq_parameter_set_id ) && ( pic_parameter_set_id == rhs.pic_parameter_set_id ) &&
+               ( reserved1 == rhs.reserved1 ) && ( reserved2 == rhs.reserved2 ) && ( frame_num == rhs.frame_num ) && ( idr_pic_id == rhs.idr_pic_id ) &&
+               ( PicOrderCnt == rhs.PicOrderCnt );
+      }
+
+      bool operator!=( DecodeH264PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::DecodeH264PictureInfoFlags                     flags                = {};
+      uint8_t                                                                                          seq_parameter_set_id = {};
+      uint8_t                                                                                          pic_parameter_set_id = {};
+      uint8_t                                                                                          reserved1            = {};
+      uint8_t                                                                                          reserved2            = {};
+      uint16_t                                                                                         frame_num            = {};
+      uint16_t                                                                                         idr_pic_id           = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int32_t, STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE> PicOrderCnt          = {};
+    };
+
+    struct DecodeH264ReferenceInfoFlags
+    {
+      using NativeType = StdVideoDecodeH264ReferenceInfoFlags;
+
+      operator StdVideoDecodeH264ReferenceInfoFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoDecodeH264ReferenceInfoFlags *>( this );
+      }
+
+      operator StdVideoDecodeH264ReferenceInfoFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoDecodeH264ReferenceInfoFlags *>( this );
+      }
+
+      bool operator==( DecodeH264ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( top_field_flag == rhs.top_field_flag ) && ( bottom_field_flag == rhs.bottom_field_flag ) &&
+               ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( is_non_existing == rhs.is_non_existing );
+      }
+
+      bool operator!=( DecodeH264ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t top_field_flag               : 1;
+      uint32_t bottom_field_flag            : 1;
+      uint32_t used_for_long_term_reference : 1;
+      uint32_t is_non_existing              : 1;
+    };
+
+    struct DecodeH264ReferenceInfo
+    {
+      using NativeType = StdVideoDecodeH264ReferenceInfo;
+
+      operator StdVideoDecodeH264ReferenceInfo const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoDecodeH264ReferenceInfo *>( this );
+      }
+
+      operator StdVideoDecodeH264ReferenceInfo &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoDecodeH264ReferenceInfo *>( this );
+      }
+
+      bool operator==( DecodeH264ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( FrameNum == rhs.FrameNum ) && ( reserved == rhs.reserved ) && ( PicOrderCnt == rhs.PicOrderCnt );
+      }
+
+      bool operator!=( DecodeH264ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::DecodeH264ReferenceInfoFlags                   flags       = {};
+      uint16_t                                                                                         FrameNum    = {};
+      uint16_t                                                                                         reserved    = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int32_t, STD_VIDEO_DECODE_H264_FIELD_ORDER_COUNT_LIST_SIZE> PicOrderCnt = {};
+    };
+
+    //=== vulkan_video_codec_h264std_encode ===
+
+    struct EncodeH264WeightTableFlags
+    {
+      using NativeType = StdVideoEncodeH264WeightTableFlags;
+
+      operator StdVideoEncodeH264WeightTableFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH264WeightTableFlags *>( this );
+      }
+
+      operator StdVideoEncodeH264WeightTableFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH264WeightTableFlags *>( this );
+      }
+
+      bool operator==( EncodeH264WeightTableFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( luma_weight_l0_flag == rhs.luma_weight_l0_flag ) && ( chroma_weight_l0_flag == rhs.chroma_weight_l0_flag ) &&
+               ( luma_weight_l1_flag == rhs.luma_weight_l1_flag ) && ( chroma_weight_l1_flag == rhs.chroma_weight_l1_flag );
+      }
+
+      bool operator!=( EncodeH264WeightTableFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t luma_weight_l0_flag   = {};
+      uint32_t chroma_weight_l0_flag = {};
+      uint32_t luma_weight_l1_flag   = {};
+      uint32_t chroma_weight_l1_flag = {};
+    };
+
+    struct EncodeH264WeightTable
+    {
+      using NativeType = StdVideoEncodeH264WeightTable;
+
+      operator StdVideoEncodeH264WeightTable const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH264WeightTable *>( this );
+      }
+
+      operator StdVideoEncodeH264WeightTable &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH264WeightTable *>( this );
+      }
+
+      bool operator==( EncodeH264WeightTable const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( luma_log2_weight_denom == rhs.luma_log2_weight_denom ) &&
+               ( chroma_log2_weight_denom == rhs.chroma_log2_weight_denom ) && ( luma_weight_l0 == rhs.luma_weight_l0 ) &&
+               ( luma_offset_l0 == rhs.luma_offset_l0 ) && ( chroma_weight_l0 == rhs.chroma_weight_l0 ) && ( chroma_offset_l0 == rhs.chroma_offset_l0 ) &&
+               ( luma_weight_l1 == rhs.luma_weight_l1 ) && ( luma_offset_l1 == rhs.luma_offset_l1 ) && ( chroma_weight_l1 == rhs.chroma_weight_l1 ) &&
+               ( chroma_offset_l1 == rhs.chroma_offset_l1 );
+      }
+
+      bool operator!=( EncodeH264WeightTable const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264WeightTableFlags                                    flags                    = {};
+      uint8_t                                                                                                         luma_log2_weight_denom   = {};
+      uint8_t                                                                                                         chroma_log2_weight_denom = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int8_t, STD_VIDEO_H264_MAX_NUM_LIST_REF>                                   luma_weight_l0           = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int8_t, STD_VIDEO_H264_MAX_NUM_LIST_REF>                                   luma_offset_l0           = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper2D<int8_t, STD_VIDEO_H264_MAX_NUM_LIST_REF, STD_VIDEO_H264_MAX_CHROMA_PLANES> chroma_weight_l0         = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper2D<int8_t, STD_VIDEO_H264_MAX_NUM_LIST_REF, STD_VIDEO_H264_MAX_CHROMA_PLANES> chroma_offset_l0         = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int8_t, STD_VIDEO_H264_MAX_NUM_LIST_REF>                                   luma_weight_l1           = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int8_t, STD_VIDEO_H264_MAX_NUM_LIST_REF>                                   luma_offset_l1           = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper2D<int8_t, STD_VIDEO_H264_MAX_NUM_LIST_REF, STD_VIDEO_H264_MAX_CHROMA_PLANES> chroma_weight_l1         = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper2D<int8_t, STD_VIDEO_H264_MAX_NUM_LIST_REF, STD_VIDEO_H264_MAX_CHROMA_PLANES> chroma_offset_l1         = {};
+    };
+
+    struct EncodeH264SliceHeaderFlags
+    {
+      using NativeType = StdVideoEncodeH264SliceHeaderFlags;
+
+      operator StdVideoEncodeH264SliceHeaderFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH264SliceHeaderFlags *>( this );
+      }
+
+      operator StdVideoEncodeH264SliceHeaderFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH264SliceHeaderFlags *>( this );
+      }
+
+      bool operator==( EncodeH264SliceHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( direct_spatial_mv_pred_flag == rhs.direct_spatial_mv_pred_flag ) &&
+               ( num_ref_idx_active_override_flag == rhs.num_ref_idx_active_override_flag ) && ( reserved == rhs.reserved );
+      }
+
+      bool operator!=( EncodeH264SliceHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t direct_spatial_mv_pred_flag      : 1;
+      uint32_t num_ref_idx_active_override_flag : 1;
+      uint32_t reserved                         : 30;
+    };
+
+    struct EncodeH264PictureInfoFlags
+    {
+      using NativeType = StdVideoEncodeH264PictureInfoFlags;
+
+      operator StdVideoEncodeH264PictureInfoFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH264PictureInfoFlags *>( this );
+      }
+
+      operator StdVideoEncodeH264PictureInfoFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH264PictureInfoFlags *>( this );
+      }
+
+      bool operator==( EncodeH264PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( IdrPicFlag == rhs.IdrPicFlag ) && ( is_reference == rhs.is_reference ) &&
+               ( no_output_of_prior_pics_flag == rhs.no_output_of_prior_pics_flag ) && ( long_term_reference_flag == rhs.long_term_reference_flag ) &&
+               ( adaptive_ref_pic_marking_mode_flag == rhs.adaptive_ref_pic_marking_mode_flag ) && ( reserved == rhs.reserved );
+      }
+
+      bool operator!=( EncodeH264PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t IdrPicFlag                         : 1;
+      uint32_t is_reference                       : 1;
+      uint32_t no_output_of_prior_pics_flag       : 1;
+      uint32_t long_term_reference_flag           : 1;
+      uint32_t adaptive_ref_pic_marking_mode_flag : 1;
+      uint32_t reserved                           : 27;
+    };
+
+    struct EncodeH264ReferenceInfoFlags
+    {
+      using NativeType = StdVideoEncodeH264ReferenceInfoFlags;
+
+      operator StdVideoEncodeH264ReferenceInfoFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH264ReferenceInfoFlags *>( this );
+      }
+
+      operator StdVideoEncodeH264ReferenceInfoFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH264ReferenceInfoFlags *>( this );
+      }
+
+      bool operator==( EncodeH264ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( reserved == rhs.reserved );
+      }
+
+      bool operator!=( EncodeH264ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t used_for_long_term_reference : 1;
+      uint32_t reserved                     : 31;
+    };
+
+    struct EncodeH264ReferenceListsInfoFlags
+    {
+      using NativeType = StdVideoEncodeH264ReferenceListsInfoFlags;
+
+      operator StdVideoEncodeH264ReferenceListsInfoFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH264ReferenceListsInfoFlags *>( this );
+      }
+
+      operator StdVideoEncodeH264ReferenceListsInfoFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH264ReferenceListsInfoFlags *>( this );
+      }
+
+      bool operator==( EncodeH264ReferenceListsInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( ref_pic_list_modification_flag_l0 == rhs.ref_pic_list_modification_flag_l0 ) &&
+               ( ref_pic_list_modification_flag_l1 == rhs.ref_pic_list_modification_flag_l1 ) && ( reserved == rhs.reserved );
+      }
+
+      bool operator!=( EncodeH264ReferenceListsInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t ref_pic_list_modification_flag_l0 : 1;
+      uint32_t ref_pic_list_modification_flag_l1 : 1;
+      uint32_t reserved                          : 30;
+    };
+
+    struct EncodeH264RefListModEntry
+    {
+      using NativeType = StdVideoEncodeH264RefListModEntry;
+
+      operator StdVideoEncodeH264RefListModEntry const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH264RefListModEntry *>( this );
+      }
+
+      operator StdVideoEncodeH264RefListModEntry &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH264RefListModEntry *>( this );
+      }
+
+      bool operator==( EncodeH264RefListModEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( modification_of_pic_nums_idc == rhs.modification_of_pic_nums_idc ) && ( abs_diff_pic_num_minus1 == rhs.abs_diff_pic_num_minus1 ) &&
+               ( long_term_pic_num == rhs.long_term_pic_num );
+      }
+
+      bool operator!=( EncodeH264RefListModEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ModificationOfPicNumsIdc modification_of_pic_nums_idc =
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264ModificationOfPicNumsIdc::eShortTermSubtract;
+      uint16_t abs_diff_pic_num_minus1 = {};
+      uint16_t long_term_pic_num       = {};
+    };
+
+    struct EncodeH264RefPicMarkingEntry
+    {
+      using NativeType = StdVideoEncodeH264RefPicMarkingEntry;
+
+      operator StdVideoEncodeH264RefPicMarkingEntry const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH264RefPicMarkingEntry *>( this );
+      }
+
+      operator StdVideoEncodeH264RefPicMarkingEntry &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH264RefPicMarkingEntry *>( this );
+      }
+
+      bool operator==( EncodeH264RefPicMarkingEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( memory_management_control_operation == rhs.memory_management_control_operation ) &&
+               ( difference_of_pic_nums_minus1 == rhs.difference_of_pic_nums_minus1 ) && ( long_term_pic_num == rhs.long_term_pic_num ) &&
+               ( long_term_frame_idx == rhs.long_term_frame_idx ) && ( max_long_term_frame_idx_plus1 == rhs.max_long_term_frame_idx_plus1 );
+      }
+
+      bool operator!=( EncodeH264RefPicMarkingEntry const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264MemMgmtControlOp memory_management_control_operation =
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264MemMgmtControlOp::eEnd;
+      uint16_t difference_of_pic_nums_minus1 = {};
+      uint16_t long_term_pic_num             = {};
+      uint16_t long_term_frame_idx           = {};
+      uint16_t max_long_term_frame_idx_plus1 = {};
+    };
+
+    struct EncodeH264ReferenceListsInfo
+    {
+      using NativeType = StdVideoEncodeH264ReferenceListsInfo;
+
+      operator StdVideoEncodeH264ReferenceListsInfo const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH264ReferenceListsInfo *>( this );
+      }
+
+      operator StdVideoEncodeH264ReferenceListsInfo &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH264ReferenceListsInfo *>( this );
+      }
+
+      bool operator==( EncodeH264ReferenceListsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( num_ref_idx_l0_active_minus1 == rhs.num_ref_idx_l0_active_minus1 ) &&
+               ( num_ref_idx_l1_active_minus1 == rhs.num_ref_idx_l1_active_minus1 ) && ( RefPicList0 == rhs.RefPicList0 ) &&
+               ( RefPicList1 == rhs.RefPicList1 ) && ( refList0ModOpCount == rhs.refList0ModOpCount ) && ( refList1ModOpCount == rhs.refList1ModOpCount ) &&
+               ( refPicMarkingOpCount == rhs.refPicMarkingOpCount ) && ( reserved1 == rhs.reserved1 ) &&
+               ( pRefList0ModOperations == rhs.pRefList0ModOperations ) && ( pRefList1ModOperations == rhs.pRefList1ModOperations ) &&
+               ( pRefPicMarkingOperations == rhs.pRefPicMarkingOperations );
+      }
+
+      bool operator!=( EncodeH264ReferenceListsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264ReferenceListsInfoFlags    flags                        = {};
+      uint8_t                                                                                num_ref_idx_l0_active_minus1 = {};
+      uint8_t                                                                                num_ref_idx_l1_active_minus1 = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H264_MAX_NUM_LIST_REF>         RefPicList0                  = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H264_MAX_NUM_LIST_REF>         RefPicList1                  = {};
+      uint8_t                                                                                refList0ModOpCount           = {};
+      uint8_t                                                                                refList1ModOpCount           = {};
+      uint8_t                                                                                refPicMarkingOpCount         = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, 7>                                       reserved1                    = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264RefListModEntry *    pRefList0ModOperations       = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264RefListModEntry *    pRefList1ModOperations       = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264RefPicMarkingEntry * pRefPicMarkingOperations     = {};
+    };
+
+    struct EncodeH264PictureInfo
+    {
+      using NativeType = StdVideoEncodeH264PictureInfo;
+
+      operator StdVideoEncodeH264PictureInfo const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH264PictureInfo *>( this );
+      }
+
+      operator StdVideoEncodeH264PictureInfo &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH264PictureInfo *>( this );
+      }
+
+      bool operator==( EncodeH264PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( seq_parameter_set_id == rhs.seq_parameter_set_id ) && ( pic_parameter_set_id == rhs.pic_parameter_set_id ) &&
+               ( idr_pic_id == rhs.idr_pic_id ) && ( primary_pic_type == rhs.primary_pic_type ) && ( frame_num == rhs.frame_num ) &&
+               ( PicOrderCnt == rhs.PicOrderCnt ) && ( temporal_id == rhs.temporal_id ) && ( reserved1 == rhs.reserved1 ) && ( pRefLists == rhs.pRefLists );
+      }
+
+      bool operator!=( EncodeH264PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264PictureInfoFlags flags                = {};
+      uint8_t                                                                      seq_parameter_set_id = {};
+      uint8_t                                                                      pic_parameter_set_id = {};
+      uint16_t                                                                     idr_pic_id           = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PictureType            primary_pic_type =
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PictureType::eP;
+      uint32_t                                                                               frame_num   = {};
+      int32_t                                                                                PicOrderCnt = {};
+      uint8_t                                                                                temporal_id = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, 3>                                       reserved1   = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264ReferenceListsInfo * pRefLists   = {};
+    };
+
+    struct EncodeH264ReferenceInfo
+    {
+      using NativeType = StdVideoEncodeH264ReferenceInfo;
+
+      operator StdVideoEncodeH264ReferenceInfo const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH264ReferenceInfo *>( this );
+      }
+
+      operator StdVideoEncodeH264ReferenceInfo &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH264ReferenceInfo *>( this );
+      }
+
+      bool operator==( EncodeH264ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( primary_pic_type == rhs.primary_pic_type ) && ( FrameNum == rhs.FrameNum ) && ( PicOrderCnt == rhs.PicOrderCnt ) &&
+               ( long_term_pic_num == rhs.long_term_pic_num ) && ( long_term_frame_idx == rhs.long_term_frame_idx ) && ( temporal_id == rhs.temporal_id );
+      }
+
+      bool operator!=( EncodeH264ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264ReferenceInfoFlags flags = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PictureType              primary_pic_type =
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264PictureType::eP;
+      uint32_t FrameNum            = {};
+      int32_t  PicOrderCnt         = {};
+      uint16_t long_term_pic_num   = {};
+      uint16_t long_term_frame_idx = {};
+      uint8_t  temporal_id         = {};
+    };
+
+    struct EncodeH264SliceHeader
+    {
+      using NativeType = StdVideoEncodeH264SliceHeader;
+
+      operator StdVideoEncodeH264SliceHeader const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH264SliceHeader *>( this );
+      }
+
+      operator StdVideoEncodeH264SliceHeader &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH264SliceHeader *>( this );
+      }
+
+      bool operator==( EncodeH264SliceHeader const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( first_mb_in_slice == rhs.first_mb_in_slice ) && ( slice_type == rhs.slice_type ) &&
+               ( slice_alpha_c0_offset_div2 == rhs.slice_alpha_c0_offset_div2 ) && ( slice_beta_offset_div2 == rhs.slice_beta_offset_div2 ) &&
+               ( slice_qp_delta == rhs.slice_qp_delta ) && ( reserved1 == rhs.reserved1 ) && ( cabac_init_idc == rhs.cabac_init_idc ) &&
+               ( disable_deblocking_filter_idc == rhs.disable_deblocking_filter_idc ) && ( pWeightTable == rhs.pWeightTable );
+      }
+
+      bool operator!=( EncodeH264SliceHeader const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264SliceHeaderFlags flags             = {};
+      uint32_t                                                                     first_mb_in_slice = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SliceType    slice_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264SliceType::eP;
+      int8_t                                                             slice_alpha_c0_offset_div2 = {};
+      int8_t                                                             slice_beta_offset_div2     = {};
+      int8_t                                                             slice_qp_delta             = {};
+      uint8_t                                                            reserved1                  = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264CabacInitIdc cabac_init_idc =
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264CabacInitIdc::e0;
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264DisableDeblockingFilterIdc disable_deblocking_filter_idc =
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H264DisableDeblockingFilterIdc::eDisabled;
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH264WeightTable * pWeightTable = {};
+    };
+
+    //=== vulkan_video_codec_h265std ===
+
+    struct H265DecPicBufMgr
+    {
+      using NativeType = StdVideoH265DecPicBufMgr;
+
+      operator StdVideoH265DecPicBufMgr const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH265DecPicBufMgr *>( this );
+      }
+
+      operator StdVideoH265DecPicBufMgr &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH265DecPicBufMgr *>( this );
+      }
+
+      bool operator==( H265DecPicBufMgr const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( max_latency_increase_plus1 == rhs.max_latency_increase_plus1 ) && ( max_dec_pic_buffering_minus1 == rhs.max_dec_pic_buffering_minus1 ) &&
+               ( max_num_reorder_pics == rhs.max_num_reorder_pics );
+      }
+
+      bool operator!=( H265DecPicBufMgr const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, STD_VIDEO_H265_SUBLAYERS_LIST_SIZE> max_latency_increase_plus1   = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H265_SUBLAYERS_LIST_SIZE>  max_dec_pic_buffering_minus1 = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H265_SUBLAYERS_LIST_SIZE>  max_num_reorder_pics         = {};
+    };
+
+    struct H265SubLayerHrdParameters
+    {
+      using NativeType = StdVideoH265SubLayerHrdParameters;
+
+      operator StdVideoH265SubLayerHrdParameters const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH265SubLayerHrdParameters *>( this );
+      }
+
+      operator StdVideoH265SubLayerHrdParameters &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH265SubLayerHrdParameters *>( this );
+      }
+
+      bool operator==( H265SubLayerHrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( bit_rate_value_minus1 == rhs.bit_rate_value_minus1 ) && ( cpb_size_value_minus1 == rhs.cpb_size_value_minus1 ) &&
+               ( cpb_size_du_value_minus1 == rhs.cpb_size_du_value_minus1 ) && ( bit_rate_du_value_minus1 == rhs.bit_rate_du_value_minus1 ) &&
+               ( cbr_flag == rhs.cbr_flag );
+      }
+
+      bool operator!=( H265SubLayerHrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, STD_VIDEO_H265_CPB_CNT_LIST_SIZE> bit_rate_value_minus1    = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, STD_VIDEO_H265_CPB_CNT_LIST_SIZE> cpb_size_value_minus1    = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, STD_VIDEO_H265_CPB_CNT_LIST_SIZE> cpb_size_du_value_minus1 = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, STD_VIDEO_H265_CPB_CNT_LIST_SIZE> bit_rate_du_value_minus1 = {};
+      uint32_t                                                                         cbr_flag                 = {};
+    };
+
+    struct H265HrdFlags
+    {
+      using NativeType = StdVideoH265HrdFlags;
+
+      operator StdVideoH265HrdFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH265HrdFlags *>( this );
+      }
+
+      operator StdVideoH265HrdFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH265HrdFlags *>( this );
+      }
+
+      bool operator==( H265HrdFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( nal_hrd_parameters_present_flag == rhs.nal_hrd_parameters_present_flag ) &&
+               ( vcl_hrd_parameters_present_flag == rhs.vcl_hrd_parameters_present_flag ) &&
+               ( sub_pic_hrd_params_present_flag == rhs.sub_pic_hrd_params_present_flag ) &&
+               ( sub_pic_cpb_params_in_pic_timing_sei_flag == rhs.sub_pic_cpb_params_in_pic_timing_sei_flag ) &&
+               ( fixed_pic_rate_general_flag == rhs.fixed_pic_rate_general_flag ) && ( fixed_pic_rate_within_cvs_flag == rhs.fixed_pic_rate_within_cvs_flag ) &&
+               ( low_delay_hrd_flag == rhs.low_delay_hrd_flag );
+      }
+
+      bool operator!=( H265HrdFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t nal_hrd_parameters_present_flag           : 1;
+      uint32_t vcl_hrd_parameters_present_flag           : 1;
+      uint32_t sub_pic_hrd_params_present_flag           : 1;
+      uint32_t sub_pic_cpb_params_in_pic_timing_sei_flag : 1;
+      uint32_t fixed_pic_rate_general_flag               : 8;
+      uint32_t fixed_pic_rate_within_cvs_flag            : 8;
+      uint32_t low_delay_hrd_flag                        : 8;
+    };
+
+    struct H265HrdParameters
+    {
+      using NativeType = StdVideoH265HrdParameters;
+
+      operator StdVideoH265HrdParameters const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH265HrdParameters *>( this );
+      }
+
+      operator StdVideoH265HrdParameters &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH265HrdParameters *>( this );
+      }
+
+      bool operator==( H265HrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( tick_divisor_minus2 == rhs.tick_divisor_minus2 ) &&
+               ( du_cpb_removal_delay_increment_length_minus1 == rhs.du_cpb_removal_delay_increment_length_minus1 ) &&
+               ( dpb_output_delay_du_length_minus1 == rhs.dpb_output_delay_du_length_minus1 ) && ( bit_rate_scale == rhs.bit_rate_scale ) &&
+               ( cpb_size_scale == rhs.cpb_size_scale ) && ( cpb_size_du_scale == rhs.cpb_size_du_scale ) &&
+               ( initial_cpb_removal_delay_length_minus1 == rhs.initial_cpb_removal_delay_length_minus1 ) &&
+               ( au_cpb_removal_delay_length_minus1 == rhs.au_cpb_removal_delay_length_minus1 ) &&
+               ( dpb_output_delay_length_minus1 == rhs.dpb_output_delay_length_minus1 ) && ( cpb_cnt_minus1 == rhs.cpb_cnt_minus1 ) &&
+               ( elemental_duration_in_tc_minus1 == rhs.elemental_duration_in_tc_minus1 ) && ( reserved == rhs.reserved ) &&
+               ( pSubLayerHrdParametersNal == rhs.pSubLayerHrdParametersNal ) && ( pSubLayerHrdParametersVcl == rhs.pSubLayerHrdParametersVcl );
+      }
+
+      bool operator!=( H265HrdParameters const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265HrdFlags                      flags                                        = {};
+      uint8_t                                                                             tick_divisor_minus2                          = {};
+      uint8_t                                                                             du_cpb_removal_delay_increment_length_minus1 = {};
+      uint8_t                                                                             dpb_output_delay_du_length_minus1            = {};
+      uint8_t                                                                             bit_rate_scale                               = {};
+      uint8_t                                                                             cpb_size_scale                               = {};
+      uint8_t                                                                             cpb_size_du_scale                            = {};
+      uint8_t                                                                             initial_cpb_removal_delay_length_minus1      = {};
+      uint8_t                                                                             au_cpb_removal_delay_length_minus1           = {};
+      uint8_t                                                                             dpb_output_delay_length_minus1               = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H265_SUBLAYERS_LIST_SIZE>   cpb_cnt_minus1                               = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint16_t, STD_VIDEO_H265_SUBLAYERS_LIST_SIZE>  elemental_duration_in_tc_minus1              = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint16_t, 3>                                   reserved                                     = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SubLayerHrdParameters * pSubLayerHrdParametersNal                    = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SubLayerHrdParameters * pSubLayerHrdParametersVcl                    = {};
+    };
+
+    struct H265VpsFlags
+    {
+      using NativeType = StdVideoH265VpsFlags;
+
+      operator StdVideoH265VpsFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH265VpsFlags *>( this );
+      }
+
+      operator StdVideoH265VpsFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH265VpsFlags *>( this );
+      }
+
+      bool operator==( H265VpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( vps_temporal_id_nesting_flag == rhs.vps_temporal_id_nesting_flag ) &&
+               ( vps_sub_layer_ordering_info_present_flag == rhs.vps_sub_layer_ordering_info_present_flag ) &&
+               ( vps_timing_info_present_flag == rhs.vps_timing_info_present_flag ) &&
+               ( vps_poc_proportional_to_timing_flag == rhs.vps_poc_proportional_to_timing_flag );
+      }
+
+      bool operator!=( H265VpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t vps_temporal_id_nesting_flag             : 1;
+      uint32_t vps_sub_layer_ordering_info_present_flag : 1;
+      uint32_t vps_timing_info_present_flag             : 1;
+      uint32_t vps_poc_proportional_to_timing_flag      : 1;
+    };
+
+    struct H265ProfileTierLevelFlags
+    {
+      using NativeType = StdVideoH265ProfileTierLevelFlags;
+
+      operator StdVideoH265ProfileTierLevelFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH265ProfileTierLevelFlags *>( this );
+      }
+
+      operator StdVideoH265ProfileTierLevelFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH265ProfileTierLevelFlags *>( this );
+      }
+
+      bool operator==( H265ProfileTierLevelFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( general_tier_flag == rhs.general_tier_flag ) && ( general_progressive_source_flag == rhs.general_progressive_source_flag ) &&
+               ( general_interlaced_source_flag == rhs.general_interlaced_source_flag ) &&
+               ( general_non_packed_constraint_flag == rhs.general_non_packed_constraint_flag ) &&
+               ( general_frame_only_constraint_flag == rhs.general_frame_only_constraint_flag );
+      }
+
+      bool operator!=( H265ProfileTierLevelFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t general_tier_flag                  : 1;
+      uint32_t general_progressive_source_flag    : 1;
+      uint32_t general_interlaced_source_flag     : 1;
+      uint32_t general_non_packed_constraint_flag : 1;
+      uint32_t general_frame_only_constraint_flag : 1;
+    };
+
+    struct H265ProfileTierLevel
+    {
+      using NativeType = StdVideoH265ProfileTierLevel;
+
+      operator StdVideoH265ProfileTierLevel const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH265ProfileTierLevel *>( this );
+      }
+
+      operator StdVideoH265ProfileTierLevel &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH265ProfileTierLevel *>( this );
+      }
+
+      bool operator==( H265ProfileTierLevel const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( general_profile_idc == rhs.general_profile_idc ) && ( general_level_idc == rhs.general_level_idc );
+      }
+
+      bool operator!=( H265ProfileTierLevel const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileTierLevelFlags flags = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileIdc            general_profile_idc =
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileIdc::eMain;
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265LevelIdc general_level_idc = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265LevelIdc::e1_0;
+    };
+
+    struct H265VideoParameterSet
+    {
+      using NativeType = StdVideoH265VideoParameterSet;
+
+      operator StdVideoH265VideoParameterSet const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH265VideoParameterSet *>( this );
+      }
+
+      operator StdVideoH265VideoParameterSet &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH265VideoParameterSet *>( this );
+      }
+
+      bool operator==( H265VideoParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( vps_video_parameter_set_id == rhs.vps_video_parameter_set_id ) &&
+               ( vps_max_sub_layers_minus1 == rhs.vps_max_sub_layers_minus1 ) && ( reserved1 == rhs.reserved1 ) && ( reserved2 == rhs.reserved2 ) &&
+               ( vps_num_units_in_tick == rhs.vps_num_units_in_tick ) && ( vps_time_scale == rhs.vps_time_scale ) &&
+               ( vps_num_ticks_poc_diff_one_minus1 == rhs.vps_num_ticks_poc_diff_one_minus1 ) && ( reserved3 == rhs.reserved3 ) &&
+               ( pDecPicBufMgr == rhs.pDecPicBufMgr ) && ( pHrdParameters == rhs.pHrdParameters ) && ( pProfileTierLevel == rhs.pProfileTierLevel );
+      }
+
+      bool operator!=( H265VideoParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265VpsFlags                 flags                             = {};
+      uint8_t                                                                        vps_video_parameter_set_id        = {};
+      uint8_t                                                                        vps_max_sub_layers_minus1         = {};
+      uint8_t                                                                        reserved1                         = {};
+      uint8_t                                                                        reserved2                         = {};
+      uint32_t                                                                       vps_num_units_in_tick             = {};
+      uint32_t                                                                       vps_time_scale                    = {};
+      uint32_t                                                                       vps_num_ticks_poc_diff_one_minus1 = {};
+      uint32_t                                                                       reserved3                         = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265DecPicBufMgr *     pDecPicBufMgr                     = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265HrdParameters *    pHrdParameters                    = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileTierLevel * pProfileTierLevel                 = {};
+    };
+
+    struct H265ScalingLists
+    {
+      using NativeType = StdVideoH265ScalingLists;
+
+      operator StdVideoH265ScalingLists const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH265ScalingLists *>( this );
+      }
+
+      operator StdVideoH265ScalingLists &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH265ScalingLists *>( this );
+      }
+
+      bool operator==( H265ScalingLists const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( ScalingList4x4 == rhs.ScalingList4x4 ) && ( ScalingList8x8 == rhs.ScalingList8x8 ) && ( ScalingList16x16 == rhs.ScalingList16x16 ) &&
+               ( ScalingList32x32 == rhs.ScalingList32x32 ) && ( ScalingListDCCoef16x16 == rhs.ScalingListDCCoef16x16 ) &&
+               ( ScalingListDCCoef32x32 == rhs.ScalingListDCCoef32x32 );
+      }
+
+      bool operator!=( H265ScalingLists const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::ArrayWrapper2D<uint8_t, STD_VIDEO_H265_SCALING_LIST_4X4_NUM_LISTS, STD_VIDEO_H265_SCALING_LIST_4X4_NUM_ELEMENTS>
+        ScalingList4x4 = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper2D<uint8_t, STD_VIDEO_H265_SCALING_LIST_8X8_NUM_LISTS, STD_VIDEO_H265_SCALING_LIST_8X8_NUM_ELEMENTS>
+        ScalingList8x8 = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper2D<uint8_t, STD_VIDEO_H265_SCALING_LIST_16X16_NUM_LISTS, STD_VIDEO_H265_SCALING_LIST_16X16_NUM_ELEMENTS>
+        ScalingList16x16 = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper2D<uint8_t, STD_VIDEO_H265_SCALING_LIST_32X32_NUM_LISTS, STD_VIDEO_H265_SCALING_LIST_32X32_NUM_ELEMENTS>
+                                                                                                 ScalingList32x32       = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H265_SCALING_LIST_16X16_NUM_LISTS> ScalingListDCCoef16x16 = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H265_SCALING_LIST_32X32_NUM_LISTS> ScalingListDCCoef32x32 = {};
+    };
+
+    struct H265SpsVuiFlags
+    {
+      using NativeType = StdVideoH265SpsVuiFlags;
+
+      operator StdVideoH265SpsVuiFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH265SpsVuiFlags *>( this );
+      }
+
+      operator StdVideoH265SpsVuiFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH265SpsVuiFlags *>( this );
+      }
+
+      bool operator==( H265SpsVuiFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( aspect_ratio_info_present_flag == rhs.aspect_ratio_info_present_flag ) && ( overscan_info_present_flag == rhs.overscan_info_present_flag ) &&
+               ( overscan_appropriate_flag == rhs.overscan_appropriate_flag ) && ( video_signal_type_present_flag == rhs.video_signal_type_present_flag ) &&
+               ( video_full_range_flag == rhs.video_full_range_flag ) && ( colour_description_present_flag == rhs.colour_description_present_flag ) &&
+               ( chroma_loc_info_present_flag == rhs.chroma_loc_info_present_flag ) &&
+               ( neutral_chroma_indication_flag == rhs.neutral_chroma_indication_flag ) && ( field_seq_flag == rhs.field_seq_flag ) &&
+               ( frame_field_info_present_flag == rhs.frame_field_info_present_flag ) && ( default_display_window_flag == rhs.default_display_window_flag ) &&
+               ( vui_timing_info_present_flag == rhs.vui_timing_info_present_flag ) &&
+               ( vui_poc_proportional_to_timing_flag == rhs.vui_poc_proportional_to_timing_flag ) &&
+               ( vui_hrd_parameters_present_flag == rhs.vui_hrd_parameters_present_flag ) && ( bitstream_restriction_flag == rhs.bitstream_restriction_flag ) &&
+               ( tiles_fixed_structure_flag == rhs.tiles_fixed_structure_flag ) &&
+               ( motion_vectors_over_pic_boundaries_flag == rhs.motion_vectors_over_pic_boundaries_flag ) &&
+               ( restricted_ref_pic_lists_flag == rhs.restricted_ref_pic_lists_flag );
+      }
+
+      bool operator!=( H265SpsVuiFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t aspect_ratio_info_present_flag          : 1;
+      uint32_t overscan_info_present_flag              : 1;
+      uint32_t overscan_appropriate_flag               : 1;
+      uint32_t video_signal_type_present_flag          : 1;
+      uint32_t video_full_range_flag                   : 1;
+      uint32_t colour_description_present_flag         : 1;
+      uint32_t chroma_loc_info_present_flag            : 1;
+      uint32_t neutral_chroma_indication_flag          : 1;
+      uint32_t field_seq_flag                          : 1;
+      uint32_t frame_field_info_present_flag           : 1;
+      uint32_t default_display_window_flag             : 1;
+      uint32_t vui_timing_info_present_flag            : 1;
+      uint32_t vui_poc_proportional_to_timing_flag     : 1;
+      uint32_t vui_hrd_parameters_present_flag         : 1;
+      uint32_t bitstream_restriction_flag              : 1;
+      uint32_t tiles_fixed_structure_flag              : 1;
+      uint32_t motion_vectors_over_pic_boundaries_flag : 1;
+      uint32_t restricted_ref_pic_lists_flag           : 1;
+    };
+
+    struct H265SequenceParameterSetVui
+    {
+      using NativeType = StdVideoH265SequenceParameterSetVui;
+
+      operator StdVideoH265SequenceParameterSetVui const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH265SequenceParameterSetVui *>( this );
+      }
+
+      operator StdVideoH265SequenceParameterSetVui &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH265SequenceParameterSetVui *>( this );
+      }
+
+      bool operator==( H265SequenceParameterSetVui const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( aspect_ratio_idc == rhs.aspect_ratio_idc ) && ( sar_width == rhs.sar_width ) && ( sar_height == rhs.sar_height ) &&
+               ( video_format == rhs.video_format ) && ( colour_primaries == rhs.colour_primaries ) &&
+               ( transfer_characteristics == rhs.transfer_characteristics ) && ( matrix_coeffs == rhs.matrix_coeffs ) &&
+               ( chroma_sample_loc_type_top_field == rhs.chroma_sample_loc_type_top_field ) &&
+               ( chroma_sample_loc_type_bottom_field == rhs.chroma_sample_loc_type_bottom_field ) && ( reserved1 == rhs.reserved1 ) &&
+               ( reserved2 == rhs.reserved2 ) && ( def_disp_win_left_offset == rhs.def_disp_win_left_offset ) &&
+               ( def_disp_win_right_offset == rhs.def_disp_win_right_offset ) && ( def_disp_win_top_offset == rhs.def_disp_win_top_offset ) &&
+               ( def_disp_win_bottom_offset == rhs.def_disp_win_bottom_offset ) && ( vui_num_units_in_tick == rhs.vui_num_units_in_tick ) &&
+               ( vui_time_scale == rhs.vui_time_scale ) && ( vui_num_ticks_poc_diff_one_minus1 == rhs.vui_num_ticks_poc_diff_one_minus1 ) &&
+               ( min_spatial_segmentation_idc == rhs.min_spatial_segmentation_idc ) && ( reserved3 == rhs.reserved3 ) &&
+               ( max_bytes_per_pic_denom == rhs.max_bytes_per_pic_denom ) && ( max_bits_per_min_cu_denom == rhs.max_bits_per_min_cu_denom ) &&
+               ( log2_max_mv_length_horizontal == rhs.log2_max_mv_length_horizontal ) && ( log2_max_mv_length_vertical == rhs.log2_max_mv_length_vertical ) &&
+               ( pHrdParameters == rhs.pHrdParameters );
+      }
+
+      bool operator!=( H265SequenceParameterSetVui const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SpsVuiFlags    flags = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265AspectRatioIdc aspect_ratio_idc =
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265AspectRatioIdc::eUnspecified;
+      uint16_t                                                                    sar_width                           = {};
+      uint16_t                                                                    sar_height                          = {};
+      uint8_t                                                                     video_format                        = {};
+      uint8_t                                                                     colour_primaries                    = {};
+      uint8_t                                                                     transfer_characteristics            = {};
+      uint8_t                                                                     matrix_coeffs                       = {};
+      uint8_t                                                                     chroma_sample_loc_type_top_field    = {};
+      uint8_t                                                                     chroma_sample_loc_type_bottom_field = {};
+      uint8_t                                                                     reserved1                           = {};
+      uint8_t                                                                     reserved2                           = {};
+      uint16_t                                                                    def_disp_win_left_offset            = {};
+      uint16_t                                                                    def_disp_win_right_offset           = {};
+      uint16_t                                                                    def_disp_win_top_offset             = {};
+      uint16_t                                                                    def_disp_win_bottom_offset          = {};
+      uint32_t                                                                    vui_num_units_in_tick               = {};
+      uint32_t                                                                    vui_time_scale                      = {};
+      uint32_t                                                                    vui_num_ticks_poc_diff_one_minus1   = {};
+      uint16_t                                                                    min_spatial_segmentation_idc        = {};
+      uint16_t                                                                    reserved3                           = {};
+      uint8_t                                                                     max_bytes_per_pic_denom             = {};
+      uint8_t                                                                     max_bits_per_min_cu_denom           = {};
+      uint8_t                                                                     log2_max_mv_length_horizontal       = {};
+      uint8_t                                                                     log2_max_mv_length_vertical         = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265HrdParameters * pHrdParameters                      = {};
+    };
+
+    struct H265PredictorPaletteEntries
+    {
+      using NativeType = StdVideoH265PredictorPaletteEntries;
+
+      operator StdVideoH265PredictorPaletteEntries const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH265PredictorPaletteEntries *>( this );
+      }
+
+      operator StdVideoH265PredictorPaletteEntries &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH265PredictorPaletteEntries *>( this );
+      }
+
+      bool operator==( H265PredictorPaletteEntries const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( PredictorPaletteEntries == rhs.PredictorPaletteEntries );
+      }
+
+      bool operator!=( H265PredictorPaletteEntries const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::
+        ArrayWrapper2D<uint16_t, STD_VIDEO_H265_PREDICTOR_PALETTE_COMPONENTS_LIST_SIZE, STD_VIDEO_H265_PREDICTOR_PALETTE_COMP_ENTRIES_LIST_SIZE>
+          PredictorPaletteEntries = {};
+    };
+
+    struct H265SpsFlags
+    {
+      using NativeType = StdVideoH265SpsFlags;
+
+      operator StdVideoH265SpsFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH265SpsFlags *>( this );
+      }
+
+      operator StdVideoH265SpsFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH265SpsFlags *>( this );
+      }
+
+      bool operator==( H265SpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( sps_temporal_id_nesting_flag == rhs.sps_temporal_id_nesting_flag ) && ( separate_colour_plane_flag == rhs.separate_colour_plane_flag ) &&
+               ( conformance_window_flag == rhs.conformance_window_flag ) &&
+               ( sps_sub_layer_ordering_info_present_flag == rhs.sps_sub_layer_ordering_info_present_flag ) &&
+               ( scaling_list_enabled_flag == rhs.scaling_list_enabled_flag ) &&
+               ( sps_scaling_list_data_present_flag == rhs.sps_scaling_list_data_present_flag ) && ( amp_enabled_flag == rhs.amp_enabled_flag ) &&
+               ( sample_adaptive_offset_enabled_flag == rhs.sample_adaptive_offset_enabled_flag ) && ( pcm_enabled_flag == rhs.pcm_enabled_flag ) &&
+               ( pcm_loop_filter_disabled_flag == rhs.pcm_loop_filter_disabled_flag ) &&
+               ( long_term_ref_pics_present_flag == rhs.long_term_ref_pics_present_flag ) &&
+               ( sps_temporal_mvp_enabled_flag == rhs.sps_temporal_mvp_enabled_flag ) &&
+               ( strong_intra_smoothing_enabled_flag == rhs.strong_intra_smoothing_enabled_flag ) &&
+               ( vui_parameters_present_flag == rhs.vui_parameters_present_flag ) && ( sps_extension_present_flag == rhs.sps_extension_present_flag ) &&
+               ( sps_range_extension_flag == rhs.sps_range_extension_flag ) &&
+               ( transform_skip_rotation_enabled_flag == rhs.transform_skip_rotation_enabled_flag ) &&
+               ( transform_skip_context_enabled_flag == rhs.transform_skip_context_enabled_flag ) &&
+               ( implicit_rdpcm_enabled_flag == rhs.implicit_rdpcm_enabled_flag ) && ( explicit_rdpcm_enabled_flag == rhs.explicit_rdpcm_enabled_flag ) &&
+               ( extended_precision_processing_flag == rhs.extended_precision_processing_flag ) &&
+               ( intra_smoothing_disabled_flag == rhs.intra_smoothing_disabled_flag ) &&
+               ( high_precision_offsets_enabled_flag == rhs.high_precision_offsets_enabled_flag ) &&
+               ( persistent_rice_adaptation_enabled_flag == rhs.persistent_rice_adaptation_enabled_flag ) &&
+               ( cabac_bypass_alignment_enabled_flag == rhs.cabac_bypass_alignment_enabled_flag ) && ( sps_scc_extension_flag == rhs.sps_scc_extension_flag ) &&
+               ( sps_curr_pic_ref_enabled_flag == rhs.sps_curr_pic_ref_enabled_flag ) && ( palette_mode_enabled_flag == rhs.palette_mode_enabled_flag ) &&
+               ( sps_palette_predictor_initializers_present_flag == rhs.sps_palette_predictor_initializers_present_flag ) &&
+               ( intra_boundary_filtering_disabled_flag == rhs.intra_boundary_filtering_disabled_flag );
+      }
+
+      bool operator!=( H265SpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t sps_temporal_id_nesting_flag                    : 1;
+      uint32_t separate_colour_plane_flag                      : 1;
+      uint32_t conformance_window_flag                         : 1;
+      uint32_t sps_sub_layer_ordering_info_present_flag        : 1;
+      uint32_t scaling_list_enabled_flag                       : 1;
+      uint32_t sps_scaling_list_data_present_flag              : 1;
+      uint32_t amp_enabled_flag                                : 1;
+      uint32_t sample_adaptive_offset_enabled_flag             : 1;
+      uint32_t pcm_enabled_flag                                : 1;
+      uint32_t pcm_loop_filter_disabled_flag                   : 1;
+      uint32_t long_term_ref_pics_present_flag                 : 1;
+      uint32_t sps_temporal_mvp_enabled_flag                   : 1;
+      uint32_t strong_intra_smoothing_enabled_flag             : 1;
+      uint32_t vui_parameters_present_flag                     : 1;
+      uint32_t sps_extension_present_flag                      : 1;
+      uint32_t sps_range_extension_flag                        : 1;
+      uint32_t transform_skip_rotation_enabled_flag            : 1;
+      uint32_t transform_skip_context_enabled_flag             : 1;
+      uint32_t implicit_rdpcm_enabled_flag                     : 1;
+      uint32_t explicit_rdpcm_enabled_flag                     : 1;
+      uint32_t extended_precision_processing_flag              : 1;
+      uint32_t intra_smoothing_disabled_flag                   : 1;
+      uint32_t high_precision_offsets_enabled_flag             : 1;
+      uint32_t persistent_rice_adaptation_enabled_flag         : 1;
+      uint32_t cabac_bypass_alignment_enabled_flag             : 1;
+      uint32_t sps_scc_extension_flag                          : 1;
+      uint32_t sps_curr_pic_ref_enabled_flag                   : 1;
+      uint32_t palette_mode_enabled_flag                       : 1;
+      uint32_t sps_palette_predictor_initializers_present_flag : 1;
+      uint32_t intra_boundary_filtering_disabled_flag          : 1;
+    };
+
+    struct H265ShortTermRefPicSetFlags
+    {
+      using NativeType = StdVideoH265ShortTermRefPicSetFlags;
+
+      operator StdVideoH265ShortTermRefPicSetFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH265ShortTermRefPicSetFlags *>( this );
+      }
+
+      operator StdVideoH265ShortTermRefPicSetFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH265ShortTermRefPicSetFlags *>( this );
+      }
+
+      bool operator==( H265ShortTermRefPicSetFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( inter_ref_pic_set_prediction_flag == rhs.inter_ref_pic_set_prediction_flag ) && ( delta_rps_sign == rhs.delta_rps_sign );
+      }
+
+      bool operator!=( H265ShortTermRefPicSetFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t inter_ref_pic_set_prediction_flag : 1;
+      uint32_t delta_rps_sign                    : 1;
+    };
+
+    struct H265ShortTermRefPicSet
+    {
+      using NativeType = StdVideoH265ShortTermRefPicSet;
+
+      operator StdVideoH265ShortTermRefPicSet const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH265ShortTermRefPicSet *>( this );
+      }
+
+      operator StdVideoH265ShortTermRefPicSet &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH265ShortTermRefPicSet *>( this );
+      }
+
+      bool operator==( H265ShortTermRefPicSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( delta_idx_minus1 == rhs.delta_idx_minus1 ) && ( use_delta_flag == rhs.use_delta_flag ) &&
+               ( abs_delta_rps_minus1 == rhs.abs_delta_rps_minus1 ) && ( used_by_curr_pic_flag == rhs.used_by_curr_pic_flag ) &&
+               ( used_by_curr_pic_s0_flag == rhs.used_by_curr_pic_s0_flag ) && ( used_by_curr_pic_s1_flag == rhs.used_by_curr_pic_s1_flag ) &&
+               ( reserved1 == rhs.reserved1 ) && ( reserved2 == rhs.reserved2 ) && ( reserved3 == rhs.reserved3 ) &&
+               ( num_negative_pics == rhs.num_negative_pics ) && ( num_positive_pics == rhs.num_positive_pics ) &&
+               ( delta_poc_s0_minus1 == rhs.delta_poc_s0_minus1 ) && ( delta_poc_s1_minus1 == rhs.delta_poc_s1_minus1 );
+      }
+
+      bool operator!=( H265ShortTermRefPicSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ShortTermRefPicSetFlags flags                    = {};
+      uint32_t                                                                      delta_idx_minus1         = {};
+      uint16_t                                                                      use_delta_flag           = {};
+      uint16_t                                                                      abs_delta_rps_minus1     = {};
+      uint16_t                                                                      used_by_curr_pic_flag    = {};
+      uint16_t                                                                      used_by_curr_pic_s0_flag = {};
+      uint16_t                                                                      used_by_curr_pic_s1_flag = {};
+      uint16_t                                                                      reserved1                = {};
+      uint8_t                                                                       reserved2                = {};
+      uint8_t                                                                       reserved3                = {};
+      uint8_t                                                                       num_negative_pics        = {};
+      uint8_t                                                                       num_positive_pics        = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint16_t, STD_VIDEO_H265_MAX_DPB_SIZE>   delta_poc_s0_minus1      = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint16_t, STD_VIDEO_H265_MAX_DPB_SIZE>   delta_poc_s1_minus1      = {};
+    };
+
+    struct H265LongTermRefPicsSps
+    {
+      using NativeType = StdVideoH265LongTermRefPicsSps;
+
+      operator StdVideoH265LongTermRefPicsSps const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH265LongTermRefPicsSps *>( this );
+      }
+
+      operator StdVideoH265LongTermRefPicsSps &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH265LongTermRefPicsSps *>( this );
+      }
+
+      bool operator==( H265LongTermRefPicsSps const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( used_by_curr_pic_lt_sps_flag == rhs.used_by_curr_pic_lt_sps_flag ) && ( lt_ref_pic_poc_lsb_sps == rhs.lt_ref_pic_poc_lsb_sps );
+      }
+
+      bool operator!=( H265LongTermRefPicsSps const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t                                                                                  used_by_curr_pic_lt_sps_flag = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint32_t, STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS> lt_ref_pic_poc_lsb_sps       = {};
+    };
+
+    struct H265SequenceParameterSet
+    {
+      using NativeType = StdVideoH265SequenceParameterSet;
+
+      operator StdVideoH265SequenceParameterSet const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH265SequenceParameterSet *>( this );
+      }
+
+      operator StdVideoH265SequenceParameterSet &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH265SequenceParameterSet *>( this );
+      }
+
+      bool operator==( H265SequenceParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( chroma_format_idc == rhs.chroma_format_idc ) && ( pic_width_in_luma_samples == rhs.pic_width_in_luma_samples ) &&
+               ( pic_height_in_luma_samples == rhs.pic_height_in_luma_samples ) && ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) &&
+               ( sps_max_sub_layers_minus1 == rhs.sps_max_sub_layers_minus1 ) && ( sps_seq_parameter_set_id == rhs.sps_seq_parameter_set_id ) &&
+               ( bit_depth_luma_minus8 == rhs.bit_depth_luma_minus8 ) && ( bit_depth_chroma_minus8 == rhs.bit_depth_chroma_minus8 ) &&
+               ( log2_max_pic_order_cnt_lsb_minus4 == rhs.log2_max_pic_order_cnt_lsb_minus4 ) &&
+               ( log2_min_luma_coding_block_size_minus3 == rhs.log2_min_luma_coding_block_size_minus3 ) &&
+               ( log2_diff_max_min_luma_coding_block_size == rhs.log2_diff_max_min_luma_coding_block_size ) &&
+               ( log2_min_luma_transform_block_size_minus2 == rhs.log2_min_luma_transform_block_size_minus2 ) &&
+               ( log2_diff_max_min_luma_transform_block_size == rhs.log2_diff_max_min_luma_transform_block_size ) &&
+               ( max_transform_hierarchy_depth_inter == rhs.max_transform_hierarchy_depth_inter ) &&
+               ( max_transform_hierarchy_depth_intra == rhs.max_transform_hierarchy_depth_intra ) &&
+               ( num_short_term_ref_pic_sets == rhs.num_short_term_ref_pic_sets ) && ( num_long_term_ref_pics_sps == rhs.num_long_term_ref_pics_sps ) &&
+               ( pcm_sample_bit_depth_luma_minus1 == rhs.pcm_sample_bit_depth_luma_minus1 ) &&
+               ( pcm_sample_bit_depth_chroma_minus1 == rhs.pcm_sample_bit_depth_chroma_minus1 ) &&
+               ( log2_min_pcm_luma_coding_block_size_minus3 == rhs.log2_min_pcm_luma_coding_block_size_minus3 ) &&
+               ( log2_diff_max_min_pcm_luma_coding_block_size == rhs.log2_diff_max_min_pcm_luma_coding_block_size ) && ( reserved1 == rhs.reserved1 ) &&
+               ( reserved2 == rhs.reserved2 ) && ( palette_max_size == rhs.palette_max_size ) &&
+               ( delta_palette_max_predictor_size == rhs.delta_palette_max_predictor_size ) &&
+               ( motion_vector_resolution_control_idc == rhs.motion_vector_resolution_control_idc ) &&
+               ( sps_num_palette_predictor_initializers_minus1 == rhs.sps_num_palette_predictor_initializers_minus1 ) &&
+               ( conf_win_left_offset == rhs.conf_win_left_offset ) && ( conf_win_right_offset == rhs.conf_win_right_offset ) &&
+               ( conf_win_top_offset == rhs.conf_win_top_offset ) && ( conf_win_bottom_offset == rhs.conf_win_bottom_offset ) &&
+               ( pProfileTierLevel == rhs.pProfileTierLevel ) && ( pDecPicBufMgr == rhs.pDecPicBufMgr ) && ( pScalingLists == rhs.pScalingLists ) &&
+               ( pShortTermRefPicSet == rhs.pShortTermRefPicSet ) && ( pLongTermRefPicsSps == rhs.pLongTermRefPicsSps ) &&
+               ( pSequenceParameterSetVui == rhs.pSequenceParameterSetVui ) && ( pPredictorPaletteEntries == rhs.pPredictorPaletteEntries );
+      }
+
+      bool operator!=( H265SequenceParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SpsFlags        flags = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ChromaFormatIdc chroma_format_idc =
+        VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ChromaFormatIdc::eMonochrome;
+      uint32_t                                                                              pic_width_in_luma_samples                     = {};
+      uint32_t                                                                              pic_height_in_luma_samples                    = {};
+      uint8_t                                                                               sps_video_parameter_set_id                    = {};
+      uint8_t                                                                               sps_max_sub_layers_minus1                     = {};
+      uint8_t                                                                               sps_seq_parameter_set_id                      = {};
+      uint8_t                                                                               bit_depth_luma_minus8                         = {};
+      uint8_t                                                                               bit_depth_chroma_minus8                       = {};
+      uint8_t                                                                               log2_max_pic_order_cnt_lsb_minus4             = {};
+      uint8_t                                                                               log2_min_luma_coding_block_size_minus3        = {};
+      uint8_t                                                                               log2_diff_max_min_luma_coding_block_size      = {};
+      uint8_t                                                                               log2_min_luma_transform_block_size_minus2     = {};
+      uint8_t                                                                               log2_diff_max_min_luma_transform_block_size   = {};
+      uint8_t                                                                               max_transform_hierarchy_depth_inter           = {};
+      uint8_t                                                                               max_transform_hierarchy_depth_intra           = {};
+      uint8_t                                                                               num_short_term_ref_pic_sets                   = {};
+      uint8_t                                                                               num_long_term_ref_pics_sps                    = {};
+      uint8_t                                                                               pcm_sample_bit_depth_luma_minus1              = {};
+      uint8_t                                                                               pcm_sample_bit_depth_chroma_minus1            = {};
+      uint8_t                                                                               log2_min_pcm_luma_coding_block_size_minus3    = {};
+      uint8_t                                                                               log2_diff_max_min_pcm_luma_coding_block_size  = {};
+      uint8_t                                                                               reserved1                                     = {};
+      uint8_t                                                                               reserved2                                     = {};
+      uint8_t                                                                               palette_max_size                              = {};
+      uint8_t                                                                               delta_palette_max_predictor_size              = {};
+      uint8_t                                                                               motion_vector_resolution_control_idc          = {};
+      uint8_t                                                                               sps_num_palette_predictor_initializers_minus1 = {};
+      uint32_t                                                                              conf_win_left_offset                          = {};
+      uint32_t                                                                              conf_win_right_offset                         = {};
+      uint32_t                                                                              conf_win_top_offset                           = {};
+      uint32_t                                                                              conf_win_bottom_offset                        = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ProfileTierLevel *        pProfileTierLevel                             = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265DecPicBufMgr *            pDecPicBufMgr                                 = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ScalingLists *            pScalingLists                                 = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ShortTermRefPicSet *      pShortTermRefPicSet                           = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265LongTermRefPicsSps *      pLongTermRefPicsSps                           = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SequenceParameterSetVui * pSequenceParameterSetVui                      = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PredictorPaletteEntries * pPredictorPaletteEntries                      = {};
+    };
+
+    struct H265PpsFlags
+    {
+      using NativeType = StdVideoH265PpsFlags;
+
+      operator StdVideoH265PpsFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH265PpsFlags *>( this );
+      }
+
+      operator StdVideoH265PpsFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH265PpsFlags *>( this );
+      }
+
+      bool operator==( H265PpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( dependent_slice_segments_enabled_flag == rhs.dependent_slice_segments_enabled_flag ) &&
+               ( output_flag_present_flag == rhs.output_flag_present_flag ) && ( sign_data_hiding_enabled_flag == rhs.sign_data_hiding_enabled_flag ) &&
+               ( cabac_init_present_flag == rhs.cabac_init_present_flag ) && ( constrained_intra_pred_flag == rhs.constrained_intra_pred_flag ) &&
+               ( transform_skip_enabled_flag == rhs.transform_skip_enabled_flag ) && ( cu_qp_delta_enabled_flag == rhs.cu_qp_delta_enabled_flag ) &&
+               ( pps_slice_chroma_qp_offsets_present_flag == rhs.pps_slice_chroma_qp_offsets_present_flag ) &&
+               ( weighted_pred_flag == rhs.weighted_pred_flag ) && ( weighted_bipred_flag == rhs.weighted_bipred_flag ) &&
+               ( transquant_bypass_enabled_flag == rhs.transquant_bypass_enabled_flag ) && ( tiles_enabled_flag == rhs.tiles_enabled_flag ) &&
+               ( entropy_coding_sync_enabled_flag == rhs.entropy_coding_sync_enabled_flag ) && ( uniform_spacing_flag == rhs.uniform_spacing_flag ) &&
+               ( loop_filter_across_tiles_enabled_flag == rhs.loop_filter_across_tiles_enabled_flag ) &&
+               ( pps_loop_filter_across_slices_enabled_flag == rhs.pps_loop_filter_across_slices_enabled_flag ) &&
+               ( deblocking_filter_control_present_flag == rhs.deblocking_filter_control_present_flag ) &&
+               ( deblocking_filter_override_enabled_flag == rhs.deblocking_filter_override_enabled_flag ) &&
+               ( pps_deblocking_filter_disabled_flag == rhs.pps_deblocking_filter_disabled_flag ) &&
+               ( pps_scaling_list_data_present_flag == rhs.pps_scaling_list_data_present_flag ) &&
+               ( lists_modification_present_flag == rhs.lists_modification_present_flag ) &&
+               ( slice_segment_header_extension_present_flag == rhs.slice_segment_header_extension_present_flag ) &&
+               ( pps_extension_present_flag == rhs.pps_extension_present_flag ) &&
+               ( cross_component_prediction_enabled_flag == rhs.cross_component_prediction_enabled_flag ) &&
+               ( chroma_qp_offset_list_enabled_flag == rhs.chroma_qp_offset_list_enabled_flag ) &&
+               ( pps_curr_pic_ref_enabled_flag == rhs.pps_curr_pic_ref_enabled_flag ) &&
+               ( residual_adaptive_colour_transform_enabled_flag == rhs.residual_adaptive_colour_transform_enabled_flag ) &&
+               ( pps_slice_act_qp_offsets_present_flag == rhs.pps_slice_act_qp_offsets_present_flag ) &&
+               ( pps_palette_predictor_initializers_present_flag == rhs.pps_palette_predictor_initializers_present_flag ) &&
+               ( monochrome_palette_flag == rhs.monochrome_palette_flag ) && ( pps_range_extension_flag == rhs.pps_range_extension_flag );
+      }
+
+      bool operator!=( H265PpsFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t dependent_slice_segments_enabled_flag           : 1;
+      uint32_t output_flag_present_flag                        : 1;
+      uint32_t sign_data_hiding_enabled_flag                   : 1;
+      uint32_t cabac_init_present_flag                         : 1;
+      uint32_t constrained_intra_pred_flag                     : 1;
+      uint32_t transform_skip_enabled_flag                     : 1;
+      uint32_t cu_qp_delta_enabled_flag                        : 1;
+      uint32_t pps_slice_chroma_qp_offsets_present_flag        : 1;
+      uint32_t weighted_pred_flag                              : 1;
+      uint32_t weighted_bipred_flag                            : 1;
+      uint32_t transquant_bypass_enabled_flag                  : 1;
+      uint32_t tiles_enabled_flag                              : 1;
+      uint32_t entropy_coding_sync_enabled_flag                : 1;
+      uint32_t uniform_spacing_flag                            : 1;
+      uint32_t loop_filter_across_tiles_enabled_flag           : 1;
+      uint32_t pps_loop_filter_across_slices_enabled_flag      : 1;
+      uint32_t deblocking_filter_control_present_flag          : 1;
+      uint32_t deblocking_filter_override_enabled_flag         : 1;
+      uint32_t pps_deblocking_filter_disabled_flag             : 1;
+      uint32_t pps_scaling_list_data_present_flag              : 1;
+      uint32_t lists_modification_present_flag                 : 1;
+      uint32_t slice_segment_header_extension_present_flag     : 1;
+      uint32_t pps_extension_present_flag                      : 1;
+      uint32_t cross_component_prediction_enabled_flag         : 1;
+      uint32_t chroma_qp_offset_list_enabled_flag              : 1;
+      uint32_t pps_curr_pic_ref_enabled_flag                   : 1;
+      uint32_t residual_adaptive_colour_transform_enabled_flag : 1;
+      uint32_t pps_slice_act_qp_offsets_present_flag           : 1;
+      uint32_t pps_palette_predictor_initializers_present_flag : 1;
+      uint32_t monochrome_palette_flag                         : 1;
+      uint32_t pps_range_extension_flag                        : 1;
+    };
+
+    struct H265PictureParameterSet
+    {
+      using NativeType = StdVideoH265PictureParameterSet;
+
+      operator StdVideoH265PictureParameterSet const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoH265PictureParameterSet *>( this );
+      }
+
+      operator StdVideoH265PictureParameterSet &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoH265PictureParameterSet *>( this );
+      }
+
+      bool operator==( H265PictureParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( pps_pic_parameter_set_id == rhs.pps_pic_parameter_set_id ) &&
+               ( pps_seq_parameter_set_id == rhs.pps_seq_parameter_set_id ) && ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) &&
+               ( num_extra_slice_header_bits == rhs.num_extra_slice_header_bits ) &&
+               ( num_ref_idx_l0_default_active_minus1 == rhs.num_ref_idx_l0_default_active_minus1 ) &&
+               ( num_ref_idx_l1_default_active_minus1 == rhs.num_ref_idx_l1_default_active_minus1 ) && ( init_qp_minus26 == rhs.init_qp_minus26 ) &&
+               ( diff_cu_qp_delta_depth == rhs.diff_cu_qp_delta_depth ) && ( pps_cb_qp_offset == rhs.pps_cb_qp_offset ) &&
+               ( pps_cr_qp_offset == rhs.pps_cr_qp_offset ) && ( pps_beta_offset_div2 == rhs.pps_beta_offset_div2 ) &&
+               ( pps_tc_offset_div2 == rhs.pps_tc_offset_div2 ) && ( log2_parallel_merge_level_minus2 == rhs.log2_parallel_merge_level_minus2 ) &&
+               ( log2_max_transform_skip_block_size_minus2 == rhs.log2_max_transform_skip_block_size_minus2 ) &&
+               ( diff_cu_chroma_qp_offset_depth == rhs.diff_cu_chroma_qp_offset_depth ) &&
+               ( chroma_qp_offset_list_len_minus1 == rhs.chroma_qp_offset_list_len_minus1 ) && ( cb_qp_offset_list == rhs.cb_qp_offset_list ) &&
+               ( cr_qp_offset_list == rhs.cr_qp_offset_list ) && ( log2_sao_offset_scale_luma == rhs.log2_sao_offset_scale_luma ) &&
+               ( log2_sao_offset_scale_chroma == rhs.log2_sao_offset_scale_chroma ) && ( pps_act_y_qp_offset_plus5 == rhs.pps_act_y_qp_offset_plus5 ) &&
+               ( pps_act_cb_qp_offset_plus5 == rhs.pps_act_cb_qp_offset_plus5 ) && ( pps_act_cr_qp_offset_plus3 == rhs.pps_act_cr_qp_offset_plus3 ) &&
+               ( pps_num_palette_predictor_initializers == rhs.pps_num_palette_predictor_initializers ) &&
+               ( luma_bit_depth_entry_minus8 == rhs.luma_bit_depth_entry_minus8 ) && ( chroma_bit_depth_entry_minus8 == rhs.chroma_bit_depth_entry_minus8 ) &&
+               ( num_tile_columns_minus1 == rhs.num_tile_columns_minus1 ) && ( num_tile_rows_minus1 == rhs.num_tile_rows_minus1 ) &&
+               ( reserved1 == rhs.reserved1 ) && ( reserved2 == rhs.reserved2 ) && ( column_width_minus1 == rhs.column_width_minus1 ) &&
+               ( row_height_minus1 == rhs.row_height_minus1 ) && ( reserved3 == rhs.reserved3 ) && ( pScalingLists == rhs.pScalingLists ) &&
+               ( pPredictorPaletteEntries == rhs.pPredictorPaletteEntries );
+      }
+
+      bool operator!=( H265PictureParameterSet const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PpsFlags                                      flags                                     = {};
+      uint8_t                                                                                             pps_pic_parameter_set_id                  = {};
+      uint8_t                                                                                             pps_seq_parameter_set_id                  = {};
+      uint8_t                                                                                             sps_video_parameter_set_id                = {};
+      uint8_t                                                                                             num_extra_slice_header_bits               = {};
+      uint8_t                                                                                             num_ref_idx_l0_default_active_minus1      = {};
+      uint8_t                                                                                             num_ref_idx_l1_default_active_minus1      = {};
+      int8_t                                                                                              init_qp_minus26                           = {};
+      uint8_t                                                                                             diff_cu_qp_delta_depth                    = {};
+      int8_t                                                                                              pps_cb_qp_offset                          = {};
+      int8_t                                                                                              pps_cr_qp_offset                          = {};
+      int8_t                                                                                              pps_beta_offset_div2                      = {};
+      int8_t                                                                                              pps_tc_offset_div2                        = {};
+      uint8_t                                                                                             log2_parallel_merge_level_minus2          = {};
+      uint8_t                                                                                             log2_max_transform_skip_block_size_minus2 = {};
+      uint8_t                                                                                             diff_cu_chroma_qp_offset_depth            = {};
+      uint8_t                                                                                             chroma_qp_offset_list_len_minus1          = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int8_t, STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE>             cb_qp_offset_list                         = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int8_t, STD_VIDEO_H265_CHROMA_QP_OFFSET_LIST_SIZE>             cr_qp_offset_list                         = {};
+      uint8_t                                                                                             log2_sao_offset_scale_luma                = {};
+      uint8_t                                                                                             log2_sao_offset_scale_chroma              = {};
+      int8_t                                                                                              pps_act_y_qp_offset_plus5                 = {};
+      int8_t                                                                                              pps_act_cb_qp_offset_plus5                = {};
+      int8_t                                                                                              pps_act_cr_qp_offset_plus3                = {};
+      uint8_t                                                                                             pps_num_palette_predictor_initializers    = {};
+      uint8_t                                                                                             luma_bit_depth_entry_minus8               = {};
+      uint8_t                                                                                             chroma_bit_depth_entry_minus8             = {};
+      uint8_t                                                                                             num_tile_columns_minus1                   = {};
+      uint8_t                                                                                             num_tile_rows_minus1                      = {};
+      uint8_t                                                                                             reserved1                                 = {};
+      uint8_t                                                                                             reserved2                                 = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint16_t, STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_COLS_LIST_SIZE> column_width_minus1                       = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint16_t, STD_VIDEO_H265_CHROMA_QP_OFFSET_TILE_ROWS_LIST_SIZE> row_height_minus1                         = {};
+      uint32_t                                                                                            reserved3                                 = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ScalingLists *                          pScalingLists                             = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PredictorPaletteEntries *               pPredictorPaletteEntries                  = {};
+    };
+
+    //=== vulkan_video_codec_h265std_decode ===
+
+    struct DecodeH265PictureInfoFlags
+    {
+      using NativeType = StdVideoDecodeH265PictureInfoFlags;
+
+      operator StdVideoDecodeH265PictureInfoFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoDecodeH265PictureInfoFlags *>( this );
+      }
+
+      operator StdVideoDecodeH265PictureInfoFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoDecodeH265PictureInfoFlags *>( this );
+      }
+
+      bool operator==( DecodeH265PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( IrapPicFlag == rhs.IrapPicFlag ) && ( IdrPicFlag == rhs.IdrPicFlag ) && ( IsReference == rhs.IsReference ) &&
+               ( short_term_ref_pic_set_sps_flag == rhs.short_term_ref_pic_set_sps_flag );
+      }
+
+      bool operator!=( DecodeH265PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t IrapPicFlag                     : 1;
+      uint32_t IdrPicFlag                      : 1;
+      uint32_t IsReference                     : 1;
+      uint32_t short_term_ref_pic_set_sps_flag : 1;
+    };
+
+    struct DecodeH265PictureInfo
+    {
+      using NativeType = StdVideoDecodeH265PictureInfo;
+
+      operator StdVideoDecodeH265PictureInfo const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoDecodeH265PictureInfo *>( this );
+      }
+
+      operator StdVideoDecodeH265PictureInfo &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoDecodeH265PictureInfo *>( this );
+      }
+
+      bool operator==( DecodeH265PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) &&
+               ( pps_seq_parameter_set_id == rhs.pps_seq_parameter_set_id ) && ( pps_pic_parameter_set_id == rhs.pps_pic_parameter_set_id ) &&
+               ( NumDeltaPocsOfRefRpsIdx == rhs.NumDeltaPocsOfRefRpsIdx ) && ( PicOrderCntVal == rhs.PicOrderCntVal ) &&
+               ( NumBitsForSTRefPicSetInSlice == rhs.NumBitsForSTRefPicSetInSlice ) && ( reserved == rhs.reserved ) &&
+               ( RefPicSetStCurrBefore == rhs.RefPicSetStCurrBefore ) && ( RefPicSetStCurrAfter == rhs.RefPicSetStCurrAfter ) &&
+               ( RefPicSetLtCurr == rhs.RefPicSetLtCurr );
+      }
+
+      bool operator!=( DecodeH265PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::DecodeH265PictureInfoFlags               flags                        = {};
+      uint8_t                                                                                    sps_video_parameter_set_id   = {};
+      uint8_t                                                                                    pps_seq_parameter_set_id     = {};
+      uint8_t                                                                                    pps_pic_parameter_set_id     = {};
+      uint8_t                                                                                    NumDeltaPocsOfRefRpsIdx      = {};
+      int32_t                                                                                    PicOrderCntVal               = {};
+      uint16_t                                                                                   NumBitsForSTRefPicSetInSlice = {};
+      uint16_t                                                                                   reserved                     = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE> RefPicSetStCurrBefore        = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE> RefPicSetStCurrAfter         = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_DECODE_H265_REF_PIC_SET_LIST_SIZE> RefPicSetLtCurr              = {};
+    };
+
+    struct DecodeH265ReferenceInfoFlags
+    {
+      using NativeType = StdVideoDecodeH265ReferenceInfoFlags;
+
+      operator StdVideoDecodeH265ReferenceInfoFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoDecodeH265ReferenceInfoFlags *>( this );
+      }
+
+      operator StdVideoDecodeH265ReferenceInfoFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoDecodeH265ReferenceInfoFlags *>( this );
+      }
+
+      bool operator==( DecodeH265ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( unused_for_reference == rhs.unused_for_reference );
+      }
+
+      bool operator!=( DecodeH265ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t used_for_long_term_reference : 1;
+      uint32_t unused_for_reference         : 1;
+    };
+
+    struct DecodeH265ReferenceInfo
+    {
+      using NativeType = StdVideoDecodeH265ReferenceInfo;
+
+      operator StdVideoDecodeH265ReferenceInfo const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoDecodeH265ReferenceInfo *>( this );
+      }
+
+      operator StdVideoDecodeH265ReferenceInfo &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoDecodeH265ReferenceInfo *>( this );
+      }
+
+      bool operator==( DecodeH265ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( PicOrderCntVal == rhs.PicOrderCntVal );
+      }
+
+      bool operator!=( DecodeH265ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::DecodeH265ReferenceInfoFlags flags          = {};
+      int32_t                                                                        PicOrderCntVal = {};
+    };
+
+    //=== vulkan_video_codec_h265std_encode ===
+
+    struct EncodeH265WeightTableFlags
+    {
+      using NativeType = StdVideoEncodeH265WeightTableFlags;
+
+      operator StdVideoEncodeH265WeightTableFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH265WeightTableFlags *>( this );
+      }
+
+      operator StdVideoEncodeH265WeightTableFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH265WeightTableFlags *>( this );
+      }
+
+      bool operator==( EncodeH265WeightTableFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( luma_weight_l0_flag == rhs.luma_weight_l0_flag ) && ( chroma_weight_l0_flag == rhs.chroma_weight_l0_flag ) &&
+               ( luma_weight_l1_flag == rhs.luma_weight_l1_flag ) && ( chroma_weight_l1_flag == rhs.chroma_weight_l1_flag );
+      }
+
+      bool operator!=( EncodeH265WeightTableFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint16_t luma_weight_l0_flag   = {};
+      uint16_t chroma_weight_l0_flag = {};
+      uint16_t luma_weight_l1_flag   = {};
+      uint16_t chroma_weight_l1_flag = {};
+    };
+
+    struct EncodeH265WeightTable
+    {
+      using NativeType = StdVideoEncodeH265WeightTable;
+
+      operator StdVideoEncodeH265WeightTable const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH265WeightTable *>( this );
+      }
+
+      operator StdVideoEncodeH265WeightTable &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH265WeightTable *>( this );
+      }
+
+      bool operator==( EncodeH265WeightTable const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( luma_log2_weight_denom == rhs.luma_log2_weight_denom ) &&
+               ( delta_chroma_log2_weight_denom == rhs.delta_chroma_log2_weight_denom ) && ( delta_luma_weight_l0 == rhs.delta_luma_weight_l0 ) &&
+               ( luma_offset_l0 == rhs.luma_offset_l0 ) && ( delta_chroma_weight_l0 == rhs.delta_chroma_weight_l0 ) &&
+               ( delta_chroma_offset_l0 == rhs.delta_chroma_offset_l0 ) && ( delta_luma_weight_l1 == rhs.delta_luma_weight_l1 ) &&
+               ( luma_offset_l1 == rhs.luma_offset_l1 ) && ( delta_chroma_weight_l1 == rhs.delta_chroma_weight_l1 ) &&
+               ( delta_chroma_offset_l1 == rhs.delta_chroma_offset_l1 );
+      }
+
+      bool operator!=( EncodeH265WeightTable const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265WeightTableFlags                                    flags                          = {};
+      uint8_t                                                                                                         luma_log2_weight_denom         = {};
+      int8_t                                                                                                          delta_chroma_log2_weight_denom = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int8_t, STD_VIDEO_H265_MAX_NUM_LIST_REF>                                   delta_luma_weight_l0           = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int8_t, STD_VIDEO_H265_MAX_NUM_LIST_REF>                                   luma_offset_l0                 = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper2D<int8_t, STD_VIDEO_H265_MAX_NUM_LIST_REF, STD_VIDEO_H265_MAX_CHROMA_PLANES> delta_chroma_weight_l0         = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper2D<int8_t, STD_VIDEO_H265_MAX_NUM_LIST_REF, STD_VIDEO_H265_MAX_CHROMA_PLANES> delta_chroma_offset_l0         = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int8_t, STD_VIDEO_H265_MAX_NUM_LIST_REF>                                   delta_luma_weight_l1           = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<int8_t, STD_VIDEO_H265_MAX_NUM_LIST_REF>                                   luma_offset_l1                 = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper2D<int8_t, STD_VIDEO_H265_MAX_NUM_LIST_REF, STD_VIDEO_H265_MAX_CHROMA_PLANES> delta_chroma_weight_l1         = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper2D<int8_t, STD_VIDEO_H265_MAX_NUM_LIST_REF, STD_VIDEO_H265_MAX_CHROMA_PLANES> delta_chroma_offset_l1         = {};
+    };
+
+    struct EncodeH265SliceSegmentHeaderFlags
+    {
+      using NativeType = StdVideoEncodeH265SliceSegmentHeaderFlags;
+
+      operator StdVideoEncodeH265SliceSegmentHeaderFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH265SliceSegmentHeaderFlags *>( this );
+      }
+
+      operator StdVideoEncodeH265SliceSegmentHeaderFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH265SliceSegmentHeaderFlags *>( this );
+      }
+
+      bool operator==( EncodeH265SliceSegmentHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( first_slice_segment_in_pic_flag == rhs.first_slice_segment_in_pic_flag ) &&
+               ( dependent_slice_segment_flag == rhs.dependent_slice_segment_flag ) && ( slice_sao_luma_flag == rhs.slice_sao_luma_flag ) &&
+               ( slice_sao_chroma_flag == rhs.slice_sao_chroma_flag ) && ( num_ref_idx_active_override_flag == rhs.num_ref_idx_active_override_flag ) &&
+               ( mvd_l1_zero_flag == rhs.mvd_l1_zero_flag ) && ( cabac_init_flag == rhs.cabac_init_flag ) &&
+               ( cu_chroma_qp_offset_enabled_flag == rhs.cu_chroma_qp_offset_enabled_flag ) &&
+               ( deblocking_filter_override_flag == rhs.deblocking_filter_override_flag ) &&
+               ( slice_deblocking_filter_disabled_flag == rhs.slice_deblocking_filter_disabled_flag ) &&
+               ( collocated_from_l0_flag == rhs.collocated_from_l0_flag ) &&
+               ( slice_loop_filter_across_slices_enabled_flag == rhs.slice_loop_filter_across_slices_enabled_flag ) && ( reserved == rhs.reserved );
+      }
+
+      bool operator!=( EncodeH265SliceSegmentHeaderFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t first_slice_segment_in_pic_flag              : 1;
+      uint32_t dependent_slice_segment_flag                 : 1;
+      uint32_t slice_sao_luma_flag                          : 1;
+      uint32_t slice_sao_chroma_flag                        : 1;
+      uint32_t num_ref_idx_active_override_flag             : 1;
+      uint32_t mvd_l1_zero_flag                             : 1;
+      uint32_t cabac_init_flag                              : 1;
+      uint32_t cu_chroma_qp_offset_enabled_flag             : 1;
+      uint32_t deblocking_filter_override_flag              : 1;
+      uint32_t slice_deblocking_filter_disabled_flag        : 1;
+      uint32_t collocated_from_l0_flag                      : 1;
+      uint32_t slice_loop_filter_across_slices_enabled_flag : 1;
+      uint32_t reserved                                     : 20;
+    };
+
+    struct EncodeH265SliceSegmentHeader
+    {
+      using NativeType = StdVideoEncodeH265SliceSegmentHeader;
+
+      operator StdVideoEncodeH265SliceSegmentHeader const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH265SliceSegmentHeader *>( this );
+      }
+
+      operator StdVideoEncodeH265SliceSegmentHeader &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH265SliceSegmentHeader *>( this );
+      }
+
+      bool operator==( EncodeH265SliceSegmentHeader const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( slice_type == rhs.slice_type ) && ( slice_segment_address == rhs.slice_segment_address ) &&
+               ( collocated_ref_idx == rhs.collocated_ref_idx ) && ( MaxNumMergeCand == rhs.MaxNumMergeCand ) &&
+               ( slice_cb_qp_offset == rhs.slice_cb_qp_offset ) && ( slice_cr_qp_offset == rhs.slice_cr_qp_offset ) &&
+               ( slice_beta_offset_div2 == rhs.slice_beta_offset_div2 ) && ( slice_tc_offset_div2 == rhs.slice_tc_offset_div2 ) &&
+               ( slice_act_y_qp_offset == rhs.slice_act_y_qp_offset ) && ( slice_act_cb_qp_offset == rhs.slice_act_cb_qp_offset ) &&
+               ( slice_act_cr_qp_offset == rhs.slice_act_cr_qp_offset ) && ( slice_qp_delta == rhs.slice_qp_delta ) && ( reserved1 == rhs.reserved1 ) &&
+               ( pWeightTable == rhs.pWeightTable );
+      }
+
+      bool operator!=( EncodeH265SliceSegmentHeader const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265SliceSegmentHeaderFlags flags = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SliceType slice_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265SliceType::eB;
+      uint32_t                                                        slice_segment_address        = {};
+      uint8_t                                                         collocated_ref_idx           = {};
+      uint8_t                                                         MaxNumMergeCand              = {};
+      int8_t                                                          slice_cb_qp_offset           = {};
+      int8_t                                                          slice_cr_qp_offset           = {};
+      int8_t                                                          slice_beta_offset_div2       = {};
+      int8_t                                                          slice_tc_offset_div2         = {};
+      int8_t                                                          slice_act_y_qp_offset        = {};
+      int8_t                                                          slice_act_cb_qp_offset       = {};
+      int8_t                                                          slice_act_cr_qp_offset       = {};
+      int8_t                                                          slice_qp_delta               = {};
+      uint16_t                                                        reserved1                    = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265WeightTable * pWeightTable = {};
+    };
+
+    struct EncodeH265ReferenceListsInfoFlags
+    {
+      using NativeType = StdVideoEncodeH265ReferenceListsInfoFlags;
+
+      operator StdVideoEncodeH265ReferenceListsInfoFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH265ReferenceListsInfoFlags *>( this );
+      }
+
+      operator StdVideoEncodeH265ReferenceListsInfoFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH265ReferenceListsInfoFlags *>( this );
+      }
+
+      bool operator==( EncodeH265ReferenceListsInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( ref_pic_list_modification_flag_l0 == rhs.ref_pic_list_modification_flag_l0 ) &&
+               ( ref_pic_list_modification_flag_l1 == rhs.ref_pic_list_modification_flag_l1 ) && ( reserved == rhs.reserved );
+      }
+
+      bool operator!=( EncodeH265ReferenceListsInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t ref_pic_list_modification_flag_l0 : 1;
+      uint32_t ref_pic_list_modification_flag_l1 : 1;
+      uint32_t reserved                          : 30;
+    };
+
+    struct EncodeH265ReferenceListsInfo
+    {
+      using NativeType = StdVideoEncodeH265ReferenceListsInfo;
+
+      operator StdVideoEncodeH265ReferenceListsInfo const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH265ReferenceListsInfo *>( this );
+      }
+
+      operator StdVideoEncodeH265ReferenceListsInfo &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH265ReferenceListsInfo *>( this );
+      }
+
+      bool operator==( EncodeH265ReferenceListsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( num_ref_idx_l0_active_minus1 == rhs.num_ref_idx_l0_active_minus1 ) &&
+               ( num_ref_idx_l1_active_minus1 == rhs.num_ref_idx_l1_active_minus1 ) && ( RefPicList0 == rhs.RefPicList0 ) &&
+               ( RefPicList1 == rhs.RefPicList1 ) && ( list_entry_l0 == rhs.list_entry_l0 ) && ( list_entry_l1 == rhs.list_entry_l1 );
+      }
+
+      bool operator!=( EncodeH265ReferenceListsInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265ReferenceListsInfoFlags flags                        = {};
+      uint8_t                                                                             num_ref_idx_l0_active_minus1 = {};
+      uint8_t                                                                             num_ref_idx_l1_active_minus1 = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H265_MAX_NUM_LIST_REF>      RefPicList0                  = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H265_MAX_NUM_LIST_REF>      RefPicList1                  = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H265_MAX_NUM_LIST_REF>      list_entry_l0                = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H265_MAX_NUM_LIST_REF>      list_entry_l1                = {};
+    };
+
+    struct EncodeH265PictureInfoFlags
+    {
+      using NativeType = StdVideoEncodeH265PictureInfoFlags;
+
+      operator StdVideoEncodeH265PictureInfoFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH265PictureInfoFlags *>( this );
+      }
+
+      operator StdVideoEncodeH265PictureInfoFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH265PictureInfoFlags *>( this );
+      }
+
+      bool operator==( EncodeH265PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( is_reference == rhs.is_reference ) && ( IrapPicFlag == rhs.IrapPicFlag ) &&
+               ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( discardable_flag == rhs.discardable_flag ) &&
+               ( cross_layer_bla_flag == rhs.cross_layer_bla_flag ) && ( pic_output_flag == rhs.pic_output_flag ) &&
+               ( no_output_of_prior_pics_flag == rhs.no_output_of_prior_pics_flag ) &&
+               ( short_term_ref_pic_set_sps_flag == rhs.short_term_ref_pic_set_sps_flag ) &&
+               ( slice_temporal_mvp_enabled_flag == rhs.slice_temporal_mvp_enabled_flag ) && ( reserved == rhs.reserved );
+      }
+
+      bool operator!=( EncodeH265PictureInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t is_reference                    : 1;
+      uint32_t IrapPicFlag                     : 1;
+      uint32_t used_for_long_term_reference    : 1;
+      uint32_t discardable_flag                : 1;
+      uint32_t cross_layer_bla_flag            : 1;
+      uint32_t pic_output_flag                 : 1;
+      uint32_t no_output_of_prior_pics_flag    : 1;
+      uint32_t short_term_ref_pic_set_sps_flag : 1;
+      uint32_t slice_temporal_mvp_enabled_flag : 1;
+      uint32_t reserved                        : 23;
+    };
+
+    struct EncodeH265LongTermRefPics
+    {
+      using NativeType = StdVideoEncodeH265LongTermRefPics;
+
+      operator StdVideoEncodeH265LongTermRefPics const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH265LongTermRefPics *>( this );
+      }
+
+      operator StdVideoEncodeH265LongTermRefPics &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH265LongTermRefPics *>( this );
+      }
+
+      bool operator==( EncodeH265LongTermRefPics const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( num_long_term_sps == rhs.num_long_term_sps ) && ( num_long_term_pics == rhs.num_long_term_pics ) && ( lt_idx_sps == rhs.lt_idx_sps ) &&
+               ( poc_lsb_lt == rhs.poc_lsb_lt ) && ( used_by_curr_pic_lt_flag == rhs.used_by_curr_pic_lt_flag ) &&
+               ( delta_poc_msb_present_flag == rhs.delta_poc_msb_present_flag ) && ( delta_poc_msb_cycle_lt == rhs.delta_poc_msb_cycle_lt );
+      }
+
+      bool operator!=( EncodeH265LongTermRefPics const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint8_t                                                                                  num_long_term_sps          = {};
+      uint8_t                                                                                  num_long_term_pics         = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H265_MAX_LONG_TERM_REF_PICS_SPS> lt_idx_sps                 = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H265_MAX_LONG_TERM_PICS>         poc_lsb_lt                 = {};
+      uint16_t                                                                                 used_by_curr_pic_lt_flag   = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H265_MAX_DELTA_POC>              delta_poc_msb_present_flag = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, STD_VIDEO_H265_MAX_DELTA_POC>              delta_poc_msb_cycle_lt     = {};
+    };
+
+    struct EncodeH265PictureInfo
+    {
+      using NativeType = StdVideoEncodeH265PictureInfo;
+
+      operator StdVideoEncodeH265PictureInfo const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH265PictureInfo *>( this );
+      }
+
+      operator StdVideoEncodeH265PictureInfo &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH265PictureInfo *>( this );
+      }
+
+      bool operator==( EncodeH265PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( pic_type == rhs.pic_type ) && ( sps_video_parameter_set_id == rhs.sps_video_parameter_set_id ) &&
+               ( pps_seq_parameter_set_id == rhs.pps_seq_parameter_set_id ) && ( pps_pic_parameter_set_id == rhs.pps_pic_parameter_set_id ) &&
+               ( short_term_ref_pic_set_idx == rhs.short_term_ref_pic_set_idx ) && ( PicOrderCntVal == rhs.PicOrderCntVal ) &&
+               ( TemporalId == rhs.TemporalId ) && ( reserved1 == rhs.reserved1 ) && ( pRefLists == rhs.pRefLists ) &&
+               ( pShortTermRefPicSet == rhs.pShortTermRefPicSet ) && ( pLongTermRefPics == rhs.pLongTermRefPics );
+      }
+
+      bool operator!=( EncodeH265PictureInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265PictureInfoFlags flags = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PictureType pic_type = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PictureType::eP;
+      uint8_t                                                           sps_video_parameter_set_id               = {};
+      uint8_t                                                           pps_seq_parameter_set_id                 = {};
+      uint8_t                                                           pps_pic_parameter_set_id                 = {};
+      uint8_t                                                           short_term_ref_pic_set_idx               = {};
+      int32_t                                                           PicOrderCntVal                           = {};
+      uint8_t                                                           TemporalId                               = {};
+      VULKAN_HPP_NAMESPACE::ArrayWrapper1D<uint8_t, 7>                  reserved1                                = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265ReferenceListsInfo * pRefLists           = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265ShortTermRefPicSet *       pShortTermRefPicSet = {};
+      const VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265LongTermRefPics *    pLongTermRefPics    = {};
+    };
+
+    struct EncodeH265ReferenceInfoFlags
+    {
+      using NativeType = StdVideoEncodeH265ReferenceInfoFlags;
+
+      operator StdVideoEncodeH265ReferenceInfoFlags const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH265ReferenceInfoFlags *>( this );
+      }
+
+      operator StdVideoEncodeH265ReferenceInfoFlags &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH265ReferenceInfoFlags *>( this );
+      }
+
+      bool operator==( EncodeH265ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( used_for_long_term_reference == rhs.used_for_long_term_reference ) && ( unused_for_reference == rhs.unused_for_reference ) &&
+               ( reserved == rhs.reserved );
+      }
+
+      bool operator!=( EncodeH265ReferenceInfoFlags const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      uint32_t used_for_long_term_reference : 1;
+      uint32_t unused_for_reference         : 1;
+      uint32_t reserved                     : 30;
+    };
+
+    struct EncodeH265ReferenceInfo
+    {
+      using NativeType = StdVideoEncodeH265ReferenceInfo;
+
+      operator StdVideoEncodeH265ReferenceInfo const &() const VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<const StdVideoEncodeH265ReferenceInfo *>( this );
+      }
+
+      operator StdVideoEncodeH265ReferenceInfo &() VULKAN_HPP_NOEXCEPT
+      {
+        return *reinterpret_cast<StdVideoEncodeH265ReferenceInfo *>( this );
+      }
+
+      bool operator==( EncodeH265ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return ( flags == rhs.flags ) && ( pic_type == rhs.pic_type ) && ( PicOrderCntVal == rhs.PicOrderCntVal ) && ( TemporalId == rhs.TemporalId );
+      }
+
+      bool operator!=( EncodeH265ReferenceInfo const & rhs ) const VULKAN_HPP_NOEXCEPT
+      {
+        return !operator==( rhs );
+      }
+
+    public:
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::EncodeH265ReferenceInfoFlags flags = {};
+      VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PictureType pic_type       = VULKAN_HPP_NAMESPACE::VULKAN_HPP_VIDEO_NAMESPACE::H265PictureType::eP;
+      int32_t                                                           PicOrderCntVal = {};
+      uint8_t                                                           TemporalId     = {};
+    };
+
+  }  // namespace VULKAN_HPP_VIDEO_NAMESPACE
+}  // namespace VULKAN_HPP_NAMESPACE
+#endif

+ 2 - 1
ThirdParty/Khronos/vulkan/vulkan_wayland.h

@@ -2,7 +2,7 @@
 #define VULKAN_WAYLAND_H_ 1
 
 /*
-** Copyright 2015-2022 The Khronos Group Inc.
+** Copyright 2015-2023 The Khronos Group Inc.
 **
 ** SPDX-License-Identifier: Apache-2.0
 */
@@ -19,6 +19,7 @@ extern "C" {
 
 
 
+// VK_KHR_wayland_surface is a preprocessor guard. Do not pass it to API calls.
 #define VK_KHR_wayland_surface 1
 #define VK_KHR_WAYLAND_SURFACE_SPEC_VERSION 6
 #define VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME "VK_KHR_wayland_surface"

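The "is a preprocessor guard" comments added throughout these platform headers document a common pitfall: the guard macro (e.g. VK_KHR_wayland_surface) only tells you the header declares the extension, while the *_EXTENSION_NAME string is what the API actually consumes. A minimal sketch of that convention (illustrative only, not part of this commit; kInstanceExtensions is a placeholder name):

    // The guard macro gates compilation; the extension-name string is what
    // gets passed to vkCreateInstance() via ppEnabledExtensionNames.
    #define VK_USE_PLATFORM_WAYLAND_KHR
    #include <vulkan/vulkan.h>

    #ifdef VK_KHR_wayland_surface  // preprocessor guard: "this header knows the extension"
    static const char* const kInstanceExtensions[] = {
        VK_KHR_SURFACE_EXTENSION_NAME,
        VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME,  // the string "VK_KHR_wayland_surface"
    };
    #endif

    // Later: VkInstanceCreateInfo::ppEnabledExtensionNames = kInstanceExtensions;
    //        VkInstanceCreateInfo::enabledExtensionCount   = 2;
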
+ 28 - 1
ThirdParty/Khronos/vulkan/vulkan_win32.h

@@ -2,7 +2,7 @@
 #define VULKAN_WIN32_H_ 1
 
 /*
-** Copyright 2015-2022 The Khronos Group Inc.
+** Copyright 2015-2023 The Khronos Group Inc.
 **
 ** SPDX-License-Identifier: Apache-2.0
 */
@@ -19,6 +19,7 @@ extern "C" {
 
 
 
+// VK_KHR_win32_surface is a preprocessor guard. Do not pass it to API calls.
 #define VK_KHR_win32_surface 1
 #define VK_KHR_WIN32_SURFACE_SPEC_VERSION 6
 #define VK_KHR_WIN32_SURFACE_EXTENSION_NAME "VK_KHR_win32_surface"
@@ -47,6 +48,7 @@ VKAPI_ATTR VkBool32 VKAPI_CALL vkGetPhysicalDeviceWin32PresentationSupportKHR(
 #endif
 
 
+// VK_KHR_external_memory_win32 is a preprocessor guard. Do not pass it to API calls.
 #define VK_KHR_external_memory_win32 1
 #define VK_KHR_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1
 #define VK_KHR_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_KHR_external_memory_win32"
@@ -96,6 +98,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandlePropertiesKHR(
 #endif
 
 
+// VK_KHR_win32_keyed_mutex is a preprocessor guard. Do not pass it to API calls.
 #define VK_KHR_win32_keyed_mutex 1
 #define VK_KHR_WIN32_KEYED_MUTEX_SPEC_VERSION 1
 #define VK_KHR_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_KHR_win32_keyed_mutex"
@@ -113,6 +116,7 @@ typedef struct VkWin32KeyedMutexAcquireReleaseInfoKHR {
 
 
 
+// VK_KHR_external_semaphore_win32 is a preprocessor guard. Do not pass it to API calls.
 #define VK_KHR_external_semaphore_win32 1
 #define VK_KHR_EXTERNAL_SEMAPHORE_WIN32_SPEC_VERSION 1
 #define VK_KHR_EXTERNAL_SEMAPHORE_WIN32_EXTENSION_NAME "VK_KHR_external_semaphore_win32"
@@ -165,6 +169,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetSemaphoreWin32HandleKHR(
 #endif
 
 
+// VK_KHR_external_fence_win32 is a preprocessor guard. Do not pass it to API calls.
 #define VK_KHR_external_fence_win32 1
 #define VK_KHR_EXTERNAL_FENCE_WIN32_SPEC_VERSION 1
 #define VK_KHR_EXTERNAL_FENCE_WIN32_EXTENSION_NAME "VK_KHR_external_fence_win32"
@@ -208,6 +213,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetFenceWin32HandleKHR(
 #endif
 
 
+// VK_NV_external_memory_win32 is a preprocessor guard. Do not pass it to API calls.
 #define VK_NV_external_memory_win32 1
 #define VK_NV_EXTERNAL_MEMORY_WIN32_SPEC_VERSION 1
 #define VK_NV_EXTERNAL_MEMORY_WIN32_EXTENSION_NAME "VK_NV_external_memory_win32"
@@ -236,6 +242,7 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetMemoryWin32HandleNV(
 #endif
 
 
+// VK_NV_win32_keyed_mutex is a preprocessor guard. Do not pass it to API calls.
 #define VK_NV_win32_keyed_mutex 1
 #define VK_NV_WIN32_KEYED_MUTEX_SPEC_VERSION 2
 #define VK_NV_WIN32_KEYED_MUTEX_EXTENSION_NAME "VK_NV_win32_keyed_mutex"
@@ -253,6 +260,7 @@ typedef struct VkWin32KeyedMutexAcquireReleaseInfoNV {
 
 
 
+// VK_EXT_full_screen_exclusive is a preprocessor guard. Do not pass it to API calls.
 #define VK_EXT_full_screen_exclusive 1
 #define VK_EXT_FULL_SCREEN_EXCLUSIVE_SPEC_VERSION 4
 #define VK_EXT_FULL_SCREEN_EXCLUSIVE_EXTENSION_NAME "VK_EXT_full_screen_exclusive"
@@ -308,6 +316,25 @@ VKAPI_ATTR VkResult VKAPI_CALL vkGetDeviceGroupSurfacePresentModes2EXT(
     VkDeviceGroupPresentModeFlagsKHR*           pModes);
 #endif
 
+
+// VK_NV_acquire_winrt_display is a preprocessor guard. Do not pass it to API calls.
+#define VK_NV_acquire_winrt_display 1
+#define VK_NV_ACQUIRE_WINRT_DISPLAY_SPEC_VERSION 1
+#define VK_NV_ACQUIRE_WINRT_DISPLAY_EXTENSION_NAME "VK_NV_acquire_winrt_display"
+typedef VkResult (VKAPI_PTR *PFN_vkAcquireWinrtDisplayNV)(VkPhysicalDevice physicalDevice, VkDisplayKHR display);
+typedef VkResult (VKAPI_PTR *PFN_vkGetWinrtDisplayNV)(VkPhysicalDevice physicalDevice, uint32_t deviceRelativeId, VkDisplayKHR* pDisplay);
+
+#ifndef VK_NO_PROTOTYPES
+VKAPI_ATTR VkResult VKAPI_CALL vkAcquireWinrtDisplayNV(
+    VkPhysicalDevice                            physicalDevice,
+    VkDisplayKHR                                display);
+
+VKAPI_ATTR VkResult VKAPI_CALL vkGetWinrtDisplayNV(
+    VkPhysicalDevice                            physicalDevice,
+    uint32_t                                    deviceRelativeId,
+    VkDisplayKHR*                               pDisplay);
+#endif
+
 #ifdef __cplusplus
 }
 #endif

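For the newly added VK_NV_acquire_winrt_display entry points above, a hedged usage sketch (not part of the diff; AcquireWinrtDisplay, instance, physicalDevice and deviceRelativeId are placeholder names, and the extension must have been enabled on the device):

    #define VK_USE_PLATFORM_WIN32_KHR
    #include <windows.h>
    #include <vulkan/vulkan.h>

    // Extension commands are fetched through vkGetInstanceProcAddr rather than linked directly.
    VkResult AcquireWinrtDisplay(VkInstance instance, VkPhysicalDevice physicalDevice, uint32_t deviceRelativeId)
    {
        auto getDisplay = reinterpret_cast<PFN_vkGetWinrtDisplayNV>(
            vkGetInstanceProcAddr(instance, "vkGetWinrtDisplayNV"));
        auto acquireDisplay = reinterpret_cast<PFN_vkAcquireWinrtDisplayNV>(
            vkGetInstanceProcAddr(instance, "vkAcquireWinrtDisplayNV"));
        if (!getDisplay || !acquireDisplay)
            return VK_ERROR_EXTENSION_NOT_PRESENT;

        VkDisplayKHR display = VK_NULL_HANDLE;
        const VkResult res = getDisplay(physicalDevice, deviceRelativeId, &display);
        if (res != VK_SUCCESS)
            return res;
        return acquireDisplay(physicalDevice, display);  // take exclusive control of the display
    }
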
+ 2 - 1
ThirdParty/Khronos/vulkan/vulkan_xcb.h

@@ -2,7 +2,7 @@
 #define VULKAN_XCB_H_ 1
 
 /*
-** Copyright 2015-2022 The Khronos Group Inc.
+** Copyright 2015-2023 The Khronos Group Inc.
 **
 ** SPDX-License-Identifier: Apache-2.0
 */
@@ -19,6 +19,7 @@ extern "C" {
 
 
 
+// VK_KHR_xcb_surface is a preprocessor guard. Do not pass it to API calls.
 #define VK_KHR_xcb_surface 1
 #define VK_KHR_XCB_SURFACE_SPEC_VERSION   6
 #define VK_KHR_XCB_SURFACE_EXTENSION_NAME "VK_KHR_xcb_surface"

+ 2 - 1
ThirdParty/Khronos/vulkan/vulkan_xlib.h

@@ -2,7 +2,7 @@
 #define VULKAN_XLIB_H_ 1
 
 /*
-** Copyright 2015-2022 The Khronos Group Inc.
+** Copyright 2015-2023 The Khronos Group Inc.
 **
 ** SPDX-License-Identifier: Apache-2.0
 */
@@ -19,6 +19,7 @@ extern "C" {
 
 
 
+// VK_KHR_xlib_surface is a preprocessor guard. Do not pass it to API calls.
 #define VK_KHR_xlib_surface 1
 #define VK_KHR_XLIB_SURFACE_SPEC_VERSION  6
 #define VK_KHR_XLIB_SURFACE_EXTENSION_NAME "VK_KHR_xlib_surface"

+ 2 - 1
ThirdParty/Khronos/vulkan/vulkan_xlib_xrandr.h

@@ -2,7 +2,7 @@
 #define VULKAN_XLIB_XRANDR_H_ 1
 
 /*
-** Copyright 2015-2022 The Khronos Group Inc.
+** Copyright 2015-2023 The Khronos Group Inc.
 **
 ** SPDX-License-Identifier: Apache-2.0
 */
@@ -19,6 +19,7 @@ extern "C" {
 
 
 
+// VK_EXT_acquire_xlib_display is a preprocessor guard. Do not pass it to API calls.
 #define VK_EXT_acquire_xlib_display 1
 #define VK_EXT_ACQUIRE_XLIB_DISPLAY_SPEC_VERSION 1
 #define VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME "VK_EXT_acquire_xlib_display"

+ 6 - 23
ThirdParty/SpirvCross/GLSL.std.450.h

@@ -1,27 +1,10 @@
 /*
-** Copyright (c) 2014-2016 The Khronos Group Inc.
-**
-** Permission is hereby granted, free of charge, to any person obtaining a copy
-** of this software and/or associated documentation files (the "Materials"),
-** to deal in the Materials without restriction, including without limitation
-** the rights to use, copy, modify, merge, publish, distribute, sublicense,
-** and/or sell copies of the Materials, and to permit persons to whom the
-** Materials are furnished to do so, subject to the following conditions:
-**
-** The above copyright notice and this permission notice shall be included in
-** all copies or substantial portions of the Materials.
-**
-** MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS
-** STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND
-** HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ 
-**
-** THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
-** OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-** FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
-** THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-** LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
-** FROM,OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS
-** IN THE MATERIALS.
+ * Copyright 2014-2016,2021 The Khronos Group, Inc.
+ * SPDX-License-Identifier: MIT
+ *
+ * MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS
+ * STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND
+ * HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/
 */
 
 #ifndef GLSLstd450_H

+ 2579 - 0
ThirdParty/SpirvCross/spirv.hpp

@@ -0,0 +1,2579 @@
+// Copyright (c) 2014-2020 The Khronos Group Inc.
+// 
+// Permission is hereby granted, free of charge, to any person obtaining a copy
+// of this software and/or associated documentation files (the "Materials"),
+// to deal in the Materials without restriction, including without limitation
+// the rights to use, copy, modify, merge, publish, distribute, sublicense,
+// and/or sell copies of the Materials, and to permit persons to whom the
+// Materials are furnished to do so, subject to the following conditions:
+// 
+// The above copyright notice and this permission notice shall be included in
+// all copies or substantial portions of the Materials.
+// 
+// MODIFICATIONS TO THIS FILE MAY MEAN IT NO LONGER ACCURATELY REFLECTS KHRONOS
+// STANDARDS. THE UNMODIFIED, NORMATIVE VERSIONS OF KHRONOS SPECIFICATIONS AND
+// HEADER INFORMATION ARE LOCATED AT https://www.khronos.org/registry/ 
+// 
+// THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
+// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
+// THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
+// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
+// FROM,OUT OF OR IN CONNECTION WITH THE MATERIALS OR THE USE OR OTHER DEALINGS
+// IN THE MATERIALS.
+
+// This header is automatically generated by the same tool that creates
+// the Binary Section of the SPIR-V specification.
+
+// Enumeration tokens for SPIR-V, in various styles:
+//   C, C++, C++11, JSON, Lua, Python, C#, D, Beef
+// 
+// - C will have tokens with a "Spv" prefix, e.g.: SpvSourceLanguageGLSL
+// - C++ will have tokens in the "spv" name space, e.g.: spv::SourceLanguageGLSL
+// - C++11 will use enum classes in the spv namespace, e.g.: spv::SourceLanguage::GLSL
+// - Lua will use tables, e.g.: spv.SourceLanguage.GLSL
+// - Python will use dictionaries, e.g.: spv['SourceLanguage']['GLSL']
+// - C# will use enum classes in the Specification class located in the "Spv" namespace,
+//     e.g.: Spv.Specification.SourceLanguage.GLSL
+// - D will have tokens under the "spv" module, e.g: spv.SourceLanguage.GLSL
+// - Beef will use enum classes in the Specification class located in the "Spv" namespace,
+//     e.g.: Spv.Specification.SourceLanguage.GLSL
+// 
+// Some tokens act like mask values, which can be OR'd together,
+// while others are mutually exclusive.  The mask-like ones have
+// "Mask" in their name, and a parallel enum that has the shift
+// amount (1 << x) for each corresponding enumerant.
+
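+// [Editorial aside, not part of the upstream header] A minimal sketch of the Mask/Shift
+// pairing described above, using enums defined later in this file:
+//
+//     unsigned ops = spv::ImageOperandsLodMask | spv::ImageOperandsConstOffsetMask;  // OR mask tokens
+//     bool hasLod  = (ops & spv::ImageOperandsLodMask) != 0;                         // test one bit
+//     static_assert(spv::ImageOperandsLodMask == (1u << spv::ImageOperandsLodShift),
+//                   "each *Mask value is 1 << its parallel *Shift enumerant");
+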
+#ifndef spirv_HPP
+#define spirv_HPP
+
+namespace spv {
+
+typedef unsigned int Id;
+
+#define SPV_VERSION 0x10600
+#define SPV_REVISION 1
+
+static const unsigned int MagicNumber = 0x07230203;
+static const unsigned int Version = 0x00010600;
+static const unsigned int Revision = 1;
+static const unsigned int OpCodeMask = 0xffff;
+static const unsigned int WordCountShift = 16;
+
+enum SourceLanguage {
+    SourceLanguageUnknown = 0,
+    SourceLanguageESSL = 1,
+    SourceLanguageGLSL = 2,
+    SourceLanguageOpenCL_C = 3,
+    SourceLanguageOpenCL_CPP = 4,
+    SourceLanguageHLSL = 5,
+    SourceLanguageCPP_for_OpenCL = 6,
+    SourceLanguageSYCL = 7,
+    SourceLanguageMax = 0x7fffffff,
+};
+
+enum ExecutionModel {
+    ExecutionModelVertex = 0,
+    ExecutionModelTessellationControl = 1,
+    ExecutionModelTessellationEvaluation = 2,
+    ExecutionModelGeometry = 3,
+    ExecutionModelFragment = 4,
+    ExecutionModelGLCompute = 5,
+    ExecutionModelKernel = 6,
+    ExecutionModelTaskNV = 5267,
+    ExecutionModelMeshNV = 5268,
+    ExecutionModelRayGenerationKHR = 5313,
+    ExecutionModelRayGenerationNV = 5313,
+    ExecutionModelIntersectionKHR = 5314,
+    ExecutionModelIntersectionNV = 5314,
+    ExecutionModelAnyHitKHR = 5315,
+    ExecutionModelAnyHitNV = 5315,
+    ExecutionModelClosestHitKHR = 5316,
+    ExecutionModelClosestHitNV = 5316,
+    ExecutionModelMissKHR = 5317,
+    ExecutionModelMissNV = 5317,
+    ExecutionModelCallableKHR = 5318,
+    ExecutionModelCallableNV = 5318,
+    ExecutionModelTaskEXT = 5364,
+    ExecutionModelMeshEXT = 5365,
+    ExecutionModelMax = 0x7fffffff,
+};
+
+enum AddressingModel {
+    AddressingModelLogical = 0,
+    AddressingModelPhysical32 = 1,
+    AddressingModelPhysical64 = 2,
+    AddressingModelPhysicalStorageBuffer64 = 5348,
+    AddressingModelPhysicalStorageBuffer64EXT = 5348,
+    AddressingModelMax = 0x7fffffff,
+};
+
+enum MemoryModel {
+    MemoryModelSimple = 0,
+    MemoryModelGLSL450 = 1,
+    MemoryModelOpenCL = 2,
+    MemoryModelVulkan = 3,
+    MemoryModelVulkanKHR = 3,
+    MemoryModelMax = 0x7fffffff,
+};
+
+enum ExecutionMode {
+    ExecutionModeInvocations = 0,
+    ExecutionModeSpacingEqual = 1,
+    ExecutionModeSpacingFractionalEven = 2,
+    ExecutionModeSpacingFractionalOdd = 3,
+    ExecutionModeVertexOrderCw = 4,
+    ExecutionModeVertexOrderCcw = 5,
+    ExecutionModePixelCenterInteger = 6,
+    ExecutionModeOriginUpperLeft = 7,
+    ExecutionModeOriginLowerLeft = 8,
+    ExecutionModeEarlyFragmentTests = 9,
+    ExecutionModePointMode = 10,
+    ExecutionModeXfb = 11,
+    ExecutionModeDepthReplacing = 12,
+    ExecutionModeDepthGreater = 14,
+    ExecutionModeDepthLess = 15,
+    ExecutionModeDepthUnchanged = 16,
+    ExecutionModeLocalSize = 17,
+    ExecutionModeLocalSizeHint = 18,
+    ExecutionModeInputPoints = 19,
+    ExecutionModeInputLines = 20,
+    ExecutionModeInputLinesAdjacency = 21,
+    ExecutionModeTriangles = 22,
+    ExecutionModeInputTrianglesAdjacency = 23,
+    ExecutionModeQuads = 24,
+    ExecutionModeIsolines = 25,
+    ExecutionModeOutputVertices = 26,
+    ExecutionModeOutputPoints = 27,
+    ExecutionModeOutputLineStrip = 28,
+    ExecutionModeOutputTriangleStrip = 29,
+    ExecutionModeVecTypeHint = 30,
+    ExecutionModeContractionOff = 31,
+    ExecutionModeInitializer = 33,
+    ExecutionModeFinalizer = 34,
+    ExecutionModeSubgroupSize = 35,
+    ExecutionModeSubgroupsPerWorkgroup = 36,
+    ExecutionModeSubgroupsPerWorkgroupId = 37,
+    ExecutionModeLocalSizeId = 38,
+    ExecutionModeLocalSizeHintId = 39,
+    ExecutionModeSubgroupUniformControlFlowKHR = 4421,
+    ExecutionModePostDepthCoverage = 4446,
+    ExecutionModeDenormPreserve = 4459,
+    ExecutionModeDenormFlushToZero = 4460,
+    ExecutionModeSignedZeroInfNanPreserve = 4461,
+    ExecutionModeRoundingModeRTE = 4462,
+    ExecutionModeRoundingModeRTZ = 4463,
+    ExecutionModeEarlyAndLateFragmentTestsAMD = 5017,
+    ExecutionModeStencilRefReplacingEXT = 5027,
+    ExecutionModeStencilRefUnchangedFrontAMD = 5079,
+    ExecutionModeStencilRefGreaterFrontAMD = 5080,
+    ExecutionModeStencilRefLessFrontAMD = 5081,
+    ExecutionModeStencilRefUnchangedBackAMD = 5082,
+    ExecutionModeStencilRefGreaterBackAMD = 5083,
+    ExecutionModeStencilRefLessBackAMD = 5084,
+    ExecutionModeOutputLinesEXT = 5269,
+    ExecutionModeOutputLinesNV = 5269,
+    ExecutionModeOutputPrimitivesEXT = 5270,
+    ExecutionModeOutputPrimitivesNV = 5270,
+    ExecutionModeDerivativeGroupQuadsNV = 5289,
+    ExecutionModeDerivativeGroupLinearNV = 5290,
+    ExecutionModeOutputTrianglesEXT = 5298,
+    ExecutionModeOutputTrianglesNV = 5298,
+    ExecutionModePixelInterlockOrderedEXT = 5366,
+    ExecutionModePixelInterlockUnorderedEXT = 5367,
+    ExecutionModeSampleInterlockOrderedEXT = 5368,
+    ExecutionModeSampleInterlockUnorderedEXT = 5369,
+    ExecutionModeShadingRateInterlockOrderedEXT = 5370,
+    ExecutionModeShadingRateInterlockUnorderedEXT = 5371,
+    ExecutionModeSharedLocalMemorySizeINTEL = 5618,
+    ExecutionModeRoundingModeRTPINTEL = 5620,
+    ExecutionModeRoundingModeRTNINTEL = 5621,
+    ExecutionModeFloatingPointModeALTINTEL = 5622,
+    ExecutionModeFloatingPointModeIEEEINTEL = 5623,
+    ExecutionModeMaxWorkgroupSizeINTEL = 5893,
+    ExecutionModeMaxWorkDimINTEL = 5894,
+    ExecutionModeNoGlobalOffsetINTEL = 5895,
+    ExecutionModeNumSIMDWorkitemsINTEL = 5896,
+    ExecutionModeSchedulerTargetFmaxMhzINTEL = 5903,
+    ExecutionModeNamedBarrierCountINTEL = 6417,
+    ExecutionModeMax = 0x7fffffff,
+};
+
+enum StorageClass {
+    StorageClassUniformConstant = 0,
+    StorageClassInput = 1,
+    StorageClassUniform = 2,
+    StorageClassOutput = 3,
+    StorageClassWorkgroup = 4,
+    StorageClassCrossWorkgroup = 5,
+    StorageClassPrivate = 6,
+    StorageClassFunction = 7,
+    StorageClassGeneric = 8,
+    StorageClassPushConstant = 9,
+    StorageClassAtomicCounter = 10,
+    StorageClassImage = 11,
+    StorageClassStorageBuffer = 12,
+    StorageClassCallableDataKHR = 5328,
+    StorageClassCallableDataNV = 5328,
+    StorageClassIncomingCallableDataKHR = 5329,
+    StorageClassIncomingCallableDataNV = 5329,
+    StorageClassRayPayloadKHR = 5338,
+    StorageClassRayPayloadNV = 5338,
+    StorageClassHitAttributeKHR = 5339,
+    StorageClassHitAttributeNV = 5339,
+    StorageClassIncomingRayPayloadKHR = 5342,
+    StorageClassIncomingRayPayloadNV = 5342,
+    StorageClassShaderRecordBufferKHR = 5343,
+    StorageClassShaderRecordBufferNV = 5343,
+    StorageClassPhysicalStorageBuffer = 5349,
+    StorageClassPhysicalStorageBufferEXT = 5349,
+    StorageClassTaskPayloadWorkgroupEXT = 5402,
+    StorageClassCodeSectionINTEL = 5605,
+    StorageClassDeviceOnlyINTEL = 5936,
+    StorageClassHostOnlyINTEL = 5937,
+    StorageClassMax = 0x7fffffff,
+};
+
+enum Dim {
+    Dim1D = 0,
+    Dim2D = 1,
+    Dim3D = 2,
+    DimCube = 3,
+    DimRect = 4,
+    DimBuffer = 5,
+    DimSubpassData = 6,
+    DimMax = 0x7fffffff,
+};
+
+enum SamplerAddressingMode {
+    SamplerAddressingModeNone = 0,
+    SamplerAddressingModeClampToEdge = 1,
+    SamplerAddressingModeClamp = 2,
+    SamplerAddressingModeRepeat = 3,
+    SamplerAddressingModeRepeatMirrored = 4,
+    SamplerAddressingModeMax = 0x7fffffff,
+};
+
+enum SamplerFilterMode {
+    SamplerFilterModeNearest = 0,
+    SamplerFilterModeLinear = 1,
+    SamplerFilterModeMax = 0x7fffffff,
+};
+
+enum ImageFormat {
+    ImageFormatUnknown = 0,
+    ImageFormatRgba32f = 1,
+    ImageFormatRgba16f = 2,
+    ImageFormatR32f = 3,
+    ImageFormatRgba8 = 4,
+    ImageFormatRgba8Snorm = 5,
+    ImageFormatRg32f = 6,
+    ImageFormatRg16f = 7,
+    ImageFormatR11fG11fB10f = 8,
+    ImageFormatR16f = 9,
+    ImageFormatRgba16 = 10,
+    ImageFormatRgb10A2 = 11,
+    ImageFormatRg16 = 12,
+    ImageFormatRg8 = 13,
+    ImageFormatR16 = 14,
+    ImageFormatR8 = 15,
+    ImageFormatRgba16Snorm = 16,
+    ImageFormatRg16Snorm = 17,
+    ImageFormatRg8Snorm = 18,
+    ImageFormatR16Snorm = 19,
+    ImageFormatR8Snorm = 20,
+    ImageFormatRgba32i = 21,
+    ImageFormatRgba16i = 22,
+    ImageFormatRgba8i = 23,
+    ImageFormatR32i = 24,
+    ImageFormatRg32i = 25,
+    ImageFormatRg16i = 26,
+    ImageFormatRg8i = 27,
+    ImageFormatR16i = 28,
+    ImageFormatR8i = 29,
+    ImageFormatRgba32ui = 30,
+    ImageFormatRgba16ui = 31,
+    ImageFormatRgba8ui = 32,
+    ImageFormatR32ui = 33,
+    ImageFormatRgb10a2ui = 34,
+    ImageFormatRg32ui = 35,
+    ImageFormatRg16ui = 36,
+    ImageFormatRg8ui = 37,
+    ImageFormatR16ui = 38,
+    ImageFormatR8ui = 39,
+    ImageFormatR64ui = 40,
+    ImageFormatR64i = 41,
+    ImageFormatMax = 0x7fffffff,
+};
+
+enum ImageChannelOrder {
+    ImageChannelOrderR = 0,
+    ImageChannelOrderA = 1,
+    ImageChannelOrderRG = 2,
+    ImageChannelOrderRA = 3,
+    ImageChannelOrderRGB = 4,
+    ImageChannelOrderRGBA = 5,
+    ImageChannelOrderBGRA = 6,
+    ImageChannelOrderARGB = 7,
+    ImageChannelOrderIntensity = 8,
+    ImageChannelOrderLuminance = 9,
+    ImageChannelOrderRx = 10,
+    ImageChannelOrderRGx = 11,
+    ImageChannelOrderRGBx = 12,
+    ImageChannelOrderDepth = 13,
+    ImageChannelOrderDepthStencil = 14,
+    ImageChannelOrdersRGB = 15,
+    ImageChannelOrdersRGBx = 16,
+    ImageChannelOrdersRGBA = 17,
+    ImageChannelOrdersBGRA = 18,
+    ImageChannelOrderABGR = 19,
+    ImageChannelOrderMax = 0x7fffffff,
+};
+
+enum ImageChannelDataType {
+    ImageChannelDataTypeSnormInt8 = 0,
+    ImageChannelDataTypeSnormInt16 = 1,
+    ImageChannelDataTypeUnormInt8 = 2,
+    ImageChannelDataTypeUnormInt16 = 3,
+    ImageChannelDataTypeUnormShort565 = 4,
+    ImageChannelDataTypeUnormShort555 = 5,
+    ImageChannelDataTypeUnormInt101010 = 6,
+    ImageChannelDataTypeSignedInt8 = 7,
+    ImageChannelDataTypeSignedInt16 = 8,
+    ImageChannelDataTypeSignedInt32 = 9,
+    ImageChannelDataTypeUnsignedInt8 = 10,
+    ImageChannelDataTypeUnsignedInt16 = 11,
+    ImageChannelDataTypeUnsignedInt32 = 12,
+    ImageChannelDataTypeHalfFloat = 13,
+    ImageChannelDataTypeFloat = 14,
+    ImageChannelDataTypeUnormInt24 = 15,
+    ImageChannelDataTypeUnormInt101010_2 = 16,
+    ImageChannelDataTypeMax = 0x7fffffff,
+};
+
+enum ImageOperandsShift {
+    ImageOperandsBiasShift = 0,
+    ImageOperandsLodShift = 1,
+    ImageOperandsGradShift = 2,
+    ImageOperandsConstOffsetShift = 3,
+    ImageOperandsOffsetShift = 4,
+    ImageOperandsConstOffsetsShift = 5,
+    ImageOperandsSampleShift = 6,
+    ImageOperandsMinLodShift = 7,
+    ImageOperandsMakeTexelAvailableShift = 8,
+    ImageOperandsMakeTexelAvailableKHRShift = 8,
+    ImageOperandsMakeTexelVisibleShift = 9,
+    ImageOperandsMakeTexelVisibleKHRShift = 9,
+    ImageOperandsNonPrivateTexelShift = 10,
+    ImageOperandsNonPrivateTexelKHRShift = 10,
+    ImageOperandsVolatileTexelShift = 11,
+    ImageOperandsVolatileTexelKHRShift = 11,
+    ImageOperandsSignExtendShift = 12,
+    ImageOperandsZeroExtendShift = 13,
+    ImageOperandsNontemporalShift = 14,
+    ImageOperandsOffsetsShift = 16,
+    ImageOperandsMax = 0x7fffffff,
+};
+
+enum ImageOperandsMask {
+    ImageOperandsMaskNone = 0,
+    ImageOperandsBiasMask = 0x00000001,
+    ImageOperandsLodMask = 0x00000002,
+    ImageOperandsGradMask = 0x00000004,
+    ImageOperandsConstOffsetMask = 0x00000008,
+    ImageOperandsOffsetMask = 0x00000010,
+    ImageOperandsConstOffsetsMask = 0x00000020,
+    ImageOperandsSampleMask = 0x00000040,
+    ImageOperandsMinLodMask = 0x00000080,
+    ImageOperandsMakeTexelAvailableMask = 0x00000100,
+    ImageOperandsMakeTexelAvailableKHRMask = 0x00000100,
+    ImageOperandsMakeTexelVisibleMask = 0x00000200,
+    ImageOperandsMakeTexelVisibleKHRMask = 0x00000200,
+    ImageOperandsNonPrivateTexelMask = 0x00000400,
+    ImageOperandsNonPrivateTexelKHRMask = 0x00000400,
+    ImageOperandsVolatileTexelMask = 0x00000800,
+    ImageOperandsVolatileTexelKHRMask = 0x00000800,
+    ImageOperandsSignExtendMask = 0x00001000,
+    ImageOperandsZeroExtendMask = 0x00002000,
+    ImageOperandsNontemporalMask = 0x00004000,
+    ImageOperandsOffsetsMask = 0x00010000,
+};
+
+enum FPFastMathModeShift {
+    FPFastMathModeNotNaNShift = 0,
+    FPFastMathModeNotInfShift = 1,
+    FPFastMathModeNSZShift = 2,
+    FPFastMathModeAllowRecipShift = 3,
+    FPFastMathModeFastShift = 4,
+    FPFastMathModeAllowContractFastINTELShift = 16,
+    FPFastMathModeAllowReassocINTELShift = 17,
+    FPFastMathModeMax = 0x7fffffff,
+};
+
+enum FPFastMathModeMask {
+    FPFastMathModeMaskNone = 0,
+    FPFastMathModeNotNaNMask = 0x00000001,
+    FPFastMathModeNotInfMask = 0x00000002,
+    FPFastMathModeNSZMask = 0x00000004,
+    FPFastMathModeAllowRecipMask = 0x00000008,
+    FPFastMathModeFastMask = 0x00000010,
+    FPFastMathModeAllowContractFastINTELMask = 0x00010000,
+    FPFastMathModeAllowReassocINTELMask = 0x00020000,
+};
+
+enum FPRoundingMode {
+    FPRoundingModeRTE = 0,
+    FPRoundingModeRTZ = 1,
+    FPRoundingModeRTP = 2,
+    FPRoundingModeRTN = 3,
+    FPRoundingModeMax = 0x7fffffff,
+};
+
+enum LinkageType {
+    LinkageTypeExport = 0,
+    LinkageTypeImport = 1,
+    LinkageTypeLinkOnceODR = 2,
+    LinkageTypeMax = 0x7fffffff,
+};
+
+enum AccessQualifier {
+    AccessQualifierReadOnly = 0,
+    AccessQualifierWriteOnly = 1,
+    AccessQualifierReadWrite = 2,
+    AccessQualifierMax = 0x7fffffff,
+};
+
+enum FunctionParameterAttribute {
+    FunctionParameterAttributeZext = 0,
+    FunctionParameterAttributeSext = 1,
+    FunctionParameterAttributeByVal = 2,
+    FunctionParameterAttributeSret = 3,
+    FunctionParameterAttributeNoAlias = 4,
+    FunctionParameterAttributeNoCapture = 5,
+    FunctionParameterAttributeNoWrite = 6,
+    FunctionParameterAttributeNoReadWrite = 7,
+    FunctionParameterAttributeMax = 0x7fffffff,
+};
+
+enum Decoration {
+    DecorationRelaxedPrecision = 0,
+    DecorationSpecId = 1,
+    DecorationBlock = 2,
+    DecorationBufferBlock = 3,
+    DecorationRowMajor = 4,
+    DecorationColMajor = 5,
+    DecorationArrayStride = 6,
+    DecorationMatrixStride = 7,
+    DecorationGLSLShared = 8,
+    DecorationGLSLPacked = 9,
+    DecorationCPacked = 10,
+    DecorationBuiltIn = 11,
+    DecorationNoPerspective = 13,
+    DecorationFlat = 14,
+    DecorationPatch = 15,
+    DecorationCentroid = 16,
+    DecorationSample = 17,
+    DecorationInvariant = 18,
+    DecorationRestrict = 19,
+    DecorationAliased = 20,
+    DecorationVolatile = 21,
+    DecorationConstant = 22,
+    DecorationCoherent = 23,
+    DecorationNonWritable = 24,
+    DecorationNonReadable = 25,
+    DecorationUniform = 26,
+    DecorationUniformId = 27,
+    DecorationSaturatedConversion = 28,
+    DecorationStream = 29,
+    DecorationLocation = 30,
+    DecorationComponent = 31,
+    DecorationIndex = 32,
+    DecorationBinding = 33,
+    DecorationDescriptorSet = 34,
+    DecorationOffset = 35,
+    DecorationXfbBuffer = 36,
+    DecorationXfbStride = 37,
+    DecorationFuncParamAttr = 38,
+    DecorationFPRoundingMode = 39,
+    DecorationFPFastMathMode = 40,
+    DecorationLinkageAttributes = 41,
+    DecorationNoContraction = 42,
+    DecorationInputAttachmentIndex = 43,
+    DecorationAlignment = 44,
+    DecorationMaxByteOffset = 45,
+    DecorationAlignmentId = 46,
+    DecorationMaxByteOffsetId = 47,
+    DecorationNoSignedWrap = 4469,
+    DecorationNoUnsignedWrap = 4470,
+    DecorationExplicitInterpAMD = 4999,
+    DecorationOverrideCoverageNV = 5248,
+    DecorationPassthroughNV = 5250,
+    DecorationViewportRelativeNV = 5252,
+    DecorationSecondaryViewportRelativeNV = 5256,
+    DecorationPerPrimitiveEXT = 5271,
+    DecorationPerPrimitiveNV = 5271,
+    DecorationPerViewNV = 5272,
+    DecorationPerTaskNV = 5273,
+    DecorationPerVertexKHR = 5285,
+    DecorationPerVertexNV = 5285,
+    DecorationNonUniform = 5300,
+    DecorationNonUniformEXT = 5300,
+    DecorationRestrictPointer = 5355,
+    DecorationRestrictPointerEXT = 5355,
+    DecorationAliasedPointer = 5356,
+    DecorationAliasedPointerEXT = 5356,
+    DecorationBindlessSamplerNV = 5398,
+    DecorationBindlessImageNV = 5399,
+    DecorationBoundSamplerNV = 5400,
+    DecorationBoundImageNV = 5401,
+    DecorationSIMTCallINTEL = 5599,
+    DecorationReferencedIndirectlyINTEL = 5602,
+    DecorationClobberINTEL = 5607,
+    DecorationSideEffectsINTEL = 5608,
+    DecorationVectorComputeVariableINTEL = 5624,
+    DecorationFuncParamIOKindINTEL = 5625,
+    DecorationVectorComputeFunctionINTEL = 5626,
+    DecorationStackCallINTEL = 5627,
+    DecorationGlobalVariableOffsetINTEL = 5628,
+    DecorationCounterBuffer = 5634,
+    DecorationHlslCounterBufferGOOGLE = 5634,
+    DecorationHlslSemanticGOOGLE = 5635,
+    DecorationUserSemantic = 5635,
+    DecorationUserTypeGOOGLE = 5636,
+    DecorationFunctionRoundingModeINTEL = 5822,
+    DecorationFunctionDenormModeINTEL = 5823,
+    DecorationRegisterINTEL = 5825,
+    DecorationMemoryINTEL = 5826,
+    DecorationNumbanksINTEL = 5827,
+    DecorationBankwidthINTEL = 5828,
+    DecorationMaxPrivateCopiesINTEL = 5829,
+    DecorationSinglepumpINTEL = 5830,
+    DecorationDoublepumpINTEL = 5831,
+    DecorationMaxReplicatesINTEL = 5832,
+    DecorationSimpleDualPortINTEL = 5833,
+    DecorationMergeINTEL = 5834,
+    DecorationBankBitsINTEL = 5835,
+    DecorationForcePow2DepthINTEL = 5836,
+    DecorationBurstCoalesceINTEL = 5899,
+    DecorationCacheSizeINTEL = 5900,
+    DecorationDontStaticallyCoalesceINTEL = 5901,
+    DecorationPrefetchINTEL = 5902,
+    DecorationStallEnableINTEL = 5905,
+    DecorationFuseLoopsInFunctionINTEL = 5907,
+    DecorationAliasScopeINTEL = 5914,
+    DecorationNoAliasINTEL = 5915,
+    DecorationBufferLocationINTEL = 5921,
+    DecorationIOPipeStorageINTEL = 5944,
+    DecorationFunctionFloatingPointModeINTEL = 6080,
+    DecorationSingleElementVectorINTEL = 6085,
+    DecorationVectorComputeCallableFunctionINTEL = 6087,
+    DecorationMediaBlockIOINTEL = 6140,
+    DecorationMax = 0x7fffffff,
+};
+
+enum BuiltIn {
+    BuiltInPosition = 0,
+    BuiltInPointSize = 1,
+    BuiltInClipDistance = 3,
+    BuiltInCullDistance = 4,
+    BuiltInVertexId = 5,
+    BuiltInInstanceId = 6,
+    BuiltInPrimitiveId = 7,
+    BuiltInInvocationId = 8,
+    BuiltInLayer = 9,
+    BuiltInViewportIndex = 10,
+    BuiltInTessLevelOuter = 11,
+    BuiltInTessLevelInner = 12,
+    BuiltInTessCoord = 13,
+    BuiltInPatchVertices = 14,
+    BuiltInFragCoord = 15,
+    BuiltInPointCoord = 16,
+    BuiltInFrontFacing = 17,
+    BuiltInSampleId = 18,
+    BuiltInSamplePosition = 19,
+    BuiltInSampleMask = 20,
+    BuiltInFragDepth = 22,
+    BuiltInHelperInvocation = 23,
+    BuiltInNumWorkgroups = 24,
+    BuiltInWorkgroupSize = 25,
+    BuiltInWorkgroupId = 26,
+    BuiltInLocalInvocationId = 27,
+    BuiltInGlobalInvocationId = 28,
+    BuiltInLocalInvocationIndex = 29,
+    BuiltInWorkDim = 30,
+    BuiltInGlobalSize = 31,
+    BuiltInEnqueuedWorkgroupSize = 32,
+    BuiltInGlobalOffset = 33,
+    BuiltInGlobalLinearId = 34,
+    BuiltInSubgroupSize = 36,
+    BuiltInSubgroupMaxSize = 37,
+    BuiltInNumSubgroups = 38,
+    BuiltInNumEnqueuedSubgroups = 39,
+    BuiltInSubgroupId = 40,
+    BuiltInSubgroupLocalInvocationId = 41,
+    BuiltInVertexIndex = 42,
+    BuiltInInstanceIndex = 43,
+    BuiltInSubgroupEqMask = 4416,
+    BuiltInSubgroupEqMaskKHR = 4416,
+    BuiltInSubgroupGeMask = 4417,
+    BuiltInSubgroupGeMaskKHR = 4417,
+    BuiltInSubgroupGtMask = 4418,
+    BuiltInSubgroupGtMaskKHR = 4418,
+    BuiltInSubgroupLeMask = 4419,
+    BuiltInSubgroupLeMaskKHR = 4419,
+    BuiltInSubgroupLtMask = 4420,
+    BuiltInSubgroupLtMaskKHR = 4420,
+    BuiltInBaseVertex = 4424,
+    BuiltInBaseInstance = 4425,
+    BuiltInDrawIndex = 4426,
+    BuiltInPrimitiveShadingRateKHR = 4432,
+    BuiltInDeviceIndex = 4438,
+    BuiltInViewIndex = 4440,
+    BuiltInShadingRateKHR = 4444,
+    BuiltInBaryCoordNoPerspAMD = 4992,
+    BuiltInBaryCoordNoPerspCentroidAMD = 4993,
+    BuiltInBaryCoordNoPerspSampleAMD = 4994,
+    BuiltInBaryCoordSmoothAMD = 4995,
+    BuiltInBaryCoordSmoothCentroidAMD = 4996,
+    BuiltInBaryCoordSmoothSampleAMD = 4997,
+    BuiltInBaryCoordPullModelAMD = 4998,
+    BuiltInFragStencilRefEXT = 5014,
+    BuiltInViewportMaskNV = 5253,
+    BuiltInSecondaryPositionNV = 5257,
+    BuiltInSecondaryViewportMaskNV = 5258,
+    BuiltInPositionPerViewNV = 5261,
+    BuiltInViewportMaskPerViewNV = 5262,
+    BuiltInFullyCoveredEXT = 5264,
+    BuiltInTaskCountNV = 5274,
+    BuiltInPrimitiveCountNV = 5275,
+    BuiltInPrimitiveIndicesNV = 5276,
+    BuiltInClipDistancePerViewNV = 5277,
+    BuiltInCullDistancePerViewNV = 5278,
+    BuiltInLayerPerViewNV = 5279,
+    BuiltInMeshViewCountNV = 5280,
+    BuiltInMeshViewIndicesNV = 5281,
+    BuiltInBaryCoordKHR = 5286,
+    BuiltInBaryCoordNV = 5286,
+    BuiltInBaryCoordNoPerspKHR = 5287,
+    BuiltInBaryCoordNoPerspNV = 5287,
+    BuiltInFragSizeEXT = 5292,
+    BuiltInFragmentSizeNV = 5292,
+    BuiltInFragInvocationCountEXT = 5293,
+    BuiltInInvocationsPerPixelNV = 5293,
+    BuiltInPrimitivePointIndicesEXT = 5294,
+    BuiltInPrimitiveLineIndicesEXT = 5295,
+    BuiltInPrimitiveTriangleIndicesEXT = 5296,
+    BuiltInCullPrimitiveEXT = 5299,
+    BuiltInLaunchIdKHR = 5319,
+    BuiltInLaunchIdNV = 5319,
+    BuiltInLaunchSizeKHR = 5320,
+    BuiltInLaunchSizeNV = 5320,
+    BuiltInWorldRayOriginKHR = 5321,
+    BuiltInWorldRayOriginNV = 5321,
+    BuiltInWorldRayDirectionKHR = 5322,
+    BuiltInWorldRayDirectionNV = 5322,
+    BuiltInObjectRayOriginKHR = 5323,
+    BuiltInObjectRayOriginNV = 5323,
+    BuiltInObjectRayDirectionKHR = 5324,
+    BuiltInObjectRayDirectionNV = 5324,
+    BuiltInRayTminKHR = 5325,
+    BuiltInRayTminNV = 5325,
+    BuiltInRayTmaxKHR = 5326,
+    BuiltInRayTmaxNV = 5326,
+    BuiltInInstanceCustomIndexKHR = 5327,
+    BuiltInInstanceCustomIndexNV = 5327,
+    BuiltInObjectToWorldKHR = 5330,
+    BuiltInObjectToWorldNV = 5330,
+    BuiltInWorldToObjectKHR = 5331,
+    BuiltInWorldToObjectNV = 5331,
+    BuiltInHitTNV = 5332,
+    BuiltInHitKindKHR = 5333,
+    BuiltInHitKindNV = 5333,
+    BuiltInCurrentRayTimeNV = 5334,
+    BuiltInIncomingRayFlagsKHR = 5351,
+    BuiltInIncomingRayFlagsNV = 5351,
+    BuiltInRayGeometryIndexKHR = 5352,
+    BuiltInWarpsPerSMNV = 5374,
+    BuiltInSMCountNV = 5375,
+    BuiltInWarpIDNV = 5376,
+    BuiltInSMIDNV = 5377,
+    BuiltInCullMaskKHR = 6021,
+    BuiltInMax = 0x7fffffff,
+};
+
+enum SelectionControlShift {
+    SelectionControlFlattenShift = 0,
+    SelectionControlDontFlattenShift = 1,
+    SelectionControlMax = 0x7fffffff,
+};
+
+enum SelectionControlMask {
+    SelectionControlMaskNone = 0,
+    SelectionControlFlattenMask = 0x00000001,
+    SelectionControlDontFlattenMask = 0x00000002,
+};
+
+enum LoopControlShift {
+    LoopControlUnrollShift = 0,
+    LoopControlDontUnrollShift = 1,
+    LoopControlDependencyInfiniteShift = 2,
+    LoopControlDependencyLengthShift = 3,
+    LoopControlMinIterationsShift = 4,
+    LoopControlMaxIterationsShift = 5,
+    LoopControlIterationMultipleShift = 6,
+    LoopControlPeelCountShift = 7,
+    LoopControlPartialCountShift = 8,
+    LoopControlInitiationIntervalINTELShift = 16,
+    LoopControlMaxConcurrencyINTELShift = 17,
+    LoopControlDependencyArrayINTELShift = 18,
+    LoopControlPipelineEnableINTELShift = 19,
+    LoopControlLoopCoalesceINTELShift = 20,
+    LoopControlMaxInterleavingINTELShift = 21,
+    LoopControlSpeculatedIterationsINTELShift = 22,
+    LoopControlNoFusionINTELShift = 23,
+    LoopControlMax = 0x7fffffff,
+};
+
+enum LoopControlMask {
+    LoopControlMaskNone = 0,
+    LoopControlUnrollMask = 0x00000001,
+    LoopControlDontUnrollMask = 0x00000002,
+    LoopControlDependencyInfiniteMask = 0x00000004,
+    LoopControlDependencyLengthMask = 0x00000008,
+    LoopControlMinIterationsMask = 0x00000010,
+    LoopControlMaxIterationsMask = 0x00000020,
+    LoopControlIterationMultipleMask = 0x00000040,
+    LoopControlPeelCountMask = 0x00000080,
+    LoopControlPartialCountMask = 0x00000100,
+    LoopControlInitiationIntervalINTELMask = 0x00010000,
+    LoopControlMaxConcurrencyINTELMask = 0x00020000,
+    LoopControlDependencyArrayINTELMask = 0x00040000,
+    LoopControlPipelineEnableINTELMask = 0x00080000,
+    LoopControlLoopCoalesceINTELMask = 0x00100000,
+    LoopControlMaxInterleavingINTELMask = 0x00200000,
+    LoopControlSpeculatedIterationsINTELMask = 0x00400000,
+    LoopControlNoFusionINTELMask = 0x00800000,
+};
+
+enum FunctionControlShift {
+    FunctionControlInlineShift = 0,
+    FunctionControlDontInlineShift = 1,
+    FunctionControlPureShift = 2,
+    FunctionControlConstShift = 3,
+    FunctionControlOptNoneINTELShift = 16,
+    FunctionControlMax = 0x7fffffff,
+};
+
+enum FunctionControlMask {
+    FunctionControlMaskNone = 0,
+    FunctionControlInlineMask = 0x00000001,
+    FunctionControlDontInlineMask = 0x00000002,
+    FunctionControlPureMask = 0x00000004,
+    FunctionControlConstMask = 0x00000008,
+    FunctionControlOptNoneINTELMask = 0x00010000,
+};
+
+enum MemorySemanticsShift {
+    MemorySemanticsAcquireShift = 1,
+    MemorySemanticsReleaseShift = 2,
+    MemorySemanticsAcquireReleaseShift = 3,
+    MemorySemanticsSequentiallyConsistentShift = 4,
+    MemorySemanticsUniformMemoryShift = 6,
+    MemorySemanticsSubgroupMemoryShift = 7,
+    MemorySemanticsWorkgroupMemoryShift = 8,
+    MemorySemanticsCrossWorkgroupMemoryShift = 9,
+    MemorySemanticsAtomicCounterMemoryShift = 10,
+    MemorySemanticsImageMemoryShift = 11,
+    MemorySemanticsOutputMemoryShift = 12,
+    MemorySemanticsOutputMemoryKHRShift = 12,
+    MemorySemanticsMakeAvailableShift = 13,
+    MemorySemanticsMakeAvailableKHRShift = 13,
+    MemorySemanticsMakeVisibleShift = 14,
+    MemorySemanticsMakeVisibleKHRShift = 14,
+    MemorySemanticsVolatileShift = 15,
+    MemorySemanticsMax = 0x7fffffff,
+};
+
+enum MemorySemanticsMask {
+    MemorySemanticsMaskNone = 0,
+    MemorySemanticsAcquireMask = 0x00000002,
+    MemorySemanticsReleaseMask = 0x00000004,
+    MemorySemanticsAcquireReleaseMask = 0x00000008,
+    MemorySemanticsSequentiallyConsistentMask = 0x00000010,
+    MemorySemanticsUniformMemoryMask = 0x00000040,
+    MemorySemanticsSubgroupMemoryMask = 0x00000080,
+    MemorySemanticsWorkgroupMemoryMask = 0x00000100,
+    MemorySemanticsCrossWorkgroupMemoryMask = 0x00000200,
+    MemorySemanticsAtomicCounterMemoryMask = 0x00000400,
+    MemorySemanticsImageMemoryMask = 0x00000800,
+    MemorySemanticsOutputMemoryMask = 0x00001000,
+    MemorySemanticsOutputMemoryKHRMask = 0x00001000,
+    MemorySemanticsMakeAvailableMask = 0x00002000,
+    MemorySemanticsMakeAvailableKHRMask = 0x00002000,
+    MemorySemanticsMakeVisibleMask = 0x00004000,
+    MemorySemanticsMakeVisibleKHRMask = 0x00004000,
+    MemorySemanticsVolatileMask = 0x00008000,
+};
+
+enum MemoryAccessShift {
+    MemoryAccessVolatileShift = 0,
+    MemoryAccessAlignedShift = 1,
+    MemoryAccessNontemporalShift = 2,
+    MemoryAccessMakePointerAvailableShift = 3,
+    MemoryAccessMakePointerAvailableKHRShift = 3,
+    MemoryAccessMakePointerVisibleShift = 4,
+    MemoryAccessMakePointerVisibleKHRShift = 4,
+    MemoryAccessNonPrivatePointerShift = 5,
+    MemoryAccessNonPrivatePointerKHRShift = 5,
+    MemoryAccessAliasScopeINTELMaskShift = 16,
+    MemoryAccessNoAliasINTELMaskShift = 17,
+    MemoryAccessMax = 0x7fffffff,
+};
+
+enum MemoryAccessMask {
+    MemoryAccessMaskNone = 0,
+    MemoryAccessVolatileMask = 0x00000001,
+    MemoryAccessAlignedMask = 0x00000002,
+    MemoryAccessNontemporalMask = 0x00000004,
+    MemoryAccessMakePointerAvailableMask = 0x00000008,
+    MemoryAccessMakePointerAvailableKHRMask = 0x00000008,
+    MemoryAccessMakePointerVisibleMask = 0x00000010,
+    MemoryAccessMakePointerVisibleKHRMask = 0x00000010,
+    MemoryAccessNonPrivatePointerMask = 0x00000020,
+    MemoryAccessNonPrivatePointerKHRMask = 0x00000020,
+    MemoryAccessAliasScopeINTELMaskMask = 0x00010000,
+    MemoryAccessNoAliasINTELMaskMask = 0x00020000,
+};
+
+enum Scope {
+    ScopeCrossDevice = 0,
+    ScopeDevice = 1,
+    ScopeWorkgroup = 2,
+    ScopeSubgroup = 3,
+    ScopeInvocation = 4,
+    ScopeQueueFamily = 5,
+    ScopeQueueFamilyKHR = 5,
+    ScopeShaderCallKHR = 6,
+    ScopeMax = 0x7fffffff,
+};
+
+enum GroupOperation {
+    GroupOperationReduce = 0,
+    GroupOperationInclusiveScan = 1,
+    GroupOperationExclusiveScan = 2,
+    GroupOperationClusteredReduce = 3,
+    GroupOperationPartitionedReduceNV = 6,
+    GroupOperationPartitionedInclusiveScanNV = 7,
+    GroupOperationPartitionedExclusiveScanNV = 8,
+    GroupOperationMax = 0x7fffffff,
+};
+
+enum KernelEnqueueFlags {
+    KernelEnqueueFlagsNoWait = 0,
+    KernelEnqueueFlagsWaitKernel = 1,
+    KernelEnqueueFlagsWaitWorkGroup = 2,
+    KernelEnqueueFlagsMax = 0x7fffffff,
+};
+
+enum KernelProfilingInfoShift {
+    KernelProfilingInfoCmdExecTimeShift = 0,
+    KernelProfilingInfoMax = 0x7fffffff,
+};
+
+enum KernelProfilingInfoMask {
+    KernelProfilingInfoMaskNone = 0,
+    KernelProfilingInfoCmdExecTimeMask = 0x00000001,
+};
+
+enum Capability {
+    CapabilityMatrix = 0,
+    CapabilityShader = 1,
+    CapabilityGeometry = 2,
+    CapabilityTessellation = 3,
+    CapabilityAddresses = 4,
+    CapabilityLinkage = 5,
+    CapabilityKernel = 6,
+    CapabilityVector16 = 7,
+    CapabilityFloat16Buffer = 8,
+    CapabilityFloat16 = 9,
+    CapabilityFloat64 = 10,
+    CapabilityInt64 = 11,
+    CapabilityInt64Atomics = 12,
+    CapabilityImageBasic = 13,
+    CapabilityImageReadWrite = 14,
+    CapabilityImageMipmap = 15,
+    CapabilityPipes = 17,
+    CapabilityGroups = 18,
+    CapabilityDeviceEnqueue = 19,
+    CapabilityLiteralSampler = 20,
+    CapabilityAtomicStorage = 21,
+    CapabilityInt16 = 22,
+    CapabilityTessellationPointSize = 23,
+    CapabilityGeometryPointSize = 24,
+    CapabilityImageGatherExtended = 25,
+    CapabilityStorageImageMultisample = 27,
+    CapabilityUniformBufferArrayDynamicIndexing = 28,
+    CapabilitySampledImageArrayDynamicIndexing = 29,
+    CapabilityStorageBufferArrayDynamicIndexing = 30,
+    CapabilityStorageImageArrayDynamicIndexing = 31,
+    CapabilityClipDistance = 32,
+    CapabilityCullDistance = 33,
+    CapabilityImageCubeArray = 34,
+    CapabilitySampleRateShading = 35,
+    CapabilityImageRect = 36,
+    CapabilitySampledRect = 37,
+    CapabilityGenericPointer = 38,
+    CapabilityInt8 = 39,
+    CapabilityInputAttachment = 40,
+    CapabilitySparseResidency = 41,
+    CapabilityMinLod = 42,
+    CapabilitySampled1D = 43,
+    CapabilityImage1D = 44,
+    CapabilitySampledCubeArray = 45,
+    CapabilitySampledBuffer = 46,
+    CapabilityImageBuffer = 47,
+    CapabilityImageMSArray = 48,
+    CapabilityStorageImageExtendedFormats = 49,
+    CapabilityImageQuery = 50,
+    CapabilityDerivativeControl = 51,
+    CapabilityInterpolationFunction = 52,
+    CapabilityTransformFeedback = 53,
+    CapabilityGeometryStreams = 54,
+    CapabilityStorageImageReadWithoutFormat = 55,
+    CapabilityStorageImageWriteWithoutFormat = 56,
+    CapabilityMultiViewport = 57,
+    CapabilitySubgroupDispatch = 58,
+    CapabilityNamedBarrier = 59,
+    CapabilityPipeStorage = 60,
+    CapabilityGroupNonUniform = 61,
+    CapabilityGroupNonUniformVote = 62,
+    CapabilityGroupNonUniformArithmetic = 63,
+    CapabilityGroupNonUniformBallot = 64,
+    CapabilityGroupNonUniformShuffle = 65,
+    CapabilityGroupNonUniformShuffleRelative = 66,
+    CapabilityGroupNonUniformClustered = 67,
+    CapabilityGroupNonUniformQuad = 68,
+    CapabilityShaderLayer = 69,
+    CapabilityShaderViewportIndex = 70,
+    CapabilityUniformDecoration = 71,
+    CapabilityFragmentShadingRateKHR = 4422,
+    CapabilitySubgroupBallotKHR = 4423,
+    CapabilityDrawParameters = 4427,
+    CapabilityWorkgroupMemoryExplicitLayoutKHR = 4428,
+    CapabilityWorkgroupMemoryExplicitLayout8BitAccessKHR = 4429,
+    CapabilityWorkgroupMemoryExplicitLayout16BitAccessKHR = 4430,
+    CapabilitySubgroupVoteKHR = 4431,
+    CapabilityStorageBuffer16BitAccess = 4433,
+    CapabilityStorageUniformBufferBlock16 = 4433,
+    CapabilityStorageUniform16 = 4434,
+    CapabilityUniformAndStorageBuffer16BitAccess = 4434,
+    CapabilityStoragePushConstant16 = 4435,
+    CapabilityStorageInputOutput16 = 4436,
+    CapabilityDeviceGroup = 4437,
+    CapabilityMultiView = 4439,
+    CapabilityVariablePointersStorageBuffer = 4441,
+    CapabilityVariablePointers = 4442,
+    CapabilityAtomicStorageOps = 4445,
+    CapabilitySampleMaskPostDepthCoverage = 4447,
+    CapabilityStorageBuffer8BitAccess = 4448,
+    CapabilityUniformAndStorageBuffer8BitAccess = 4449,
+    CapabilityStoragePushConstant8 = 4450,
+    CapabilityDenormPreserve = 4464,
+    CapabilityDenormFlushToZero = 4465,
+    CapabilitySignedZeroInfNanPreserve = 4466,
+    CapabilityRoundingModeRTE = 4467,
+    CapabilityRoundingModeRTZ = 4468,
+    CapabilityRayQueryProvisionalKHR = 4471,
+    CapabilityRayQueryKHR = 4472,
+    CapabilityRayTraversalPrimitiveCullingKHR = 4478,
+    CapabilityRayTracingKHR = 4479,
+    CapabilityFloat16ImageAMD = 5008,
+    CapabilityImageGatherBiasLodAMD = 5009,
+    CapabilityFragmentMaskAMD = 5010,
+    CapabilityStencilExportEXT = 5013,
+    CapabilityImageReadWriteLodAMD = 5015,
+    CapabilityInt64ImageEXT = 5016,
+    CapabilityShaderClockKHR = 5055,
+    CapabilitySampleMaskOverrideCoverageNV = 5249,
+    CapabilityGeometryShaderPassthroughNV = 5251,
+    CapabilityShaderViewportIndexLayerEXT = 5254,
+    CapabilityShaderViewportIndexLayerNV = 5254,
+    CapabilityShaderViewportMaskNV = 5255,
+    CapabilityShaderStereoViewNV = 5259,
+    CapabilityPerViewAttributesNV = 5260,
+    CapabilityFragmentFullyCoveredEXT = 5265,
+    CapabilityMeshShadingNV = 5266,
+    CapabilityImageFootprintNV = 5282,
+    CapabilityMeshShadingEXT = 5283,
+    CapabilityFragmentBarycentricKHR = 5284,
+    CapabilityFragmentBarycentricNV = 5284,
+    CapabilityComputeDerivativeGroupQuadsNV = 5288,
+    CapabilityFragmentDensityEXT = 5291,
+    CapabilityShadingRateNV = 5291,
+    CapabilityGroupNonUniformPartitionedNV = 5297,
+    CapabilityShaderNonUniform = 5301,
+    CapabilityShaderNonUniformEXT = 5301,
+    CapabilityRuntimeDescriptorArray = 5302,
+    CapabilityRuntimeDescriptorArrayEXT = 5302,
+    CapabilityInputAttachmentArrayDynamicIndexing = 5303,
+    CapabilityInputAttachmentArrayDynamicIndexingEXT = 5303,
+    CapabilityUniformTexelBufferArrayDynamicIndexing = 5304,
+    CapabilityUniformTexelBufferArrayDynamicIndexingEXT = 5304,
+    CapabilityStorageTexelBufferArrayDynamicIndexing = 5305,
+    CapabilityStorageTexelBufferArrayDynamicIndexingEXT = 5305,
+    CapabilityUniformBufferArrayNonUniformIndexing = 5306,
+    CapabilityUniformBufferArrayNonUniformIndexingEXT = 5306,
+    CapabilitySampledImageArrayNonUniformIndexing = 5307,
+    CapabilitySampledImageArrayNonUniformIndexingEXT = 5307,
+    CapabilityStorageBufferArrayNonUniformIndexing = 5308,
+    CapabilityStorageBufferArrayNonUniformIndexingEXT = 5308,
+    CapabilityStorageImageArrayNonUniformIndexing = 5309,
+    CapabilityStorageImageArrayNonUniformIndexingEXT = 5309,
+    CapabilityInputAttachmentArrayNonUniformIndexing = 5310,
+    CapabilityInputAttachmentArrayNonUniformIndexingEXT = 5310,
+    CapabilityUniformTexelBufferArrayNonUniformIndexing = 5311,
+    CapabilityUniformTexelBufferArrayNonUniformIndexingEXT = 5311,
+    CapabilityStorageTexelBufferArrayNonUniformIndexing = 5312,
+    CapabilityStorageTexelBufferArrayNonUniformIndexingEXT = 5312,
+    CapabilityRayTracingNV = 5340,
+    CapabilityRayTracingMotionBlurNV = 5341,
+    CapabilityVulkanMemoryModel = 5345,
+    CapabilityVulkanMemoryModelKHR = 5345,
+    CapabilityVulkanMemoryModelDeviceScope = 5346,
+    CapabilityVulkanMemoryModelDeviceScopeKHR = 5346,
+    CapabilityPhysicalStorageBufferAddresses = 5347,
+    CapabilityPhysicalStorageBufferAddressesEXT = 5347,
+    CapabilityComputeDerivativeGroupLinearNV = 5350,
+    CapabilityRayTracingProvisionalKHR = 5353,
+    CapabilityCooperativeMatrixNV = 5357,
+    CapabilityFragmentShaderSampleInterlockEXT = 5363,
+    CapabilityFragmentShaderShadingRateInterlockEXT = 5372,
+    CapabilityShaderSMBuiltinsNV = 5373,
+    CapabilityFragmentShaderPixelInterlockEXT = 5378,
+    CapabilityDemoteToHelperInvocation = 5379,
+    CapabilityDemoteToHelperInvocationEXT = 5379,
+    CapabilityBindlessTextureNV = 5390,
+    CapabilitySubgroupShuffleINTEL = 5568,
+    CapabilitySubgroupBufferBlockIOINTEL = 5569,
+    CapabilitySubgroupImageBlockIOINTEL = 5570,
+    CapabilitySubgroupImageMediaBlockIOINTEL = 5579,
+    CapabilityRoundToInfinityINTEL = 5582,
+    CapabilityFloatingPointModeINTEL = 5583,
+    CapabilityIntegerFunctions2INTEL = 5584,
+    CapabilityFunctionPointersINTEL = 5603,
+    CapabilityIndirectReferencesINTEL = 5604,
+    CapabilityAsmINTEL = 5606,
+    CapabilityAtomicFloat32MinMaxEXT = 5612,
+    CapabilityAtomicFloat64MinMaxEXT = 5613,
+    CapabilityAtomicFloat16MinMaxEXT = 5616,
+    CapabilityVectorComputeINTEL = 5617,
+    CapabilityVectorAnyINTEL = 5619,
+    CapabilityExpectAssumeKHR = 5629,
+    CapabilitySubgroupAvcMotionEstimationINTEL = 5696,
+    CapabilitySubgroupAvcMotionEstimationIntraINTEL = 5697,
+    CapabilitySubgroupAvcMotionEstimationChromaINTEL = 5698,
+    CapabilityVariableLengthArrayINTEL = 5817,
+    CapabilityFunctionFloatControlINTEL = 5821,
+    CapabilityFPGAMemoryAttributesINTEL = 5824,
+    CapabilityFPFastMathModeINTEL = 5837,
+    CapabilityArbitraryPrecisionIntegersINTEL = 5844,
+    CapabilityArbitraryPrecisionFloatingPointINTEL = 5845,
+    CapabilityUnstructuredLoopControlsINTEL = 5886,
+    CapabilityFPGALoopControlsINTEL = 5888,
+    CapabilityKernelAttributesINTEL = 5892,
+    CapabilityFPGAKernelAttributesINTEL = 5897,
+    CapabilityFPGAMemoryAccessesINTEL = 5898,
+    CapabilityFPGAClusterAttributesINTEL = 5904,
+    CapabilityLoopFuseINTEL = 5906,
+    CapabilityMemoryAccessAliasingINTEL = 5910,
+    CapabilityFPGABufferLocationINTEL = 5920,
+    CapabilityArbitraryPrecisionFixedPointINTEL = 5922,
+    CapabilityUSMStorageClassesINTEL = 5935,
+    CapabilityIOPipesINTEL = 5943,
+    CapabilityBlockingPipesINTEL = 5945,
+    CapabilityFPGARegINTEL = 5948,
+    CapabilityDotProductInputAll = 6016,
+    CapabilityDotProductInputAllKHR = 6016,
+    CapabilityDotProductInput4x8Bit = 6017,
+    CapabilityDotProductInput4x8BitKHR = 6017,
+    CapabilityDotProductInput4x8BitPacked = 6018,
+    CapabilityDotProductInput4x8BitPackedKHR = 6018,
+    CapabilityDotProduct = 6019,
+    CapabilityDotProductKHR = 6019,
+    CapabilityRayCullMaskKHR = 6020,
+    CapabilityBitInstructions = 6025,
+    CapabilityGroupNonUniformRotateKHR = 6026,
+    CapabilityAtomicFloat32AddEXT = 6033,
+    CapabilityAtomicFloat64AddEXT = 6034,
+    CapabilityLongConstantCompositeINTEL = 6089,
+    CapabilityOptNoneINTEL = 6094,
+    CapabilityAtomicFloat16AddEXT = 6095,
+    CapabilityDebugInfoModuleINTEL = 6114,
+    CapabilitySplitBarrierINTEL = 6141,
+    CapabilityGroupUniformArithmeticKHR = 6400,
+    CapabilityMax = 0x7fffffff,
+};
+
+enum RayFlagsShift {
+    RayFlagsOpaqueKHRShift = 0,
+    RayFlagsNoOpaqueKHRShift = 1,
+    RayFlagsTerminateOnFirstHitKHRShift = 2,
+    RayFlagsSkipClosestHitShaderKHRShift = 3,
+    RayFlagsCullBackFacingTrianglesKHRShift = 4,
+    RayFlagsCullFrontFacingTrianglesKHRShift = 5,
+    RayFlagsCullOpaqueKHRShift = 6,
+    RayFlagsCullNoOpaqueKHRShift = 7,
+    RayFlagsSkipTrianglesKHRShift = 8,
+    RayFlagsSkipAABBsKHRShift = 9,
+    RayFlagsMax = 0x7fffffff,
+};
+
+enum RayFlagsMask {
+    RayFlagsMaskNone = 0,
+    RayFlagsOpaqueKHRMask = 0x00000001,
+    RayFlagsNoOpaqueKHRMask = 0x00000002,
+    RayFlagsTerminateOnFirstHitKHRMask = 0x00000004,
+    RayFlagsSkipClosestHitShaderKHRMask = 0x00000008,
+    RayFlagsCullBackFacingTrianglesKHRMask = 0x00000010,
+    RayFlagsCullFrontFacingTrianglesKHRMask = 0x00000020,
+    RayFlagsCullOpaqueKHRMask = 0x00000040,
+    RayFlagsCullNoOpaqueKHRMask = 0x00000080,
+    RayFlagsSkipTrianglesKHRMask = 0x00000100,
+    RayFlagsSkipAABBsKHRMask = 0x00000200,
+};
+
+enum RayQueryIntersection {
+    RayQueryIntersectionRayQueryCandidateIntersectionKHR = 0,
+    RayQueryIntersectionRayQueryCommittedIntersectionKHR = 1,
+    RayQueryIntersectionMax = 0x7fffffff,
+};
+
+enum RayQueryCommittedIntersectionType {
+    RayQueryCommittedIntersectionTypeRayQueryCommittedIntersectionNoneKHR = 0,
+    RayQueryCommittedIntersectionTypeRayQueryCommittedIntersectionTriangleKHR = 1,
+    RayQueryCommittedIntersectionTypeRayQueryCommittedIntersectionGeneratedKHR = 2,
+    RayQueryCommittedIntersectionTypeMax = 0x7fffffff,
+};
+
+enum RayQueryCandidateIntersectionType {
+    RayQueryCandidateIntersectionTypeRayQueryCandidateIntersectionTriangleKHR = 0,
+    RayQueryCandidateIntersectionTypeRayQueryCandidateIntersectionAABBKHR = 1,
+    RayQueryCandidateIntersectionTypeMax = 0x7fffffff,
+};
+
+enum FragmentShadingRateShift {
+    FragmentShadingRateVertical2PixelsShift = 0,
+    FragmentShadingRateVertical4PixelsShift = 1,
+    FragmentShadingRateHorizontal2PixelsShift = 2,
+    FragmentShadingRateHorizontal4PixelsShift = 3,
+    FragmentShadingRateMax = 0x7fffffff,
+};
+
+enum FragmentShadingRateMask {
+    FragmentShadingRateMaskNone = 0,
+    FragmentShadingRateVertical2PixelsMask = 0x00000001,
+    FragmentShadingRateVertical4PixelsMask = 0x00000002,
+    FragmentShadingRateHorizontal2PixelsMask = 0x00000004,
+    FragmentShadingRateHorizontal4PixelsMask = 0x00000008,
+};
+
+enum FPDenormMode {
+    FPDenormModePreserve = 0,
+    FPDenormModeFlushToZero = 1,
+    FPDenormModeMax = 0x7fffffff,
+};
+
+enum FPOperationMode {
+    FPOperationModeIEEE = 0,
+    FPOperationModeALT = 1,
+    FPOperationModeMax = 0x7fffffff,
+};
+
+enum QuantizationModes {
+    QuantizationModesTRN = 0,
+    QuantizationModesTRN_ZERO = 1,
+    QuantizationModesRND = 2,
+    QuantizationModesRND_ZERO = 3,
+    QuantizationModesRND_INF = 4,
+    QuantizationModesRND_MIN_INF = 5,
+    QuantizationModesRND_CONV = 6,
+    QuantizationModesRND_CONV_ODD = 7,
+    QuantizationModesMax = 0x7fffffff,
+};
+
+enum OverflowModes {
+    OverflowModesWRAP = 0,
+    OverflowModesSAT = 1,
+    OverflowModesSAT_ZERO = 2,
+    OverflowModesSAT_SYM = 3,
+    OverflowModesMax = 0x7fffffff,
+};
+
+enum PackedVectorFormat {
+    PackedVectorFormatPackedVectorFormat4x8Bit = 0,
+    PackedVectorFormatPackedVectorFormat4x8BitKHR = 0,
+    PackedVectorFormatMax = 0x7fffffff,
+};
+
+enum Op {
+    OpNop = 0,
+    OpUndef = 1,
+    OpSourceContinued = 2,
+    OpSource = 3,
+    OpSourceExtension = 4,
+    OpName = 5,
+    OpMemberName = 6,
+    OpString = 7,
+    OpLine = 8,
+    OpExtension = 10,
+    OpExtInstImport = 11,
+    OpExtInst = 12,
+    OpMemoryModel = 14,
+    OpEntryPoint = 15,
+    OpExecutionMode = 16,
+    OpCapability = 17,
+    OpTypeVoid = 19,
+    OpTypeBool = 20,
+    OpTypeInt = 21,
+    OpTypeFloat = 22,
+    OpTypeVector = 23,
+    OpTypeMatrix = 24,
+    OpTypeImage = 25,
+    OpTypeSampler = 26,
+    OpTypeSampledImage = 27,
+    OpTypeArray = 28,
+    OpTypeRuntimeArray = 29,
+    OpTypeStruct = 30,
+    OpTypeOpaque = 31,
+    OpTypePointer = 32,
+    OpTypeFunction = 33,
+    OpTypeEvent = 34,
+    OpTypeDeviceEvent = 35,
+    OpTypeReserveId = 36,
+    OpTypeQueue = 37,
+    OpTypePipe = 38,
+    OpTypeForwardPointer = 39,
+    OpConstantTrue = 41,
+    OpConstantFalse = 42,
+    OpConstant = 43,
+    OpConstantComposite = 44,
+    OpConstantSampler = 45,
+    OpConstantNull = 46,
+    OpSpecConstantTrue = 48,
+    OpSpecConstantFalse = 49,
+    OpSpecConstant = 50,
+    OpSpecConstantComposite = 51,
+    OpSpecConstantOp = 52,
+    OpFunction = 54,
+    OpFunctionParameter = 55,
+    OpFunctionEnd = 56,
+    OpFunctionCall = 57,
+    OpVariable = 59,
+    OpImageTexelPointer = 60,
+    OpLoad = 61,
+    OpStore = 62,
+    OpCopyMemory = 63,
+    OpCopyMemorySized = 64,
+    OpAccessChain = 65,
+    OpInBoundsAccessChain = 66,
+    OpPtrAccessChain = 67,
+    OpArrayLength = 68,
+    OpGenericPtrMemSemantics = 69,
+    OpInBoundsPtrAccessChain = 70,
+    OpDecorate = 71,
+    OpMemberDecorate = 72,
+    OpDecorationGroup = 73,
+    OpGroupDecorate = 74,
+    OpGroupMemberDecorate = 75,
+    OpVectorExtractDynamic = 77,
+    OpVectorInsertDynamic = 78,
+    OpVectorShuffle = 79,
+    OpCompositeConstruct = 80,
+    OpCompositeExtract = 81,
+    OpCompositeInsert = 82,
+    OpCopyObject = 83,
+    OpTranspose = 84,
+    OpSampledImage = 86,
+    OpImageSampleImplicitLod = 87,
+    OpImageSampleExplicitLod = 88,
+    OpImageSampleDrefImplicitLod = 89,
+    OpImageSampleDrefExplicitLod = 90,
+    OpImageSampleProjImplicitLod = 91,
+    OpImageSampleProjExplicitLod = 92,
+    OpImageSampleProjDrefImplicitLod = 93,
+    OpImageSampleProjDrefExplicitLod = 94,
+    OpImageFetch = 95,
+    OpImageGather = 96,
+    OpImageDrefGather = 97,
+    OpImageRead = 98,
+    OpImageWrite = 99,
+    OpImage = 100,
+    OpImageQueryFormat = 101,
+    OpImageQueryOrder = 102,
+    OpImageQuerySizeLod = 103,
+    OpImageQuerySize = 104,
+    OpImageQueryLod = 105,
+    OpImageQueryLevels = 106,
+    OpImageQuerySamples = 107,
+    OpConvertFToU = 109,
+    OpConvertFToS = 110,
+    OpConvertSToF = 111,
+    OpConvertUToF = 112,
+    OpUConvert = 113,
+    OpSConvert = 114,
+    OpFConvert = 115,
+    OpQuantizeToF16 = 116,
+    OpConvertPtrToU = 117,
+    OpSatConvertSToU = 118,
+    OpSatConvertUToS = 119,
+    OpConvertUToPtr = 120,
+    OpPtrCastToGeneric = 121,
+    OpGenericCastToPtr = 122,
+    OpGenericCastToPtrExplicit = 123,
+    OpBitcast = 124,
+    OpSNegate = 126,
+    OpFNegate = 127,
+    OpIAdd = 128,
+    OpFAdd = 129,
+    OpISub = 130,
+    OpFSub = 131,
+    OpIMul = 132,
+    OpFMul = 133,
+    OpUDiv = 134,
+    OpSDiv = 135,
+    OpFDiv = 136,
+    OpUMod = 137,
+    OpSRem = 138,
+    OpSMod = 139,
+    OpFRem = 140,
+    OpFMod = 141,
+    OpVectorTimesScalar = 142,
+    OpMatrixTimesScalar = 143,
+    OpVectorTimesMatrix = 144,
+    OpMatrixTimesVector = 145,
+    OpMatrixTimesMatrix = 146,
+    OpOuterProduct = 147,
+    OpDot = 148,
+    OpIAddCarry = 149,
+    OpISubBorrow = 150,
+    OpUMulExtended = 151,
+    OpSMulExtended = 152,
+    OpAny = 154,
+    OpAll = 155,
+    OpIsNan = 156,
+    OpIsInf = 157,
+    OpIsFinite = 158,
+    OpIsNormal = 159,
+    OpSignBitSet = 160,
+    OpLessOrGreater = 161,
+    OpOrdered = 162,
+    OpUnordered = 163,
+    OpLogicalEqual = 164,
+    OpLogicalNotEqual = 165,
+    OpLogicalOr = 166,
+    OpLogicalAnd = 167,
+    OpLogicalNot = 168,
+    OpSelect = 169,
+    OpIEqual = 170,
+    OpINotEqual = 171,
+    OpUGreaterThan = 172,
+    OpSGreaterThan = 173,
+    OpUGreaterThanEqual = 174,
+    OpSGreaterThanEqual = 175,
+    OpULessThan = 176,
+    OpSLessThan = 177,
+    OpULessThanEqual = 178,
+    OpSLessThanEqual = 179,
+    OpFOrdEqual = 180,
+    OpFUnordEqual = 181,
+    OpFOrdNotEqual = 182,
+    OpFUnordNotEqual = 183,
+    OpFOrdLessThan = 184,
+    OpFUnordLessThan = 185,
+    OpFOrdGreaterThan = 186,
+    OpFUnordGreaterThan = 187,
+    OpFOrdLessThanEqual = 188,
+    OpFUnordLessThanEqual = 189,
+    OpFOrdGreaterThanEqual = 190,
+    OpFUnordGreaterThanEqual = 191,
+    OpShiftRightLogical = 194,
+    OpShiftRightArithmetic = 195,
+    OpShiftLeftLogical = 196,
+    OpBitwiseOr = 197,
+    OpBitwiseXor = 198,
+    OpBitwiseAnd = 199,
+    OpNot = 200,
+    OpBitFieldInsert = 201,
+    OpBitFieldSExtract = 202,
+    OpBitFieldUExtract = 203,
+    OpBitReverse = 204,
+    OpBitCount = 205,
+    OpDPdx = 207,
+    OpDPdy = 208,
+    OpFwidth = 209,
+    OpDPdxFine = 210,
+    OpDPdyFine = 211,
+    OpFwidthFine = 212,
+    OpDPdxCoarse = 213,
+    OpDPdyCoarse = 214,
+    OpFwidthCoarse = 215,
+    OpEmitVertex = 218,
+    OpEndPrimitive = 219,
+    OpEmitStreamVertex = 220,
+    OpEndStreamPrimitive = 221,
+    OpControlBarrier = 224,
+    OpMemoryBarrier = 225,
+    OpAtomicLoad = 227,
+    OpAtomicStore = 228,
+    OpAtomicExchange = 229,
+    OpAtomicCompareExchange = 230,
+    OpAtomicCompareExchangeWeak = 231,
+    OpAtomicIIncrement = 232,
+    OpAtomicIDecrement = 233,
+    OpAtomicIAdd = 234,
+    OpAtomicISub = 235,
+    OpAtomicSMin = 236,
+    OpAtomicUMin = 237,
+    OpAtomicSMax = 238,
+    OpAtomicUMax = 239,
+    OpAtomicAnd = 240,
+    OpAtomicOr = 241,
+    OpAtomicXor = 242,
+    OpPhi = 245,
+    OpLoopMerge = 246,
+    OpSelectionMerge = 247,
+    OpLabel = 248,
+    OpBranch = 249,
+    OpBranchConditional = 250,
+    OpSwitch = 251,
+    OpKill = 252,
+    OpReturn = 253,
+    OpReturnValue = 254,
+    OpUnreachable = 255,
+    OpLifetimeStart = 256,
+    OpLifetimeStop = 257,
+    OpGroupAsyncCopy = 259,
+    OpGroupWaitEvents = 260,
+    OpGroupAll = 261,
+    OpGroupAny = 262,
+    OpGroupBroadcast = 263,
+    OpGroupIAdd = 264,
+    OpGroupFAdd = 265,
+    OpGroupFMin = 266,
+    OpGroupUMin = 267,
+    OpGroupSMin = 268,
+    OpGroupFMax = 269,
+    OpGroupUMax = 270,
+    OpGroupSMax = 271,
+    OpReadPipe = 274,
+    OpWritePipe = 275,
+    OpReservedReadPipe = 276,
+    OpReservedWritePipe = 277,
+    OpReserveReadPipePackets = 278,
+    OpReserveWritePipePackets = 279,
+    OpCommitReadPipe = 280,
+    OpCommitWritePipe = 281,
+    OpIsValidReserveId = 282,
+    OpGetNumPipePackets = 283,
+    OpGetMaxPipePackets = 284,
+    OpGroupReserveReadPipePackets = 285,
+    OpGroupReserveWritePipePackets = 286,
+    OpGroupCommitReadPipe = 287,
+    OpGroupCommitWritePipe = 288,
+    OpEnqueueMarker = 291,
+    OpEnqueueKernel = 292,
+    OpGetKernelNDrangeSubGroupCount = 293,
+    OpGetKernelNDrangeMaxSubGroupSize = 294,
+    OpGetKernelWorkGroupSize = 295,
+    OpGetKernelPreferredWorkGroupSizeMultiple = 296,
+    OpRetainEvent = 297,
+    OpReleaseEvent = 298,
+    OpCreateUserEvent = 299,
+    OpIsValidEvent = 300,
+    OpSetUserEventStatus = 301,
+    OpCaptureEventProfilingInfo = 302,
+    OpGetDefaultQueue = 303,
+    OpBuildNDRange = 304,
+    OpImageSparseSampleImplicitLod = 305,
+    OpImageSparseSampleExplicitLod = 306,
+    OpImageSparseSampleDrefImplicitLod = 307,
+    OpImageSparseSampleDrefExplicitLod = 308,
+    OpImageSparseSampleProjImplicitLod = 309,
+    OpImageSparseSampleProjExplicitLod = 310,
+    OpImageSparseSampleProjDrefImplicitLod = 311,
+    OpImageSparseSampleProjDrefExplicitLod = 312,
+    OpImageSparseFetch = 313,
+    OpImageSparseGather = 314,
+    OpImageSparseDrefGather = 315,
+    OpImageSparseTexelsResident = 316,
+    OpNoLine = 317,
+    OpAtomicFlagTestAndSet = 318,
+    OpAtomicFlagClear = 319,
+    OpImageSparseRead = 320,
+    OpSizeOf = 321,
+    OpTypePipeStorage = 322,
+    OpConstantPipeStorage = 323,
+    OpCreatePipeFromPipeStorage = 324,
+    OpGetKernelLocalSizeForSubgroupCount = 325,
+    OpGetKernelMaxNumSubgroups = 326,
+    OpTypeNamedBarrier = 327,
+    OpNamedBarrierInitialize = 328,
+    OpMemoryNamedBarrier = 329,
+    OpModuleProcessed = 330,
+    OpExecutionModeId = 331,
+    OpDecorateId = 332,
+    OpGroupNonUniformElect = 333,
+    OpGroupNonUniformAll = 334,
+    OpGroupNonUniformAny = 335,
+    OpGroupNonUniformAllEqual = 336,
+    OpGroupNonUniformBroadcast = 337,
+    OpGroupNonUniformBroadcastFirst = 338,
+    OpGroupNonUniformBallot = 339,
+    OpGroupNonUniformInverseBallot = 340,
+    OpGroupNonUniformBallotBitExtract = 341,
+    OpGroupNonUniformBallotBitCount = 342,
+    OpGroupNonUniformBallotFindLSB = 343,
+    OpGroupNonUniformBallotFindMSB = 344,
+    OpGroupNonUniformShuffle = 345,
+    OpGroupNonUniformShuffleXor = 346,
+    OpGroupNonUniformShuffleUp = 347,
+    OpGroupNonUniformShuffleDown = 348,
+    OpGroupNonUniformIAdd = 349,
+    OpGroupNonUniformFAdd = 350,
+    OpGroupNonUniformIMul = 351,
+    OpGroupNonUniformFMul = 352,
+    OpGroupNonUniformSMin = 353,
+    OpGroupNonUniformUMin = 354,
+    OpGroupNonUniformFMin = 355,
+    OpGroupNonUniformSMax = 356,
+    OpGroupNonUniformUMax = 357,
+    OpGroupNonUniformFMax = 358,
+    OpGroupNonUniformBitwiseAnd = 359,
+    OpGroupNonUniformBitwiseOr = 360,
+    OpGroupNonUniformBitwiseXor = 361,
+    OpGroupNonUniformLogicalAnd = 362,
+    OpGroupNonUniformLogicalOr = 363,
+    OpGroupNonUniformLogicalXor = 364,
+    OpGroupNonUniformQuadBroadcast = 365,
+    OpGroupNonUniformQuadSwap = 366,
+    OpCopyLogical = 400,
+    OpPtrEqual = 401,
+    OpPtrNotEqual = 402,
+    OpPtrDiff = 403,
+    OpTerminateInvocation = 4416,
+    OpSubgroupBallotKHR = 4421,
+    OpSubgroupFirstInvocationKHR = 4422,
+    OpSubgroupAllKHR = 4428,
+    OpSubgroupAnyKHR = 4429,
+    OpSubgroupAllEqualKHR = 4430,
+    OpGroupNonUniformRotateKHR = 4431,
+    OpSubgroupReadInvocationKHR = 4432,
+    OpTraceRayKHR = 4445,
+    OpExecuteCallableKHR = 4446,
+    OpConvertUToAccelerationStructureKHR = 4447,
+    OpIgnoreIntersectionKHR = 4448,
+    OpTerminateRayKHR = 4449,
+    OpSDot = 4450,
+    OpSDotKHR = 4450,
+    OpUDot = 4451,
+    OpUDotKHR = 4451,
+    OpSUDot = 4452,
+    OpSUDotKHR = 4452,
+    OpSDotAccSat = 4453,
+    OpSDotAccSatKHR = 4453,
+    OpUDotAccSat = 4454,
+    OpUDotAccSatKHR = 4454,
+    OpSUDotAccSat = 4455,
+    OpSUDotAccSatKHR = 4455,
+    OpTypeRayQueryKHR = 4472,
+    OpRayQueryInitializeKHR = 4473,
+    OpRayQueryTerminateKHR = 4474,
+    OpRayQueryGenerateIntersectionKHR = 4475,
+    OpRayQueryConfirmIntersectionKHR = 4476,
+    OpRayQueryProceedKHR = 4477,
+    OpRayQueryGetIntersectionTypeKHR = 4479,
+    OpGroupIAddNonUniformAMD = 5000,
+    OpGroupFAddNonUniformAMD = 5001,
+    OpGroupFMinNonUniformAMD = 5002,
+    OpGroupUMinNonUniformAMD = 5003,
+    OpGroupSMinNonUniformAMD = 5004,
+    OpGroupFMaxNonUniformAMD = 5005,
+    OpGroupUMaxNonUniformAMD = 5006,
+    OpGroupSMaxNonUniformAMD = 5007,
+    OpFragmentMaskFetchAMD = 5011,
+    OpFragmentFetchAMD = 5012,
+    OpReadClockKHR = 5056,
+    OpImageSampleFootprintNV = 5283,
+    OpEmitMeshTasksEXT = 5294,
+    OpSetMeshOutputsEXT = 5295,
+    OpGroupNonUniformPartitionNV = 5296,
+    OpWritePackedPrimitiveIndices4x8NV = 5299,
+    OpReportIntersectionKHR = 5334,
+    OpReportIntersectionNV = 5334,
+    OpIgnoreIntersectionNV = 5335,
+    OpTerminateRayNV = 5336,
+    OpTraceNV = 5337,
+    OpTraceMotionNV = 5338,
+    OpTraceRayMotionNV = 5339,
+    OpTypeAccelerationStructureKHR = 5341,
+    OpTypeAccelerationStructureNV = 5341,
+    OpExecuteCallableNV = 5344,
+    OpTypeCooperativeMatrixNV = 5358,
+    OpCooperativeMatrixLoadNV = 5359,
+    OpCooperativeMatrixStoreNV = 5360,
+    OpCooperativeMatrixMulAddNV = 5361,
+    OpCooperativeMatrixLengthNV = 5362,
+    OpBeginInvocationInterlockEXT = 5364,
+    OpEndInvocationInterlockEXT = 5365,
+    OpDemoteToHelperInvocation = 5380,
+    OpDemoteToHelperInvocationEXT = 5380,
+    OpIsHelperInvocationEXT = 5381,
+    OpConvertUToImageNV = 5391,
+    OpConvertUToSamplerNV = 5392,
+    OpConvertImageToUNV = 5393,
+    OpConvertSamplerToUNV = 5394,
+    OpConvertUToSampledImageNV = 5395,
+    OpConvertSampledImageToUNV = 5396,
+    OpSamplerImageAddressingModeNV = 5397,
+    OpSubgroupShuffleINTEL = 5571,
+    OpSubgroupShuffleDownINTEL = 5572,
+    OpSubgroupShuffleUpINTEL = 5573,
+    OpSubgroupShuffleXorINTEL = 5574,
+    OpSubgroupBlockReadINTEL = 5575,
+    OpSubgroupBlockWriteINTEL = 5576,
+    OpSubgroupImageBlockReadINTEL = 5577,
+    OpSubgroupImageBlockWriteINTEL = 5578,
+    OpSubgroupImageMediaBlockReadINTEL = 5580,
+    OpSubgroupImageMediaBlockWriteINTEL = 5581,
+    OpUCountLeadingZerosINTEL = 5585,
+    OpUCountTrailingZerosINTEL = 5586,
+    OpAbsISubINTEL = 5587,
+    OpAbsUSubINTEL = 5588,
+    OpIAddSatINTEL = 5589,
+    OpUAddSatINTEL = 5590,
+    OpIAverageINTEL = 5591,
+    OpUAverageINTEL = 5592,
+    OpIAverageRoundedINTEL = 5593,
+    OpUAverageRoundedINTEL = 5594,
+    OpISubSatINTEL = 5595,
+    OpUSubSatINTEL = 5596,
+    OpIMul32x16INTEL = 5597,
+    OpUMul32x16INTEL = 5598,
+    OpConstantFunctionPointerINTEL = 5600,
+    OpFunctionPointerCallINTEL = 5601,
+    OpAsmTargetINTEL = 5609,
+    OpAsmINTEL = 5610,
+    OpAsmCallINTEL = 5611,
+    OpAtomicFMinEXT = 5614,
+    OpAtomicFMaxEXT = 5615,
+    OpAssumeTrueKHR = 5630,
+    OpExpectKHR = 5631,
+    OpDecorateString = 5632,
+    OpDecorateStringGOOGLE = 5632,
+    OpMemberDecorateString = 5633,
+    OpMemberDecorateStringGOOGLE = 5633,
+    OpVmeImageINTEL = 5699,
+    OpTypeVmeImageINTEL = 5700,
+    OpTypeAvcImePayloadINTEL = 5701,
+    OpTypeAvcRefPayloadINTEL = 5702,
+    OpTypeAvcSicPayloadINTEL = 5703,
+    OpTypeAvcMcePayloadINTEL = 5704,
+    OpTypeAvcMceResultINTEL = 5705,
+    OpTypeAvcImeResultINTEL = 5706,
+    OpTypeAvcImeResultSingleReferenceStreamoutINTEL = 5707,
+    OpTypeAvcImeResultDualReferenceStreamoutINTEL = 5708,
+    OpTypeAvcImeSingleReferenceStreaminINTEL = 5709,
+    OpTypeAvcImeDualReferenceStreaminINTEL = 5710,
+    OpTypeAvcRefResultINTEL = 5711,
+    OpTypeAvcSicResultINTEL = 5712,
+    OpSubgroupAvcMceGetDefaultInterBaseMultiReferencePenaltyINTEL = 5713,
+    OpSubgroupAvcMceSetInterBaseMultiReferencePenaltyINTEL = 5714,
+    OpSubgroupAvcMceGetDefaultInterShapePenaltyINTEL = 5715,
+    OpSubgroupAvcMceSetInterShapePenaltyINTEL = 5716,
+    OpSubgroupAvcMceGetDefaultInterDirectionPenaltyINTEL = 5717,
+    OpSubgroupAvcMceSetInterDirectionPenaltyINTEL = 5718,
+    OpSubgroupAvcMceGetDefaultIntraLumaShapePenaltyINTEL = 5719,
+    OpSubgroupAvcMceGetDefaultInterMotionVectorCostTableINTEL = 5720,
+    OpSubgroupAvcMceGetDefaultHighPenaltyCostTableINTEL = 5721,
+    OpSubgroupAvcMceGetDefaultMediumPenaltyCostTableINTEL = 5722,
+    OpSubgroupAvcMceGetDefaultLowPenaltyCostTableINTEL = 5723,
+    OpSubgroupAvcMceSetMotionVectorCostFunctionINTEL = 5724,
+    OpSubgroupAvcMceGetDefaultIntraLumaModePenaltyINTEL = 5725,
+    OpSubgroupAvcMceGetDefaultNonDcLumaIntraPenaltyINTEL = 5726,
+    OpSubgroupAvcMceGetDefaultIntraChromaModeBasePenaltyINTEL = 5727,
+    OpSubgroupAvcMceSetAcOnlyHaarINTEL = 5728,
+    OpSubgroupAvcMceSetSourceInterlacedFieldPolarityINTEL = 5729,
+    OpSubgroupAvcMceSetSingleReferenceInterlacedFieldPolarityINTEL = 5730,
+    OpSubgroupAvcMceSetDualReferenceInterlacedFieldPolaritiesINTEL = 5731,
+    OpSubgroupAvcMceConvertToImePayloadINTEL = 5732,
+    OpSubgroupAvcMceConvertToImeResultINTEL = 5733,
+    OpSubgroupAvcMceConvertToRefPayloadINTEL = 5734,
+    OpSubgroupAvcMceConvertToRefResultINTEL = 5735,
+    OpSubgroupAvcMceConvertToSicPayloadINTEL = 5736,
+    OpSubgroupAvcMceConvertToSicResultINTEL = 5737,
+    OpSubgroupAvcMceGetMotionVectorsINTEL = 5738,
+    OpSubgroupAvcMceGetInterDistortionsINTEL = 5739,
+    OpSubgroupAvcMceGetBestInterDistortionsINTEL = 5740,
+    OpSubgroupAvcMceGetInterMajorShapeINTEL = 5741,
+    OpSubgroupAvcMceGetInterMinorShapeINTEL = 5742,
+    OpSubgroupAvcMceGetInterDirectionsINTEL = 5743,
+    OpSubgroupAvcMceGetInterMotionVectorCountINTEL = 5744,
+    OpSubgroupAvcMceGetInterReferenceIdsINTEL = 5745,
+    OpSubgroupAvcMceGetInterReferenceInterlacedFieldPolaritiesINTEL = 5746,
+    OpSubgroupAvcImeInitializeINTEL = 5747,
+    OpSubgroupAvcImeSetSingleReferenceINTEL = 5748,
+    OpSubgroupAvcImeSetDualReferenceINTEL = 5749,
+    OpSubgroupAvcImeRefWindowSizeINTEL = 5750,
+    OpSubgroupAvcImeAdjustRefOffsetINTEL = 5751,
+    OpSubgroupAvcImeConvertToMcePayloadINTEL = 5752,
+    OpSubgroupAvcImeSetMaxMotionVectorCountINTEL = 5753,
+    OpSubgroupAvcImeSetUnidirectionalMixDisableINTEL = 5754,
+    OpSubgroupAvcImeSetEarlySearchTerminationThresholdINTEL = 5755,
+    OpSubgroupAvcImeSetWeightedSadINTEL = 5756,
+    OpSubgroupAvcImeEvaluateWithSingleReferenceINTEL = 5757,
+    OpSubgroupAvcImeEvaluateWithDualReferenceINTEL = 5758,
+    OpSubgroupAvcImeEvaluateWithSingleReferenceStreaminINTEL = 5759,
+    OpSubgroupAvcImeEvaluateWithDualReferenceStreaminINTEL = 5760,
+    OpSubgroupAvcImeEvaluateWithSingleReferenceStreamoutINTEL = 5761,
+    OpSubgroupAvcImeEvaluateWithDualReferenceStreamoutINTEL = 5762,
+    OpSubgroupAvcImeEvaluateWithSingleReferenceStreaminoutINTEL = 5763,
+    OpSubgroupAvcImeEvaluateWithDualReferenceStreaminoutINTEL = 5764,
+    OpSubgroupAvcImeConvertToMceResultINTEL = 5765,
+    OpSubgroupAvcImeGetSingleReferenceStreaminINTEL = 5766,
+    OpSubgroupAvcImeGetDualReferenceStreaminINTEL = 5767,
+    OpSubgroupAvcImeStripSingleReferenceStreamoutINTEL = 5768,
+    OpSubgroupAvcImeStripDualReferenceStreamoutINTEL = 5769,
+    OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeMotionVectorsINTEL = 5770,
+    OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeDistortionsINTEL = 5771,
+    OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeReferenceIdsINTEL = 5772,
+    OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeMotionVectorsINTEL = 5773,
+    OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeDistortionsINTEL = 5774,
+    OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeReferenceIdsINTEL = 5775,
+    OpSubgroupAvcImeGetBorderReachedINTEL = 5776,
+    OpSubgroupAvcImeGetTruncatedSearchIndicationINTEL = 5777,
+    OpSubgroupAvcImeGetUnidirectionalEarlySearchTerminationINTEL = 5778,
+    OpSubgroupAvcImeGetWeightingPatternMinimumMotionVectorINTEL = 5779,
+    OpSubgroupAvcImeGetWeightingPatternMinimumDistortionINTEL = 5780,
+    OpSubgroupAvcFmeInitializeINTEL = 5781,
+    OpSubgroupAvcBmeInitializeINTEL = 5782,
+    OpSubgroupAvcRefConvertToMcePayloadINTEL = 5783,
+    OpSubgroupAvcRefSetBidirectionalMixDisableINTEL = 5784,
+    OpSubgroupAvcRefSetBilinearFilterEnableINTEL = 5785,
+    OpSubgroupAvcRefEvaluateWithSingleReferenceINTEL = 5786,
+    OpSubgroupAvcRefEvaluateWithDualReferenceINTEL = 5787,
+    OpSubgroupAvcRefEvaluateWithMultiReferenceINTEL = 5788,
+    OpSubgroupAvcRefEvaluateWithMultiReferenceInterlacedINTEL = 5789,
+    OpSubgroupAvcRefConvertToMceResultINTEL = 5790,
+    OpSubgroupAvcSicInitializeINTEL = 5791,
+    OpSubgroupAvcSicConfigureSkcINTEL = 5792,
+    OpSubgroupAvcSicConfigureIpeLumaINTEL = 5793,
+    OpSubgroupAvcSicConfigureIpeLumaChromaINTEL = 5794,
+    OpSubgroupAvcSicGetMotionVectorMaskINTEL = 5795,
+    OpSubgroupAvcSicConvertToMcePayloadINTEL = 5796,
+    OpSubgroupAvcSicSetIntraLumaShapePenaltyINTEL = 5797,
+    OpSubgroupAvcSicSetIntraLumaModeCostFunctionINTEL = 5798,
+    OpSubgroupAvcSicSetIntraChromaModeCostFunctionINTEL = 5799,
+    OpSubgroupAvcSicSetBilinearFilterEnableINTEL = 5800,
+    OpSubgroupAvcSicSetSkcForwardTransformEnableINTEL = 5801,
+    OpSubgroupAvcSicSetBlockBasedRawSkipSadINTEL = 5802,
+    OpSubgroupAvcSicEvaluateIpeINTEL = 5803,
+    OpSubgroupAvcSicEvaluateWithSingleReferenceINTEL = 5804,
+    OpSubgroupAvcSicEvaluateWithDualReferenceINTEL = 5805,
+    OpSubgroupAvcSicEvaluateWithMultiReferenceINTEL = 5806,
+    OpSubgroupAvcSicEvaluateWithMultiReferenceInterlacedINTEL = 5807,
+    OpSubgroupAvcSicConvertToMceResultINTEL = 5808,
+    OpSubgroupAvcSicGetIpeLumaShapeINTEL = 5809,
+    OpSubgroupAvcSicGetBestIpeLumaDistortionINTEL = 5810,
+    OpSubgroupAvcSicGetBestIpeChromaDistortionINTEL = 5811,
+    OpSubgroupAvcSicGetPackedIpeLumaModesINTEL = 5812,
+    OpSubgroupAvcSicGetIpeChromaModeINTEL = 5813,
+    OpSubgroupAvcSicGetPackedSkcLumaCountThresholdINTEL = 5814,
+    OpSubgroupAvcSicGetPackedSkcLumaSumThresholdINTEL = 5815,
+    OpSubgroupAvcSicGetInterRawSadsINTEL = 5816,
+    OpVariableLengthArrayINTEL = 5818,
+    OpSaveMemoryINTEL = 5819,
+    OpRestoreMemoryINTEL = 5820,
+    OpArbitraryFloatSinCosPiINTEL = 5840,
+    OpArbitraryFloatCastINTEL = 5841,
+    OpArbitraryFloatCastFromIntINTEL = 5842,
+    OpArbitraryFloatCastToIntINTEL = 5843,
+    OpArbitraryFloatAddINTEL = 5846,
+    OpArbitraryFloatSubINTEL = 5847,
+    OpArbitraryFloatMulINTEL = 5848,
+    OpArbitraryFloatDivINTEL = 5849,
+    OpArbitraryFloatGTINTEL = 5850,
+    OpArbitraryFloatGEINTEL = 5851,
+    OpArbitraryFloatLTINTEL = 5852,
+    OpArbitraryFloatLEINTEL = 5853,
+    OpArbitraryFloatEQINTEL = 5854,
+    OpArbitraryFloatRecipINTEL = 5855,
+    OpArbitraryFloatRSqrtINTEL = 5856,
+    OpArbitraryFloatCbrtINTEL = 5857,
+    OpArbitraryFloatHypotINTEL = 5858,
+    OpArbitraryFloatSqrtINTEL = 5859,
+    OpArbitraryFloatLogINTEL = 5860,
+    OpArbitraryFloatLog2INTEL = 5861,
+    OpArbitraryFloatLog10INTEL = 5862,
+    OpArbitraryFloatLog1pINTEL = 5863,
+    OpArbitraryFloatExpINTEL = 5864,
+    OpArbitraryFloatExp2INTEL = 5865,
+    OpArbitraryFloatExp10INTEL = 5866,
+    OpArbitraryFloatExpm1INTEL = 5867,
+    OpArbitraryFloatSinINTEL = 5868,
+    OpArbitraryFloatCosINTEL = 5869,
+    OpArbitraryFloatSinCosINTEL = 5870,
+    OpArbitraryFloatSinPiINTEL = 5871,
+    OpArbitraryFloatCosPiINTEL = 5872,
+    OpArbitraryFloatASinINTEL = 5873,
+    OpArbitraryFloatASinPiINTEL = 5874,
+    OpArbitraryFloatACosINTEL = 5875,
+    OpArbitraryFloatACosPiINTEL = 5876,
+    OpArbitraryFloatATanINTEL = 5877,
+    OpArbitraryFloatATanPiINTEL = 5878,
+    OpArbitraryFloatATan2INTEL = 5879,
+    OpArbitraryFloatPowINTEL = 5880,
+    OpArbitraryFloatPowRINTEL = 5881,
+    OpArbitraryFloatPowNINTEL = 5882,
+    OpLoopControlINTEL = 5887,
+    OpAliasDomainDeclINTEL = 5911,
+    OpAliasScopeDeclINTEL = 5912,
+    OpAliasScopeListDeclINTEL = 5913,
+    OpFixedSqrtINTEL = 5923,
+    OpFixedRecipINTEL = 5924,
+    OpFixedRsqrtINTEL = 5925,
+    OpFixedSinINTEL = 5926,
+    OpFixedCosINTEL = 5927,
+    OpFixedSinCosINTEL = 5928,
+    OpFixedSinPiINTEL = 5929,
+    OpFixedCosPiINTEL = 5930,
+    OpFixedSinCosPiINTEL = 5931,
+    OpFixedLogINTEL = 5932,
+    OpFixedExpINTEL = 5933,
+    OpPtrCastToCrossWorkgroupINTEL = 5934,
+    OpCrossWorkgroupCastToPtrINTEL = 5938,
+    OpReadPipeBlockingINTEL = 5946,
+    OpWritePipeBlockingINTEL = 5947,
+    OpFPGARegINTEL = 5949,
+    OpRayQueryGetRayTMinKHR = 6016,
+    OpRayQueryGetRayFlagsKHR = 6017,
+    OpRayQueryGetIntersectionTKHR = 6018,
+    OpRayQueryGetIntersectionInstanceCustomIndexKHR = 6019,
+    OpRayQueryGetIntersectionInstanceIdKHR = 6020,
+    OpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffsetKHR = 6021,
+    OpRayQueryGetIntersectionGeometryIndexKHR = 6022,
+    OpRayQueryGetIntersectionPrimitiveIndexKHR = 6023,
+    OpRayQueryGetIntersectionBarycentricsKHR = 6024,
+    OpRayQueryGetIntersectionFrontFaceKHR = 6025,
+    OpRayQueryGetIntersectionCandidateAABBOpaqueKHR = 6026,
+    OpRayQueryGetIntersectionObjectRayDirectionKHR = 6027,
+    OpRayQueryGetIntersectionObjectRayOriginKHR = 6028,
+    OpRayQueryGetWorldRayDirectionKHR = 6029,
+    OpRayQueryGetWorldRayOriginKHR = 6030,
+    OpRayQueryGetIntersectionObjectToWorldKHR = 6031,
+    OpRayQueryGetIntersectionWorldToObjectKHR = 6032,
+    OpAtomicFAddEXT = 6035,
+    OpTypeBufferSurfaceINTEL = 6086,
+    OpTypeStructContinuedINTEL = 6090,
+    OpConstantCompositeContinuedINTEL = 6091,
+    OpSpecConstantCompositeContinuedINTEL = 6092,
+    OpControlBarrierArriveINTEL = 6142,
+    OpControlBarrierWaitINTEL = 6143,
+    OpGroupIMulKHR = 6401,
+    OpGroupFMulKHR = 6402,
+    OpGroupBitwiseAndKHR = 6403,
+    OpGroupBitwiseOrKHR = 6404,
+    OpGroupBitwiseXorKHR = 6405,
+    OpGroupLogicalAndKHR = 6406,
+    OpGroupLogicalOrKHR = 6407,
+    OpGroupLogicalXorKHR = 6408,
+    OpMax = 0x7fffffff,
+};
+
+#ifdef SPV_ENABLE_UTILITY_CODE
+#ifndef __cplusplus
+#include <stdbool.h>
+#endif
+inline void HasResultAndType(Op opcode, bool *hasResult, bool *hasResultType) {
+    *hasResult = *hasResultType = false;
+    switch (opcode) {
+    default: /* unknown opcode */ break;
+    case OpNop: *hasResult = false; *hasResultType = false; break;
+    case OpUndef: *hasResult = true; *hasResultType = true; break;
+    case OpSourceContinued: *hasResult = false; *hasResultType = false; break;
+    case OpSource: *hasResult = false; *hasResultType = false; break;
+    case OpSourceExtension: *hasResult = false; *hasResultType = false; break;
+    case OpName: *hasResult = false; *hasResultType = false; break;
+    case OpMemberName: *hasResult = false; *hasResultType = false; break;
+    case OpString: *hasResult = true; *hasResultType = false; break;
+    case OpLine: *hasResult = false; *hasResultType = false; break;
+    case OpExtension: *hasResult = false; *hasResultType = false; break;
+    case OpExtInstImport: *hasResult = true; *hasResultType = false; break;
+    case OpExtInst: *hasResult = true; *hasResultType = true; break;
+    case OpMemoryModel: *hasResult = false; *hasResultType = false; break;
+    case OpEntryPoint: *hasResult = false; *hasResultType = false; break;
+    case OpExecutionMode: *hasResult = false; *hasResultType = false; break;
+    case OpCapability: *hasResult = false; *hasResultType = false; break;
+    case OpTypeVoid: *hasResult = true; *hasResultType = false; break;
+    case OpTypeBool: *hasResult = true; *hasResultType = false; break;
+    case OpTypeInt: *hasResult = true; *hasResultType = false; break;
+    case OpTypeFloat: *hasResult = true; *hasResultType = false; break;
+    case OpTypeVector: *hasResult = true; *hasResultType = false; break;
+    case OpTypeMatrix: *hasResult = true; *hasResultType = false; break;
+    case OpTypeImage: *hasResult = true; *hasResultType = false; break;
+    case OpTypeSampler: *hasResult = true; *hasResultType = false; break;
+    case OpTypeSampledImage: *hasResult = true; *hasResultType = false; break;
+    case OpTypeArray: *hasResult = true; *hasResultType = false; break;
+    case OpTypeRuntimeArray: *hasResult = true; *hasResultType = false; break;
+    case OpTypeStruct: *hasResult = true; *hasResultType = false; break;
+    case OpTypeOpaque: *hasResult = true; *hasResultType = false; break;
+    case OpTypePointer: *hasResult = true; *hasResultType = false; break;
+    case OpTypeFunction: *hasResult = true; *hasResultType = false; break;
+    case OpTypeEvent: *hasResult = true; *hasResultType = false; break;
+    case OpTypeDeviceEvent: *hasResult = true; *hasResultType = false; break;
+    case OpTypeReserveId: *hasResult = true; *hasResultType = false; break;
+    case OpTypeQueue: *hasResult = true; *hasResultType = false; break;
+    case OpTypePipe: *hasResult = true; *hasResultType = false; break;
+    case OpTypeForwardPointer: *hasResult = false; *hasResultType = false; break;
+    case OpConstantTrue: *hasResult = true; *hasResultType = true; break;
+    case OpConstantFalse: *hasResult = true; *hasResultType = true; break;
+    case OpConstant: *hasResult = true; *hasResultType = true; break;
+    case OpConstantComposite: *hasResult = true; *hasResultType = true; break;
+    case OpConstantSampler: *hasResult = true; *hasResultType = true; break;
+    case OpConstantNull: *hasResult = true; *hasResultType = true; break;
+    case OpSpecConstantTrue: *hasResult = true; *hasResultType = true; break;
+    case OpSpecConstantFalse: *hasResult = true; *hasResultType = true; break;
+    case OpSpecConstant: *hasResult = true; *hasResultType = true; break;
+    case OpSpecConstantComposite: *hasResult = true; *hasResultType = true; break;
+    case OpSpecConstantOp: *hasResult = true; *hasResultType = true; break;
+    case OpFunction: *hasResult = true; *hasResultType = true; break;
+    case OpFunctionParameter: *hasResult = true; *hasResultType = true; break;
+    case OpFunctionEnd: *hasResult = false; *hasResultType = false; break;
+    case OpFunctionCall: *hasResult = true; *hasResultType = true; break;
+    case OpVariable: *hasResult = true; *hasResultType = true; break;
+    case OpImageTexelPointer: *hasResult = true; *hasResultType = true; break;
+    case OpLoad: *hasResult = true; *hasResultType = true; break;
+    case OpStore: *hasResult = false; *hasResultType = false; break;
+    case OpCopyMemory: *hasResult = false; *hasResultType = false; break;
+    case OpCopyMemorySized: *hasResult = false; *hasResultType = false; break;
+    case OpAccessChain: *hasResult = true; *hasResultType = true; break;
+    case OpInBoundsAccessChain: *hasResult = true; *hasResultType = true; break;
+    case OpPtrAccessChain: *hasResult = true; *hasResultType = true; break;
+    case OpArrayLength: *hasResult = true; *hasResultType = true; break;
+    case OpGenericPtrMemSemantics: *hasResult = true; *hasResultType = true; break;
+    case OpInBoundsPtrAccessChain: *hasResult = true; *hasResultType = true; break;
+    case OpDecorate: *hasResult = false; *hasResultType = false; break;
+    case OpMemberDecorate: *hasResult = false; *hasResultType = false; break;
+    case OpDecorationGroup: *hasResult = true; *hasResultType = false; break;
+    case OpGroupDecorate: *hasResult = false; *hasResultType = false; break;
+    case OpGroupMemberDecorate: *hasResult = false; *hasResultType = false; break;
+    case OpVectorExtractDynamic: *hasResult = true; *hasResultType = true; break;
+    case OpVectorInsertDynamic: *hasResult = true; *hasResultType = true; break;
+    case OpVectorShuffle: *hasResult = true; *hasResultType = true; break;
+    case OpCompositeConstruct: *hasResult = true; *hasResultType = true; break;
+    case OpCompositeExtract: *hasResult = true; *hasResultType = true; break;
+    case OpCompositeInsert: *hasResult = true; *hasResultType = true; break;
+    case OpCopyObject: *hasResult = true; *hasResultType = true; break;
+    case OpTranspose: *hasResult = true; *hasResultType = true; break;
+    case OpSampledImage: *hasResult = true; *hasResultType = true; break;
+    case OpImageSampleImplicitLod: *hasResult = true; *hasResultType = true; break;
+    case OpImageSampleExplicitLod: *hasResult = true; *hasResultType = true; break;
+    case OpImageSampleDrefImplicitLod: *hasResult = true; *hasResultType = true; break;
+    case OpImageSampleDrefExplicitLod: *hasResult = true; *hasResultType = true; break;
+    case OpImageSampleProjImplicitLod: *hasResult = true; *hasResultType = true; break;
+    case OpImageSampleProjExplicitLod: *hasResult = true; *hasResultType = true; break;
+    case OpImageSampleProjDrefImplicitLod: *hasResult = true; *hasResultType = true; break;
+    case OpImageSampleProjDrefExplicitLod: *hasResult = true; *hasResultType = true; break;
+    case OpImageFetch: *hasResult = true; *hasResultType = true; break;
+    case OpImageGather: *hasResult = true; *hasResultType = true; break;
+    case OpImageDrefGather: *hasResult = true; *hasResultType = true; break;
+    case OpImageRead: *hasResult = true; *hasResultType = true; break;
+    case OpImageWrite: *hasResult = false; *hasResultType = false; break;
+    case OpImage: *hasResult = true; *hasResultType = true; break;
+    case OpImageQueryFormat: *hasResult = true; *hasResultType = true; break;
+    case OpImageQueryOrder: *hasResult = true; *hasResultType = true; break;
+    case OpImageQuerySizeLod: *hasResult = true; *hasResultType = true; break;
+    case OpImageQuerySize: *hasResult = true; *hasResultType = true; break;
+    case OpImageQueryLod: *hasResult = true; *hasResultType = true; break;
+    case OpImageQueryLevels: *hasResult = true; *hasResultType = true; break;
+    case OpImageQuerySamples: *hasResult = true; *hasResultType = true; break;
+    case OpConvertFToU: *hasResult = true; *hasResultType = true; break;
+    case OpConvertFToS: *hasResult = true; *hasResultType = true; break;
+    case OpConvertSToF: *hasResult = true; *hasResultType = true; break;
+    case OpConvertUToF: *hasResult = true; *hasResultType = true; break;
+    case OpUConvert: *hasResult = true; *hasResultType = true; break;
+    case OpSConvert: *hasResult = true; *hasResultType = true; break;
+    case OpFConvert: *hasResult = true; *hasResultType = true; break;
+    case OpQuantizeToF16: *hasResult = true; *hasResultType = true; break;
+    case OpConvertPtrToU: *hasResult = true; *hasResultType = true; break;
+    case OpSatConvertSToU: *hasResult = true; *hasResultType = true; break;
+    case OpSatConvertUToS: *hasResult = true; *hasResultType = true; break;
+    case OpConvertUToPtr: *hasResult = true; *hasResultType = true; break;
+    case OpPtrCastToGeneric: *hasResult = true; *hasResultType = true; break;
+    case OpGenericCastToPtr: *hasResult = true; *hasResultType = true; break;
+    case OpGenericCastToPtrExplicit: *hasResult = true; *hasResultType = true; break;
+    case OpBitcast: *hasResult = true; *hasResultType = true; break;
+    case OpSNegate: *hasResult = true; *hasResultType = true; break;
+    case OpFNegate: *hasResult = true; *hasResultType = true; break;
+    case OpIAdd: *hasResult = true; *hasResultType = true; break;
+    case OpFAdd: *hasResult = true; *hasResultType = true; break;
+    case OpISub: *hasResult = true; *hasResultType = true; break;
+    case OpFSub: *hasResult = true; *hasResultType = true; break;
+    case OpIMul: *hasResult = true; *hasResultType = true; break;
+    case OpFMul: *hasResult = true; *hasResultType = true; break;
+    case OpUDiv: *hasResult = true; *hasResultType = true; break;
+    case OpSDiv: *hasResult = true; *hasResultType = true; break;
+    case OpFDiv: *hasResult = true; *hasResultType = true; break;
+    case OpUMod: *hasResult = true; *hasResultType = true; break;
+    case OpSRem: *hasResult = true; *hasResultType = true; break;
+    case OpSMod: *hasResult = true; *hasResultType = true; break;
+    case OpFRem: *hasResult = true; *hasResultType = true; break;
+    case OpFMod: *hasResult = true; *hasResultType = true; break;
+    case OpVectorTimesScalar: *hasResult = true; *hasResultType = true; break;
+    case OpMatrixTimesScalar: *hasResult = true; *hasResultType = true; break;
+    case OpVectorTimesMatrix: *hasResult = true; *hasResultType = true; break;
+    case OpMatrixTimesVector: *hasResult = true; *hasResultType = true; break;
+    case OpMatrixTimesMatrix: *hasResult = true; *hasResultType = true; break;
+    case OpOuterProduct: *hasResult = true; *hasResultType = true; break;
+    case OpDot: *hasResult = true; *hasResultType = true; break;
+    case OpIAddCarry: *hasResult = true; *hasResultType = true; break;
+    case OpISubBorrow: *hasResult = true; *hasResultType = true; break;
+    case OpUMulExtended: *hasResult = true; *hasResultType = true; break;
+    case OpSMulExtended: *hasResult = true; *hasResultType = true; break;
+    case OpAny: *hasResult = true; *hasResultType = true; break;
+    case OpAll: *hasResult = true; *hasResultType = true; break;
+    case OpIsNan: *hasResult = true; *hasResultType = true; break;
+    case OpIsInf: *hasResult = true; *hasResultType = true; break;
+    case OpIsFinite: *hasResult = true; *hasResultType = true; break;
+    case OpIsNormal: *hasResult = true; *hasResultType = true; break;
+    case OpSignBitSet: *hasResult = true; *hasResultType = true; break;
+    case OpLessOrGreater: *hasResult = true; *hasResultType = true; break;
+    case OpOrdered: *hasResult = true; *hasResultType = true; break;
+    case OpUnordered: *hasResult = true; *hasResultType = true; break;
+    case OpLogicalEqual: *hasResult = true; *hasResultType = true; break;
+    case OpLogicalNotEqual: *hasResult = true; *hasResultType = true; break;
+    case OpLogicalOr: *hasResult = true; *hasResultType = true; break;
+    case OpLogicalAnd: *hasResult = true; *hasResultType = true; break;
+    case OpLogicalNot: *hasResult = true; *hasResultType = true; break;
+    case OpSelect: *hasResult = true; *hasResultType = true; break;
+    case OpIEqual: *hasResult = true; *hasResultType = true; break;
+    case OpINotEqual: *hasResult = true; *hasResultType = true; break;
+    case OpUGreaterThan: *hasResult = true; *hasResultType = true; break;
+    case OpSGreaterThan: *hasResult = true; *hasResultType = true; break;
+    case OpUGreaterThanEqual: *hasResult = true; *hasResultType = true; break;
+    case OpSGreaterThanEqual: *hasResult = true; *hasResultType = true; break;
+    case OpULessThan: *hasResult = true; *hasResultType = true; break;
+    case OpSLessThan: *hasResult = true; *hasResultType = true; break;
+    case OpULessThanEqual: *hasResult = true; *hasResultType = true; break;
+    case OpSLessThanEqual: *hasResult = true; *hasResultType = true; break;
+    case OpFOrdEqual: *hasResult = true; *hasResultType = true; break;
+    case OpFUnordEqual: *hasResult = true; *hasResultType = true; break;
+    case OpFOrdNotEqual: *hasResult = true; *hasResultType = true; break;
+    case OpFUnordNotEqual: *hasResult = true; *hasResultType = true; break;
+    case OpFOrdLessThan: *hasResult = true; *hasResultType = true; break;
+    case OpFUnordLessThan: *hasResult = true; *hasResultType = true; break;
+    case OpFOrdGreaterThan: *hasResult = true; *hasResultType = true; break;
+    case OpFUnordGreaterThan: *hasResult = true; *hasResultType = true; break;
+    case OpFOrdLessThanEqual: *hasResult = true; *hasResultType = true; break;
+    case OpFUnordLessThanEqual: *hasResult = true; *hasResultType = true; break;
+    case OpFOrdGreaterThanEqual: *hasResult = true; *hasResultType = true; break;
+    case OpFUnordGreaterThanEqual: *hasResult = true; *hasResultType = true; break;
+    case OpShiftRightLogical: *hasResult = true; *hasResultType = true; break;
+    case OpShiftRightArithmetic: *hasResult = true; *hasResultType = true; break;
+    case OpShiftLeftLogical: *hasResult = true; *hasResultType = true; break;
+    case OpBitwiseOr: *hasResult = true; *hasResultType = true; break;
+    case OpBitwiseXor: *hasResult = true; *hasResultType = true; break;
+    case OpBitwiseAnd: *hasResult = true; *hasResultType = true; break;
+    case OpNot: *hasResult = true; *hasResultType = true; break;
+    case OpBitFieldInsert: *hasResult = true; *hasResultType = true; break;
+    case OpBitFieldSExtract: *hasResult = true; *hasResultType = true; break;
+    case OpBitFieldUExtract: *hasResult = true; *hasResultType = true; break;
+    case OpBitReverse: *hasResult = true; *hasResultType = true; break;
+    case OpBitCount: *hasResult = true; *hasResultType = true; break;
+    case OpDPdx: *hasResult = true; *hasResultType = true; break;
+    case OpDPdy: *hasResult = true; *hasResultType = true; break;
+    case OpFwidth: *hasResult = true; *hasResultType = true; break;
+    case OpDPdxFine: *hasResult = true; *hasResultType = true; break;
+    case OpDPdyFine: *hasResult = true; *hasResultType = true; break;
+    case OpFwidthFine: *hasResult = true; *hasResultType = true; break;
+    case OpDPdxCoarse: *hasResult = true; *hasResultType = true; break;
+    case OpDPdyCoarse: *hasResult = true; *hasResultType = true; break;
+    case OpFwidthCoarse: *hasResult = true; *hasResultType = true; break;
+    case OpEmitVertex: *hasResult = false; *hasResultType = false; break;
+    case OpEndPrimitive: *hasResult = false; *hasResultType = false; break;
+    case OpEmitStreamVertex: *hasResult = false; *hasResultType = false; break;
+    case OpEndStreamPrimitive: *hasResult = false; *hasResultType = false; break;
+    case OpControlBarrier: *hasResult = false; *hasResultType = false; break;
+    case OpMemoryBarrier: *hasResult = false; *hasResultType = false; break;
+    case OpAtomicLoad: *hasResult = true; *hasResultType = true; break;
+    case OpAtomicStore: *hasResult = false; *hasResultType = false; break;
+    case OpAtomicExchange: *hasResult = true; *hasResultType = true; break;
+    case OpAtomicCompareExchange: *hasResult = true; *hasResultType = true; break;
+    case OpAtomicCompareExchangeWeak: *hasResult = true; *hasResultType = true; break;
+    case OpAtomicIIncrement: *hasResult = true; *hasResultType = true; break;
+    case OpAtomicIDecrement: *hasResult = true; *hasResultType = true; break;
+    case OpAtomicIAdd: *hasResult = true; *hasResultType = true; break;
+    case OpAtomicISub: *hasResult = true; *hasResultType = true; break;
+    case OpAtomicSMin: *hasResult = true; *hasResultType = true; break;
+    case OpAtomicUMin: *hasResult = true; *hasResultType = true; break;
+    case OpAtomicSMax: *hasResult = true; *hasResultType = true; break;
+    case OpAtomicUMax: *hasResult = true; *hasResultType = true; break;
+    case OpAtomicAnd: *hasResult = true; *hasResultType = true; break;
+    case OpAtomicOr: *hasResult = true; *hasResultType = true; break;
+    case OpAtomicXor: *hasResult = true; *hasResultType = true; break;
+    case OpPhi: *hasResult = true; *hasResultType = true; break;
+    case OpLoopMerge: *hasResult = false; *hasResultType = false; break;
+    case OpSelectionMerge: *hasResult = false; *hasResultType = false; break;
+    case OpLabel: *hasResult = true; *hasResultType = false; break;
+    case OpBranch: *hasResult = false; *hasResultType = false; break;
+    case OpBranchConditional: *hasResult = false; *hasResultType = false; break;
+    case OpSwitch: *hasResult = false; *hasResultType = false; break;
+    case OpKill: *hasResult = false; *hasResultType = false; break;
+    case OpReturn: *hasResult = false; *hasResultType = false; break;
+    case OpReturnValue: *hasResult = false; *hasResultType = false; break;
+    case OpUnreachable: *hasResult = false; *hasResultType = false; break;
+    case OpLifetimeStart: *hasResult = false; *hasResultType = false; break;
+    case OpLifetimeStop: *hasResult = false; *hasResultType = false; break;
+    case OpGroupAsyncCopy: *hasResult = true; *hasResultType = true; break;
+    case OpGroupWaitEvents: *hasResult = false; *hasResultType = false; break;
+    case OpGroupAll: *hasResult = true; *hasResultType = true; break;
+    case OpGroupAny: *hasResult = true; *hasResultType = true; break;
+    case OpGroupBroadcast: *hasResult = true; *hasResultType = true; break;
+    case OpGroupIAdd: *hasResult = true; *hasResultType = true; break;
+    case OpGroupFAdd: *hasResult = true; *hasResultType = true; break;
+    case OpGroupFMin: *hasResult = true; *hasResultType = true; break;
+    case OpGroupUMin: *hasResult = true; *hasResultType = true; break;
+    case OpGroupSMin: *hasResult = true; *hasResultType = true; break;
+    case OpGroupFMax: *hasResult = true; *hasResultType = true; break;
+    case OpGroupUMax: *hasResult = true; *hasResultType = true; break;
+    case OpGroupSMax: *hasResult = true; *hasResultType = true; break;
+    case OpReadPipe: *hasResult = true; *hasResultType = true; break;
+    case OpWritePipe: *hasResult = true; *hasResultType = true; break;
+    case OpReservedReadPipe: *hasResult = true; *hasResultType = true; break;
+    case OpReservedWritePipe: *hasResult = true; *hasResultType = true; break;
+    case OpReserveReadPipePackets: *hasResult = true; *hasResultType = true; break;
+    case OpReserveWritePipePackets: *hasResult = true; *hasResultType = true; break;
+    case OpCommitReadPipe: *hasResult = false; *hasResultType = false; break;
+    case OpCommitWritePipe: *hasResult = false; *hasResultType = false; break;
+    case OpIsValidReserveId: *hasResult = true; *hasResultType = true; break;
+    case OpGetNumPipePackets: *hasResult = true; *hasResultType = true; break;
+    case OpGetMaxPipePackets: *hasResult = true; *hasResultType = true; break;
+    case OpGroupReserveReadPipePackets: *hasResult = true; *hasResultType = true; break;
+    case OpGroupReserveWritePipePackets: *hasResult = true; *hasResultType = true; break;
+    case OpGroupCommitReadPipe: *hasResult = false; *hasResultType = false; break;
+    case OpGroupCommitWritePipe: *hasResult = false; *hasResultType = false; break;
+    case OpEnqueueMarker: *hasResult = true; *hasResultType = true; break;
+    case OpEnqueueKernel: *hasResult = true; *hasResultType = true; break;
+    case OpGetKernelNDrangeSubGroupCount: *hasResult = true; *hasResultType = true; break;
+    case OpGetKernelNDrangeMaxSubGroupSize: *hasResult = true; *hasResultType = true; break;
+    case OpGetKernelWorkGroupSize: *hasResult = true; *hasResultType = true; break;
+    case OpGetKernelPreferredWorkGroupSizeMultiple: *hasResult = true; *hasResultType = true; break;
+    case OpRetainEvent: *hasResult = false; *hasResultType = false; break;
+    case OpReleaseEvent: *hasResult = false; *hasResultType = false; break;
+    case OpCreateUserEvent: *hasResult = true; *hasResultType = true; break;
+    case OpIsValidEvent: *hasResult = true; *hasResultType = true; break;
+    case OpSetUserEventStatus: *hasResult = false; *hasResultType = false; break;
+    case OpCaptureEventProfilingInfo: *hasResult = false; *hasResultType = false; break;
+    case OpGetDefaultQueue: *hasResult = true; *hasResultType = true; break;
+    case OpBuildNDRange: *hasResult = true; *hasResultType = true; break;
+    case OpImageSparseSampleImplicitLod: *hasResult = true; *hasResultType = true; break;
+    case OpImageSparseSampleExplicitLod: *hasResult = true; *hasResultType = true; break;
+    case OpImageSparseSampleDrefImplicitLod: *hasResult = true; *hasResultType = true; break;
+    case OpImageSparseSampleDrefExplicitLod: *hasResult = true; *hasResultType = true; break;
+    case OpImageSparseSampleProjImplicitLod: *hasResult = true; *hasResultType = true; break;
+    case OpImageSparseSampleProjExplicitLod: *hasResult = true; *hasResultType = true; break;
+    case OpImageSparseSampleProjDrefImplicitLod: *hasResult = true; *hasResultType = true; break;
+    case OpImageSparseSampleProjDrefExplicitLod: *hasResult = true; *hasResultType = true; break;
+    case OpImageSparseFetch: *hasResult = true; *hasResultType = true; break;
+    case OpImageSparseGather: *hasResult = true; *hasResultType = true; break;
+    case OpImageSparseDrefGather: *hasResult = true; *hasResultType = true; break;
+    case OpImageSparseTexelsResident: *hasResult = true; *hasResultType = true; break;
+    case OpNoLine: *hasResult = false; *hasResultType = false; break;
+    case OpAtomicFlagTestAndSet: *hasResult = true; *hasResultType = true; break;
+    case OpAtomicFlagClear: *hasResult = false; *hasResultType = false; break;
+    case OpImageSparseRead: *hasResult = true; *hasResultType = true; break;
+    case OpSizeOf: *hasResult = true; *hasResultType = true; break;
+    case OpTypePipeStorage: *hasResult = true; *hasResultType = false; break;
+    case OpConstantPipeStorage: *hasResult = true; *hasResultType = true; break;
+    case OpCreatePipeFromPipeStorage: *hasResult = true; *hasResultType = true; break;
+    case OpGetKernelLocalSizeForSubgroupCount: *hasResult = true; *hasResultType = true; break;
+    case OpGetKernelMaxNumSubgroups: *hasResult = true; *hasResultType = true; break;
+    case OpTypeNamedBarrier: *hasResult = true; *hasResultType = false; break;
+    case OpNamedBarrierInitialize: *hasResult = true; *hasResultType = true; break;
+    case OpMemoryNamedBarrier: *hasResult = false; *hasResultType = false; break;
+    case OpModuleProcessed: *hasResult = false; *hasResultType = false; break;
+    case OpExecutionModeId: *hasResult = false; *hasResultType = false; break;
+    case OpDecorateId: *hasResult = false; *hasResultType = false; break;
+    case OpGroupNonUniformElect: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformAll: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformAny: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformAllEqual: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformBroadcast: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformBroadcastFirst: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformBallot: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformInverseBallot: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformBallotBitExtract: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformBallotBitCount: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformBallotFindLSB: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformBallotFindMSB: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformShuffle: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformShuffleXor: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformShuffleUp: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformShuffleDown: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformIAdd: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformFAdd: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformIMul: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformFMul: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformSMin: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformUMin: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformFMin: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformSMax: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformUMax: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformFMax: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformBitwiseAnd: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformBitwiseOr: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformBitwiseXor: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformLogicalAnd: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformLogicalOr: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformLogicalXor: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformQuadBroadcast: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformQuadSwap: *hasResult = true; *hasResultType = true; break;
+    case OpCopyLogical: *hasResult = true; *hasResultType = true; break;
+    case OpPtrEqual: *hasResult = true; *hasResultType = true; break;
+    case OpPtrNotEqual: *hasResult = true; *hasResultType = true; break;
+    case OpPtrDiff: *hasResult = true; *hasResultType = true; break;
+    case OpTerminateInvocation: *hasResult = false; *hasResultType = false; break;
+    case OpSubgroupBallotKHR: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupFirstInvocationKHR: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAllKHR: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAnyKHR: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAllEqualKHR: *hasResult = true; *hasResultType = true; break;
+    case OpGroupNonUniformRotateKHR: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupReadInvocationKHR: *hasResult = true; *hasResultType = true; break;
+    case OpTraceRayKHR: *hasResult = false; *hasResultType = false; break;
+    case OpExecuteCallableKHR: *hasResult = false; *hasResultType = false; break;
+    case OpConvertUToAccelerationStructureKHR: *hasResult = true; *hasResultType = true; break;
+    case OpIgnoreIntersectionKHR: *hasResult = false; *hasResultType = false; break;
+    case OpTerminateRayKHR: *hasResult = false; *hasResultType = false; break;
+    case OpSDot: *hasResult = true; *hasResultType = true; break;
+    case OpUDot: *hasResult = true; *hasResultType = true; break;
+    case OpSUDot: *hasResult = true; *hasResultType = true; break;
+    case OpSDotAccSat: *hasResult = true; *hasResultType = true; break;
+    case OpUDotAccSat: *hasResult = true; *hasResultType = true; break;
+    case OpSUDotAccSat: *hasResult = true; *hasResultType = true; break;
+    case OpTypeRayQueryKHR: *hasResult = true; *hasResultType = false; break;
+    case OpRayQueryInitializeKHR: *hasResult = false; *hasResultType = false; break;
+    case OpRayQueryTerminateKHR: *hasResult = false; *hasResultType = false; break;
+    case OpRayQueryGenerateIntersectionKHR: *hasResult = false; *hasResultType = false; break;
+    case OpRayQueryConfirmIntersectionKHR: *hasResult = false; *hasResultType = false; break;
+    case OpRayQueryProceedKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetIntersectionTypeKHR: *hasResult = true; *hasResultType = true; break;
+    case OpGroupIAddNonUniformAMD: *hasResult = true; *hasResultType = true; break;
+    case OpGroupFAddNonUniformAMD: *hasResult = true; *hasResultType = true; break;
+    case OpGroupFMinNonUniformAMD: *hasResult = true; *hasResultType = true; break;
+    case OpGroupUMinNonUniformAMD: *hasResult = true; *hasResultType = true; break;
+    case OpGroupSMinNonUniformAMD: *hasResult = true; *hasResultType = true; break;
+    case OpGroupFMaxNonUniformAMD: *hasResult = true; *hasResultType = true; break;
+    case OpGroupUMaxNonUniformAMD: *hasResult = true; *hasResultType = true; break;
+    case OpGroupSMaxNonUniformAMD: *hasResult = true; *hasResultType = true; break;
+    case OpFragmentMaskFetchAMD: *hasResult = true; *hasResultType = true; break;
+    case OpFragmentFetchAMD: *hasResult = true; *hasResultType = true; break;
+    case OpReadClockKHR: *hasResult = true; *hasResultType = true; break;
+    case OpImageSampleFootprintNV: *hasResult = true; *hasResultType = true; break;
+    case OpEmitMeshTasksEXT: *hasResult = false; *hasResultType = false; break;
+    case OpSetMeshOutputsEXT: *hasResult = false; *hasResultType = false; break;
+    case OpGroupNonUniformPartitionNV: *hasResult = true; *hasResultType = true; break;
+    case OpWritePackedPrimitiveIndices4x8NV: *hasResult = false; *hasResultType = false; break;
+    case OpReportIntersectionNV: *hasResult = true; *hasResultType = true; break;
+    case OpIgnoreIntersectionNV: *hasResult = false; *hasResultType = false; break;
+    case OpTerminateRayNV: *hasResult = false; *hasResultType = false; break;
+    case OpTraceNV: *hasResult = false; *hasResultType = false; break;
+    case OpTraceMotionNV: *hasResult = false; *hasResultType = false; break;
+    case OpTraceRayMotionNV: *hasResult = false; *hasResultType = false; break;
+    case OpTypeAccelerationStructureNV: *hasResult = true; *hasResultType = false; break;
+    case OpExecuteCallableNV: *hasResult = false; *hasResultType = false; break;
+    case OpTypeCooperativeMatrixNV: *hasResult = true; *hasResultType = false; break;
+    case OpCooperativeMatrixLoadNV: *hasResult = true; *hasResultType = true; break;
+    case OpCooperativeMatrixStoreNV: *hasResult = false; *hasResultType = false; break;
+    case OpCooperativeMatrixMulAddNV: *hasResult = true; *hasResultType = true; break;
+    case OpCooperativeMatrixLengthNV: *hasResult = true; *hasResultType = true; break;
+    case OpBeginInvocationInterlockEXT: *hasResult = false; *hasResultType = false; break;
+    case OpEndInvocationInterlockEXT: *hasResult = false; *hasResultType = false; break;
+    case OpDemoteToHelperInvocation: *hasResult = false; *hasResultType = false; break;
+    case OpIsHelperInvocationEXT: *hasResult = true; *hasResultType = true; break;
+    case OpConvertUToImageNV: *hasResult = true; *hasResultType = true; break;
+    case OpConvertUToSamplerNV: *hasResult = true; *hasResultType = true; break;
+    case OpConvertImageToUNV: *hasResult = true; *hasResultType = true; break;
+    case OpConvertSamplerToUNV: *hasResult = true; *hasResultType = true; break;
+    case OpConvertUToSampledImageNV: *hasResult = true; *hasResultType = true; break;
+    case OpConvertSampledImageToUNV: *hasResult = true; *hasResultType = true; break;
+    case OpSamplerImageAddressingModeNV: *hasResult = false; *hasResultType = false; break;
+    case OpSubgroupShuffleINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupShuffleDownINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupShuffleUpINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupShuffleXorINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupBlockReadINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupBlockWriteINTEL: *hasResult = false; *hasResultType = false; break;
+    case OpSubgroupImageBlockReadINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupImageBlockWriteINTEL: *hasResult = false; *hasResultType = false; break;
+    case OpSubgroupImageMediaBlockReadINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupImageMediaBlockWriteINTEL: *hasResult = false; *hasResultType = false; break;
+    case OpUCountLeadingZerosINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpUCountTrailingZerosINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpAbsISubINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpAbsUSubINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpIAddSatINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpUAddSatINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpIAverageINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpUAverageINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpIAverageRoundedINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpUAverageRoundedINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpISubSatINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpUSubSatINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpIMul32x16INTEL: *hasResult = true; *hasResultType = true; break;
+    case OpUMul32x16INTEL: *hasResult = true; *hasResultType = true; break;
+    case OpConstantFunctionPointerINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpFunctionPointerCallINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpAsmTargetINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpAsmINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpAsmCallINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpAtomicFMinEXT: *hasResult = true; *hasResultType = true; break;
+    case OpAtomicFMaxEXT: *hasResult = true; *hasResultType = true; break;
+    case OpAssumeTrueKHR: *hasResult = false; *hasResultType = false; break;
+    case OpExpectKHR: *hasResult = true; *hasResultType = true; break;
+    case OpDecorateString: *hasResult = false; *hasResultType = false; break;
+    case OpMemberDecorateString: *hasResult = false; *hasResultType = false; break;
+    case OpVmeImageINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpTypeVmeImageINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpTypeAvcImePayloadINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpTypeAvcRefPayloadINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpTypeAvcSicPayloadINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpTypeAvcMcePayloadINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpTypeAvcMceResultINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpTypeAvcImeResultINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpTypeAvcImeResultSingleReferenceStreamoutINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpTypeAvcImeResultDualReferenceStreamoutINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpTypeAvcImeSingleReferenceStreaminINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpTypeAvcImeDualReferenceStreaminINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpTypeAvcRefResultINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpTypeAvcSicResultINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpSubgroupAvcMceGetDefaultInterBaseMultiReferencePenaltyINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceSetInterBaseMultiReferencePenaltyINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetDefaultInterShapePenaltyINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceSetInterShapePenaltyINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetDefaultInterDirectionPenaltyINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceSetInterDirectionPenaltyINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetDefaultIntraLumaShapePenaltyINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetDefaultInterMotionVectorCostTableINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetDefaultHighPenaltyCostTableINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetDefaultMediumPenaltyCostTableINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetDefaultLowPenaltyCostTableINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceSetMotionVectorCostFunctionINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetDefaultIntraLumaModePenaltyINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetDefaultNonDcLumaIntraPenaltyINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetDefaultIntraChromaModeBasePenaltyINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceSetAcOnlyHaarINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceSetSourceInterlacedFieldPolarityINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceSetSingleReferenceInterlacedFieldPolarityINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceSetDualReferenceInterlacedFieldPolaritiesINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceConvertToImePayloadINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceConvertToImeResultINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceConvertToRefPayloadINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceConvertToRefResultINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceConvertToSicPayloadINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceConvertToSicResultINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetMotionVectorsINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetInterDistortionsINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetBestInterDistortionsINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetInterMajorShapeINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetInterMinorShapeINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetInterDirectionsINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetInterMotionVectorCountINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetInterReferenceIdsINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcMceGetInterReferenceInterlacedFieldPolaritiesINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeInitializeINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeSetSingleReferenceINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeSetDualReferenceINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeRefWindowSizeINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeAdjustRefOffsetINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeConvertToMcePayloadINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeSetMaxMotionVectorCountINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeSetUnidirectionalMixDisableINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeSetEarlySearchTerminationThresholdINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeSetWeightedSadINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeEvaluateWithSingleReferenceINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeEvaluateWithDualReferenceINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeEvaluateWithSingleReferenceStreaminINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeEvaluateWithDualReferenceStreaminINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeEvaluateWithSingleReferenceStreamoutINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeEvaluateWithDualReferenceStreamoutINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeEvaluateWithSingleReferenceStreaminoutINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeEvaluateWithDualReferenceStreaminoutINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeConvertToMceResultINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeGetSingleReferenceStreaminINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeGetDualReferenceStreaminINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeStripSingleReferenceStreamoutINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeStripDualReferenceStreamoutINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeMotionVectorsINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeDistortionsINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeGetStreamoutSingleReferenceMajorShapeReferenceIdsINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeMotionVectorsINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeDistortionsINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeGetStreamoutDualReferenceMajorShapeReferenceIdsINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeGetBorderReachedINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeGetTruncatedSearchIndicationINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeGetUnidirectionalEarlySearchTerminationINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeGetWeightingPatternMinimumMotionVectorINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcImeGetWeightingPatternMinimumDistortionINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcFmeInitializeINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcBmeInitializeINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcRefConvertToMcePayloadINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcRefSetBidirectionalMixDisableINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcRefSetBilinearFilterEnableINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcRefEvaluateWithSingleReferenceINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcRefEvaluateWithDualReferenceINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcRefEvaluateWithMultiReferenceINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcRefEvaluateWithMultiReferenceInterlacedINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcRefConvertToMceResultINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicInitializeINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicConfigureSkcINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicConfigureIpeLumaINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicConfigureIpeLumaChromaINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicGetMotionVectorMaskINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicConvertToMcePayloadINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicSetIntraLumaShapePenaltyINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicSetIntraLumaModeCostFunctionINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicSetIntraChromaModeCostFunctionINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicSetBilinearFilterEnableINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicSetSkcForwardTransformEnableINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicSetBlockBasedRawSkipSadINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicEvaluateIpeINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicEvaluateWithSingleReferenceINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicEvaluateWithDualReferenceINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicEvaluateWithMultiReferenceINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicEvaluateWithMultiReferenceInterlacedINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicConvertToMceResultINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicGetIpeLumaShapeINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicGetBestIpeLumaDistortionINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicGetBestIpeChromaDistortionINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicGetPackedIpeLumaModesINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicGetIpeChromaModeINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicGetPackedSkcLumaCountThresholdINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicGetPackedSkcLumaSumThresholdINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSubgroupAvcSicGetInterRawSadsINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpVariableLengthArrayINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpSaveMemoryINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpRestoreMemoryINTEL: *hasResult = false; *hasResultType = false; break;
+    case OpArbitraryFloatSinCosPiINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatCastINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatCastFromIntINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatCastToIntINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatAddINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatSubINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatMulINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatDivINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatGTINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatGEINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatLTINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatLEINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatEQINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatRecipINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatRSqrtINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatCbrtINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatHypotINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatSqrtINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatLogINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatLog2INTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatLog10INTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatLog1pINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatExpINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatExp2INTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatExp10INTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatExpm1INTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatSinINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatCosINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatSinCosINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatSinPiINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatCosPiINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatASinINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatASinPiINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatACosINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatACosPiINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatATanINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatATanPiINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatATan2INTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatPowINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatPowRINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpArbitraryFloatPowNINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpLoopControlINTEL: *hasResult = false; *hasResultType = false; break;
+    case OpAliasDomainDeclINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpAliasScopeDeclINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpAliasScopeListDeclINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpFixedSqrtINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpFixedRecipINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpFixedRsqrtINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpFixedSinINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpFixedCosINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpFixedSinCosINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpFixedSinPiINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpFixedCosPiINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpFixedSinCosPiINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpFixedLogINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpFixedExpINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpPtrCastToCrossWorkgroupINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpCrossWorkgroupCastToPtrINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpReadPipeBlockingINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpWritePipeBlockingINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpFPGARegINTEL: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetRayTMinKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetRayFlagsKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetIntersectionTKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetIntersectionInstanceCustomIndexKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetIntersectionInstanceIdKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffsetKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetIntersectionGeometryIndexKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetIntersectionPrimitiveIndexKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetIntersectionBarycentricsKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetIntersectionFrontFaceKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetIntersectionCandidateAABBOpaqueKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetIntersectionObjectRayDirectionKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetIntersectionObjectRayOriginKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetWorldRayDirectionKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetWorldRayOriginKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetIntersectionObjectToWorldKHR: *hasResult = true; *hasResultType = true; break;
+    case OpRayQueryGetIntersectionWorldToObjectKHR: *hasResult = true; *hasResultType = true; break;
+    case OpAtomicFAddEXT: *hasResult = true; *hasResultType = true; break;
+    case OpTypeBufferSurfaceINTEL: *hasResult = true; *hasResultType = false; break;
+    case OpTypeStructContinuedINTEL: *hasResult = false; *hasResultType = false; break;
+    case OpConstantCompositeContinuedINTEL: *hasResult = false; *hasResultType = false; break;
+    case OpSpecConstantCompositeContinuedINTEL: *hasResult = false; *hasResultType = false; break;
+    case OpControlBarrierArriveINTEL: *hasResult = false; *hasResultType = false; break;
+    case OpControlBarrierWaitINTEL: *hasResult = false; *hasResultType = false; break;
+    case OpGroupIMulKHR: *hasResult = true; *hasResultType = true; break;
+    case OpGroupFMulKHR: *hasResult = true; *hasResultType = true; break;
+    case OpGroupBitwiseAndKHR: *hasResult = true; *hasResultType = true; break;
+    case OpGroupBitwiseOrKHR: *hasResult = true; *hasResultType = true; break;
+    case OpGroupBitwiseXorKHR: *hasResult = true; *hasResultType = true; break;
+    case OpGroupLogicalAndKHR: *hasResult = true; *hasResultType = true; break;
+    case OpGroupLogicalOrKHR: *hasResult = true; *hasResultType = true; break;
+    case OpGroupLogicalXorKHR: *hasResult = true; *hasResultType = true; break;
+    }
+}
+#endif /* SPV_ENABLE_UTILITY_CODE */
+
+// Overload operator| for mask bit combining
+
+inline ImageOperandsMask operator|(ImageOperandsMask a, ImageOperandsMask b) { return ImageOperandsMask(unsigned(a) | unsigned(b)); }
+inline FPFastMathModeMask operator|(FPFastMathModeMask a, FPFastMathModeMask b) { return FPFastMathModeMask(unsigned(a) | unsigned(b)); }
+inline SelectionControlMask operator|(SelectionControlMask a, SelectionControlMask b) { return SelectionControlMask(unsigned(a) | unsigned(b)); }
+inline LoopControlMask operator|(LoopControlMask a, LoopControlMask b) { return LoopControlMask(unsigned(a) | unsigned(b)); }
+inline FunctionControlMask operator|(FunctionControlMask a, FunctionControlMask b) { return FunctionControlMask(unsigned(a) | unsigned(b)); }
+inline MemorySemanticsMask operator|(MemorySemanticsMask a, MemorySemanticsMask b) { return MemorySemanticsMask(unsigned(a) | unsigned(b)); }
+inline MemoryAccessMask operator|(MemoryAccessMask a, MemoryAccessMask b) { return MemoryAccessMask(unsigned(a) | unsigned(b)); }
+inline KernelProfilingInfoMask operator|(KernelProfilingInfoMask a, KernelProfilingInfoMask b) { return KernelProfilingInfoMask(unsigned(a) | unsigned(b)); }
+inline RayFlagsMask operator|(RayFlagsMask a, RayFlagsMask b) { return RayFlagsMask(unsigned(a) | unsigned(b)); }
+inline FragmentShadingRateMask operator|(FragmentShadingRateMask a, FragmentShadingRateMask b) { return FragmentShadingRateMask(unsigned(a) | unsigned(b)); }
+
+}  // end namespace spv
+
+#endif  // #ifndef spirv_HPP
+
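
For reference, a minimal consumer-side sketch of the utility switch and the mask operator| overloads that end here. The opening of the enclosing helper sits above this hunk, so its signature is an assumption: the upstream header declares it as spv::HasResultAndType(Op, bool*, bool*) under SPV_ENABLE_UTILITY_CODE, and the enumerant names below are the plain (non-scoped) spirv.hpp spellings.

#define SPV_ENABLE_UTILITY_CODE
#include "spirv.hpp"
#include <cstdio>

int main()
{
	// Ask the big switch above whether an opcode produces a result id and a result-type id.
	bool has_result = false, has_result_type = false;
	spv::HasResultAndType(spv::OpEmitMeshTasksEXT, &has_result, &has_result_type);
	std::printf("OpEmitMeshTasksEXT: result=%d, result type=%d\n", has_result, has_result_type);

	// The operator| overloads defined above combine mask enums without manual casts.
	spv::ImageOperandsMask ops = spv::ImageOperandsBiasMask | spv::ImageOperandsLodMask;
	return ops == spv::ImageOperandsMaskNone ? 1 : 0;
}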

+ 36 - 10
ThirdParty/SpirvCross/spirv_cfg.cpp

@@ -1,5 +1,6 @@
 /*
  * Copyright 2016-2021 Arm Limited
+ * SPDX-License-Identifier: Apache-2.0 OR MIT
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -18,7 +19,6 @@
  * At your option, you may choose to accept this material under either:
  *  1. The Apache License, Version 2.0, found at <http://www.apache.org/licenses/LICENSE-2.0>, or
  *  2. The MIT License, found at <http://opensource.org/licenses/MIT>.
- * SPDX-License-Identifier: Apache-2.0 OR MIT.
  */
 
 #include "spirv_cfg.hpp"
@@ -135,7 +135,9 @@ bool CFG::post_order_visit(uint32_t block_id)
 		break;
 
 	case SPIRBlock::MultiSelect:
-		for (auto &target : block.cases)
+	{
+		const auto &cases = compiler.get_case_list(block);
+		for (const auto &target : cases)
 		{
 			if (post_order_visit(target.block))
 				add_branch(block_id, target.block);
@@ -143,7 +145,7 @@ bool CFG::post_order_visit(uint32_t block_id)
 		if (block.default_block && post_order_visit(block.default_block))
 			add_branch(block_id, block.default_block);
 		break;
-
+	}
 	default:
 		break;
 	}
@@ -304,15 +306,36 @@ bool CFG::node_terminates_control_flow_in_sub_graph(BlockID from, BlockID to) co
 
 		bool true_path_ignore = false;
 		bool false_path_ignore = false;
-		if (ignore_block_id && dom.terminator == SPIRBlock::Select)
+
+		bool merges_to_nothing = dom.merge == SPIRBlock::MergeNone ||
+		                         (dom.merge == SPIRBlock::MergeSelection && dom.next_block &&
+		                          compiler.get<SPIRBlock>(dom.next_block).terminator == SPIRBlock::Unreachable) ||
+		                         (dom.merge == SPIRBlock::MergeLoop && dom.merge_block &&
+		                          compiler.get<SPIRBlock>(dom.merge_block).terminator == SPIRBlock::Unreachable);
+
+		if (dom.self == from || merges_to_nothing)
 		{
-			auto &true_block = compiler.get<SPIRBlock>(dom.true_block);
-			auto &false_block = compiler.get<SPIRBlock>(dom.false_block);
-			auto &ignore_block = compiler.get<SPIRBlock>(ignore_block_id);
-			true_path_ignore = compiler.execution_is_branchless(true_block, ignore_block);
-			false_path_ignore = compiler.execution_is_branchless(false_block, ignore_block);
+			// We can only ignore inner branchy paths if there is no merge,
+			// i.e. no code is generated afterwards. E.g. this allows us to elide continue:
+			// for (;;) { if (cond) { continue; } else { break; } }.
+			// Codegen here in SPIR-V will be something like either no merge if one path directly breaks, or
+			// we merge to Unreachable.
+			if (ignore_block_id && dom.terminator == SPIRBlock::Select)
+			{
+				auto &true_block = compiler.get<SPIRBlock>(dom.true_block);
+				auto &false_block = compiler.get<SPIRBlock>(dom.false_block);
+				auto &ignore_block = compiler.get<SPIRBlock>(ignore_block_id);
+				true_path_ignore = compiler.execution_is_branchless(true_block, ignore_block);
+				false_path_ignore = compiler.execution_is_branchless(false_block, ignore_block);
+			}
 		}
 
+		// Cases where we allow traversal. This serves as a proxy for post-dominance in a loop body.
+		// TODO: Might want to do full post-dominance analysis, but it's a lot of churn for something like this ...
+		// - We're the merge block of a selection construct. Jump to header.
+		// - We're the merge block of a loop. Jump to header.
+		// - Direct branch. Trivial.
+		// - Allow cases inside a branch if the header cannot merge execution before loop exit.
 		if ((dom.merge == SPIRBlock::MergeSelection && dom.next_block == to) ||
 		    (dom.merge == SPIRBlock::MergeLoop && dom.merge_block == to) ||
 		    (dom.terminator == SPIRBlock::Direct && dom.next_block == to) ||
@@ -385,7 +408,9 @@ void DominatorBuilder::lift_continue_block_dominator()
 		break;
 
 	case SPIRBlock::MultiSelect:
-		for (auto &target : block.cases)
+	{
+		auto &cases = cfg.get_compiler().get_case_list(block);
+		for (auto &target : cases)
 		{
 			if (cfg.get_visit_order(target.block) > post_order)
 				back_edge_dominator = true;
@@ -393,6 +418,7 @@ void DominatorBuilder::lift_continue_block_dominator()
 		if (block.default_block && cfg.get_visit_order(block.default_block) > post_order)
 			back_edge_dominator = true;
 		break;
+	}
 
 	default:
 		break;
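
The MultiSelect cases above now go through compiler.get_case_list(block) instead of block.cases: case literals are split into 32-bit and 64-bit lists (cases_32bit/cases_64bit in the spirv_common.hpp hunk further down, with the accessor declared in spirv_cross.hpp). A hypothetical sketch of that dispatch, not SPIRV-Cross's actual implementation:

// Hypothetical sketch (inside namespace spirv_cross): pick the case list from the bit
// width of the OpSwitch selector, since the literal width is not encoded in the instruction.
const SmallVector<SPIRBlock::Case> &Compiler::get_case_list(const SPIRBlock &block) const
{
	// Assumed: block.condition is the switch selector and its type carries the width.
	const SPIRType &type = expression_type(block.condition);
	return type.width > 32 ? block.cases_64bit : block.cases_32bit;
}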

+ 6 - 1
ThirdParty/SpirvCross/spirv_cfg.hpp

@@ -1,5 +1,6 @@
 /*
  * Copyright 2016-2021 Arm Limited
+ * SPDX-License-Identifier: Apache-2.0 OR MIT
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -18,7 +19,6 @@
  * At your option, you may choose to accept this material under either:
  *  1. The Apache License, Version 2.0, found at <http://www.apache.org/licenses/LICENSE-2.0>, or
  *  2. The MIT License, found at <http://opensource.org/licenses/MIT>.
- * SPDX-License-Identifier: Apache-2.0 OR MIT.
  */
 
 #ifndef SPIRV_CROSS_CFG_HPP
@@ -59,6 +59,11 @@ public:
 			return 0;
 	}
 
+	bool is_reachable(uint32_t block) const
+	{
+		return visit_order.count(block) != 0;
+	}
+
 	uint32_t get_visit_order(uint32_t block) const
 	{
 		auto itr = visit_order.find(block);
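
A small usage sketch for the new is_reachable() accessor: a block is reachable exactly when the post-order walk recorded a visit order for it. The helper below and the iteration over SPIRFunction::blocks are illustrative assumptions; only cfg.is_reachable() comes from this hunk.

// Hypothetical helper, not part of the library.
static bool function_has_unreachable_blocks(const spirv_cross::CFG &cfg, const spirv_cross::SPIRFunction &func)
{
	for (auto &block_id : func.blocks) // SPIRFunction::blocks lists the function's basic block ids
		if (!cfg.is_reachable(block_id))
			return true;
	return false;
}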

+ 133 - 14
ThirdParty/SpirvCross/spirv_common.hpp

@@ -1,5 +1,6 @@
 /*
  * Copyright 2015-2021 Arm Limited
+ * SPDX-License-Identifier: Apache-2.0 OR MIT
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -18,13 +19,16 @@
  * At your option, you may choose to accept this material under either:
  *  1. The Apache License, Version 2.0, found at <http://www.apache.org/licenses/LICENSE-2.0>, or
  *  2. The MIT License, found at <http://opensource.org/licenses/MIT>.
- * SPDX-License-Identifier: Apache-2.0 OR MIT.
  */
 
 #ifndef SPIRV_CROSS_COMMON_HPP
 #define SPIRV_CROSS_COMMON_HPP
 
-#include "Khronos/spirv/spirv.hpp"
+#ifndef SPV_ENABLE_UTILITY_CODE
+#define SPV_ENABLE_UTILITY_CODE
+#endif
+#include "spirv.hpp"
+
 #include "spirv_cross_containers.hpp"
 #include "spirv_cross_error_handling.hpp"
 #include <functional>
@@ -211,6 +215,28 @@ inline std::string convert_to_string(const T &t)
 	return std::to_string(t);
 }
 
+static inline std::string convert_to_string(int32_t value)
+{
+	// INT_MIN is ... special on some backends. If we use a decimal literal, and negate it, we
+	// could accidentally promote the literal to long first, then negate.
+	// To workaround it, emit int(0x80000000) instead.
+	if (value == std::numeric_limits<int32_t>::min())
+		return "int(0x80000000)";
+	else
+		return std::to_string(value);
+}
+
+static inline std::string convert_to_string(int64_t value, const std::string &int64_type, bool long_long_literal_suffix)
+{
+	// INT64_MIN is ... special on some backends.
+	// If we use a decimal literal, and negate it, we might overflow the representable numbers.
+	// To work around it, emit the 64-bit hex form (0x8000000000000000) wrapped in the target's int64 type instead.
+	if (value == std::numeric_limits<int64_t>::min())
+		return join(int64_type, "(0x8000000000000000u", (long_long_literal_suffix ? "ll" : "l"), ")");
+	else
+		return std::to_string(value) + (long_long_literal_suffix ? "ll" : "l");
+}
+
 // Allow implementations to set a convenient standard precision
 #ifndef SPIRV_CROSS_FLT_FMT
 #define SPIRV_CROSS_FLT_FMT "%.32g"
@@ -302,8 +328,20 @@ struct Instruction
 {
 	uint16_t op = 0;
 	uint16_t count = 0;
+	// If offset is 0 (not a valid offset into the instruction stream),
+	// we have an instruction stream which is embedded in the object.
 	uint32_t offset = 0;
 	uint32_t length = 0;
+
+	inline bool is_embedded() const
+	{
+		return offset == 0;
+	}
+};
+
+struct EmbeddedInstruction : Instruction
+{
+	SmallVector<uint32_t> ops;
 };
 
 enum Types
@@ -405,6 +443,11 @@ struct IVariant
 	virtual ~IVariant() = default;
 	virtual IVariant *clone(ObjectPoolBase *pool) = 0;
 	ID self = 0;
+
+protected:
+	IVariant() = default;
+	IVariant(const IVariant&) = default;
+	IVariant &operator=(const IVariant&) = default;
 };
 
 #define SPIRV_CROSS_DECLARE_CLONE(T)                                \
@@ -599,7 +642,10 @@ struct SPIRExtension : IVariant
 		SPV_AMD_shader_ballot,
 		SPV_AMD_shader_explicit_vertex_parameter,
 		SPV_AMD_shader_trinary_minmax,
-		SPV_AMD_gcn_shader
+		SPV_AMD_gcn_shader,
+		NonSemanticDebugPrintf,
+		NonSemanticShaderDebugInfo,
+		NonSemanticGeneric
 	};
 
 	explicit SPIRExtension(Extension ext_)
@@ -633,10 +679,12 @@ struct SPIREntryPoint
 	struct WorkgroupSize
 	{
 		uint32_t x = 0, y = 0, z = 0;
+		uint32_t id_x = 0, id_y = 0, id_z = 0;
 		uint32_t constant = 0; // Workgroup size can be expressed as a constant/spec-constant instead.
 	} workgroup_size;
 	uint32_t invocations = 0;
 	uint32_t output_vertices = 0;
+	uint32_t output_primitives = 0;
 	spv::ExecutionModel model = spv::ExecutionModelMax;
 	bool geometry_passthrough = false;
 };
@@ -650,7 +698,7 @@ struct SPIRExpression : IVariant
 
 	// Only created by the backend target to avoid creating tons of temporaries.
 	SPIRExpression(std::string expr, TypeID expression_type_, bool immutable_)
-	    : expression(expr)
+	    : expression(std::move(expr))
 	    , expression_type(expression_type_)
 	    , immutable(immutable_)
 	{
@@ -681,6 +729,9 @@ struct SPIRExpression : IVariant
 	// Whether or not this is an access chain expression.
 	bool access_chain = false;
 
+	// Whether or not gl_MeshVerticesEXT[].gl_Position (as a whole or .y) is referenced
+	bool access_meshlet_position_y = false;
+
 	// A list of expressions which this expression depends on.
 	SmallVector<ID> expression_dependencies;
 
@@ -731,7 +782,8 @@ struct SPIRBlock : IVariant
 		Unreachable, // Noop
 		Kill, // Discard
 		IgnoreIntersection, // Ray Tracing
-		TerminateRay // Ray Tracing
+		TerminateRay, // Ray Tracing
+		EmitMeshTasks // Mesh shaders
 	};
 
 	enum Merge
@@ -793,6 +845,13 @@ struct SPIRBlock : IVariant
 	BlockID false_block = 0;
 	BlockID default_block = 0;
 
+	// If terminator is EmitMeshTasksEXT.
+	struct
+	{
+		ID groups[3];
+		ID payload;
+	} mesh = {};
+
 	SmallVector<Instruction> ops;
 
 	struct Phi
@@ -815,10 +874,11 @@ struct SPIRBlock : IVariant
 
 	struct Case
 	{
-		uint32_t value;
+		uint64_t value;
 		BlockID block;
 	};
-	SmallVector<Case> cases;
+	SmallVector<Case> cases_32bit;
+	SmallVector<Case> cases_64bit;
 
 	// If we have tried to optimize code for this block but failed,
 	// keep track of this.
@@ -1031,7 +1091,6 @@ struct SPIRVariable : IVariant
 
 	// Temporaries which can remain forwarded as long as this variable is not modified.
 	SmallVector<ID> dependees;
-	bool forwardable = true;
 
 	bool deferred_declaration = false;
 	bool phi_variable = false;
@@ -1360,7 +1419,7 @@ public:
 	~Variant()
 	{
 		if (holder)
-			group->pools[type]->free_opaque(holder);
+			group->pools[type]->deallocate_opaque(holder);
 	}
 
 	// Marking custom move constructor as noexcept is important.
@@ -1379,7 +1438,7 @@ public:
 		if (this != &other)
 		{
 			if (holder)
-				group->pools[type]->free_opaque(holder);
+				group->pools[type]->deallocate_opaque(holder);
 			holder = other.holder;
 			group = other.group;
 			type = other.type;
@@ -1403,7 +1462,7 @@ public:
 		if (this != &other)
 		{
 			if (holder)
-				group->pools[type]->free_opaque(holder);
+				group->pools[type]->deallocate_opaque(holder);
 
 			if (other.holder)
 				holder = other.holder->clone(group->pools[other.type].get());
@@ -1419,13 +1478,13 @@ public:
 	void set(IVariant *val, Types new_type)
 	{
 		if (holder)
-			group->pools[type]->free_opaque(holder);
+			group->pools[type]->deallocate_opaque(holder);
 		holder = nullptr;
 
 		if (!allow_type_rewrite && type != TypeNone && type != new_type)
 		{
 			if (val)
-				group->pools[new_type]->free_opaque(val);
+				group->pools[new_type]->deallocate_opaque(val);
 			SPIRV_CROSS_THROW("Overwriting a variant with new type.");
 		}
 
@@ -1480,7 +1539,7 @@ public:
 	void reset()
 	{
 		if (holder)
-			group->pools[type]->free_opaque(holder);
+			group->pools[type]->deallocate_opaque(holder);
 		holder = nullptr;
 		type = TypeNone;
 	}
@@ -1523,6 +1582,8 @@ struct AccessChainMeta
 	bool storage_is_packed = false;
 	bool storage_is_invariant = false;
 	bool flattened_struct = false;
+	bool relaxed_precision = false;
+	bool access_meshlet_position_y = false;
 };
 
 enum ExtendedDecorations
@@ -1590,6 +1651,12 @@ enum ExtendedDecorations
 	// results of interpolation can.
 	SPIRVCrossDecorationInterpolantComponentExpr,
 
+	// Apply to any struct type that is used in the Workgroup storage class.
+	// This causes matrices in MSL prior to Metal 3.0 to be emitted using a special
+	// class that is convertible to the standard matrix type, to work around the
+	// lack of constructors in the 'threadgroup' address space.
+	SPIRVCrossDecorationWorkgroupStruct,
+
 	SPIRVCrossDecorationCount
 };
 
@@ -1600,6 +1667,7 @@ struct Meta
 		std::string alias;
 		std::string qualified_alias;
 		std::string hlsl_semantic;
+		std::string user_type;
 		Bitset decoration_flags;
 		spv::BuiltIn builtin_type = spv::BuiltInMax;
 		uint32_t location = 0;
@@ -1735,6 +1803,33 @@ static inline bool opcode_is_sign_invariant(spv::Op opcode)
 	}
 }
 
+static inline bool opcode_can_promote_integer_implicitly(spv::Op opcode)
+{
+	switch (opcode)
+	{
+	case spv::OpSNegate:
+	case spv::OpNot:
+	case spv::OpBitwiseAnd:
+	case spv::OpBitwiseOr:
+	case spv::OpBitwiseXor:
+	case spv::OpShiftLeftLogical:
+	case spv::OpShiftRightLogical:
+	case spv::OpShiftRightArithmetic:
+	case spv::OpIAdd:
+	case spv::OpISub:
+	case spv::OpIMul:
+	case spv::OpSDiv:
+	case spv::OpUDiv:
+	case spv::OpSRem:
+	case spv::OpUMod:
+	case spv::OpSMod:
+		return true;
+
+	default:
+		return false;
+	}
+}
+
 struct SetBindingPair
 {
 	uint32_t desc_set;
@@ -1751,6 +1846,22 @@ struct SetBindingPair
 	}
 };
 
+struct LocationComponentPair
+{
+	uint32_t location;
+	uint32_t component;
+
+	inline bool operator==(const LocationComponentPair &other) const
+	{
+		return location == other.location && component == other.component;
+	}
+
+	inline bool operator<(const LocationComponentPair &other) const
+	{
+		return location < other.location || (location == other.location && component < other.component);
+	}
+};
+
 struct StageSetBinding
 {
 	spv::ExecutionModel model;
@@ -1773,6 +1884,14 @@ struct InternalHasher
 		return (hash_set * 0x10001b31) ^ hash_binding;
 	}
 
+	inline size_t operator()(const LocationComponentPair &value) const
+	{
+		// Quality of hash doesn't really matter here.
+		auto hash_set = std::hash<uint32_t>()(value.location);
+		auto hash_binding = std::hash<uint32_t>()(value.component);
+		return (hash_set * 0x10001b31) ^ hash_binding;
+	}
+
 	inline size_t operator()(const StageSetBinding &value) const
 	{
 		// Quality of hash doesn't really matter here.

File diff suppressed because it is too large
+ 504 - 102
ThirdParty/SpirvCross/spirv_cross.cpp


+ 97 - 11
ThirdParty/SpirvCross/spirv_cross.hpp

@@ -1,5 +1,6 @@
 /*
  * Copyright 2015-2021 Arm Limited
+ * SPDX-License-Identifier: Apache-2.0 OR MIT
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -18,13 +19,15 @@
  * At your option, you may choose to accept this material under either:
  *  1. The Apache License, Version 2.0, found at <http://www.apache.org/licenses/LICENSE-2.0>, or
  *  2. The MIT License, found at <http://opensource.org/licenses/MIT>.
- * SPDX-License-Identifier: Apache-2.0 OR MIT.
  */
 
 #ifndef SPIRV_CROSS_HPP
 #define SPIRV_CROSS_HPP
 
-#include "Khronos/spirv/spirv.hpp"
+#ifndef SPV_ENABLE_UTILITY_CODE
+#define SPV_ENABLE_UTILITY_CODE
+#endif
+#include "spirv.hpp"
 #include "spirv_cfg.hpp"
 #include "spirv_cross_parsed_ir.hpp"
 
@@ -59,6 +62,27 @@ struct Resource
 	std::string name;
 };
 
+struct BuiltInResource
+{
+	// This is mostly here to support reflection of builtins such as Position/PointSize/CullDistance/ClipDistance.
+	// This needs to be different from Resource since we can collect builtins from blocks.
+	// A builtin present here does not necessarily mean it's considered an active builtin,
+	// since variable ID "activeness" is only tracked on OpVariable level, not Block members.
+	// For that, update_active_builtins() -> has_active_builtin() can be used to further refine the reflection.
+	spv::BuiltIn builtin;
+
+	// This is the actual value type of the builtin.
+	// Typically float4, float, array<float, N> for the gl_PerVertex builtins.
+	// If the builtin is a control point, the control point array type will be stripped away here as appropriate.
+	TypeID value_type_id;
+
+	// This refers to the base resource which contains the builtin.
+	// If resource is a Block, it can hold multiple builtins, or it might not be a block.
+	// For advanced reflection scenarios, all information in builtin/value_type_id can be deduced,
+	// it's just more convenient this way.
+	Resource resource;
+};
+
 struct ShaderResources
 {
 	SmallVector<Resource> uniform_buffers;
@@ -70,15 +94,21 @@ struct ShaderResources
 	SmallVector<Resource> sampled_images;
 	SmallVector<Resource> atomic_counters;
 	SmallVector<Resource> acceleration_structures;
+	SmallVector<Resource> gl_plain_uniforms;
 
 	// There can only be one push constant block,
 	// but keep the vector in case this restriction is lifted in the future.
 	SmallVector<Resource> push_constant_buffers;
 
+	SmallVector<Resource> shader_record_buffers;
+
 	// For Vulkan GLSL and HLSL source,
 	// these correspond to separate texture2D and samplers respectively.
 	SmallVector<Resource> separate_images;
 	SmallVector<Resource> separate_samplers;
+
+	SmallVector<BuiltInResource> builtin_inputs;
+	SmallVector<BuiltInResource> builtin_outputs;
 };
 
 struct CombinedImageSampler
@@ -324,7 +354,7 @@ public:
 
 	// Traverses all reachable opcodes and sets active_builtins to a bitmask of all builtin variables which are accessed in the shader.
 	void update_active_builtins();
-	bool has_active_builtin(spv::BuiltIn builtin, spv::StorageClass storage);
+	bool has_active_builtin(spv::BuiltIn builtin, spv::StorageClass storage) const;
 
 	// Query and modify OpExecutionMode.
 	const Bitset &get_execution_mode_bitset() const;
@@ -333,12 +363,16 @@ public:
 	void set_execution_mode(spv::ExecutionMode mode, uint32_t arg0 = 0, uint32_t arg1 = 0, uint32_t arg2 = 0);
 
 	// Gets argument for an execution mode (LocalSize, Invocations, OutputVertices).
-	// For LocalSize, the index argument is used to select the dimension (X = 0, Y = 1, Z = 2).
+	// For LocalSize or LocalSizeId, the index argument is used to select the dimension (X = 0, Y = 1, Z = 2).
 	// For execution modes which do not have arguments, 0 is returned.
+	// LocalSizeId query returns an ID. If LocalSizeId execution mode is not used, it returns 0.
+	// LocalSize always returns a literal. If execution mode is LocalSizeId,
+	// the literal (spec constant or not) is still returned.
 	uint32_t get_execution_mode_argument(spv::ExecutionMode mode, uint32_t index = 0) const;
 	spv::ExecutionModel get_execution_model() const;
 
 	bool is_tessellation_shader() const;
+	bool is_tessellating_triangles() const;
 
 	// In SPIR-V, the compute work group size can be represented by a constant vector, in which case
 	// the LocalSize execution mode is ignored.
@@ -356,6 +390,8 @@ public:
 	// If the component is not a specialization constant, a zeroed out struct will be written.
 	// The return value is the constant ID of the builtin WorkGroupSize, but this is not expected to be useful
 	// for most use cases.
+	// If LocalSizeId is used, there is no uvec3 value representing the workgroup size, so the return value is 0,
+	// but x, y and z are written as normal if the components are specialization constants.
 	uint32_t get_work_group_size_specialization_constants(SpecializationConstant &x, SpecializationConstant &y,
 	                                                      SpecializationConstant &z) const;
 
@@ -513,9 +549,23 @@ protected:
 		if (!instr.length)
 			return nullptr;
 
-		if (instr.offset + instr.length > ir.spirv.size())
-			SPIRV_CROSS_THROW("Compiler::stream() out of range.");
-		return &ir.spirv[instr.offset];
+		if (instr.is_embedded())
+		{
+			auto &embedded = static_cast<const EmbeddedInstruction &>(instr);
+			assert(embedded.ops.size() == instr.length);
+			return embedded.ops.data();
+		}
+		else
+		{
+			if (instr.offset + instr.length > ir.spirv.size())
+				SPIRV_CROSS_THROW("Compiler::stream() out of range.");
+			return &ir.spirv[instr.offset];
+		}
+	}
+
+	uint32_t *stream_mutable(const Instruction &instr) const
+	{
+		return const_cast<uint32_t *>(stream(instr));
 	}
 
 	ParsedIR ir;
@@ -633,12 +683,12 @@ protected:
 	bool is_vector(const SPIRType &type) const;
 	bool is_matrix(const SPIRType &type) const;
 	bool is_array(const SPIRType &type) const;
+	static bool is_runtime_size_array(const SPIRType &type);
 	uint32_t expression_type_id(uint32_t id) const;
 	const SPIRType &expression_type(uint32_t id) const;
 	bool expression_is_lvalue(uint32_t id) const;
 	bool variable_storage_is_aliased(const SPIRVariable &var);
 	SPIRVariable *maybe_get_backing_variable(uint32_t chain);
-	spv::StorageClass get_expression_effective_storage_class(uint32_t ptr);
 
 	void register_read(uint32_t expr, uint32_t chain, bool forwarded);
 	void register_write(uint32_t chain);
@@ -698,16 +748,20 @@ protected:
 	SPIRBlock::ContinueBlockType continue_block_type(const SPIRBlock &continue_block) const;
 
 	void force_recompile();
+	void force_recompile_guarantee_forward_progress();
 	void clear_force_recompile();
 	bool is_forcing_recompilation() const;
 	bool is_force_recompile = false;
+	bool is_force_recompile_forward_progress = false;
 
+	bool block_is_noop(const SPIRBlock &block) const;
 	bool block_is_loop_candidate(const SPIRBlock &block, SPIRBlock::Method method) const;
 
 	bool types_are_logically_equivalent(const SPIRType &a, const SPIRType &b) const;
 	void inherit_expression_dependencies(uint32_t dst, uint32_t source);
 	void add_implied_read_expression(SPIRExpression &e, uint32_t source);
 	void add_implied_read_expression(SPIRAccessChain &e, uint32_t source);
+	void add_active_interface_variable(uint32_t var_id);
 
 	// For proper multiple entry point support, allow querying if an Input or Output
 	// variable is part of that entry points interface.
@@ -733,6 +787,10 @@ protected:
 		// Return true if traversal should continue.
 		// If false, traversal will end immediately.
 		virtual bool handle(spv::Op opcode, const uint32_t *args, uint32_t length) = 0;
+		virtual bool handle_terminator(const SPIRBlock &)
+		{
+			return true;
+		}
 
 		virtual bool follow_function_call(const SPIRFunction &)
 		{
@@ -879,6 +937,7 @@ protected:
 	// Similar is implemented for images, as well as if subpass inputs are needed.
 	std::unordered_set<uint32_t> comparison_ids;
 	bool need_subpass_input = false;
+	bool need_subpass_input_ms = false;
 
 	// In certain backends, we will need to use a dummy sampler to be able to emit code.
 	// GLSL does not support texelFetch on texture2D objects, but SPIR-V does,
@@ -918,6 +977,7 @@ protected:
 
 		void add_hierarchy_to_comparison_ids(uint32_t ids);
 		bool need_subpass_input = false;
+		bool need_subpass_input_ms = false;
 		void add_dependency(uint32_t dst, uint32_t src);
 	};
 
@@ -947,6 +1007,7 @@ protected:
 		bool id_is_phi_variable(uint32_t id) const;
 		bool id_is_potential_temporary(uint32_t id) const;
 		bool handle(spv::Op op, const uint32_t *args, uint32_t length) override;
+		bool handle_terminator(const SPIRBlock &block) override;
 
 		Compiler &compiler;
 		SPIRFunction &entry;
@@ -957,7 +1018,8 @@ protected:
 		std::unordered_map<uint32_t, std::unordered_set<uint32_t>> partial_write_variables_to_block;
 		std::unordered_set<uint32_t> access_chain_expressions;
 		// Access chains used in multiple blocks mean hoisting all the variables used to construct the access chain as not all backends can use pointers.
-		std::unordered_map<uint32_t, std::unordered_set<uint32_t>> access_chain_children;
+		// This is also relevant when forwarding opaque objects since we cannot lower these to temporaries.
+		std::unordered_map<uint32_t, std::unordered_set<uint32_t>> rvalue_forward_children;
 		const SPIRBlock *current_block = nullptr;
 	};
 
@@ -973,15 +1035,32 @@ protected:
 		uint32_t write_count = 0;
 	};
 
+	struct PhysicalBlockMeta
+	{
+		uint32_t alignment = 0;
+	};
+
 	struct PhysicalStorageBufferPointerHandler : OpcodeHandler
 	{
 		explicit PhysicalStorageBufferPointerHandler(Compiler &compiler_);
 		bool handle(spv::Op op, const uint32_t *args, uint32_t length) override;
 		Compiler &compiler;
-		std::unordered_set<uint32_t> types;
+
+		std::unordered_set<uint32_t> non_block_types;
+		std::unordered_map<uint32_t, PhysicalBlockMeta> physical_block_type_meta;
+		std::unordered_map<uint32_t, PhysicalBlockMeta *> access_chain_to_physical_block;
+
+		void mark_aligned_access(uint32_t id, const uint32_t *args, uint32_t length);
+		PhysicalBlockMeta *find_block_meta(uint32_t id) const;
+		bool type_is_bda_block_entry(uint32_t type_id) const;
+		void setup_meta_chain(uint32_t type_id, uint32_t var_id);
+		uint32_t get_minimum_scalar_alignment(const SPIRType &type) const;
+		void analyze_non_block_types_from_block(const SPIRType &type);
+		uint32_t get_base_non_block_type_id(uint32_t type_id) const;
 	};
 	void analyze_non_block_pointer_types();
 	SmallVector<uint32_t> physical_storage_non_block_pointer_types;
+	std::unordered_map<uint32_t, PhysicalBlockMeta> physical_storage_type_to_alignment;
 
 	void analyze_variable_scope(SPIRFunction &function, AnalyzeVariableScopeAccessHandler &handler);
 	void find_function_local_luts(SPIRFunction &function, const AnalyzeVariableScopeAccessHandler &handler,
@@ -1053,7 +1132,7 @@ protected:
 	Bitset combined_decoration_for_member(const SPIRType &type, uint32_t index) const;
 	static bool is_desktop_only_format(spv::ImageFormat format);
 
-	bool image_is_comparison(const SPIRType &type, uint32_t id) const;
+	bool is_depth_image(const SPIRType &type, uint32_t id) const;
 
 	void set_extended_decoration(uint32_t id, ExtendedDecorations decoration, uint32_t value = 0);
 	uint32_t get_extended_decoration(uint32_t id, ExtendedDecorations decoration) const;
@@ -1068,6 +1147,8 @@ protected:
 
 	bool type_is_array_of_pointers(const SPIRType &type) const;
 	bool type_is_top_level_physical_pointer(const SPIRType &type) const;
+	bool type_is_top_level_pointer(const SPIRType &type) const;
+	bool type_is_top_level_array(const SPIRType &type) const;
 	bool type_is_block_like(const SPIRType &type) const;
 	bool type_is_opaque_value(const SPIRType &type) const;
 
@@ -1081,6 +1162,11 @@ protected:
 
 	bool is_vertex_like_shader() const;
 
+	// Get the correct case list for the OpSwitch, since the condition can be
+	// either 32 bit or 64 bit wide, but the type is not embedded in the
+	// instruction itself.
+	const SmallVector<SPIRBlock::Case> &get_case_list(const SPIRBlock &block) const;
+
 private:
 	// Used only to implement the old deprecated get_entry_point() interface.
 	const SPIREntryPoint &get_first_entry_point(const std::string &name) const;

+ 17 - 9
ThirdParty/SpirvCross/spirv_cross_containers.hpp

@@ -1,5 +1,6 @@
 /*
  * Copyright 2019-2021 Hans-Kristian Arntzen
+ * SPDX-License-Identifier: Apache-2.0 OR MIT
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -18,7 +19,6 @@
  * At your option, you may choose to accept this material under either:
  *  1. The Apache License, Version 2.0, found at <http://www.apache.org/licenses/LICENSE-2.0>, or
  *  2. The MIT License, found at <http://opensource.org/licenses/MIT>.
- * SPDX-License-Identifier: Apache-2.0 OR MIT.
  */
 
 #ifndef SPIRV_CROSS_CONTAINERS_HPP
@@ -26,6 +26,7 @@
 
 #include "spirv_cross_error_handling.hpp"
 #include <algorithm>
+#include <exception>
 #include <functional>
 #include <iterator>
 #include <limits>
@@ -210,7 +211,8 @@ public:
 		buffer_capacity = N;
 	}
 
-	SmallVector(const T *arg_list_begin, const T *arg_list_end) SPIRV_CROSS_NOEXCEPT : SmallVector()
+	template <typename U>
+	SmallVector(const U *arg_list_begin, const U *arg_list_end) SPIRV_CROSS_NOEXCEPT : SmallVector()
 	{
 		auto count = size_t(arg_list_end - arg_list_begin);
 		reserve(count);
@@ -219,7 +221,13 @@ public:
 		this->buffer_size = count;
 	}
 
-	SmallVector(std::initializer_list<T> init) SPIRV_CROSS_NOEXCEPT : SmallVector(init.begin(), init.end())
+	template <typename U>
+	SmallVector(std::initializer_list<U> init) SPIRV_CROSS_NOEXCEPT : SmallVector(init.begin(), init.end())
+	{
+	}
+
+	template <typename U, size_t M>
+	explicit SmallVector(const U (&init)[M]) SPIRV_CROSS_NOEXCEPT : SmallVector(init, init + M)
 	{
 	}
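
A short usage sketch for the widened constructors above (assuming the default spirv_cross namespace); the element type of the source range now only needs to be convertible to T.

#include "spirv_cross_containers.hpp"

void smallvector_construction_examples()
{
	// initializer_list of int, converted element-wise to uint32_t.
	spirv_cross::SmallVector<uint32_t> ids = { 1, 2, 3 };

	// Explicit C-array constructor; again the element type only has to convert to T.
	static const int raw[] = { 10, 20, 30 };
	spirv_cross::SmallVector<uint32_t> more(raw);
}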
 
@@ -327,8 +335,8 @@ public:
 
 	void reserve(size_t count) SPIRV_CROSS_NOEXCEPT
 	{
-		if ((count > std::numeric_limits<size_t>::max() / sizeof(T)) ||
-		    (count > std::numeric_limits<size_t>::max() / 2))
+		if ((count > (std::numeric_limits<size_t>::max)() / sizeof(T)) ||
+		    (count > (std::numeric_limits<size_t>::max)() / 2))
 		{
 			// Only way this should ever happen is with garbage input, terminate.
 			std::terminate();
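
A small aside on why the calls above are now written as (std::numeric_limits<size_t>::max)(): when <windows.h> is included without NOMINMAX it defines a function-like max macro, and the extra parentheses keep the preprocessor from expanding the call. Illustrative example:

#include <cstddef>
#include <limits>

// With a 'max(a, b)' macro in scope, the unparenthesized
// std::numeric_limits<size_t>::max() call would be mangled by the preprocessor.
size_t reserve_upper_bound(size_t element_size)
{
	return (std::numeric_limits<size_t>::max)() / element_size;
}
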
@@ -546,7 +554,7 @@ class ObjectPoolBase
 {
 public:
 	virtual ~ObjectPoolBase() = default;
-	virtual void free_opaque(void *ptr) = 0;
+	virtual void deallocate_opaque(void *ptr) = 0;
 };
 
 template <typename T>
@@ -580,15 +588,15 @@ public:
 		return ptr;
 	}
 
-	void free(T *ptr)
+	void deallocate(T *ptr)
 	{
 		ptr->~T();
 		vacants.push_back(ptr);
 	}
 
-	void free_opaque(void *ptr) override
+	void deallocate_opaque(void *ptr) override
 	{
-		free(static_cast<T *>(ptr));
+		deallocate(static_cast<T *>(ptr));
 	}
 
 	void clear()

+ 1 - 1
ThirdParty/SpirvCross/spirv_cross_error_handling.hpp

@@ -1,5 +1,6 @@
 /*
  * Copyright 2015-2021 Arm Limited
+ * SPDX-License-Identifier: Apache-2.0 OR MIT
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -18,7 +19,6 @@
  * At your option, you may choose to accept this material under either:
  *  1. The Apache License, Version 2.0, found at <http://www.apache.org/licenses/LICENSE-2.0>, or
  *  2. The MIT License, found at <http://opensource.org/licenses/MIT>.
- * SPDX-License-Identifier: Apache-2.0 OR MIT.
  */
 
 #ifndef SPIRV_CROSS_ERROR_HANDLING

+ 39 - 22
ThirdParty/SpirvCross/spirv_cross_parsed_ir.cpp

@@ -1,5 +1,6 @@
 /*
  * Copyright 2018-2021 Arm Limited
+ * SPDX-License-Identifier: Apache-2.0 OR MIT
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -18,7 +19,6 @@
  * At your option, you may choose to accept this material under either:
  *  1. The Apache License, Version 2.0, found at <http://www.apache.org/licenses/LICENSE-2.0>, or
  *  2. The MIT License, found at <http://opensource.org/licenses/MIT>.
- * SPDX-License-Identifier: Apache-2.0 OR MIT.
  */
 
 #include "spirv_cross_parsed_ir.hpp"
@@ -54,26 +54,26 @@ ParsedIR::ParsedIR()
 // Should have been default-implemented, but need this on MSVC 2013.
 ParsedIR::ParsedIR(ParsedIR &&other) SPIRV_CROSS_NOEXCEPT
 {
-	*this = move(other);
+	*this = std::move(other);
 }
 
 ParsedIR &ParsedIR::operator=(ParsedIR &&other) SPIRV_CROSS_NOEXCEPT
 {
 	if (this != &other)
 	{
-		pool_group = move(other.pool_group);
-		spirv = move(other.spirv);
-		meta = move(other.meta);
+		pool_group = std::move(other.pool_group);
+		spirv = std::move(other.spirv);
+		meta = std::move(other.meta);
 		for (int i = 0; i < TypeCount; i++)
-			ids_for_type[i] = move(other.ids_for_type[i]);
-		ids_for_constant_or_type = move(other.ids_for_constant_or_type);
-		ids_for_constant_or_variable = move(other.ids_for_constant_or_variable);
-		declared_capabilities = move(other.declared_capabilities);
-		declared_extensions = move(other.declared_extensions);
-		block_meta = move(other.block_meta);
-		continue_block_to_loop_header = move(other.continue_block_to_loop_header);
-		entry_points = move(other.entry_points);
-		ids = move(other.ids);
+			ids_for_type[i] = std::move(other.ids_for_type[i]);
+		ids_for_constant_undef_or_type = std::move(other.ids_for_constant_undef_or_type);
+		ids_for_constant_or_variable = std::move(other.ids_for_constant_or_variable);
+		declared_capabilities = std::move(other.declared_capabilities);
+		declared_extensions = std::move(other.declared_extensions);
+		block_meta = std::move(other.block_meta);
+		continue_block_to_loop_header = std::move(other.continue_block_to_loop_header);
+		entry_points = std::move(other.entry_points);
+		ids = std::move(other.ids);
 		addressing_model = other.addressing_model;
 		memory_model = other.memory_model;
 
@@ -83,6 +83,7 @@ ParsedIR &ParsedIR::operator=(ParsedIR &&other) SPIRV_CROSS_NOEXCEPT
 		loop_iteration_depth_soft = other.loop_iteration_depth_soft;
 
 		meta_needing_name_fixup = std::move(other.meta_needing_name_fixup);
+		load_type_width = std::move(other.load_type_width);
 	}
 	return *this;
 }
@@ -101,7 +102,7 @@ ParsedIR &ParsedIR::operator=(const ParsedIR &other)
 		meta = other.meta;
 		for (int i = 0; i < TypeCount; i++)
 			ids_for_type[i] = other.ids_for_type[i];
-		ids_for_constant_or_type = other.ids_for_constant_or_type;
+		ids_for_constant_undef_or_type = other.ids_for_constant_undef_or_type;
 		ids_for_constant_or_variable = other.ids_for_constant_or_variable;
 		declared_capabilities = other.declared_capabilities;
 		declared_extensions = other.declared_extensions;
@@ -115,7 +116,9 @@ ParsedIR &ParsedIR::operator=(const ParsedIR &other)
 		addressing_model = other.addressing_model;
 		memory_model = other.memory_model;
 
+
 		meta_needing_name_fixup = other.meta_needing_name_fixup;
+		load_type_width = other.load_type_width;
 
 		// Very deliberate copying of IDs. There is no default copy constructor, nor a simple default constructor.
 		// Construct object first so we have the correct allocator set-up, then we can copy object into our new pool group.
@@ -327,6 +330,10 @@ void ParsedIR::fixup_reserved_names()
 {
 	for (uint32_t id : meta_needing_name_fixup)
 	{
+		// Don't rename remapped variables like 'gl_LastFragDepthARM'.
+		if (ids[id].get_type() == TypeVariable && get<SPIRVariable>(id).remapped_variable)
+			continue;
+
 		auto &m = meta[id];
 		sanitize_identifier(m.decoration.alias, false, false);
 		for (auto &memb : m.members)
@@ -346,7 +353,7 @@ void ParsedIR::set_name(ID id, const string &name)
 void ParsedIR::set_member_name(TypeID id, uint32_t index, const string &name)
 {
 	auto &m = meta[id];
-	m.members.resize(max(meta[id].members.size(), size_t(index) + 1));
+	m.members.resize(max(m.members.size(), size_t(index) + 1));
 	m.members[index].alias = name;
 	if (!is_valid_identifier(name) || is_reserved_identifier(name, true, false))
 		meta_needing_name_fixup.insert(id);
@@ -363,6 +370,10 @@ void ParsedIR::set_decoration_string(ID id, Decoration decoration, const string
 		dec.hlsl_semantic = argument;
 		break;
 
+	case DecorationUserTypeGOOGLE:
+		dec.user_type = argument;
+		break;
+
 	default:
 		break;
 	}
@@ -448,8 +459,9 @@ void ParsedIR::set_decoration(ID id, Decoration decoration, uint32_t argument)
 
 void ParsedIR::set_member_decoration(TypeID id, uint32_t index, Decoration decoration, uint32_t argument)
 {
-	meta[id].members.resize(max(meta[id].members.size(), size_t(index) + 1));
-	auto &dec = meta[id].members[index];
+	auto &m = meta[id];
+	m.members.resize(max(m.members.size(), size_t(index) + 1));
+	auto &dec = m.members[index];
 	dec.decoration_flags.set(decoration);
 
 	switch (decoration)
@@ -651,6 +663,9 @@ const string &ParsedIR::get_decoration_string(ID id, Decoration decoration) cons
 	case DecorationHlslSemanticGOOGLE:
 		return dec.hlsl_semantic;
 
+	case DecorationUserTypeGOOGLE:
+		return dec.user_type;
+
 	default:
 		return empty_string;
 	}
@@ -789,7 +804,8 @@ const Bitset &ParsedIR::get_decoration_bitset(ID id) const
 
 void ParsedIR::set_member_decoration_string(TypeID id, uint32_t index, Decoration decoration, const string &argument)
 {
-	meta[id].members.resize(max(meta[id].members.size(), size_t(index) + 1));
+	auto &m = meta[id];
+	m.members.resize(max(m.members.size(), size_t(index) + 1));
 	auto &dec = meta[id].members[index];
 	dec.decoration_flags.set(decoration);
 
@@ -925,7 +941,7 @@ void ParsedIR::add_typed_id(Types type, ID id)
 		{
 		case TypeConstant:
 			ids_for_constant_or_variable.push_back(id);
-			ids_for_constant_or_type.push_back(id);
+			ids_for_constant_undef_or_type.push_back(id);
 			break;
 
 		case TypeVariable:
@@ -934,7 +950,8 @@ void ParsedIR::add_typed_id(Types type, ID id)
 
 		case TypeType:
 		case TypeConstantOp:
-			ids_for_constant_or_type.push_back(id);
+		case TypeUndef:
+			ids_for_constant_undef_or_type.push_back(id);
 			break;
 
 		default:
@@ -996,7 +1013,7 @@ ParsedIR::LoopLock::LoopLock(uint32_t *lock_)
 
 ParsedIR::LoopLock::LoopLock(LoopLock &&other) SPIRV_CROSS_NOEXCEPT
 {
-	*this = move(other);
+	*this = std::move(other);
 }
 
 ParsedIR::LoopLock &ParsedIR::LoopLock::operator=(LoopLock &&other) SPIRV_CROSS_NOEXCEPT

+ 10 - 3
ThirdParty/SpirvCross/spirv_cross_parsed_ir.hpp

@@ -1,5 +1,6 @@
 /*
  * Copyright 2018-2021 Arm Limited
+ * SPDX-License-Identifier: Apache-2.0 OR MIT
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -18,7 +19,6 @@
  * At your option, you may choose to accept this material under either:
  *  1. The Apache License, Version 2.0, found at <http://www.apache.org/licenses/LICENSE-2.0>, or
  *  2. The MIT License, found at <http://opensource.org/licenses/MIT>.
- * SPDX-License-Identifier: Apache-2.0 OR MIT.
  */
 
 #ifndef SPIRV_CROSS_PARSED_IR_HPP
@@ -74,10 +74,17 @@ public:
 	// Special purpose lists which contain a union of types.
 	// This is needed so we can declare specialization constants and structs in an interleaved fashion,
 	// among other things.
-	// Constants can be of struct type, and struct array sizes can use specialization constants.
-	SmallVector<ID> ids_for_constant_or_type;
+	// Constants can be undef or of struct type, and struct array sizes can use specialization constants.
+	SmallVector<ID> ids_for_constant_undef_or_type;
 	SmallVector<ID> ids_for_constant_or_variable;
 
+	// We need to keep track of the type width of the Ops that feed the
+	// OpSwitch instruction, since OpSwitch doesn't contain the type in the
+	// instruction itself, and in some cases we need to cast the condition to
+	// wider types. We only need the width to do the branch fixup since the
+	// type check itself can be done at runtime.
+	std::unordered_map<ID, uint32_t> load_type_width;
+
 	// Declared capabilities and extensions in the SPIR-V module.
 	// Not really used except for reflection at the moment.
 	SmallVector<spv::Capability> declared_capabilities;
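
As a plain C++ sketch of why the width has to be tracked at all (names below are illustrative, not the parser's): OpSwitch encodes each case literal in as many 32-bit words as the selector type occupies, low-order word first, so a 64-bit selector stores two words per literal while the instruction never states the type.

#include <cstdint>
#include <vector>

struct SwitchCase
{
	uint64_t literal;
	uint32_t target_block;
};

// args layout: [0] = selector id, [1] = default block id, then (literal..., block) pairs.
// selector_width_bits is what load_type_width records for the value feeding OpSwitch.
std::vector<SwitchCase> parse_op_switch(const uint32_t *args, uint32_t length, uint32_t selector_width_bits)
{
	std::vector<SwitchCase> cases;
	uint32_t i = 2;
	while (i < length)
	{
		uint64_t literal = args[i++];
		if (selector_width_bits > 32)
			literal |= uint64_t(args[i++]) << 32;
		cases.push_back({ literal, args[i++] });
	}
	return cases;
}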

+ 77 - 0
ThirdParty/SpirvCross/spirv_cross_util.cpp

@@ -0,0 +1,77 @@
+/*
+ * Copyright 2015-2021 Arm Limited
+ * SPDX-License-Identifier: Apache-2.0 OR MIT
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * At your option, you may choose to accept this material under either:
+ *  1. The Apache License, Version 2.0, found at <http://www.apache.org/licenses/LICENSE-2.0>, or
+ *  2. The MIT License, found at <http://opensource.org/licenses/MIT>.
+ */
+
+#include "spirv_cross_util.hpp"
+#include "spirv_common.hpp"
+
+using namespace spv;
+using namespace SPIRV_CROSS_NAMESPACE;
+
+namespace spirv_cross_util
+{
+void rename_interface_variable(Compiler &compiler, const SmallVector<Resource> &resources, uint32_t location,
+                               const std::string &name)
+{
+	for (auto &v : resources)
+	{
+		if (!compiler.has_decoration(v.id, spv::DecorationLocation))
+			continue;
+
+		auto loc = compiler.get_decoration(v.id, spv::DecorationLocation);
+		if (loc != location)
+			continue;
+
+		auto &type = compiler.get_type(v.base_type_id);
+
+		// This is more of a friendly variant. If we need to rename interface variables, we might have to rename
+		// structs as well and make sure all the names match up.
+		if (type.basetype == SPIRType::Struct)
+		{
+			compiler.set_name(v.base_type_id, join("SPIRV_Cross_Interface_Location", location));
+			for (uint32_t i = 0; i < uint32_t(type.member_types.size()); i++)
+				compiler.set_member_name(v.base_type_id, i, join("InterfaceMember", i));
+		}
+
+		compiler.set_name(v.id, name);
+	}
+}
+
+void inherit_combined_sampler_bindings(Compiler &compiler)
+{
+	auto &samplers = compiler.get_combined_image_samplers();
+	for (auto &s : samplers)
+	{
+		if (compiler.has_decoration(s.image_id, spv::DecorationDescriptorSet))
+		{
+			uint32_t set = compiler.get_decoration(s.image_id, spv::DecorationDescriptorSet);
+			compiler.set_decoration(s.combined_id, spv::DecorationDescriptorSet, set);
+		}
+
+		if (compiler.has_decoration(s.image_id, spv::DecorationBinding))
+		{
+			uint32_t binding = compiler.get_decoration(s.image_id, spv::DecorationBinding);
+			compiler.set_decoration(s.combined_id, spv::DecorationBinding, binding);
+		}
+	}
+}
+} // namespace spirv_cross_util

+ 37 - 0
ThirdParty/SpirvCross/spirv_cross_util.hpp

@@ -0,0 +1,37 @@
+/*
+ * Copyright 2015-2021 Arm Limited
+ * SPDX-License-Identifier: Apache-2.0 OR MIT
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * At your option, you may choose to accept this material under either:
+ *  1. The Apache License, Version 2.0, found at <http://www.apache.org/licenses/LICENSE-2.0>, or
+ *  2. The MIT License, found at <http://opensource.org/licenses/MIT>.
+ */
+
+#ifndef SPIRV_CROSS_UTIL_HPP
+#define SPIRV_CROSS_UTIL_HPP
+
+#include "spirv_cross.hpp"
+
+namespace spirv_cross_util
+{
+void rename_interface_variable(SPIRV_CROSS_NAMESPACE::Compiler &compiler,
+                               const SPIRV_CROSS_NAMESPACE::SmallVector<SPIRV_CROSS_NAMESPACE::Resource> &resources,
+                               uint32_t location, const std::string &name);
+void inherit_combined_sampler_bindings(SPIRV_CROSS_NAMESPACE::Compiler &compiler);
+} // namespace spirv_cross_util
+
+#endif
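
A short usage sketch for the two helpers declared above; the SPIR-V words and the chosen names are placeholders, and the CompilerGLSL setup mirrors the constructor shown in the spirv_glsl.hpp diff below.

#include "spirv_cross_util.hpp"
#include "spirv_glsl.hpp"

#include <cstdint>
#include <string>
#include <utility>
#include <vector>

std::string compile_with_renamed_inputs(std::vector<uint32_t> spirv_words)
{
	spirv_cross::CompilerGLSL compiler(std::move(spirv_words));
	auto resources = compiler.get_shader_resources();

	// Give whatever stage input sits at location 0 a stable, known name.
	spirv_cross_util::rename_interface_variable(compiler, resources.stage_inputs, 0, "in_position");

	// Build combined image samplers, then copy set/binding decorations from the
	// source images onto the newly created combined IDs.
	compiler.build_combined_image_samplers();
	spirv_cross_util::inherit_combined_sampler_bindings(compiler);

	return compiler.compile();
}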

File diff suppressed because it is too large
+ 462 - 108
ThirdParty/SpirvCross/spirv_glsl.cpp


+ 168 - 34
ThirdParty/SpirvCross/spirv_glsl.hpp

@@ -1,5 +1,6 @@
 /*
  * Copyright 2015-2021 Arm Limited
+ * SPDX-License-Identifier: Apache-2.0 OR MIT
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -18,7 +19,6 @@
  * At your option, you may choose to accept this material under either:
  *  1. The Apache License, Version 2.0, found at <http://www.apache.org/licenses/LICENSE-2.0>, or
  *  2. The MIT License, found at <http://opensource.org/licenses/MIT>.
- * SPDX-License-Identifier: Apache-2.0 OR MIT.
  */
 
 #ifndef SPIRV_CROSS_GLSL_HPP
@@ -65,7 +65,8 @@ enum AccessChainFlagBits
 	ACCESS_CHAIN_PTR_CHAIN_BIT = 1 << 2,
 	ACCESS_CHAIN_SKIP_REGISTER_EXPRESSION_READ_BIT = 1 << 3,
 	ACCESS_CHAIN_LITERAL_MSB_FORCE_ID = 1 << 4,
-	ACCESS_CHAIN_FLATTEN_ALL_MEMBERS_BIT = 1 << 5
+	ACCESS_CHAIN_FLATTEN_ALL_MEMBERS_BIT = 1 << 5,
+	ACCESS_CHAIN_FORCE_COMPOSITE_BIT = 1 << 6
 };
 typedef uint32_t AccessChainFlags;
 
@@ -82,6 +83,11 @@ public:
 
 		// Debug option to always emit temporary variables for all expressions.
 		bool force_temporary = false;
+		// Debug option; can be increased in an attempt to work around SPIRV-Cross bugs temporarily.
+		// If this limit has to be increased, it points to an implementation bug.
+		// In certain scenarios, the maximum number of debug iterations may increase beyond this limit
+		// as long as we can prove we're making certain kinds of forward progress.
+		uint32_t force_recompile_max_debug_iterations = 3;
 
 		// If true, Vulkan GLSL features are used instead of GL-compatible features.
 		// Mostly useful for debugging SPIR-V files.
@@ -132,6 +138,22 @@ public:
 		// what happens on legacy GLSL targets for blocks and structs.
 		bool force_flattened_io_blocks = false;
 
+		// For opcodes where we have to perform explicit additional NaN checks, very ugly code is generated.
+		// If we opt in, these requirements are ignored.
+		// In opcodes like NClamp/NMin/NMax and FP compares, ignore NaN behavior.
+		// Use FClamp/FMin/FMax semantics for clamps and let the implementation choose ordered or unordered
+		// compares.
+		bool relax_nan_checks = false;
+
+		// Loading row-major matrices from UBOs on older AMD Windows OpenGL drivers is problematic.
+		// To load these types correctly, we must wrap them in a dummy function whose only purpose is to
+		// ensure the row_major decoration is actually respected.
+		// This workaround may cause significant performance degradation on some Android devices.
+		bool enable_row_major_load_workaround = true;
+
+		// If non-zero, controls layout(num_views = N) in; in GL_OVR_multiview2.
+		uint32_t ovr_multiview_view_count = 0;
+
 		enum Precision
 		{
 			DontCare,
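
To make the relax_nan_checks trade-off above concrete, a scalar sketch (illustrative C++, not generated shader code): strict GLSL.std.450 NMin must return the non-NaN operand when exactly one input is NaN, which costs extra isnan() checks, whereas the relaxed FMin-style form leaves that case to the implementation.

#include <cmath>

// Strict NMin: if exactly one operand is NaN, the other operand is the result.
float nmin_strict(float a, float b)
{
	if (std::isnan(a))
		return b;
	if (std::isnan(b))
		return a;
	return a < b ? a : b;
}

// Relaxed (FMin-like): NaN handling is left to the implementation,
// so the emitted code can collapse to a plain min().
float nmin_relaxed(float a, float b)
{
	return a < b ? a : b;
}
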
@@ -177,7 +199,8 @@ public:
 
 	// Redirect a subpassInput reading from input_attachment_index to instead load its value from
 	// the color attachment at location = color_location. Requires ESSL.
-	void remap_ext_framebuffer_fetch(uint32_t input_attachment_index, uint32_t color_location);
+	// If coherent, uses GL_EXT_shader_framebuffer_fetch; otherwise, uses the noncoherent variant.
+	void remap_ext_framebuffer_fetch(uint32_t input_attachment_index, uint32_t color_location, bool coherent);
 
 	explicit CompilerGLSL(std::vector<uint32_t> spirv_)
 	    : Compiler(std::move(spirv_))
@@ -235,6 +258,10 @@ public:
 	// require_extension("GL_KHR_my_extension");
 	void require_extension(const std::string &ext);
 
+	// Returns the list of required extensions. After compilation this will contain any other
+	// extensions that the compiler used automatically, in addition to the user-specified ones.
+	const SmallVector<std::string> &get_required_extensions() const;
+
 	// Legacy GLSL compatibility method.
 	// Takes a uniform or push constant variable and flattens it into a (i|u)vec4 array[N]; array instead.
 	// For this to work, all types in the block must be the same basic type, e.g. mixing vec2 and vec4 is fine, but
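
A usage sketch for the get_required_extensions() accessor added in this hunk (the compiler instance and the requested extension are just examples):

#include "spirv_glsl.hpp"

#include <cstdio>
#include <string>

void dump_required_extensions(spirv_cross::CompilerGLSL &compiler)
{
	// User-requested extension; compile() may add more on its own.
	compiler.require_extension("GL_EXT_mesh_shader");
	std::string glsl = compiler.compile(); // glsl now holds the cross-compiled source

	for (auto &ext : compiler.get_required_extensions())
		std::printf("#extension %s : require\n", ext.c_str());
}
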
@@ -250,6 +277,16 @@ public:
 	// - Images which are statically used at least once with Dref opcodes.
 	bool variable_is_depth_or_compare(VariableID id) const;
 
+	// If a shader output is active in this stage, but inactive in a subsequent stage,
+	// this can be signalled here. This can be used to work around certain cross-stage matching problems
+	// which plague MSL and HLSL in certain scenarios.
+	// An output which matches one of these will not be emitted in stage output interfaces, but rather treated as a private
+	// variable.
+	// This option is only meaningful for MSL and HLSL, since GLSL matches by location directly.
+	// Masking builtins only takes effect if the builtin in question is part of the stage output interface.
+	void mask_stage_output_by_location(uint32_t location, uint32_t component);
+	void mask_stage_output_by_builtin(spv::BuiltIn builtin);
+
 protected:
 	struct ShaderSubgroupSupportHelper
 	{
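
And a minimal sketch of the output-masking calls declared a few lines above, useful when a vertex-like stage writes outputs that the next stage never consumes on MSL/HLSL-style targets (the location and builtin are examples):

#include "spirv_glsl.hpp"

void mask_unused_outputs(spirv_cross::CompilerGLSL &compiler)
{
	// Demote the output at location 3, component 0, to a private variable.
	compiler.mask_stage_output_by_location(3, 0);

	// Same for a builtin the subsequent stage does not read.
	compiler.mask_stage_output_by_builtin(spv::BuiltInPointSize);
}
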
@@ -259,6 +296,7 @@ protected:
 			KHR_shader_subgroup_ballot,
 			KHR_shader_subgroup_basic,
 			KHR_shader_subgroup_vote,
+			KHR_shader_subgroup_arithmetic,
 			NV_gpu_shader_5,
 			NV_shader_thread_group,
 			NV_shader_thread_shuffle,
@@ -291,7 +329,18 @@ protected:
 			SubgroupInverseBallot_InclBitCount_ExclBitCout = 13,
 			SubgroupBallotBitExtract = 14,
 			SubgroupBallotBitCount = 15,
-
+			SubgroupArithmeticIAddReduce = 16,
+			SubgroupArithmeticIAddExclusiveScan = 17,
+			SubgroupArithmeticIAddInclusiveScan = 18,
+			SubgroupArithmeticFAddReduce = 19,
+			SubgroupArithmeticFAddExclusiveScan = 20,
+			SubgroupArithmeticFAddInclusiveScan = 21,
+			SubgroupArithmeticIMulReduce = 22,
+			SubgroupArithmeticIMulExclusiveScan = 23,
+			SubgroupArithmeticIMulInclusiveScan = 24,
+			SubgroupArithmeticFMulReduce = 25,
+			SubgroupArithmeticFMulExclusiveScan = 26,
+			SubgroupArithmeticFMulInclusiveScan = 27,
 			FeatureCount
 		};
 
@@ -325,9 +374,9 @@ protected:
 	};
 
 	// TODO remove this function when all subgroup ops are supported (or make it always return true)
-	static bool is_supported_subgroup_op_in_opengl(spv::Op op);
+	static bool is_supported_subgroup_op_in_opengl(spv::Op op, const uint32_t *ops);
 
-	void reset();
+	void reset(uint32_t iteration_count);
 	void emit_function(SPIRFunction &func, const Bitset &return_flags);
 
 	bool has_extension(const std::string &ext) const;
@@ -337,11 +386,23 @@ protected:
 	virtual void emit_function_prototype(SPIRFunction &func, const Bitset &return_flags);
 
 	SPIRBlock *current_emitting_block = nullptr;
-	SPIRBlock *current_emitting_switch = nullptr;
+	SmallVector<SPIRBlock *> current_emitting_switch_stack;
 	bool current_emitting_switch_fallthrough = false;
 
 	virtual void emit_instruction(const Instruction &instr);
+	struct TemporaryCopy
+	{
+		uint32_t dst_id;
+		uint32_t src_id;
+	};
+	TemporaryCopy handle_instruction_precision(const Instruction &instr);
 	void emit_block_instructions(SPIRBlock &block);
+	void emit_block_instructions_with_masked_debug(SPIRBlock &block);
+
+	// For relax_nan_checks.
+	GLSLstd450 get_remapped_glsl_op(GLSLstd450 std450_op) const;
+	spv::Op get_remapped_spirv_op(spv::Op op) const;
+
 	virtual void emit_glsl_op(uint32_t result_type, uint32_t result_id, uint32_t op, const uint32_t *args,
 	                          uint32_t count);
 	virtual void emit_spv_amd_shader_ballot_op(uint32_t result_type, uint32_t result_id, uint32_t op,
@@ -370,11 +431,14 @@ protected:
 	                                const std::string &qualifier = "", uint32_t base_offset = 0);
 	virtual void emit_struct_padding_target(const SPIRType &type);
 	virtual std::string image_type_glsl(const SPIRType &type, uint32_t id = 0);
-	std::string constant_expression(const SPIRConstant &c);
-	std::string constant_op_expression(const SPIRConstantOp &cop);
+	std::string constant_expression(const SPIRConstant &c,
+	                                bool inside_block_like_struct_scope = false,
+	                                bool inside_struct_scope = false);
+	virtual std::string constant_op_expression(const SPIRConstantOp &cop);
 	virtual std::string constant_expression_vector(const SPIRConstant &c, uint32_t vector);
 	virtual void emit_fixup();
 	virtual std::string variable_decl(const SPIRType &type, const std::string &name, uint32_t id = 0);
+	virtual bool variable_decl_is_remapped_storage(const SPIRVariable &var, spv::StorageClass storage) const;
 	virtual std::string to_func_call_arg(const SPIRFunction::Parameter &arg, uint32_t id);
 
 	struct TextureFunctionBaseArguments
@@ -403,7 +467,7 @@ protected:
 		TextureFunctionArguments() = default;
 		TextureFunctionBaseArguments base;
 		uint32_t coord = 0, coord_components = 0, dref = 0;
-		uint32_t grad_x = 0, grad_y = 0, lod = 0, coffset = 0, offset = 0;
+		uint32_t grad_x = 0, grad_y = 0, lod = 0, offset = 0;
 		uint32_t bias = 0, component = 0, sample = 0, sparse_texel = 0, min_lod = 0;
 		bool nonuniform_expression = false;
 	};
@@ -420,6 +484,8 @@ protected:
 
 	virtual bool builtin_translates_to_nonarray(spv::BuiltIn builtin) const;
 
+	virtual bool is_user_type_structured(uint32_t id) const;
+
 	void emit_copy_logical_type(uint32_t lhs_id, uint32_t lhs_type_id, uint32_t rhs_id, uint32_t rhs_type_id,
 	                            SmallVector<uint32_t> chain);
 
@@ -479,6 +545,8 @@ protected:
 	// on a single line separated by comma.
 	SmallVector<std::string> *redirect_statement = nullptr;
 	const SPIRBlock *current_continue_block = nullptr;
+	bool block_temporary_hoisting = false;
+	bool block_debug_directives = false;
 
 	void begin_scope();
 	void end_scope();
@@ -506,7 +574,8 @@ protected:
 	bool member_is_remapped_physical_type(const SPIRType &type, uint32_t index) const;
 	bool member_is_packed_physical_type(const SPIRType &type, uint32_t index) const;
 	virtual std::string convert_row_major_matrix(std::string exp_str, const SPIRType &exp_type,
-	                                             uint32_t physical_type_id, bool is_packed);
+	                                             uint32_t physical_type_id, bool is_packed,
+	                                             bool relaxed = false);
 
 	std::unordered_set<std::string> local_variable_names;
 	std::unordered_set<std::string> resource_names;
@@ -543,6 +612,7 @@ protected:
 		const char *uint16_t_literal_suffix = "us";
 		const char *nonuniform_qualifier = "nonuniformEXT";
 		const char *boolean_mix_function = "mix";
+		SPIRType::BaseType boolean_in_struct_remapped_type = SPIRType::Boolean;
 		bool swizzle_is_function = false;
 		bool shared_is_implied = false;
 		bool unsized_array_supported = true;
@@ -556,28 +626,37 @@ protected:
 		bool allow_precision_qualifiers = false;
 		bool can_swizzle_scalar = false;
 		bool force_gl_in_out_block = false;
+		bool force_merged_mesh_block = false;
 		bool can_return_array = true;
 		bool allow_truncated_access_chain = false;
 		bool supports_extensions = false;
 		bool supports_empty_struct = false;
 		bool array_is_value_type = true;
-		bool buffer_offset_array_is_value_type = true;
+		bool array_is_value_type_in_buffer_blocks = true;
 		bool comparison_image_samples_scalar = false;
 		bool native_pointers = false;
 		bool support_small_type_sampling_result = false;
 		bool support_case_fallthrough = true;
 		bool use_array_constructor = false;
 		bool needs_row_major_load_workaround = false;
+		bool support_pointer_to_pointer = false;
+		bool support_precise_qualifier = false;
+		bool support_64bit_switch = false;
+		bool workgroup_size_is_hidden = false;
+		bool requires_relaxed_precision_analysis = false;
+		bool implicit_c_integer_promotion_rules = false;
 	} backend;
 
 	void emit_struct(SPIRType &type);
 	void emit_resources();
 	void emit_extension_workarounds(spv::ExecutionModel model);
+	void emit_subgroup_arithmetic_workaround(const std::string &func, spv::Op op, spv::GroupOperation group_op);
+	void emit_polyfills(uint32_t polyfills, bool relaxed);
 	void emit_buffer_block_native(const SPIRVariable &var);
-	void emit_buffer_reference_block(SPIRType &type, bool forward_declaration);
+	void emit_buffer_reference_block(uint32_t type_id, bool forward_declaration);
 	void emit_buffer_block_legacy(const SPIRVariable &var);
 	void emit_buffer_block_flattened(const SPIRVariable &type);
-	void fixup_implicit_builtin_block_names();
+	void fixup_implicit_builtin_block_names(spv::ExecutionModel model);
 	void emit_declared_builtin_block(spv::StorageClass storage, spv::ExecutionModel model);
 	bool should_force_emit_builtin_block(spv::StorageClass storage);
 	void emit_push_constant_block_vulkan(const SPIRVariable &var);
@@ -591,6 +670,7 @@ protected:
 	void emit_block_chain(SPIRBlock &block);
 	void emit_hoisted_temporaries(SmallVector<std::pair<TypeID, ID>> &temporaries);
 	std::string constant_value_macro_name(uint32_t id);
+	int get_constant_mapping_to_workgroup_component(const SPIRConstant &constant) const;
 	void emit_constant(const SPIRConstant &constant);
 	void emit_specialization_constant_op(const SPIRConstantOp &constant);
 	std::string emit_continue_block(uint32_t continue_block, bool follow_true_block, bool follow_false_block);
@@ -609,12 +689,15 @@ protected:
 	bool should_suppress_usage_tracking(uint32_t id) const;
 	void emit_mix_op(uint32_t result_type, uint32_t id, uint32_t left, uint32_t right, uint32_t lerp);
 	void emit_nminmax_op(uint32_t result_type, uint32_t id, uint32_t op0, uint32_t op1, GLSLstd450 op);
+	void emit_emulated_ahyper_op(uint32_t result_type, uint32_t result_id, uint32_t op0, GLSLstd450 op);
 	bool to_trivial_mix_op(const SPIRType &type, std::string &op, uint32_t left, uint32_t right, uint32_t lerp);
 	void emit_quaternary_func_op(uint32_t result_type, uint32_t result_id, uint32_t op0, uint32_t op1, uint32_t op2,
 	                             uint32_t op3, const char *op);
 	void emit_trinary_func_op(uint32_t result_type, uint32_t result_id, uint32_t op0, uint32_t op1, uint32_t op2,
 	                          const char *op);
 	void emit_binary_func_op(uint32_t result_type, uint32_t result_id, uint32_t op0, uint32_t op1, const char *op);
+	void emit_atomic_func_op(uint32_t result_type, uint32_t result_id, uint32_t op0, uint32_t op1, const char *op);
+	void emit_atomic_func_op(uint32_t result_type, uint32_t result_id, uint32_t op0, uint32_t op1, uint32_t op2, const char *op);
 
 	void emit_unary_func_op_cast(uint32_t result_type, uint32_t result_id, uint32_t op0, const char *op,
 	                             SPIRType::BaseType input_type, SPIRType::BaseType expected_result_type);
@@ -637,7 +720,7 @@ protected:
 	void emit_unrolled_binary_op(uint32_t result_type, uint32_t result_id, uint32_t op0, uint32_t op1, const char *op,
 	                             bool negate, SPIRType::BaseType expected_type);
 	void emit_binary_op_cast(uint32_t result_type, uint32_t result_id, uint32_t op0, uint32_t op1, const char *op,
-	                         SPIRType::BaseType input_type, bool skip_cast_if_equal_type);
+	                         SPIRType::BaseType input_type, bool skip_cast_if_equal_type, bool implicit_integer_promotion);
 
 	SPIRType binary_op_bitcast_helper(std::string &cast_op0, std::string &cast_op1, SPIRType::BaseType &input_type,
 	                                  uint32_t op0, uint32_t op1, bool skip_cast_if_equal_type);
@@ -648,6 +731,8 @@ protected:
 	                                  uint32_t false_value);
 
 	void emit_unary_op(uint32_t result_type, uint32_t result_id, uint32_t op0, const char *op);
+	void emit_unary_op_cast(uint32_t result_type, uint32_t result_id, uint32_t op0, const char *op);
+	virtual void emit_mesh_tasks(SPIRBlock &block);
 	bool expression_is_forwarded(uint32_t id) const;
 	bool expression_suppresses_usage_tracking(uint32_t id) const;
 	bool expression_read_implies_multiple_reads(uint32_t id) const;
@@ -660,6 +745,10 @@ protected:
 	std::string access_chain_internal(uint32_t base, const uint32_t *indices, uint32_t count, AccessChainFlags flags,
 	                                  AccessChainMeta *meta);
 
+	spv::StorageClass get_expression_effective_storage_class(uint32_t ptr);
+	virtual bool access_chain_needs_stage_io_builtin_translation(uint32_t base);
+
+	virtual void check_physical_type_cast(std::string &expr, const SPIRType *type, uint32_t physical_type);
 	virtual void prepare_access_chain_for_scalar_access(std::string &expr, const SPIRType &type,
 	                                                    spv::StorageClass storage, bool &is_packed);
 
@@ -690,9 +779,10 @@ protected:
 	void emit_uninitialized_temporary(uint32_t type, uint32_t id);
 	SPIRExpression &emit_uninitialized_temporary_expression(uint32_t type, uint32_t id);
 	void append_global_func_args(const SPIRFunction &func, uint32_t index, SmallVector<std::string> &arglist);
+	std::string to_non_uniform_aware_expression(uint32_t id);
 	std::string to_expression(uint32_t id, bool register_expression_read = true);
-	std::string to_composite_constructor_expression(uint32_t id, bool uses_buffer_offset);
-	std::string to_rerolled_array_expression(const std::string &expr, const SPIRType &type);
+	std::string to_composite_constructor_expression(const SPIRType &parent_type, uint32_t id, bool block_like_type);
+	std::string to_rerolled_array_expression(const SPIRType &parent_type, const std::string &expr, const SPIRType &type);
 	std::string to_enclosed_expression(uint32_t id, bool register_expression_read = true);
 	std::string to_unpacked_expression(uint32_t id, bool register_expression_read = true);
 	std::string to_unpacked_row_major_matrix_expression(uint32_t id);
@@ -703,29 +793,30 @@ protected:
 	std::string to_extract_component_expression(uint32_t id, uint32_t index);
 	std::string to_extract_constant_composite_expression(uint32_t result_type, const SPIRConstant &c,
 	                                                     const uint32_t *chain, uint32_t length);
+	static bool needs_enclose_expression(const std::string &expr);
 	std::string enclose_expression(const std::string &expr);
 	std::string dereference_expression(const SPIRType &expression_type, const std::string &expr);
 	std::string address_of_expression(const std::string &expr);
 	void strip_enclosed_expression(std::string &expr);
 	std::string to_member_name(const SPIRType &type, uint32_t index);
-	virtual std::string to_member_reference(uint32_t base, const SPIRType &type, uint32_t index, bool ptr_chain);
+	virtual std::string to_member_reference(uint32_t base, const SPIRType &type, uint32_t index, bool ptr_chain_is_resolved);
 	std::string to_multi_member_reference(const SPIRType &type, const SmallVector<uint32_t> &indices);
 	std::string type_to_glsl_constructor(const SPIRType &type);
 	std::string argument_decl(const SPIRFunction::Parameter &arg);
 	virtual std::string to_qualifiers_glsl(uint32_t id);
-	void fixup_io_block_patch_qualifiers(const SPIRVariable &var);
+	void fixup_io_block_patch_primitive_qualifiers(const SPIRVariable &var);
 	void emit_output_variable_initializer(const SPIRVariable &var);
-	const char *to_precision_qualifiers_glsl(uint32_t id);
+	std::string to_precision_qualifiers_glsl(uint32_t id);
 	virtual const char *to_storage_qualifiers_glsl(const SPIRVariable &var);
-	const char *flags_to_qualifiers_glsl(const SPIRType &type, const Bitset &flags);
+	std::string flags_to_qualifiers_glsl(const SPIRType &type, const Bitset &flags);
 	const char *format_to_glsl(spv::ImageFormat format);
 	virtual std::string layout_for_member(const SPIRType &type, uint32_t index);
 	virtual std::string to_interpolation_qualifiers(const Bitset &flags);
 	std::string layout_for_variable(const SPIRVariable &variable);
 	std::string to_combined_image_sampler(VariableID image_id, VariableID samp_id);
 	virtual bool skip_argument(uint32_t id) const;
-	virtual void emit_array_copy(const std::string &lhs, uint32_t rhs_id, spv::StorageClass lhs_storage,
-	                             spv::StorageClass rhs_storage);
+	virtual bool emit_array_copy(const char *expr, uint32_t lhs_id, uint32_t rhs_id,
+	                             spv::StorageClass lhs_storage, spv::StorageClass rhs_storage);
 	virtual void emit_block_hints(const SPIRBlock &block);
 	virtual std::string to_initializer_expression(const SPIRVariable &var);
 	virtual std::string to_zero_initialized_expression(uint32_t type_id);
@@ -740,6 +831,7 @@ protected:
 	uint32_t type_to_packed_alignment(const SPIRType &type, const Bitset &flags, BufferPackingStandard packing);
 	uint32_t type_to_packed_array_stride(const SPIRType &type, const Bitset &flags, BufferPackingStandard packing);
 	uint32_t type_to_packed_size(const SPIRType &type, const Bitset &flags, BufferPackingStandard packing);
+	uint32_t type_to_location_count(const SPIRType &type) const;
 
 	std::string bitcast_glsl(const SPIRType &result_type, uint32_t arg);
 	virtual std::string bitcast_glsl_op(const SPIRType &result_type, const SPIRType &argument_type);
@@ -763,6 +855,10 @@ protected:
 	void replace_fragment_outputs();
 	std::string legacy_tex_op(const std::string &op, const SPIRType &imgtype, uint32_t id);
 
+	void forward_relaxed_precision(uint32_t dst_id, const uint32_t *args, uint32_t length);
+	void analyze_precision_requirements(uint32_t type_id, uint32_t dst_id, uint32_t *args, uint32_t length);
+	Options::Precision analyze_expression_precision(const uint32_t *args, uint32_t length) const;
+
 	uint32_t indent = 0;
 
 	std::unordered_set<uint32_t> emitted_functions;
@@ -816,10 +912,25 @@ protected:
 		return !options.es && options.version < 130;
 	}
 
-	bool requires_transpose_2x2 = false;
-	bool requires_transpose_3x3 = false;
-	bool requires_transpose_4x4 = false;
+	enum Polyfill : uint32_t
+	{
+		PolyfillTranspose2x2 = 1 << 0,
+		PolyfillTranspose3x3 = 1 << 1,
+		PolyfillTranspose4x4 = 1 << 2,
+		PolyfillDeterminant2x2 = 1 << 3,
+		PolyfillDeterminant3x3 = 1 << 4,
+		PolyfillDeterminant4x4 = 1 << 5,
+		PolyfillMatrixInverse2x2 = 1 << 6,
+		PolyfillMatrixInverse3x3 = 1 << 7,
+		PolyfillMatrixInverse4x4 = 1 << 8,
+	};
+
+	uint32_t required_polyfills = 0;
+	uint32_t required_polyfills_relaxed = 0;
+	void require_polyfill(Polyfill polyfill, bool relaxed);
+
 	bool ray_tracing_is_khr = false;
+	bool barycentric_is_nv = false;
 	void ray_tracing_khr_fixup_locations();
 
 	bool args_will_forward(uint32_t id, const uint32_t *args, uint32_t num_args, bool pure);
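
The three requires_transpose_* booleans collapse into the Polyfill bitmask above; a toy sketch of the pattern (the enum values mirror the diff, the tracker type is illustrative):

#include <cstdint>

enum Polyfill : uint32_t
{
	PolyfillTranspose2x2 = 1 << 0,
	PolyfillDeterminant3x3 = 1 << 4,
	PolyfillMatrixInverse4x4 = 1 << 8,
};

struct PolyfillTracker
{
	uint32_t required = 0;
	uint32_t required_relaxed = 0;

	void require(Polyfill p, bool relaxed)
	{
		(relaxed ? required_relaxed : required) |= p;
	}

	bool needs(Polyfill p, bool relaxed) const
	{
		return ((relaxed ? required_relaxed : required) & p) != 0;
	}
};
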
@@ -837,7 +948,9 @@ protected:
 
 	// GL_EXT_shader_framebuffer_fetch support.
 	std::vector<std::pair<uint32_t, uint32_t>> subpass_to_framebuffer_fetch_attachment;
-	std::unordered_set<uint32_t> inout_color_attachments;
+	std::vector<std::pair<uint32_t, bool>> inout_color_attachments;
+	bool location_is_framebuffer_fetch(uint32_t location) const;
+	bool location_is_non_coherent_framebuffer_fetch(uint32_t location) const;
 	bool subpass_input_is_framebuffer_fetch(uint32_t id) const;
 	void emit_inout_fragment_outputs_copy_to_subpass_inputs();
 	const SPIRVariable *find_subpass_input_by_attachment_index(uint32_t index) const;
@@ -851,8 +964,14 @@ protected:
 
 	void check_function_call_constraints(const uint32_t *args, uint32_t length);
 	void handle_invalid_expression(uint32_t id);
+	void force_temporary_and_recompile(uint32_t id);
 	void find_static_extensions();
 
+	uint32_t consume_temporary_in_precision_context(uint32_t type_id, uint32_t id, Options::Precision precision);
+	std::unordered_map<uint32_t, uint32_t> temporary_to_mirror_precision_alias;
+	std::unordered_set<uint32_t> composite_insert_overwritten;
+	std::unordered_set<uint32_t> block_composite_insert_overwrite;
+
 	std::string emit_for_loop_initializers(const SPIRBlock &block);
 	void emit_while_loop_initializers(const SPIRBlock &block);
 	bool for_loop_initializers_are_same_type(const SPIRBlock &block);
@@ -861,8 +980,6 @@ protected:
 
 	bool type_is_empty(const SPIRType &type);
 
-	virtual void declare_undefined_values();
-
 	bool can_use_io_location(spv::StorageClass storage, bool block);
 	const Instruction *get_next_instruction_in_block(const Instruction &instr);
 	static uint32_t mask_relevant_memory_semantics(uint32_t semantics);
@@ -876,10 +993,11 @@ protected:
 	// Builtins in GLSL always have a specific signedness, but the SPIR-V can declare them
 	// as either unsigned or signed.
 	// Sometimes we will need to automatically perform casts on load and store to make this work.
-	virtual void cast_to_builtin_store(uint32_t target_id, std::string &expr, const SPIRType &expr_type);
-	virtual void cast_from_builtin_load(uint32_t source_id, std::string &expr, const SPIRType &expr_type);
+	virtual void cast_to_variable_store(uint32_t target_id, std::string &expr, const SPIRType &expr_type);
+	virtual void cast_from_variable_load(uint32_t source_id, std::string &expr, const SPIRType &expr_type);
 	void unroll_array_from_complex_load(uint32_t target_id, uint32_t source_id, std::string &expr);
-	void convert_non_uniform_expression(const SPIRType &type, std::string &expr);
+	bool unroll_array_to_complex_store(uint32_t target_id, uint32_t source_id);
+	void convert_non_uniform_expression(std::string &expr, uint32_t ptr_id);
 
 	void handle_store_to_invariant_variable(uint32_t store_id, uint32_t value_id);
 	void disallow_forwarding_in_expression_chain(const SPIRExpression &expr);
@@ -897,13 +1015,29 @@ protected:
 
 	void fixup_type_alias();
 	void reorder_type_alias();
-
-	void propagate_nonuniform_qualifier(uint32_t id);
+	void fixup_anonymous_struct_names();
+	void fixup_anonymous_struct_names(std::unordered_set<uint32_t> &visited, const SPIRType &type);
 
 	static const char *vector_swizzle(int vecsize, int index);
 
+	bool is_stage_output_location_masked(uint32_t location, uint32_t component) const;
+	bool is_stage_output_builtin_masked(spv::BuiltIn builtin) const;
+	bool is_stage_output_variable_masked(const SPIRVariable &var) const;
+	bool is_stage_output_block_member_masked(const SPIRVariable &var, uint32_t index, bool strip_array) const;
+	bool is_per_primitive_variable(const SPIRVariable &var) const;
+	uint32_t get_accumulated_member_location(const SPIRVariable &var, uint32_t mbr_idx, bool strip_array) const;
+	uint32_t get_declared_member_location(const SPIRVariable &var, uint32_t mbr_idx, bool strip_array) const;
+	std::unordered_set<LocationComponentPair, InternalHasher> masked_output_locations;
+	std::unordered_set<uint32_t> masked_output_builtins;
+
 private:
 	void init();
+
+	SmallVector<ConstantID> get_composite_constant_ids(ConstantID const_id);
+	void fill_composite_constant(SPIRConstant &constant, TypeID type_id, const SmallVector<ConstantID> &initializers);
+	void set_composite_constant(ConstantID const_id, TypeID type_id, const SmallVector<ConstantID> &initializers);
+	TypeID get_composite_member_type(TypeID type_id, uint32_t member_idx);
+	std::unordered_map<uint32_t, SmallVector<ConstantID>> const_composite_insert_ids;
 };
 } // namespace SPIRV_CROSS_NAMESPACE
 

+ 6771 - 0
ThirdParty/SpirvCross/spirv_hlsl.cpp

@@ -0,0 +1,6771 @@
+/*
+ * Copyright 2016-2021 Robert Konrad
+ * SPDX-License-Identifier: Apache-2.0 OR MIT
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ *
+ */
+
+/*
+ * At your option, you may choose to accept this material under either:
+ *  1. The Apache License, Version 2.0, found at <http://www.apache.org/licenses/LICENSE-2.0>, or
+ *  2. The MIT License, found at <http://opensource.org/licenses/MIT>.
+ */
+
+#include "spirv_hlsl.hpp"
+#include "GLSL.std.450.h"
+#include <algorithm>
+#include <assert.h>
+
+using namespace spv;
+using namespace SPIRV_CROSS_NAMESPACE;
+using namespace std;
+
+enum class ImageFormatNormalizedState
+{
+	None = 0,
+	Unorm = 1,
+	Snorm = 2
+};
+
+static ImageFormatNormalizedState image_format_to_normalized_state(ImageFormat fmt)
+{
+	switch (fmt)
+	{
+	case ImageFormatR8:
+	case ImageFormatR16:
+	case ImageFormatRg8:
+	case ImageFormatRg16:
+	case ImageFormatRgba8:
+	case ImageFormatRgba16:
+	case ImageFormatRgb10A2:
+		return ImageFormatNormalizedState::Unorm;
+
+	case ImageFormatR8Snorm:
+	case ImageFormatR16Snorm:
+	case ImageFormatRg8Snorm:
+	case ImageFormatRg16Snorm:
+	case ImageFormatRgba8Snorm:
+	case ImageFormatRgba16Snorm:
+		return ImageFormatNormalizedState::Snorm;
+
+	default:
+		break;
+	}
+
+	return ImageFormatNormalizedState::None;
+}
+
+static unsigned image_format_to_components(ImageFormat fmt)
+{
+	switch (fmt)
+	{
+	case ImageFormatR8:
+	case ImageFormatR16:
+	case ImageFormatR8Snorm:
+	case ImageFormatR16Snorm:
+	case ImageFormatR16f:
+	case ImageFormatR32f:
+	case ImageFormatR8i:
+	case ImageFormatR16i:
+	case ImageFormatR32i:
+	case ImageFormatR8ui:
+	case ImageFormatR16ui:
+	case ImageFormatR32ui:
+		return 1;
+
+	case ImageFormatRg8:
+	case ImageFormatRg16:
+	case ImageFormatRg8Snorm:
+	case ImageFormatRg16Snorm:
+	case ImageFormatRg16f:
+	case ImageFormatRg32f:
+	case ImageFormatRg8i:
+	case ImageFormatRg16i:
+	case ImageFormatRg32i:
+	case ImageFormatRg8ui:
+	case ImageFormatRg16ui:
+	case ImageFormatRg32ui:
+		return 2;
+
+	case ImageFormatR11fG11fB10f:
+		return 3;
+
+	case ImageFormatRgba8:
+	case ImageFormatRgba16:
+	case ImageFormatRgb10A2:
+	case ImageFormatRgba8Snorm:
+	case ImageFormatRgba16Snorm:
+	case ImageFormatRgba16f:
+	case ImageFormatRgba32f:
+	case ImageFormatRgba8i:
+	case ImageFormatRgba16i:
+	case ImageFormatRgba32i:
+	case ImageFormatRgba8ui:
+	case ImageFormatRgba16ui:
+	case ImageFormatRgba32ui:
+	case ImageFormatRgb10a2ui:
+		return 4;
+
+	case ImageFormatUnknown:
+		return 4; // Assume 4.
+
+	default:
+		SPIRV_CROSS_THROW("Unrecognized typed image format.");
+	}
+}
+
+static string image_format_to_type(ImageFormat fmt, SPIRType::BaseType basetype)
+{
+	switch (fmt)
+	{
+	case ImageFormatR8:
+	case ImageFormatR16:
+		if (basetype != SPIRType::Float)
+			SPIRV_CROSS_THROW("Mismatch in image type and base type of image.");
+		return "unorm float";
+	case ImageFormatRg8:
+	case ImageFormatRg16:
+		if (basetype != SPIRType::Float)
+			SPIRV_CROSS_THROW("Mismatch in image type and base type of image.");
+		return "unorm float2";
+	case ImageFormatRgba8:
+	case ImageFormatRgba16:
+		if (basetype != SPIRType::Float)
+			SPIRV_CROSS_THROW("Mismatch in image type and base type of image.");
+		return "unorm float4";
+	case ImageFormatRgb10A2:
+		if (basetype != SPIRType::Float)
+			SPIRV_CROSS_THROW("Mismatch in image type and base type of image.");
+		return "unorm float4";
+
+	case ImageFormatR8Snorm:
+	case ImageFormatR16Snorm:
+		if (basetype != SPIRType::Float)
+			SPIRV_CROSS_THROW("Mismatch in image type and base type of image.");
+		return "snorm float";
+	case ImageFormatRg8Snorm:
+	case ImageFormatRg16Snorm:
+		if (basetype != SPIRType::Float)
+			SPIRV_CROSS_THROW("Mismatch in image type and base type of image.");
+		return "snorm float2";
+	case ImageFormatRgba8Snorm:
+	case ImageFormatRgba16Snorm:
+		if (basetype != SPIRType::Float)
+			SPIRV_CROSS_THROW("Mismatch in image type and base type of image.");
+		return "snorm float4";
+
+	case ImageFormatR16f:
+	case ImageFormatR32f:
+		if (basetype != SPIRType::Float)
+			SPIRV_CROSS_THROW("Mismatch in image type and base type of image.");
+		return "float";
+	case ImageFormatRg16f:
+	case ImageFormatRg32f:
+		if (basetype != SPIRType::Float)
+			SPIRV_CROSS_THROW("Mismatch in image type and base type of image.");
+		return "float2";
+	case ImageFormatRgba16f:
+	case ImageFormatRgba32f:
+		if (basetype != SPIRType::Float)
+			SPIRV_CROSS_THROW("Mismatch in image type and base type of image.");
+		return "float4";
+
+	case ImageFormatR11fG11fB10f:
+		if (basetype != SPIRType::Float)
+			SPIRV_CROSS_THROW("Mismatch in image type and base type of image.");
+		return "float3";
+
+	case ImageFormatR8i:
+	case ImageFormatR16i:
+	case ImageFormatR32i:
+		if (basetype != SPIRType::Int)
+			SPIRV_CROSS_THROW("Mismatch in image type and base type of image.");
+		return "int";
+	case ImageFormatRg8i:
+	case ImageFormatRg16i:
+	case ImageFormatRg32i:
+		if (basetype != SPIRType::Int)
+			SPIRV_CROSS_THROW("Mismatch in image type and base type of image.");
+		return "int2";
+	case ImageFormatRgba8i:
+	case ImageFormatRgba16i:
+	case ImageFormatRgba32i:
+		if (basetype != SPIRType::Int)
+			SPIRV_CROSS_THROW("Mismatch in image type and base type of image.");
+		return "int4";
+
+	case ImageFormatR8ui:
+	case ImageFormatR16ui:
+	case ImageFormatR32ui:
+		if (basetype != SPIRType::UInt)
+			SPIRV_CROSS_THROW("Mismatch in image type and base type of image.");
+		return "uint";
+	case ImageFormatRg8ui:
+	case ImageFormatRg16ui:
+	case ImageFormatRg32ui:
+		if (basetype != SPIRType::UInt)
+			SPIRV_CROSS_THROW("Mismatch in image type and base type of image.");
+		return "uint2";
+	case ImageFormatRgba8ui:
+	case ImageFormatRgba16ui:
+	case ImageFormatRgba32ui:
+		if (basetype != SPIRType::UInt)
+			SPIRV_CROSS_THROW("Mismatch in image type and base type of image.");
+		return "uint4";
+	case ImageFormatRgb10a2ui:
+		if (basetype != SPIRType::UInt)
+			SPIRV_CROSS_THROW("Mismatch in image type and base type of image.");
+		return "uint4";
+
+	case ImageFormatUnknown:
+		switch (basetype)
+		{
+		case SPIRType::Float:
+			return "float4";
+		case SPIRType::Int:
+			return "int4";
+		case SPIRType::UInt:
+			return "uint4";
+		default:
+			SPIRV_CROSS_THROW("Unsupported base type for image.");
+		}
+
+	default:
+		SPIRV_CROSS_THROW("Unrecognized typed image format.");
+	}
+}
+
+string CompilerHLSL::image_type_hlsl_modern(const SPIRType &type, uint32_t id)
+{
+	auto &imagetype = get<SPIRType>(type.image.type);
+	const char *dim = nullptr;
+	bool typed_load = false;
+	uint32_t components = 4;
+
+	bool force_image_srv = hlsl_options.nonwritable_uav_texture_as_srv && has_decoration(id, DecorationNonWritable);
+
+	switch (type.image.dim)
+	{
+	case Dim1D:
+		typed_load = type.image.sampled == 2;
+		dim = "1D";
+		break;
+	case Dim2D:
+		typed_load = type.image.sampled == 2;
+		dim = "2D";
+		break;
+	case Dim3D:
+		typed_load = type.image.sampled == 2;
+		dim = "3D";
+		break;
+	case DimCube:
+		if (type.image.sampled == 2)
+			SPIRV_CROSS_THROW("RWTextureCube does not exist in HLSL.");
+		dim = "Cube";
+		break;
+	case DimRect:
+		SPIRV_CROSS_THROW("Rectangle texture support is not yet implemented for HLSL."); // TODO
+	case DimBuffer:
+		if (type.image.sampled == 1)
+			return join("Buffer<", type_to_glsl(imagetype), components, ">");
+		else if (type.image.sampled == 2)
+		{
+			if (interlocked_resources.count(id))
+				return join("RasterizerOrderedBuffer<", image_format_to_type(type.image.format, imagetype.basetype),
+				            ">");
+
+			typed_load = !force_image_srv && type.image.sampled == 2;
+
+			const char *rw = force_image_srv ? "" : "RW";
+			return join(rw, "Buffer<",
+			            typed_load ? image_format_to_type(type.image.format, imagetype.basetype) :
+			                         join(type_to_glsl(imagetype), components),
+			            ">");
+		}
+		else
+			SPIRV_CROSS_THROW("Sampler buffers must be either sampled or unsampled. Cannot deduce in runtime.");
+	case DimSubpassData:
+		dim = "2D";
+		typed_load = false;
+		break;
+	default:
+		SPIRV_CROSS_THROW("Invalid dimension.");
+	}
+	const char *arrayed = type.image.arrayed ? "Array" : "";
+	const char *ms = type.image.ms ? "MS" : "";
+	const char *rw = typed_load && !force_image_srv ? "RW" : "";
+
+	if (force_image_srv)
+		typed_load = false;
+
+	if (typed_load && interlocked_resources.count(id))
+		rw = "RasterizerOrdered";
+
+	return join(rw, "Texture", dim, ms, arrayed, "<",
+	            typed_load ? image_format_to_type(type.image.format, imagetype.basetype) :
+	                         join(type_to_glsl(imagetype), components),
+	            ">");
+}
+
+string CompilerHLSL::image_type_hlsl_legacy(const SPIRType &type, uint32_t /*id*/)
+{
+	auto &imagetype = get<SPIRType>(type.image.type);
+	string res;
+
+	switch (imagetype.basetype)
+	{
+	case SPIRType::Int:
+		res = "i";
+		break;
+	case SPIRType::UInt:
+		res = "u";
+		break;
+	default:
+		break;
+	}
+
+	if (type.basetype == SPIRType::Image && type.image.dim == DimSubpassData)
+		return res + "subpassInput" + (type.image.ms ? "MS" : "");
+
+	// If we're emulating subpassInput with samplers, force sampler2D
+	// so we don't have to specify format.
+	if (type.basetype == SPIRType::Image && type.image.dim != DimSubpassData)
+	{
+		// Sampler buffers are always declared as samplerBuffer even though they might be separate images in the SPIR-V.
+		if (type.image.dim == DimBuffer && type.image.sampled == 1)
+			res += "sampler";
+		else
+			res += type.image.sampled == 2 ? "image" : "texture";
+	}
+	else
+		res += "sampler";
+
+	switch (type.image.dim)
+	{
+	case Dim1D:
+		res += "1D";
+		break;
+	case Dim2D:
+		res += "2D";
+		break;
+	case Dim3D:
+		res += "3D";
+		break;
+	case DimCube:
+		res += "CUBE";
+		break;
+
+	case DimBuffer:
+		res += "Buffer";
+		break;
+
+	case DimSubpassData:
+		res += "2D";
+		break;
+	default:
+		SPIRV_CROSS_THROW("Only 1D, 2D, 3D, Buffer, InputTarget and Cube textures supported.");
+	}
+
+	if (type.image.ms)
+		res += "MS";
+	if (type.image.arrayed)
+		res += "Array";
+
+	return res;
+}
+
+string CompilerHLSL::image_type_hlsl(const SPIRType &type, uint32_t id)
+{
+	if (hlsl_options.shader_model <= 30)
+		return image_type_hlsl_legacy(type, id);
+	else
+		return image_type_hlsl_modern(type, id);
+}
+
+// The optional id parameter indicates the object whose type we are trying
+// to find the description for. Most type descriptions do not
+// depend on a specific object's use of that type.
+string CompilerHLSL::type_to_glsl(const SPIRType &type, uint32_t id)
+{
+	// Ignore the pointer type since GLSL doesn't have pointers.
+
+	switch (type.basetype)
+	{
+	case SPIRType::Struct:
+		// Need OpName lookup here to get a "sensible" name for a struct.
+		if (backend.explicit_struct_type)
+			return join("struct ", to_name(type.self));
+		else
+			return to_name(type.self);
+
+	case SPIRType::Image:
+	case SPIRType::SampledImage:
+		return image_type_hlsl(type, id);
+
+	case SPIRType::Sampler:
+		return comparison_ids.count(id) ? "SamplerComparisonState" : "SamplerState";
+
+	case SPIRType::Void:
+		return "void";
+
+	default:
+		break;
+	}
+
+	if (type.vecsize == 1 && type.columns == 1) // Scalar builtin
+	{
+		switch (type.basetype)
+		{
+		case SPIRType::Boolean:
+			return "bool";
+		case SPIRType::Int:
+			return backend.basic_int_type;
+		case SPIRType::UInt:
+			return backend.basic_uint_type;
+		case SPIRType::AtomicCounter:
+			return "atomic_uint";
+		case SPIRType::Half:
+			if (hlsl_options.enable_16bit_types)
+				return "half";
+			else
+				return "min16float";
+		case SPIRType::Short:
+			if (hlsl_options.enable_16bit_types)
+				return "int16_t";
+			else
+				return "min16int";
+		case SPIRType::UShort:
+			if (hlsl_options.enable_16bit_types)
+				return "uint16_t";
+			else
+				return "min16uint";
+		case SPIRType::Float:
+			return "float";
+		case SPIRType::Double:
+			return "double";
+		case SPIRType::Int64:
+			if (hlsl_options.shader_model < 60)
+				SPIRV_CROSS_THROW("64-bit integers only supported in SM 6.0.");
+			return "int64_t";
+		case SPIRType::UInt64:
+			if (hlsl_options.shader_model < 60)
+				SPIRV_CROSS_THROW("64-bit integers only supported in SM 6.0.");
+			return "uint64_t";
+		case SPIRType::AccelerationStructure:
+			return "RaytracingAccelerationStructure";
+		case SPIRType::RayQuery:
+			return "RayQuery<RAY_FLAG_NONE>";
+		default:
+			return "???";
+		}
+	}
+	else if (type.vecsize > 1 && type.columns == 1) // Vector builtin
+	{
+		switch (type.basetype)
+		{
+		case SPIRType::Boolean:
+			return join("bool", type.vecsize);
+		case SPIRType::Int:
+			return join("int", type.vecsize);
+		case SPIRType::UInt:
+			return join("uint", type.vecsize);
+		case SPIRType::Half:
+			return join(hlsl_options.enable_16bit_types ? "half" : "min16float", type.vecsize);
+		case SPIRType::Short:
+			return join(hlsl_options.enable_16bit_types ? "int16_t" : "min16int", type.vecsize);
+		case SPIRType::UShort:
+			return join(hlsl_options.enable_16bit_types ? "uint16_t" : "min16uint", type.vecsize);
+		case SPIRType::Float:
+			return join("float", type.vecsize);
+		case SPIRType::Double:
+			return join("double", type.vecsize);
+		case SPIRType::Int64:
+			return join("i64vec", type.vecsize);
+		case SPIRType::UInt64:
+			return join("u64vec", type.vecsize);
+		default:
+			return "???";
+		}
+	}
+	else
+	{
+		switch (type.basetype)
+		{
+		case SPIRType::Boolean:
+			return join("bool", type.columns, "x", type.vecsize);
+		case SPIRType::Int:
+			return join("int", type.columns, "x", type.vecsize);
+		case SPIRType::UInt:
+			return join("uint", type.columns, "x", type.vecsize);
+		case SPIRType::Half:
+			return join(hlsl_options.enable_16bit_types ? "half" : "min16float", type.columns, "x", type.vecsize);
+		case SPIRType::Short:
+			return join(hlsl_options.enable_16bit_types ? "int16_t" : "min16int", type.columns, "x", type.vecsize);
+		case SPIRType::UShort:
+			return join(hlsl_options.enable_16bit_types ? "uint16_t" : "min16uint", type.columns, "x", type.vecsize);
+		case SPIRType::Float:
+			return join("float", type.columns, "x", type.vecsize);
+		case SPIRType::Double:
+			return join("double", type.columns, "x", type.vecsize);
+		// Matrix types not supported for int64/uint64.
+		default:
+			return "???";
+		}
+	}
+}
+
+void CompilerHLSL::emit_header()
+{
+	for (auto &header : header_lines)
+		statement(header);
+
+	if (header_lines.size() > 0)
+	{
+		statement("");
+	}
+}
+
+void CompilerHLSL::emit_interface_block_globally(const SPIRVariable &var)
+{
+	add_resource_name(var.self);
+
+	// The global copies of I/O variables should not contain interpolation qualifiers.
+	// These are emitted inside the interface structs.
+	auto &flags = ir.meta[var.self].decoration.decoration_flags;
+	auto old_flags = flags;
+	flags.reset();
+	statement("static ", variable_decl(var), ";");
+	flags = old_flags;
+}
+
+const char *CompilerHLSL::to_storage_qualifiers_glsl(const SPIRVariable &var)
+{
+	// Input and output variables are handled specially in HLSL backend.
+	// The variables are declared as global, private variables, and do not need any qualifiers.
+	if (var.storage == StorageClassUniformConstant || var.storage == StorageClassUniform ||
+	    var.storage == StorageClassPushConstant)
+	{
+		return "uniform ";
+	}
+
+	return "";
+}
+
+void CompilerHLSL::emit_builtin_outputs_in_struct()
+{
+	auto &execution = get_entry_point();
+
+	bool legacy = hlsl_options.shader_model <= 30;
+	active_output_builtins.for_each_bit([&](uint32_t i) {
+		const char *type = nullptr;
+		const char *semantic = nullptr;
+		auto builtin = static_cast<BuiltIn>(i);
+		switch (builtin)
+		{
+		case BuiltInPosition:
+			type = is_position_invariant() && backend.support_precise_qualifier ? "precise float4" : "float4";
+			semantic = legacy ? "POSITION" : "SV_Position";
+			break;
+
+		case BuiltInSampleMask:
+			if (hlsl_options.shader_model < 41 || execution.model != ExecutionModelFragment)
+				SPIRV_CROSS_THROW("Sample Mask output is only supported in PS 4.1 or higher.");
+			type = "uint";
+			semantic = "SV_Coverage";
+			break;
+
+		case BuiltInFragDepth:
+			type = "float";
+			if (legacy)
+			{
+				semantic = "DEPTH";
+			}
+			else
+			{
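+				// Conservative depth: the DepthGreater/DepthLess execution modes map to SV_DepthGreaterEqual/SV_DepthLessEqual on SM 5.0+.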
+				if (hlsl_options.shader_model >= 50 && execution.flags.get(ExecutionModeDepthGreater))
+					semantic = "SV_DepthGreaterEqual";
+				else if (hlsl_options.shader_model >= 50 && execution.flags.get(ExecutionModeDepthLess))
+					semantic = "SV_DepthLessEqual";
+				else
+					semantic = "SV_Depth";
+			}
+			break;
+
+		case BuiltInClipDistance:
+		{
+			static const char *types[] = { "float", "float2", "float3", "float4" };
+
+			// HLSL is a bit weird here, use SV_ClipDistance0, SV_ClipDistance1 and so on with vectors.
+			if (execution.model == ExecutionModelMeshEXT)
+			{
+				if (clip_distance_count > 4)
+					SPIRV_CROSS_THROW("Clip distance count > 4 not supported for mesh shaders.");
+
+				if (clip_distance_count == 1)
+				{
+					// Avoids having to hack up access_chain code. Makes it trivially indexable.
+					statement("float gl_ClipDistance[1] : SV_ClipDistance;");
+				}
+				else
+				{
+					// Replace array with vector directly, avoids any weird fixup path.
+					statement(types[clip_distance_count - 1], " gl_ClipDistance : SV_ClipDistance;");
+				}
+			}
+			else
+			{
+				for (uint32_t clip = 0; clip < clip_distance_count; clip += 4)
+				{
+					uint32_t to_declare = clip_distance_count - clip;
+					if (to_declare > 4)
+						to_declare = 4;
+
+					uint32_t semantic_index = clip / 4;
+
+					statement(types[to_declare - 1], " ", builtin_to_glsl(builtin, StorageClassOutput), semantic_index,
+					          " : SV_ClipDistance", semantic_index, ";");
+				}
+			}
+			break;
+		}
+
+		case BuiltInCullDistance:
+		{
+			static const char *types[] = { "float", "float2", "float3", "float4" };
+
+			// HLSL is a bit weird here, use SV_CullDistance0, SV_CullDistance1 and so on with vectors.
+			if (execution.model == ExecutionModelMeshEXT)
+			{
+				if (cull_distance_count > 4)
+					SPIRV_CROSS_THROW("Cull distance count > 4 not supported for mesh shaders.");
+
+				if (cull_distance_count == 1)
+				{
+					// Avoids having to hack up access_chain code. Makes it trivially indexable.
+					statement("float gl_CullDistance[1] : SV_CullDistance;");
+				}
+				else
+				{
+					// Replace array with vector directly, avoids any weird fixup path.
+					statement(types[cull_distance_count - 1], " gl_CullDistance : SV_CullDistance;");
+				}
+			}
+			else
+			{
+				for (uint32_t cull = 0; cull < cull_distance_count; cull += 4)
+				{
+					uint32_t to_declare = cull_distance_count - cull;
+					if (to_declare > 4)
+						to_declare = 4;
+
+					uint32_t semantic_index = cull / 4;
+
+					statement(types[to_declare - 1], " ", builtin_to_glsl(builtin, StorageClassOutput), semantic_index,
+					          " : SV_CullDistance", semantic_index, ";");
+				}
+			}
+			break;
+		}
+
+		case BuiltInPointSize:
+			// If point_size_compat is enabled, just ignore PointSize.
+			// PointSize does not exist in HLSL, but some code bases might want to be able to use these shaders,
+			// even if it means working around the missing feature.
+			if (legacy)
+			{
+				type = "float";
+				semantic = "PSIZE";
+			}
+			else if (!hlsl_options.point_size_compat)
+				SPIRV_CROSS_THROW("Unsupported builtin in HLSL.");
+			break;
+
+		case BuiltInLayer:
+		case BuiltInPrimitiveId:
+		case BuiltInViewportIndex:
+		case BuiltInPrimitiveShadingRateKHR:
+		case BuiltInCullPrimitiveEXT:
+			// per-primitive attributes handled separately
+			break;
+
+		case BuiltInPrimitivePointIndicesEXT:
+		case BuiltInPrimitiveLineIndicesEXT:
+		case BuiltInPrimitiveTriangleIndicesEXT:
+			// meshlet local-index buffer handled separately
+			break;
+
+		default:
+			SPIRV_CROSS_THROW("Unsupported builtin in HLSL.");
+		}
+
+		if (type && semantic)
+			statement(type, " ", builtin_to_glsl(builtin, StorageClassOutput), " : ", semantic, ";");
+	});
+}
+
+void CompilerHLSL::emit_builtin_primitive_outputs_in_struct()
+{
+	active_output_builtins.for_each_bit([&](uint32_t i) {
+		const char *type = nullptr;
+		const char *semantic = nullptr;
+		auto builtin = static_cast<BuiltIn>(i);
+		switch (builtin)
+		{
+		case BuiltInLayer:
+		{
+			if (hlsl_options.shader_model < 50)
+				SPIRV_CROSS_THROW("Render target array index output is only supported in SM 5.0 or higher.");
+			type = "uint";
+			semantic = "SV_RenderTargetArrayIndex";
+			break;
+		}
+
+		case BuiltInPrimitiveId:
+			type = "uint";
+			semantic = "SV_PrimitiveID";
+			break;
+
+		case BuiltInViewportIndex:
+			type = "uint";
+			semantic = "SV_ViewportArrayIndex";
+			break;
+
+		case BuiltInPrimitiveShadingRateKHR:
+			type = "uint";
+			semantic = "SV_ShadingRate";
+			break;
+
+		case BuiltInCullPrimitiveEXT:
+			type = "bool";
+			semantic = "SV_CullPrimitive";
+			break;
+
+		default:
+			break;
+		}
+
+		if (type && semantic)
+			statement(type, " ", builtin_to_glsl(builtin, StorageClassOutput), " : ", semantic, ";");
+	});
+}
+
+void CompilerHLSL::emit_builtin_inputs_in_struct()
+{
+	bool legacy = hlsl_options.shader_model <= 30;
+	active_input_builtins.for_each_bit([&](uint32_t i) {
+		const char *type = nullptr;
+		const char *semantic = nullptr;
+		auto builtin = static_cast<BuiltIn>(i);
+		switch (builtin)
+		{
+		case BuiltInFragCoord:
+			type = "float4";
+			semantic = legacy ? "VPOS" : "SV_Position";
+			break;
+
+		case BuiltInVertexId:
+		case BuiltInVertexIndex:
+			if (legacy)
+				SPIRV_CROSS_THROW("Vertex index not supported in SM 3.0 or lower.");
+			type = "uint";
+			semantic = "SV_VertexID";
+			break;
+
+		case BuiltInPrimitiveId:
+			type = "uint";
+			semantic = "SV_PrimitiveID";
+			break;
+
+		case BuiltInInstanceId:
+		case BuiltInInstanceIndex:
+			if (legacy)
+				SPIRV_CROSS_THROW("Instance index not supported in SM 3.0 or lower.");
+			type = "uint";
+			semantic = "SV_InstanceID";
+			break;
+
+		case BuiltInSampleId:
+			if (legacy)
+				SPIRV_CROSS_THROW("Sample ID not supported in SM 3.0 or lower.");
+			type = "uint";
+			semantic = "SV_SampleIndex";
+			break;
+
+		case BuiltInSampleMask:
+			if (hlsl_options.shader_model < 50 || get_entry_point().model != ExecutionModelFragment)
+				SPIRV_CROSS_THROW("Sample Mask input is only supported in PS 5.0 or higher.");
+			type = "uint";
+			semantic = "SV_Coverage";
+			break;
+
+		case BuiltInGlobalInvocationId:
+			type = "uint3";
+			semantic = "SV_DispatchThreadID";
+			break;
+
+		case BuiltInLocalInvocationId:
+			type = "uint3";
+			semantic = "SV_GroupThreadID";
+			break;
+
+		case BuiltInLocalInvocationIndex:
+			type = "uint";
+			semantic = "SV_GroupIndex";
+			break;
+
+		case BuiltInWorkgroupId:
+			type = "uint3";
+			semantic = "SV_GroupID";
+			break;
+
+		case BuiltInFrontFacing:
+			type = "bool";
+			semantic = "SV_IsFrontFace";
+			break;
+
+		case BuiltInViewIndex:
+			if (hlsl_options.shader_model < 61 || (get_entry_point().model != ExecutionModelVertex && get_entry_point().model != ExecutionModelFragment))
+				SPIRV_CROSS_THROW("View Index input is only supported in VS and PS 6.1 or higher.");
+			type = "uint";
+			semantic = "SV_ViewID";
+			break;
+
+		case BuiltInNumWorkgroups:
+		case BuiltInSubgroupSize:
+		case BuiltInSubgroupLocalInvocationId:
+		case BuiltInSubgroupEqMask:
+		case BuiltInSubgroupLtMask:
+		case BuiltInSubgroupLeMask:
+		case BuiltInSubgroupGtMask:
+		case BuiltInSubgroupGeMask:
+		case BuiltInBaseVertex:
+		case BuiltInBaseInstance:
+			// Handled specially.
+			break;
+
+		case BuiltInHelperInvocation:
+			if (hlsl_options.shader_model < 50 || get_entry_point().model != ExecutionModelFragment)
+				SPIRV_CROSS_THROW("Helper Invocation input is only supported in PS 5.0 or higher.");
+			break;
+
+		case BuiltInClipDistance:
+			// HLSL is a bit weird here, use SV_ClipDistance0, SV_ClipDistance1 and so on with vectors.
+			for (uint32_t clip = 0; clip < clip_distance_count; clip += 4)
+			{
+				uint32_t to_declare = clip_distance_count - clip;
+				if (to_declare > 4)
+					to_declare = 4;
+
+				uint32_t semantic_index = clip / 4;
+
+				static const char *types[] = { "float", "float2", "float3", "float4" };
+				statement(types[to_declare - 1], " ", builtin_to_glsl(builtin, StorageClassInput), semantic_index,
+				          " : SV_ClipDistance", semantic_index, ";");
+			}
+			break;
+
+		case BuiltInCullDistance:
+			// HLSL is a bit weird here, use SV_CullDistance0, SV_CullDistance1 and so on with vectors.
+			for (uint32_t cull = 0; cull < cull_distance_count; cull += 4)
+			{
+				uint32_t to_declare = cull_distance_count - cull;
+				if (to_declare > 4)
+					to_declare = 4;
+
+				uint32_t semantic_index = cull / 4;
+
+				static const char *types[] = { "float", "float2", "float3", "float4" };
+				statement(types[to_declare - 1], " ", builtin_to_glsl(builtin, StorageClassInput), semantic_index,
+				          " : SV_CullDistance", semantic_index, ";");
+			}
+			break;
+
+		case BuiltInPointCoord:
+			// PointCoord is not supported, but provide a way to just ignore that, similar to PointSize.
+			if (hlsl_options.point_coord_compat)
+				break;
+			else
+				SPIRV_CROSS_THROW("Unsupported builtin in HLSL.");
+
+		case BuiltInLayer:
+			if (hlsl_options.shader_model < 50 || get_entry_point().model != ExecutionModelFragment)
+				SPIRV_CROSS_THROW("Render target array index input is only supported in PS 5.0 or higher.");
+			type = "uint";
+			semantic = "SV_RenderTargetArrayIndex";
+			break;
+
+		default:
+			SPIRV_CROSS_THROW("Unsupported builtin in HLSL.");
+		}
+
+		if (type && semantic)
+			statement(type, " ", builtin_to_glsl(builtin, StorageClassInput), " : ", semantic, ";");
+	});
+}
+
+uint32_t CompilerHLSL::type_to_consumed_locations(const SPIRType &type) const
+{
+	// TODO: Need to verify correctness.
+	uint32_t elements = 0;
+
+	if (type.basetype == SPIRType::Struct)
+	{
+		for (uint32_t i = 0; i < uint32_t(type.member_types.size()); i++)
+			elements += type_to_consumed_locations(get<SPIRType>(type.member_types[i]));
+	}
+	else
+	{
+		uint32_t array_multiplier = 1;
+		for (uint32_t i = 0; i < uint32_t(type.array.size()); i++)
+		{
+			if (type.array_size_literal[i])
+				array_multiplier *= type.array[i];
+			else
+				array_multiplier *= evaluate_constant_u32(type.array[i]);
+		}
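+		// Each column of the type consumes one location; arrays multiply that by their total element count.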
+		elements += array_multiplier * type.columns;
+	}
+	return elements;
+}
+
+string CompilerHLSL::to_interpolation_qualifiers(const Bitset &flags)
+{
+	string res;
+	//if (flags & (1ull << DecorationSmooth))
+	//    res += "linear ";
+	if (flags.get(DecorationFlat))
+		res += "nointerpolation ";
+	if (flags.get(DecorationNoPerspective))
+		res += "noperspective ";
+	if (flags.get(DecorationCentroid))
+		res += "centroid ";
+	if (flags.get(DecorationPatch))
+		res += "patch "; // Seems to be different in actual HLSL.
+	if (flags.get(DecorationSample))
+		res += "sample ";
+	if (flags.get(DecorationInvariant) && backend.support_precise_qualifier)
+		res += "precise "; // Not supported?
+
+	return res;
+}
+
+std::string CompilerHLSL::to_semantic(uint32_t location, ExecutionModel em, StorageClass sc)
+{
+	if (em == ExecutionModelVertex && sc == StorageClassInput)
+	{
+		// We have a vertex attribute - we should look at remapping it if the user provided
+		// vertex attribute hints.
+		for (auto &attribute : remap_vertex_attributes)
+			if (attribute.location == location)
+				return attribute.semantic;
+	}
+
+	// Not a vertex attribute, or no remap_vertex_attributes entry.
+	return join("TEXCOORD", location);
+}
+
+std::string CompilerHLSL::to_initializer_expression(const SPIRVariable &var)
+{
+	// We cannot emit static const initializer for block constants for practical reasons,
+	// so just inline the initializer.
+	// FIXME: There is a theoretical problem here if someone tries to composite extract
+	// into this initializer since we don't declare it properly, but that is somewhat non-sensical.
+	auto &type = get<SPIRType>(var.basetype);
+	bool is_block = has_decoration(type.self, DecorationBlock);
+	auto *c = maybe_get<SPIRConstant>(var.initializer);
+	if (is_block && c)
+		return constant_expression(*c);
+	else
+		return CompilerGLSL::to_initializer_expression(var);
+}
+
+void CompilerHLSL::emit_interface_block_member_in_struct(const SPIRVariable &var, uint32_t member_index,
+                                                         uint32_t location,
+                                                         std::unordered_set<uint32_t> &active_locations)
+{
+	auto &execution = get_entry_point();
+	auto type = get<SPIRType>(var.basetype);
+	auto semantic = to_semantic(location, execution.model, var.storage);
+	auto mbr_name = join(to_name(type.self), "_", to_member_name(type, member_index));
+	auto &mbr_type = get<SPIRType>(type.member_types[member_index]);
+
+	statement(to_interpolation_qualifiers(get_member_decoration_bitset(type.self, member_index)),
+	          type_to_glsl(mbr_type),
+	          " ", mbr_name, type_to_array_glsl(mbr_type),
+	          " : ", semantic, ";");
+
+	// Structs and arrays should consume more locations.
+	uint32_t consumed_locations = type_to_consumed_locations(mbr_type);
+	for (uint32_t i = 0; i < consumed_locations; i++)
+		active_locations.insert(location + i);
+}
+
+void CompilerHLSL::emit_interface_block_in_struct(const SPIRVariable &var, unordered_set<uint32_t> &active_locations)
+{
+	auto &execution = get_entry_point();
+	auto type = get<SPIRType>(var.basetype);
+
+	string binding;
+	bool use_location_number = true;
+	bool need_matrix_unroll = false;
+	bool legacy = hlsl_options.shader_model <= 30;
+	if (execution.model == ExecutionModelFragment && var.storage == StorageClassOutput)
+	{
+		// Dual-source blending is achieved in HLSL by emitting to SV_Target0 and 1.
+		uint32_t index = get_decoration(var.self, DecorationIndex);
+		uint32_t location = get_decoration(var.self, DecorationLocation);
+
+		if (index != 0 && location != 0)
+			SPIRV_CROSS_THROW("Dual-source blending is only supported on MRT #0 in HLSL.");
+
+		binding = join(legacy ? "COLOR" : "SV_Target", location + index);
+		use_location_number = false;
+		if (legacy) // COLOR must be a four-component vector on legacy shader model targets (HLSL ERR_COLOR_4COMP)
+			type.vecsize = 4;
+	}
+	else if (var.storage == StorageClassInput && execution.model == ExecutionModelVertex)
+	{
+		need_matrix_unroll = true;
+		if (legacy) // Inputs must be floating-point in legacy targets.
+			type.basetype = SPIRType::Float;
+	}
+
+	const auto get_vacant_location = [&]() -> uint32_t {
+		for (uint32_t i = 0; i < 64; i++)
+			if (!active_locations.count(i))
+				return i;
+		SPIRV_CROSS_THROW("All locations from 0 to 63 are exhausted.");
+	};
+
+	auto name = to_name(var.self);
+	if (use_location_number)
+	{
+		uint32_t location_number;
+
+		// If an explicit location exists, use it with TEXCOORD[N] semantic.
+		// Otherwise, pick a vacant location.
+		if (has_decoration(var.self, DecorationLocation))
+			location_number = get_decoration(var.self, DecorationLocation);
+		else
+			location_number = get_vacant_location();
+
+		// Allow semantic remap if specified.
+		auto semantic = to_semantic(location_number, execution.model, var.storage);
+
+		if (need_matrix_unroll && type.columns > 1)
+		{
+			if (!type.array.empty())
+				SPIRV_CROSS_THROW("Arrays of matrices used as input/output. This is not supported.");
+
+			// Unroll matrices.
+			for (uint32_t i = 0; i < type.columns; i++)
+			{
+				SPIRType newtype = type;
+				newtype.columns = 1;
+
+				string effective_semantic;
+				if (hlsl_options.flatten_matrix_vertex_input_semantics)
+					effective_semantic = to_semantic(location_number, execution.model, var.storage);
+				else
+					effective_semantic = join(semantic, "_", i);
+
+				statement(to_interpolation_qualifiers(get_decoration_bitset(var.self)),
+				          variable_decl(newtype, join(name, "_", i)), " : ", effective_semantic, ";");
+				active_locations.insert(location_number++);
+			}
+		}
+		else
+		{
+			auto decl_type = type;
+			if (execution.model == ExecutionModelMeshEXT)
+			{
+				decl_type.array.erase(decl_type.array.begin());
+				decl_type.array_size_literal.erase(decl_type.array_size_literal.begin());
+			}
+			statement(to_interpolation_qualifiers(get_decoration_bitset(var.self)), variable_decl(decl_type, name), " : ",
+			          semantic, ";");
+
+			// Structs and arrays should consume more locations.
+			uint32_t consumed_locations = type_to_consumed_locations(decl_type);
+			for (uint32_t i = 0; i < consumed_locations; i++)
+				active_locations.insert(location_number + i);
+		}
+	}
+	else
+	{
+		statement(variable_decl(type, name), " : ", binding, ";");
+	}
+}
+
+std::string CompilerHLSL::builtin_to_glsl(spv::BuiltIn builtin, spv::StorageClass storage)
+{
+	switch (builtin)
+	{
+	case BuiltInVertexId:
+		return "gl_VertexID";
+	case BuiltInInstanceId:
+		return "gl_InstanceID";
+	case BuiltInNumWorkgroups:
+	{
+		if (!num_workgroups_builtin)
+			SPIRV_CROSS_THROW("NumWorkgroups builtin is used, but remap_num_workgroups_builtin() was not called. "
+			                  "Cannot emit code for this builtin.");
+
+		auto &var = get<SPIRVariable>(num_workgroups_builtin);
+		auto &type = get<SPIRType>(var.basetype);
+		auto ret = join(to_name(num_workgroups_builtin), "_", get_member_name(type.self, 0));
+		ParsedIR::sanitize_underscores(ret);
+		return ret;
+	}
+	case BuiltInPointCoord:
+		// Crude hack, but there is no real alternative. This path is only enabled if point_coord_compat is set.
+		return "float2(0.5f, 0.5f)";
+	case BuiltInSubgroupLocalInvocationId:
+		return "WaveGetLaneIndex()";
+	case BuiltInSubgroupSize:
+		return "WaveGetLaneCount()";
+	case BuiltInHelperInvocation:
+		return "IsHelperLane()";
+
+	default:
+		return CompilerGLSL::builtin_to_glsl(builtin, storage);
+	}
+}
+
+void CompilerHLSL::emit_builtin_variables()
+{
+	Bitset builtins = active_input_builtins;
+	builtins.merge_or(active_output_builtins);
+
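+	// Gather constant initializers for output builtins so the static global copies declared below can be initialized with them.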
+	std::unordered_map<uint32_t, ID> builtin_to_initializer;
+	ir.for_each_typed_id<SPIRVariable>([&](uint32_t, SPIRVariable &var) {
+		if (!is_builtin_variable(var) || var.storage != StorageClassOutput || !var.initializer)
+			return;
+
+		auto *c = this->maybe_get<SPIRConstant>(var.initializer);
+		if (!c)
+			return;
+
+		auto &type = this->get<SPIRType>(var.basetype);
+		if (type.basetype == SPIRType::Struct)
+		{
+			uint32_t member_count = uint32_t(type.member_types.size());
+			for (uint32_t i = 0; i < member_count; i++)
+			{
+				if (has_member_decoration(type.self, i, DecorationBuiltIn))
+				{
+					builtin_to_initializer[get_member_decoration(type.self, i, DecorationBuiltIn)] =
+						c->subconstants[i];
+				}
+			}
+		}
+		else if (has_decoration(var.self, DecorationBuiltIn))
+			builtin_to_initializer[get_decoration(var.self, DecorationBuiltIn)] = var.initializer;
+	});
+
+	// Emit global variables for the interface variables which are statically used by the shader.
+	builtins.for_each_bit([&](uint32_t i) {
+		const char *type = nullptr;
+		auto builtin = static_cast<BuiltIn>(i);
+		uint32_t array_size = 0;
+
+		string init_expr;
+		auto init_itr = builtin_to_initializer.find(builtin);
+		if (init_itr != builtin_to_initializer.end())
+			init_expr = join(" = ", to_expression(init_itr->second));
+
+		if (get_execution_model() == ExecutionModelMeshEXT)
+		{
+			if (builtin == BuiltInPosition || builtin == BuiltInPointSize || builtin == BuiltInClipDistance ||
+			    builtin == BuiltInCullDistance || builtin == BuiltInLayer || builtin == BuiltInPrimitiveId ||
+			    builtin == BuiltInViewportIndex || builtin == BuiltInCullPrimitiveEXT ||
+			    builtin == BuiltInPrimitiveShadingRateKHR || builtin == BuiltInPrimitivePointIndicesEXT ||
+			    builtin == BuiltInPrimitiveLineIndicesEXT || builtin == BuiltInPrimitiveTriangleIndicesEXT)
+			{
+				return;
+			}
+		}
+
+		switch (builtin)
+		{
+		case BuiltInFragCoord:
+		case BuiltInPosition:
+			type = "float4";
+			break;
+
+		case BuiltInFragDepth:
+			type = "float";
+			break;
+
+		case BuiltInVertexId:
+		case BuiltInVertexIndex:
+		case BuiltInInstanceIndex:
+			type = "int";
+			if (hlsl_options.support_nonzero_base_vertex_base_instance)
+				base_vertex_info.used = true;
+			break;
+
+		case BuiltInBaseVertex:
+		case BuiltInBaseInstance:
+			type = "int";
+			base_vertex_info.used = true;
+			break;
+
+		case BuiltInInstanceId:
+		case BuiltInSampleId:
+			type = "int";
+			break;
+
+		case BuiltInPointSize:
+			if (hlsl_options.point_size_compat || hlsl_options.shader_model <= 30)
+			{
+				// Just emit the global variable, it will be ignored.
+				type = "float";
+				break;
+			}
+			else
+				SPIRV_CROSS_THROW(join("Unsupported builtin in HLSL: ", unsigned(builtin)));
+
+		case BuiltInGlobalInvocationId:
+		case BuiltInLocalInvocationId:
+		case BuiltInWorkgroupId:
+			type = "uint3";
+			break;
+
+		case BuiltInLocalInvocationIndex:
+			type = "uint";
+			break;
+
+		case BuiltInFrontFacing:
+			type = "bool";
+			break;
+
+		case BuiltInNumWorkgroups:
+		case BuiltInPointCoord:
+			// Handled specially.
+			break;
+
+		case BuiltInSubgroupLocalInvocationId:
+		case BuiltInSubgroupSize:
+			if (hlsl_options.shader_model < 60)
+				SPIRV_CROSS_THROW("Need SM 6.0 for Wave ops.");
+			break;
+
+		case BuiltInSubgroupEqMask:
+		case BuiltInSubgroupLtMask:
+		case BuiltInSubgroupLeMask:
+		case BuiltInSubgroupGtMask:
+		case BuiltInSubgroupGeMask:
+			if (hlsl_options.shader_model < 60)
+				SPIRV_CROSS_THROW("Need SM 6.0 for Wave ops.");
+			type = "uint4";
+			break;
+
+		case BuiltInHelperInvocation:
+			if (hlsl_options.shader_model < 50)
+				SPIRV_CROSS_THROW("Need SM 5.0 for Helper Invocation.");
+			break;
+
+		case BuiltInClipDistance:
+			array_size = clip_distance_count;
+			type = "float";
+			break;
+
+		case BuiltInCullDistance:
+			array_size = cull_distance_count;
+			type = "float";
+			break;
+
+		case BuiltInSampleMask:
+			type = "int";
+			break;
+
+		case BuiltInPrimitiveId:
+		case BuiltInViewIndex:
+		case BuiltInLayer:
+			type = "uint";
+			break;
+
+		case BuiltInViewportIndex:
+		case BuiltInPrimitiveShadingRateKHR:
+		case BuiltInPrimitiveLineIndicesEXT:
+		case BuiltInCullPrimitiveEXT:
+			type = "uint";
+			break;
+
+		default:
+			SPIRV_CROSS_THROW(join("Unsupported builtin in HLSL: ", unsigned(builtin)));
+		}
+
+		StorageClass storage = active_input_builtins.get(i) ? StorageClassInput : StorageClassOutput;
+
+		if (type)
+		{
+			if (array_size)
+				statement("static ", type, " ", builtin_to_glsl(builtin, storage), "[", array_size, "]", init_expr, ";");
+			else
+				statement("static ", type, " ", builtin_to_glsl(builtin, storage), init_expr, ";");
+		}
+
+		// SampleMask can be used both as an input and an output builtin; in that case we have
+		// already declared the input variable and need to add the output one now.
+		if (builtin == BuiltInSampleMask && storage == StorageClassInput && this->active_output_builtins.get(i))
+		{
+			statement("static ", type, " ", this->builtin_to_glsl(builtin, StorageClassOutput), init_expr, ";");
+		}
+	});
+
+	if (base_vertex_info.used)
+	{
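+		// HLSL has no BaseVertex/BaseInstance builtins, so emit a small cbuffer that the application is expected to provide.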
+		string binding_info;
+		if (base_vertex_info.explicit_binding)
+		{
+			binding_info = join(" : register(b", base_vertex_info.register_index);
+			if (base_vertex_info.register_space)
+				binding_info += join(", space", base_vertex_info.register_space);
+			binding_info += ")";
+		}
+		statement("cbuffer SPIRV_Cross_VertexInfo", binding_info);
+		begin_scope();
+		statement("int SPIRV_Cross_BaseVertex;");
+		statement("int SPIRV_Cross_BaseInstance;");
+		end_scope_decl();
+		statement("");
+	}
+}
+
+void CompilerHLSL::set_hlsl_aux_buffer_binding(HLSLAuxBinding binding, uint32_t register_index, uint32_t register_space)
+{
+	if (binding == HLSL_AUX_BINDING_BASE_VERTEX_INSTANCE)
+	{
+		base_vertex_info.explicit_binding = true;
+		base_vertex_info.register_space = register_space;
+		base_vertex_info.register_index = register_index;
+	}
+}
+
+void CompilerHLSL::unset_hlsl_aux_buffer_binding(HLSLAuxBinding binding)
+{
+	if (binding == HLSL_AUX_BINDING_BASE_VERTEX_INSTANCE)
+		base_vertex_info.explicit_binding = false;
+}
+
+bool CompilerHLSL::is_hlsl_aux_buffer_binding_used(HLSLAuxBinding binding) const
+{
+	if (binding == HLSL_AUX_BINDING_BASE_VERTEX_INSTANCE)
+		return base_vertex_info.used;
+	else
+		return false;
+}
+
+void CompilerHLSL::emit_composite_constants()
+{
+	// HLSL cannot declare structs or arrays inline, so we must move them out to
+	// global constants directly.
+	bool emitted = false;
+
+	ir.for_each_typed_id<SPIRConstant>([&](uint32_t, SPIRConstant &c) {
+		if (c.specialization)
+			return;
+
+		auto &type = this->get<SPIRType>(c.constant_type);
+
+		if (type.basetype == SPIRType::Struct && is_builtin_type(type))
+			return;
+
+		if (type.basetype == SPIRType::Struct || !type.array.empty())
+		{
+			add_resource_name(c.self);
+			auto name = to_name(c.self);
+			statement("static const ", variable_decl(type, name), " = ", constant_expression(c), ";");
+			emitted = true;
+		}
+	});
+
+	if (emitted)
+		statement("");
+}
+
+void CompilerHLSL::emit_specialization_constants_and_structs()
+{
+	bool emitted = false;
+	SpecializationConstant wg_x, wg_y, wg_z;
+	ID workgroup_size_id = get_work_group_size_specialization_constants(wg_x, wg_y, wg_z);
+
+	std::unordered_set<TypeID> io_block_types;
+	ir.for_each_typed_id<SPIRVariable>([&](uint32_t, const SPIRVariable &var) {
+		auto &type = this->get<SPIRType>(var.basetype);
+		if ((var.storage == StorageClassInput || var.storage == StorageClassOutput) &&
+		    !var.remapped_variable && type.pointer && !is_builtin_variable(var) &&
+		    interface_variable_exists_in_entry_point(var.self) &&
+		    has_decoration(type.self, DecorationBlock))
+		{
+			io_block_types.insert(type.self);
+		}
+	});
+
+	auto loop_lock = ir.create_loop_hard_lock();
+	for (auto &id_ : ir.ids_for_constant_undef_or_type)
+	{
+		auto &id = ir.ids[id_];
+
+		if (id.get_type() == TypeConstant)
+		{
+			auto &c = id.get<SPIRConstant>();
+
+			if (c.self == workgroup_size_id)
+			{
+				statement("static const uint3 gl_WorkGroupSize = ",
+				          constant_expression(get<SPIRConstant>(workgroup_size_id)), ";");
+				emitted = true;
+			}
+			else if (c.specialization)
+			{
+				auto &type = get<SPIRType>(c.constant_type);
+				add_resource_name(c.self);
+				auto name = to_name(c.self);
+
+				if (has_decoration(c.self, DecorationSpecId))
+				{
+					// HLSL does not support specialization constants, so fallback to macros.
+					c.specialization_constant_macro_name =
+							constant_value_macro_name(get_decoration(c.self, DecorationSpecId));
+
+					statement("#ifndef ", c.specialization_constant_macro_name);
+					statement("#define ", c.specialization_constant_macro_name, " ", constant_expression(c));
+					statement("#endif");
+					statement("static const ", variable_decl(type, name), " = ", c.specialization_constant_macro_name, ";");
+				}
+				else
+					statement("static const ", variable_decl(type, name), " = ", constant_expression(c), ";");
+
+				emitted = true;
+			}
+		}
+		else if (id.get_type() == TypeConstantOp)
+		{
+			auto &c = id.get<SPIRConstantOp>();
+			auto &type = get<SPIRType>(c.basetype);
+			add_resource_name(c.self);
+			auto name = to_name(c.self);
+			statement("static const ", variable_decl(type, name), " = ", constant_op_expression(c), ";");
+			emitted = true;
+		}
+		else if (id.get_type() == TypeType)
+		{
+			auto &type = id.get<SPIRType>();
+			bool is_non_io_block = has_decoration(type.self, DecorationBlock) &&
+			                       io_block_types.count(type.self) == 0;
+			bool is_buffer_block = has_decoration(type.self, DecorationBufferBlock);
+			if (type.basetype == SPIRType::Struct && type.array.empty() &&
+			    !type.pointer && !is_non_io_block && !is_buffer_block)
+			{
+				if (emitted)
+					statement("");
+				emitted = false;
+
+				emit_struct(type);
+			}
+		}
+		else if (id.get_type() == TypeUndef)
+		{
+			auto &undef = id.get<SPIRUndef>();
+			auto &type = this->get<SPIRType>(undef.basetype);
+			// OpUndef can be void for some reason ...
+			if (type.basetype == SPIRType::Void)
+				return;
+
+			string initializer;
+			if (options.force_zero_initialized_variables && type_can_zero_initialize(type))
+				initializer = join(" = ", to_zero_initialized_expression(undef.basetype));
+
+			statement("static ", variable_decl(type, to_name(undef.self), undef.self), initializer, ";");
+			emitted = true;
+		}
+	}
+
+	if (emitted)
+		statement("");
+}
+
+void CompilerHLSL::replace_illegal_names()
+{
+	static const unordered_set<string> keywords = {
+		// Additional HLSL specific keywords.
+		// From https://docs.microsoft.com/en-US/windows/win32/direct3dhlsl/dx-graphics-hlsl-appendix-keywords
+		"AppendStructuredBuffer", "asm", "asm_fragment",
+		"BlendState", "bool", "break", "Buffer", "ByteAddressBuffer",
+		"case", "cbuffer", "centroid", "class", "column_major", "compile",
+		"compile_fragment", "CompileShader", "const", "continue", "ComputeShader",
+		"ConsumeStructuredBuffer",
+		"default", "DepthStencilState", "DepthStencilView", "discard", "do",
+		"double", "DomainShader", "dword",
+		"else", "export", "false", "float", "for", "fxgroup",
+		"GeometryShader", "groupshared", "half", "HullShader",
+		"indices", "if", "in", "inline", "inout", "InputPatch", "int", "interface",
+		"line", "lineadj", "linear", "LineStream",
+		"matrix", "min16float", "min10float", "min16int", "min16uint",
+		"namespace", "nointerpolation", "noperspective", "NULL",
+		"out", "OutputPatch",
+		"payload", "packoffset", "pass", "pixelfragment", "PixelShader", "point",
+		"PointStream", "precise", "RasterizerState", "RenderTargetView",
+		"return", "register", "row_major", "RWBuffer", "RWByteAddressBuffer",
+		"RWStructuredBuffer", "RWTexture1D", "RWTexture1DArray", "RWTexture2D",
+		"RWTexture2DArray", "RWTexture3D", "sample", "sampler", "SamplerState",
+		"SamplerComparisonState", "shared", "snorm", "stateblock", "stateblock_state",
+		"static", "string", "struct", "switch", "StructuredBuffer", "tbuffer",
+		"technique", "technique10", "technique11", "texture", "Texture1D",
+		"Texture1DArray", "Texture2D", "Texture2DArray", "Texture2DMS", "Texture2DMSArray",
+		"Texture3D", "TextureCube", "TextureCubeArray", "true", "typedef", "triangle",
+		"triangleadj", "TriangleStream", "uint", "uniform", "unorm", "unsigned",
+		"vector", "vertexfragment", "VertexShader", "vertices", "void", "volatile", "while",
+	};
+
+	CompilerGLSL::replace_illegal_names(keywords);
+	CompilerGLSL::replace_illegal_names();
+}
+
+void CompilerHLSL::emit_resources()
+{
+	auto &execution = get_entry_point();
+
+	replace_illegal_names();
+
+	switch (execution.model)
+	{
+	case ExecutionModelGeometry:
+	case ExecutionModelTessellationControl:
+	case ExecutionModelTessellationEvaluation:
+	case ExecutionModelMeshEXT:
+		fixup_implicit_builtin_block_names(execution.model);
+		break;
+
+	default:
+		break;
+	}
+
+	emit_specialization_constants_and_structs();
+	emit_composite_constants();
+
+	bool emitted = false;
+
+	// Output UBOs and SSBOs
+	ir.for_each_typed_id<SPIRVariable>([&](uint32_t, SPIRVariable &var) {
+		auto &type = this->get<SPIRType>(var.basetype);
+
+		bool is_block_storage = type.storage == StorageClassStorageBuffer || type.storage == StorageClassUniform;
+		bool has_block_flags = ir.meta[type.self].decoration.decoration_flags.get(DecorationBlock) ||
+		                       ir.meta[type.self].decoration.decoration_flags.get(DecorationBufferBlock);
+
+		if (var.storage != StorageClassFunction && type.pointer && is_block_storage && !is_hidden_variable(var) &&
+		    has_block_flags)
+		{
+			emit_buffer_block(var);
+			emitted = true;
+		}
+	});
+
+	// Output push constant blocks
+	ir.for_each_typed_id<SPIRVariable>([&](uint32_t, SPIRVariable &var) {
+		auto &type = this->get<SPIRType>(var.basetype);
+		if (var.storage != StorageClassFunction && type.pointer && type.storage == StorageClassPushConstant &&
+		    !is_hidden_variable(var))
+		{
+			emit_push_constant_block(var);
+			emitted = true;
+		}
+	});
+
+	if (execution.model == ExecutionModelVertex && hlsl_options.shader_model <= 30 &&
+	    active_output_builtins.get(BuiltInPosition))
+	{
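+		// SM 3.0 (D3D9) rasterization has a half-pixel offset; gl_HalfPixel is a uniform the application is expected to set so positions can be corrected.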
+		statement("uniform float4 gl_HalfPixel;");
+		emitted = true;
+	}
+
+	bool skip_separate_image_sampler = !combined_image_samplers.empty() || hlsl_options.shader_model <= 30;
+
+	// Output Uniform Constants (values, samplers, images, etc).
+	ir.for_each_typed_id<SPIRVariable>([&](uint32_t, SPIRVariable &var) {
+		auto &type = this->get<SPIRType>(var.basetype);
+
+		// If we're remapping separate samplers and images, only emit the combined samplers.
+		if (skip_separate_image_sampler)
+		{
+			// Sampler buffers are always used without a sampler, and they will also work in regular D3D.
+			bool sampler_buffer = type.basetype == SPIRType::Image && type.image.dim == DimBuffer;
+			bool separate_image = type.basetype == SPIRType::Image && type.image.sampled == 1;
+			bool separate_sampler = type.basetype == SPIRType::Sampler;
+			if (!sampler_buffer && (separate_image || separate_sampler))
+				return;
+		}
+
+		if (var.storage != StorageClassFunction && !is_builtin_variable(var) && !var.remapped_variable &&
+		    type.pointer && (type.storage == StorageClassUniformConstant || type.storage == StorageClassAtomicCounter) &&
+		    !is_hidden_variable(var))
+		{
+			emit_uniform(var);
+			emitted = true;
+		}
+	});
+
+	if (emitted)
+		statement("");
+	emitted = false;
+
+	// Emit builtin input and output variables here.
+	emit_builtin_variables();
+
+	if (execution.model != ExecutionModelMeshEXT)
+	{
+		ir.for_each_typed_id<SPIRVariable>([&](uint32_t, SPIRVariable &var) {
+			auto &type = this->get<SPIRType>(var.basetype);
+
+			if (var.storage != StorageClassFunction && !var.remapped_variable && type.pointer &&
+			   (var.storage == StorageClassInput || var.storage == StorageClassOutput) && !is_builtin_variable(var) &&
+			   interface_variable_exists_in_entry_point(var.self))
+			{
+				// Builtin variables are handled separately.
+				emit_interface_block_globally(var);
+				emitted = true;
+			}
+		});
+	}
+
+	if (emitted)
+		statement("");
+	emitted = false;
+
+	require_input = false;
+	require_output = false;
+	unordered_set<uint32_t> active_inputs;
+	unordered_set<uint32_t> active_outputs;
+
+	struct IOVariable
+	{
+		const SPIRVariable *var;
+		uint32_t location;
+		uint32_t block_member_index;
+		bool block;
+	};
+
+	SmallVector<IOVariable> input_variables;
+	SmallVector<IOVariable> output_variables;
+
+	ir.for_each_typed_id<SPIRVariable>([&](uint32_t, SPIRVariable &var) {
+		auto &type = this->get<SPIRType>(var.basetype);
+		bool block = has_decoration(type.self, DecorationBlock);
+
+		if (var.storage != StorageClassInput && var.storage != StorageClassOutput)
+			return;
+
+		if (!var.remapped_variable && type.pointer && !is_builtin_variable(var) &&
+		    interface_variable_exists_in_entry_point(var.self))
+		{
+			if (block)
+			{
+				for (uint32_t i = 0; i < uint32_t(type.member_types.size()); i++)
+				{
+					uint32_t location = get_declared_member_location(var, i, false);
+					if (var.storage == StorageClassInput)
+						input_variables.push_back({ &var, location, i, true });
+					else
+						output_variables.push_back({ &var, location, i, true });
+				}
+			}
+			else
+			{
+				uint32_t location = get_decoration(var.self, DecorationLocation);
+				if (var.storage == StorageClassInput)
+					input_variables.push_back({ &var, location, 0, false });
+				else
+					output_variables.push_back({ &var, location, 0, false });
+			}
+		}
+	});
+
+	const auto variable_compare = [&](const IOVariable &a, const IOVariable &b) -> bool {
+		// Sort input and output variables based on, from more robust to less robust:
+		// - Location
+		// - Variable has a location
+		// - Name comparison
+		// - Variable has a name
+		// - Fallback: ID
+		bool has_location_a = a.block || has_decoration(a.var->self, DecorationLocation);
+		bool has_location_b = b.block || has_decoration(b.var->self, DecorationLocation);
+
+		if (has_location_a && has_location_b)
+			return a.location < b.location;
+		else if (has_location_a && !has_location_b)
+			return true;
+		else if (!has_location_a && has_location_b)
+			return false;
+
+		const auto &name1 = to_name(a.var->self);
+		const auto &name2 = to_name(b.var->self);
+
+		if (name1.empty() && name2.empty())
+			return a.var->self < b.var->self;
+		else if (name1.empty())
+			return true;
+		else if (name2.empty())
+			return false;
+
+		return name1.compare(name2) < 0;
+	};
+
+	auto input_builtins = active_input_builtins;
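+	// These input builtins are not read from the input struct (they come from a cbuffer, a constant or wave intrinsics), so they should not force SPIRV_Cross_Input to be emitted.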
+	input_builtins.clear(BuiltInNumWorkgroups);
+	input_builtins.clear(BuiltInPointCoord);
+	input_builtins.clear(BuiltInSubgroupSize);
+	input_builtins.clear(BuiltInSubgroupLocalInvocationId);
+	input_builtins.clear(BuiltInSubgroupEqMask);
+	input_builtins.clear(BuiltInSubgroupLtMask);
+	input_builtins.clear(BuiltInSubgroupLeMask);
+	input_builtins.clear(BuiltInSubgroupGtMask);
+	input_builtins.clear(BuiltInSubgroupGeMask);
+
+	if (!input_variables.empty() || !input_builtins.empty())
+	{
+		require_input = true;
+		statement("struct SPIRV_Cross_Input");
+
+		begin_scope();
+		sort(input_variables.begin(), input_variables.end(), variable_compare);
+		for (auto &var : input_variables)
+		{
+			if (var.block)
+				emit_interface_block_member_in_struct(*var.var, var.block_member_index, var.location, active_inputs);
+			else
+				emit_interface_block_in_struct(*var.var, active_inputs);
+		}
+		emit_builtin_inputs_in_struct();
+		end_scope_decl();
+		statement("");
+	}
+
+	const bool is_mesh_shader = execution.model == ExecutionModelMeshEXT;
+	if (!output_variables.empty() || !active_output_builtins.empty())
+	{
+		sort(output_variables.begin(), output_variables.end(), variable_compare);
+		require_output = !is_mesh_shader;
+
+		statement(is_mesh_shader ? "struct gl_MeshPerVertexEXT" : "struct SPIRV_Cross_Output");
+		begin_scope();
+		for (auto &var : output_variables)
+		{
+			if (is_per_primitive_variable(*var.var))
+				continue;
+			if (var.block && is_mesh_shader && var.block_member_index != 0)
+				continue;
+			if (var.block && !is_mesh_shader)
+				emit_interface_block_member_in_struct(*var.var, var.block_member_index, var.location, active_outputs);
+			else
+				emit_interface_block_in_struct(*var.var, active_outputs);
+		}
+		emit_builtin_outputs_in_struct();
+		if (!is_mesh_shader)
+			emit_builtin_primitive_outputs_in_struct();
+		end_scope_decl();
+		statement("");
+
+		if (is_mesh_shader)
+		{
+			statement("struct gl_MeshPerPrimitiveEXT");
+			begin_scope();
+			for (auto &var : output_variables)
+			{
+				if (!is_per_primitive_variable(*var.var))
+					continue;
+				if (var.block && var.block_member_index != 0)
+					continue;
+
+				emit_interface_block_in_struct(*var.var, active_outputs);
+			}
+			emit_builtin_primitive_outputs_in_struct();
+			end_scope_decl();
+			statement("");
+		}
+	}
+
+	// Global variables.
+	for (auto global : global_variables)
+	{
+		auto &var = get<SPIRVariable>(global);
+		if (is_hidden_variable(var, true))
+			continue;
+
+		if (var.storage == StorageClassTaskPayloadWorkgroupEXT && is_mesh_shader)
+			continue;
+
+		if (var.storage != StorageClassOutput)
+		{
+			if (!variable_is_lut(var))
+			{
+				add_resource_name(var.self);
+
+				const char *storage = nullptr;
+				switch (var.storage)
+				{
+				case StorageClassWorkgroup:
+				case StorageClassTaskPayloadWorkgroupEXT:
+					storage = "groupshared";
+					break;
+
+				default:
+					storage = "static";
+					break;
+				}
+
+				string initializer;
+				if (options.force_zero_initialized_variables && var.storage == StorageClassPrivate &&
+				    !var.initializer && !var.static_expression && type_can_zero_initialize(get_variable_data_type(var)))
+				{
+					initializer = join(" = ", to_zero_initialized_expression(get_variable_data_type_id(var)));
+				}
+				statement(storage, " ", variable_decl(var), initializer, ";");
+
+				emitted = true;
+			}
+		}
+	}
+
+	if (emitted)
+		statement("");
+
+	if (requires_op_fmod)
+	{
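+		// GLSL mod(x, y) is x - y * floor(x / y), which differs from HLSL's fmod() (truncated division), so emit helpers with GLSL semantics.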
+		static const char *types[] = {
+			"float",
+			"float2",
+			"float3",
+			"float4",
+		};
+
+		for (auto &type : types)
+		{
+			statement(type, " mod(", type, " x, ", type, " y)");
+			begin_scope();
+			statement("return x - y * floor(x / y);");
+			end_scope();
+			statement("");
+		}
+	}
+
+	emit_texture_size_variants(required_texture_size_variants.srv, "4", false, "");
+	for (uint32_t norm = 0; norm < 3; norm++)
+	{
+		for (uint32_t comp = 0; comp < 4; comp++)
+		{
+			static const char *qualifiers[] = { "", "unorm ", "snorm " };
+			static const char *vecsizes[] = { "", "2", "3", "4" };
+			emit_texture_size_variants(required_texture_size_variants.uav[norm][comp], vecsizes[comp], true,
+			                           qualifiers[norm]);
+		}
+	}
+
+	if (requires_fp16_packing)
+	{
+		// HLSL does not pack into a single word sadly :(
+		statement("uint spvPackHalf2x16(float2 value)");
+		begin_scope();
+		statement("uint2 Packed = f32tof16(value);");
+		statement("return Packed.x | (Packed.y << 16);");
+		end_scope();
+		statement("");
+
+		statement("float2 spvUnpackHalf2x16(uint value)");
+		begin_scope();
+		statement("return f16tof32(uint2(value & 0xffff, value >> 16));");
+		end_scope();
+		statement("");
+	}
+
+	if (requires_uint2_packing)
+	{
+		statement("uint64_t spvPackUint2x32(uint2 value)");
+		begin_scope();
+		statement("return (uint64_t(value.y) << 32) | uint64_t(value.x);");
+		end_scope();
+		statement("");
+
+		statement("uint2 spvUnpackUint2x32(uint64_t value)");
+		begin_scope();
+		statement("uint2 Unpacked;");
+		statement("Unpacked.x = uint(value & 0xffffffff);");
+		statement("Unpacked.y = uint(value >> 32);");
+		statement("return Unpacked;");
+		end_scope();
+		statement("");
+	}
+
+	if (requires_explicit_fp16_packing)
+	{
+		// HLSL does not pack into a single word sadly :(
+		statement("uint spvPackFloat2x16(min16float2 value)");
+		begin_scope();
+		statement("uint2 Packed = f32tof16(value);");
+		statement("return Packed.x | (Packed.y << 16);");
+		end_scope();
+		statement("");
+
+		statement("min16float2 spvUnpackFloat2x16(uint value)");
+		begin_scope();
+		statement("return min16float2(f16tof32(uint2(value & 0xffff, value >> 16)));");
+		end_scope();
+		statement("");
+	}
+
+	// HLSL does not seem to have builtins for these operations, so roll them by hand ...
+	if (requires_unorm8_packing)
+	{
+		statement("uint spvPackUnorm4x8(float4 value)");
+		begin_scope();
+		statement("uint4 Packed = uint4(round(saturate(value) * 255.0));");
+		statement("return Packed.x | (Packed.y << 8) | (Packed.z << 16) | (Packed.w << 24);");
+		end_scope();
+		statement("");
+
+		statement("float4 spvUnpackUnorm4x8(uint value)");
+		begin_scope();
+		statement("uint4 Packed = uint4(value & 0xff, (value >> 8) & 0xff, (value >> 16) & 0xff, value >> 24);");
+		statement("return float4(Packed) / 255.0;");
+		end_scope();
+		statement("");
+	}
+
+	if (requires_snorm8_packing)
+	{
+		statement("uint spvPackSnorm4x8(float4 value)");
+		begin_scope();
+		statement("int4 Packed = int4(round(clamp(value, -1.0, 1.0) * 127.0)) & 0xff;");
+		statement("return uint(Packed.x | (Packed.y << 8) | (Packed.z << 16) | (Packed.w << 24));");
+		end_scope();
+		statement("");
+
+		statement("float4 spvUnpackSnorm4x8(uint value)");
+		begin_scope();
+		statement("int SignedValue = int(value);");
+		statement("int4 Packed = int4(SignedValue << 24, SignedValue << 16, SignedValue << 8, SignedValue) >> 24;");
+		statement("return clamp(float4(Packed) / 127.0, -1.0, 1.0);");
+		end_scope();
+		statement("");
+	}
+
+	if (requires_unorm16_packing)
+	{
+		statement("uint spvPackUnorm2x16(float2 value)");
+		begin_scope();
+		statement("uint2 Packed = uint2(round(saturate(value) * 65535.0));");
+		statement("return Packed.x | (Packed.y << 16);");
+		end_scope();
+		statement("");
+
+		statement("float2 spvUnpackUnorm2x16(uint value)");
+		begin_scope();
+		statement("uint2 Packed = uint2(value & 0xffff, value >> 16);");
+		statement("return float2(Packed) / 65535.0;");
+		end_scope();
+		statement("");
+	}
+
+	if (requires_snorm16_packing)
+	{
+		statement("uint spvPackSnorm2x16(float2 value)");
+		begin_scope();
+		statement("int2 Packed = int2(round(clamp(value, -1.0, 1.0) * 32767.0)) & 0xffff;");
+		statement("return uint(Packed.x | (Packed.y << 16));");
+		end_scope();
+		statement("");
+
+		statement("float2 spvUnpackSnorm2x16(uint value)");
+		begin_scope();
+		statement("int SignedValue = int(value);");
+		statement("int2 Packed = int2(SignedValue << 16, SignedValue) >> 16;");
+		statement("return clamp(float2(Packed) / 32767.0, -1.0, 1.0);");
+		end_scope();
+		statement("");
+	}
+
+	if (requires_bitfield_insert)
+	{
+		static const char *types[] = { "uint", "uint2", "uint3", "uint4" };
+		for (auto &type : types)
+		{
+			statement(type, " spvBitfieldInsert(", type, " Base, ", type, " Insert, uint Offset, uint Count)");
+			begin_scope();
+			statement("uint Mask = Count == 32 ? 0xffffffff : (((1u << Count) - 1) << (Offset & 31));");
+			statement("return (Base & ~Mask) | ((Insert << Offset) & Mask);");
+			end_scope();
+			statement("");
+		}
+	}
+
+	if (requires_bitfield_extract)
+	{
+		static const char *unsigned_types[] = { "uint", "uint2", "uint3", "uint4" };
+		for (auto &type : unsigned_types)
+		{
+			statement(type, " spvBitfieldUExtract(", type, " Base, uint Offset, uint Count)");
+			begin_scope();
+			statement("uint Mask = Count == 32 ? 0xffffffff : ((1 << Count) - 1);");
+			statement("return (Base >> Offset) & Mask;");
+			end_scope();
+			statement("");
+		}
+
+		// In this overload, we will have to do sign-extension, which we will emulate by shifting up and down.
+		static const char *signed_types[] = { "int", "int2", "int3", "int4" };
+		for (auto &type : signed_types)
+		{
+			statement(type, " spvBitfieldSExtract(", type, " Base, int Offset, int Count)");
+			begin_scope();
+			statement("int Mask = Count == 32 ? -1 : ((1 << Count) - 1);");
+			statement(type, " Masked = (Base >> Offset) & Mask;");
+			statement("int ExtendShift = (32 - Count) & 31;");
+			statement("return (Masked << ExtendShift) >> ExtendShift;");
+			end_scope();
+			statement("");
+		}
+	}
+
+	if (requires_inverse_2x2)
+	{
+		statement("// Returns the inverse of a matrix, by using the algorithm of calculating the classical");
+		statement("// adjoint and dividing by the determinant. The contents of the matrix are changed.");
+		statement("float2x2 spvInverse(float2x2 m)");
+		begin_scope();
+		statement("float2x2 adj;	// The adjoint matrix (inverse after dividing by determinant)");
+		statement_no_indent("");
+		statement("// Create the transpose of the cofactors, as the classical adjoint of the matrix.");
+		statement("adj[0][0] =  m[1][1];");
+		statement("adj[0][1] = -m[0][1];");
+		statement_no_indent("");
+		statement("adj[1][0] = -m[1][0];");
+		statement("adj[1][1] =  m[0][0];");
+		statement_no_indent("");
+		statement("// Calculate the determinant as a combination of the cofactors of the first row.");
+		statement("float det = (adj[0][0] * m[0][0]) + (adj[0][1] * m[1][0]);");
+		statement_no_indent("");
+		statement("// Divide the classical adjoint matrix by the determinant.");
+		statement("// If determinant is zero, matrix is not invertable, so leave it unchanged.");
+		statement("return (det != 0.0f) ? (adj * (1.0f / det)) : m;");
+		end_scope();
+		statement("");
+	}
+
+	if (requires_inverse_3x3)
+	{
+		statement("// Returns the determinant of a 2x2 matrix.");
+		statement("float spvDet2x2(float a1, float a2, float b1, float b2)");
+		begin_scope();
+		statement("return a1 * b2 - b1 * a2;");
+		end_scope();
+		statement_no_indent("");
+		statement("// Returns the inverse of a matrix, by using the algorithm of calculating the classical");
+		statement("// adjoint and dividing by the determinant. The contents of the matrix are changed.");
+		statement("float3x3 spvInverse(float3x3 m)");
+		begin_scope();
+		statement("float3x3 adj;	// The adjoint matrix (inverse after dividing by determinant)");
+		statement_no_indent("");
+		statement("// Create the transpose of the cofactors, as the classical adjoint of the matrix.");
+		statement("adj[0][0] =  spvDet2x2(m[1][1], m[1][2], m[2][1], m[2][2]);");
+		statement("adj[0][1] = -spvDet2x2(m[0][1], m[0][2], m[2][1], m[2][2]);");
+		statement("adj[0][2] =  spvDet2x2(m[0][1], m[0][2], m[1][1], m[1][2]);");
+		statement_no_indent("");
+		statement("adj[1][0] = -spvDet2x2(m[1][0], m[1][2], m[2][0], m[2][2]);");
+		statement("adj[1][1] =  spvDet2x2(m[0][0], m[0][2], m[2][0], m[2][2]);");
+		statement("adj[1][2] = -spvDet2x2(m[0][0], m[0][2], m[1][0], m[1][2]);");
+		statement_no_indent("");
+		statement("adj[2][0] =  spvDet2x2(m[1][0], m[1][1], m[2][0], m[2][1]);");
+		statement("adj[2][1] = -spvDet2x2(m[0][0], m[0][1], m[2][0], m[2][1]);");
+		statement("adj[2][2] =  spvDet2x2(m[0][0], m[0][1], m[1][0], m[1][1]);");
+		statement_no_indent("");
+		statement("// Calculate the determinant as a combination of the cofactors of the first row.");
+		statement("float det = (adj[0][0] * m[0][0]) + (adj[0][1] * m[1][0]) + (adj[0][2] * m[2][0]);");
+		statement_no_indent("");
+		statement("// Divide the classical adjoint matrix by the determinant.");
+		statement("// If determinant is zero, matrix is not invertable, so leave it unchanged.");
+		statement("return (det != 0.0f) ? (adj * (1.0f / det)) : m;");
+		end_scope();
+		statement("");
+	}
+
+	if (requires_inverse_4x4)
+	{
+		if (!requires_inverse_3x3)
+		{
+			statement("// Returns the determinant of a 2x2 matrix.");
+			statement("float spvDet2x2(float a1, float a2, float b1, float b2)");
+			begin_scope();
+			statement("return a1 * b2 - b1 * a2;");
+			end_scope();
+			statement("");
+		}
+
+		statement("// Returns the determinant of a 3x3 matrix.");
+		statement("float spvDet3x3(float a1, float a2, float a3, float b1, float b2, float b3, float c1, "
+		          "float c2, float c3)");
+		begin_scope();
+		statement("return a1 * spvDet2x2(b2, b3, c2, c3) - b1 * spvDet2x2(a2, a3, c2, c3) + c1 * "
+		          "spvDet2x2(a2, a3, "
+		          "b2, b3);");
+		end_scope();
+		statement_no_indent("");
+		statement("// Returns the inverse of a matrix, by using the algorithm of calculating the classical");
+		statement("// adjoint and dividing by the determinant. The contents of the matrix are changed.");
+		statement("float4x4 spvInverse(float4x4 m)");
+		begin_scope();
+		statement("float4x4 adj;	// The adjoint matrix (inverse after dividing by determinant)");
+		statement_no_indent("");
+		statement("// Create the transpose of the cofactors, as the classical adjoint of the matrix.");
+		statement(
+		    "adj[0][0] =  spvDet3x3(m[1][1], m[1][2], m[1][3], m[2][1], m[2][2], m[2][3], m[3][1], m[3][2], "
+		    "m[3][3]);");
+		statement(
+		    "adj[0][1] = -spvDet3x3(m[0][1], m[0][2], m[0][3], m[2][1], m[2][2], m[2][3], m[3][1], m[3][2], "
+		    "m[3][3]);");
+		statement(
+		    "adj[0][2] =  spvDet3x3(m[0][1], m[0][2], m[0][3], m[1][1], m[1][2], m[1][3], m[3][1], m[3][2], "
+		    "m[3][3]);");
+		statement(
+		    "adj[0][3] = -spvDet3x3(m[0][1], m[0][2], m[0][3], m[1][1], m[1][2], m[1][3], m[2][1], m[2][2], "
+		    "m[2][3]);");
+		statement_no_indent("");
+		statement(
+		    "adj[1][0] = -spvDet3x3(m[1][0], m[1][2], m[1][3], m[2][0], m[2][2], m[2][3], m[3][0], m[3][2], "
+		    "m[3][3]);");
+		statement(
+		    "adj[1][1] =  spvDet3x3(m[0][0], m[0][2], m[0][3], m[2][0], m[2][2], m[2][3], m[3][0], m[3][2], "
+		    "m[3][3]);");
+		statement(
+		    "adj[1][2] = -spvDet3x3(m[0][0], m[0][2], m[0][3], m[1][0], m[1][2], m[1][3], m[3][0], m[3][2], "
+		    "m[3][3]);");
+		statement(
+		    "adj[1][3] =  spvDet3x3(m[0][0], m[0][2], m[0][3], m[1][0], m[1][2], m[1][3], m[2][0], m[2][2], "
+		    "m[2][3]);");
+		statement_no_indent("");
+		statement(
+		    "adj[2][0] =  spvDet3x3(m[1][0], m[1][1], m[1][3], m[2][0], m[2][1], m[2][3], m[3][0], m[3][1], "
+		    "m[3][3]);");
+		statement(
+		    "adj[2][1] = -spvDet3x3(m[0][0], m[0][1], m[0][3], m[2][0], m[2][1], m[2][3], m[3][0], m[3][1], "
+		    "m[3][3]);");
+		statement(
+		    "adj[2][2] =  spvDet3x3(m[0][0], m[0][1], m[0][3], m[1][0], m[1][1], m[1][3], m[3][0], m[3][1], "
+		    "m[3][3]);");
+		statement(
+		    "adj[2][3] = -spvDet3x3(m[0][0], m[0][1], m[0][3], m[1][0], m[1][1], m[1][3], m[2][0], m[2][1], "
+		    "m[2][3]);");
+		statement_no_indent("");
+		statement(
+		    "adj[3][0] = -spvDet3x3(m[1][0], m[1][1], m[1][2], m[2][0], m[2][1], m[2][2], m[3][0], m[3][1], "
+		    "m[3][2]);");
+		statement(
+		    "adj[3][1] =  spvDet3x3(m[0][0], m[0][1], m[0][2], m[2][0], m[2][1], m[2][2], m[3][0], m[3][1], "
+		    "m[3][2]);");
+		statement(
+		    "adj[3][2] = -spvDet3x3(m[0][0], m[0][1], m[0][2], m[1][0], m[1][1], m[1][2], m[3][0], m[3][1], "
+		    "m[3][2]);");
+		statement(
+		    "adj[3][3] =  spvDet3x3(m[0][0], m[0][1], m[0][2], m[1][0], m[1][1], m[1][2], m[2][0], m[2][1], "
+		    "m[2][2]);");
+		statement_no_indent("");
+		statement("// Calculate the determinant as a combination of the cofactors of the first row.");
+		statement("float det = (adj[0][0] * m[0][0]) + (adj[0][1] * m[1][0]) + (adj[0][2] * m[2][0]) + (adj[0][3] "
+		          "* m[3][0]);");
+		statement_no_indent("");
+		statement("// Divide the classical adjoint matrix by the determinant.");
+		statement("// If determinant is zero, matrix is not invertable, so leave it unchanged.");
+		statement("return (det != 0.0f) ? (adj * (1.0f / det)) : m;");
+		end_scope();
+		statement("");
+	}
+
+	if (requires_scalar_reflect)
+	{
+		// FP16/FP64? No templates in HLSL.
+		statement("float spvReflect(float i, float n)");
+		begin_scope();
+		statement("return i - 2.0 * dot(n, i) * n;");
+		end_scope();
+		statement("");
+	}
+
+	if (requires_scalar_refract)
+	{
+		// FP16/FP64? No templates in HLSL.
+		statement("float spvRefract(float i, float n, float eta)");
+		begin_scope();
+		statement("float NoI = n * i;");
+		statement("float NoI2 = NoI * NoI;");
+		statement("float k = 1.0 - eta * eta * (1.0 - NoI2);");
+		statement("if (k < 0.0)");
+		begin_scope();
+		statement("return 0.0;");
+		end_scope();
+		statement("else");
+		begin_scope();
+		statement("return eta * i - (eta * NoI + sqrt(k)) * n;");
+		end_scope();
+		end_scope();
+		statement("");
+	}
+
+	if (requires_scalar_faceforward)
+	{
+		// FP16/FP64? No templates in HLSL.
+		statement("float spvFaceForward(float n, float i, float nref)");
+		begin_scope();
+		statement("return i * nref < 0.0 ? n : -n;");
+		end_scope();
+		statement("");
+	}
+
+	for (TypeID type_id : composite_selection_workaround_types)
+	{
+		// Need out variable since HLSL does not support returning arrays.
+		auto &type = get<SPIRType>(type_id);
+		auto type_str = type_to_glsl(type);
+		auto type_arr_str = type_to_array_glsl(type);
+		statement("void spvSelectComposite(out ", type_str, " out_value", type_arr_str, ", bool cond, ",
+		          type_str, " true_val", type_arr_str, ", ",
+		          type_str, " false_val", type_arr_str, ")");
+		begin_scope();
+		statement("if (cond)");
+		begin_scope();
+		statement("out_value = true_val;");
+		end_scope();
+		statement("else");
+		begin_scope();
+		statement("out_value = false_val;");
+		end_scope();
+		end_scope();
+		statement("");
+	}
+
+	if (is_mesh_shader && options.vertex.flip_vert_y)
+	{
+		statement("float4 spvFlipVertY(float4 v)");
+		begin_scope();
+		statement("return float4(v.x, -v.y, v.z, v.w);");
+		end_scope();
+		statement("");
+		statement("float spvFlipVertY(float v)");
+		begin_scope();
+		statement("return -v;");
+		end_scope();
+		statement("");
+	}
+}
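
For reference, the shorter helpers emitted above assemble into HLSL along these lines (reconstructed directly from the statement() calls; formatting approximate):

    float spvReflect(float i, float n)
    {
        return i - 2.0 * dot(n, i) * n;
    }

    float4 spvFlipVertY(float4 v)
    {
        return float4(v.x, -v.y, v.z, v.w);
    }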
+
+void CompilerHLSL::emit_texture_size_variants(uint64_t variant_mask, const char *vecsize_qualifier, bool uav,
+                                              const char *type_qualifier)
+{
+	if (variant_mask == 0)
+		return;
+
+	static const char *types[QueryTypeCount] = { "float", "int", "uint" };
+	static const char *dims[QueryDimCount] = { "Texture1D",   "Texture1DArray",  "Texture2D",   "Texture2DArray",
+		                                       "Texture3D",   "Buffer",          "TextureCube", "TextureCubeArray",
+		                                       "Texture2DMS", "Texture2DMSArray" };
+
+	static const bool has_lod[QueryDimCount] = { true, true, true, true, true, false, true, true, false, false };
+
+	static const char *ret_types[QueryDimCount] = {
+		"uint", "uint2", "uint2", "uint3", "uint3", "uint", "uint2", "uint3", "uint2", "uint3",
+	};
+
+	static const uint32_t return_arguments[QueryDimCount] = {
+		1, 2, 2, 3, 3, 1, 2, 3, 2, 3,
+	};
+
+	for (uint32_t index = 0; index < QueryDimCount; index++)
+	{
+		for (uint32_t type_index = 0; type_index < QueryTypeCount; type_index++)
+		{
+			uint32_t bit = 16 * type_index + index;
+			uint64_t mask = 1ull << bit;
+
+			if ((variant_mask & mask) == 0)
+				continue;
+
+			statement(ret_types[index], " spv", (uav ? "Image" : "Texture"), "Size(", (uav ? "RW" : ""),
+			          dims[index], "<", type_qualifier, types[type_index], vecsize_qualifier, "> Tex, ",
+			          (uav ? "" : "uint Level, "), "out uint Param)");
+			begin_scope();
+			statement(ret_types[index], " ret;");
+			switch (return_arguments[index])
+			{
+			case 1:
+				if (has_lod[index] && !uav)
+					statement("Tex.GetDimensions(Level, ret.x, Param);");
+				else
+				{
+					statement("Tex.GetDimensions(ret.x);");
+					statement("Param = 0u;");
+				}
+				break;
+			case 2:
+				if (has_lod[index] && !uav)
+					statement("Tex.GetDimensions(Level, ret.x, ret.y, Param);");
+				else if (!uav)
+					statement("Tex.GetDimensions(ret.x, ret.y, Param);");
+				else
+				{
+					statement("Tex.GetDimensions(ret.x, ret.y);");
+					statement("Param = 0u;");
+				}
+				break;
+			case 3:
+				if (has_lod[index] && !uav)
+					statement("Tex.GetDimensions(Level, ret.x, ret.y, ret.z, Param);");
+				else if (!uav)
+					statement("Tex.GetDimensions(ret.x, ret.y, ret.z, Param);");
+				else
+				{
+					statement("Tex.GetDimensions(ret.x, ret.y, ret.z);");
+					statement("Param = 0u;");
+				}
+				break;
+			}
+
+			statement("return ret;");
+			end_scope();
+			statement("");
+		}
+	}
+}
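
As an example of the generated variants, the non-UAV Texture2D/float case (assuming a "4" vecsize qualifier and an empty type qualifier) comes out roughly as:

    uint2 spvTextureSize(Texture2D<float4> Tex, uint Level, out uint Param)
    {
        uint2 ret;
        Tex.GetDimensions(Level, ret.x, ret.y, Param);
        return ret;
    }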
+
+void CompilerHLSL::analyze_meshlet_writes()
+{
+	uint32_t id_per_vertex = 0;
+	uint32_t id_per_primitive = 0;
+	bool need_per_primitive = false;
+	bool need_per_vertex = false;
+
+	ir.for_each_typed_id<SPIRVariable>([&](uint32_t, SPIRVariable &var) {
+		auto &type = this->get<SPIRType>(var.basetype);
+		bool block = has_decoration(type.self, DecorationBlock);
+		if (var.storage == StorageClassOutput && block && is_builtin_variable(var))
+		{
+			auto flags = get_buffer_block_flags(var.self);
+			if (flags.get(DecorationPerPrimitiveEXT))
+				id_per_primitive = var.self;
+			else
+				id_per_vertex = var.self;
+		}
+		else if (var.storage == StorageClassOutput)
+		{
+			Bitset flags;
+			if (block)
+				flags = get_buffer_block_flags(var.self);
+			else
+				flags = get_decoration_bitset(var.self);
+
+			if (flags.get(DecorationPerPrimitiveEXT))
+				need_per_primitive = true;
+			else
+				need_per_vertex = true;
+		}
+	});
+
+	// If we have per-primitive outputs, and no per-primitive builtins,
+	// an empty version of gl_MeshPerPrimitiveEXT will be emitted.
+	// If we don't use block IO for vertex output, we'll also need to synthesize the PerVertex block.
+
+	const auto generate_block = [&](const char *block_name, const char *instance_name, bool per_primitive) -> uint32_t {
+		auto &execution = get_entry_point();
+
+		uint32_t op_type = ir.increase_bound_by(4);
+		uint32_t op_arr = op_type + 1;
+		uint32_t op_ptr = op_type + 2;
+		uint32_t op_var = op_type + 3;
+
+		auto &type = set<SPIRType>(op_type);
+		type.basetype = SPIRType::Struct;
+		set_name(op_type, block_name);
+		set_decoration(op_type, DecorationBlock);
+		if (per_primitive)
+			set_decoration(op_type, DecorationPerPrimitiveEXT);
+
+		auto &arr = set<SPIRType>(op_arr, type);
+		arr.parent_type = type.self;
+		arr.array.push_back(per_primitive ? execution.output_primitives : execution.output_vertices);
+		arr.array_size_literal.push_back(true);
+
+		auto &ptr = set<SPIRType>(op_ptr, arr);
+		ptr.parent_type = arr.self;
+		ptr.pointer = true;
+		ptr.pointer_depth++;
+		ptr.storage = StorageClassOutput;
+		set_decoration(op_ptr, DecorationBlock);
+		set_name(op_ptr, block_name);
+
+		auto &var = set<SPIRVariable>(op_var, op_ptr, StorageClassOutput);
+		if (per_primitive)
+			set_decoration(op_var, DecorationPerPrimitiveEXT);
+		set_name(op_var, instance_name);
+		execution.interface_variables.push_back(var.self);
+
+		return op_var;
+	};
+
+	if (id_per_vertex == 0 && need_per_vertex)
+		id_per_vertex = generate_block("gl_MeshPerVertexEXT", "gl_MeshVerticesEXT", false);
+	if (id_per_primitive == 0 && need_per_primitive)
+		id_per_primitive = generate_block("gl_MeshPerPrimitiveEXT", "gl_MeshPrimitivesEXT", true);
+
+	unordered_set<uint32_t> processed_func_ids;
+	analyze_meshlet_writes(ir.default_entry_point, id_per_vertex, id_per_primitive, processed_func_ids);
+}
+
+void CompilerHLSL::analyze_meshlet_writes(uint32_t func_id, uint32_t id_per_vertex, uint32_t id_per_primitive,
+                                          std::unordered_set<uint32_t> &processed_func_ids)
+{
+	// Avoid processing a function more than once
+	if (processed_func_ids.find(func_id) != processed_func_ids.end())
+		return;
+	processed_func_ids.insert(func_id);
+
+	auto &func = get<SPIRFunction>(func_id);
+	// Recursively establish global args added to functions on which we depend.
+	for (auto& block : func.blocks)
+	{
+		auto &b = get<SPIRBlock>(block);
+		for (auto &i : b.ops)
+		{
+			auto ops = stream(i);
+			auto op = static_cast<Op>(i.op);
+
+			switch (op)
+			{
+			case OpFunctionCall:
+			{
+				// Then recurse into the function itself to extract globals used internally in the function
+				uint32_t inner_func_id = ops[2];
+				analyze_meshlet_writes(inner_func_id, id_per_vertex, id_per_primitive, processed_func_ids);
+				auto &inner_func = get<SPIRFunction>(inner_func_id);
+				for (auto &iarg : inner_func.arguments)
+				{
+					if (!iarg.alias_global_variable)
+						continue;
+
+					bool already_declared = false;
+					for (auto &arg : func.arguments)
+					{
+						if (arg.id == iarg.id)
+						{
+							already_declared = true;
+							break;
+						}
+					}
+
+					if (!already_declared)
+					{
+						// basetype is effectively ignored here since we declare the argument
+						// with explicit types. Just pass down a valid type.
+						func.arguments.push_back({ expression_type_id(iarg.id), iarg.id,
+						                           iarg.read_count, iarg.write_count, true });
+					}
+				}
+				break;
+			}
+
+			case OpStore:
+			case OpLoad:
+			case OpInBoundsAccessChain:
+			case OpAccessChain:
+			case OpPtrAccessChain:
+			case OpInBoundsPtrAccessChain:
+			case OpArrayLength:
+			{
+				auto *var = maybe_get<SPIRVariable>(ops[op == OpStore ? 0 : 2]);
+				if (var && (var->storage == StorageClassOutput || var->storage == StorageClassTaskPayloadWorkgroupEXT))
+				{
+					bool already_declared = false;
+					auto builtin_type = BuiltIn(get_decoration(var->self, DecorationBuiltIn));
+
+					uint32_t var_id = var->self;
+					if (var->storage != StorageClassTaskPayloadWorkgroupEXT &&
+						builtin_type != BuiltInPrimitivePointIndicesEXT &&
+						builtin_type != BuiltInPrimitiveLineIndicesEXT &&
+						builtin_type != BuiltInPrimitiveTriangleIndicesEXT)
+					{
+						var_id = is_per_primitive_variable(*var) ? id_per_primitive : id_per_vertex;
+					}
+
+					for (auto &arg : func.arguments)
+					{
+						if (arg.id == var_id)
+						{
+							already_declared = true;
+							break;
+						}
+					}
+
+					if (!already_declared)
+					{
+						// basetype is effectively ignored here since we declare the argument
+						// with explicit types. Just pass down a valid type.
+						uint32_t type_id = expression_type_id(var_id);
+						if (var->storage == StorageClassTaskPayloadWorkgroupEXT)
+							func.arguments.push_back({ type_id, var_id, 1u, 0u, true });
+						else
+							func.arguments.push_back({ type_id, var_id, 1u, 1u, true });
+					}
+				}
+				break;
+			}
+
+			default:
+				break;
+			}
+		}
+	}
+}
+
+string CompilerHLSL::layout_for_member(const SPIRType &type, uint32_t index)
+{
+	auto &flags = get_member_decoration_bitset(type.self, index);
+
+	// HLSL can emit row_major or column_major decoration in any struct.
+	// Do not try to merge combined decorations for children like in GLSL.
+
+	// Flip the convention. HLSL is a bit odd in that the memory layout is column major ... but the language API is "row-major".
+	// The way to deal with this is to multiply everything in inverse order, and reverse the memory layout.
+	if (flags.get(DecorationColMajor))
+		return "row_major ";
+	else if (flags.get(DecorationRowMajor))
+		return "column_major ";
+
+	return "";
+}
+
+void CompilerHLSL::emit_struct_member(const SPIRType &type, uint32_t member_type_id, uint32_t index,
+                                      const string &qualifier, uint32_t base_offset)
+{
+	auto &membertype = get<SPIRType>(member_type_id);
+
+	Bitset memberflags;
+	auto &memb = ir.meta[type.self].members;
+	if (index < memb.size())
+		memberflags = memb[index].decoration_flags;
+
+	string packing_offset;
+	bool is_push_constant = type.storage == StorageClassPushConstant;
+
+	if ((has_extended_decoration(type.self, SPIRVCrossDecorationExplicitOffset) || is_push_constant) &&
+	    has_member_decoration(type.self, index, DecorationOffset))
+	{
+		uint32_t offset = memb[index].offset - base_offset;
+		if (offset & 3)
+			SPIRV_CROSS_THROW("Cannot pack on tighter bounds than 4 bytes in HLSL.");
+
+		static const char *packing_swizzle[] = { "", ".y", ".z", ".w" };
+		packing_offset = join(" : packoffset(c", offset / 16, packing_swizzle[(offset & 15) >> 2], ")");
+	}
+
+	statement(layout_for_member(type, index), qualifier,
+	          variable_decl(membertype, to_member_name(type, index)), packing_offset, ";");
+}
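
A quick worked example of the packoffset mapping above: a hypothetical float member at byte offset 20 lands in register c1, component .y, since 20 / 16 = 1 and (20 & 15) >> 2 = 1, so the emitted member would read roughly:

    float myMember : packoffset(c1.y);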
+
+void CompilerHLSL::emit_rayquery_function(const char *commited, const char *candidate, const uint32_t *ops)
+{
+	flush_variable_declaration(ops[0]);
+	uint32_t is_commited = evaluate_constant_u32(ops[3]);
+	emit_op(ops[0], ops[1], join(to_expression(ops[2]), is_commited ? commited : candidate), false);
+}
+
+void CompilerHLSL::emit_mesh_tasks(SPIRBlock &block)
+{
+	if (block.mesh.payload != 0)
+	{
+		statement("DispatchMesh(", to_unpacked_expression(block.mesh.groups[0]), ", ", to_unpacked_expression(block.mesh.groups[1]), ", ",
+		    to_unpacked_expression(block.mesh.groups[2]), ", ", to_unpacked_expression(block.mesh.payload), ");");
+	}
+	else
+	{
+		SPIRV_CROSS_THROW("Amplification shader in HLSL must have payload");
+	}
+}
+
+void CompilerHLSL::emit_buffer_block(const SPIRVariable &var)
+{
+	auto &type = get<SPIRType>(var.basetype);
+
+	bool is_uav = var.storage == StorageClassStorageBuffer || has_decoration(type.self, DecorationBufferBlock);
+
+	if (flattened_buffer_blocks.count(var.self))
+	{
+		emit_buffer_block_flattened(var);
+	}
+	else if (is_uav)
+	{
+		Bitset flags = ir.get_buffer_block_flags(var);
+		bool is_readonly = flags.get(DecorationNonWritable) && !is_hlsl_force_storage_buffer_as_uav(var.self);
+		bool is_coherent = flags.get(DecorationCoherent) && !is_readonly;
+		bool is_interlocked = interlocked_resources.count(var.self) > 0;
+
+		auto to_structuredbuffer_subtype_name = [this](const SPIRType &parent_type) -> std::string
+		{
+			if (parent_type.basetype == SPIRType::Struct && parent_type.member_types.size() == 1)
+			{
+				// Use type of first struct member as a StructuredBuffer will have only one '._m0' field in SPIR-V
+				const auto &member0_type = this->get<SPIRType>(parent_type.member_types.front());
+				return this->type_to_glsl(member0_type);
+			}
+			else
+			{
+				// Otherwise, this StructuredBuffer only has a basic subtype, e.g. StructuredBuffer<int>
+				return this->type_to_glsl(parent_type);
+			}
+		};
+
+		std::string type_name;
+		if (is_user_type_structured(var.self))
+			type_name = join(is_readonly ? "" : is_interlocked ? "RasterizerOrdered" : "RW", "StructuredBuffer<", to_structuredbuffer_subtype_name(type), ">");
+		else
+			type_name = is_readonly ? "ByteAddressBuffer" : is_interlocked ? "RasterizerOrderedByteAddressBuffer" : "RWByteAddressBuffer";
+
+		add_resource_name(var.self);
+		statement(is_coherent ? "globallycoherent " : "", type_name, " ", to_name(var.self), type_to_array_glsl(type),
+		          to_resource_binding(var), ";");
+	}
+	else
+	{
+		if (type.array.empty())
+		{
+			// Flatten the top-level struct so we can use packoffset,
+			// this restriction is similar to GLSL where layout(offset) is not possible on sub-structs.
+			flattened_structs[var.self] = false;
+
+			// Prefer the block name if possible.
+			auto buffer_name = to_name(type.self, false);
+			if (ir.meta[type.self].decoration.alias.empty() ||
+			    resource_names.find(buffer_name) != end(resource_names) ||
+			    block_names.find(buffer_name) != end(block_names))
+			{
+				buffer_name = get_block_fallback_name(var.self);
+			}
+
+			add_variable(block_names, resource_names, buffer_name);
+
+			// If for some reason buffer_name is an illegal name, make a final fallback to a workaround name.
+			// This cannot conflict with anything else, so we're safe now.
+			if (buffer_name.empty())
+				buffer_name = join("_", get<SPIRType>(var.basetype).self, "_", var.self);
+
+			uint32_t failed_index = 0;
+			if (buffer_is_packing_standard(type, BufferPackingHLSLCbufferPackOffset, &failed_index))
+				set_extended_decoration(type.self, SPIRVCrossDecorationExplicitOffset);
+			else
+			{
+				SPIRV_CROSS_THROW(join("cbuffer ID ", var.self, " (name: ", buffer_name, "), member index ",
+				                       failed_index, " (name: ", to_member_name(type, failed_index),
+				                       ") cannot be expressed with either HLSL packing layout or packoffset."));
+			}
+
+			block_names.insert(buffer_name);
+
+			// Save for post-reflection later.
+			declared_block_names[var.self] = buffer_name;
+
+			type.member_name_cache.clear();
+			// var.self can be used as a backup name for the block name,
+			// so we need to make sure we don't disturb the name here on a recompile.
+			// It will need to be reset if we have to recompile.
+			preserve_alias_on_reset(var.self);
+			add_resource_name(var.self);
+			statement("cbuffer ", buffer_name, to_resource_binding(var));
+			begin_scope();
+
+			uint32_t i = 0;
+			for (auto &member : type.member_types)
+			{
+				add_member_name(type, i);
+				auto backup_name = get_member_name(type.self, i);
+				auto member_name = to_member_name(type, i);
+				member_name = join(to_name(var.self), "_", member_name);
+				ParsedIR::sanitize_underscores(member_name);
+				set_member_name(type.self, i, member_name);
+				emit_struct_member(type, member, i, "");
+				set_member_name(type.self, i, backup_name);
+				i++;
+			}
+
+			end_scope_decl();
+			statement("");
+		}
+		else
+		{
+			if (hlsl_options.shader_model < 51)
+				SPIRV_CROSS_THROW(
+				    "Need ConstantBuffer<T> to use arrays of UBOs, but this is only supported in SM 5.1.");
+
+			add_resource_name(type.self);
+			add_resource_name(var.self);
+
+			// ConstantBuffer<T> does not support packoffset, so it is unusable unless everything aligns as we expect.
+			uint32_t failed_index = 0;
+			if (!buffer_is_packing_standard(type, BufferPackingHLSLCbuffer, &failed_index))
+			{
+				SPIRV_CROSS_THROW(join("HLSL ConstantBuffer<T> ID ", var.self, " (name: ", to_name(type.self),
+				                       "), member index ", failed_index, " (name: ", to_member_name(type, failed_index),
+				                       ") cannot be expressed with normal HLSL packing rules."));
+			}
+
+			emit_struct(get<SPIRType>(type.self));
+			statement("ConstantBuffer<", to_name(type.self), "> ", to_name(var.self), type_to_array_glsl(type),
+			          to_resource_binding(var), ";");
+		}
+	}
+}
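
As a rough sketch of what this function produces (hypothetical names and bindings), a writable storage buffer takes the UAV path while a plain uniform block becomes a cbuffer with packoffset members:

    RWByteAddressBuffer ssbo : register(u1);

    cbuffer ubo : register(b0)
    {
        float4 ubo_color : packoffset(c0);
    };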
+
+void CompilerHLSL::emit_push_constant_block(const SPIRVariable &var)
+{
+	if (flattened_buffer_blocks.count(var.self))
+	{
+		emit_buffer_block_flattened(var);
+	}
+	else if (root_constants_layout.empty())
+	{
+		emit_buffer_block(var);
+	}
+	else
+	{
+		for (const auto &layout : root_constants_layout)
+		{
+			auto &type = get<SPIRType>(var.basetype);
+
+			uint32_t failed_index = 0;
+			if (buffer_is_packing_standard(type, BufferPackingHLSLCbufferPackOffset, &failed_index, layout.start,
+			                               layout.end))
+				set_extended_decoration(type.self, SPIRVCrossDecorationExplicitOffset);
+			else
+			{
+				SPIRV_CROSS_THROW(join("Root constant cbuffer ID ", var.self, " (name: ", to_name(type.self), ")",
+				                       ", member index ", failed_index, " (name: ", to_member_name(type, failed_index),
+				                       ") cannot be expressed with either HLSL packing layout or packoffset."));
+			}
+
+			flattened_structs[var.self] = false;
+			type.member_name_cache.clear();
+			add_resource_name(var.self);
+			auto &memb = ir.meta[type.self].members;
+
+			statement("cbuffer SPIRV_CROSS_RootConstant_", to_name(var.self),
+			          to_resource_register(HLSL_BINDING_AUTO_PUSH_CONSTANT_BIT, 'b', layout.binding, layout.space));
+			begin_scope();
+
+			// Index of the next field in the generated root-constant cbuffer
+			auto constant_index = 0u;
+
+			// Iterate over all members of the push constant and check which of the fields
+			// fit into the given root constant layout.
+			for (auto i = 0u; i < memb.size(); i++)
+			{
+				const auto offset = memb[i].offset;
+				if (layout.start <= offset && offset < layout.end)
+				{
+					const auto &member = type.member_types[i];
+
+					add_member_name(type, constant_index);
+					auto backup_name = get_member_name(type.self, i);
+					auto member_name = to_member_name(type, i);
+					member_name = join(to_name(var.self), "_", member_name);
+					ParsedIR::sanitize_underscores(member_name);
+					set_member_name(type.self, constant_index, member_name);
+					emit_struct_member(type, member, i, "", layout.start);
+					set_member_name(type.self, constant_index, backup_name);
+
+					constant_index++;
+				}
+			}
+
+			end_scope_decl();
+		}
+	}
+}
+
+string CompilerHLSL::to_sampler_expression(uint32_t id)
+{
+	auto expr = join("_", to_non_uniform_aware_expression(id));
+	auto index = expr.find_first_of('[');
+	if (index == string::npos)
+	{
+		return expr + "_sampler";
+	}
+	else
+	{
+		// We have an expression like _ident[array], so we cannot tack on _sampler, insert it inside the string instead.
+		return expr.insert(index, "_sampler");
+	}
+}
+
+void CompilerHLSL::emit_sampled_image_op(uint32_t result_type, uint32_t result_id, uint32_t image_id, uint32_t samp_id)
+{
+	if (hlsl_options.shader_model >= 40 && combined_image_samplers.empty())
+	{
+		set<SPIRCombinedImageSampler>(result_id, result_type, image_id, samp_id);
+	}
+	else
+	{
+		// Make sure to suppress usage tracking. It is illegal to create temporaries of opaque types.
+		emit_op(result_type, result_id, to_combined_image_sampler(image_id, samp_id), true, true);
+	}
+}
+
+string CompilerHLSL::to_func_call_arg(const SPIRFunction::Parameter &arg, uint32_t id)
+{
+	string arg_str = CompilerGLSL::to_func_call_arg(arg, id);
+
+	if (hlsl_options.shader_model <= 30)
+		return arg_str;
+
+	// Manufacture automatic sampler arg if the arg is a SampledImage texture and we're in modern HLSL.
+	auto &type = expression_type(id);
+
+	// We don't have to consider combined image samplers here via OpSampledImage because
+	// those variables cannot be passed as arguments to functions.
+	// Only global SampledImage variables may be used as arguments.
+	if (type.basetype == SPIRType::SampledImage && type.image.dim != DimBuffer)
+		arg_str += ", " + to_sampler_expression(id);
+
+	return arg_str;
+}
+
+string CompilerHLSL::get_inner_entry_point_name() const
+{
+	auto &execution = get_entry_point();
+
+	if (hlsl_options.use_entry_point_name)
+	{
+		auto name = join(execution.name, "_inner");
+		ParsedIR::sanitize_underscores(name);
+		return name;
+	}
+
+	if (execution.model == ExecutionModelVertex)
+		return "vert_main";
+	else if (execution.model == ExecutionModelFragment)
+		return "frag_main";
+	else if (execution.model == ExecutionModelGLCompute)
+		return "comp_main";
+	else if (execution.model == ExecutionModelMeshEXT)
+		return "mesh_main";
+	else if (execution.model == ExecutionModelTaskEXT)
+		return "task_main";
+	else
+		SPIRV_CROSS_THROW("Unsupported execution model.");
+}
+
+void CompilerHLSL::emit_function_prototype(SPIRFunction &func, const Bitset &return_flags)
+{
+	if (func.self != ir.default_entry_point)
+		add_function_overload(func);
+
+	// Avoid shadow declarations.
+	local_variable_names = resource_names;
+
+	string decl;
+
+	auto &type = get<SPIRType>(func.return_type);
+	if (type.array.empty())
+	{
+		decl += flags_to_qualifiers_glsl(type, return_flags);
+		decl += type_to_glsl(type);
+		decl += " ";
+	}
+	else
+	{
+		// We cannot return arrays in HLSL, so "return" through an out variable.
+		decl = "void ";
+	}
+
+	if (func.self == ir.default_entry_point)
+	{
+		decl += get_inner_entry_point_name();
+		processing_entry_point = true;
+	}
+	else
+		decl += to_name(func.self);
+
+	decl += "(";
+	SmallVector<string> arglist;
+
+	if (!type.array.empty())
+	{
+		// Fake array returns by writing to an out array instead.
+		string out_argument;
+		out_argument += "out ";
+		out_argument += type_to_glsl(type);
+		out_argument += " ";
+		out_argument += "spvReturnValue";
+		out_argument += type_to_array_glsl(type);
+		arglist.push_back(std::move(out_argument));
+	}
+
+	for (auto &arg : func.arguments)
+	{
+		// Do not pass in separate images or samplers if we're remapping
+		// to combined image samplers.
+		if (skip_argument(arg.id))
+			continue;
+
+		// Might change the variable name if it already exists in this function.
+		// SPIRV OpName doesn't have any semantic effect, so it's valid for an implementation
+		// to use the same name for variables.
+		// Since we want to make the GLSL debuggable and somewhat sane, use fallback names for variables which are duplicates.
+		add_local_variable_name(arg.id);
+
+		arglist.push_back(argument_decl(arg));
+
+		// Flatten a combined sampler to two separate arguments in modern HLSL.
+		auto &arg_type = get<SPIRType>(arg.type);
+		if (hlsl_options.shader_model > 30 && arg_type.basetype == SPIRType::SampledImage &&
+		    arg_type.image.dim != DimBuffer)
+		{
+			// Manufacture automatic sampler arg for SampledImage texture
+			arglist.push_back(join(is_depth_image(arg_type, arg.id) ? "SamplerComparisonState " : "SamplerState ",
+			                       to_sampler_expression(arg.id), type_to_array_glsl(arg_type)));
+		}
+
+		// Hold a pointer to the parameter so we can invalidate the readonly field if needed.
+		auto *var = maybe_get<SPIRVariable>(arg.id);
+		if (var)
+			var->parameter = &arg;
+	}
+
+	for (auto &arg : func.shadow_arguments)
+	{
+		// Might change the variable name if it already exists in this function.
+		// SPIRV OpName doesn't have any semantic effect, so it's valid for an implementation
+		// to use the same name for variables.
+		// Since we want to make the GLSL debuggable and somewhat sane, use fallback names for variables which are duplicates.
+		add_local_variable_name(arg.id);
+
+		arglist.push_back(argument_decl(arg));
+
+		// Hold a pointer to the parameter so we can invalidate the readonly field if needed.
+		auto *var = maybe_get<SPIRVariable>(arg.id);
+		if (var)
+			var->parameter = &arg;
+	}
+
+	decl += merge(arglist);
+	decl += ")";
+	statement(decl);
+}
+
+void CompilerHLSL::emit_hlsl_entry_point()
+{
+	SmallVector<string> arguments;
+
+	if (require_input)
+		arguments.push_back("SPIRV_Cross_Input stage_input");
+
+	auto &execution = get_entry_point();
+
+	switch (execution.model)
+	{
+	case ExecutionModelTaskEXT:
+	case ExecutionModelMeshEXT:
+	case ExecutionModelGLCompute:
+	{
+		if (execution.model == ExecutionModelMeshEXT)
+		{
+			if (execution.flags.get(ExecutionModeOutputTrianglesEXT))
+				statement("[outputtopology(\"triangle\")]");
+			else if (execution.flags.get(ExecutionModeOutputLinesEXT))
+				statement("[outputtopology(\"line\")]");
+			else if (execution.flags.get(ExecutionModeOutputPoints))
+				SPIRV_CROSS_THROW("Topology mode \"points\" is not supported in DirectX");
+
+			auto &func = get<SPIRFunction>(ir.default_entry_point);
+			for (auto &arg : func.arguments)
+			{
+				auto &var = get<SPIRVariable>(arg.id);
+				auto &base_type = get<SPIRType>(var.basetype);
+				bool block = has_decoration(base_type.self, DecorationBlock);
+				if (var.storage == StorageClassTaskPayloadWorkgroupEXT)
+				{
+					arguments.push_back("in payload " + variable_decl(var));
+				}
+				else if (block)
+				{
+					auto flags = get_buffer_block_flags(var.self);
+					if (flags.get(DecorationPerPrimitiveEXT) || has_decoration(arg.id, DecorationPerPrimitiveEXT))
+					{
+						arguments.push_back("out primitives gl_MeshPerPrimitiveEXT gl_MeshPrimitivesEXT[" +
+						                    std::to_string(execution.output_primitives) + "]");
+					}
+					else
+					{
+						arguments.push_back("out vertices gl_MeshPerVertexEXT gl_MeshVerticesEXT[" +
+						                    std::to_string(execution.output_vertices) + "]");
+					}
+				}
+				else
+				{
+					if (execution.flags.get(ExecutionModeOutputTrianglesEXT))
+					{
+						arguments.push_back("out indices uint3 gl_PrimitiveTriangleIndicesEXT[" +
+						                    std::to_string(execution.output_primitives) + "]");
+					}
+					else
+					{
+						arguments.push_back("out indices uint2 gl_PrimitiveLineIndicesEXT[" +
+						                    std::to_string(execution.output_primitives) + "]");
+					}
+				}
+			}
+		}
+		SpecializationConstant wg_x, wg_y, wg_z;
+		get_work_group_size_specialization_constants(wg_x, wg_y, wg_z);
+
+		uint32_t x = execution.workgroup_size.x;
+		uint32_t y = execution.workgroup_size.y;
+		uint32_t z = execution.workgroup_size.z;
+
+		if (!execution.workgroup_size.constant && execution.flags.get(ExecutionModeLocalSizeId))
+		{
+			if (execution.workgroup_size.id_x)
+				x = get<SPIRConstant>(execution.workgroup_size.id_x).scalar();
+			if (execution.workgroup_size.id_y)
+				y = get<SPIRConstant>(execution.workgroup_size.id_y).scalar();
+			if (execution.workgroup_size.id_z)
+				z = get<SPIRConstant>(execution.workgroup_size.id_z).scalar();
+		}
+
+		auto x_expr = wg_x.id ? get<SPIRConstant>(wg_x.id).specialization_constant_macro_name : to_string(x);
+		auto y_expr = wg_y.id ? get<SPIRConstant>(wg_y.id).specialization_constant_macro_name : to_string(y);
+		auto z_expr = wg_z.id ? get<SPIRConstant>(wg_z.id).specialization_constant_macro_name : to_string(z);
+
+		statement("[numthreads(", x_expr, ", ", y_expr, ", ", z_expr, ")]");
+		break;
+	}
+	case ExecutionModelFragment:
+		if (execution.flags.get(ExecutionModeEarlyFragmentTests))
+			statement("[earlydepthstencil]");
+		break;
+	default:
+		break;
+	}
+
+	const char *entry_point_name;
+	if (hlsl_options.use_entry_point_name)
+		entry_point_name = get_entry_point().name.c_str();
+	else
+		entry_point_name = "main";
+
+	statement(require_output ? "SPIRV_Cross_Output " : "void ", entry_point_name, "(", merge(arguments), ")");
+	begin_scope();
+	bool legacy = hlsl_options.shader_model <= 30;
+
+	// Copy builtins from entry point arguments to globals.
+	active_input_builtins.for_each_bit([&](uint32_t i) {
+		auto builtin = builtin_to_glsl(static_cast<BuiltIn>(i), StorageClassInput);
+		switch (static_cast<BuiltIn>(i))
+		{
+		case BuiltInFragCoord:
+			// VPOS in D3D9 is sampled at integer locations, apply half-pixel offset to be consistent.
+			// TODO: Do we need an option here? Any reason why a D3D9 shader would be used
+			// on a D3D10+ system with a different rasterization config?
+			if (legacy)
+				statement(builtin, " = stage_input.", builtin, " + float4(0.5f, 0.5f, 0.0f, 0.0f);");
+			else
+			{
+				statement(builtin, " = stage_input.", builtin, ";");
+				// ZW are undefined in D3D9, only do this fixup here.
+				statement(builtin, ".w = 1.0 / ", builtin, ".w;");
+			}
+			break;
+
+		case BuiltInVertexId:
+		case BuiltInVertexIndex:
+		case BuiltInInstanceIndex:
+			// D3D semantics are uint, but shader wants int.
+			if (hlsl_options.support_nonzero_base_vertex_base_instance)
+			{
+				if (static_cast<BuiltIn>(i) == BuiltInInstanceIndex)
+					statement(builtin, " = int(stage_input.", builtin, ") + SPIRV_Cross_BaseInstance;");
+				else
+					statement(builtin, " = int(stage_input.", builtin, ") + SPIRV_Cross_BaseVertex;");
+			}
+			else
+				statement(builtin, " = int(stage_input.", builtin, ");");
+			break;
+
+		case BuiltInBaseVertex:
+			statement(builtin, " = SPIRV_Cross_BaseVertex;");
+			break;
+
+		case BuiltInBaseInstance:
+			statement(builtin, " = SPIRV_Cross_BaseInstance;");
+			break;
+
+		case BuiltInInstanceId:
+			// D3D semantics are uint, but shader wants int.
+			statement(builtin, " = int(stage_input.", builtin, ");");
+			break;
+
+		case BuiltInNumWorkgroups:
+		case BuiltInPointCoord:
+		case BuiltInSubgroupSize:
+		case BuiltInSubgroupLocalInvocationId:
+		case BuiltInHelperInvocation:
+			break;
+
+		case BuiltInSubgroupEqMask:
+			// Emulate these ...
+			// No 64-bit in HLSL, so have to do it in 32-bit and unroll.
+			statement("gl_SubgroupEqMask = 1u << (WaveGetLaneIndex() - uint4(0, 32, 64, 96));");
+			statement("if (WaveGetLaneIndex() >= 32) gl_SubgroupEqMask.x = 0;");
+			statement("if (WaveGetLaneIndex() >= 64 || WaveGetLaneIndex() < 32) gl_SubgroupEqMask.y = 0;");
+			statement("if (WaveGetLaneIndex() >= 96 || WaveGetLaneIndex() < 64) gl_SubgroupEqMask.z = 0;");
+			statement("if (WaveGetLaneIndex() < 96) gl_SubgroupEqMask.w = 0;");
+			break;
+
+		case BuiltInSubgroupGeMask:
+			// Emulate these ...
+			// No 64-bit in HLSL, so have to do it in 32-bit and unroll.
+			statement("gl_SubgroupGeMask = ~((1u << (WaveGetLaneIndex() - uint4(0, 32, 64, 96))) - 1u);");
+			statement("if (WaveGetLaneIndex() >= 32) gl_SubgroupGeMask.x = 0u;");
+			statement("if (WaveGetLaneIndex() >= 64) gl_SubgroupGeMask.y = 0u;");
+			statement("if (WaveGetLaneIndex() >= 96) gl_SubgroupGeMask.z = 0u;");
+			statement("if (WaveGetLaneIndex() < 32) gl_SubgroupGeMask.y = ~0u;");
+			statement("if (WaveGetLaneIndex() < 64) gl_SubgroupGeMask.z = ~0u;");
+			statement("if (WaveGetLaneIndex() < 96) gl_SubgroupGeMask.w = ~0u;");
+			break;
+
+		case BuiltInSubgroupGtMask:
+			// Emulate these ...
+			// No 64-bit in HLSL, so have to do it in 32-bit and unroll.
+			statement("uint gt_lane_index = WaveGetLaneIndex() + 1;");
+			statement("gl_SubgroupGtMask = ~((1u << (gt_lane_index - uint4(0, 32, 64, 96))) - 1u);");
+			statement("if (gt_lane_index >= 32) gl_SubgroupGtMask.x = 0u;");
+			statement("if (gt_lane_index >= 64) gl_SubgroupGtMask.y = 0u;");
+			statement("if (gt_lane_index >= 96) gl_SubgroupGtMask.z = 0u;");
+			statement("if (gt_lane_index >= 128) gl_SubgroupGtMask.w = 0u;");
+			statement("if (gt_lane_index < 32) gl_SubgroupGtMask.y = ~0u;");
+			statement("if (gt_lane_index < 64) gl_SubgroupGtMask.z = ~0u;");
+			statement("if (gt_lane_index < 96) gl_SubgroupGtMask.w = ~0u;");
+			break;
+
+		case BuiltInSubgroupLeMask:
+			// Emulate these ...
+			// No 64-bit in HLSL, so have to do it in 32-bit and unroll.
+			statement("uint le_lane_index = WaveGetLaneIndex() + 1;");
+			statement("gl_SubgroupLeMask = (1u << (le_lane_index - uint4(0, 32, 64, 96))) - 1u;");
+			statement("if (le_lane_index >= 32) gl_SubgroupLeMask.x = ~0u;");
+			statement("if (le_lane_index >= 64) gl_SubgroupLeMask.y = ~0u;");
+			statement("if (le_lane_index >= 96) gl_SubgroupLeMask.z = ~0u;");
+			statement("if (le_lane_index >= 128) gl_SubgroupLeMask.w = ~0u;");
+			statement("if (le_lane_index < 32) gl_SubgroupLeMask.y = 0u;");
+			statement("if (le_lane_index < 64) gl_SubgroupLeMask.z = 0u;");
+			statement("if (le_lane_index < 96) gl_SubgroupLeMask.w = 0u;");
+			break;
+
+		case BuiltInSubgroupLtMask:
+			// Emulate these ...
+			// No 64-bit in HLSL, so have to do it in 32-bit and unroll.
+			statement("gl_SubgroupLtMask = (1u << (WaveGetLaneIndex() - uint4(0, 32, 64, 96))) - 1u;");
+			statement("if (WaveGetLaneIndex() >= 32) gl_SubgroupLtMask.x = ~0u;");
+			statement("if (WaveGetLaneIndex() >= 64) gl_SubgroupLtMask.y = ~0u;");
+			statement("if (WaveGetLaneIndex() >= 96) gl_SubgroupLtMask.z = ~0u;");
+			statement("if (WaveGetLaneIndex() < 32) gl_SubgroupLtMask.y = 0u;");
+			statement("if (WaveGetLaneIndex() < 64) gl_SubgroupLtMask.z = 0u;");
+			statement("if (WaveGetLaneIndex() < 96) gl_SubgroupLtMask.w = 0u;");
+			break;
+
+		case BuiltInClipDistance:
+			for (uint32_t clip = 0; clip < clip_distance_count; clip++)
+				statement("gl_ClipDistance[", clip, "] = stage_input.gl_ClipDistance", clip / 4, ".", "xyzw"[clip & 3],
+				          ";");
+			break;
+
+		case BuiltInCullDistance:
+			for (uint32_t cull = 0; cull < cull_distance_count; cull++)
+				statement("gl_CullDistance[", cull, "] = stage_input.gl_CullDistance", cull / 4, ".", "xyzw"[cull & 3],
+				          ";");
+			break;
+
+		default:
+			statement(builtin, " = stage_input.", builtin, ";");
+			break;
+		}
+	});
+
+	// Copy from stage input struct to globals.
+	ir.for_each_typed_id<SPIRVariable>([&](uint32_t, SPIRVariable &var) {
+		auto &type = this->get<SPIRType>(var.basetype);
+		bool block = has_decoration(type.self, DecorationBlock);
+
+		if (var.storage != StorageClassInput)
+			return;
+
+		bool need_matrix_unroll = var.storage == StorageClassInput && execution.model == ExecutionModelVertex;
+
+		if (!var.remapped_variable && type.pointer && !is_builtin_variable(var) &&
+		    interface_variable_exists_in_entry_point(var.self))
+		{
+			if (block)
+			{
+				auto type_name = to_name(type.self);
+				auto var_name = to_name(var.self);
+				for (uint32_t mbr_idx = 0; mbr_idx < uint32_t(type.member_types.size()); mbr_idx++)
+				{
+					auto mbr_name = to_member_name(type, mbr_idx);
+					auto flat_name = join(type_name, "_", mbr_name);
+					statement(var_name, ".", mbr_name, " = stage_input.", flat_name, ";");
+				}
+			}
+			else
+			{
+				auto name = to_name(var.self);
+				auto &mtype = this->get<SPIRType>(var.basetype);
+				if (need_matrix_unroll && mtype.columns > 1)
+				{
+					// Unroll matrices.
+					for (uint32_t col = 0; col < mtype.columns; col++)
+						statement(name, "[", col, "] = stage_input.", name, "_", col, ";");
+				}
+				else
+				{
+					statement(name, " = stage_input.", name, ";");
+				}
+			}
+		}
+	});
+
+	// Run the shader.
+	if (execution.model == ExecutionModelVertex ||
+	    execution.model == ExecutionModelFragment ||
+	    execution.model == ExecutionModelGLCompute ||
+	    execution.model == ExecutionModelMeshEXT ||
+	    execution.model == ExecutionModelTaskEXT)
+	{
+		// For mesh shaders, we receive special arguments that we must pass down as function arguments.
+		// HLSL does not support proper reference types for passing these IO blocks,
+		// but DXC post-inlining seems to magically fix it up anyways *shrug*.
+		SmallVector<string> arglist;
+		auto &func = get<SPIRFunction>(ir.default_entry_point);
+		// The arguments are marked out, avoid detecting reads and emitting inout.
+		for (auto &arg : func.arguments)
+			arglist.push_back(to_expression(arg.id, false));
+		statement(get_inner_entry_point_name(), "(", merge(arglist), ");");
+	}
+	else
+		SPIRV_CROSS_THROW("Unsupported shader stage.");
+
+	// Copy stage outputs.
+	if (require_output)
+	{
+		statement("SPIRV_Cross_Output stage_output;");
+
+		// Copy builtins from globals to return struct.
+		active_output_builtins.for_each_bit([&](uint32_t i) {
+			// PointSize doesn't exist in HLSL SM 4+.
+			if (i == BuiltInPointSize && !legacy)
+				return;
+
+			switch (static_cast<BuiltIn>(i))
+			{
+			case BuiltInClipDistance:
+				for (uint32_t clip = 0; clip < clip_distance_count; clip++)
+					statement("stage_output.gl_ClipDistance", clip / 4, ".", "xyzw"[clip & 3], " = gl_ClipDistance[",
+					          clip, "];");
+				break;
+
+			case BuiltInCullDistance:
+				for (uint32_t cull = 0; cull < cull_distance_count; cull++)
+					statement("stage_output.gl_CullDistance", cull / 4, ".", "xyzw"[cull & 3], " = gl_CullDistance[",
+					          cull, "];");
+				break;
+
+			default:
+			{
+				auto builtin_expr = builtin_to_glsl(static_cast<BuiltIn>(i), StorageClassOutput);
+				statement("stage_output.", builtin_expr, " = ", builtin_expr, ";");
+				break;
+			}
+			}
+		});
+
+		ir.for_each_typed_id<SPIRVariable>([&](uint32_t, SPIRVariable &var) {
+			auto &type = this->get<SPIRType>(var.basetype);
+			bool block = has_decoration(type.self, DecorationBlock);
+
+			if (var.storage != StorageClassOutput)
+				return;
+
+			if (!var.remapped_variable && type.pointer &&
+			    !is_builtin_variable(var) &&
+			    interface_variable_exists_in_entry_point(var.self))
+			{
+				if (block)
+				{
+					// I/O blocks need to flatten output.
+					auto type_name = to_name(type.self);
+					auto var_name = to_name(var.self);
+					for (uint32_t mbr_idx = 0; mbr_idx < uint32_t(type.member_types.size()); mbr_idx++)
+					{
+						auto mbr_name = to_member_name(type, mbr_idx);
+						auto flat_name = join(type_name, "_", mbr_name);
+						statement("stage_output.", flat_name, " = ", var_name, ".", mbr_name, ";");
+					}
+				}
+				else
+				{
+					auto name = to_name(var.self);
+
+					if (legacy && execution.model == ExecutionModelFragment)
+					{
+						string output_filler;
+						for (uint32_t size = type.vecsize; size < 4; ++size)
+							output_filler += ", 0.0";
+
+						statement("stage_output.", name, " = float4(", name, output_filler, ");");
+					}
+					else
+					{
+						statement("stage_output.", name, " = ", name, ";");
+					}
+				}
+			}
+		});
+
+		statement("return stage_output;");
+	}
+
+	end_scope();
+}
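
Putting the pieces together, for a simple fragment shader the wrapper emitted by this function looks roughly like the sketch below (vUV and FragColor are hypothetical user I/O; the exact body depends on the active builtins and interface variables):

    SPIRV_Cross_Output main(SPIRV_Cross_Input stage_input)
    {
        gl_FragCoord = stage_input.gl_FragCoord;
        gl_FragCoord.w = 1.0 / gl_FragCoord.w;
        vUV = stage_input.vUV;
        frag_main();
        SPIRV_Cross_Output stage_output;
        stage_output.FragColor = FragColor;
        return stage_output;
    }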
+
+void CompilerHLSL::emit_fixup()
+{
+	if (is_vertex_like_shader() && active_output_builtins.get(BuiltInPosition))
+	{
+		// Do various mangling on the gl_Position.
+		if (hlsl_options.shader_model <= 30)
+		{
+			statement("gl_Position.x = gl_Position.x - gl_HalfPixel.x * "
+			          "gl_Position.w;");
+			statement("gl_Position.y = gl_Position.y + gl_HalfPixel.y * "
+			          "gl_Position.w;");
+		}
+
+		if (options.vertex.flip_vert_y)
+			statement("gl_Position.y = -gl_Position.y;");
+		if (options.vertex.fixup_clipspace)
+			statement("gl_Position.z = (gl_Position.z + gl_Position.w) * 0.5;");
+	}
+}
+
+void CompilerHLSL::emit_texture_op(const Instruction &i, bool sparse)
+{
+	if (sparse)
+		SPIRV_CROSS_THROW("Sparse feedback not yet supported in HLSL.");
+
+	auto *ops = stream(i);
+	auto op = static_cast<Op>(i.op);
+	uint32_t length = i.length;
+
+	SmallVector<uint32_t> inherited_expressions;
+
+	uint32_t result_type = ops[0];
+	uint32_t id = ops[1];
+	VariableID img = ops[2];
+	uint32_t coord = ops[3];
+	uint32_t dref = 0;
+	uint32_t comp = 0;
+	bool gather = false;
+	bool proj = false;
+	const uint32_t *opt = nullptr;
+	auto *combined_image = maybe_get<SPIRCombinedImageSampler>(img);
+
+	if (combined_image && has_decoration(img, DecorationNonUniform))
+	{
+		set_decoration(combined_image->image, DecorationNonUniform);
+		set_decoration(combined_image->sampler, DecorationNonUniform);
+	}
+
+	auto img_expr = to_non_uniform_aware_expression(combined_image ? combined_image->image : img);
+
+	inherited_expressions.push_back(coord);
+
+	switch (op)
+	{
+	case OpImageSampleDrefImplicitLod:
+	case OpImageSampleDrefExplicitLod:
+		dref = ops[4];
+		opt = &ops[5];
+		length -= 5;
+		break;
+
+	case OpImageSampleProjDrefImplicitLod:
+	case OpImageSampleProjDrefExplicitLod:
+		dref = ops[4];
+		proj = true;
+		opt = &ops[5];
+		length -= 5;
+		break;
+
+	case OpImageDrefGather:
+		dref = ops[4];
+		opt = &ops[5];
+		gather = true;
+		length -= 5;
+		break;
+
+	case OpImageGather:
+		comp = ops[4];
+		opt = &ops[5];
+		gather = true;
+		length -= 5;
+		break;
+
+	case OpImageSampleProjImplicitLod:
+	case OpImageSampleProjExplicitLod:
+		opt = &ops[4];
+		length -= 4;
+		proj = true;
+		break;
+
+	case OpImageQueryLod:
+		opt = &ops[4];
+		length -= 4;
+		break;
+
+	default:
+		opt = &ops[4];
+		length -= 4;
+		break;
+	}
+
+	auto &imgtype = expression_type(img);
+	uint32_t coord_components = 0;
+	switch (imgtype.image.dim)
+	{
+	case spv::Dim1D:
+		coord_components = 1;
+		break;
+	case spv::Dim2D:
+		coord_components = 2;
+		break;
+	case spv::Dim3D:
+		coord_components = 3;
+		break;
+	case spv::DimCube:
+		coord_components = 3;
+		break;
+	case spv::DimBuffer:
+		coord_components = 1;
+		break;
+	default:
+		coord_components = 2;
+		break;
+	}
+
+	if (dref)
+		inherited_expressions.push_back(dref);
+
+	if (imgtype.image.arrayed)
+		coord_components++;
+
+	uint32_t bias = 0;
+	uint32_t lod = 0;
+	uint32_t grad_x = 0;
+	uint32_t grad_y = 0;
+	uint32_t coffset = 0;
+	uint32_t offset = 0;
+	uint32_t coffsets = 0;
+	uint32_t sample = 0;
+	uint32_t minlod = 0;
+	uint32_t flags = 0;
+
+	if (length)
+	{
+		flags = opt[0];
+		opt++;
+		length--;
+	}
+
+	auto test = [&](uint32_t &v, uint32_t flag) {
+		if (length && (flags & flag))
+		{
+			v = *opt++;
+			inherited_expressions.push_back(v);
+			length--;
+		}
+	};
+
+	test(bias, ImageOperandsBiasMask);
+	test(lod, ImageOperandsLodMask);
+	test(grad_x, ImageOperandsGradMask);
+	test(grad_y, ImageOperandsGradMask);
+	test(coffset, ImageOperandsConstOffsetMask);
+	test(offset, ImageOperandsOffsetMask);
+	test(coffsets, ImageOperandsConstOffsetsMask);
+	test(sample, ImageOperandsSampleMask);
+	test(minlod, ImageOperandsMinLodMask);
+
+	string expr;
+	string texop;
+
+	if (minlod != 0)
+		SPIRV_CROSS_THROW("MinLod texture operand not supported in HLSL.");
+
+	if (op == OpImageFetch)
+	{
+		if (hlsl_options.shader_model < 40)
+		{
+			SPIRV_CROSS_THROW("texelFetch is not supported in HLSL shader model 2/3.");
+		}
+		texop += img_expr;
+		texop += ".Load";
+	}
+	else if (op == OpImageQueryLod)
+	{
+		texop += img_expr;
+		texop += ".CalculateLevelOfDetail";
+	}
+	else
+	{
+		auto &imgformat = get<SPIRType>(imgtype.image.type);
+		if (hlsl_options.shader_model < 67 && imgformat.basetype != SPIRType::Float)
+		{
+			SPIRV_CROSS_THROW("Sampling non-float textures is not supported in HLSL SM < 6.7.");
+		}
+
+		if (hlsl_options.shader_model >= 40)
+		{
+			texop += img_expr;
+
+			if (is_depth_image(imgtype, img))
+			{
+				if (gather)
+				{
+					texop += ".GatherCmp";
+				}
+				else if (lod || grad_x || grad_y)
+				{
+					// Assume we want a fixed level, and the only thing we can get in HLSL is SampleCmpLevelZero.
+					texop += ".SampleCmpLevelZero";
+				}
+				else
+					texop += ".SampleCmp";
+			}
+			else if (gather)
+			{
+				uint32_t comp_num = evaluate_constant_u32(comp);
+				if (hlsl_options.shader_model >= 50)
+				{
+					switch (comp_num)
+					{
+					case 0:
+						texop += ".GatherRed";
+						break;
+					case 1:
+						texop += ".GatherGreen";
+						break;
+					case 2:
+						texop += ".GatherBlue";
+						break;
+					case 3:
+						texop += ".GatherAlpha";
+						break;
+					default:
+						SPIRV_CROSS_THROW("Invalid component.");
+					}
+				}
+				else
+				{
+					if (comp_num == 0)
+						texop += ".Gather";
+					else
+						SPIRV_CROSS_THROW("HLSL shader model 4 can only gather from the red component.");
+				}
+			}
+			else if (bias)
+				texop += ".SampleBias";
+			else if (grad_x || grad_y)
+				texop += ".SampleGrad";
+			else if (lod)
+				texop += ".SampleLevel";
+			else
+				texop += ".Sample";
+		}
+		else
+		{
+			switch (imgtype.image.dim)
+			{
+			case Dim1D:
+				texop += "tex1D";
+				break;
+			case Dim2D:
+				texop += "tex2D";
+				break;
+			case Dim3D:
+				texop += "tex3D";
+				break;
+			case DimCube:
+				texop += "texCUBE";
+				break;
+			case DimRect:
+			case DimBuffer:
+			case DimSubpassData:
+				SPIRV_CROSS_THROW("Buffer texture support is not yet implemented for HLSL"); // TODO
+			default:
+				SPIRV_CROSS_THROW("Invalid dimension.");
+			}
+
+			if (gather)
+				SPIRV_CROSS_THROW("textureGather is not supported in HLSL shader model 2/3.");
+			if (offset || coffset)
+				SPIRV_CROSS_THROW("textureOffset is not supported in HLSL shader model 2/3.");
+
+			if (grad_x || grad_y)
+				texop += "grad";
+			else if (lod)
+				texop += "lod";
+			else if (bias)
+				texop += "bias";
+			else if (proj || dref)
+				texop += "proj";
+		}
+	}
+
+	expr += texop;
+	expr += "(";
+	if (hlsl_options.shader_model < 40)
+	{
+		if (combined_image)
+			SPIRV_CROSS_THROW("Separate images/samplers are not supported in HLSL shader model 2/3.");
+		expr += to_expression(img);
+	}
+	else if (op != OpImageFetch)
+	{
+		string sampler_expr;
+		if (combined_image)
+			sampler_expr = to_non_uniform_aware_expression(combined_image->sampler);
+		else
+			sampler_expr = to_sampler_expression(img);
+		expr += sampler_expr;
+	}
+
+	auto swizzle = [](uint32_t comps, uint32_t in_comps) -> const char * {
+		if (comps == in_comps)
+			return "";
+
+		switch (comps)
+		{
+		case 1:
+			return ".x";
+		case 2:
+			return ".xy";
+		case 3:
+			return ".xyz";
+		default:
+			return "";
+		}
+	};
+
+	bool forward = should_forward(coord);
+
+	// The IR can give us more components than we need, so chop them off as needed.
+	string coord_expr;
+	auto &coord_type = expression_type(coord);
+	if (coord_components != coord_type.vecsize)
+		coord_expr = to_enclosed_expression(coord) + swizzle(coord_components, expression_type(coord).vecsize);
+	else
+		coord_expr = to_expression(coord);
+
+	if (proj && hlsl_options.shader_model >= 40) // Legacy HLSL has "proj" operations which do this for us.
+		coord_expr = coord_expr + " / " + to_extract_component_expression(coord, coord_components);
+
+	if (hlsl_options.shader_model < 40)
+	{
+		if (dref)
+		{
+			if (imgtype.image.dim != spv::Dim1D && imgtype.image.dim != spv::Dim2D)
+			{
+				SPIRV_CROSS_THROW(
+				    "Depth comparison is only supported for 1D and 2D textures in HLSL shader model 2/3.");
+			}
+
+			if (grad_x || grad_y)
+				SPIRV_CROSS_THROW("Depth comparison is not supported for grad sampling in HLSL shader model 2/3.");
+
+			for (uint32_t size = coord_components; size < 2; ++size)
+				coord_expr += ", 0.0";
+
+			forward = forward && should_forward(dref);
+			coord_expr += ", " + to_expression(dref);
+		}
+		else if (lod || bias || proj)
+		{
+			for (uint32_t size = coord_components; size < 3; ++size)
+				coord_expr += ", 0.0";
+		}
+
+		if (lod)
+		{
+			coord_expr = "float4(" + coord_expr + ", " + to_expression(lod) + ")";
+		}
+		else if (bias)
+		{
+			coord_expr = "float4(" + coord_expr + ", " + to_expression(bias) + ")";
+		}
+		else if (proj)
+		{
+			coord_expr = "float4(" + coord_expr + ", " + to_extract_component_expression(coord, coord_components) + ")";
+		}
+		else if (dref)
+		{
+			// A "normal" sample gets fed into tex2Dproj as well, because the
+			// regular tex2D accepts only two coordinates.
+			coord_expr = "float4(" + coord_expr + ", 1.0)";
+		}
+
+		if (!!lod + !!bias + !!proj > 1)
+			SPIRV_CROSS_THROW("Legacy HLSL can only use one of lod/bias/proj modifiers.");
+	}
+
+	if (op == OpImageFetch)
+	{
+		if (imgtype.image.dim != DimBuffer && !imgtype.image.ms)
+			coord_expr =
+			    join("int", coord_components + 1, "(", coord_expr, ", ", lod ? to_expression(lod) : string("0"), ")");
+	}
+	else
+		expr += ", ";
+	expr += coord_expr;
+
+	if (dref && hlsl_options.shader_model >= 40)
+	{
+		forward = forward && should_forward(dref);
+		expr += ", ";
+
+		if (proj)
+			expr += to_enclosed_expression(dref) + " / " + to_extract_component_expression(coord, coord_components);
+		else
+			expr += to_expression(dref);
+	}
+
+	if (!dref && (grad_x || grad_y))
+	{
+		forward = forward && should_forward(grad_x);
+		forward = forward && should_forward(grad_y);
+		expr += ", ";
+		expr += to_expression(grad_x);
+		expr += ", ";
+		expr += to_expression(grad_y);
+	}
+
+	if (!dref && lod && hlsl_options.shader_model >= 40 && op != OpImageFetch)
+	{
+		forward = forward && should_forward(lod);
+		expr += ", ";
+		expr += to_expression(lod);
+	}
+
+	if (!dref && bias && hlsl_options.shader_model >= 40)
+	{
+		forward = forward && should_forward(bias);
+		expr += ", ";
+		expr += to_expression(bias);
+	}
+
+	if (coffset)
+	{
+		forward = forward && should_forward(coffset);
+		expr += ", ";
+		expr += to_expression(coffset);
+	}
+	else if (offset)
+	{
+		forward = forward && should_forward(offset);
+		expr += ", ";
+		expr += to_expression(offset);
+	}
+
+	if (sample)
+	{
+		expr += ", ";
+		expr += to_expression(sample);
+	}
+
+	expr += ")";
+
+	if (dref && hlsl_options.shader_model < 40)
+		expr += ".x";
+
+	if (op == OpImageQueryLod)
+	{
+		// This is rather awkward.
+		// textureQueryLod returns two values, the "accessed level",
+		// as well as the actual LOD lambda.
+		// As far as I can tell, there is no way to get the .x component
+		// according to GLSL spec, and it depends on the sampler itself.
+		// Just assume X == Y, so we will need to splat the result to a float2.
+		statement("float _", id, "_tmp = ", expr, ";");
+		statement("float2 _", id, " = _", id, "_tmp.xx;");
+		set<SPIRExpression>(id, join("_", id), result_type, true);
+	}
+	else
+	{
+		emit_op(result_type, id, expr, forward, false);
+	}
+
+	for (auto &inherit : inherited_expressions)
+		inherit_expression_dependencies(id, inherit);
+
+	switch (op)
+	{
+	case OpImageSampleDrefImplicitLod:
+	case OpImageSampleImplicitLod:
+	case OpImageSampleProjImplicitLod:
+	case OpImageSampleProjDrefImplicitLod:
+		register_control_dependent_expression(id);
+		break;
+
+	default:
+		break;
+	}
+}
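
For a typical SM 4+ sample through a separate image/sampler pair, the expression assembled here ends up along the lines of (hypothetical names):

    uTex.Sample(_uTex_sampler, vUV)

with .SampleCmp, .SampleLevel, .SampleBias or .SampleGrad substituted as the dref/lod/bias/grad operands dictate.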
+
+string CompilerHLSL::to_resource_binding(const SPIRVariable &var)
+{
+	const auto &type = get<SPIRType>(var.basetype);
+
+	// We can remap push constant blocks, even if they don't have any binding decoration.
+	if (type.storage != StorageClassPushConstant && !has_decoration(var.self, DecorationBinding))
+		return "";
+
+	char space = '\0';
+
+	HLSLBindingFlagBits resource_flags = HLSL_BINDING_AUTO_NONE_BIT;
+
+	switch (type.basetype)
+	{
+	case SPIRType::SampledImage:
+		space = 't'; // SRV
+		resource_flags = HLSL_BINDING_AUTO_SRV_BIT;
+		break;
+
+	case SPIRType::Image:
+		if (type.image.sampled == 2 && type.image.dim != DimSubpassData)
+		{
+			if (has_decoration(var.self, DecorationNonWritable) && hlsl_options.nonwritable_uav_texture_as_srv)
+			{
+				space = 't'; // SRV
+				resource_flags = HLSL_BINDING_AUTO_SRV_BIT;
+			}
+			else
+			{
+				space = 'u'; // UAV
+				resource_flags = HLSL_BINDING_AUTO_UAV_BIT;
+			}
+		}
+		else
+		{
+			space = 't'; // SRV
+			resource_flags = HLSL_BINDING_AUTO_SRV_BIT;
+		}
+		break;
+
+	case SPIRType::Sampler:
+		space = 's';
+		resource_flags = HLSL_BINDING_AUTO_SAMPLER_BIT;
+		break;
+
+	case SPIRType::AccelerationStructure:
+		space = 't'; // SRV
+		resource_flags = HLSL_BINDING_AUTO_SRV_BIT;
+		break;
+
+	case SPIRType::Struct:
+	{
+		auto storage = type.storage;
+		if (storage == StorageClassUniform)
+		{
+			if (has_decoration(type.self, DecorationBufferBlock))
+			{
+				Bitset flags = ir.get_buffer_block_flags(var);
+				bool is_readonly = flags.get(DecorationNonWritable) && !is_hlsl_force_storage_buffer_as_uav(var.self);
+				space = is_readonly ? 't' : 'u'; // UAV
+				resource_flags = is_readonly ? HLSL_BINDING_AUTO_SRV_BIT : HLSL_BINDING_AUTO_UAV_BIT;
+			}
+			else if (has_decoration(type.self, DecorationBlock))
+			{
+				space = 'b'; // Constant buffers
+				resource_flags = HLSL_BINDING_AUTO_CBV_BIT;
+			}
+		}
+		else if (storage == StorageClassPushConstant)
+		{
+			space = 'b'; // Constant buffers
+			resource_flags = HLSL_BINDING_AUTO_PUSH_CONSTANT_BIT;
+		}
+		else if (storage == StorageClassStorageBuffer)
+		{
+			// UAV or SRV depending on readonly flag.
+			Bitset flags = ir.get_buffer_block_flags(var);
+			bool is_readonly = flags.get(DecorationNonWritable) && !is_hlsl_force_storage_buffer_as_uav(var.self);
+			space = is_readonly ? 't' : 'u';
+			resource_flags = is_readonly ? HLSL_BINDING_AUTO_SRV_BIT : HLSL_BINDING_AUTO_UAV_BIT;
+		}
+
+		break;
+	}
+	default:
+		break;
+	}
+
+	if (!space)
+		return "";
+
+	uint32_t desc_set =
+	    resource_flags == HLSL_BINDING_AUTO_PUSH_CONSTANT_BIT ? ResourceBindingPushConstantDescriptorSet : 0u;
+	uint32_t binding = resource_flags == HLSL_BINDING_AUTO_PUSH_CONSTANT_BIT ? ResourceBindingPushConstantBinding : 0u;
+
+	if (has_decoration(var.self, DecorationBinding))
+		binding = get_decoration(var.self, DecorationBinding);
+	if (has_decoration(var.self, DecorationDescriptorSet))
+		desc_set = get_decoration(var.self, DecorationDescriptorSet);
+
+	return to_resource_register(resource_flags, space, binding, desc_set);
+}
+
+string CompilerHLSL::to_resource_binding_sampler(const SPIRVariable &var)
+{
+	// For combined image samplers.
+	if (!has_decoration(var.self, DecorationBinding))
+		return "";
+
+	return to_resource_register(HLSL_BINDING_AUTO_SAMPLER_BIT, 's', get_decoration(var.self, DecorationBinding),
+	                            get_decoration(var.self, DecorationDescriptorSet));
+}
+
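+// Applies a user-provided binding remap, if one was registered for this (execution model, set, binding)
+// tuple, rewriting register space and binding in place and marking the remap as used.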
+void CompilerHLSL::remap_hlsl_resource_binding(HLSLBindingFlagBits type, uint32_t &desc_set, uint32_t &binding)
+{
+	auto itr = resource_bindings.find({ get_execution_model(), desc_set, binding });
+	if (itr != end(resource_bindings))
+	{
+		auto &remap = itr->second;
+		remap.second = true;
+
+		switch (type)
+		{
+		case HLSL_BINDING_AUTO_PUSH_CONSTANT_BIT:
+		case HLSL_BINDING_AUTO_CBV_BIT:
+			desc_set = remap.first.cbv.register_space;
+			binding = remap.first.cbv.register_binding;
+			break;
+
+		case HLSL_BINDING_AUTO_SRV_BIT:
+			desc_set = remap.first.srv.register_space;
+			binding = remap.first.srv.register_binding;
+			break;
+
+		case HLSL_BINDING_AUTO_SAMPLER_BIT:
+			desc_set = remap.first.sampler.register_space;
+			binding = remap.first.sampler.register_binding;
+			break;
+
+		case HLSL_BINDING_AUTO_UAV_BIT:
+			desc_set = remap.first.uav.register_space;
+			binding = remap.first.uav.register_binding;
+			break;
+
+		default:
+			break;
+		}
+	}
+}
+
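+// Formats the final register declaration. For example, set 1 / binding 3 on an SRV becomes
+// " : register(t3, space1)" on SM 5.1+ and " : register(t3)" on older shader models; bindings whose
+// flag is set in resource_binding_flags are skipped here entirely.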
+string CompilerHLSL::to_resource_register(HLSLBindingFlagBits flag, char space, uint32_t binding, uint32_t space_set)
+{
+	if ((flag & resource_binding_flags) == 0)
+	{
+		remap_hlsl_resource_binding(flag, space_set, binding);
+
+		// The push constant block did not have a binding, and there was no remap for it,
+		// so declare it without a register binding.
+		if (flag == HLSL_BINDING_AUTO_PUSH_CONSTANT_BIT && space_set == ResourceBindingPushConstantDescriptorSet)
+			return "";
+
+		if (hlsl_options.shader_model >= 51)
+			return join(" : register(", space, binding, ", space", space_set, ")");
+		else
+			return join(" : register(", space, binding, ")");
+	}
+	else
+		return "";
+}
+
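+// Declares a SM 4.0+ resource: textures and UAVs (globallycoherent where required), plus the matching
+// SamplerState or SamplerComparisonState for combined image samplers.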
+void CompilerHLSL::emit_modern_uniform(const SPIRVariable &var)
+{
+	auto &type = get<SPIRType>(var.basetype);
+	switch (type.basetype)
+	{
+	case SPIRType::SampledImage:
+	case SPIRType::Image:
+	{
+		bool is_coherent = false;
+		if (type.basetype == SPIRType::Image && type.image.sampled == 2)
+			is_coherent = has_decoration(var.self, DecorationCoherent);
+
+		statement(is_coherent ? "globallycoherent " : "", image_type_hlsl_modern(type, var.self), " ",
+		          to_name(var.self), type_to_array_glsl(type), to_resource_binding(var), ";");
+
+		if (type.basetype == SPIRType::SampledImage && type.image.dim != DimBuffer)
+		{
+			// For combined image samplers, also emit the matching sampler object.
+			if (is_depth_image(type, var.self))
+				statement("SamplerComparisonState ", to_sampler_expression(var.self), type_to_array_glsl(type),
+				          to_resource_binding_sampler(var), ";");
+			else
+				statement("SamplerState ", to_sampler_expression(var.self), type_to_array_glsl(type),
+				          to_resource_binding_sampler(var), ";");
+		}
+		break;
+	}
+
+	case SPIRType::Sampler:
+		if (comparison_ids.count(var.self))
+			statement("SamplerComparisonState ", to_name(var.self), type_to_array_glsl(type), to_resource_binding(var),
+			          ";");
+		else
+			statement("SamplerState ", to_name(var.self), type_to_array_glsl(type), to_resource_binding(var), ";");
+		break;
+
+	default:
+		statement(variable_decl(var), to_resource_binding(var), ";");
+		break;
+	}
+}
+
+void CompilerHLSL::emit_legacy_uniform(const SPIRVariable &var)
+{
+	auto &type = get<SPIRType>(var.basetype);
+	switch (type.basetype)
+	{
+	case SPIRType::Sampler:
+	case SPIRType::Image:
+		SPIRV_CROSS_THROW("Separate image and samplers not supported in legacy HLSL.");
+
+	default:
+		statement(variable_decl(var), ";");
+		break;
+	}
+}
+
+void CompilerHLSL::emit_uniform(const SPIRVariable &var)
+{
+	add_resource_name(var.self);
+	if (hlsl_options.shader_model >= 40)
+		emit_modern_uniform(var);
+	else
+		emit_legacy_uniform(var);
+}
+
+bool CompilerHLSL::emit_complex_bitcast(uint32_t, uint32_t, uint32_t)
+{
+	return false;
+}
+
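+// Picks the HLSL expression used to reinterpret bits between two types: a plain cast for int <-> uint,
+// asuint/asint/asfloat/asdouble intrinsics for float <-> integer, and emitted helpers for half packing.
+// Returns an empty string when no conversion is required.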
+string CompilerHLSL::bitcast_glsl_op(const SPIRType &out_type, const SPIRType &in_type)
+{
+	if (out_type.basetype == SPIRType::UInt && in_type.basetype == SPIRType::Int)
+		return type_to_glsl(out_type);
+	else if (out_type.basetype == SPIRType::UInt64 && in_type.basetype == SPIRType::Int64)
+		return type_to_glsl(out_type);
+	else if (out_type.basetype == SPIRType::UInt && in_type.basetype == SPIRType::Float)
+		return "asuint";
+	else if (out_type.basetype == SPIRType::Int && in_type.basetype == SPIRType::UInt)
+		return type_to_glsl(out_type);
+	else if (out_type.basetype == SPIRType::Int64 && in_type.basetype == SPIRType::UInt64)
+		return type_to_glsl(out_type);
+	else if (out_type.basetype == SPIRType::Int && in_type.basetype == SPIRType::Float)
+		return "asint";
+	else if (out_type.basetype == SPIRType::Float && in_type.basetype == SPIRType::UInt)
+		return "asfloat";
+	else if (out_type.basetype == SPIRType::Float && in_type.basetype == SPIRType::Int)
+		return "asfloat";
+	else if (out_type.basetype == SPIRType::Int64 && in_type.basetype == SPIRType::Double)
+		SPIRV_CROSS_THROW("Double to Int64 is not supported in HLSL.");
+	else if (out_type.basetype == SPIRType::UInt64 && in_type.basetype == SPIRType::Double)
+		SPIRV_CROSS_THROW("Double to UInt64 is not supported in HLSL.");
+	else if (out_type.basetype == SPIRType::Double && in_type.basetype == SPIRType::Int64)
+		return "asdouble";
+	else if (out_type.basetype == SPIRType::Double && in_type.basetype == SPIRType::UInt64)
+		return "asdouble";
+	else if (out_type.basetype == SPIRType::Half && in_type.basetype == SPIRType::UInt && in_type.vecsize == 1)
+	{
+		if (!requires_explicit_fp16_packing)
+		{
+			requires_explicit_fp16_packing = true;
+			force_recompile();
+		}
+		return "spvUnpackFloat2x16";
+	}
+	else if (out_type.basetype == SPIRType::UInt && in_type.basetype == SPIRType::Half && in_type.vecsize == 2)
+	{
+		if (!requires_explicit_fp16_packing)
+		{
+			requires_explicit_fp16_packing = true;
+			force_recompile();
+		}
+		return "spvPackFloat2x16";
+	}
+	else if (out_type.basetype == SPIRType::UShort && in_type.basetype == SPIRType::Half)
+	{
+		if (hlsl_options.shader_model < 40)
+			SPIRV_CROSS_THROW("Half to UShort requires Shader Model 4.");
+		return "(" + type_to_glsl(out_type) + ")f32tof16";
+	}
+	else if (out_type.basetype == SPIRType::Half && in_type.basetype == SPIRType::UShort)
+	{
+		if (hlsl_options.shader_model < 40)
+			SPIRV_CROSS_THROW("UShort to Half requires Shader Model 4.");
+		return "(" + type_to_glsl(out_type) + ")f16tof32";
+	}
+	else
+		return "";
+}
+
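+// Lowers GLSL.std.450 extended instructions: direct intrinsic renames (rsqrt, frac, lerp, mad, ...),
+// on-demand helper functions for packing, matrix inverse and scalar reflect/refract/faceforward,
+// and a fallback to the GLSL backend for everything else.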
+void CompilerHLSL::emit_glsl_op(uint32_t result_type, uint32_t id, uint32_t eop, const uint32_t *args, uint32_t count)
+{
+	auto op = static_cast<GLSLstd450>(eop);
+
+	// If we need to do implicit bitcasts, make sure we do it with the correct type.
+	uint32_t integer_width = get_integer_width_for_glsl_instruction(op, args, count);
+	auto int_type = to_signed_basetype(integer_width);
+	auto uint_type = to_unsigned_basetype(integer_width);
+
+	op = get_remapped_glsl_op(op);
+
+	switch (op)
+	{
+	case GLSLstd450InverseSqrt:
+		emit_unary_func_op(result_type, id, args[0], "rsqrt");
+		break;
+
+	case GLSLstd450Fract:
+		emit_unary_func_op(result_type, id, args[0], "frac");
+		break;
+
+	case GLSLstd450RoundEven:
+		if (hlsl_options.shader_model < 40)
+			SPIRV_CROSS_THROW("roundEven is not supported in HLSL shader model 2/3.");
+		emit_unary_func_op(result_type, id, args[0], "round");
+		break;
+
+	case GLSLstd450Trunc:
+		emit_unary_func_op(result_type, id, args[0], "trunc");
+		break;
+
+	case GLSLstd450Acosh:
+	case GLSLstd450Asinh:
+	case GLSLstd450Atanh:
+		// These are not supported in HLSL, so always emulate them.
+		emit_emulated_ahyper_op(result_type, id, args[0], op);
+		break;
+
+	case GLSLstd450FMix:
+	case GLSLstd450IMix:
+		emit_trinary_func_op(result_type, id, args[0], args[1], args[2], "lerp");
+		break;
+
+	case GLSLstd450Atan2:
+		emit_binary_func_op(result_type, id, args[0], args[1], "atan2");
+		break;
+
+	case GLSLstd450Fma:
+		emit_trinary_func_op(result_type, id, args[0], args[1], args[2], "mad");
+		break;
+
+	case GLSLstd450InterpolateAtCentroid:
+		emit_unary_func_op(result_type, id, args[0], "EvaluateAttributeAtCentroid");
+		break;
+	case GLSLstd450InterpolateAtSample:
+		emit_binary_func_op(result_type, id, args[0], args[1], "EvaluateAttributeAtSample");
+		break;
+	case GLSLstd450InterpolateAtOffset:
+		emit_binary_func_op(result_type, id, args[0], args[1], "EvaluateAttributeSnapped");
+		break;
+
+	case GLSLstd450PackHalf2x16:
+		if (!requires_fp16_packing)
+		{
+			requires_fp16_packing = true;
+			force_recompile();
+		}
+		emit_unary_func_op(result_type, id, args[0], "spvPackHalf2x16");
+		break;
+
+	case GLSLstd450UnpackHalf2x16:
+		if (!requires_fp16_packing)
+		{
+			requires_fp16_packing = true;
+			force_recompile();
+		}
+		emit_unary_func_op(result_type, id, args[0], "spvUnpackHalf2x16");
+		break;
+
+	case GLSLstd450PackSnorm4x8:
+		if (!requires_snorm8_packing)
+		{
+			requires_snorm8_packing = true;
+			force_recompile();
+		}
+		emit_unary_func_op(result_type, id, args[0], "spvPackSnorm4x8");
+		break;
+
+	case GLSLstd450UnpackSnorm4x8:
+		if (!requires_snorm8_packing)
+		{
+			requires_snorm8_packing = true;
+			force_recompile();
+		}
+		emit_unary_func_op(result_type, id, args[0], "spvUnpackSnorm4x8");
+		break;
+
+	case GLSLstd450PackUnorm4x8:
+		if (!requires_unorm8_packing)
+		{
+			requires_unorm8_packing = true;
+			force_recompile();
+		}
+		emit_unary_func_op(result_type, id, args[0], "spvPackUnorm4x8");
+		break;
+
+	case GLSLstd450UnpackUnorm4x8:
+		if (!requires_unorm8_packing)
+		{
+			requires_unorm8_packing = true;
+			force_recompile();
+		}
+		emit_unary_func_op(result_type, id, args[0], "spvUnpackUnorm4x8");
+		break;
+
+	case GLSLstd450PackSnorm2x16:
+		if (!requires_snorm16_packing)
+		{
+			requires_snorm16_packing = true;
+			force_recompile();
+		}
+		emit_unary_func_op(result_type, id, args[0], "spvPackSnorm2x16");
+		break;
+
+	case GLSLstd450UnpackSnorm2x16:
+		if (!requires_snorm16_packing)
+		{
+			requires_snorm16_packing = true;
+			force_recompile();
+		}
+		emit_unary_func_op(result_type, id, args[0], "spvUnpackSnorm2x16");
+		break;
+
+	case GLSLstd450PackUnorm2x16:
+		if (!requires_unorm16_packing)
+		{
+			requires_unorm16_packing = true;
+			force_recompile();
+		}
+		emit_unary_func_op(result_type, id, args[0], "spvPackUnorm2x16");
+		break;
+
+	case GLSLstd450UnpackUnorm2x16:
+		if (!requires_unorm16_packing)
+		{
+			requires_unorm16_packing = true;
+			force_recompile();
+		}
+		emit_unary_func_op(result_type, id, args[0], "spvUnpackUnorm2x16");
+		break;
+
+	case GLSLstd450PackDouble2x32:
+	case GLSLstd450UnpackDouble2x32:
+		SPIRV_CROSS_THROW("packDouble2x32/unpackDouble2x32 not supported in HLSL.");
+
+	case GLSLstd450FindILsb:
+	{
+		auto basetype = expression_type(args[0]).basetype;
+		emit_unary_func_op_cast(result_type, id, args[0], "firstbitlow", basetype, basetype);
+		break;
+	}
+
+	case GLSLstd450FindSMsb:
+		emit_unary_func_op_cast(result_type, id, args[0], "firstbithigh", int_type, int_type);
+		break;
+
+	case GLSLstd450FindUMsb:
+		emit_unary_func_op_cast(result_type, id, args[0], "firstbithigh", uint_type, uint_type);
+		break;
+
+	case GLSLstd450MatrixInverse:
+	{
+		auto &type = get<SPIRType>(result_type);
+		if (type.vecsize == 2 && type.columns == 2)
+		{
+			if (!requires_inverse_2x2)
+			{
+				requires_inverse_2x2 = true;
+				force_recompile();
+			}
+		}
+		else if (type.vecsize == 3 && type.columns == 3)
+		{
+			if (!requires_inverse_3x3)
+			{
+				requires_inverse_3x3 = true;
+				force_recompile();
+			}
+		}
+		else if (type.vecsize == 4 && type.columns == 4)
+		{
+			if (!requires_inverse_4x4)
+			{
+				requires_inverse_4x4 = true;
+				force_recompile();
+			}
+		}
+		emit_unary_func_op(result_type, id, args[0], "spvInverse");
+		break;
+	}
+
+	case GLSLstd450Normalize:
+		// HLSL does not support scalar versions here.
+		if (expression_type(args[0]).vecsize == 1)
+		{
+			// Returns -1 or 1 for valid input, sign() does the job.
+			emit_unary_func_op(result_type, id, args[0], "sign");
+		}
+		else
+			CompilerGLSL::emit_glsl_op(result_type, id, eop, args, count);
+		break;
+
+	case GLSLstd450Reflect:
+		if (get<SPIRType>(result_type).vecsize == 1)
+		{
+			if (!requires_scalar_reflect)
+			{
+				requires_scalar_reflect = true;
+				force_recompile();
+			}
+			emit_binary_func_op(result_type, id, args[0], args[1], "spvReflect");
+		}
+		else
+			CompilerGLSL::emit_glsl_op(result_type, id, eop, args, count);
+		break;
+
+	case GLSLstd450Refract:
+		if (get<SPIRType>(result_type).vecsize == 1)
+		{
+			if (!requires_scalar_refract)
+			{
+				requires_scalar_refract = true;
+				force_recompile();
+			}
+			emit_trinary_func_op(result_type, id, args[0], args[1], args[2], "spvRefract");
+		}
+		else
+			CompilerGLSL::emit_glsl_op(result_type, id, eop, args, count);
+		break;
+
+	case GLSLstd450FaceForward:
+		if (get<SPIRType>(result_type).vecsize == 1)
+		{
+			if (!requires_scalar_faceforward)
+			{
+				requires_scalar_faceforward = true;
+				force_recompile();
+			}
+			emit_trinary_func_op(result_type, id, args[0], args[1], args[2], "spvFaceForward");
+		}
+		else
+			CompilerGLSL::emit_glsl_op(result_type, id, eop, args, count);
+		break;
+
+	default:
+		CompilerGLSL::emit_glsl_op(result_type, id, eop, args, count);
+		break;
+	}
+}
+
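+// Unrolls an array load from a ByteAddressBuffer into an [unroll] loop that reads one element per
+// iteration through read_access_chain().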
+void CompilerHLSL::read_access_chain_array(const string &lhs, const SPIRAccessChain &chain)
+{
+	auto &type = get<SPIRType>(chain.basetype);
+
+	// Need to use a reserved identifier here since it might shadow an identifier in the access chain input or other loops.
+	auto ident = get_unique_identifier();
+
+	statement("[unroll]");
+	statement("for (int ", ident, " = 0; ", ident, " < ", to_array_size(type, uint32_t(type.array.size() - 1)), "; ",
+	          ident, "++)");
+	begin_scope();
+	auto subchain = chain;
+	subchain.dynamic_index = join(ident, " * ", chain.array_stride, " + ", chain.dynamic_index);
+	subchain.basetype = type.parent_type;
+	if (!get<SPIRType>(subchain.basetype).array.empty())
+		subchain.array_stride = get_decoration(subchain.basetype, DecorationArrayStride);
+	read_access_chain(nullptr, join(lhs, "[", ident, "]"), subchain);
+	end_scope();
+}
+
+void CompilerHLSL::read_access_chain_struct(const string &lhs, const SPIRAccessChain &chain)
+{
+	auto &type = get<SPIRType>(chain.basetype);
+	auto subchain = chain;
+	uint32_t member_count = uint32_t(type.member_types.size());
+
+	for (uint32_t i = 0; i < member_count; i++)
+	{
+		uint32_t offset = type_struct_member_offset(type, i);
+		subchain.static_index = chain.static_index + offset;
+		subchain.basetype = type.member_types[i];
+
+		subchain.matrix_stride = 0;
+		subchain.array_stride = 0;
+		subchain.row_major_matrix = false;
+
+		auto &member_type = get<SPIRType>(subchain.basetype);
+		if (member_type.columns > 1)
+		{
+			subchain.matrix_stride = type_struct_member_matrix_stride(type, i);
+			subchain.row_major_matrix = has_member_decoration(type.self, i, DecorationRowMajor);
+		}
+
+		if (!member_type.array.empty())
+			subchain.array_stride = type_struct_member_array_stride(type, i);
+
+		read_access_chain(nullptr, join(lhs, ".", to_member_name(type, i)), subchain);
+	}
+}
+
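+// Reads a value through a ByteAddressBuffer access chain. Scalars and vectors use Load/Load2/3/4
+// (or templated Load<T> on SM 6.2+), matrices and row-major cases are reassembled per element; without
+// templated loads the raw uint data is bitcast back, so a float4 load ends up roughly as
+// asfloat(buf.Load4(offset)) with the actual buffer name and byte offset substituted.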
+void CompilerHLSL::read_access_chain(string *expr, const string &lhs, const SPIRAccessChain &chain)
+{
+	auto &type = get<SPIRType>(chain.basetype);
+
+	SPIRType target_type;
+	target_type.basetype = SPIRType::UInt;
+	target_type.vecsize = type.vecsize;
+	target_type.columns = type.columns;
+
+	if (!type.array.empty())
+	{
+		read_access_chain_array(lhs, chain);
+		return;
+	}
+	else if (type.basetype == SPIRType::Struct)
+	{
+		read_access_chain_struct(lhs, chain);
+		return;
+	}
+	else if (type.width != 32 && !hlsl_options.enable_16bit_types)
+		SPIRV_CROSS_THROW("Reading types other than 32-bit from ByteAddressBuffer not yet supported, unless SM 6.2 and "
+		                  "native 16-bit types are enabled.");
+
+	string base = chain.base;
+	if (has_decoration(chain.self, DecorationNonUniform))
+		convert_non_uniform_expression(base, chain.self);
+
+	bool templated_load = hlsl_options.shader_model >= 62;
+	string load_expr;
+
+	string template_expr;
+	if (templated_load)
+		template_expr = join("<", type_to_glsl(type), ">");
+
+	// Load a vector or scalar.
+	if (type.columns == 1 && !chain.row_major_matrix)
+	{
+		const char *load_op = nullptr;
+		switch (type.vecsize)
+		{
+		case 1:
+			load_op = "Load";
+			break;
+		case 2:
+			load_op = "Load2";
+			break;
+		case 3:
+			load_op = "Load3";
+			break;
+		case 4:
+			load_op = "Load4";
+			break;
+		default:
+			SPIRV_CROSS_THROW("Unknown vector size.");
+		}
+
+		if (templated_load)
+			load_op = "Load";
+
+		load_expr = join(base, ".", load_op, template_expr, "(", chain.dynamic_index, chain.static_index, ")");
+	}
+	else if (type.columns == 1)
+	{
+		// Strided load since we are loading a column from a row-major matrix.
+		if (templated_load)
+		{
+			auto scalar_type = type;
+			scalar_type.vecsize = 1;
+			scalar_type.columns = 1;
+			template_expr = join("<", type_to_glsl(scalar_type), ">");
+			if (type.vecsize > 1)
+				load_expr += type_to_glsl(type) + "(";
+		}
+		else if (type.vecsize > 1)
+		{
+			load_expr = type_to_glsl(target_type);
+			load_expr += "(";
+		}
+
+		for (uint32_t r = 0; r < type.vecsize; r++)
+		{
+			load_expr += join(base, ".Load", template_expr, "(", chain.dynamic_index,
+			                  chain.static_index + r * chain.matrix_stride, ")");
+			if (r + 1 < type.vecsize)
+				load_expr += ", ";
+		}
+
+		if (type.vecsize > 1)
+			load_expr += ")";
+	}
+	else if (!chain.row_major_matrix)
+	{
+		// Load a matrix, column-major, the easy case.
+		const char *load_op = nullptr;
+		switch (type.vecsize)
+		{
+		case 1:
+			load_op = "Load";
+			break;
+		case 2:
+			load_op = "Load2";
+			break;
+		case 3:
+			load_op = "Load3";
+			break;
+		case 4:
+			load_op = "Load4";
+			break;
+		default:
+			SPIRV_CROSS_THROW("Unknown vector size.");
+		}
+
+		if (templated_load)
+		{
+			auto vector_type = type;
+			vector_type.columns = 1;
+			template_expr = join("<", type_to_glsl(vector_type), ">");
+			load_expr = type_to_glsl(type);
+			load_op = "Load";
+		}
+		else
+		{
+			// Note, this loading style in HLSL is *actually* row-major, but we always treat matrices as transposed in this backend,
+			// so row-major is technically column-major ...
+			load_expr = type_to_glsl(target_type);
+		}
+		load_expr += "(";
+
+		for (uint32_t c = 0; c < type.columns; c++)
+		{
+			load_expr += join(base, ".", load_op, template_expr, "(", chain.dynamic_index,
+			                  chain.static_index + c * chain.matrix_stride, ")");
+			if (c + 1 < type.columns)
+				load_expr += ", ";
+		}
+		load_expr += ")";
+	}
+	else
+	{
+		// Pick out elements one by one ... Hopefully compilers are smart enough to recognize this pattern
+		// considering HLSL is "row-major decl", but "column-major" memory layout (basically implicit transpose model, ugh) ...
+
+		if (templated_load)
+		{
+			load_expr = type_to_glsl(type);
+			auto scalar_type = type;
+			scalar_type.vecsize = 1;
+			scalar_type.columns = 1;
+			template_expr = join("<", type_to_glsl(scalar_type), ">");
+		}
+		else
+			load_expr = type_to_glsl(target_type);
+
+		load_expr += "(";
+
+		for (uint32_t c = 0; c < type.columns; c++)
+		{
+			for (uint32_t r = 0; r < type.vecsize; r++)
+			{
+				load_expr += join(base, ".Load", template_expr, "(", chain.dynamic_index,
+				                  chain.static_index + c * (type.width / 8) + r * chain.matrix_stride, ")");
+
+				if ((r + 1 < type.vecsize) || (c + 1 < type.columns))
+					load_expr += ", ";
+			}
+		}
+		load_expr += ")";
+	}
+
+	if (!templated_load)
+	{
+		auto bitcast_op = bitcast_glsl_op(type, target_type);
+		if (!bitcast_op.empty())
+			load_expr = join(bitcast_op, "(", load_expr, ")");
+	}
+
+	if (lhs.empty())
+	{
+		assert(expr);
+		*expr = std::move(load_expr);
+	}
+	else
+		statement(lhs, " = ", load_expr, ";");
+}
+
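+// OpLoad handler: loads that go through a SPIRAccessChain (i.e. a ByteAddressBuffer) are routed to
+// read_access_chain(), with composites unrolled into a temporary; everything else uses the GLSL path.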
+void CompilerHLSL::emit_load(const Instruction &instruction)
+{
+	auto ops = stream(instruction);
+
+	auto *chain = maybe_get<SPIRAccessChain>(ops[2]);
+	if (chain)
+	{
+		uint32_t result_type = ops[0];
+		uint32_t id = ops[1];
+		uint32_t ptr = ops[2];
+
+		auto &type = get<SPIRType>(result_type);
+		bool composite_load = !type.array.empty() || type.basetype == SPIRType::Struct;
+
+		if (composite_load)
+		{
+			// We cannot make this work in one single expression as we might have nested structures and arrays,
+			// so unroll the load to an uninitialized temporary.
+			emit_uninitialized_temporary_expression(result_type, id);
+			read_access_chain(nullptr, to_expression(id), *chain);
+			track_expression_read(chain->self);
+		}
+		else
+		{
+			string load_expr;
+			read_access_chain(&load_expr, "", *chain);
+
+			bool forward = should_forward(ptr) && forced_temporaries.find(id) == end(forced_temporaries);
+
+			// If we are forwarding this load, don't register the read to the access chain here;
+			// defer that to when we actually use the expression, via the add_implied_read_expression mechanism.
+			if (!forward)
+				track_expression_read(chain->self);
+
+			// Do not forward complex load sequences like matrices, structs and arrays.
+			if (type.columns > 1)
+				forward = false;
+
+			auto &e = emit_op(result_type, id, load_expr, forward, true);
+			e.need_transpose = false;
+			register_read(id, ptr, forward);
+			inherit_expression_dependencies(id, ptr);
+			if (forward)
+				add_implied_read_expression(e, chain->self);
+		}
+	}
+	else
+		CompilerGLSL::emit_instruction(instruction);
+}
+
+void CompilerHLSL::write_access_chain_array(const SPIRAccessChain &chain, uint32_t value,
+                                            const SmallVector<uint32_t> &composite_chain)
+{
+	auto *ptype = &get<SPIRType>(chain.basetype);
+	while (ptype->pointer)
+	{
+		ptype = &get<SPIRType>(ptype->basetype);
+	}
+	auto &type = *ptype;
+
+	// Need to use a reserved identifier here since it might shadow an identifier in the access chain input or other loops.
+	auto ident = get_unique_identifier();
+
+	uint32_t id = ir.increase_bound_by(2);
+	uint32_t int_type_id = id + 1;
+	SPIRType int_type;
+	int_type.basetype = SPIRType::Int;
+	int_type.width = 32;
+	set<SPIRType>(int_type_id, int_type);
+	set<SPIRExpression>(id, ident, int_type_id, true);
+	set_name(id, ident);
+	suppressed_usage_tracking.insert(id);
+
+	statement("[unroll]");
+	statement("for (int ", ident, " = 0; ", ident, " < ", to_array_size(type, uint32_t(type.array.size() - 1)), "; ",
+	          ident, "++)");
+	begin_scope();
+	auto subchain = chain;
+	subchain.dynamic_index = join(ident, " * ", chain.array_stride, " + ", chain.dynamic_index);
+	subchain.basetype = type.parent_type;
+
+	// Forcefully allow us to use an ID here by setting MSB.
+	auto subcomposite_chain = composite_chain;
+	subcomposite_chain.push_back(0x80000000u | id);
+
+	if (!get<SPIRType>(subchain.basetype).array.empty())
+		subchain.array_stride = get_decoration(subchain.basetype, DecorationArrayStride);
+
+	write_access_chain(subchain, value, subcomposite_chain);
+	end_scope();
+}
+
+void CompilerHLSL::write_access_chain_struct(const SPIRAccessChain &chain, uint32_t value,
+                                             const SmallVector<uint32_t> &composite_chain)
+{
+	auto &type = get<SPIRType>(chain.basetype);
+	uint32_t member_count = uint32_t(type.member_types.size());
+	auto subchain = chain;
+
+	auto subcomposite_chain = composite_chain;
+	subcomposite_chain.push_back(0);
+
+	for (uint32_t i = 0; i < member_count; i++)
+	{
+		uint32_t offset = type_struct_member_offset(type, i);
+		subchain.static_index = chain.static_index + offset;
+		subchain.basetype = type.member_types[i];
+
+		subchain.matrix_stride = 0;
+		subchain.array_stride = 0;
+		subchain.row_major_matrix = false;
+
+		auto &member_type = get<SPIRType>(subchain.basetype);
+		if (member_type.columns > 1)
+		{
+			subchain.matrix_stride = type_struct_member_matrix_stride(type, i);
+			subchain.row_major_matrix = has_member_decoration(type.self, i, DecorationRowMajor);
+		}
+
+		if (!member_type.array.empty())
+			subchain.array_stride = type_struct_member_array_stride(type, i);
+
+		subcomposite_chain.back() = i;
+		write_access_chain(subchain, value, subcomposite_chain);
+	}
+}
+
+string CompilerHLSL::write_access_chain_value(uint32_t value, const SmallVector<uint32_t> &composite_chain,
+                                              bool enclose)
+{
+	string ret;
+	if (composite_chain.empty())
+		ret = to_expression(value);
+	else
+	{
+		AccessChainMeta meta;
+		ret = access_chain_internal(value, composite_chain.data(), uint32_t(composite_chain.size()),
+		                            ACCESS_CHAIN_INDEX_IS_LITERAL_BIT | ACCESS_CHAIN_LITERAL_MSB_FORCE_ID, &meta);
+	}
+
+	if (enclose)
+		ret = enclose_expression(ret);
+	return ret;
+}
+
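+// Store counterpart of read_access_chain(): writes through a (RW)ByteAddressBuffer access chain using
+// Store/Store2/3/4 or templated Store<T>, recursing into arrays and structs and writing matrices column
+// by column (or element by element for row-major layouts).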
+void CompilerHLSL::write_access_chain(const SPIRAccessChain &chain, uint32_t value,
+                                      const SmallVector<uint32_t> &composite_chain)
+{
+	auto &type = get<SPIRType>(chain.basetype);
+
+	// Make sure we trigger a read of the constituents in the access chain.
+	track_expression_read(chain.self);
+
+	SPIRType target_type;
+	target_type.basetype = SPIRType::UInt;
+	target_type.vecsize = type.vecsize;
+	target_type.columns = type.columns;
+
+	if (!type.array.empty())
+	{
+		write_access_chain_array(chain, value, composite_chain);
+		register_write(chain.self);
+		return;
+	}
+	else if (type.basetype == SPIRType::Struct)
+	{
+		write_access_chain_struct(chain, value, composite_chain);
+		register_write(chain.self);
+		return;
+	}
+	else if (type.width != 32 && !hlsl_options.enable_16bit_types)
+		SPIRV_CROSS_THROW("Writing types other than 32-bit to RWByteAddressBuffer not yet supported, unless SM 6.2 and "
+		                  "native 16-bit types are enabled.");
+
+	bool templated_store = hlsl_options.shader_model >= 62;
+
+	auto base = chain.base;
+	if (has_decoration(chain.self, DecorationNonUniform))
+		convert_non_uniform_expression(base, chain.self);
+
+	string template_expr;
+	if (templated_store)
+		template_expr = join("<", type_to_glsl(type), ">");
+
+	if (type.columns == 1 && !chain.row_major_matrix)
+	{
+		const char *store_op = nullptr;
+		switch (type.vecsize)
+		{
+		case 1:
+			store_op = "Store";
+			break;
+		case 2:
+			store_op = "Store2";
+			break;
+		case 3:
+			store_op = "Store3";
+			break;
+		case 4:
+			store_op = "Store4";
+			break;
+		default:
+			SPIRV_CROSS_THROW("Unknown vector size.");
+		}
+
+		auto store_expr = write_access_chain_value(value, composite_chain, false);
+
+		if (!templated_store)
+		{
+			auto bitcast_op = bitcast_glsl_op(target_type, type);
+			if (!bitcast_op.empty())
+				store_expr = join(bitcast_op, "(", store_expr, ")");
+		}
+		else
+			store_op = "Store";
+		statement(base, ".", store_op, template_expr, "(", chain.dynamic_index, chain.static_index, ", ",
+		          store_expr, ");");
+	}
+	else if (type.columns == 1)
+	{
+		if (templated_store)
+		{
+			auto scalar_type = type;
+			scalar_type.vecsize = 1;
+			scalar_type.columns = 1;
+			template_expr = join("<", type_to_glsl(scalar_type), ">");
+		}
+
+		// Strided store.
+		for (uint32_t r = 0; r < type.vecsize; r++)
+		{
+			auto store_expr = write_access_chain_value(value, composite_chain, true);
+			if (type.vecsize > 1)
+			{
+				store_expr += ".";
+				store_expr += index_to_swizzle(r);
+			}
+			remove_duplicate_swizzle(store_expr);
+
+			if (!templated_store)
+			{
+				auto bitcast_op = bitcast_glsl_op(target_type, type);
+				if (!bitcast_op.empty())
+					store_expr = join(bitcast_op, "(", store_expr, ")");
+			}
+
+			statement(base, ".Store", template_expr, "(", chain.dynamic_index,
+			          chain.static_index + chain.matrix_stride * r, ", ", store_expr, ");");
+		}
+	}
+	else if (!chain.row_major_matrix)
+	{
+		const char *store_op = nullptr;
+		switch (type.vecsize)
+		{
+		case 1:
+			store_op = "Store";
+			break;
+		case 2:
+			store_op = "Store2";
+			break;
+		case 3:
+			store_op = "Store3";
+			break;
+		case 4:
+			store_op = "Store4";
+			break;
+		default:
+			SPIRV_CROSS_THROW("Unknown vector size.");
+		}
+
+		if (templated_store)
+		{
+			store_op = "Store";
+			auto vector_type = type;
+			vector_type.columns = 1;
+			template_expr = join("<", type_to_glsl(vector_type), ">");
+		}
+
+		for (uint32_t c = 0; c < type.columns; c++)
+		{
+			auto store_expr = join(write_access_chain_value(value, composite_chain, true), "[", c, "]");
+
+			if (!templated_store)
+			{
+				auto bitcast_op = bitcast_glsl_op(target_type, type);
+				if (!bitcast_op.empty())
+					store_expr = join(bitcast_op, "(", store_expr, ")");
+			}
+
+			statement(base, ".", store_op, template_expr, "(", chain.dynamic_index,
+			          chain.static_index + c * chain.matrix_stride, ", ", store_expr, ");");
+		}
+	}
+	else
+	{
+		if (templated_store)
+		{
+			auto scalar_type = type;
+			scalar_type.vecsize = 1;
+			scalar_type.columns = 1;
+			template_expr = join("<", type_to_glsl(scalar_type), ">");
+		}
+
+		for (uint32_t r = 0; r < type.vecsize; r++)
+		{
+			for (uint32_t c = 0; c < type.columns; c++)
+			{
+				auto store_expr =
+				    join(write_access_chain_value(value, composite_chain, true), "[", c, "].", index_to_swizzle(r));
+				remove_duplicate_swizzle(store_expr);
+				auto bitcast_op = bitcast_glsl_op(target_type, type);
+				if (!bitcast_op.empty())
+					store_expr = join(bitcast_op, "(", store_expr, ")");
+				statement(base, ".Store", template_expr, "(", chain.dynamic_index,
+				          chain.static_index + c * (type.width / 8) + r * chain.matrix_stride, ", ", store_expr, ");");
+			}
+		}
+	}
+
+	register_write(chain.self);
+}
+
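+// OpStore handler: applies spvFlipVertY for meshlet position writes when flip_vert_y is requested,
+// routes ByteAddressBuffer access chains to write_access_chain(), and otherwise uses the GLSL path.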
+void CompilerHLSL::emit_store(const Instruction &instruction)
+{
+	auto ops = stream(instruction);
+	if (options.vertex.flip_vert_y)
+	{
+		auto *expr = maybe_get<SPIRExpression>(ops[0]);
+		if (expr != nullptr && expr->access_meshlet_position_y)
+		{
+			auto lhs = to_dereferenced_expression(ops[0]);
+			auto rhs = to_unpacked_expression(ops[1]);
+			statement(lhs, " = spvFlipVertY(", rhs, ");");
+			register_write(ops[0]);
+			return;
+		}
+	}
+
+	auto *chain = maybe_get<SPIRAccessChain>(ops[0]);
+	if (chain)
+		write_access_chain(*chain, ops[1], {});
+	else
+		CompilerGLSL::emit_instruction(instruction);
+}
+
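+// OpAccessChain handler: once a chain reaches into an SSBO, the result is tracked as a SPIRAccessChain
+// (base expression plus static/dynamic byte offsets and strides) so later loads and stores can be
+// lowered to ByteAddressBuffer operations.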
+void CompilerHLSL::emit_access_chain(const Instruction &instruction)
+{
+	auto ops = stream(instruction);
+	uint32_t length = instruction.length;
+
+	bool need_byte_access_chain = false;
+	auto &type = expression_type(ops[2]);
+	const auto *chain = maybe_get<SPIRAccessChain>(ops[2]);
+
+	if (chain)
+	{
+		// Keep tacking on an existing access chain.
+		need_byte_access_chain = true;
+	}
+	else if (type.storage == StorageClassStorageBuffer || has_decoration(type.self, DecorationBufferBlock))
+	{
+		// If we are starting to poke into an SSBO, we are dealing with ByteAddressBuffers, and we need
+		// to emit SPIRAccessChain rather than a plain SPIRExpression.
+		uint32_t chain_arguments = length - 3;
+		if (chain_arguments > type.array.size())
+			need_byte_access_chain = true;
+	}
+
+	if (need_byte_access_chain)
+	{
+		// If we have a chain variable, we are already inside the SSBO, and any array type will refer to arrays within a block,
+		// and not an array of SSBOs.
+		uint32_t to_plain_buffer_length = chain ? 0u : static_cast<uint32_t>(type.array.size());
+
+		auto *backing_variable = maybe_get_backing_variable(ops[2]);
+
+		if (backing_variable != nullptr && is_user_type_structured(backing_variable->self))
+		{
+			CompilerGLSL::emit_instruction(instruction);
+			return;
+		}
+
+		string base;
+		if (to_plain_buffer_length != 0)
+			base = access_chain(ops[2], &ops[3], to_plain_buffer_length, get<SPIRType>(ops[0]));
+		else if (chain)
+			base = chain->base;
+		else
+			base = to_expression(ops[2]);
+
+		// Start traversing type hierarchy at the proper non-pointer types.
+		auto *basetype = &get_pointee_type(type);
+
+		// Traverse the type hierarchy down to the actual buffer types.
+		for (uint32_t i = 0; i < to_plain_buffer_length; i++)
+		{
+			assert(basetype->parent_type);
+			basetype = &get<SPIRType>(basetype->parent_type);
+		}
+
+		uint32_t matrix_stride = 0;
+		uint32_t array_stride = 0;
+		bool row_major_matrix = false;
+
+		// Inherit matrix information.
+		if (chain)
+		{
+			matrix_stride = chain->matrix_stride;
+			row_major_matrix = chain->row_major_matrix;
+			array_stride = chain->array_stride;
+		}
+
+		auto offsets = flattened_access_chain_offset(*basetype, &ops[3 + to_plain_buffer_length],
+		                                             length - 3 - to_plain_buffer_length, 0, 1, &row_major_matrix,
+		                                             &matrix_stride, &array_stride);
+
+		auto &e = set<SPIRAccessChain>(ops[1], ops[0], type.storage, base, offsets.first, offsets.second);
+		e.row_major_matrix = row_major_matrix;
+		e.matrix_stride = matrix_stride;
+		e.array_stride = array_stride;
+		e.immutable = should_forward(ops[2]);
+		e.loaded_from = backing_variable ? backing_variable->self : ID(0);
+
+		if (chain)
+		{
+			e.dynamic_index += chain->dynamic_index;
+			e.static_index += chain->static_index;
+		}
+
+		for (uint32_t i = 2; i < length; i++)
+		{
+			inherit_expression_dependencies(ops[1], ops[i]);
+			add_implied_read_expression(e, ops[i]);
+		}
+	}
+	else
+	{
+		CompilerGLSL::emit_instruction(instruction);
+	}
+}
+
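+// Lowers SPIR-V atomics to Interlocked* intrinsics on either a typed resource or a RWByteAddressBuffer
+// offset. Atomic loads, increments and decrements are emulated with InterlockedAdd, atomic stores with
+// InterlockedExchange; e.g. OpAtomicIAdd on an SSBO member becomes something like
+// buf.InterlockedAdd(offset, value, result); with the actual names filled in.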
+void CompilerHLSL::emit_atomic(const uint32_t *ops, uint32_t length, spv::Op op)
+{
+	const char *atomic_op = nullptr;
+
+	string value_expr;
+	if (op != OpAtomicIDecrement && op != OpAtomicIIncrement && op != OpAtomicLoad && op != OpAtomicStore)
+		value_expr = to_expression(ops[op == OpAtomicCompareExchange ? 6 : 5]);
+
+	bool is_atomic_store = false;
+
+	switch (op)
+	{
+	case OpAtomicIIncrement:
+		atomic_op = "InterlockedAdd";
+		value_expr = "1";
+		break;
+
+	case OpAtomicIDecrement:
+		atomic_op = "InterlockedAdd";
+		value_expr = "-1";
+		break;
+
+	case OpAtomicLoad:
+		atomic_op = "InterlockedAdd";
+		value_expr = "0";
+		break;
+
+	case OpAtomicISub:
+		atomic_op = "InterlockedAdd";
+		value_expr = join("-", enclose_expression(value_expr));
+		break;
+
+	case OpAtomicSMin:
+	case OpAtomicUMin:
+		atomic_op = "InterlockedMin";
+		break;
+
+	case OpAtomicSMax:
+	case OpAtomicUMax:
+		atomic_op = "InterlockedMax";
+		break;
+
+	case OpAtomicAnd:
+		atomic_op = "InterlockedAnd";
+		break;
+
+	case OpAtomicOr:
+		atomic_op = "InterlockedOr";
+		break;
+
+	case OpAtomicXor:
+		atomic_op = "InterlockedXor";
+		break;
+
+	case OpAtomicIAdd:
+		atomic_op = "InterlockedAdd";
+		break;
+
+	case OpAtomicExchange:
+		atomic_op = "InterlockedExchange";
+		break;
+
+	case OpAtomicStore:
+		atomic_op = "InterlockedExchange";
+		is_atomic_store = true;
+		break;
+
+	case OpAtomicCompareExchange:
+		if (length < 8)
+			SPIRV_CROSS_THROW("Not enough data for opcode.");
+		atomic_op = "InterlockedCompareExchange";
+		value_expr = join(to_expression(ops[7]), ", ", value_expr);
+		break;
+
+	default:
+		SPIRV_CROSS_THROW("Unknown atomic opcode.");
+	}
+
+	if (is_atomic_store)
+	{
+		auto &data_type = expression_type(ops[0]);
+		auto *chain = maybe_get<SPIRAccessChain>(ops[0]);
+
+		auto &tmp_id = extra_sub_expressions[ops[0]];
+		if (!tmp_id)
+		{
+			tmp_id = ir.increase_bound_by(1);
+			emit_uninitialized_temporary_expression(get_pointee_type(data_type).self, tmp_id);
+		}
+
+		if (data_type.storage == StorageClassImage || !chain)
+		{
+			statement(atomic_op, "(", to_non_uniform_aware_expression(ops[0]), ", ",
+			          to_expression(ops[3]), ", ", to_expression(tmp_id), ");");
+		}
+		else
+		{
+			string base = chain->base;
+			if (has_decoration(chain->self, DecorationNonUniform))
+				convert_non_uniform_expression(base, chain->self);
+			// RWByteAddressBuffer is always uint in its underlying type.
+			statement(base, ".", atomic_op, "(", chain->dynamic_index, chain->static_index, ", ",
+			          to_expression(ops[3]), ", ", to_expression(tmp_id), ");");
+		}
+	}
+	else
+	{
+		uint32_t result_type = ops[0];
+		uint32_t id = ops[1];
+		forced_temporaries.insert(ops[1]);
+
+		auto &type = get<SPIRType>(result_type);
+		statement(variable_decl(type, to_name(id)), ";");
+
+		auto &data_type = expression_type(ops[2]);
+		auto *chain = maybe_get<SPIRAccessChain>(ops[2]);
+		SPIRType::BaseType expr_type;
+		if (data_type.storage == StorageClassImage || !chain)
+		{
+			statement(atomic_op, "(", to_non_uniform_aware_expression(ops[2]), ", ", value_expr, ", ", to_name(id), ");");
+			expr_type = data_type.basetype;
+		}
+		else
+		{
+			// RWByteAddressBuffer is always uint in its underlying type.
+			string base = chain->base;
+			if (has_decoration(chain->self, DecorationNonUniform))
+				convert_non_uniform_expression(base, chain->self);
+			expr_type = SPIRType::UInt;
+			statement(base, ".", atomic_op, "(", chain->dynamic_index, chain->static_index, ", ", value_expr,
+			          ", ", to_name(id), ");");
+		}
+
+		auto expr = bitcast_expression(type, expr_type, to_name(id));
+		set<SPIRExpression>(id, expr, result_type, true);
+	}
+	flush_all_atomic_capable_variables();
+}
+
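+// Maps SPIR-V subgroup operations to SM 6.0 wave intrinsics (WaveActiveBallot, WaveReadLaneAt,
+// WaveActive*/WavePrefix* reductions, QuadRead*); operations with no trivial HLSL equivalent throw.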
+void CompilerHLSL::emit_subgroup_op(const Instruction &i)
+{
+	if (hlsl_options.shader_model < 60)
+		SPIRV_CROSS_THROW("Wave ops requires SM 6.0 or higher.");
+
+	const uint32_t *ops = stream(i);
+	auto op = static_cast<Op>(i.op);
+
+	uint32_t result_type = ops[0];
+	uint32_t id = ops[1];
+
+	auto scope = static_cast<Scope>(evaluate_constant_u32(ops[2]));
+	if (scope != ScopeSubgroup)
+		SPIRV_CROSS_THROW("Only subgroup scope is supported.");
+
+	const auto make_inclusive_Sum = [&](const string &expr) -> string {
+		return join(expr, " + ", to_expression(ops[4]));
+	};
+
+	const auto make_inclusive_Product = [&](const string &expr) -> string {
+		return join(expr, " * ", to_expression(ops[4]));
+	};
+
+	// If we need to do implicit bitcasts, make sure we do it with the correct type.
+	uint32_t integer_width = get_integer_width_for_instruction(i);
+	auto int_type = to_signed_basetype(integer_width);
+	auto uint_type = to_unsigned_basetype(integer_width);
+
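+// Dummy expansions so the HLSL_GROUP_OP macro below still compiles for operations where supports_scan
+// is false; those inclusive-scan branches are never taken.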
+#define make_inclusive_BitAnd(expr) ""
+#define make_inclusive_BitOr(expr) ""
+#define make_inclusive_BitXor(expr) ""
+#define make_inclusive_Min(expr) ""
+#define make_inclusive_Max(expr) ""
+
+	switch (op)
+	{
+	case OpGroupNonUniformElect:
+		emit_op(result_type, id, "WaveIsFirstLane()", true);
+		break;
+
+	case OpGroupNonUniformBroadcast:
+		emit_binary_func_op(result_type, id, ops[3], ops[4], "WaveReadLaneAt");
+		break;
+
+	case OpGroupNonUniformBroadcastFirst:
+		emit_unary_func_op(result_type, id, ops[3], "WaveReadLaneFirst");
+		break;
+
+	case OpGroupNonUniformBallot:
+		emit_unary_func_op(result_type, id, ops[3], "WaveActiveBallot");
+		break;
+
+	case OpGroupNonUniformInverseBallot:
+		SPIRV_CROSS_THROW("Cannot trivially implement InverseBallot in HLSL.");
+
+	case OpGroupNonUniformBallotBitExtract:
+		SPIRV_CROSS_THROW("Cannot trivially implement BallotBitExtract in HLSL.");
+
+	case OpGroupNonUniformBallotFindLSB:
+		SPIRV_CROSS_THROW("Cannot trivially implement BallotFindLSB in HLSL.");
+
+	case OpGroupNonUniformBallotFindMSB:
+		SPIRV_CROSS_THROW("Cannot trivially implement BallotFindMSB in HLSL.");
+
+	case OpGroupNonUniformBallotBitCount:
+	{
+		auto operation = static_cast<GroupOperation>(ops[3]);
+		bool forward = should_forward(ops[4]);
+		if (operation == GroupOperationReduce)
+		{
+			auto left = join("countbits(", to_enclosed_expression(ops[4]), ".x) + countbits(",
+			                 to_enclosed_expression(ops[4]), ".y)");
+			auto right = join("countbits(", to_enclosed_expression(ops[4]), ".z) + countbits(",
+			                  to_enclosed_expression(ops[4]), ".w)");
+			emit_op(result_type, id, join(left, " + ", right), forward);
+			inherit_expression_dependencies(id, ops[4]);
+		}
+		else if (operation == GroupOperationInclusiveScan)
+		{
+			auto left = join("countbits(", to_enclosed_expression(ops[4]), ".x & gl_SubgroupLeMask.x) + countbits(",
+			                 to_enclosed_expression(ops[4]), ".y & gl_SubgroupLeMask.y)");
+			auto right = join("countbits(", to_enclosed_expression(ops[4]), ".z & gl_SubgroupLeMask.z) + countbits(",
+			                  to_enclosed_expression(ops[4]), ".w & gl_SubgroupLeMask.w)");
+			emit_op(result_type, id, join(left, " + ", right), forward);
+			if (!active_input_builtins.get(BuiltInSubgroupLeMask))
+			{
+				active_input_builtins.set(BuiltInSubgroupLeMask);
+				force_recompile_guarantee_forward_progress();
+			}
+		}
+		else if (operation == GroupOperationExclusiveScan)
+		{
+			auto left = join("countbits(", to_enclosed_expression(ops[4]), ".x & gl_SubgroupLtMask.x) + countbits(",
+			                 to_enclosed_expression(ops[4]), ".y & gl_SubgroupLtMask.y)");
+			auto right = join("countbits(", to_enclosed_expression(ops[4]), ".z & gl_SubgroupLtMask.z) + countbits(",
+			                  to_enclosed_expression(ops[4]), ".w & gl_SubgroupLtMask.w)");
+			emit_op(result_type, id, join(left, " + ", right), forward);
+			if (!active_input_builtins.get(BuiltInSubgroupLtMask))
+			{
+				active_input_builtins.set(BuiltInSubgroupLtMask);
+				force_recompile_guarantee_forward_progress();
+			}
+		}
+		else
+			SPIRV_CROSS_THROW("Invalid BitCount operation.");
+		break;
+	}
+
+	case OpGroupNonUniformShuffle:
+		emit_binary_func_op(result_type, id, ops[3], ops[4], "WaveReadLaneAt");
+		break;
+	case OpGroupNonUniformShuffleXor:
+	{
+		bool forward = should_forward(ops[3]);
+		emit_op(ops[0], ops[1],
+		        join("WaveReadLaneAt(", to_unpacked_expression(ops[3]), ", ",
+		             "WaveGetLaneIndex() ^ ", to_enclosed_expression(ops[4]), ")"), forward);
+		inherit_expression_dependencies(ops[1], ops[3]);
+		break;
+	}
+	case OpGroupNonUniformShuffleUp:
+	{
+		bool forward = should_forward(ops[3]);
+		emit_op(ops[0], ops[1],
+		        join("WaveReadLaneAt(", to_unpacked_expression(ops[3]), ", ",
+		             "WaveGetLaneIndex() - ", to_enclosed_expression(ops[4]), ")"), forward);
+		inherit_expression_dependencies(ops[1], ops[3]);
+		break;
+	}
+	case OpGroupNonUniformShuffleDown:
+	{
+		bool forward = should_forward(ops[3]);
+		emit_op(ops[0], ops[1],
+		        join("WaveReadLaneAt(", to_unpacked_expression(ops[3]), ", ",
+		             "WaveGetLaneIndex() + ", to_enclosed_expression(ops[4]), ")"), forward);
+		inherit_expression_dependencies(ops[1], ops[3]);
+		break;
+	}
+
+	case OpGroupNonUniformAll:
+		emit_unary_func_op(result_type, id, ops[3], "WaveActiveAllTrue");
+		break;
+
+	case OpGroupNonUniformAny:
+		emit_unary_func_op(result_type, id, ops[3], "WaveActiveAnyTrue");
+		break;
+
+	case OpGroupNonUniformAllEqual:
+		emit_unary_func_op(result_type, id, ops[3], "WaveActiveAllEqual");
+		break;
+
+	// clang-format off
+#define HLSL_GROUP_OP(op, hlsl_op, supports_scan) \
+case OpGroupNonUniform##op: \
+	{ \
+		auto operation = static_cast<GroupOperation>(ops[3]); \
+		if (operation == GroupOperationReduce) \
+			emit_unary_func_op(result_type, id, ops[4], "WaveActive" #hlsl_op); \
+		else if (operation == GroupOperationInclusiveScan && supports_scan) \
+        { \
+			bool forward = should_forward(ops[4]); \
+			emit_op(result_type, id, make_inclusive_##hlsl_op (join("WavePrefix" #hlsl_op, "(", to_expression(ops[4]), ")")), forward); \
+			inherit_expression_dependencies(id, ops[4]); \
+        } \
+		else if (operation == GroupOperationExclusiveScan && supports_scan) \
+			emit_unary_func_op(result_type, id, ops[4], "WavePrefix" #hlsl_op); \
+		else if (operation == GroupOperationClusteredReduce) \
+			SPIRV_CROSS_THROW("Cannot trivially implement ClusteredReduce in HLSL."); \
+		else \
+			SPIRV_CROSS_THROW("Invalid group operation."); \
+		break; \
+	}
+
+#define HLSL_GROUP_OP_CAST(op, hlsl_op, type) \
+case OpGroupNonUniform##op: \
+	{ \
+		auto operation = static_cast<GroupOperation>(ops[3]); \
+		if (operation == GroupOperationReduce) \
+			emit_unary_func_op_cast(result_type, id, ops[4], "WaveActive" #hlsl_op, type, type); \
+		else \
+			SPIRV_CROSS_THROW("Invalid group operation."); \
+		break; \
+	}
+
+	HLSL_GROUP_OP(FAdd, Sum, true)
+	HLSL_GROUP_OP(FMul, Product, true)
+	HLSL_GROUP_OP(FMin, Min, false)
+	HLSL_GROUP_OP(FMax, Max, false)
+	HLSL_GROUP_OP(IAdd, Sum, true)
+	HLSL_GROUP_OP(IMul, Product, true)
+	HLSL_GROUP_OP_CAST(SMin, Min, int_type)
+	HLSL_GROUP_OP_CAST(SMax, Max, int_type)
+	HLSL_GROUP_OP_CAST(UMin, Min, uint_type)
+	HLSL_GROUP_OP_CAST(UMax, Max, uint_type)
+	HLSL_GROUP_OP(BitwiseAnd, BitAnd, false)
+	HLSL_GROUP_OP(BitwiseOr, BitOr, false)
+	HLSL_GROUP_OP(BitwiseXor, BitXor, false)
+	HLSL_GROUP_OP_CAST(LogicalAnd, BitAnd, uint_type)
+	HLSL_GROUP_OP_CAST(LogicalOr, BitOr, uint_type)
+	HLSL_GROUP_OP_CAST(LogicalXor, BitXor, uint_type)
+
+#undef HLSL_GROUP_OP
+#undef HLSL_GROUP_OP_CAST
+		// clang-format on
+
+	case OpGroupNonUniformQuadSwap:
+	{
+		uint32_t direction = evaluate_constant_u32(ops[4]);
+		if (direction == 0)
+			emit_unary_func_op(result_type, id, ops[3], "QuadReadAcrossX");
+		else if (direction == 1)
+			emit_unary_func_op(result_type, id, ops[3], "QuadReadAcrossY");
+		else if (direction == 2)
+			emit_unary_func_op(result_type, id, ops[3], "QuadReadAcrossDiagonal");
+		else
+			SPIRV_CROSS_THROW("Invalid quad swap direction.");
+		break;
+	}
+
+	case OpGroupNonUniformQuadBroadcast:
+	{
+		emit_binary_func_op(result_type, id, ops[3], ops[4], "QuadReadLaneAt");
+		break;
+	}
+
+	default:
+		SPIRV_CROSS_THROW("Invalid opcode for subgroup.");
+	}
+
+	register_control_dependent_expression(id);
+}
+
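+// Per-instruction dispatcher for the HLSL backend: opcodes that need HLSL-specific lowering (access
+// chains, ByteAddressBuffer loads/stores, transposed matrix multiplies, unordered float comparisons,
+// texture queries, ...) are handled here; everything else falls through to CompilerGLSL::emit_instruction().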
+void CompilerHLSL::emit_instruction(const Instruction &instruction)
+{
+	auto ops = stream(instruction);
+	auto opcode = static_cast<Op>(instruction.op);
+
+#define HLSL_BOP(op) emit_binary_op(ops[0], ops[1], ops[2], ops[3], #op)
+#define HLSL_BOP_CAST(op, type) \
+	emit_binary_op_cast(ops[0], ops[1], ops[2], ops[3], #op, type, opcode_is_sign_invariant(opcode), false)
+#define HLSL_UOP(op) emit_unary_op(ops[0], ops[1], ops[2], #op)
+#define HLSL_QFOP(op) emit_quaternary_func_op(ops[0], ops[1], ops[2], ops[3], ops[4], ops[5], #op)
+#define HLSL_TFOP(op) emit_trinary_func_op(ops[0], ops[1], ops[2], ops[3], ops[4], #op)
+#define HLSL_BFOP(op) emit_binary_func_op(ops[0], ops[1], ops[2], ops[3], #op)
+#define HLSL_BFOP_CAST(op, type) \
+	emit_binary_func_op_cast(ops[0], ops[1], ops[2], ops[3], #op, type, opcode_is_sign_invariant(opcode))
+#define HLSL_UFOP(op) emit_unary_func_op(ops[0], ops[1], ops[2], #op)
+
+	// If we need to do implicit bitcasts, make sure we do it with the correct type.
+	uint32_t integer_width = get_integer_width_for_instruction(instruction);
+	auto int_type = to_signed_basetype(integer_width);
+	auto uint_type = to_unsigned_basetype(integer_width);
+
+	opcode = get_remapped_spirv_op(opcode);
+
+	switch (opcode)
+	{
+	case OpAccessChain:
+	case OpInBoundsAccessChain:
+	{
+		emit_access_chain(instruction);
+		break;
+	}
+	case OpBitcast:
+	{
+		auto bitcast_type = get_bitcast_type(ops[0], ops[2]);
+		if (bitcast_type == CompilerHLSL::TypeNormal)
+			CompilerGLSL::emit_instruction(instruction);
+		else
+		{
+			if (!requires_uint2_packing)
+			{
+				requires_uint2_packing = true;
+				force_recompile();
+			}
+
+			if (bitcast_type == CompilerHLSL::TypePackUint2x32)
+				emit_unary_func_op(ops[0], ops[1], ops[2], "spvPackUint2x32");
+			else
+				emit_unary_func_op(ops[0], ops[1], ops[2], "spvUnpackUint2x32");
+		}
+
+		break;
+	}
+
+	case OpSelect:
+	{
+		auto &value_type = expression_type(ops[3]);
+		if (value_type.basetype == SPIRType::Struct || is_array(value_type))
+		{
+			// HLSL does not support ternary expressions on composites.
+			// Cannot use branches, since we might be in a continue block
+			// where explicit control flow is prohibited.
+			// Emit a helper function where we can use control flow.
+			TypeID value_type_id = expression_type_id(ops[3]);
+			auto itr = std::find(composite_selection_workaround_types.begin(),
+			                     composite_selection_workaround_types.end(),
+			                     value_type_id);
+			if (itr == composite_selection_workaround_types.end())
+			{
+				composite_selection_workaround_types.push_back(value_type_id);
+				force_recompile();
+			}
+			emit_uninitialized_temporary_expression(ops[0], ops[1]);
+			statement("spvSelectComposite(",
+					  to_expression(ops[1]), ", ", to_expression(ops[2]), ", ",
+					  to_expression(ops[3]), ", ", to_expression(ops[4]), ");");
+		}
+		else
+			CompilerGLSL::emit_instruction(instruction);
+		break;
+	}
+
+	case OpStore:
+	{
+		emit_store(instruction);
+		break;
+	}
+
+	case OpLoad:
+	{
+		emit_load(instruction);
+		break;
+	}
+
+	case OpMatrixTimesVector:
+	{
+		// Matrices are kept in a transposed state all the time, so always flip the multiplication order.
+		emit_binary_func_op(ops[0], ops[1], ops[3], ops[2], "mul");
+		break;
+	}
+
+	case OpVectorTimesMatrix:
+	{
+		// Matrices are kept in a transposed state all the time, so always flip the multiplication order.
+		emit_binary_func_op(ops[0], ops[1], ops[3], ops[2], "mul");
+		break;
+	}
+
+	case OpMatrixTimesMatrix:
+	{
+		// Matrices are kept in a transposed state all the time, so always flip the multiplication order.
+		emit_binary_func_op(ops[0], ops[1], ops[3], ops[2], "mul");
+		break;
+	}
+
+	case OpOuterProduct:
+	{
+		uint32_t result_type = ops[0];
+		uint32_t id = ops[1];
+		uint32_t a = ops[2];
+		uint32_t b = ops[3];
+
+		auto &type = get<SPIRType>(result_type);
+		string expr = type_to_glsl_constructor(type);
+		expr += "(";
+		for (uint32_t col = 0; col < type.columns; col++)
+		{
+			expr += to_enclosed_expression(a);
+			expr += " * ";
+			expr += to_extract_component_expression(b, col);
+			if (col + 1 < type.columns)
+				expr += ", ";
+		}
+		expr += ")";
+		emit_op(result_type, id, expr, should_forward(a) && should_forward(b));
+		inherit_expression_dependencies(id, a);
+		inherit_expression_dependencies(id, b);
+		break;
+	}
+
+	case OpFMod:
+	{
+		if (!requires_op_fmod)
+		{
+			requires_op_fmod = true;
+			force_recompile();
+		}
+		CompilerGLSL::emit_instruction(instruction);
+		break;
+	}
+
+	case OpFRem:
+		emit_binary_func_op(ops[0], ops[1], ops[2], ops[3], "fmod");
+		break;
+
+	case OpImage:
+	{
+		uint32_t result_type = ops[0];
+		uint32_t id = ops[1];
+		auto *combined = maybe_get<SPIRCombinedImageSampler>(ops[2]);
+
+		if (combined)
+		{
+			auto &e = emit_op(result_type, id, to_expression(combined->image), true, true);
+			auto *var = maybe_get_backing_variable(combined->image);
+			if (var)
+				e.loaded_from = var->self;
+		}
+		else
+		{
+			auto &e = emit_op(result_type, id, to_expression(ops[2]), true, true);
+			auto *var = maybe_get_backing_variable(ops[2]);
+			if (var)
+				e.loaded_from = var->self;
+		}
+		break;
+	}
+
+	case OpDPdx:
+		HLSL_UFOP(ddx);
+		register_control_dependent_expression(ops[1]);
+		break;
+
+	case OpDPdy:
+		HLSL_UFOP(ddy);
+		register_control_dependent_expression(ops[1]);
+		break;
+
+	case OpDPdxFine:
+		HLSL_UFOP(ddx_fine);
+		register_control_dependent_expression(ops[1]);
+		break;
+
+	case OpDPdyFine:
+		HLSL_UFOP(ddy_fine);
+		register_control_dependent_expression(ops[1]);
+		break;
+
+	case OpDPdxCoarse:
+		HLSL_UFOP(ddx_coarse);
+		register_control_dependent_expression(ops[1]);
+		break;
+
+	case OpDPdyCoarse:
+		HLSL_UFOP(ddy_coarse);
+		register_control_dependent_expression(ops[1]);
+		break;
+
+	case OpFwidth:
+	case OpFwidthCoarse:
+	case OpFwidthFine:
+		HLSL_UFOP(fwidth);
+		register_control_dependent_expression(ops[1]);
+		break;
+
+	case OpLogicalNot:
+	{
+		auto result_type = ops[0];
+		auto id = ops[1];
+		auto &type = get<SPIRType>(result_type);
+
+		if (type.vecsize > 1)
+			emit_unrolled_unary_op(result_type, id, ops[2], "!");
+		else
+			HLSL_UOP(!);
+		break;
+	}
+
+	case OpIEqual:
+	{
+		auto result_type = ops[0];
+		auto id = ops[1];
+
+		if (expression_type(ops[2]).vecsize > 1)
+			emit_unrolled_binary_op(result_type, id, ops[2], ops[3], "==", false, SPIRType::Unknown);
+		else
+			HLSL_BOP_CAST(==, int_type);
+		break;
+	}
+
+	case OpLogicalEqual:
+	case OpFOrdEqual:
+	case OpFUnordEqual:
+	{
+		// HLSL != operator is unordered.
+		// https://docs.microsoft.com/en-us/windows/win32/direct3d10/d3d10-graphics-programming-guide-resources-float-rules.
+		// isnan() is apparently implemented as x != x as well.
+		// We cannot implement UnordEqual as !(OrdNotEqual), as HLSL cannot express OrdNotEqual.
+		// HACK: FUnordEqual will be implemented as FOrdEqual.
+
+		auto result_type = ops[0];
+		auto id = ops[1];
+
+		if (expression_type(ops[2]).vecsize > 1)
+			emit_unrolled_binary_op(result_type, id, ops[2], ops[3], "==", false, SPIRType::Unknown);
+		else
+			HLSL_BOP(==);
+		break;
+	}
+
+	case OpINotEqual:
+	{
+		auto result_type = ops[0];
+		auto id = ops[1];
+
+		if (expression_type(ops[2]).vecsize > 1)
+			emit_unrolled_binary_op(result_type, id, ops[2], ops[3], "!=", false, SPIRType::Unknown);
+		else
+			HLSL_BOP_CAST(!=, int_type);
+		break;
+	}
+
+	case OpLogicalNotEqual:
+	case OpFOrdNotEqual:
+	case OpFUnordNotEqual:
+	{
+		// HLSL != operator is unordered.
+		// https://docs.microsoft.com/en-us/windows/win32/direct3d10/d3d10-graphics-programming-guide-resources-float-rules.
+		// isnan() is apparently implemented as x != x as well.
+
+		// FIXME: FOrdNotEqual cannot be implemented in a crisp and simple way here.
+		// We would need to do something like not(UnordEqual), but that cannot be expressed either.
+		// Adding a lot of NaN checks would be a breaking change from perspective of performance.
+		// SPIR-V will generally use isnan() checks when this even matters.
+		// HACK: FOrdNotEqual will be implemented as FUnordEqual.
+
+		auto result_type = ops[0];
+		auto id = ops[1];
+
+		if (expression_type(ops[2]).vecsize > 1)
+			emit_unrolled_binary_op(result_type, id, ops[2], ops[3], "!=", false, SPIRType::Unknown);
+		else
+			HLSL_BOP(!=);
+		break;
+	}
+
+	case OpUGreaterThan:
+	case OpSGreaterThan:
+	{
+		auto result_type = ops[0];
+		auto id = ops[1];
+		auto type = opcode == OpUGreaterThan ? uint_type : int_type;
+
+		if (expression_type(ops[2]).vecsize > 1)
+			emit_unrolled_binary_op(result_type, id, ops[2], ops[3], ">", false, type);
+		else
+			HLSL_BOP_CAST(>, type);
+		break;
+	}
+
+	case OpFOrdGreaterThan:
+	{
+		auto result_type = ops[0];
+		auto id = ops[1];
+
+		if (expression_type(ops[2]).vecsize > 1)
+			emit_unrolled_binary_op(result_type, id, ops[2], ops[3], ">", false, SPIRType::Unknown);
+		else
+			HLSL_BOP(>);
+		break;
+	}
+
+	case OpFUnordGreaterThan:
+	{
+		auto result_type = ops[0];
+		auto id = ops[1];
+
+		if (expression_type(ops[2]).vecsize > 1)
+			emit_unrolled_binary_op(result_type, id, ops[2], ops[3], "<=", true, SPIRType::Unknown);
+		else
+			CompilerGLSL::emit_instruction(instruction);
+		break;
+	}
+
+	case OpUGreaterThanEqual:
+	case OpSGreaterThanEqual:
+	{
+		auto result_type = ops[0];
+		auto id = ops[1];
+
+		auto type = opcode == OpUGreaterThanEqual ? uint_type : int_type;
+		if (expression_type(ops[2]).vecsize > 1)
+			emit_unrolled_binary_op(result_type, id, ops[2], ops[3], ">=", false, type);
+		else
+			HLSL_BOP_CAST(>=, type);
+		break;
+	}
+
+	case OpFOrdGreaterThanEqual:
+	{
+		auto result_type = ops[0];
+		auto id = ops[1];
+
+		if (expression_type(ops[2]).vecsize > 1)
+			emit_unrolled_binary_op(result_type, id, ops[2], ops[3], ">=", false, SPIRType::Unknown);
+		else
+			HLSL_BOP(>=);
+		break;
+	}
+
+	case OpFUnordGreaterThanEqual:
+	{
+		auto result_type = ops[0];
+		auto id = ops[1];
+
+		if (expression_type(ops[2]).vecsize > 1)
+			emit_unrolled_binary_op(result_type, id, ops[2], ops[3], "<", true, SPIRType::Unknown);
+		else
+			CompilerGLSL::emit_instruction(instruction);
+		break;
+	}
+
+	case OpULessThan:
+	case OpSLessThan:
+	{
+		auto result_type = ops[0];
+		auto id = ops[1];
+
+		auto type = opcode == OpULessThan ? uint_type : int_type;
+		if (expression_type(ops[2]).vecsize > 1)
+			emit_unrolled_binary_op(result_type, id, ops[2], ops[3], "<", false, type);
+		else
+			HLSL_BOP_CAST(<, type);
+		break;
+	}
+
+	case OpFOrdLessThan:
+	{
+		auto result_type = ops[0];
+		auto id = ops[1];
+
+		if (expression_type(ops[2]).vecsize > 1)
+			emit_unrolled_binary_op(result_type, id, ops[2], ops[3], "<", false, SPIRType::Unknown);
+		else
+			HLSL_BOP(<);
+		break;
+	}
+
+	case OpFUnordLessThan:
+	{
+		auto result_type = ops[0];
+		auto id = ops[1];
+
+		if (expression_type(ops[2]).vecsize > 1)
+			emit_unrolled_binary_op(result_type, id, ops[2], ops[3], ">=", true, SPIRType::Unknown);
+		else
+			CompilerGLSL::emit_instruction(instruction);
+		break;
+	}
+
+	case OpULessThanEqual:
+	case OpSLessThanEqual:
+	{
+		auto result_type = ops[0];
+		auto id = ops[1];
+
+		auto type = opcode == OpULessThanEqual ? uint_type : int_type;
+		if (expression_type(ops[2]).vecsize > 1)
+			emit_unrolled_binary_op(result_type, id, ops[2], ops[3], "<=", false, type);
+		else
+			HLSL_BOP_CAST(<=, type);
+		break;
+	}
+
+	case OpFOrdLessThanEqual:
+	{
+		auto result_type = ops[0];
+		auto id = ops[1];
+
+		if (expression_type(ops[2]).vecsize > 1)
+			emit_unrolled_binary_op(result_type, id, ops[2], ops[3], "<=", false, SPIRType::Unknown);
+		else
+			HLSL_BOP(<=);
+		break;
+	}
+
+	case OpFUnordLessThanEqual:
+	{
+		auto result_type = ops[0];
+		auto id = ops[1];
+
+		if (expression_type(ops[2]).vecsize > 1)
+			emit_unrolled_binary_op(result_type, id, ops[2], ops[3], ">", true, SPIRType::Unknown);
+		else
+			CompilerGLSL::emit_instruction(instruction);
+		break;
+	}
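+
+	// Illustrative note: the FUnord* cases above negate the opposite ordered comparison. For
+	// assumed float4 operands a and b, the unrolled FUnordLessThanEqual emits roughly
+	//   bool4 r = bool4(!(a.x > b.x), !(a.y > b.y), !(a.z > b.z), !(a.w > b.w));
+	// so a NaN in either operand makes the inner comparison false and the result true, which
+	// matches unordered semantics.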
+
+	case OpImageQueryLod:
+		emit_texture_op(instruction, false);
+		break;
+
+	case OpImageQuerySizeLod:
+	{
+		auto result_type = ops[0];
+		auto id = ops[1];
+
+		require_texture_query_variant(ops[2]);
+		auto dummy_samples_levels = join(get_fallback_name(id), "_dummy_parameter");
+		statement("uint ", dummy_samples_levels, ";");
+
+		auto expr = join("spvTextureSize(", to_non_uniform_aware_expression(ops[2]), ", ",
+		                 bitcast_expression(SPIRType::UInt, ops[3]), ", ", dummy_samples_levels, ")");
+
+		auto &restype = get<SPIRType>(ops[0]);
+		expr = bitcast_expression(restype, SPIRType::UInt, expr);
+		emit_op(result_type, id, expr, true);
+		break;
+	}
+
+	case OpImageQuerySize:
+	{
+		auto result_type = ops[0];
+		auto id = ops[1];
+
+		require_texture_query_variant(ops[2]);
+		bool uav = expression_type(ops[2]).image.sampled == 2;
+
+		if (const auto *var = maybe_get_backing_variable(ops[2]))
+			if (hlsl_options.nonwritable_uav_texture_as_srv && has_decoration(var->self, DecorationNonWritable))
+				uav = false;
+
+		auto dummy_samples_levels = join(get_fallback_name(id), "_dummy_parameter");
+		statement("uint ", dummy_samples_levels, ";");
+
+		string expr;
+		if (uav)
+			expr = join("spvImageSize(", to_non_uniform_aware_expression(ops[2]), ", ", dummy_samples_levels, ")");
+		else
+			expr = join("spvTextureSize(", to_non_uniform_aware_expression(ops[2]), ", 0u, ", dummy_samples_levels, ")");
+
+		auto &restype = get<SPIRType>(ops[0]);
+		expr = bitcast_expression(restype, SPIRType::UInt, expr);
+		emit_op(result_type, id, expr, true);
+		break;
+	}
+
+	case OpImageQuerySamples:
+	case OpImageQueryLevels:
+	{
+		auto result_type = ops[0];
+		auto id = ops[1];
+
+		require_texture_query_variant(ops[2]);
+		bool uav = expression_type(ops[2]).image.sampled == 2;
+		if (opcode == OpImageQueryLevels && uav)
+			SPIRV_CROSS_THROW("Cannot query levels for UAV images.");
+
+		if (const auto *var = maybe_get_backing_variable(ops[2]))
+			if (hlsl_options.nonwritable_uav_texture_as_srv && has_decoration(var->self, DecorationNonWritable))
+				uav = false;
+
+		// Keep it simple and do not emit special variants to make this look nicer ...
+		// This stuff is barely, if ever, used.
+		forced_temporaries.insert(id);
+		auto &type = get<SPIRType>(result_type);
+		statement(variable_decl(type, to_name(id)), ";");
+
+		if (uav)
+			statement("spvImageSize(", to_non_uniform_aware_expression(ops[2]), ", ", to_name(id), ");");
+		else
+			statement("spvTextureSize(", to_non_uniform_aware_expression(ops[2]), ", 0u, ", to_name(id), ");");
+
+		auto &restype = get<SPIRType>(ops[0]);
+		auto expr = bitcast_expression(restype, SPIRType::UInt, to_name(id));
+		set<SPIRExpression>(id, expr, result_type, true);
+		break;
+	}
+
+	case OpImageRead:
+	{
+		uint32_t result_type = ops[0];
+		uint32_t id = ops[1];
+		auto *var = maybe_get_backing_variable(ops[2]);
+		auto &type = expression_type(ops[2]);
+		bool subpass_data = type.image.dim == DimSubpassData;
+		bool pure = false;
+
+		string imgexpr;
+
+		if (subpass_data)
+		{
+			if (hlsl_options.shader_model < 40)
+				SPIRV_CROSS_THROW("Subpass loads are not supported in HLSL shader model 2/3.");
+
+			// Similar to GLSL, implement subpass loads using texelFetch.
+			if (type.image.ms)
+			{
+				uint32_t operands = ops[4];
+				if (operands != ImageOperandsSampleMask || instruction.length != 6)
+					SPIRV_CROSS_THROW("Multisampled image used in OpImageRead, but unexpected operand mask was used.");
+				uint32_t sample = ops[5];
+				imgexpr = join(to_non_uniform_aware_expression(ops[2]), ".Load(int2(gl_FragCoord.xy), ", to_expression(sample), ")");
+			}
+			else
+				imgexpr = join(to_non_uniform_aware_expression(ops[2]), ".Load(int3(int2(gl_FragCoord.xy), 0))");
+
+			pure = true;
+		}
+		else
+		{
+			imgexpr = join(to_non_uniform_aware_expression(ops[2]), "[", to_expression(ops[3]), "]");
+			// Unlike GLSL, where all images are effectively "vec4" and only the interpretation of the
+			// data changes, the underlying image type in HLSL depends on the image format.
+
+			bool force_srv =
+			    hlsl_options.nonwritable_uav_texture_as_srv && var && has_decoration(var->self, DecorationNonWritable);
+			pure = force_srv;
+
+			if (var && !subpass_data && !force_srv)
+				imgexpr = remap_swizzle(get<SPIRType>(result_type),
+				                        image_format_to_components(get<SPIRType>(var->basetype).image.format), imgexpr);
+		}
+
+		if (var)
+		{
+			bool forward = forced_temporaries.find(id) == end(forced_temporaries);
+			auto &e = emit_op(result_type, id, imgexpr, forward);
+
+			if (!pure)
+			{
+				e.loaded_from = var->self;
+				if (forward)
+					var->dependees.push_back(id);
+			}
+		}
+		else
+			emit_op(result_type, id, imgexpr, false);
+
+		inherit_expression_dependencies(id, ops[2]);
+		if (type.image.ms)
+			inherit_expression_dependencies(id, ops[5]);
+		break;
+	}
+
+	case OpImageWrite:
+	{
+		auto *var = maybe_get_backing_variable(ops[0]);
+
+		// Unlike GLSL, where all images are effectively "vec4" and only the interpretation of the
+		// data changes, the underlying image type in HLSL depends on the image format.
+		auto value_expr = to_expression(ops[2]);
+		if (var)
+		{
+			auto &type = get<SPIRType>(var->basetype);
+			auto narrowed_type = get<SPIRType>(type.image.type);
+			narrowed_type.vecsize = image_format_to_components(type.image.format);
+			value_expr = remap_swizzle(narrowed_type, expression_type(ops[2]).vecsize, value_expr);
+		}
+
+		statement(to_non_uniform_aware_expression(ops[0]), "[", to_expression(ops[1]), "] = ", value_expr, ";");
+		if (var && variable_storage_is_aliased(*var))
+			flush_all_aliased_variables();
+		break;
+	}
+
+	case OpImageTexelPointer:
+	{
+		uint32_t result_type = ops[0];
+		uint32_t id = ops[1];
+
+		auto expr = to_expression(ops[2]);
+		expr += join("[", to_expression(ops[3]), "]");
+		auto &e = set<SPIRExpression>(id, expr, result_type, true);
+
+		// When using the pointer, we need to know which variable it is actually loaded from.
+		auto *var = maybe_get_backing_variable(ops[2]);
+		e.loaded_from = var ? var->self : ID(0);
+		inherit_expression_dependencies(id, ops[3]);
+		break;
+	}
+
+	case OpAtomicFAddEXT:
+	case OpAtomicFMinEXT:
+	case OpAtomicFMaxEXT:
+		SPIRV_CROSS_THROW("Floating-point atomics are not supported in HLSL.");
+
+	case OpAtomicCompareExchange:
+	case OpAtomicExchange:
+	case OpAtomicISub:
+	case OpAtomicSMin:
+	case OpAtomicUMin:
+	case OpAtomicSMax:
+	case OpAtomicUMax:
+	case OpAtomicAnd:
+	case OpAtomicOr:
+	case OpAtomicXor:
+	case OpAtomicIAdd:
+	case OpAtomicIIncrement:
+	case OpAtomicIDecrement:
+	case OpAtomicLoad:
+	case OpAtomicStore:
+	{
+		emit_atomic(ops, instruction.length, opcode);
+		break;
+	}
+
+	case OpControlBarrier:
+	case OpMemoryBarrier:
+	{
+		uint32_t memory;
+		uint32_t semantics;
+
+		if (opcode == OpMemoryBarrier)
+		{
+			memory = evaluate_constant_u32(ops[0]);
+			semantics = evaluate_constant_u32(ops[1]);
+		}
+		else
+		{
+			memory = evaluate_constant_u32(ops[1]);
+			semantics = evaluate_constant_u32(ops[2]);
+		}
+
+		if (memory == ScopeSubgroup)
+		{
+			// No Wave-barriers in HLSL.
+			break;
+		}
+
+		// We only care about these flags; acquire/release and friends are not relevant to HLSL.
+		semantics = mask_relevant_memory_semantics(semantics);
+
+		if (opcode == OpMemoryBarrier)
+		{
+			// If this is a memory barrier and the next instruction is a control barrier, check whether
+			// that control barrier already covers what we need, so we can avoid a redundant barrier.
+			const Instruction *next = get_next_instruction_in_block(instruction);
+			if (next && next->op == OpControlBarrier)
+			{
+				auto *next_ops = stream(*next);
+				uint32_t next_memory = evaluate_constant_u32(next_ops[1]);
+				uint32_t next_semantics = evaluate_constant_u32(next_ops[2]);
+				next_semantics = mask_relevant_memory_semantics(next_semantics);
+
+				// There is no "just execution barrier" in HLSL.
+				// If the next instruction carries no memory semantics, we assume group shared memory is synced.
+				if (next_semantics == 0)
+					next_semantics = MemorySemanticsWorkgroupMemoryMask;
+
+				bool memory_scope_covered = false;
+				if (next_memory == memory)
+					memory_scope_covered = true;
+				else if (next_semantics == MemorySemanticsWorkgroupMemoryMask)
+				{
+					// If we only care about workgroup memory, either Device or Workgroup scope is fine;
+					// the scopes do not have to match.
+					if ((next_memory == ScopeDevice || next_memory == ScopeWorkgroup) &&
+					    (memory == ScopeDevice || memory == ScopeWorkgroup))
+					{
+						memory_scope_covered = true;
+					}
+				}
+				else if (memory == ScopeWorkgroup && next_memory == ScopeDevice)
+				{
+					// The control barrier has device scope, but the memory barrier just has workgroup scope.
+					memory_scope_covered = true;
+				}
+
+				// If we have the same memory scope, and all memory types are covered, we're good.
+				if (memory_scope_covered && (semantics & next_semantics) == semantics)
+					break;
+			}
+		}
+
+		// We are synchronizing some memory or syncing execution,
+		// so we cannot forward any loads beyond the memory barrier.
+		if (semantics || opcode == OpControlBarrier)
+		{
+			assert(current_emitting_block);
+			flush_control_dependent_expressions(current_emitting_block->self);
+			flush_all_active_variables();
+		}
+
+		if (opcode == OpControlBarrier)
+		{
+			// We cannot emit a pure execution barrier; when there are no memory semantics, pick the cheapest option.
+			if (semantics == MemorySemanticsWorkgroupMemoryMask || semantics == 0)
+				statement("GroupMemoryBarrierWithGroupSync();");
+			else if (semantics != 0 && (semantics & MemorySemanticsWorkgroupMemoryMask) == 0)
+				statement("DeviceMemoryBarrierWithGroupSync();");
+			else
+				statement("AllMemoryBarrierWithGroupSync();");
+		}
+		else
+		{
+			if (semantics == MemorySemanticsWorkgroupMemoryMask)
+				statement("GroupMemoryBarrier();");
+			else if (semantics != 0 && (semantics & MemorySemanticsWorkgroupMemoryMask) == 0)
+				statement("DeviceMemoryBarrier();");
+			else
+				statement("AllMemoryBarrier();");
+		}
+		break;
+	}
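+
+	// Illustrative mapping, assuming a GLSL-style compute shader as input: barrier(), i.e.
+	// OpControlBarrier with Workgroup scope and WorkgroupMemory semantics, lowers to
+	//   GroupMemoryBarrierWithGroupSync();
+	// while a standalone memoryBarrier(), i.e. OpMemoryBarrier with Device scope and full
+	// memory semantics, lowers to
+	//   AllMemoryBarrier();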
+
+	case OpBitFieldInsert:
+	{
+		if (!requires_bitfield_insert)
+		{
+			requires_bitfield_insert = true;
+			force_recompile();
+		}
+
+		auto expr = join("spvBitfieldInsert(", to_expression(ops[2]), ", ", to_expression(ops[3]), ", ",
+		                 to_expression(ops[4]), ", ", to_expression(ops[5]), ")");
+
+		bool forward =
+		    should_forward(ops[2]) && should_forward(ops[3]) && should_forward(ops[4]) && should_forward(ops[5]);
+
+		auto &restype = get<SPIRType>(ops[0]);
+		expr = bitcast_expression(restype, SPIRType::UInt, expr);
+		emit_op(ops[0], ops[1], expr, forward);
+		break;
+	}
+
+	case OpBitFieldSExtract:
+	case OpBitFieldUExtract:
+	{
+		if (!requires_bitfield_extract)
+		{
+			requires_bitfield_extract = true;
+			force_recompile();
+		}
+
+		if (opcode == OpBitFieldSExtract)
+			HLSL_TFOP(spvBitfieldSExtract);
+		else
+			HLSL_TFOP(spvBitfieldUExtract);
+		break;
+	}
+
+	case OpBitCount:
+	{
+		auto basetype = expression_type(ops[2]).basetype;
+		emit_unary_func_op_cast(ops[0], ops[1], ops[2], "countbits", basetype, basetype);
+		break;
+	}
+
+	case OpBitReverse:
+		HLSL_UFOP(reversebits);
+		break;
+
+	case OpArrayLength:
+	{
+		auto *var = maybe_get_backing_variable(ops[2]);
+		if (!var)
+			SPIRV_CROSS_THROW("Array length must point directly to an SSBO block.");
+
+		auto &type = get<SPIRType>(var->basetype);
+		if (!has_decoration(type.self, DecorationBlock) && !has_decoration(type.self, DecorationBufferBlock))
+			SPIRV_CROSS_THROW("Array length expression must point to a block type.");
+
+		// This must be 32-bit uint, so we're good to go.
+		emit_uninitialized_temporary_expression(ops[0], ops[1]);
+		statement(to_non_uniform_aware_expression(ops[2]), ".GetDimensions(", to_expression(ops[1]), ");");
+		uint32_t offset = type_struct_member_offset(type, ops[3]);
+		uint32_t stride = type_struct_member_array_stride(type, ops[3]);
+		statement(to_expression(ops[1]), " = (", to_expression(ops[1]), " - ", offset, ") / ", stride, ";");
+		break;
+	}
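+
+	// Illustrative sketch (names assumed): for an SSBO "ssbo" whose runtime array member sits at
+	// offset 16 with an array stride of 4, the statements above expand to roughly
+	//   uint _len; ssbo.GetDimensions(_len); _len = (_len - 16) / 4;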
+
+	case OpIsHelperInvocationEXT:
+		if (hlsl_options.shader_model < 50 || get_entry_point().model != ExecutionModelFragment)
+			SPIRV_CROSS_THROW("Helper Invocation input is only supported in PS 5.0 or higher.");
+		// Helper lane state with demote is volatile by nature.
+		// Do not forward this.
+		emit_op(ops[0], ops[1], "IsHelperLane()", false);
+		break;
+
+	case OpBeginInvocationInterlockEXT:
+	case OpEndInvocationInterlockEXT:
+		if (hlsl_options.shader_model < 51)
+			SPIRV_CROSS_THROW("Rasterizer order views require Shader Model 5.1.");
+		break; // Nothing to do in the body
+
+	case OpRayQueryInitializeKHR:
+	{
+		flush_variable_declaration(ops[0]);
+
+		std::string ray_desc_name = get_unique_identifier();
+		statement("RayDesc ", ray_desc_name, " = {", to_expression(ops[4]), ", ", to_expression(ops[5]), ", ",
+			to_expression(ops[6]), ", ", to_expression(ops[7]), "};");
+
+		statement(to_expression(ops[0]), ".TraceRayInline(", 
+			to_expression(ops[1]), ", ", // acc structure
+			to_expression(ops[2]), ", ", // ray flags
+			to_expression(ops[3]), ", ", // mask
+			ray_desc_name, ");"); // ray
+		break;
+	}
+	case OpRayQueryProceedKHR:
+	{
+		flush_variable_declaration(ops[0]);
+		emit_op(ops[0], ops[1], join(to_expression(ops[2]), ".Proceed()"), false);
+		break;
+	}
+	case OpRayQueryTerminateKHR:
+	{
+		flush_variable_declaration(ops[0]);
+		statement(to_expression(ops[0]), ".Abort();");
+		break;
+	}
+	case OpRayQueryGenerateIntersectionKHR:
+	{
+		flush_variable_declaration(ops[0]);
+		statement(to_expression(ops[0]), ".CommitProceduralPrimitiveHit(", to_expression(ops[1]), ");");
+		break;
+	}
+	case OpRayQueryConfirmIntersectionKHR:
+	{
+		flush_variable_declaration(ops[0]);
+		statement(to_expression(ops[0]), ".CommitNonOpaqueTriangleHit();");
+		break;
+	}
+	case OpRayQueryGetIntersectionTypeKHR:
+	{
+		emit_rayquery_function(".CommittedStatus()", ".CandidateType()", ops);
+		break;
+	}
+	case OpRayQueryGetIntersectionTKHR:
+	{
+		emit_rayquery_function(".CommittedRayT()", ".CandidateTriangleRayT()", ops);
+		break;
+	}
+	case OpRayQueryGetIntersectionInstanceCustomIndexKHR:
+	{
+		emit_rayquery_function(".CommittedInstanceID()", ".CandidateInstanceID()", ops);
+		break;
+	}
+	case OpRayQueryGetIntersectionInstanceIdKHR:
+	{
+		emit_rayquery_function(".CommittedInstanceIndex()", ".CandidateInstanceIndex()", ops);
+		break;
+	}
+	case OpRayQueryGetIntersectionInstanceShaderBindingTableRecordOffsetKHR:
+	{
+		emit_rayquery_function(".CommittedInstanceContributionToHitGroupIndex()", 
+			".CandidateInstanceContributionToHitGroupIndex()", ops);
+		break;
+	}
+	case OpRayQueryGetIntersectionGeometryIndexKHR:
+	{
+		emit_rayquery_function(".CommittedGeometryIndex()",
+				".CandidateGeometryIndex()", ops);
+		break;
+	}
+	case OpRayQueryGetIntersectionPrimitiveIndexKHR:
+	{
+		emit_rayquery_function(".CommittedPrimitiveIndex()", ".CandidatePrimitiveIndex()", ops);
+		break;
+	}
+	case OpRayQueryGetIntersectionBarycentricsKHR:
+	{
+		emit_rayquery_function(".CommittedTriangleBarycentrics()", ".CandidateTriangleBarycentrics()", ops);
+		break;
+	}
+	case OpRayQueryGetIntersectionFrontFaceKHR:
+	{
+		emit_rayquery_function(".CommittedTriangleFrontFace()", ".CandidateTriangleFrontFace()", ops);
+		break;
+	}
+	case OpRayQueryGetIntersectionCandidateAABBOpaqueKHR:
+	{
+		flush_variable_declaration(ops[0]);
+		emit_op(ops[0], ops[1], join(to_expression(ops[2]), ".CandidateProceduralPrimitiveNonOpaque()"), false);
+		break;
+	}
+	case OpRayQueryGetIntersectionObjectRayDirectionKHR:
+	{
+		emit_rayquery_function(".CommittedObjectRayDirection()", ".CandidateObjectRayDirection()", ops);
+		break;
+	}
+	case OpRayQueryGetIntersectionObjectRayOriginKHR:
+	{
+		flush_variable_declaration(ops[0]);
+		emit_rayquery_function(".CommittedObjectRayOrigin()", ".CandidateObjectRayOrigin()", ops);
+		break;
+	}
+	case OpRayQueryGetIntersectionObjectToWorldKHR:
+	{
+		emit_rayquery_function(".CommittedObjectToWorld4x3()", ".CandidateObjectToWorld4x3()", ops);
+		break;
+	}
+	case OpRayQueryGetIntersectionWorldToObjectKHR:
+	{
+		emit_rayquery_function(".CommittedWorldToObject4x3()", ".CandidateWorldToObject4x3()", ops);
+		break;
+	}
+	case OpRayQueryGetRayFlagsKHR:
+	{
+		flush_variable_declaration(ops[0]);
+		emit_op(ops[0], ops[1], join(to_expression(ops[2]), ".RayFlags()"), false);
+		break;
+	}
+	case OpRayQueryGetRayTMinKHR:
+	{
+		flush_variable_declaration(ops[0]);
+		emit_op(ops[0], ops[1], join(to_expression(ops[2]), ".RayTMin()"), false);
+		break;
+	}
+	case OpRayQueryGetWorldRayOriginKHR:
+	{
+		flush_variable_declaration(ops[0]);
+		emit_op(ops[0], ops[1], join(to_expression(ops[2]), ".WorldRayOrigin()"), false);
+		break;
+	}
+	case OpRayQueryGetWorldRayDirectionKHR:
+	{
+		flush_variable_declaration(ops[0]);
+		emit_op(ops[0], ops[1], join(to_expression(ops[2]), ".WorldRayDirection()"), false);
+		break;
+	}
+	case OpSetMeshOutputsEXT:
+	{
+		statement("SetMeshOutputCounts(", to_unpacked_expression(ops[0]), ", ", to_unpacked_expression(ops[1]), ");");
+		break;
+	}
+	default:
+		CompilerGLSL::emit_instruction(instruction);
+		break;
+	}
+}
+
+void CompilerHLSL::require_texture_query_variant(uint32_t var_id)
+{
+	if (const auto *var = maybe_get_backing_variable(var_id))
+		var_id = var->self;
+
+	auto &type = expression_type(var_id);
+	bool uav = type.image.sampled == 2;
+	if (hlsl_options.nonwritable_uav_texture_as_srv && has_decoration(var_id, DecorationNonWritable))
+		uav = false;
+
+	uint32_t bit = 0;
+	switch (type.image.dim)
+	{
+	case Dim1D:
+		bit = type.image.arrayed ? Query1DArray : Query1D;
+		break;
+
+	case Dim2D:
+		if (type.image.ms)
+			bit = type.image.arrayed ? Query2DMSArray : Query2DMS;
+		else
+			bit = type.image.arrayed ? Query2DArray : Query2D;
+		break;
+
+	case Dim3D:
+		bit = Query3D;
+		break;
+
+	case DimCube:
+		bit = type.image.arrayed ? QueryCubeArray : QueryCube;
+		break;
+
+	case DimBuffer:
+		bit = QueryBuffer;
+		break;
+
+	default:
+		SPIRV_CROSS_THROW("Unsupported query type.");
+	}
+
+	switch (get<SPIRType>(type.image.type).basetype)
+	{
+	case SPIRType::Float:
+		bit += QueryTypeFloat;
+		break;
+
+	case SPIRType::Int:
+		bit += QueryTypeInt;
+		break;
+
+	case SPIRType::UInt:
+		bit += QueryTypeUInt;
+		break;
+
+	default:
+		SPIRV_CROSS_THROW("Unsupported query type.");
+	}
+
+	auto norm_state = image_format_to_normalized_state(type.image.format);
+	auto &variant = uav ? required_texture_size_variants
+	                          .uav[uint32_t(norm_state)][image_format_to_components(type.image.format) - 1] :
+	                      required_texture_size_variants.srv;
+
+	uint64_t mask = 1ull << bit;
+	if ((variant & mask) == 0)
+	{
+		force_recompile();
+		variant |= mask;
+	}
+}
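+
+// Illustrative note: for a non-multisampled, arrayed 2D float texture the code above computes
+// bit = Query2DArray + QueryTypeFloat (3 + 0), so the variant mask bit (1ull << 3) is set and a
+// recompile is forced the first time this query variant is needed.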
+
+void CompilerHLSL::set_root_constant_layouts(std::vector<RootConstants> layout)
+{
+	root_constants_layout = std::move(layout);
+}
+
+void CompilerHLSL::add_vertex_attribute_remap(const HLSLVertexAttributeRemap &vertex_attributes)
+{
+	remap_vertex_attributes.push_back(vertex_attributes);
+}
+
+VariableID CompilerHLSL::remap_num_workgroups_builtin()
+{
+	update_active_builtins();
+
+	if (!active_input_builtins.get(BuiltInNumWorkgroups))
+		return 0;
+
+	// Create a new, fake UBO.
+	uint32_t offset = ir.increase_bound_by(4);
+
+	uint32_t uint_type_id = offset;
+	uint32_t block_type_id = offset + 1;
+	uint32_t block_pointer_type_id = offset + 2;
+	uint32_t variable_id = offset + 3;
+
+	SPIRType uint_type;
+	uint_type.basetype = SPIRType::UInt;
+	uint_type.width = 32;
+	uint_type.vecsize = 3;
+	uint_type.columns = 1;
+	set<SPIRType>(uint_type_id, uint_type);
+
+	SPIRType block_type;
+	block_type.basetype = SPIRType::Struct;
+	block_type.member_types.push_back(uint_type_id);
+	set<SPIRType>(block_type_id, block_type);
+	set_decoration(block_type_id, DecorationBlock);
+	set_member_name(block_type_id, 0, "count");
+	set_member_decoration(block_type_id, 0, DecorationOffset, 0);
+
+	SPIRType block_pointer_type = block_type;
+	block_pointer_type.pointer = true;
+	block_pointer_type.storage = StorageClassUniform;
+	block_pointer_type.parent_type = block_type_id;
+	auto &ptr_type = set<SPIRType>(block_pointer_type_id, block_pointer_type);
+
+	// Preserve self.
+	ptr_type.self = block_type_id;
+
+	set<SPIRVariable>(variable_id, block_pointer_type_id, StorageClassUniform);
+	ir.meta[variable_id].decoration.alias = "SPIRV_Cross_NumWorkgroups";
+
+	num_workgroups_builtin = variable_id;
+	get_entry_point().interface_variables.push_back(num_workgroups_builtin);
+	return variable_id;
+}
+
+void CompilerHLSL::set_resource_binding_flags(HLSLBindingFlags flags)
+{
+	resource_binding_flags = flags;
+}
+
+void CompilerHLSL::validate_shader_model()
+{
+	// Check for nonuniform qualifier.
+	// Instead of looping over all decorations to find this, just look at capabilities.
+	for (auto &cap : ir.declared_capabilities)
+	{
+		switch (cap)
+		{
+		case CapabilityShaderNonUniformEXT:
+		case CapabilityRuntimeDescriptorArrayEXT:
+			if (hlsl_options.shader_model < 51)
+				SPIRV_CROSS_THROW(
+				    "Shader model 5.1 or higher is required to use bindless resources or NonUniformResourceIndex.");
+			break;
+
+		case CapabilityVariablePointers:
+		case CapabilityVariablePointersStorageBuffer:
+			SPIRV_CROSS_THROW("VariablePointers capability is not supported in HLSL.");
+
+		default:
+			break;
+		}
+	}
+
+	if (ir.addressing_model != AddressingModelLogical)
+		SPIRV_CROSS_THROW("Only Logical addressing model can be used with HLSL.");
+
+	if (hlsl_options.enable_16bit_types && hlsl_options.shader_model < 62)
+		SPIRV_CROSS_THROW("Need at least shader model 6.2 when enabling native 16-bit type support.");
+}
+
+string CompilerHLSL::compile()
+{
+	ir.fixup_reserved_names();
+
+	// Do not deal with ES-isms like precision, older extensions and such.
+	options.es = false;
+	options.version = 450;
+	options.vulkan_semantics = true;
+	backend.float_literal_suffix = true;
+	backend.double_literal_suffix = false;
+	backend.long_long_literal_suffix = true;
+	backend.uint32_t_literal_suffix = true;
+	backend.int16_t_literal_suffix = "";
+	backend.uint16_t_literal_suffix = "u";
+	backend.basic_int_type = "int";
+	backend.basic_uint_type = "uint";
+	backend.demote_literal = "discard";
+	backend.boolean_mix_function = "";
+	backend.swizzle_is_function = false;
+	backend.shared_is_implied = true;
+	backend.unsized_array_supported = true;
+	backend.explicit_struct_type = false;
+	backend.use_initializer_list = true;
+	backend.use_constructor_splatting = false;
+	backend.can_swizzle_scalar = true;
+	backend.can_declare_struct_inline = false;
+	backend.can_declare_arrays_inline = false;
+	backend.can_return_array = false;
+	backend.nonuniform_qualifier = "NonUniformResourceIndex";
+	backend.support_case_fallthrough = false;
+	backend.force_merged_mesh_block = get_execution_model() == ExecutionModelMeshEXT;
+	backend.force_gl_in_out_block = backend.force_merged_mesh_block;
+
+	// SM 4.1 does not support precise for some reason.
+	backend.support_precise_qualifier = hlsl_options.shader_model >= 50 || hlsl_options.shader_model == 40;
+
+	fixup_anonymous_struct_names();
+	fixup_type_alias();
+	reorder_type_alias();
+	build_function_control_flow_graphs_and_analyze();
+	validate_shader_model();
+	update_active_builtins();
+	analyze_image_and_sampler_usage();
+	analyze_interlocked_resource_usage();
+	if (get_execution_model() == ExecutionModelMeshEXT)
+		analyze_meshlet_writes();
+
+	// Subpass input needs SV_Position.
+	if (need_subpass_input)
+		active_input_builtins.set(BuiltInFragCoord);
+
+	uint32_t pass_count = 0;
+	do
+	{
+		reset(pass_count);
+
+		// Move constructor for this type is broken on GCC 4.9 ...
+		buffer.reset();
+
+		emit_header();
+		emit_resources();
+
+		emit_function(get<SPIRFunction>(ir.default_entry_point), Bitset());
+		emit_hlsl_entry_point();
+
+		pass_count++;
+	} while (is_forcing_recompilation());
+
+	// Entry point in HLSL is always main() for the time being.
+	get_entry_point().name = "main";
+
+	return buffer.str();
+}
+
+void CompilerHLSL::emit_block_hints(const SPIRBlock &block)
+{
+	switch (block.hint)
+	{
+	case SPIRBlock::HintFlatten:
+		statement("[flatten]");
+		break;
+	case SPIRBlock::HintDontFlatten:
+		statement("[branch]");
+		break;
+	case SPIRBlock::HintUnroll:
+		statement("[unroll]");
+		break;
+	case SPIRBlock::HintDontUnroll:
+		statement("[loop]");
+		break;
+	default:
+		break;
+	}
+}
+
+string CompilerHLSL::get_unique_identifier()
+{
+	return join("_", unique_identifier_count++, "ident");
+}
+
+void CompilerHLSL::add_hlsl_resource_binding(const HLSLResourceBinding &binding)
+{
+	StageSetBinding tuple = { binding.stage, binding.desc_set, binding.binding };
+	resource_bindings[tuple] = { binding, false };
+}
+
+bool CompilerHLSL::is_hlsl_resource_binding_used(ExecutionModel model, uint32_t desc_set, uint32_t binding) const
+{
+	StageSetBinding tuple = { model, desc_set, binding };
+	auto itr = resource_bindings.find(tuple);
+	return itr != end(resource_bindings) && itr->second.second;
+}
+
+CompilerHLSL::BitcastType CompilerHLSL::get_bitcast_type(uint32_t result_type, uint32_t op0)
+{
+	auto &rslt_type = get<SPIRType>(result_type);
+	auto &expr_type = expression_type(op0);
+
+	if (rslt_type.basetype == SPIRType::BaseType::UInt64 && expr_type.basetype == SPIRType::BaseType::UInt &&
+	    expr_type.vecsize == 2)
+		return BitcastType::TypePackUint2x32;
+	else if (rslt_type.basetype == SPIRType::BaseType::UInt && rslt_type.vecsize == 2 &&
+	         expr_type.basetype == SPIRType::BaseType::UInt64)
+		return BitcastType::TypeUnpackUint64;
+
+	return BitcastType::TypeNormal;
+}
+
+bool CompilerHLSL::is_hlsl_force_storage_buffer_as_uav(ID id) const
+{
+	if (hlsl_options.force_storage_buffer_as_uav)
+	{
+		return true;
+	}
+
+	const uint32_t desc_set = get_decoration(id, spv::DecorationDescriptorSet);
+	const uint32_t binding = get_decoration(id, spv::DecorationBinding);
+
+	return (force_uav_buffer_bindings.find({ desc_set, binding }) != force_uav_buffer_bindings.end());
+}
+
+void CompilerHLSL::set_hlsl_force_storage_buffer_as_uav(uint32_t desc_set, uint32_t binding)
+{
+	SetBindingPair pair = { desc_set, binding };
+	force_uav_buffer_bindings.insert(pair);
+}
+
+bool CompilerHLSL::builtin_translates_to_nonarray(spv::BuiltIn builtin) const
+{
+	return (builtin == BuiltInSampleMask);
+}
+
+bool CompilerHLSL::is_user_type_structured(uint32_t id) const
+{
+	if (hlsl_options.preserve_structured_buffers)
+	{
+		// Compare only the left-hand side of the string, since these user types can carry additional
+		// metadata such as their subtype, e.g. "structuredbuffer:int".
+		const std::string &user_type = get_decoration_string(id, DecorationUserTypeGOOGLE);
+		return user_type.compare(0, 16, "structuredbuffer") == 0 ||
+		       user_type.compare(0, 18, "rwstructuredbuffer") == 0 ||
+		       user_type.compare(0, 33, "rasterizerorderedstructuredbuffer") == 0;
+	}
+	return false;
+}

+ 415 - 0
ThirdParty/SpirvCross/spirv_hlsl.hpp

@@ -0,0 +1,415 @@
+/*
+ * Copyright 2016-2021 Robert Konrad
+ * SPDX-License-Identifier: Apache-2.0 OR MIT
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * At your option, you may choose to accept this material under either:
+ *  1. The Apache License, Version 2.0, found at <http://www.apache.org/licenses/LICENSE-2.0>, or
+ *  2. The MIT License, found at <http://opensource.org/licenses/MIT>.
+ */
+
+#ifndef SPIRV_HLSL_HPP
+#define SPIRV_HLSL_HPP
+
+#include "spirv_glsl.hpp"
+#include <utility>
+
+namespace SPIRV_CROSS_NAMESPACE
+{
+// Interface which remaps vertex inputs to a fixed semantic name to make linking easier.
+struct HLSLVertexAttributeRemap
+{
+	uint32_t location;
+	std::string semantic;
+};
+// Specifying a root constant (d3d12) or push constant range (vulkan).
+//
+// `start` and `end` denotes the range of the root constant in bytes.
+// Both values need to be multiple of 4.
+struct RootConstants
+{
+	uint32_t start;
+	uint32_t end;
+
+	uint32_t binding;
+	uint32_t space;
+};
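+
+// Example (illustrative values): a 16-byte push constant range starting at byte 0, surfaced as
+// register(b0, space0), could be described as
+//   RootConstants rc = { /* start */ 0, /* end */ 16, /* binding */ 0, /* space */ 0 };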
+
+// For finer control, decorations may be removed from specific resources instead with unset_decoration().
+enum HLSLBindingFlagBits
+{
+	HLSL_BINDING_AUTO_NONE_BIT = 0,
+
+	// Push constant (root constant) resources will be declared as CBVs (b-space) without a register() declaration.
+	// A register will be automatically assigned by the D3D compiler, but must therefore be reflected in D3D-land.
+	// Push constants do not normally have a DecorationBinding set, but if they do, this can be used to ignore it.
+	HLSL_BINDING_AUTO_PUSH_CONSTANT_BIT = 1 << 0,
+
+	// cbuffer resources will be declared as CBVs (b-space) without a register() declaration.
+	// A register will be automatically assigned, but must be reflected in D3D-land.
+	HLSL_BINDING_AUTO_CBV_BIT = 1 << 1,
+
+	// All SRVs (t-space) will be declared without a register() declaration.
+	HLSL_BINDING_AUTO_SRV_BIT = 1 << 2,
+
+	// All UAVs (u-space) will be declared without a register() declaration.
+	HLSL_BINDING_AUTO_UAV_BIT = 1 << 3,
+
+	// All samplers (s-space) will be declared without a register() declaration.
+	HLSL_BINDING_AUTO_SAMPLER_BIT = 1 << 4,
+
+	// No resources will be declared with register().
+	HLSL_BINDING_AUTO_ALL = 0x7fffffff
+};
+using HLSLBindingFlags = uint32_t;
+
+// By matching stage, desc_set and binding for a SPIR-V resource,
+// register bindings are set based on whether the HLSL resource is a
+// CBV, UAV, SRV or Sampler. A single binding in SPIR-V might map to multiple
+// resource types, e.g. a COMBINED_IMAGE_SAMPLER, in which case both the SRV and Sampler bindings are used.
+// On SM 5.0 and lower, register_space is ignored.
+//
+// To remap a push constant block which does not have any desc_set/binding associated with it,
+// use ResourceBindingPushConstant{DescriptorSet,Binding} as values for desc_set/binding.
+// For deeper control of push constants, set_root_constant_layouts() can be used instead.
+struct HLSLResourceBinding
+{
+	spv::ExecutionModel stage = spv::ExecutionModelMax;
+	uint32_t desc_set = 0;
+	uint32_t binding = 0;
+
+	struct Binding
+	{
+		uint32_t register_space = 0;
+		uint32_t register_binding = 0;
+	} cbv, uav, srv, sampler;
+};
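+
+// Example (illustrative): remap set = 1, binding = 2 of a fragment-stage combined image sampler
+// so that the SRV lands in t4/space1 and the sampler in s4/space1, assuming "hlsl" is a
+// CompilerHLSL instance:
+//   HLSLResourceBinding b;
+//   b.stage = spv::ExecutionModelFragment;
+//   b.desc_set = 1;
+//   b.binding = 2;
+//   b.srv = { 1, 4 };
+//   b.sampler = { 1, 4 };
+//   hlsl.add_hlsl_resource_binding(b);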
+
+enum HLSLAuxBinding
+{
+	HLSL_AUX_BINDING_BASE_VERTEX_INSTANCE = 0
+};
+
+class CompilerHLSL : public CompilerGLSL
+{
+public:
+	struct Options
+	{
+		uint32_t shader_model = 30; // TODO: map ps_4_0_level_9_0,... somehow
+
+		// Allows the PointSize builtin in SM 4.0+, and ignores it, as PointSize is not supported in SM 4+.
+		bool point_size_compat = false;
+
+		// Allows the PointCoord builtin, returns float2(0.5, 0.5), as PointCoord is not supported in HLSL.
+		bool point_coord_compat = false;
+
+		// If true, the backend will assume that VertexIndex and InstanceIndex will need to apply
+		// a base offset, and you will need to fill in a cbuffer with offsets.
+		// Set to false if you know you will never use base instance or base vertex
+		// functionality as it might remove an internal cbuffer.
+		bool support_nonzero_base_vertex_base_instance = false;
+
+		// Forces a storage buffer to always be declared as UAV, even if the readonly decoration is used.
+		// By default, a readonly storage buffer will be declared as ByteAddressBuffer (SRV) instead.
+		// Alternatively, use set_hlsl_force_storage_buffer_as_uav to specify individually.
+		bool force_storage_buffer_as_uav = false;
+
+		// Forces any storage image type marked as NonWritable to be considered an SRV instead.
+		// For this to work with function call parameters, NonWritable must be considered to be part of the type system
+		// so that NonWritable image arguments are also translated to Texture rather than RWTexture.
+		bool nonwritable_uav_texture_as_srv = false;
+
+		// Enables native 16-bit types. Needs SM 6.2.
+		// Uses half/int16_t/uint16_t instead of min16* types.
+		// Also adds support for 16-bit load-store from (RW)ByteAddressBuffer.
+		bool enable_16bit_types = false;
+
+		// If matrices are used as IO variables, flatten the attribute declaration to use
+		// TEXCOORD{N,N+1,N+2,...} rather than TEXCOORDN_{0,1,2,3}.
+		// If add_vertex_attribute_remap is used and this feature is used,
+		// the semantic name will be queried once per active location.
+		bool flatten_matrix_vertex_input_semantics = false;
+
+		// Rather than emitting main() for the entry point, use the name in SPIR-V.
+		bool use_entry_point_name = false;
+
+		// Preserve (RW)StructuredBuffer types if the input source was HLSL.
+		// This relies on UserTypeGOOGLE to encode the buffer type as either "structuredbuffer" or "rwstructuredbuffer",
+		// optionally extended with a subtype, e.g. "structuredbuffer:int".
+		bool preserve_structured_buffers = false;
+	};
+
+	explicit CompilerHLSL(std::vector<uint32_t> spirv_)
+	    : CompilerGLSL(std::move(spirv_))
+	{
+	}
+
+	CompilerHLSL(const uint32_t *ir_, size_t size)
+	    : CompilerGLSL(ir_, size)
+	{
+	}
+
+	explicit CompilerHLSL(const ParsedIR &ir_)
+	    : CompilerGLSL(ir_)
+	{
+	}
+
+	explicit CompilerHLSL(ParsedIR &&ir_)
+	    : CompilerGLSL(std::move(ir_))
+	{
+	}
+
+	const Options &get_hlsl_options() const
+	{
+		return hlsl_options;
+	}
+
+	void set_hlsl_options(const Options &opts)
+	{
+		hlsl_options = opts;
+	}
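+
+	// Example (illustrative): target SM 6.2 with native 16-bit types enabled, assuming "hlsl" is
+	// a CompilerHLSL instance:
+	//   CompilerHLSL::Options opts = hlsl.get_hlsl_options();
+	//   opts.shader_model = 62;
+	//   opts.enable_16bit_types = true;
+	//   hlsl.set_hlsl_options(opts);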
+
+	// Optionally specify a custom root constant layout.
+	//
+	// Push constants ranges will be split up according to the
+	// layout specified.
+	void set_root_constant_layouts(std::vector<RootConstants> layout);
+
+	// Compiles and remaps vertex attributes at specific locations to a fixed semantic.
+	// The default is TEXCOORD# where # denotes location.
+	// Matrices are unrolled to vectors with notation ${SEMANTIC}_#, where # denotes row.
+	// $SEMANTIC is either TEXCOORD# or a semantic name specified here.
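+	// Example (illustrative): map location 0 to the POSITION semantic before compiling:
+	//   hlsl.add_vertex_attribute_remap({ 0, "POSITION" });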
+	void add_vertex_attribute_remap(const HLSLVertexAttributeRemap &vertex_attributes);
+	std::string compile() override;
+
+	// This is a special HLSL workaround for the NumWorkGroups builtin.
+	// This does not exist in HLSL, so the calling application must create a dummy cbuffer in
+	// which the application will store this builtin.
+	// The cbuffer layout will be:
+	// cbuffer SPIRV_Cross_NumWorkgroups : register(b#, space#) { uint3 SPIRV_Cross_NumWorkgroups_count; };
+	// This must be called before compile().
+	// The function returns 0 if the NumWorkGroups builtin is not statically used in the shader from the current entry point.
+	// If non-zero, this returns the variable ID of a cbuffer which corresponds to
+	// the cbuffer declared above. By default, no binding or descriptor set decoration is set,
+	// so the calling application should declare explicit bindings on this ID before calling compile().
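+	// Example (illustrative usage), assuming "hlsl" is the compiler instance:
+	//   VariableID id = hlsl.remap_num_workgroups_builtin();
+	//   if (id != 0)
+	//   {
+	//       hlsl.set_decoration(id, spv::DecorationDescriptorSet, 0);
+	//       hlsl.set_decoration(id, spv::DecorationBinding, 0);
+	//   }
+	//   std::string hlsl_source = hlsl.compile();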
+	VariableID remap_num_workgroups_builtin();
+
+	// Controls how resource bindings are declared in the output HLSL.
+	void set_resource_binding_flags(HLSLBindingFlags flags);
+
+	// The given resource binding indicates which HLSL register binding (CBV, SRV, UAV or sampler)
+	// to use for a particular SPIR-V descriptor set and binding.
+	// If resource bindings are provided, is_hlsl_resource_binding_used() will return true
+	// after calling ::compile() if the set/binding combination was used by the HLSL code.
+	void add_hlsl_resource_binding(const HLSLResourceBinding &resource);
+	bool is_hlsl_resource_binding_used(spv::ExecutionModel model, uint32_t set, uint32_t binding) const;
+
+	// Controls which storage buffer bindings will be forced to be declared as UAVs.
+	void set_hlsl_force_storage_buffer_as_uav(uint32_t desc_set, uint32_t binding);
+
+	// By default, these auxiliary buffers are not assigned a specific binding.
+	void set_hlsl_aux_buffer_binding(HLSLAuxBinding binding, uint32_t register_index, uint32_t register_space);
+	void unset_hlsl_aux_buffer_binding(HLSLAuxBinding binding);
+	bool is_hlsl_aux_buffer_binding_used(HLSLAuxBinding binding) const;
+
+private:
+	std::string type_to_glsl(const SPIRType &type, uint32_t id = 0) override;
+	std::string image_type_hlsl(const SPIRType &type, uint32_t id);
+	std::string image_type_hlsl_modern(const SPIRType &type, uint32_t id);
+	std::string image_type_hlsl_legacy(const SPIRType &type, uint32_t id);
+	void emit_function_prototype(SPIRFunction &func, const Bitset &return_flags) override;
+	void emit_hlsl_entry_point();
+	void emit_header() override;
+	void emit_resources();
+	void emit_interface_block_globally(const SPIRVariable &type);
+	void emit_interface_block_in_struct(const SPIRVariable &var, std::unordered_set<uint32_t> &active_locations);
+	void emit_interface_block_member_in_struct(const SPIRVariable &var, uint32_t member_index, uint32_t location,
+	                                           std::unordered_set<uint32_t> &active_locations);
+	void emit_builtin_inputs_in_struct();
+	void emit_builtin_outputs_in_struct();
+	void emit_builtin_primitive_outputs_in_struct();
+	void emit_texture_op(const Instruction &i, bool sparse) override;
+	void emit_instruction(const Instruction &instruction) override;
+	void emit_glsl_op(uint32_t result_type, uint32_t result_id, uint32_t op, const uint32_t *args,
+	                  uint32_t count) override;
+	void emit_buffer_block(const SPIRVariable &type) override;
+	void emit_push_constant_block(const SPIRVariable &var) override;
+	void emit_uniform(const SPIRVariable &var) override;
+	void emit_modern_uniform(const SPIRVariable &var);
+	void emit_legacy_uniform(const SPIRVariable &var);
+	void emit_specialization_constants_and_structs();
+	void emit_composite_constants();
+	void emit_fixup() override;
+	std::string builtin_to_glsl(spv::BuiltIn builtin, spv::StorageClass storage) override;
+	std::string layout_for_member(const SPIRType &type, uint32_t index) override;
+	std::string to_interpolation_qualifiers(const Bitset &flags) override;
+	std::string bitcast_glsl_op(const SPIRType &result_type, const SPIRType &argument_type) override;
+	bool emit_complex_bitcast(uint32_t result_type, uint32_t id, uint32_t op0) override;
+	std::string to_func_call_arg(const SPIRFunction::Parameter &arg, uint32_t id) override;
+	std::string to_sampler_expression(uint32_t id);
+	std::string to_resource_binding(const SPIRVariable &var);
+	std::string to_resource_binding_sampler(const SPIRVariable &var);
+	std::string to_resource_register(HLSLBindingFlagBits flag, char space, uint32_t binding, uint32_t set);
+	std::string to_initializer_expression(const SPIRVariable &var) override;
+	void emit_sampled_image_op(uint32_t result_type, uint32_t result_id, uint32_t image_id, uint32_t samp_id) override;
+	void emit_access_chain(const Instruction &instruction);
+	void emit_load(const Instruction &instruction);
+	void read_access_chain(std::string *expr, const std::string &lhs, const SPIRAccessChain &chain);
+	void read_access_chain_struct(const std::string &lhs, const SPIRAccessChain &chain);
+	void read_access_chain_array(const std::string &lhs, const SPIRAccessChain &chain);
+	void write_access_chain(const SPIRAccessChain &chain, uint32_t value, const SmallVector<uint32_t> &composite_chain);
+	void write_access_chain_struct(const SPIRAccessChain &chain, uint32_t value,
+	                               const SmallVector<uint32_t> &composite_chain);
+	void write_access_chain_array(const SPIRAccessChain &chain, uint32_t value,
+	                              const SmallVector<uint32_t> &composite_chain);
+	std::string write_access_chain_value(uint32_t value, const SmallVector<uint32_t> &composite_chain, bool enclose);
+	void emit_store(const Instruction &instruction);
+	void emit_atomic(const uint32_t *ops, uint32_t length, spv::Op op);
+	void emit_subgroup_op(const Instruction &i) override;
+	void emit_block_hints(const SPIRBlock &block) override;
+
+	void emit_struct_member(const SPIRType &type, uint32_t member_type_id, uint32_t index, const std::string &qualifier,
+	                        uint32_t base_offset = 0) override;
+	void emit_rayquery_function(const char *committed, const char *candidate, const uint32_t *ops);
+	void emit_mesh_tasks(SPIRBlock &block) override;
+
+	const char *to_storage_qualifiers_glsl(const SPIRVariable &var) override;
+	void replace_illegal_names() override;
+
+	bool is_hlsl_force_storage_buffer_as_uav(ID id) const;
+
+	Options hlsl_options;
+
+	// TODO: Refactor this to be more similar to MSL, maybe have some common system in place?
+	bool requires_op_fmod = false;
+	bool requires_fp16_packing = false;
+	bool requires_uint2_packing = false;
+	bool requires_explicit_fp16_packing = false;
+	bool requires_unorm8_packing = false;
+	bool requires_snorm8_packing = false;
+	bool requires_unorm16_packing = false;
+	bool requires_snorm16_packing = false;
+	bool requires_bitfield_insert = false;
+	bool requires_bitfield_extract = false;
+	bool requires_inverse_2x2 = false;
+	bool requires_inverse_3x3 = false;
+	bool requires_inverse_4x4 = false;
+	bool requires_scalar_reflect = false;
+	bool requires_scalar_refract = false;
+	bool requires_scalar_faceforward = false;
+
+	struct TextureSizeVariants
+	{
+		// MSVC 2013 workaround.
+		TextureSizeVariants()
+		{
+			srv = 0;
+			for (auto &unorm : uav)
+				for (auto &u : unorm)
+					u = 0;
+		}
+		uint64_t srv;
+		uint64_t uav[3][4];
+	} required_texture_size_variants;
+
+	void require_texture_query_variant(uint32_t var_id);
+	void emit_texture_size_variants(uint64_t variant_mask, const char *vecsize_qualifier, bool uav,
+	                                const char *type_qualifier);
+
+	enum TextureQueryVariantDim
+	{
+		Query1D = 0,
+		Query1DArray,
+		Query2D,
+		Query2DArray,
+		Query3D,
+		QueryBuffer,
+		QueryCube,
+		QueryCubeArray,
+		Query2DMS,
+		Query2DMSArray,
+		QueryDimCount
+	};
+
+	enum TextureQueryVariantType
+	{
+		QueryTypeFloat = 0,
+		QueryTypeInt = 16,
+		QueryTypeUInt = 32,
+		QueryTypeCount = 3
+	};
+
+	enum BitcastType
+	{
+		TypeNormal,
+		TypePackUint2x32,
+		TypeUnpackUint64
+	};
+
+	void analyze_meshlet_writes();
+	void analyze_meshlet_writes(uint32_t func_id, uint32_t id_per_vertex, uint32_t id_per_primitive,
+	                            std::unordered_set<uint32_t> &processed_func_ids);
+
+	BitcastType get_bitcast_type(uint32_t result_type, uint32_t op0);
+
+	void emit_builtin_variables();
+	bool require_output = false;
+	bool require_input = false;
+	SmallVector<HLSLVertexAttributeRemap> remap_vertex_attributes;
+
+	uint32_t type_to_consumed_locations(const SPIRType &type) const;
+
+	std::string to_semantic(uint32_t location, spv::ExecutionModel em, spv::StorageClass sc);
+
+	uint32_t num_workgroups_builtin = 0;
+	HLSLBindingFlags resource_binding_flags = 0;
+
+	// Custom root constant layout, which should be emitted
+	// when translating push constant ranges.
+	std::vector<RootConstants> root_constants_layout;
+
+	void validate_shader_model();
+
+	std::string get_unique_identifier();
+	uint32_t unique_identifier_count = 0;
+
+	std::unordered_map<StageSetBinding, std::pair<HLSLResourceBinding, bool>, InternalHasher> resource_bindings;
+	void remap_hlsl_resource_binding(HLSLBindingFlagBits type, uint32_t &desc_set, uint32_t &binding);
+
+	std::unordered_set<SetBindingPair, InternalHasher> force_uav_buffer_bindings;
+
+	struct
+	{
+		uint32_t register_index = 0;
+		uint32_t register_space = 0;
+		bool explicit_binding = false;
+		bool used = false;
+	} base_vertex_info;
+
+	// Returns true for BuiltInSampleMask because gl_SampleMask[] is an array in SPIR-V, but SV_Coverage is a scalar in HLSL.
+	bool builtin_translates_to_nonarray(spv::BuiltIn builtin) const override;
+
+	// Returns true if the specified ID has a UserTypeGOOGLE decoration for StructuredBuffer or RWStructuredBuffer resources.
+	bool is_user_type_structured(uint32_t id) const override;
+
+	std::vector<TypeID> composite_selection_workaround_types;
+
+	std::string get_inner_entry_point_name() const;
+};
+} // namespace SPIRV_CROSS_NAMESPACE
+
+#endif

+ 147 - 14
ThirdParty/SpirvCross/spirv_parser.cpp

@@ -1,5 +1,6 @@
 /*
  * Copyright 2018-2021 Arm Limited
+ * SPDX-License-Identifier: Apache-2.0 OR MIT
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -18,7 +19,6 @@
  * At your option, you may choose to accept this material under either:
  *  1. The Apache License, Version 2.0, found at <http://www.apache.org/licenses/LICENSE-2.0>, or
  *  2. The MIT License, found at <http://opensource.org/licenses/MIT>.
- * SPDX-License-Identifier: Apache-2.0 OR MIT.
  */
 
 #include "spirv_parser.hpp"
@@ -31,7 +31,7 @@ namespace SPIRV_CROSS_NAMESPACE
 {
 Parser::Parser(vector<uint32_t> spirv)
 {
-	ir.spirv = move(spirv);
+	ir.spirv = std::move(spirv);
 }
 
 Parser::Parser(const uint32_t *spirv_data, size_t word_count)
@@ -68,6 +68,7 @@ static bool is_valid_spirv_version(uint32_t version)
 	case 0x10300: // SPIR-V 1.3
 	case 0x10400: // SPIR-V 1.4
 	case 0x10500: // SPIR-V 1.5
+	case 0x10600: // SPIR-V 1.6
 		return true;
 
 	default:
@@ -140,6 +141,8 @@ void Parser::parse()
 		SPIRV_CROSS_THROW("Function was not terminated.");
 	if (current_block)
 		SPIRV_CROSS_THROW("Block was not terminated.");
+	if (ir.default_entry_point == 0)
+		SPIRV_CROSS_THROW("There is no entry point in the SPIR-V module.");
 }
 
 const uint32_t *Parser::stream(const Instruction &instr) const
@@ -180,6 +183,15 @@ void Parser::parse(const Instruction &instruction)
 	auto op = static_cast<Op>(instruction.op);
 	uint32_t length = instruction.length;
 
+	// HACK for glslang that might emit OpEmitMeshTasksEXT followed by return / branch.
+	// Instead of failing hard, just ignore it.
+	if (ignore_trailing_block_opcodes)
+	{
+		ignore_trailing_block_opcodes = false;
+		if (op == OpReturn || op == OpBranch || op == OpUnreachable)
+			return;
+	}
+
 	switch (op)
 	{
 	case OpSourceContinued:
@@ -256,29 +268,37 @@ void Parser::parse(const Instruction &instruction)
 	case OpExtension:
 	{
 		auto ext = extract_string(ir.spirv, instruction.offset);
-		ir.declared_extensions.push_back(move(ext));
+		ir.declared_extensions.push_back(std::move(ext));
 		break;
 	}
 
 	case OpExtInstImport:
 	{
 		uint32_t id = ops[0];
+
+		SPIRExtension::Extension spirv_ext = SPIRExtension::Unsupported;
+
 		auto ext = extract_string(ir.spirv, instruction.offset + 1);
 		if (ext == "GLSL.std.450")
-			set<SPIRExtension>(id, SPIRExtension::GLSL);
+			spirv_ext = SPIRExtension::GLSL;
 		else if (ext == "DebugInfo")
-			set<SPIRExtension>(id, SPIRExtension::SPV_debug_info);
+			spirv_ext = SPIRExtension::SPV_debug_info;
 		else if (ext == "SPV_AMD_shader_ballot")
-			set<SPIRExtension>(id, SPIRExtension::SPV_AMD_shader_ballot);
+			spirv_ext = SPIRExtension::SPV_AMD_shader_ballot;
 		else if (ext == "SPV_AMD_shader_explicit_vertex_parameter")
-			set<SPIRExtension>(id, SPIRExtension::SPV_AMD_shader_explicit_vertex_parameter);
+			spirv_ext = SPIRExtension::SPV_AMD_shader_explicit_vertex_parameter;
 		else if (ext == "SPV_AMD_shader_trinary_minmax")
-			set<SPIRExtension>(id, SPIRExtension::SPV_AMD_shader_trinary_minmax);
+			spirv_ext = SPIRExtension::SPV_AMD_shader_trinary_minmax;
 		else if (ext == "SPV_AMD_gcn_shader")
-			set<SPIRExtension>(id, SPIRExtension::SPV_AMD_gcn_shader);
-		else
-			set<SPIRExtension>(id, SPIRExtension::Unsupported);
-
+			spirv_ext = SPIRExtension::SPV_AMD_gcn_shader;
+		else if (ext == "NonSemantic.DebugPrintf")
+			spirv_ext = SPIRExtension::NonSemanticDebugPrintf;
+		else if (ext == "NonSemantic.Shader.DebugInfo.100")
+			spirv_ext = SPIRExtension::NonSemanticShaderDebugInfo;
+		else if (ext.find("NonSemantic.") == 0)
+			spirv_ext = SPIRExtension::NonSemanticGeneric;
+
+		set<SPIRExtension>(id, spirv_ext);
 		// Other SPIR-V extensions which have ExtInstrs are currently not supported.
 
 		break;
@@ -288,7 +308,15 @@ void Parser::parse(const Instruction &instruction)
 	{
 		// The SPIR-V debug information extended instructions might come at global scope.
 		if (current_block)
+		{
 			current_block->ops.push_back(instruction);
+			if (length >= 2)
+			{
+				const auto *type = maybe_get<SPIRType>(ops[0]);
+				if (type)
+					ir.load_type_width.insert({ ops[1], type->width });
+			}
+		}
 		break;
 	}
 
@@ -336,12 +364,32 @@ void Parser::parse(const Instruction &instruction)
 			execution.output_vertices = ops[2];
 			break;
 
+		case ExecutionModeOutputPrimitivesEXT:
+			execution.output_primitives = ops[2];
+			break;
+
 		default:
 			break;
 		}
 		break;
 	}
 
+	case OpExecutionModeId:
+	{
+		auto &execution = ir.entry_points[ops[0]];
+		auto mode = static_cast<ExecutionMode>(ops[1]);
+		execution.flags.set(mode);
+
+		if (mode == ExecutionModeLocalSizeId)
+		{
+			execution.workgroup_size.id_x = ops[2];
+			execution.workgroup_size.id_y = ops[3];
+			execution.workgroup_size.id_z = ops[4];
+		}
+
+		break;
+	}
+
 	case OpName:
 	{
 		uint32_t id = ops[0];
@@ -959,6 +1007,58 @@ void Parser::parse(const Instruction &instruction)
 		current_block->false_block = ops[2];
 
 		current_block->terminator = SPIRBlock::Select;
+
+		if (current_block->true_block == current_block->false_block)
+		{
+			// Bogus conditional, translate to a direct branch.
+			// Avoids some ugly edge cases later when analyzing CFGs.
+
+			// There are some super jank cases where the merge block is different from the true/false,
+			// and later branches can "break" out of the selection construct this way.
+			// This is complete nonsense, but CTS hits this case.
+			// In this scenario, we should see the selection construct as more of a Switch with one default case.
+			// The problem here is that this breaks any attempt to break out of outer switch statements,
+			// but it's theoretically solvable if this ever comes up using the ladder breaking system ...
+
+			if (current_block->true_block != current_block->next_block &&
+			    current_block->merge == SPIRBlock::MergeSelection)
+			{
+				uint32_t ids = ir.increase_bound_by(2);
+
+				SPIRType type;
+				type.basetype = SPIRType::Int;
+				type.width = 32;
+				set<SPIRType>(ids, type);
+				auto &c = set<SPIRConstant>(ids + 1, ids);
+
+				current_block->condition = c.self;
+				current_block->default_block = current_block->true_block;
+				current_block->terminator = SPIRBlock::MultiSelect;
+				ir.block_meta[current_block->next_block] &= ~ParsedIR::BLOCK_META_SELECTION_MERGE_BIT;
+				ir.block_meta[current_block->next_block] |= ParsedIR::BLOCK_META_MULTISELECT_MERGE_BIT;
+			}
+			else
+			{
+				// Collapse loops if we have to.
+				bool collapsed_loop = current_block->true_block == current_block->merge_block &&
+				                      current_block->merge == SPIRBlock::MergeLoop;
+
+				if (collapsed_loop)
+				{
+					ir.block_meta[current_block->merge_block] &= ~ParsedIR::BLOCK_META_LOOP_MERGE_BIT;
+					ir.block_meta[current_block->continue_block] &= ~ParsedIR::BLOCK_META_CONTINUE_BIT;
+				}
+
+				current_block->next_block = current_block->true_block;
+				current_block->condition = 0;
+				current_block->true_block = 0;
+				current_block->false_block = 0;
+				current_block->merge_block = 0;
+				current_block->merge = SPIRBlock::MergeNone;
+				current_block->terminator = SPIRBlock::Direct;
+			}
+		}
+
 		current_block = nullptr;
 		break;
 	}
@@ -973,8 +1073,21 @@ void Parser::parse(const Instruction &instruction)
 		current_block->condition = ops[0];
 		current_block->default_block = ops[1];
 
-		for (uint32_t i = 2; i + 2 <= length; i += 2)
-			current_block->cases.push_back({ ops[i], ops[i + 1] });
+		uint32_t remaining_ops = length - 2;
+		if ((remaining_ops % 2) == 0)
+		{
+			for (uint32_t i = 2; i + 2 <= length; i += 2)
+				current_block->cases_32bit.push_back({ ops[i], ops[i + 1] });
+		}
+
+		if ((remaining_ops % 3) == 0)
+		{
+			for (uint32_t i = 2; i + 3 <= length; i += 3)
+			{
+				uint64_t value = (static_cast<uint64_t>(ops[i + 1]) << 32) | ops[i];
+				current_block->cases_64bit.push_back({ value, ops[i + 2] });
+			}
+		}
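+
+		// Illustrative encoding note: a 64-bit case literal such as 0x0000000100000000 arrives as
+		// two words, low then high, so ops[i] would be 0x00000000 and ops[i + 1] would be
+		// 0x00000001 for that case.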
 
 		// If we jump to next block, make it break instead since we're inside a switch case block at that point.
 		ir.block_meta[current_block->next_block] |= ParsedIR::BLOCK_META_MULTISELECT_MERGE_BIT;
@@ -984,6 +1097,7 @@ void Parser::parse(const Instruction &instruction)
 	}
 
 	case OpKill:
+	case OpTerminateInvocation:
 	{
 		if (!current_block)
 			SPIRV_CROSS_THROW("Trying to end a non-existing block.");
@@ -1008,6 +1122,18 @@ void Parser::parse(const Instruction &instruction)
 		current_block = nullptr;
 		break;
 
+	case OpEmitMeshTasksEXT:
+		if (!current_block)
+			SPIRV_CROSS_THROW("Trying to end a non-existing block.");
+		current_block->terminator = SPIRBlock::EmitMeshTasks;
+		for (uint32_t i = 0; i < 3; i++)
+			current_block->mesh.groups[i] = ops[i];
+		current_block->mesh.payload = length >= 4 ? ops[3] : 0;
+		current_block = nullptr;
+		// Currently glslang is bugged and does not treat EmitMeshTasksEXT as a terminator.
+		ignore_trailing_block_opcodes = true;
+		break;
+
 	case OpReturn:
 	{
 		if (!current_block)
@@ -1132,6 +1258,13 @@ void Parser::parse(const Instruction &instruction)
 	// Actual opcodes.
 	default:
 	{
+		if (length >= 2)
+		{
+			const auto *type = maybe_get<SPIRType>(ops[0]);
+			if (type)
+				ir.load_type_width.insert({ ops[1], type->width });
+		}
+
 		if (!current_block)
 			SPIRV_CROSS_THROW("Currently no block to insert opcode.");
 

+ 3 - 1
ThirdParty/SpirvCross/spirv_parser.hpp

@@ -1,5 +1,6 @@
 /*
  * Copyright 2018-2021 Arm Limited
+ * SPDX-License-Identifier: Apache-2.0 OR MIT
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -18,7 +19,6 @@
  * At your option, you may choose to accept this material under either:
  *  1. The Apache License, Version 2.0, found at <http://www.apache.org/licenses/LICENSE-2.0>, or
  *  2. The MIT License, found at <http://opensource.org/licenses/MIT>.
- * SPDX-License-Identifier: Apache-2.0 OR MIT.
  */
 
 #ifndef SPIRV_CROSS_PARSER_HPP
@@ -46,6 +46,8 @@ private:
 	ParsedIR ir;
 	SPIRFunction *current_function = nullptr;
 	SPIRBlock *current_block = nullptr;
+	// For workarounds.
+	bool ignore_trailing_block_opcodes = false;
 
 	void parse(const Instruction &instr);
 	const uint32_t *stream(const Instruction &instr) const;

File diff suppressed because it is too large
+ 599 - 72
ThirdParty/Volk/volk.c


+ 517 - 53
ThirdParty/Volk/volk.h

@@ -1,7 +1,7 @@
 /**
  * volk
  *
- * Copyright (C) 2018-2019, by Arseny Kapoulkine ([email protected])
+ * Copyright (C) 2018-2023, by Arseny Kapoulkine ([email protected])
  * Report bugs and download new versions at https://github.com/zeux/volk
  *
  * This library is distributed under the MIT License. See notice at the end of this file.
@@ -15,7 +15,7 @@
 #endif
 
 /* VOLK_GENERATE_VERSION_DEFINE */
-#define VOLK_HEADER_VERSION 198
+#define VOLK_HEADER_VERSION 267
 /* VOLK_GENERATE_VERSION_DEFINE */
 
 #ifndef VK_NO_PROTOTYPES
@@ -23,8 +23,8 @@
 #endif
 
 #ifndef VULKAN_H_
-#       ifdef VOLK_VULKAN_H_PATH
-#               include VOLK_VULKAN_H_PATH
+#	ifdef VOLK_VULKAN_H_PATH
+#		include VOLK_VULKAN_H_PATH
 #	elif defined(VK_USE_PLATFORM_WIN32_KHR)
 #		include <vulkan/vk_platform.h>
 #		include <vulkan/vulkan_core.h>
@@ -285,6 +285,53 @@ struct VolkDeviceTable
 	PFN_vkSignalSemaphore vkSignalSemaphore;
 	PFN_vkWaitSemaphores vkWaitSemaphores;
 #endif /* defined(VK_VERSION_1_2) */
+#if defined(VK_VERSION_1_3)
+	PFN_vkCmdBeginRendering vkCmdBeginRendering;
+	PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2;
+	PFN_vkCmdBlitImage2 vkCmdBlitImage2;
+	PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2;
+	PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2;
+	PFN_vkCmdCopyImage2 vkCmdCopyImage2;
+	PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2;
+	PFN_vkCmdEndRendering vkCmdEndRendering;
+	PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2;
+	PFN_vkCmdResetEvent2 vkCmdResetEvent2;
+	PFN_vkCmdResolveImage2 vkCmdResolveImage2;
+	PFN_vkCmdSetCullMode vkCmdSetCullMode;
+	PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable;
+	PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable;
+	PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp;
+	PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable;
+	PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable;
+	PFN_vkCmdSetEvent2 vkCmdSetEvent2;
+	PFN_vkCmdSetFrontFace vkCmdSetFrontFace;
+	PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable;
+	PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology;
+	PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable;
+	PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount;
+	PFN_vkCmdSetStencilOp vkCmdSetStencilOp;
+	PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable;
+	PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount;
+	PFN_vkCmdWaitEvents2 vkCmdWaitEvents2;
+	PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2;
+	PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot;
+	PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot;
+	PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements;
+	PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements;
+	PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements;
+	PFN_vkGetPrivateData vkGetPrivateData;
+	PFN_vkQueueSubmit2 vkQueueSubmit2;
+	PFN_vkSetPrivateData vkSetPrivateData;
+#endif /* defined(VK_VERSION_1_3) */
+#if defined(VK_AMDX_shader_enqueue)
+	PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX;
+	PFN_vkCmdDispatchGraphIndirectAMDX vkCmdDispatchGraphIndirectAMDX;
+	PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX;
+	PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX;
+	PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX;
+	PFN_vkGetExecutionGraphPipelineNodeIndexAMDX vkGetExecutionGraphPipelineNodeIndexAMDX;
+	PFN_vkGetExecutionGraphPipelineScratchSizeAMDX vkGetExecutionGraphPipelineScratchSizeAMDX;
+#endif /* defined(VK_AMDX_shader_enqueue) */
 #if defined(VK_AMD_buffer_marker)
 	PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD;
 #endif /* defined(VK_AMD_buffer_marker) */
@@ -302,6 +349,9 @@ struct VolkDeviceTable
 	PFN_vkGetAndroidHardwareBufferPropertiesANDROID vkGetAndroidHardwareBufferPropertiesANDROID;
 	PFN_vkGetMemoryAndroidHardwareBufferANDROID vkGetMemoryAndroidHardwareBufferANDROID;
 #endif /* defined(VK_ANDROID_external_memory_android_hardware_buffer) */
+#if defined(VK_EXT_attachment_feedback_loop_dynamic_state)
+	PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT vkCmdSetAttachmentFeedbackLoopEnableEXT;
+#endif /* defined(VK_EXT_attachment_feedback_loop_dynamic_state) */
 #if defined(VK_EXT_buffer_device_address)
 	PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT;
 #endif /* defined(VK_EXT_buffer_device_address) */
@@ -322,36 +372,40 @@ struct VolkDeviceTable
 	PFN_vkDebugMarkerSetObjectNameEXT vkDebugMarkerSetObjectNameEXT;
 	PFN_vkDebugMarkerSetObjectTagEXT vkDebugMarkerSetObjectTagEXT;
 #endif /* defined(VK_EXT_debug_marker) */
+#if defined(VK_EXT_depth_bias_control)
+	PFN_vkCmdSetDepthBias2EXT vkCmdSetDepthBias2EXT;
+#endif /* defined(VK_EXT_depth_bias_control) */
+#if defined(VK_EXT_descriptor_buffer)
+	PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT vkCmdBindDescriptorBufferEmbeddedSamplersEXT;
+	PFN_vkCmdBindDescriptorBuffersEXT vkCmdBindDescriptorBuffersEXT;
+	PFN_vkCmdSetDescriptorBufferOffsetsEXT vkCmdSetDescriptorBufferOffsetsEXT;
+	PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT vkGetBufferOpaqueCaptureDescriptorDataEXT;
+	PFN_vkGetDescriptorEXT vkGetDescriptorEXT;
+	PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT;
+	PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT;
+	PFN_vkGetImageOpaqueCaptureDescriptorDataEXT vkGetImageOpaqueCaptureDescriptorDataEXT;
+	PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT vkGetImageViewOpaqueCaptureDescriptorDataEXT;
+	PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT vkGetSamplerOpaqueCaptureDescriptorDataEXT;
+#endif /* defined(VK_EXT_descriptor_buffer) */
+#if defined(VK_EXT_descriptor_buffer) && (defined(VK_KHR_acceleration_structure) || defined(VK_NV_ray_tracing))
+	PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT;
+#endif /* defined(VK_EXT_descriptor_buffer) && (defined(VK_KHR_acceleration_structure) || defined(VK_NV_ray_tracing)) */
+#if defined(VK_EXT_device_fault)
+	PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT;
+#endif /* defined(VK_EXT_device_fault) */
 #if defined(VK_EXT_discard_rectangles)
 	PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT;
 #endif /* defined(VK_EXT_discard_rectangles) */
+#if defined(VK_EXT_discard_rectangles) && VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION >= 2
+	PFN_vkCmdSetDiscardRectangleEnableEXT vkCmdSetDiscardRectangleEnableEXT;
+	PFN_vkCmdSetDiscardRectangleModeEXT vkCmdSetDiscardRectangleModeEXT;
+#endif /* defined(VK_EXT_discard_rectangles) && VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION >= 2 */
 #if defined(VK_EXT_display_control)
 	PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT;
 	PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT;
 	PFN_vkRegisterDeviceEventEXT vkRegisterDeviceEventEXT;
 	PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT;
 #endif /* defined(VK_EXT_display_control) */
-#if defined(VK_EXT_extended_dynamic_state)
-	PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT;
-	PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT;
-	PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT;
-	PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT;
-	PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT;
-	PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT;
-	PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT;
-	PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT;
-	PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT;
-	PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT;
-	PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT;
-	PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT;
-#endif /* defined(VK_EXT_extended_dynamic_state) */
-#if defined(VK_EXT_extended_dynamic_state2)
-	PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT;
-	PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT;
-	PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT;
-	PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT;
-	PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT;
-#endif /* defined(VK_EXT_extended_dynamic_state2) */
 #if defined(VK_EXT_external_memory_host)
 	PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT;
 #endif /* defined(VK_EXT_external_memory_host) */
@@ -362,6 +416,12 @@ struct VolkDeviceTable
 #if defined(VK_EXT_hdr_metadata)
 	PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT;
 #endif /* defined(VK_EXT_hdr_metadata) */
+#if defined(VK_EXT_host_image_copy)
+	PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT;
+	PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT;
+	PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT;
+	PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT;
+#endif /* defined(VK_EXT_host_image_copy) */
 #if defined(VK_EXT_host_query_reset)
 	PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT;
 #endif /* defined(VK_EXT_host_query_reset) */
@@ -371,13 +431,40 @@ struct VolkDeviceTable
 #if defined(VK_EXT_line_rasterization)
 	PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT;
 #endif /* defined(VK_EXT_line_rasterization) */
+#if defined(VK_EXT_mesh_shader)
+	PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT;
+	PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT;
+	PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT;
+#endif /* defined(VK_EXT_mesh_shader) */
+#if defined(VK_EXT_metal_objects)
+	PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT;
+#endif /* defined(VK_EXT_metal_objects) */
 #if defined(VK_EXT_multi_draw)
 	PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT;
 	PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT;
 #endif /* defined(VK_EXT_multi_draw) */
+#if defined(VK_EXT_opacity_micromap)
+	PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT;
+	PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT;
+	PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT;
+	PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT;
+	PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT;
+	PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT;
+	PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT;
+	PFN_vkCopyMicromapEXT vkCopyMicromapEXT;
+	PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT;
+	PFN_vkCreateMicromapEXT vkCreateMicromapEXT;
+	PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT;
+	PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT;
+	PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT;
+	PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT;
+#endif /* defined(VK_EXT_opacity_micromap) */
 #if defined(VK_EXT_pageable_device_local_memory)
 	PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT;
 #endif /* defined(VK_EXT_pageable_device_local_memory) */
+#if defined(VK_EXT_pipeline_properties)
+	PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT;
+#endif /* defined(VK_EXT_pipeline_properties) */
 #if defined(VK_EXT_private_data)
 	PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT;
 	PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT;
@@ -387,6 +474,19 @@ struct VolkDeviceTable
 #if defined(VK_EXT_sample_locations)
 	PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT;
 #endif /* defined(VK_EXT_sample_locations) */
+#if defined(VK_EXT_shader_module_identifier)
+	PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT;
+	PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT;
+#endif /* defined(VK_EXT_shader_module_identifier) */
+#if defined(VK_EXT_shader_object)
+	PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT;
+	PFN_vkCreateShadersEXT vkCreateShadersEXT;
+	PFN_vkDestroyShaderEXT vkDestroyShaderEXT;
+	PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT;
+#endif /* defined(VK_EXT_shader_object) */
+#if defined(VK_EXT_swapchain_maintenance1)
+	PFN_vkReleaseSwapchainImagesEXT vkReleaseSwapchainImagesEXT;
+#endif /* defined(VK_EXT_swapchain_maintenance1) */
 #if defined(VK_EXT_transform_feedback)
 	PFN_vkCmdBeginQueryIndexedEXT vkCmdBeginQueryIndexedEXT;
 	PFN_vkCmdBeginTransformFeedbackEXT vkCmdBeginTransformFeedbackEXT;
@@ -401,9 +501,6 @@ struct VolkDeviceTable
 	PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT;
 	PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT;
 #endif /* defined(VK_EXT_validation_cache) */
-#if defined(VK_EXT_vertex_input_dynamic_state)
-	PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT;
-#endif /* defined(VK_EXT_vertex_input_dynamic_state) */
 #if defined(VK_FUCHSIA_buffer_collection)
 	PFN_vkCreateBufferCollectionFUCHSIA vkCreateBufferCollectionFUCHSIA;
 	PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA;
@@ -423,6 +520,10 @@ struct VolkDeviceTable
 	PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE;
 	PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE;
 #endif /* defined(VK_GOOGLE_display_timing) */
+#if defined(VK_HUAWEI_cluster_culling_shader)
+	PFN_vkCmdDrawClusterHUAWEI vkCmdDrawClusterHUAWEI;
+	PFN_vkCmdDrawClusterIndirectHUAWEI vkCmdDrawClusterIndirectHUAWEI;
+#endif /* defined(VK_HUAWEI_cluster_culling_shader) */
 #if defined(VK_HUAWEI_invocation_mask)
 	PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI;
 #endif /* defined(VK_HUAWEI_invocation_mask) */
@@ -553,6 +654,16 @@ struct VolkDeviceTable
 	PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR;
 	PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR;
 #endif /* defined(VK_KHR_maintenance4) */
+#if defined(VK_KHR_maintenance5)
+	PFN_vkCmdBindIndexBuffer2KHR vkCmdBindIndexBuffer2KHR;
+	PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR;
+	PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR;
+	PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR;
+#endif /* defined(VK_KHR_maintenance5) */
+#if defined(VK_KHR_map_memory2)
+	PFN_vkMapMemory2KHR vkMapMemory2KHR;
+	PFN_vkUnmapMemory2KHR vkUnmapMemory2KHR;
+#endif /* defined(VK_KHR_map_memory2) */
 #if defined(VK_KHR_performance_query)
 	PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR;
 	PFN_vkReleaseProfilingLockKHR vkReleaseProfilingLockKHR;
@@ -568,6 +679,9 @@ struct VolkDeviceTable
 #if defined(VK_KHR_push_descriptor)
 	PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR;
 #endif /* defined(VK_KHR_push_descriptor) */
+#if defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline)
+	PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR;
+#endif /* defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline) */
 #if defined(VK_KHR_ray_tracing_pipeline)
 	PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR;
 	PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR;
@@ -615,6 +729,7 @@ struct VolkDeviceTable
 #endif /* defined(VK_KHR_video_decode_queue) */
 #if defined(VK_KHR_video_encode_queue)
 	PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR;
+	PFN_vkGetEncodedVideoSessionParametersKHR vkGetEncodedVideoSessionParametersKHR;
 #endif /* defined(VK_KHR_video_encode_queue) */
 #if defined(VK_KHR_video_queue)
 	PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR;
@@ -642,6 +757,10 @@ struct VolkDeviceTable
 #if defined(VK_NV_clip_space_w_scaling)
 	PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV;
 #endif /* defined(VK_NV_clip_space_w_scaling) */
+#if defined(VK_NV_copy_memory_indirect)
+	PFN_vkCmdCopyMemoryIndirectNV vkCmdCopyMemoryIndirectNV;
+	PFN_vkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNV;
+#endif /* defined(VK_NV_copy_memory_indirect) */
 #if defined(VK_NV_device_diagnostic_checkpoints)
 	PFN_vkCmdSetCheckpointNV vkCmdSetCheckpointNV;
 	PFN_vkGetQueueCheckpointDataNV vkGetQueueCheckpointDataNV;
@@ -654,6 +773,11 @@ struct VolkDeviceTable
 	PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV;
 	PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV;
 #endif /* defined(VK_NV_device_generated_commands) */
+#if defined(VK_NV_device_generated_commands_compute)
+	PFN_vkCmdUpdatePipelineIndirectBufferNV vkCmdUpdatePipelineIndirectBufferNV;
+	PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV;
+	PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV;
+#endif /* defined(VK_NV_device_generated_commands_compute) */
 #if defined(VK_NV_external_memory_rdma)
 	PFN_vkGetMemoryRemoteAddressNV vkGetMemoryRemoteAddressNV;
 #endif /* defined(VK_NV_external_memory_rdma) */
@@ -663,11 +787,28 @@ struct VolkDeviceTable
 #if defined(VK_NV_fragment_shading_rate_enums)
 	PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV;
 #endif /* defined(VK_NV_fragment_shading_rate_enums) */
+#if defined(VK_NV_low_latency2)
+	PFN_vkGetLatencyTimingsNV vkGetLatencyTimingsNV;
+	PFN_vkLatencySleepNV vkLatencySleepNV;
+	PFN_vkQueueNotifyOutOfBandNV vkQueueNotifyOutOfBandNV;
+	PFN_vkSetLatencyMarkerNV vkSetLatencyMarkerNV;
+	PFN_vkSetLatencySleepModeNV vkSetLatencySleepModeNV;
+#endif /* defined(VK_NV_low_latency2) */
+#if defined(VK_NV_memory_decompression)
+	PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV;
+	PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV;
+#endif /* defined(VK_NV_memory_decompression) */
 #if defined(VK_NV_mesh_shader)
 	PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV;
 	PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV;
 	PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV;
 #endif /* defined(VK_NV_mesh_shader) */
+#if defined(VK_NV_optical_flow)
+	PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV;
+	PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV;
+	PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV;
+	PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV;
+#endif /* defined(VK_NV_optical_flow) */
 #if defined(VK_NV_ray_tracing)
 	PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV;
 	PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV;
@@ -682,6 +823,9 @@ struct VolkDeviceTable
 	PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV;
 	PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV;
 #endif /* defined(VK_NV_ray_tracing) */
+#if defined(VK_NV_scissor_exclusive) && VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION >= 2
+	PFN_vkCmdSetExclusiveScissorEnableNV vkCmdSetExclusiveScissorEnableNV;
+#endif /* defined(VK_NV_scissor_exclusive) && VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION >= 2 */
 #if defined(VK_NV_scissor_exclusive)
 	PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV;
 #endif /* defined(VK_NV_scissor_exclusive) */
@@ -690,9 +834,94 @@ struct VolkDeviceTable
 	PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV;
 	PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV;
 #endif /* defined(VK_NV_shading_rate_image) */
+#if defined(VK_QCOM_tile_properties)
+	PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM;
+	PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM;
+#endif /* defined(VK_QCOM_tile_properties) */
+#if defined(VK_QNX_external_memory_screen_buffer)
+	PFN_vkGetScreenBufferPropertiesQNX vkGetScreenBufferPropertiesQNX;
+#endif /* defined(VK_QNX_external_memory_screen_buffer) */
+#if defined(VK_VALVE_descriptor_set_host_mapping)
+	PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE;
+	PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE;
+#endif /* defined(VK_VALVE_descriptor_set_host_mapping) */
+#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object))
+	PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT;
+	PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT;
+	PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT;
+	PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT;
+	PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT;
+	PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT;
+	PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT;
+	PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT;
+	PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT;
+	PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT;
+	PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT;
+	PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT;
+#endif /* (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) */
+#if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object))
+	PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT;
+	PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT;
+	PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT;
+	PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT;
+	PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT;
+#endif /* (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) */
+#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object))
+	PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT;
+	PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT;
+	PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT;
+	PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT;
+	PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT;
+	PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT;
+	PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT;
+	PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT;
+	PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT;
+	PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT;
+	PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT;
+	PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT;
+	PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT;
+	PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT;
+	PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT;
+	PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT;
+	PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT;
+	PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT;
+	PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT;
+	PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT;
+	PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT;
+#endif /* (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) */
+#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_clip_space_w_scaling)) || (defined(VK_EXT_shader_object) && defined(VK_NV_clip_space_w_scaling))
+	PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV;
+#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_clip_space_w_scaling)) || (defined(VK_EXT_shader_object) && defined(VK_NV_clip_space_w_scaling)) */
+#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_viewport_swizzle)) || (defined(VK_EXT_shader_object) && defined(VK_NV_viewport_swizzle))
+	PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV;
+#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_viewport_swizzle)) || (defined(VK_EXT_shader_object) && defined(VK_NV_viewport_swizzle)) */
+#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_fragment_coverage_to_color)) || (defined(VK_EXT_shader_object) && defined(VK_NV_fragment_coverage_to_color))
+	PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV;
+	PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV;
+#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_fragment_coverage_to_color)) || (defined(VK_EXT_shader_object) && defined(VK_NV_fragment_coverage_to_color)) */
+#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_framebuffer_mixed_samples)) || (defined(VK_EXT_shader_object) && defined(VK_NV_framebuffer_mixed_samples))
+	PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV;
+	PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV;
+	PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV;
+#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_framebuffer_mixed_samples)) || (defined(VK_EXT_shader_object) && defined(VK_NV_framebuffer_mixed_samples)) */
+#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_shading_rate_image)) || (defined(VK_EXT_shader_object) && defined(VK_NV_shading_rate_image))
+	PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV;
+#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_shading_rate_image)) || (defined(VK_EXT_shader_object) && defined(VK_NV_shading_rate_image)) */
+#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_representative_fragment_test)) || (defined(VK_EXT_shader_object) && defined(VK_NV_representative_fragment_test))
+	PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV;
+#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_representative_fragment_test)) || (defined(VK_EXT_shader_object) && defined(VK_NV_representative_fragment_test)) */
+#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_coverage_reduction_mode)) || (defined(VK_EXT_shader_object) && defined(VK_NV_coverage_reduction_mode))
+	PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV;
+#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_coverage_reduction_mode)) || (defined(VK_EXT_shader_object) && defined(VK_NV_coverage_reduction_mode)) */
 #if (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1))
 	PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT;
 #endif /* (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1)) */
+#if (defined(VK_EXT_host_image_copy)) || (defined(VK_EXT_image_compression_control))
+	PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT;
+#endif /* (defined(VK_EXT_host_image_copy)) || (defined(VK_EXT_image_compression_control)) */
+#if (defined(VK_EXT_shader_object)) || (defined(VK_EXT_vertex_input_dynamic_state))
+	PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT;
+#endif /* (defined(VK_EXT_shader_object)) || (defined(VK_EXT_vertex_input_dynamic_state)) */
 #if (defined(VK_KHR_descriptor_update_template) && defined(VK_KHR_push_descriptor)) || (defined(VK_KHR_push_descriptor) && defined(VK_VERSION_1_1)) || (defined(VK_KHR_push_descriptor) && defined(VK_KHR_descriptor_update_template))
 	PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR;
 #endif /* (defined(VK_KHR_descriptor_update_template) && defined(VK_KHR_push_descriptor)) || (defined(VK_KHR_push_descriptor) && defined(VK_VERSION_1_1)) || (defined(VK_KHR_push_descriptor) && defined(VK_KHR_descriptor_update_template)) */
@@ -891,6 +1120,54 @@ extern PFN_vkResetQueryPool vkResetQueryPool;
 extern PFN_vkSignalSemaphore vkSignalSemaphore;
 extern PFN_vkWaitSemaphores vkWaitSemaphores;
 #endif /* defined(VK_VERSION_1_2) */
+#if defined(VK_VERSION_1_3)
+extern PFN_vkCmdBeginRendering vkCmdBeginRendering;
+extern PFN_vkCmdBindVertexBuffers2 vkCmdBindVertexBuffers2;
+extern PFN_vkCmdBlitImage2 vkCmdBlitImage2;
+extern PFN_vkCmdCopyBuffer2 vkCmdCopyBuffer2;
+extern PFN_vkCmdCopyBufferToImage2 vkCmdCopyBufferToImage2;
+extern PFN_vkCmdCopyImage2 vkCmdCopyImage2;
+extern PFN_vkCmdCopyImageToBuffer2 vkCmdCopyImageToBuffer2;
+extern PFN_vkCmdEndRendering vkCmdEndRendering;
+extern PFN_vkCmdPipelineBarrier2 vkCmdPipelineBarrier2;
+extern PFN_vkCmdResetEvent2 vkCmdResetEvent2;
+extern PFN_vkCmdResolveImage2 vkCmdResolveImage2;
+extern PFN_vkCmdSetCullMode vkCmdSetCullMode;
+extern PFN_vkCmdSetDepthBiasEnable vkCmdSetDepthBiasEnable;
+extern PFN_vkCmdSetDepthBoundsTestEnable vkCmdSetDepthBoundsTestEnable;
+extern PFN_vkCmdSetDepthCompareOp vkCmdSetDepthCompareOp;
+extern PFN_vkCmdSetDepthTestEnable vkCmdSetDepthTestEnable;
+extern PFN_vkCmdSetDepthWriteEnable vkCmdSetDepthWriteEnable;
+extern PFN_vkCmdSetEvent2 vkCmdSetEvent2;
+extern PFN_vkCmdSetFrontFace vkCmdSetFrontFace;
+extern PFN_vkCmdSetPrimitiveRestartEnable vkCmdSetPrimitiveRestartEnable;
+extern PFN_vkCmdSetPrimitiveTopology vkCmdSetPrimitiveTopology;
+extern PFN_vkCmdSetRasterizerDiscardEnable vkCmdSetRasterizerDiscardEnable;
+extern PFN_vkCmdSetScissorWithCount vkCmdSetScissorWithCount;
+extern PFN_vkCmdSetStencilOp vkCmdSetStencilOp;
+extern PFN_vkCmdSetStencilTestEnable vkCmdSetStencilTestEnable;
+extern PFN_vkCmdSetViewportWithCount vkCmdSetViewportWithCount;
+extern PFN_vkCmdWaitEvents2 vkCmdWaitEvents2;
+extern PFN_vkCmdWriteTimestamp2 vkCmdWriteTimestamp2;
+extern PFN_vkCreatePrivateDataSlot vkCreatePrivateDataSlot;
+extern PFN_vkDestroyPrivateDataSlot vkDestroyPrivateDataSlot;
+extern PFN_vkGetDeviceBufferMemoryRequirements vkGetDeviceBufferMemoryRequirements;
+extern PFN_vkGetDeviceImageMemoryRequirements vkGetDeviceImageMemoryRequirements;
+extern PFN_vkGetDeviceImageSparseMemoryRequirements vkGetDeviceImageSparseMemoryRequirements;
+extern PFN_vkGetPhysicalDeviceToolProperties vkGetPhysicalDeviceToolProperties;
+extern PFN_vkGetPrivateData vkGetPrivateData;
+extern PFN_vkQueueSubmit2 vkQueueSubmit2;
+extern PFN_vkSetPrivateData vkSetPrivateData;
+#endif /* defined(VK_VERSION_1_3) */
+#if defined(VK_AMDX_shader_enqueue)
+extern PFN_vkCmdDispatchGraphAMDX vkCmdDispatchGraphAMDX;
+extern PFN_vkCmdDispatchGraphIndirectAMDX vkCmdDispatchGraphIndirectAMDX;
+extern PFN_vkCmdDispatchGraphIndirectCountAMDX vkCmdDispatchGraphIndirectCountAMDX;
+extern PFN_vkCmdInitializeGraphScratchMemoryAMDX vkCmdInitializeGraphScratchMemoryAMDX;
+extern PFN_vkCreateExecutionGraphPipelinesAMDX vkCreateExecutionGraphPipelinesAMDX;
+extern PFN_vkGetExecutionGraphPipelineNodeIndexAMDX vkGetExecutionGraphPipelineNodeIndexAMDX;
+extern PFN_vkGetExecutionGraphPipelineScratchSizeAMDX vkGetExecutionGraphPipelineScratchSizeAMDX;
+#endif /* defined(VK_AMDX_shader_enqueue) */
 #if defined(VK_AMD_buffer_marker)
 extern PFN_vkCmdWriteBufferMarkerAMD vkCmdWriteBufferMarkerAMD;
 #endif /* defined(VK_AMD_buffer_marker) */
@@ -916,6 +1193,9 @@ extern PFN_vkGetDrmDisplayEXT vkGetDrmDisplayEXT;
 extern PFN_vkAcquireXlibDisplayEXT vkAcquireXlibDisplayEXT;
 extern PFN_vkGetRandROutputDisplayEXT vkGetRandROutputDisplayEXT;
 #endif /* defined(VK_EXT_acquire_xlib_display) */
+#if defined(VK_EXT_attachment_feedback_loop_dynamic_state)
+extern PFN_vkCmdSetAttachmentFeedbackLoopEnableEXT vkCmdSetAttachmentFeedbackLoopEnableEXT;
+#endif /* defined(VK_EXT_attachment_feedback_loop_dynamic_state) */
 #if defined(VK_EXT_buffer_device_address)
 extern PFN_vkGetBufferDeviceAddressEXT vkGetBufferDeviceAddressEXT;
 #endif /* defined(VK_EXT_buffer_device_address) */
@@ -955,6 +1235,27 @@ extern PFN_vkSetDebugUtilsObjectNameEXT vkSetDebugUtilsObjectNameEXT;
 extern PFN_vkSetDebugUtilsObjectTagEXT vkSetDebugUtilsObjectTagEXT;
 extern PFN_vkSubmitDebugUtilsMessageEXT vkSubmitDebugUtilsMessageEXT;
 #endif /* defined(VK_EXT_debug_utils) */
+#if defined(VK_EXT_depth_bias_control)
+extern PFN_vkCmdSetDepthBias2EXT vkCmdSetDepthBias2EXT;
+#endif /* defined(VK_EXT_depth_bias_control) */
+#if defined(VK_EXT_descriptor_buffer)
+extern PFN_vkCmdBindDescriptorBufferEmbeddedSamplersEXT vkCmdBindDescriptorBufferEmbeddedSamplersEXT;
+extern PFN_vkCmdBindDescriptorBuffersEXT vkCmdBindDescriptorBuffersEXT;
+extern PFN_vkCmdSetDescriptorBufferOffsetsEXT vkCmdSetDescriptorBufferOffsetsEXT;
+extern PFN_vkGetBufferOpaqueCaptureDescriptorDataEXT vkGetBufferOpaqueCaptureDescriptorDataEXT;
+extern PFN_vkGetDescriptorEXT vkGetDescriptorEXT;
+extern PFN_vkGetDescriptorSetLayoutBindingOffsetEXT vkGetDescriptorSetLayoutBindingOffsetEXT;
+extern PFN_vkGetDescriptorSetLayoutSizeEXT vkGetDescriptorSetLayoutSizeEXT;
+extern PFN_vkGetImageOpaqueCaptureDescriptorDataEXT vkGetImageOpaqueCaptureDescriptorDataEXT;
+extern PFN_vkGetImageViewOpaqueCaptureDescriptorDataEXT vkGetImageViewOpaqueCaptureDescriptorDataEXT;
+extern PFN_vkGetSamplerOpaqueCaptureDescriptorDataEXT vkGetSamplerOpaqueCaptureDescriptorDataEXT;
+#endif /* defined(VK_EXT_descriptor_buffer) */
+#if defined(VK_EXT_descriptor_buffer) && (defined(VK_KHR_acceleration_structure) || defined(VK_NV_ray_tracing))
+extern PFN_vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT vkGetAccelerationStructureOpaqueCaptureDescriptorDataEXT;
+#endif /* defined(VK_EXT_descriptor_buffer) && (defined(VK_KHR_acceleration_structure) || defined(VK_NV_ray_tracing)) */
+#if defined(VK_EXT_device_fault)
+extern PFN_vkGetDeviceFaultInfoEXT vkGetDeviceFaultInfoEXT;
+#endif /* defined(VK_EXT_device_fault) */
 #if defined(VK_EXT_direct_mode_display)
 extern PFN_vkReleaseDisplayEXT vkReleaseDisplayEXT;
 #endif /* defined(VK_EXT_direct_mode_display) */
@@ -965,6 +1266,10 @@ extern PFN_vkGetPhysicalDeviceDirectFBPresentationSupportEXT vkGetPhysicalDevice
 #if defined(VK_EXT_discard_rectangles)
 extern PFN_vkCmdSetDiscardRectangleEXT vkCmdSetDiscardRectangleEXT;
 #endif /* defined(VK_EXT_discard_rectangles) */
+#if defined(VK_EXT_discard_rectangles) && VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION >= 2
+extern PFN_vkCmdSetDiscardRectangleEnableEXT vkCmdSetDiscardRectangleEnableEXT;
+extern PFN_vkCmdSetDiscardRectangleModeEXT vkCmdSetDiscardRectangleModeEXT;
+#endif /* defined(VK_EXT_discard_rectangles) && VK_EXT_DISCARD_RECTANGLES_SPEC_VERSION >= 2 */
 #if defined(VK_EXT_display_control)
 extern PFN_vkDisplayPowerControlEXT vkDisplayPowerControlEXT;
 extern PFN_vkGetSwapchainCounterEXT vkGetSwapchainCounterEXT;
@@ -974,27 +1279,6 @@ extern PFN_vkRegisterDisplayEventEXT vkRegisterDisplayEventEXT;
 #if defined(VK_EXT_display_surface_counter)
 extern PFN_vkGetPhysicalDeviceSurfaceCapabilities2EXT vkGetPhysicalDeviceSurfaceCapabilities2EXT;
 #endif /* defined(VK_EXT_display_surface_counter) */
-#if defined(VK_EXT_extended_dynamic_state)
-extern PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT;
-extern PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT;
-extern PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT;
-extern PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT;
-extern PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT;
-extern PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT;
-extern PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT;
-extern PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT;
-extern PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT;
-extern PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT;
-extern PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT;
-extern PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT;
-#endif /* defined(VK_EXT_extended_dynamic_state) */
-#if defined(VK_EXT_extended_dynamic_state2)
-extern PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT;
-extern PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT;
-extern PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT;
-extern PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT;
-extern PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT;
-#endif /* defined(VK_EXT_extended_dynamic_state2) */
 #if defined(VK_EXT_external_memory_host)
 extern PFN_vkGetMemoryHostPointerPropertiesEXT vkGetMemoryHostPointerPropertiesEXT;
 #endif /* defined(VK_EXT_external_memory_host) */
@@ -1009,6 +1293,12 @@ extern PFN_vkSetHdrMetadataEXT vkSetHdrMetadataEXT;
 #if defined(VK_EXT_headless_surface)
 extern PFN_vkCreateHeadlessSurfaceEXT vkCreateHeadlessSurfaceEXT;
 #endif /* defined(VK_EXT_headless_surface) */
+#if defined(VK_EXT_host_image_copy)
+extern PFN_vkCopyImageToImageEXT vkCopyImageToImageEXT;
+extern PFN_vkCopyImageToMemoryEXT vkCopyImageToMemoryEXT;
+extern PFN_vkCopyMemoryToImageEXT vkCopyMemoryToImageEXT;
+extern PFN_vkTransitionImageLayoutEXT vkTransitionImageLayoutEXT;
+#endif /* defined(VK_EXT_host_image_copy) */
 #if defined(VK_EXT_host_query_reset)
 extern PFN_vkResetQueryPoolEXT vkResetQueryPoolEXT;
 #endif /* defined(VK_EXT_host_query_reset) */
@@ -1018,6 +1308,14 @@ extern PFN_vkGetImageDrmFormatModifierPropertiesEXT vkGetImageDrmFormatModifierP
 #if defined(VK_EXT_line_rasterization)
 extern PFN_vkCmdSetLineStippleEXT vkCmdSetLineStippleEXT;
 #endif /* defined(VK_EXT_line_rasterization) */
+#if defined(VK_EXT_mesh_shader)
+extern PFN_vkCmdDrawMeshTasksEXT vkCmdDrawMeshTasksEXT;
+extern PFN_vkCmdDrawMeshTasksIndirectCountEXT vkCmdDrawMeshTasksIndirectCountEXT;
+extern PFN_vkCmdDrawMeshTasksIndirectEXT vkCmdDrawMeshTasksIndirectEXT;
+#endif /* defined(VK_EXT_mesh_shader) */
+#if defined(VK_EXT_metal_objects)
+extern PFN_vkExportMetalObjectsEXT vkExportMetalObjectsEXT;
+#endif /* defined(VK_EXT_metal_objects) */
 #if defined(VK_EXT_metal_surface)
 extern PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT;
 #endif /* defined(VK_EXT_metal_surface) */
@@ -1025,9 +1323,28 @@ extern PFN_vkCreateMetalSurfaceEXT vkCreateMetalSurfaceEXT;
 extern PFN_vkCmdDrawMultiEXT vkCmdDrawMultiEXT;
 extern PFN_vkCmdDrawMultiIndexedEXT vkCmdDrawMultiIndexedEXT;
 #endif /* defined(VK_EXT_multi_draw) */
+#if defined(VK_EXT_opacity_micromap)
+extern PFN_vkBuildMicromapsEXT vkBuildMicromapsEXT;
+extern PFN_vkCmdBuildMicromapsEXT vkCmdBuildMicromapsEXT;
+extern PFN_vkCmdCopyMemoryToMicromapEXT vkCmdCopyMemoryToMicromapEXT;
+extern PFN_vkCmdCopyMicromapEXT vkCmdCopyMicromapEXT;
+extern PFN_vkCmdCopyMicromapToMemoryEXT vkCmdCopyMicromapToMemoryEXT;
+extern PFN_vkCmdWriteMicromapsPropertiesEXT vkCmdWriteMicromapsPropertiesEXT;
+extern PFN_vkCopyMemoryToMicromapEXT vkCopyMemoryToMicromapEXT;
+extern PFN_vkCopyMicromapEXT vkCopyMicromapEXT;
+extern PFN_vkCopyMicromapToMemoryEXT vkCopyMicromapToMemoryEXT;
+extern PFN_vkCreateMicromapEXT vkCreateMicromapEXT;
+extern PFN_vkDestroyMicromapEXT vkDestroyMicromapEXT;
+extern PFN_vkGetDeviceMicromapCompatibilityEXT vkGetDeviceMicromapCompatibilityEXT;
+extern PFN_vkGetMicromapBuildSizesEXT vkGetMicromapBuildSizesEXT;
+extern PFN_vkWriteMicromapsPropertiesEXT vkWriteMicromapsPropertiesEXT;
+#endif /* defined(VK_EXT_opacity_micromap) */
 #if defined(VK_EXT_pageable_device_local_memory)
 extern PFN_vkSetDeviceMemoryPriorityEXT vkSetDeviceMemoryPriorityEXT;
 #endif /* defined(VK_EXT_pageable_device_local_memory) */
+#if defined(VK_EXT_pipeline_properties)
+extern PFN_vkGetPipelinePropertiesEXT vkGetPipelinePropertiesEXT;
+#endif /* defined(VK_EXT_pipeline_properties) */
 #if defined(VK_EXT_private_data)
 extern PFN_vkCreatePrivateDataSlotEXT vkCreatePrivateDataSlotEXT;
 extern PFN_vkDestroyPrivateDataSlotEXT vkDestroyPrivateDataSlotEXT;
@@ -1038,6 +1355,19 @@ extern PFN_vkSetPrivateDataEXT vkSetPrivateDataEXT;
 extern PFN_vkCmdSetSampleLocationsEXT vkCmdSetSampleLocationsEXT;
 extern PFN_vkGetPhysicalDeviceMultisamplePropertiesEXT vkGetPhysicalDeviceMultisamplePropertiesEXT;
 #endif /* defined(VK_EXT_sample_locations) */
+#if defined(VK_EXT_shader_module_identifier)
+extern PFN_vkGetShaderModuleCreateInfoIdentifierEXT vkGetShaderModuleCreateInfoIdentifierEXT;
+extern PFN_vkGetShaderModuleIdentifierEXT vkGetShaderModuleIdentifierEXT;
+#endif /* defined(VK_EXT_shader_module_identifier) */
+#if defined(VK_EXT_shader_object)
+extern PFN_vkCmdBindShadersEXT vkCmdBindShadersEXT;
+extern PFN_vkCreateShadersEXT vkCreateShadersEXT;
+extern PFN_vkDestroyShaderEXT vkDestroyShaderEXT;
+extern PFN_vkGetShaderBinaryDataEXT vkGetShaderBinaryDataEXT;
+#endif /* defined(VK_EXT_shader_object) */
+#if defined(VK_EXT_swapchain_maintenance1)
+extern PFN_vkReleaseSwapchainImagesEXT vkReleaseSwapchainImagesEXT;
+#endif /* defined(VK_EXT_swapchain_maintenance1) */
 #if defined(VK_EXT_tooling_info)
 extern PFN_vkGetPhysicalDeviceToolPropertiesEXT vkGetPhysicalDeviceToolPropertiesEXT;
 #endif /* defined(VK_EXT_tooling_info) */
@@ -1055,9 +1385,6 @@ extern PFN_vkDestroyValidationCacheEXT vkDestroyValidationCacheEXT;
 extern PFN_vkGetValidationCacheDataEXT vkGetValidationCacheDataEXT;
 extern PFN_vkMergeValidationCachesEXT vkMergeValidationCachesEXT;
 #endif /* defined(VK_EXT_validation_cache) */
-#if defined(VK_EXT_vertex_input_dynamic_state)
-extern PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT;
-#endif /* defined(VK_EXT_vertex_input_dynamic_state) */
 #if defined(VK_FUCHSIA_buffer_collection)
 extern PFN_vkCreateBufferCollectionFUCHSIA vkCreateBufferCollectionFUCHSIA;
 extern PFN_vkDestroyBufferCollectionFUCHSIA vkDestroyBufferCollectionFUCHSIA;
@@ -1083,6 +1410,10 @@ extern PFN_vkCreateStreamDescriptorSurfaceGGP vkCreateStreamDescriptorSurfaceGGP
 extern PFN_vkGetPastPresentationTimingGOOGLE vkGetPastPresentationTimingGOOGLE;
 extern PFN_vkGetRefreshCycleDurationGOOGLE vkGetRefreshCycleDurationGOOGLE;
 #endif /* defined(VK_GOOGLE_display_timing) */
+#if defined(VK_HUAWEI_cluster_culling_shader)
+extern PFN_vkCmdDrawClusterHUAWEI vkCmdDrawClusterHUAWEI;
+extern PFN_vkCmdDrawClusterIndirectHUAWEI vkCmdDrawClusterIndirectHUAWEI;
+#endif /* defined(VK_HUAWEI_cluster_culling_shader) */
 #if defined(VK_HUAWEI_invocation_mask)
 extern PFN_vkCmdBindInvocationMaskHUAWEI vkCmdBindInvocationMaskHUAWEI;
 #endif /* defined(VK_HUAWEI_invocation_mask) */
@@ -1131,6 +1462,9 @@ extern PFN_vkGetBufferDeviceAddressKHR vkGetBufferDeviceAddressKHR;
 extern PFN_vkGetBufferOpaqueCaptureAddressKHR vkGetBufferOpaqueCaptureAddressKHR;
 extern PFN_vkGetDeviceMemoryOpaqueCaptureAddressKHR vkGetDeviceMemoryOpaqueCaptureAddressKHR;
 #endif /* defined(VK_KHR_buffer_device_address) */
+#if defined(VK_KHR_cooperative_matrix)
+extern PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR vkGetPhysicalDeviceCooperativeMatrixPropertiesKHR;
+#endif /* defined(VK_KHR_cooperative_matrix) */
 #if defined(VK_KHR_copy_commands2)
 extern PFN_vkCmdBlitImage2KHR vkCmdBlitImage2KHR;
 extern PFN_vkCmdCopyBuffer2KHR vkCmdCopyBuffer2KHR;
@@ -1257,6 +1591,16 @@ extern PFN_vkGetDeviceBufferMemoryRequirementsKHR vkGetDeviceBufferMemoryRequire
 extern PFN_vkGetDeviceImageMemoryRequirementsKHR vkGetDeviceImageMemoryRequirementsKHR;
 extern PFN_vkGetDeviceImageSparseMemoryRequirementsKHR vkGetDeviceImageSparseMemoryRequirementsKHR;
 #endif /* defined(VK_KHR_maintenance4) */
+#if defined(VK_KHR_maintenance5)
+extern PFN_vkCmdBindIndexBuffer2KHR vkCmdBindIndexBuffer2KHR;
+extern PFN_vkGetDeviceImageSubresourceLayoutKHR vkGetDeviceImageSubresourceLayoutKHR;
+extern PFN_vkGetImageSubresourceLayout2KHR vkGetImageSubresourceLayout2KHR;
+extern PFN_vkGetRenderingAreaGranularityKHR vkGetRenderingAreaGranularityKHR;
+#endif /* defined(VK_KHR_maintenance5) */
+#if defined(VK_KHR_map_memory2)
+extern PFN_vkMapMemory2KHR vkMapMemory2KHR;
+extern PFN_vkUnmapMemory2KHR vkUnmapMemory2KHR;
+#endif /* defined(VK_KHR_map_memory2) */
 #if defined(VK_KHR_performance_query)
 extern PFN_vkAcquireProfilingLockKHR vkAcquireProfilingLockKHR;
 extern PFN_vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR vkEnumeratePhysicalDeviceQueueFamilyPerformanceQueryCountersKHR;
@@ -1274,6 +1618,9 @@ extern PFN_vkWaitForPresentKHR vkWaitForPresentKHR;
 #if defined(VK_KHR_push_descriptor)
 extern PFN_vkCmdPushDescriptorSetKHR vkCmdPushDescriptorSetKHR;
 #endif /* defined(VK_KHR_push_descriptor) */
+#if defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline)
+extern PFN_vkCmdTraceRaysIndirect2KHR vkCmdTraceRaysIndirect2KHR;
+#endif /* defined(VK_KHR_ray_tracing_maintenance1) && defined(VK_KHR_ray_tracing_pipeline) */
 #if defined(VK_KHR_ray_tracing_pipeline)
 extern PFN_vkCmdSetRayTracingPipelineStackSizeKHR vkCmdSetRayTracingPipelineStackSizeKHR;
 extern PFN_vkCmdTraceRaysIndirectKHR vkCmdTraceRaysIndirectKHR;
@@ -1328,6 +1675,8 @@ extern PFN_vkCmdDecodeVideoKHR vkCmdDecodeVideoKHR;
 #endif /* defined(VK_KHR_video_decode_queue) */
 #if defined(VK_KHR_video_encode_queue)
 extern PFN_vkCmdEncodeVideoKHR vkCmdEncodeVideoKHR;
+extern PFN_vkGetEncodedVideoSessionParametersKHR vkGetEncodedVideoSessionParametersKHR;
+extern PFN_vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR vkGetPhysicalDeviceVideoEncodeQualityLevelPropertiesKHR;
 #endif /* defined(VK_KHR_video_encode_queue) */
 #if defined(VK_KHR_video_queue)
 extern PFN_vkBindVideoSessionMemoryKHR vkBindVideoSessionMemoryKHR;
@@ -1389,6 +1738,10 @@ extern PFN_vkCmdSetViewportWScalingNV vkCmdSetViewportWScalingNV;
 #if defined(VK_NV_cooperative_matrix)
 extern PFN_vkGetPhysicalDeviceCooperativeMatrixPropertiesNV vkGetPhysicalDeviceCooperativeMatrixPropertiesNV;
 #endif /* defined(VK_NV_cooperative_matrix) */
+#if defined(VK_NV_copy_memory_indirect)
+extern PFN_vkCmdCopyMemoryIndirectNV vkCmdCopyMemoryIndirectNV;
+extern PFN_vkCmdCopyMemoryToImageIndirectNV vkCmdCopyMemoryToImageIndirectNV;
+#endif /* defined(VK_NV_copy_memory_indirect) */
 #if defined(VK_NV_coverage_reduction_mode)
 extern PFN_vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV vkGetPhysicalDeviceSupportedFramebufferMixedSamplesCombinationsNV;
 #endif /* defined(VK_NV_coverage_reduction_mode) */
@@ -1404,6 +1757,11 @@ extern PFN_vkCreateIndirectCommandsLayoutNV vkCreateIndirectCommandsLayoutNV;
 extern PFN_vkDestroyIndirectCommandsLayoutNV vkDestroyIndirectCommandsLayoutNV;
 extern PFN_vkGetGeneratedCommandsMemoryRequirementsNV vkGetGeneratedCommandsMemoryRequirementsNV;
 #endif /* defined(VK_NV_device_generated_commands) */
+#if defined(VK_NV_device_generated_commands_compute)
+extern PFN_vkCmdUpdatePipelineIndirectBufferNV vkCmdUpdatePipelineIndirectBufferNV;
+extern PFN_vkGetPipelineIndirectDeviceAddressNV vkGetPipelineIndirectDeviceAddressNV;
+extern PFN_vkGetPipelineIndirectMemoryRequirementsNV vkGetPipelineIndirectMemoryRequirementsNV;
+#endif /* defined(VK_NV_device_generated_commands_compute) */
 #if defined(VK_NV_external_memory_capabilities)
 extern PFN_vkGetPhysicalDeviceExternalImageFormatPropertiesNV vkGetPhysicalDeviceExternalImageFormatPropertiesNV;
 #endif /* defined(VK_NV_external_memory_capabilities) */
@@ -1416,11 +1774,29 @@ extern PFN_vkGetMemoryWin32HandleNV vkGetMemoryWin32HandleNV;
 #if defined(VK_NV_fragment_shading_rate_enums)
 extern PFN_vkCmdSetFragmentShadingRateEnumNV vkCmdSetFragmentShadingRateEnumNV;
 #endif /* defined(VK_NV_fragment_shading_rate_enums) */
+#if defined(VK_NV_low_latency2)
+extern PFN_vkGetLatencyTimingsNV vkGetLatencyTimingsNV;
+extern PFN_vkLatencySleepNV vkLatencySleepNV;
+extern PFN_vkQueueNotifyOutOfBandNV vkQueueNotifyOutOfBandNV;
+extern PFN_vkSetLatencyMarkerNV vkSetLatencyMarkerNV;
+extern PFN_vkSetLatencySleepModeNV vkSetLatencySleepModeNV;
+#endif /* defined(VK_NV_low_latency2) */
+#if defined(VK_NV_memory_decompression)
+extern PFN_vkCmdDecompressMemoryIndirectCountNV vkCmdDecompressMemoryIndirectCountNV;
+extern PFN_vkCmdDecompressMemoryNV vkCmdDecompressMemoryNV;
+#endif /* defined(VK_NV_memory_decompression) */
 #if defined(VK_NV_mesh_shader)
 extern PFN_vkCmdDrawMeshTasksIndirectCountNV vkCmdDrawMeshTasksIndirectCountNV;
 extern PFN_vkCmdDrawMeshTasksIndirectNV vkCmdDrawMeshTasksIndirectNV;
 extern PFN_vkCmdDrawMeshTasksNV vkCmdDrawMeshTasksNV;
 #endif /* defined(VK_NV_mesh_shader) */
+#if defined(VK_NV_optical_flow)
+extern PFN_vkBindOpticalFlowSessionImageNV vkBindOpticalFlowSessionImageNV;
+extern PFN_vkCmdOpticalFlowExecuteNV vkCmdOpticalFlowExecuteNV;
+extern PFN_vkCreateOpticalFlowSessionNV vkCreateOpticalFlowSessionNV;
+extern PFN_vkDestroyOpticalFlowSessionNV vkDestroyOpticalFlowSessionNV;
+extern PFN_vkGetPhysicalDeviceOpticalFlowImageFormatsNV vkGetPhysicalDeviceOpticalFlowImageFormatsNV;
+#endif /* defined(VK_NV_optical_flow) */
 #if defined(VK_NV_ray_tracing)
 extern PFN_vkBindAccelerationStructureMemoryNV vkBindAccelerationStructureMemoryNV;
 extern PFN_vkCmdBuildAccelerationStructureNV vkCmdBuildAccelerationStructureNV;
@@ -1435,6 +1811,9 @@ extern PFN_vkGetAccelerationStructureHandleNV vkGetAccelerationStructureHandleNV
 extern PFN_vkGetAccelerationStructureMemoryRequirementsNV vkGetAccelerationStructureMemoryRequirementsNV;
 extern PFN_vkGetRayTracingShaderGroupHandlesNV vkGetRayTracingShaderGroupHandlesNV;
 #endif /* defined(VK_NV_ray_tracing) */
+#if defined(VK_NV_scissor_exclusive) && VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION >= 2
+extern PFN_vkCmdSetExclusiveScissorEnableNV vkCmdSetExclusiveScissorEnableNV;
+#endif /* defined(VK_NV_scissor_exclusive) && VK_NV_SCISSOR_EXCLUSIVE_SPEC_VERSION >= 2 */
 #if defined(VK_NV_scissor_exclusive)
 extern PFN_vkCmdSetExclusiveScissorNV vkCmdSetExclusiveScissorNV;
 #endif /* defined(VK_NV_scissor_exclusive) */
@@ -1443,13 +1822,98 @@ extern PFN_vkCmdBindShadingRateImageNV vkCmdBindShadingRateImageNV;
 extern PFN_vkCmdSetCoarseSampleOrderNV vkCmdSetCoarseSampleOrderNV;
 extern PFN_vkCmdSetViewportShadingRatePaletteNV vkCmdSetViewportShadingRatePaletteNV;
 #endif /* defined(VK_NV_shading_rate_image) */
+#if defined(VK_QCOM_tile_properties)
+extern PFN_vkGetDynamicRenderingTilePropertiesQCOM vkGetDynamicRenderingTilePropertiesQCOM;
+extern PFN_vkGetFramebufferTilePropertiesQCOM vkGetFramebufferTilePropertiesQCOM;
+#endif /* defined(VK_QCOM_tile_properties) */
+#if defined(VK_QNX_external_memory_screen_buffer)
+extern PFN_vkGetScreenBufferPropertiesQNX vkGetScreenBufferPropertiesQNX;
+#endif /* defined(VK_QNX_external_memory_screen_buffer) */
 #if defined(VK_QNX_screen_surface)
 extern PFN_vkCreateScreenSurfaceQNX vkCreateScreenSurfaceQNX;
 extern PFN_vkGetPhysicalDeviceScreenPresentationSupportQNX vkGetPhysicalDeviceScreenPresentationSupportQNX;
 #endif /* defined(VK_QNX_screen_surface) */
+#if defined(VK_VALVE_descriptor_set_host_mapping)
+extern PFN_vkGetDescriptorSetHostMappingVALVE vkGetDescriptorSetHostMappingVALVE;
+extern PFN_vkGetDescriptorSetLayoutHostMappingInfoVALVE vkGetDescriptorSetLayoutHostMappingInfoVALVE;
+#endif /* defined(VK_VALVE_descriptor_set_host_mapping) */
+#if (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object))
+extern PFN_vkCmdBindVertexBuffers2EXT vkCmdBindVertexBuffers2EXT;
+extern PFN_vkCmdSetCullModeEXT vkCmdSetCullModeEXT;
+extern PFN_vkCmdSetDepthBoundsTestEnableEXT vkCmdSetDepthBoundsTestEnableEXT;
+extern PFN_vkCmdSetDepthCompareOpEXT vkCmdSetDepthCompareOpEXT;
+extern PFN_vkCmdSetDepthTestEnableEXT vkCmdSetDepthTestEnableEXT;
+extern PFN_vkCmdSetDepthWriteEnableEXT vkCmdSetDepthWriteEnableEXT;
+extern PFN_vkCmdSetFrontFaceEXT vkCmdSetFrontFaceEXT;
+extern PFN_vkCmdSetPrimitiveTopologyEXT vkCmdSetPrimitiveTopologyEXT;
+extern PFN_vkCmdSetScissorWithCountEXT vkCmdSetScissorWithCountEXT;
+extern PFN_vkCmdSetStencilOpEXT vkCmdSetStencilOpEXT;
+extern PFN_vkCmdSetStencilTestEnableEXT vkCmdSetStencilTestEnableEXT;
+extern PFN_vkCmdSetViewportWithCountEXT vkCmdSetViewportWithCountEXT;
+#endif /* (defined(VK_EXT_extended_dynamic_state)) || (defined(VK_EXT_shader_object)) */
+#if (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object))
+extern PFN_vkCmdSetDepthBiasEnableEXT vkCmdSetDepthBiasEnableEXT;
+extern PFN_vkCmdSetLogicOpEXT vkCmdSetLogicOpEXT;
+extern PFN_vkCmdSetPatchControlPointsEXT vkCmdSetPatchControlPointsEXT;
+extern PFN_vkCmdSetPrimitiveRestartEnableEXT vkCmdSetPrimitiveRestartEnableEXT;
+extern PFN_vkCmdSetRasterizerDiscardEnableEXT vkCmdSetRasterizerDiscardEnableEXT;
+#endif /* (defined(VK_EXT_extended_dynamic_state2)) || (defined(VK_EXT_shader_object)) */
+#if (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object))
+extern PFN_vkCmdSetAlphaToCoverageEnableEXT vkCmdSetAlphaToCoverageEnableEXT;
+extern PFN_vkCmdSetAlphaToOneEnableEXT vkCmdSetAlphaToOneEnableEXT;
+extern PFN_vkCmdSetColorBlendAdvancedEXT vkCmdSetColorBlendAdvancedEXT;
+extern PFN_vkCmdSetColorBlendEnableEXT vkCmdSetColorBlendEnableEXT;
+extern PFN_vkCmdSetColorBlendEquationEXT vkCmdSetColorBlendEquationEXT;
+extern PFN_vkCmdSetColorWriteMaskEXT vkCmdSetColorWriteMaskEXT;
+extern PFN_vkCmdSetConservativeRasterizationModeEXT vkCmdSetConservativeRasterizationModeEXT;
+extern PFN_vkCmdSetDepthClampEnableEXT vkCmdSetDepthClampEnableEXT;
+extern PFN_vkCmdSetDepthClipEnableEXT vkCmdSetDepthClipEnableEXT;
+extern PFN_vkCmdSetDepthClipNegativeOneToOneEXT vkCmdSetDepthClipNegativeOneToOneEXT;
+extern PFN_vkCmdSetExtraPrimitiveOverestimationSizeEXT vkCmdSetExtraPrimitiveOverestimationSizeEXT;
+extern PFN_vkCmdSetLineRasterizationModeEXT vkCmdSetLineRasterizationModeEXT;
+extern PFN_vkCmdSetLineStippleEnableEXT vkCmdSetLineStippleEnableEXT;
+extern PFN_vkCmdSetLogicOpEnableEXT vkCmdSetLogicOpEnableEXT;
+extern PFN_vkCmdSetPolygonModeEXT vkCmdSetPolygonModeEXT;
+extern PFN_vkCmdSetProvokingVertexModeEXT vkCmdSetProvokingVertexModeEXT;
+extern PFN_vkCmdSetRasterizationSamplesEXT vkCmdSetRasterizationSamplesEXT;
+extern PFN_vkCmdSetRasterizationStreamEXT vkCmdSetRasterizationStreamEXT;
+extern PFN_vkCmdSetSampleLocationsEnableEXT vkCmdSetSampleLocationsEnableEXT;
+extern PFN_vkCmdSetSampleMaskEXT vkCmdSetSampleMaskEXT;
+extern PFN_vkCmdSetTessellationDomainOriginEXT vkCmdSetTessellationDomainOriginEXT;
+#endif /* (defined(VK_EXT_extended_dynamic_state3)) || (defined(VK_EXT_shader_object)) */
+#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_clip_space_w_scaling)) || (defined(VK_EXT_shader_object) && defined(VK_NV_clip_space_w_scaling))
+extern PFN_vkCmdSetViewportWScalingEnableNV vkCmdSetViewportWScalingEnableNV;
+#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_clip_space_w_scaling)) || (defined(VK_EXT_shader_object) && defined(VK_NV_clip_space_w_scaling)) */
+#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_viewport_swizzle)) || (defined(VK_EXT_shader_object) && defined(VK_NV_viewport_swizzle))
+extern PFN_vkCmdSetViewportSwizzleNV vkCmdSetViewportSwizzleNV;
+#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_viewport_swizzle)) || (defined(VK_EXT_shader_object) && defined(VK_NV_viewport_swizzle)) */
+#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_fragment_coverage_to_color)) || (defined(VK_EXT_shader_object) && defined(VK_NV_fragment_coverage_to_color))
+extern PFN_vkCmdSetCoverageToColorEnableNV vkCmdSetCoverageToColorEnableNV;
+extern PFN_vkCmdSetCoverageToColorLocationNV vkCmdSetCoverageToColorLocationNV;
+#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_fragment_coverage_to_color)) || (defined(VK_EXT_shader_object) && defined(VK_NV_fragment_coverage_to_color)) */
+#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_framebuffer_mixed_samples)) || (defined(VK_EXT_shader_object) && defined(VK_NV_framebuffer_mixed_samples))
+extern PFN_vkCmdSetCoverageModulationModeNV vkCmdSetCoverageModulationModeNV;
+extern PFN_vkCmdSetCoverageModulationTableEnableNV vkCmdSetCoverageModulationTableEnableNV;
+extern PFN_vkCmdSetCoverageModulationTableNV vkCmdSetCoverageModulationTableNV;
+#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_framebuffer_mixed_samples)) || (defined(VK_EXT_shader_object) && defined(VK_NV_framebuffer_mixed_samples)) */
+#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_shading_rate_image)) || (defined(VK_EXT_shader_object) && defined(VK_NV_shading_rate_image))
+extern PFN_vkCmdSetShadingRateImageEnableNV vkCmdSetShadingRateImageEnableNV;
+#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_shading_rate_image)) || (defined(VK_EXT_shader_object) && defined(VK_NV_shading_rate_image)) */
+#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_representative_fragment_test)) || (defined(VK_EXT_shader_object) && defined(VK_NV_representative_fragment_test))
+extern PFN_vkCmdSetRepresentativeFragmentTestEnableNV vkCmdSetRepresentativeFragmentTestEnableNV;
+#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_representative_fragment_test)) || (defined(VK_EXT_shader_object) && defined(VK_NV_representative_fragment_test)) */
+#if (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_coverage_reduction_mode)) || (defined(VK_EXT_shader_object) && defined(VK_NV_coverage_reduction_mode))
+extern PFN_vkCmdSetCoverageReductionModeNV vkCmdSetCoverageReductionModeNV;
+#endif /* (defined(VK_EXT_extended_dynamic_state3) && defined(VK_NV_coverage_reduction_mode)) || (defined(VK_EXT_shader_object) && defined(VK_NV_coverage_reduction_mode)) */
 #if (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1))
 extern PFN_vkGetDeviceGroupSurfacePresentModes2EXT vkGetDeviceGroupSurfacePresentModes2EXT;
 #endif /* (defined(VK_EXT_full_screen_exclusive) && defined(VK_KHR_device_group)) || (defined(VK_EXT_full_screen_exclusive) && defined(VK_VERSION_1_1)) */
+#if (defined(VK_EXT_host_image_copy)) || (defined(VK_EXT_image_compression_control))
+extern PFN_vkGetImageSubresourceLayout2EXT vkGetImageSubresourceLayout2EXT;
+#endif /* (defined(VK_EXT_host_image_copy)) || (defined(VK_EXT_image_compression_control)) */
+#if (defined(VK_EXT_shader_object)) || (defined(VK_EXT_vertex_input_dynamic_state))
+extern PFN_vkCmdSetVertexInputEXT vkCmdSetVertexInputEXT;
+#endif /* (defined(VK_EXT_shader_object)) || (defined(VK_EXT_vertex_input_dynamic_state)) */
 #if (defined(VK_KHR_descriptor_update_template) && defined(VK_KHR_push_descriptor)) || (defined(VK_KHR_push_descriptor) && defined(VK_VERSION_1_1)) || (defined(VK_KHR_push_descriptor) && defined(VK_KHR_descriptor_update_template))
 extern PFN_vkCmdPushDescriptorSetWithTemplateKHR vkCmdPushDescriptorSetWithTemplateKHR;
 #endif /* (defined(VK_KHR_descriptor_update_template) && defined(VK_KHR_push_descriptor)) || (defined(VK_KHR_push_descriptor) && defined(VK_VERSION_1_1)) || (defined(VK_KHR_push_descriptor) && defined(VK_KHR_descriptor_update_template)) */
@@ -1478,7 +1942,7 @@ extern PFN_vkAcquireNextImage2KHR vkAcquireNextImage2KHR;
 #endif
 
 /**
- * Copyright (c) 2018-2019 Arseny Kapoulkine
+ * Copyright (c) 2018-2023 Arseny Kapoulkine
  *
  * Permission is hereby granted, free of charge, to any person obtaining a copy
  * of this software and associated documentation files (the "Software"), to deal

Some files were not shown because too many files changed in this diff