@@ -1383,6 +1383,15 @@ Error RenderingDeviceDriverVulkan::initialize(uint32_t p_device_index, uint32_t
 	physical_device = context_driver->physical_device_get(p_device_index);
 	vkGetPhysicalDeviceProperties(physical_device, &physical_device_properties);
 
+	// Work around a driver bug on Adreno 730 GPUs that leaks memory on each call to vkResetDescriptorPool,
+	// which eventually runs out of memory. In that case we should not be using linearly allocated pools.
+	// Bug introduced in driver 512.597.0 and fixed in 512.671.0.
+	// Confirmed by Qualcomm.
+	if (linear_descriptor_pools_enabled) {
+		const uint32_t reset_descriptor_pool_broken_driver_begin = VK_MAKE_VERSION(512u, 597u, 0u);
+		const uint32_t reset_descriptor_pool_fixed_driver_begin = VK_MAKE_VERSION(512u, 671u, 0u);
+		linear_descriptor_pools_enabled = physical_device_properties.driverVersion < reset_descriptor_pool_broken_driver_begin || physical_device_properties.driverVersion > reset_descriptor_pool_fixed_driver_begin;
+	}
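+		// i.e. linear pools stay enabled only when the reported driver version is older than the
+		// first broken build or newer than the first fixed build.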
 	frame_count = p_frame_count;
 
 	// Copy the queue family properties the context already retrieved.
@@ -1728,7 +1737,27 @@ RDD::TextureID RenderingDeviceDriverVulkan::texture_create(const TextureFormat &
 	VmaAllocationCreateInfo alloc_create_info = {};
 	alloc_create_info.flags = (p_format.usage_bits & TEXTURE_USAGE_CPU_READ_BIT) ? VMA_ALLOCATION_CREATE_HOST_ACCESS_RANDOM_BIT : 0;
-	alloc_create_info.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+
+	if (p_format.usage_bits & TEXTURE_USAGE_TRANSIENT_BIT) {
+		uint32_t memory_type_index = 0;
+		VmaAllocationCreateInfo lazy_memory_requirements = alloc_create_info;
+		lazy_memory_requirements.usage = VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED;
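+		// VMA_MEMORY_USAGE_GPU_LAZILY_ALLOCATED maps to VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT memory,
+		// which tile-based GPUs can back with on-chip tile memory; if no such memory type exists,
+		// the allocation falls back to regular device-local memory below.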
+		VkResult result = vmaFindMemoryTypeIndex(allocator, UINT32_MAX, &lazy_memory_requirements, &memory_type_index);
+		if (VK_SUCCESS == result) {
+			alloc_create_info = lazy_memory_requirements;
+			create_info.usage |= VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT;
+			// VUID-VkImageCreateInfo-usage-00963:
+			// If usage includes VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT,
+			// then bits other than VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT, VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT,
+			// and VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT must not be set.
+			create_info.usage &= (VK_IMAGE_USAGE_TRANSIENT_ATTACHMENT_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_INPUT_ATTACHMENT_BIT);
+		} else {
+			alloc_create_info.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+		}
+	} else {
+		alloc_create_info.preferredFlags = VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
+	}
+
 	if (image_size <= SMALL_ALLOCATION_MAX_SIZE) {
 		uint32_t mem_type_index = 0;
 		vmaFindMemoryTypeIndexForImageInfo(allocator, &create_info, &alloc_create_info, &mem_type_index);
@@ -1794,6 +1823,9 @@ RDD::TextureID RenderingDeviceDriverVulkan::texture_create(const TextureFormat &
 	tex_info->vk_create_info = create_info;
 	tex_info->vk_view_create_info = image_view_create_info;
 	tex_info->allocation.handle = allocation;
+#ifdef DEBUG_ENABLED
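+	// Only tracked in debug builds, so the checks in the copy/clear/sampling paths can report invalid uses of transient textures.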
+	tex_info->transient = (p_format.usage_bits & TEXTURE_USAGE_TRANSIENT_BIT) != 0;
+#endif
 	vmaGetAllocationInfo(allocator, tex_info->allocation.handle, &tex_info->allocation.info);
 
 #if PRINT_NATIVE_COMMANDS
@@ -2659,7 +2691,10 @@ RDD::CommandPoolID RenderingDeviceDriverVulkan::command_pool_create(CommandQueue
 	VkCommandPoolCreateInfo cmd_pool_info = {};
 	cmd_pool_info.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
 	cmd_pool_info.queueFamilyIndex = family_index;
-	cmd_pool_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+
+	if (!command_pool_reset_enabled) {
+		cmd_pool_info.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
+	}
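+	// When pool-level reset is available, command buffers are recycled through command_pool_reset()
+	// (vkResetCommandPool), so the per-buffer reset flag is not needed.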
 
 	VkCommandPool vk_command_pool = VK_NULL_HANDLE;
 	VkResult res = vkCreateCommandPool(vk_device, &cmd_pool_info, VKC::get_allocation_callbacks(VK_OBJECT_TYPE_COMMAND_POOL), &vk_command_pool);
@@ -2671,6 +2706,16 @@ RDD::CommandPoolID RenderingDeviceDriverVulkan::command_pool_create(CommandQueue
 	return CommandPoolID(command_pool);
 }
 
+bool RenderingDeviceDriverVulkan::command_pool_reset(CommandPoolID p_cmd_pool) {
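+	// Recycles every command buffer allocated from the pool in a single call instead of resetting them one by one.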
+	DEV_ASSERT(p_cmd_pool);
+
+	CommandPool *command_pool = (CommandPool *)(p_cmd_pool.id);
+	VkResult err = vkResetCommandPool(vk_device, command_pool->vk_command_pool, 0);
+	ERR_FAIL_COND_V_MSG(err, false, "vkResetCommandPool failed with error " + itos(err) + ".");
+
+	return true;
+}
+
 void RenderingDeviceDriverVulkan::command_pool_free(CommandPoolID p_cmd_pool) {
 	DEV_ASSERT(p_cmd_pool);
 
@@ -2704,8 +2749,6 @@ RDD::CommandBufferID RenderingDeviceDriverVulkan::command_buffer_create(CommandP
 }
 
 bool RenderingDeviceDriverVulkan::command_buffer_begin(CommandBufferID p_cmd_buffer) {
-	// Reset is implicit (VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT).
-
 	VkCommandBufferBeginInfo cmd_buf_begin_info = {};
 	cmd_buf_begin_info.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
 	cmd_buf_begin_info.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
@@ -2717,8 +2760,6 @@ bool RenderingDeviceDriverVulkan::command_buffer_begin(CommandBufferID p_cmd_buf
 }
 
 bool RenderingDeviceDriverVulkan::command_buffer_begin_secondary(CommandBufferID p_cmd_buffer, RenderPassID p_render_pass, uint32_t p_subpass, FramebufferID p_framebuffer) {
-	// Reset is implicit (VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT).
-
 	Framebuffer *framebuffer = (Framebuffer *)(p_framebuffer.id);
 
 	VkCommandBufferInheritanceInfo inheritance_info = {};
@@ -3477,7 +3518,7 @@ Vector<uint8_t> RenderingDeviceDriverVulkan::shader_compile_binary_from_spirv(Ve
 	return ret;
 }
 
-RDD::ShaderID RenderingDeviceDriverVulkan::shader_create_from_bytecode(const Vector<uint8_t> &p_shader_binary, ShaderDescription &r_shader_desc, String &r_name) {
+RDD::ShaderID RenderingDeviceDriverVulkan::shader_create_from_bytecode(const Vector<uint8_t> &p_shader_binary, ShaderDescription &r_shader_desc, String &r_name, const Vector<ImmutableSampler> &p_immutable_samplers) {
 	r_shader_desc = {}; // Driver-agnostic.
 	ShaderInfo shader_info; // Driver-specific.
 
@@ -3549,6 +3590,19 @@ RDD::ShaderID RenderingDeviceDriverVulkan::shader_create_from_bytecode(const Vec
 				case UNIFORM_TYPE_SAMPLER: {
 					layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
 					layout_binding.descriptorCount = set_ptr[j].length;
+					// Immutable samplers: here they get set in the layout binding, given that they will not be changed later.
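+					// Matching BoundUniform entries are skipped in uniform_set_create(), so no descriptor writes are generated for these bindings.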
+					int immutable_bind_index = -1;
+					if (immutable_samplers_enabled && p_immutable_samplers.size() > 0) {
+						for (int k = 0; k < p_immutable_samplers.size(); k++) {
+							if (p_immutable_samplers[k].binding == layout_binding.binding) {
+								immutable_bind_index = k;
+								break;
+							}
+						}
+						if (immutable_bind_index >= 0) {
+							layout_binding.pImmutableSamplers = (VkSampler *)&p_immutable_samplers[immutable_bind_index].ids[0].id;
+						}
+					}
 				} break;
 				case UNIFORM_TYPE_SAMPLER_WITH_TEXTURE: {
 					layout_binding.descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
@@ -3770,9 +3824,9 @@ void RenderingDeviceDriverVulkan::shader_destroy_modules(ShaderID p_shader) {
 /*********************/
 /**** UNIFORM SET ****/
 /*********************/
-
-VkDescriptorPool RenderingDeviceDriverVulkan::_descriptor_set_pool_find_or_create(const DescriptorSetPoolKey &p_key, DescriptorSetPools::Iterator *r_pool_sets_it) {
-	DescriptorSetPools::Iterator pool_sets_it = descriptor_set_pools.find(p_key);
+VkDescriptorPool RenderingDeviceDriverVulkan::_descriptor_set_pool_find_or_create(const DescriptorSetPoolKey &p_key, DescriptorSetPools::Iterator *r_pool_sets_it, int p_linear_pool_index) {
+	bool linear_pool = p_linear_pool_index >= 0;
+	DescriptorSetPools::Iterator pool_sets_it = linear_pool ? linear_descriptor_set_pools[p_linear_pool_index].find(p_key) : descriptor_set_pools.find(p_key);
 
 	if (pool_sets_it) {
 		for (KeyValue<VkDescriptorPool, uint32_t> &E : pool_sets_it->value) {
@@ -3858,7 +3912,11 @@ VkDescriptorPool RenderingDeviceDriverVulkan::_descriptor_set_pool_find_or_creat
 
 	VkDescriptorPoolCreateInfo descriptor_set_pool_create_info = {};
 	descriptor_set_pool_create_info.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
-	descriptor_set_pool_create_info.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT; // Can't think how somebody may NOT need this flag.
+	if (linear_descriptor_pools_enabled && linear_pool) {
+		descriptor_set_pool_create_info.flags = 0;
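+		// Linear pools never free individual sets; they are reclaimed all at once with vkResetDescriptorPool,
+		// so VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT is deliberately left out.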
+	} else {
+		descriptor_set_pool_create_info.flags = VK_DESCRIPTOR_POOL_CREATE_FREE_DESCRIPTOR_SET_BIT; // Can't think how somebody may NOT need this flag.
+	}
 	descriptor_set_pool_create_info.maxSets = max_descriptor_sets_per_pool;
 	descriptor_set_pool_create_info.poolSizeCount = vk_sizes_count;
 	descriptor_set_pool_create_info.pPoolSizes = vk_sizes;
@@ -3872,7 +3930,11 @@ VkDescriptorPool RenderingDeviceDriverVulkan::_descriptor_set_pool_find_or_creat
 	// Bookkeep.
 
 	if (!pool_sets_it) {
-		pool_sets_it = descriptor_set_pools.insert(p_key, HashMap<VkDescriptorPool, uint32_t>());
+		if (linear_pool) {
+			pool_sets_it = linear_descriptor_set_pools[p_linear_pool_index].insert(p_key, HashMap<VkDescriptorPool, uint32_t>());
+		} else {
+			pool_sets_it = descriptor_set_pools.insert(p_key, HashMap<VkDescriptorPool, uint32_t>());
+		}
 	}
 	HashMap<VkDescriptorPool, uint32_t> &pool_rcs = pool_sets_it->value;
 	pool_rcs.insert(vk_pool, 0);
@@ -3880,34 +3942,43 @@ VkDescriptorPool RenderingDeviceDriverVulkan::_descriptor_set_pool_find_or_creat
 	return vk_pool;
 }
 
-void RenderingDeviceDriverVulkan::_descriptor_set_pool_unreference(DescriptorSetPools::Iterator p_pool_sets_it, VkDescriptorPool p_vk_descriptor_pool) {
+void RenderingDeviceDriverVulkan::_descriptor_set_pool_unreference(DescriptorSetPools::Iterator p_pool_sets_it, VkDescriptorPool p_vk_descriptor_pool, int p_linear_pool_index) {
 	HashMap<VkDescriptorPool, uint32_t>::Iterator pool_rcs_it = p_pool_sets_it->value.find(p_vk_descriptor_pool);
 	pool_rcs_it->value--;
 	if (pool_rcs_it->value == 0) {
 		vkDestroyDescriptorPool(vk_device, p_vk_descriptor_pool, VKC::get_allocation_callbacks(VK_OBJECT_TYPE_DESCRIPTOR_POOL));
 		p_pool_sets_it->value.erase(p_vk_descriptor_pool);
 		if (p_pool_sets_it->value.is_empty()) {
-			descriptor_set_pools.remove(p_pool_sets_it);
+			if (linear_descriptor_pools_enabled && p_linear_pool_index >= 0) {
+				linear_descriptor_set_pools[p_linear_pool_index].remove(p_pool_sets_it);
+			} else {
+				descriptor_set_pools.remove(p_pool_sets_it);
+			}
 		}
 	}
 }
 
-RDD::UniformSetID RenderingDeviceDriverVulkan::uniform_set_create(VectorView<BoundUniform> p_uniforms, ShaderID p_shader, uint32_t p_set_index) {
+RDD::UniformSetID RenderingDeviceDriverVulkan::uniform_set_create(VectorView<BoundUniform> p_uniforms, ShaderID p_shader, uint32_t p_set_index, int p_linear_pool_index) {
+	if (!linear_descriptor_pools_enabled) {
+		p_linear_pool_index = -1;
+	}
 	DescriptorSetPoolKey pool_key;
-
+	// Immutable samplers will be skipped, so we need to track the number of vk_writes actually used.
 	VkWriteDescriptorSet *vk_writes = ALLOCA_ARRAY(VkWriteDescriptorSet, p_uniforms.size());
+	uint32_t writes_amount = 0;
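+	// Bindings backed by immutable samplers produce no write, so writes_amount can end up smaller than p_uniforms.size().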
 	for (uint32_t i = 0; i < p_uniforms.size(); i++) {
 		const BoundUniform &uniform = p_uniforms[i];
 
-		vk_writes[i] = {};
-		vk_writes[i].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
-		vk_writes[i].dstBinding = uniform.binding;
-		vk_writes[i].descriptorType = VK_DESCRIPTOR_TYPE_MAX_ENUM; // Invalid value.
+		vk_writes[writes_amount] = {};
+		vk_writes[writes_amount].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
 
 		uint32_t num_descriptors = 1;
 
 		switch (uniform.type) {
 			case UNIFORM_TYPE_SAMPLER: {
+				if (uniform.immutable_sampler && immutable_samplers_enabled) {
+					continue; // Skipping immutable samplers.
+				}
 				num_descriptors = uniform.ids.size();
 				VkDescriptorImageInfo *vk_img_infos = ALLOCA_ARRAY(VkDescriptorImageInfo, num_descriptors);
@@ -3918,48 +3989,63 @@ RDD::UniformSetID RenderingDeviceDriverVulkan::uniform_set_create(VectorView<Bou
 					vk_img_infos[j].imageLayout = VK_IMAGE_LAYOUT_UNDEFINED;
 				}
 
-				vk_writes[i].descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
-				vk_writes[i].pImageInfo = vk_img_infos;
+				vk_writes[writes_amount].descriptorType = VK_DESCRIPTOR_TYPE_SAMPLER;
+				vk_writes[writes_amount].pImageInfo = vk_img_infos;
 			} break;
 			case UNIFORM_TYPE_SAMPLER_WITH_TEXTURE: {
 				num_descriptors = uniform.ids.size() / 2;
 				VkDescriptorImageInfo *vk_img_infos = ALLOCA_ARRAY(VkDescriptorImageInfo, num_descriptors);
 
 				for (uint32_t j = 0; j < num_descriptors; j++) {
+#ifdef DEBUG_ENABLED
+					if (((const TextureInfo *)uniform.ids[j * 2 + 1].id)->transient) {
+						ERR_PRINT("TEXTURE_USAGE_TRANSIENT_BIT texture must not be used for sampling in a shader.");
+					}
+#endif
 					vk_img_infos[j] = {};
 					vk_img_infos[j].sampler = (VkSampler)uniform.ids[j * 2 + 0].id;
 					vk_img_infos[j].imageView = ((const TextureInfo *)uniform.ids[j * 2 + 1].id)->vk_view;
 					vk_img_infos[j].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
 				}
 
-				vk_writes[i].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
-				vk_writes[i].pImageInfo = vk_img_infos;
+				vk_writes[writes_amount].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
+				vk_writes[writes_amount].pImageInfo = vk_img_infos;
 			} break;
 			case UNIFORM_TYPE_TEXTURE: {
 				num_descriptors = uniform.ids.size();
 				VkDescriptorImageInfo *vk_img_infos = ALLOCA_ARRAY(VkDescriptorImageInfo, num_descriptors);
 
 				for (uint32_t j = 0; j < num_descriptors; j++) {
+#ifdef DEBUG_ENABLED
+					if (((const TextureInfo *)uniform.ids[j].id)->transient) {
+						ERR_PRINT("TEXTURE_USAGE_TRANSIENT_BIT texture must not be used for sampling in a shader.");
+					}
+#endif
 					vk_img_infos[j] = {};
 					vk_img_infos[j].imageView = ((const TextureInfo *)uniform.ids[j].id)->vk_view;
 					vk_img_infos[j].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
 				}
 
-				vk_writes[i].descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
-				vk_writes[i].pImageInfo = vk_img_infos;
+				vk_writes[writes_amount].descriptorType = VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
+				vk_writes[writes_amount].pImageInfo = vk_img_infos;
 			} break;
 			case UNIFORM_TYPE_IMAGE: {
 				num_descriptors = uniform.ids.size();
 				VkDescriptorImageInfo *vk_img_infos = ALLOCA_ARRAY(VkDescriptorImageInfo, num_descriptors);
 
 				for (uint32_t j = 0; j < num_descriptors; j++) {
+#ifdef DEBUG_ENABLED
+					if (((const TextureInfo *)uniform.ids[j].id)->transient) {
+						ERR_PRINT("TEXTURE_USAGE_TRANSIENT_BIT texture must not be used for sampling in a shader.");
+					}
+#endif
 					vk_img_infos[j] = {};
 					vk_img_infos[j].imageView = ((const TextureInfo *)uniform.ids[j].id)->vk_view;
 					vk_img_infos[j].imageLayout = VK_IMAGE_LAYOUT_GENERAL;
 				}
 
-				vk_writes[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
-				vk_writes[i].pImageInfo = vk_img_infos;
+				vk_writes[writes_amount].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_IMAGE;
+				vk_writes[writes_amount].pImageInfo = vk_img_infos;
 			} break;
 			case UNIFORM_TYPE_TEXTURE_BUFFER: {
 				num_descriptors = uniform.ids.size();
@@ -3975,9 +4061,9 @@ RDD::UniformSetID RenderingDeviceDriverVulkan::uniform_set_create(VectorView<Bou
 					vk_buf_views[j] = buf_info->vk_view;
 				}
 
-				vk_writes[i].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
-				vk_writes[i].pBufferInfo = vk_buf_infos;
-				vk_writes[i].pTexelBufferView = vk_buf_views;
+				vk_writes[writes_amount].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
+				vk_writes[writes_amount].pBufferInfo = vk_buf_infos;
+				vk_writes[writes_amount].pTexelBufferView = vk_buf_views;
 			} break;
 			case UNIFORM_TYPE_SAMPLER_WITH_TEXTURE_BUFFER: {
 				num_descriptors = uniform.ids.size() / 2;
@@ -3997,10 +4083,10 @@ RDD::UniformSetID RenderingDeviceDriverVulkan::uniform_set_create(VectorView<Bou
 					vk_buf_views[j] = buf_info->vk_view;
 				}
 
-				vk_writes[i].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
-				vk_writes[i].pImageInfo = vk_img_infos;
-				vk_writes[i].pBufferInfo = vk_buf_infos;
-				vk_writes[i].pTexelBufferView = vk_buf_views;
+				vk_writes[writes_amount].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_TEXEL_BUFFER;
+				vk_writes[writes_amount].pImageInfo = vk_img_infos;
+				vk_writes[writes_amount].pBufferInfo = vk_buf_infos;
+				vk_writes[writes_amount].pTexelBufferView = vk_buf_views;
 			} break;
 			case UNIFORM_TYPE_IMAGE_BUFFER: {
 				CRASH_NOW_MSG("Unimplemented!"); // TODO.
@@ -4012,8 +4098,8 @@ RDD::UniformSetID RenderingDeviceDriverVulkan::uniform_set_create(VectorView<Bou
 				vk_buf_info->buffer = buf_info->vk_buffer;
 				vk_buf_info->range = buf_info->size;
 
-				vk_writes[i].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
-				vk_writes[i].pBufferInfo = vk_buf_info;
+				vk_writes[writes_amount].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
+				vk_writes[writes_amount].pBufferInfo = vk_buf_info;
 			} break;
 			case UNIFORM_TYPE_STORAGE_BUFFER: {
 				const BufferInfo *buf_info = (const BufferInfo *)uniform.ids[0].id;
@@ -4022,8 +4108,8 @@ RDD::UniformSetID RenderingDeviceDriverVulkan::uniform_set_create(VectorView<Bou
 				vk_buf_info->buffer = buf_info->vk_buffer;
 				vk_buf_info->range = buf_info->size;
 
-				vk_writes[i].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
-				vk_writes[i].pBufferInfo = vk_buf_info;
+				vk_writes[writes_amount].descriptorType = VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
+				vk_writes[writes_amount].pBufferInfo = vk_buf_info;
 			} break;
 			case UNIFORM_TYPE_INPUT_ATTACHMENT: {
 				num_descriptors = uniform.ids.size();
@@ -4035,24 +4121,26 @@ RDD::UniformSetID RenderingDeviceDriverVulkan::uniform_set_create(VectorView<Bou
 					vk_img_infos[j].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
 				}
 
-				vk_writes[i].descriptorType = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
-				vk_writes[i].pImageInfo = vk_img_infos;
+				vk_writes[writes_amount].descriptorType = VK_DESCRIPTOR_TYPE_INPUT_ATTACHMENT;
+				vk_writes[writes_amount].pImageInfo = vk_img_infos;
 			} break;
 			default: {
 				DEV_ASSERT(false);
 			}
 		}
 
-		vk_writes[i].descriptorCount = num_descriptors;
+		vk_writes[writes_amount].dstBinding = uniform.binding;
+		vk_writes[writes_amount].descriptorCount = num_descriptors;
 
 		ERR_FAIL_COND_V_MSG(pool_key.uniform_type[uniform.type] == MAX_UNIFORM_POOL_ELEMENT, UniformSetID(),
 				"Uniform set reached the limit of bindings for the same type (" + itos(MAX_UNIFORM_POOL_ELEMENT) + ").");
 		pool_key.uniform_type[uniform.type] += num_descriptors;
+		writes_amount++;
 	}
 
 	// Need a descriptor pool.
 	DescriptorSetPools::Iterator pool_sets_it;
-	VkDescriptorPool vk_pool = _descriptor_set_pool_find_or_create(pool_key, &pool_sets_it);
+	VkDescriptorPool vk_pool = _descriptor_set_pool_find_or_create(pool_key, &pool_sets_it, p_linear_pool_index);
 	DEV_ASSERT(vk_pool);
 	pool_sets_it->value[vk_pool]++;
@@ -4064,22 +4152,27 @@ RDD::UniformSetID RenderingDeviceDriverVulkan::uniform_set_create(VectorView<Bou
 	descriptor_set_allocate_info.pSetLayouts = &shader_info->vk_descriptor_set_layouts[p_set_index];
 
 	VkDescriptorSet vk_descriptor_set = VK_NULL_HANDLE;
+
 	VkResult res = vkAllocateDescriptorSets(vk_device, &descriptor_set_allocate_info, &vk_descriptor_set);
 	if (res) {
-		_descriptor_set_pool_unreference(pool_sets_it, vk_pool);
+		_descriptor_set_pool_unreference(pool_sets_it, vk_pool, p_linear_pool_index);
 		ERR_FAIL_V_MSG(UniformSetID(), "Cannot allocate descriptor sets, error " + itos(res) + ".");
 	}
 
-	for (uint32_t i = 0; i < p_uniforms.size(); i++) {
+	for (uint32_t i = 0; i < writes_amount; i++) {
 		vk_writes[i].dstSet = vk_descriptor_set;
 	}
-	vkUpdateDescriptorSets(vk_device, p_uniforms.size(), vk_writes, 0, nullptr);
+	vkUpdateDescriptorSets(vk_device, writes_amount, vk_writes, 0, nullptr);
 
 	// Bookkeep.
 
 	UniformSetInfo *usi = VersatileResource::allocate<UniformSetInfo>(resources_allocator);
 	usi->vk_descriptor_set = vk_descriptor_set;
-	usi->vk_descriptor_pool = vk_pool;
+	if (p_linear_pool_index >= 0) {
+		usi->vk_linear_descriptor_pool = vk_pool;
+	} else {
+		usi->vk_descriptor_pool = vk_pool;
+	}
 	usi->pool_sets_it = pool_sets_it;
 
 	return UniformSetID(usi);
@@ -4087,13 +4180,43 @@ RDD::UniformSetID RenderingDeviceDriverVulkan::uniform_set_create(VectorView<Bou
 
 void RenderingDeviceDriverVulkan::uniform_set_free(UniformSetID p_uniform_set) {
 	UniformSetInfo *usi = (UniformSetInfo *)p_uniform_set.id;
-	vkFreeDescriptorSets(vk_device, usi->vk_descriptor_pool, 1, &usi->vk_descriptor_set);
 
-	_descriptor_set_pool_unreference(usi->pool_sets_it, usi->vk_descriptor_pool);
+	if (usi->vk_linear_descriptor_pool) {
+		// Nothing to do. All sets are freed at once using vkResetDescriptorPool.
+		//
+		// We can NOT decrease the reference count (i.e. call _descriptor_set_pool_unreference())
+		// because the pool is linear (i.e. the freed set can't be recycled) and further calls to
+		// _descriptor_set_pool_find_or_create() need usi->pool_sets_it->value to stay, so that we can
+		// tell if the pool has run out of space and we need to create a new pool.
+	} else {
+		vkFreeDescriptorSets(vk_device, usi->vk_descriptor_pool, 1, &usi->vk_descriptor_set);
+		_descriptor_set_pool_unreference(usi->pool_sets_it, usi->vk_descriptor_pool, -1);
+	}
 
 	VersatileResource::free(resources_allocator, usi);
 }
 
+bool RenderingDeviceDriverVulkan::uniform_sets_have_linear_pools() const {
+	return true;
+}
+
+void RenderingDeviceDriverVulkan::linear_uniform_set_pools_reset(int p_linear_pool_index) {
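+	// Resets every pool in the selected linear pool set with one vkResetDescriptorPool call each;
+	// the sets handed out from these pools are reclaimed here rather than in uniform_set_free().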
+	if (linear_descriptor_pools_enabled) {
+		DescriptorSetPools &pools_to_reset = linear_descriptor_set_pools[p_linear_pool_index];
+		DescriptorSetPools::Iterator curr_pool = pools_to_reset.begin();
+
+		while (curr_pool != pools_to_reset.end()) {
+			HashMap<VkDescriptorPool, uint32_t>::Iterator curr_pair = curr_pool->value.begin();
+			while (curr_pair != curr_pool->value.end()) {
+				vkResetDescriptorPool(vk_device, curr_pair->key, 0);
+				curr_pair->value = 0;
+				++curr_pair;
+			}
+			++curr_pool;
+		}
+	}
+}
+
 // ----- COMMANDS -----
 
 void RenderingDeviceDriverVulkan::command_uniform_set_prepare_for_use(CommandBufferID p_cmd_buffer, UniformSetID p_uniform_set, ShaderID p_shader, uint32_t p_set_index) {
@@ -4168,6 +4291,16 @@ void RenderingDeviceDriverVulkan::command_copy_texture(CommandBufferID p_cmd_buf
 
 	const TextureInfo *src_tex_info = (const TextureInfo *)p_src_texture.id;
 	const TextureInfo *dst_tex_info = (const TextureInfo *)p_dst_texture.id;
+
+#ifdef DEBUG_ENABLED
+	if (src_tex_info->transient) {
+		ERR_PRINT("TEXTURE_USAGE_TRANSIENT_BIT p_src_texture must not be used in command_copy_texture.");
+	}
+	if (dst_tex_info->transient) {
+		ERR_PRINT("TEXTURE_USAGE_TRANSIENT_BIT p_dst_texture must not be used in command_copy_texture.");
+	}
+#endif
+
 	vkCmdCopyImage((VkCommandBuffer)p_cmd_buffer.id, src_tex_info->vk_view_create_info.image, RD_TO_VK_LAYOUT[p_src_texture_layout], dst_tex_info->vk_view_create_info.image, RD_TO_VK_LAYOUT[p_dst_texture_layout], p_regions.size(), vk_copy_regions);
 }
@@ -4188,6 +4321,15 @@ void RenderingDeviceDriverVulkan::command_resolve_texture(CommandBufferID p_cmd_
 	vk_resolve.extent.height = MAX(1u, src_tex_info->vk_create_info.extent.height >> p_src_mipmap);
 	vk_resolve.extent.depth = MAX(1u, src_tex_info->vk_create_info.extent.depth >> p_src_mipmap);
 
+#ifdef DEBUG_ENABLED
+	if (src_tex_info->transient) {
+		ERR_PRINT("TEXTURE_USAGE_TRANSIENT_BIT p_src_texture must not be used in command_resolve_texture. Use a resolve store action pass instead.");
+	}
+	if (dst_tex_info->transient) {
+		ERR_PRINT("TEXTURE_USAGE_TRANSIENT_BIT p_dst_texture must not be used in command_resolve_texture.");
+	}
+#endif
+
 	vkCmdResolveImage((VkCommandBuffer)p_cmd_buffer.id, src_tex_info->vk_view_create_info.image, RD_TO_VK_LAYOUT[p_src_texture_layout], dst_tex_info->vk_view_create_info.image, RD_TO_VK_LAYOUT[p_dst_texture_layout], 1, &vk_resolve);
 }
@@ -4199,6 +4341,11 @@ void RenderingDeviceDriverVulkan::command_clear_color_texture(CommandBufferID p_
 	_texture_subresource_range_to_vk(p_subresources, &vk_subresources);
 
 	const TextureInfo *tex_info = (const TextureInfo *)p_texture.id;
+#ifdef DEBUG_ENABLED
+	if (tex_info->transient) {
+		ERR_PRINT("TEXTURE_USAGE_TRANSIENT_BIT p_texture must not be used in command_clear_color_texture. Use a clear store action pass instead.");
+	}
+#endif
 	vkCmdClearColorImage((VkCommandBuffer)p_cmd_buffer.id, tex_info->vk_view_create_info.image, RD_TO_VK_LAYOUT[p_texture_layout], &vk_color, 1, &vk_subresources);
 }
@@ -4210,6 +4357,11 @@ void RenderingDeviceDriverVulkan::command_copy_buffer_to_texture(CommandBufferID
 
 	const BufferInfo *buf_info = (const BufferInfo *)p_src_buffer.id;
 	const TextureInfo *tex_info = (const TextureInfo *)p_dst_texture.id;
+#ifdef DEBUG_ENABLED
+	if (tex_info->transient) {
+		ERR_PRINT("TEXTURE_USAGE_TRANSIENT_BIT p_dst_texture must not be used in command_copy_buffer_to_texture.");
+	}
+#endif
 	vkCmdCopyBufferToImage((VkCommandBuffer)p_cmd_buffer.id, buf_info->vk_buffer, tex_info->vk_view_create_info.image, RD_TO_VK_LAYOUT[p_dst_texture_layout], p_regions.size(), vk_copy_regions);
 }
@@ -4221,6 +4373,11 @@ void RenderingDeviceDriverVulkan::command_copy_texture_to_buffer(CommandBufferID
 
 	const TextureInfo *tex_info = (const TextureInfo *)p_src_texture.id;
 	const BufferInfo *buf_info = (const BufferInfo *)p_dst_buffer.id;
+#ifdef DEBUG_ENABLED
+	if (tex_info->transient) {
+		ERR_PRINT("TEXTURE_USAGE_TRANSIENT_BIT p_src_texture must not be used in command_copy_texture_to_buffer.");
+	}
+#endif
 	vkCmdCopyImageToBuffer((VkCommandBuffer)p_cmd_buffer.id, tex_info->vk_view_create_info.image, RD_TO_VK_LAYOUT[p_src_texture_layout], buf_info->vk_buffer, p_regions.size(), vk_copy_regions);
 }
@@ -4602,6 +4759,23 @@ void RenderingDeviceDriverVulkan::command_bind_render_uniform_set(CommandBufferI
 	vkCmdBindDescriptorSets((VkCommandBuffer)p_cmd_buffer.id, VK_PIPELINE_BIND_POINT_GRAPHICS, shader_info->vk_pipeline_layout, p_set_index, 1, &usi->vk_descriptor_set, 0, nullptr);
 }
 
+void RenderingDeviceDriverVulkan::command_bind_render_uniform_sets(CommandBufferID p_cmd_buffer, VectorView<UniformSetID> p_uniform_sets, ShaderID p_shader, uint32_t p_first_set_index, uint32_t p_set_count) {
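+	// Binds a contiguous range of descriptor sets with a single vkCmdBindDescriptorSets call instead of one call per set.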
+	if (p_set_count == 0) {
+		return;
+	}
+
+	thread_local LocalVector<VkDescriptorSet> sets;
+	sets.clear();
+	sets.resize(p_set_count);
+
+	for (uint32_t i = 0; i < p_set_count; i++) {
+		sets[i] = ((const UniformSetInfo *)p_uniform_sets[i].id)->vk_descriptor_set;
+	}
+
+	const ShaderInfo *shader_info = (const ShaderInfo *)p_shader.id;
+	vkCmdBindDescriptorSets((VkCommandBuffer)p_cmd_buffer.id, VK_PIPELINE_BIND_POINT_GRAPHICS, shader_info->vk_pipeline_layout, p_first_set_index, p_set_count, &sets[0], 0, nullptr);
+}
+
 void RenderingDeviceDriverVulkan::command_render_draw(CommandBufferID p_cmd_buffer, uint32_t p_vertex_count, uint32_t p_instance_count, uint32_t p_base_vertex, uint32_t p_first_instance) {
 	vkCmdDraw((VkCommandBuffer)p_cmd_buffer.id, p_vertex_count, p_instance_count, p_base_vertex, p_first_instance);
 }
@@ -5017,6 +5191,23 @@ void RenderingDeviceDriverVulkan::command_bind_compute_uniform_set(CommandBuffer
 	vkCmdBindDescriptorSets((VkCommandBuffer)p_cmd_buffer.id, VK_PIPELINE_BIND_POINT_COMPUTE, shader_info->vk_pipeline_layout, p_set_index, 1, &usi->vk_descriptor_set, 0, nullptr);
 }
 
+void RenderingDeviceDriverVulkan::command_bind_compute_uniform_sets(CommandBufferID p_cmd_buffer, VectorView<UniformSetID> p_uniform_sets, ShaderID p_shader, uint32_t p_first_set_index, uint32_t p_set_count) {
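+	// Compute counterpart of command_bind_render_uniform_sets(): one vkCmdBindDescriptorSets call for the whole contiguous range.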
+	if (p_set_count == 0) {
+		return;
+	}
+
+	thread_local LocalVector<VkDescriptorSet> sets;
+	sets.clear();
+	sets.resize(p_set_count);
+
+	for (uint32_t i = 0; i < p_set_count; i++) {
+		sets[i] = ((const UniformSetInfo *)p_uniform_sets[i].id)->vk_descriptor_set;
+	}
+
+	const ShaderInfo *shader_info = (const ShaderInfo *)p_shader.id;
+	vkCmdBindDescriptorSets((VkCommandBuffer)p_cmd_buffer.id, VK_PIPELINE_BIND_POINT_COMPUTE, shader_info->vk_pipeline_layout, p_first_set_index, p_set_count, &sets[0], 0, nullptr);
+}
+
 void RenderingDeviceDriverVulkan::command_compute_dispatch(CommandBufferID p_cmd_buffer, uint32_t p_x_groups, uint32_t p_y_groups, uint32_t p_z_groups) {
 	vkCmdDispatch((VkCommandBuffer)p_cmd_buffer.id, p_x_groups, p_y_groups, p_z_groups);
 }
@@ -5557,6 +5748,10 @@ uint64_t RenderingDeviceDriverVulkan::get_total_memory_used() {
 	return stats.total.statistics.allocationBytes;
 }
 
+uint64_t RenderingDeviceDriverVulkan::get_lazily_memory_used() {
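+	// Bytes currently backed by lazily allocated (VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) memory, e.g. transient attachments.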
+	return vmaCalculateLazilyAllocatedBytes(allocator);
+}
+
 uint64_t RenderingDeviceDriverVulkan::limit_get(Limit p_limit) {
 	const VkPhysicalDeviceLimits &limits = physical_device_properties.limits;
 	switch (p_limit) {
@@ -5730,6 +5925,15 @@ RenderingDeviceDriverVulkan::~RenderingDeviceDriverVulkan() {
 	}
 	vmaDestroyAllocator(allocator);
 
+	// Destroy linearly allocated descriptor pools.
+	for (KeyValue<int, DescriptorSetPools> &pool_map : linear_descriptor_set_pools) {
+		for (KeyValue<DescriptorSetPoolKey, HashMap<VkDescriptorPool, uint32_t>> pools : pool_map.value) {
+			for (KeyValue<VkDescriptorPool, uint32_t> descriptor_pool : pools.value) {
+				vkDestroyDescriptorPool(vk_device, descriptor_pool.key, VKC::get_allocation_callbacks(VK_OBJECT_TYPE_DESCRIPTOR_POOL));
+			}
+		}
+	}
+
 	if (vk_device != VK_NULL_HANDLE) {
 		vkDestroyDevice(vk_device, VKC::get_allocation_callbacks(VK_OBJECT_TYPE_DEVICE));
 	}