@@ -747,8 +747,54 @@ VkIndexType Vulkan::getVulkanIndexBufferType(IndexDataType type)
 	}
 }
 
-void Vulkan::cmdTransitionImageLayout(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout oldLayout, VkImageLayout newLayout,
-	uint32_t baseLevel, uint32_t levelCount, uint32_t baseLayer, uint32_t layerCount)
+static void setImageLayoutTransitionOptions(bool previous, VkImageLayout layout, VkAccessFlags &accessMask, VkPipelineStageFlags &stageFlags, bool &depthStencil)
+{
+	switch (layout)
+	{
+	case VK_IMAGE_LAYOUT_UNDEFINED:
+		accessMask = 0;
+		if (previous)
+			stageFlags = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
+		else
+			stageFlags = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
+		break;
+	case VK_IMAGE_LAYOUT_GENERAL:
+		// We use the general image layout for images that are both compute write and readable.
+		// todo: can we optimize this?
+		accessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT;
+		stageFlags = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT;
+		break;
+	case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
+		accessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+		stageFlags = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
+		break;
+	case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
+		depthStencil = true;
+		accessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT;
+		stageFlags = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
+		break;
+	case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
+		accessMask = VK_ACCESS_SHADER_READ_BIT;
+		stageFlags = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
+		break;
+	case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
+		accessMask = VK_ACCESS_TRANSFER_READ_BIT;
+		stageFlags = VK_PIPELINE_STAGE_TRANSFER_BIT;
+		break;
+	case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
+		accessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
+		stageFlags = VK_PIPELINE_STAGE_TRANSFER_BIT;
+		break;
+	case VK_IMAGE_LAYOUT_PRESENT_SRC_KHR:
+		accessMask = 0;
+		stageFlags = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
+		break;
+	default:
+		throw love::Exception("unimplemented image layout");
+	}
+}
+
+void Vulkan::cmdTransitionImageLayout(VkCommandBuffer commandBuffer, VkImage image, VkImageLayout oldLayout, VkImageLayout newLayout, uint32_t baseLevel, uint32_t levelCount, uint32_t baseLayer, uint32_t layerCount)
 {
 	VkImageMemoryBarrier barrier{};
 	barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
@@ -757,176 +803,22 @@ void Vulkan::cmdTransitionImageLayout(VkCommandBuffer commandBuffer, VkImage ima
 	barrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
 	barrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
 	barrier.image = image;
-	barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
 	barrier.subresourceRange.baseMipLevel = baseLevel;
 	barrier.subresourceRange.levelCount = levelCount;
 	barrier.subresourceRange.baseArrayLayer = baseLayer;
 	barrier.subresourceRange.layerCount = layerCount;
 
+	bool depthStencil = false;
 	VkPipelineStageFlags sourceStage;
 	VkPipelineStageFlags destinationStage;
 
-	if (oldLayout == VK_IMAGE_LAYOUT_UNDEFINED && newLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)
-	{
-		barrier.srcAccessMask = 0;
-		barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
-
-		sourceStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
-		destinationStage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
-	}
-	else if (oldLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL && newLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)
-	{
-		barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
-		barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
-
-		sourceStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
-		destinationStage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
-	}
-	else if (oldLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL && newLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
-	{
-		barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
-		barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
-
-		sourceStage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
-		destinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
-	}
-	else if (oldLayout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL && newLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)
-	{
-		barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
-		barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
-
-		sourceStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
-		destinationStage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
-	}
-	else if (oldLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL && newLayout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL)
-	{
-		barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
-		barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
-
-		sourceStage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
-		destinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
-	}
-	else if (oldLayout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL && newLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL)
-	{
-		barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
-		barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
-
-		sourceStage = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
-		destinationStage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
-	}
-	else if (oldLayout == VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL && newLayout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL)
-	{
-		barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
-		barrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
-
-		sourceStage = VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT;
-		destinationStage = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
-	}
-	else if (oldLayout == VK_IMAGE_LAYOUT_UNDEFINED && newLayout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL)
-	{
-		barrier.srcAccessMask = 0;
-		barrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
-
-		sourceStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
-		destinationStage = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
-	}
-	else if (oldLayout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL && newLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
-	{
-		barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
-		barrier.dstAccessMask = 0;
-
-		sourceStage = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
-		destinationStage = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
-	}
-	// we use general for images that are both sampled and compute write
-	else if (oldLayout == VK_IMAGE_LAYOUT_UNDEFINED && newLayout == VK_IMAGE_LAYOUT_GENERAL)
-	{
-		barrier.srcAccessMask = 0;
-		barrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT | VK_ACCESS_TRANSFER_WRITE_BIT | VK_ACCESS_TRANSFER_READ_BIT;
-
-		sourceStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
-		destinationStage = VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT | VK_PIPELINE_STAGE_TRANSFER_BIT;
-	}
-	else if (oldLayout == VK_IMAGE_LAYOUT_UNDEFINED && newLayout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL)
-	{
-		barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
+	setImageLayoutTransitionOptions(true, oldLayout, barrier.srcAccessMask, sourceStage, depthStencil);
+	setImageLayoutTransitionOptions(false, newLayout, barrier.dstAccessMask, destinationStage, depthStencil);
 
-		barrier.srcAccessMask = 0;
-		barrier.dstAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT;
-
-		sourceStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
-		destinationStage = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
-	}
-	else if (oldLayout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL && newLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
-	{
+	if (depthStencil)
 		barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
-
-		barrier.srcAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT;
-		barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
-
-		sourceStage = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
-		destinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
-	}
-	else if (oldLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL && newLayout == VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL)
-	{
-		barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT | VK_IMAGE_ASPECT_STENCIL_BIT;
-
-		barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
-		barrier.dstAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT | VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT;
-
-		sourceStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
-		destinationStage = VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT | VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
-	}
-	else if (oldLayout == VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL && newLayout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL)
-	{
-		barrier.srcAccessMask = VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
-		barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
-
-		sourceStage = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
-		destinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
-	}
-	else if (oldLayout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL && newLayout == VK_IMAGE_LAYOUT_PRESENT_SRC_KHR)
-	{
-		barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
-		barrier.dstAccessMask = 0;
-
-		sourceStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
-		destinationStage = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
-	}
-	else if (oldLayout == VK_IMAGE_LAYOUT_UNDEFINED && newLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
-	{
-		barrier.srcAccessMask = 0;
-		barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
-
-		sourceStage = VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
-		destinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
-	}
-	else if (oldLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL && newLayout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL)
-	{
-		barrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
-		barrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
-
-		sourceStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
-		destinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
-	}
-	else if (oldLayout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL && newLayout == VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL)
-	{
-		barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
-		barrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
-
-		sourceStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
-		destinationStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
-	}
-	else if (oldLayout == VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL && newLayout == VK_IMAGE_LAYOUT_UNDEFINED)
-	{
-		barrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
-		barrier.dstAccessMask = 0;
-
-		sourceStage = VK_PIPELINE_STAGE_TRANSFER_BIT;
-		destinationStage = VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT;
-	}
 	else
-		throw std::invalid_argument("unsupported layout transition!");
+		barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
 
 	vkCmdPipelineBarrier(
 		commandBuffer,