From 83dd9c854034289d6cab8a8594b0ab438af7a553 Mon Sep 17 00:00:00 2001
From: jpark37
Date: Sun, 31 May 2020 22:31:39 -0700
Subject: [PATCH] win-capture: Improve Vulkan synchronization

Use the GENERAL layout and 0 access masks for external synchronization,
as specified in the spec. Also set pipeline stages for maximum
synchronization as a precaution, since the spec does not appear to say
which pipeline stages the external access maps to.

"Vulkan-incompatible APIs will require the image to be in the GENERAL
layout whenever they are accessing them."

"Whilst it is not invalid to provide destination or source access masks
for memory barriers used for release or acquire operations,
respectively, they have no practical effect. Access after a release
operation has undefined results, and so visibility for those accesses
has no practical effect. Similarly, write access before an acquire
operation will produce undefined results for future access, so
availability of those writes has no practical use. In an earlier
version of the specification, these were required to match on both
sides - but this was subsequently relaxed. These masks should be set to
0."
---
 .../graphics-hook/vulkan-capture.c | 21 +++++++++++--------
 1 file changed, 12 insertions(+), 9 deletions(-)

diff --git a/plugins/win-capture/graphics-hook/vulkan-capture.c b/plugins/win-capture/graphics-hook/vulkan-capture.c
index 4a94596ba..3723dc0d9 100644
--- a/plugins/win-capture/graphics-hook/vulkan-capture.c
+++ b/plugins/win-capture/graphics-hook/vulkan-capture.c
@@ -854,9 +854,9 @@ static void vk_shtex_capture(struct vk_data *data,
 		imb.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
 		imb.pNext = NULL;
 		imb.srcAccessMask = 0;
-		imb.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
+		imb.dstAccessMask = 0;
 		imb.oldLayout = VK_IMAGE_LAYOUT_UNDEFINED;
-		imb.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
+		imb.newLayout = VK_IMAGE_LAYOUT_GENERAL;
 		imb.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
 		imb.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
 		imb.image = swap->export_image;
@@ -868,8 +868,8 @@ static void vk_shtex_capture(struct vk_data *data,
 
 		funcs->CmdPipelineBarrier(cmd_buffer,
 					  VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT,
-					  VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0,
-					  NULL, 0, NULL, 1, &imb);
+					  VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT, 0,
+					  0, NULL, 0, NULL, 1, &imb);
 
 		swap->layout_initialized = true;
 	}
@@ -897,9 +897,9 @@ static void vk_shtex_capture(struct vk_data *data,
 
 	dst_mb->sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
 	dst_mb->pNext = NULL;
-	dst_mb->srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
+	dst_mb->srcAccessMask = 0;
 	dst_mb->dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
-	dst_mb->oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
+	dst_mb->oldLayout = VK_IMAGE_LAYOUT_GENERAL;
 	dst_mb->newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
 	dst_mb->srcQueueFamilyIndex = VK_QUEUE_FAMILY_EXTERNAL;
 	dst_mb->dstQueueFamilyIndex = fam_idx;
@@ -911,8 +911,7 @@ static void vk_shtex_capture(struct vk_data *data,
 	dst_mb->subresourceRange.layerCount = 1;
 
 	funcs->CmdPipelineBarrier(cmd_buffer,
-				  VK_PIPELINE_STAGE_TRANSFER_BIT |
-					  VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
+				  VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
 				  VK_PIPELINE_STAGE_TRANSFER_BIT, 0, 0, NULL,
 				  0, NULL, 2, mb);
 
@@ -953,11 +952,15 @@ static void vk_shtex_capture(struct vk_data *data,
 	src_mb->oldLayout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
 	src_mb->newLayout = VK_IMAGE_LAYOUT_PRESENT_SRC_KHR;
 
+	dst_mb->srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
+	dst_mb->dstAccessMask = 0;
+	dst_mb->oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
+	dst_mb->newLayout = VK_IMAGE_LAYOUT_GENERAL;
 	dst_mb->srcQueueFamilyIndex = fam_idx;
 	dst_mb->dstQueueFamilyIndex = VK_QUEUE_FAMILY_EXTERNAL;
 
 	funcs->CmdPipelineBarrier(cmd_buffer, VK_PIPELINE_STAGE_TRANSFER_BIT,
-				  VK_PIPELINE_STAGE_TRANSFER_BIT |
+				  VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT |
 					  VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
 				  0, 0, NULL, 0, NULL, 2, mb);
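
For reference, a minimal standalone sketch of the release/acquire pattern
this patch applies to the export image. It is not part of the patch: it
calls vkCmdPipelineBarrier directly instead of going through OBS's
dispatched funcs table, and the helper names (release_to_external,
acquire_from_external) are hypothetical. The release hands the image to
the external (non-Vulkan) consumer in the GENERAL layout with
dstAccessMask = 0; the matching acquire takes it back with
srcAccessMask = 0. The broad stage masks mirror the patch's conservative
choice, since the spec does not say which pipeline stages the external
access maps to.

#include <vulkan/vulkan.h>

/* Hypothetical helper: release the image to an external (non-Vulkan) user.
 * Access after a release operation is undefined, so dstAccessMask is 0,
 * and the external API expects VK_IMAGE_LAYOUT_GENERAL. */
static void release_to_external(VkCommandBuffer cmd, VkImage image,
				uint32_t queue_family)
{
	VkImageMemoryBarrier imb = {0};
	imb.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
	imb.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
	imb.dstAccessMask = 0; /* no practical effect on a release */
	imb.oldLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
	imb.newLayout = VK_IMAGE_LAYOUT_GENERAL;
	imb.srcQueueFamilyIndex = queue_family;
	imb.dstQueueFamilyIndex = VK_QUEUE_FAMILY_EXTERNAL;
	imb.image = image;
	imb.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
	imb.subresourceRange.levelCount = 1;
	imb.subresourceRange.layerCount = 1;

	/* Conservative stage masks: the external access has no defined
	 * pipeline stage, so synchronize against everything. */
	vkCmdPipelineBarrier(cmd, VK_PIPELINE_STAGE_TRANSFER_BIT,
			     VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT |
				     VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
			     0, 0, NULL, 0, NULL, 1, &imb);
}

/* Hypothetical helper: acquire the image back for transfer writes.
 * Writes before an acquire operation are undefined, so srcAccessMask is 0. */
static void acquire_from_external(VkCommandBuffer cmd, VkImage image,
				  uint32_t queue_family)
{
	VkImageMemoryBarrier imb = {0};
	imb.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
	imb.srcAccessMask = 0; /* no practical effect on an acquire */
	imb.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
	imb.oldLayout = VK_IMAGE_LAYOUT_GENERAL;
	imb.newLayout = VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
	imb.srcQueueFamilyIndex = VK_QUEUE_FAMILY_EXTERNAL;
	imb.dstQueueFamilyIndex = queue_family;
	imb.image = image;
	imb.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
	imb.subresourceRange.levelCount = 1;
	imb.subresourceRange.layerCount = 1;

	vkCmdPipelineBarrier(cmd, VK_PIPELINE_STAGE_BOTTOM_OF_PIPE_BIT,
			     VK_PIPELINE_STAGE_TRANSFER_BIT,
			     0, 0, NULL, 0, NULL, 1, &imb);
}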