diff --git a/src/backend/vulkan/CommandBufferVk.cpp b/src/backend/vulkan/CommandBufferVk.cpp
index 51dec02378..7f61e5314a 100644
--- a/src/backend/vulkan/CommandBufferVk.cpp
+++ b/src/backend/vulkan/CommandBufferVk.cpp
@@ -16,6 +16,7 @@
 
 #include "backend/Commands.h"
 #include "backend/vulkan/BufferVk.h"
+#include "backend/vulkan/TextureVk.h"
 #include "backend/vulkan/VulkanBackend.h"
 
 namespace backend { namespace vulkan {
@@ -58,6 +59,15 @@ namespace backend { namespace vulkan {
                     buffer->UpdateUsageInternal(cmd->usage);
                 } break;
 
+                case Command::TransitionTextureUsage: {
+                    TransitionTextureUsageCmd* cmd =
+                        mCommands.NextCommand<TransitionTextureUsageCmd>();
+
+                    Texture* texture = ToBackend(cmd->texture.Get());
+                    texture->RecordBarrier(commands, texture->GetUsage(), cmd->usage);
+                    texture->UpdateUsageInternal(cmd->usage);
+                } break;
+
                 default: { UNREACHABLE(); } break;
             }
         }
diff --git a/src/backend/vulkan/TextureVk.cpp b/src/backend/vulkan/TextureVk.cpp
index a264823b88..d12b69b3d1 100644
--- a/src/backend/vulkan/TextureVk.cpp
+++ b/src/backend/vulkan/TextureVk.cpp
@@ -76,6 +76,128 @@ namespace backend { namespace vulkan {
             return flags;
         }
 
+        // Computes which Vulkan access types could be required for the given NXT usage.
+        VkAccessFlags VulkanAccessFlags(nxt::TextureUsageBit usage, nxt::TextureFormat format) {
+            VkAccessFlags flags = 0;
+
+            if (usage & nxt::TextureUsageBit::TransferSrc) {
+                flags |= VK_ACCESS_TRANSFER_READ_BIT;
+            }
+            if (usage & nxt::TextureUsageBit::TransferDst) {
+                flags |= VK_ACCESS_TRANSFER_WRITE_BIT;
+            }
+            if (usage & nxt::TextureUsageBit::Sampled) {
+                flags |= VK_ACCESS_SHADER_READ_BIT;
+            }
+            if (usage & nxt::TextureUsageBit::Storage) {
+                flags |= VK_ACCESS_SHADER_READ_BIT | VK_ACCESS_SHADER_WRITE_BIT;
+            }
+            if (usage & nxt::TextureUsageBit::OutputAttachment) {
+                if (TextureFormatHasDepthOrStencil(format)) {
+                    flags |= VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_READ_BIT |
+                             VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
+                } else {
+                    flags |=
+                        VK_ACCESS_COLOR_ATTACHMENT_READ_BIT | VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
+                }
+            }
+
+            // TODO(cwallez@chromium.org): What about present? Does it require VK_MEMORY_READ_BIT?
+
+            return flags;
+        }
+
+        // Chooses which Vulkan image layout should be used for the given NXT usage.
+        VkImageLayout VulkanImageLayout(nxt::TextureUsageBit usage, nxt::TextureFormat format) {
+            if (usage == nxt::TextureUsageBit::None) {
+                return VK_IMAGE_LAYOUT_UNDEFINED;
+            }
+
+            if (!nxt::HasZeroOrOneBits(usage)) {
+                return VK_IMAGE_LAYOUT_GENERAL;
+            }
+
+            // Usage has a single bit so we can switch on its value directly.
+            switch (usage) {
+                case nxt::TextureUsageBit::TransferDst:
+                    return VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL;
+                case nxt::TextureUsageBit::Sampled:
+                    return VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
+                // Vulkan texture copy functions require the image to be in _one_ known layout.
+                // Depending on whether parts of the texture have been transitioned to only
+                // TransferSrc or a combination with something else, the texture could be in a
+                // combination of GENERAL and TRANSFER_SRC_OPTIMAL. This would be a problem, so we
+                // make TransferSrc use GENERAL.
+                case nxt::TextureUsageBit::TransferSrc:
+                // Writable storage textures must use the GENERAL layout. If we could know the
+                // texture is only read we could use SHADER_READ_ONLY_OPTIMAL.
+                case nxt::TextureUsageBit::Storage:
+                case nxt::TextureUsageBit::Present:
+                    return VK_IMAGE_LAYOUT_GENERAL;
+                case nxt::TextureUsageBit::OutputAttachment:
+                    if (TextureFormatHasDepthOrStencil(format)) {
+                        return VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
+                    } else {
+                        return VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
+                    }
+                default:
+                    UNREACHABLE();
+            }
+        }
+
+        // Computes which Vulkan pipeline stages can access a texture in the given NXT usage.
+        VkPipelineStageFlags VulkanPipelineStage(nxt::TextureUsageBit usage,
+                                                 nxt::TextureFormat format) {
+            VkPipelineStageFlags flags = 0;
+
+            if (usage == nxt::TextureUsageBit::None) {
+                // This only happens when a texture is initially created (and for srcAccessMask),
+                // in which case there is no need to wait on anything to stop accessing this
+                // texture.
+                return VK_PIPELINE_STAGE_TOP_OF_PIPE_BIT;
+            }
+            if (usage & (nxt::TextureUsageBit::TransferSrc | nxt::TextureUsageBit::TransferDst)) {
+                flags |= VK_PIPELINE_STAGE_TRANSFER_BIT;
+            }
+            if (usage & (nxt::TextureUsageBit::Sampled | nxt::TextureUsageBit::Storage)) {
+                flags |= VK_PIPELINE_STAGE_VERTEX_SHADER_BIT |
+                         VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT |
+                         VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT;
+            }
+            if (usage & nxt::TextureUsageBit::OutputAttachment) {
+                if (TextureFormatHasDepthOrStencil(format)) {
+                    flags |= VK_PIPELINE_STAGE_EARLY_FRAGMENT_TESTS_BIT |
+                             VK_PIPELINE_STAGE_LATE_FRAGMENT_TESTS_BIT;
+                    // TODO(cwallez@chromium.org): This is missing the stage where the depth and
+                    // stencil values are written, but it isn't clear which one it is.
+                } else {
+                    flags |= VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
+                }
+            }
+
+            // TODO(cwallez@chromium.org): What about present?
+
+            return flags;
+        }
+
+        // Computes which Vulkan texture aspects are relevant for the given NXT format.
+        VkImageAspectFlags VulkanAspectMask(nxt::TextureFormat format) {
+            bool isDepth = TextureFormatHasDepth(format);
+            bool isStencil = TextureFormatHasStencil(format);
+
+            VkImageAspectFlags flags = 0;
+            if (isDepth) {
+                flags |= VK_IMAGE_ASPECT_DEPTH_BIT;
+            }
+            if (isStencil) {
+                flags |= VK_IMAGE_ASPECT_STENCIL_BIT;
+            }
+
+            if (flags != 0) {
+                return flags;
+            }
+            return VK_IMAGE_ASPECT_COLOR_BIT;
+        }
+
     } // namespace
 
     Texture::Texture(TextureBuilder* builder) : TextureBase(builder) {
@@ -138,13 +260,42 @@ namespace backend { namespace vulkan {
         return mHandle;
     }
 
-    void Texture::RecordBarrier(VkCommandBuffer,
-                                nxt::TextureUsageBit,
-                                nxt::TextureUsageBit) const {
+    // Helper function to add a texture barrier to a command buffer. This is inefficient because
+    // we should be coalescing barriers as much as possible.
+    void Texture::RecordBarrier(VkCommandBuffer commands,
+                                nxt::TextureUsageBit currentUsage,
+                                nxt::TextureUsageBit targetUsage) const {
+        nxt::TextureFormat format = GetFormat();
+        VkPipelineStageFlags srcStages = VulkanPipelineStage(currentUsage, format);
+        VkPipelineStageFlags dstStages = VulkanPipelineStage(targetUsage, format);
+
+        VkImageMemoryBarrier barrier;
+        barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
+        barrier.pNext = nullptr;
+        barrier.srcAccessMask = VulkanAccessFlags(currentUsage, format);
+        barrier.dstAccessMask = VulkanAccessFlags(targetUsage, format);
+        barrier.oldLayout = VulkanImageLayout(currentUsage, format);
+        barrier.newLayout = VulkanImageLayout(targetUsage, format);
+        barrier.srcQueueFamilyIndex = 0;
+        barrier.dstQueueFamilyIndex = 0;
+        barrier.image = mHandle;
+        // This transitions the whole resource but assumes it is a 2D texture.
+        ASSERT(GetDimension() == nxt::TextureDimension::e2D);
+        barrier.subresourceRange.aspectMask = VulkanAspectMask(format);
+        barrier.subresourceRange.baseMipLevel = 0;
+        barrier.subresourceRange.levelCount = GetNumMipLevels();
+        barrier.subresourceRange.baseArrayLayer = 0;
+        barrier.subresourceRange.layerCount = 1;
+
+        ToBackend(GetDevice())
+            ->fn.CmdPipelineBarrier(commands, srcStages, dstStages, 0, 0, nullptr, 0, nullptr, 1,
+                                    &barrier);
     }
 
-    void Texture::TransitionUsageImpl(nxt::TextureUsageBit,
-                                      nxt::TextureUsageBit) {
+    void Texture::TransitionUsageImpl(nxt::TextureUsageBit currentUsage,
+                                      nxt::TextureUsageBit targetUsage) {
+        VkCommandBuffer commands = ToBackend(GetDevice())->GetPendingCommandBuffer();
+        RecordBarrier(commands, currentUsage, targetUsage);
     }
 
 }} // namespace backend::vulkan
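
Note (not part of the patch): to make the usage-to-barrier mapping concrete, below is roughly what RecordBarrier records for a single-mip, single-layer color texture transitioned from Sampled to OutputAttachment, written against the raw Vulkan entry point. This is a minimal sketch: the `commands` and `image` handles are placeholders, and the patch itself calls through the device's `fn.CmdPipelineBarrier` pointer rather than the loader export.

    // Sketch of the barrier produced for Sampled -> OutputAttachment on a color format,
    // following the VulkanAccessFlags / VulkanImageLayout / VulkanPipelineStage helpers above.
    VkImageMemoryBarrier barrier;
    barrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
    barrier.pNext = nullptr;
    barrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;             // VulkanAccessFlags(Sampled)
    barrier.dstAccessMask = VK_ACCESS_COLOR_ATTACHMENT_READ_BIT |  // VulkanAccessFlags(OutputAttachment)
                            VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT;
    barrier.oldLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;  // VulkanImageLayout(Sampled)
    barrier.newLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;  // VulkanImageLayout(OutputAttachment)
    barrier.srcQueueFamilyIndex = 0;  // same family on both sides, so no ownership transfer
    barrier.dstQueueFamilyIndex = 0;
    barrier.image = image;  // placeholder VkImage
    barrier.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;  // VulkanAspectMask(color format)
    barrier.subresourceRange.baseMipLevel = 0;
    barrier.subresourceRange.levelCount = 1;
    barrier.subresourceRange.baseArrayLayer = 0;
    barrier.subresourceRange.layerCount = 1;

    vkCmdPipelineBarrier(commands,
                         // VulkanPipelineStage(Sampled): every shader stage that could be sampling
                         VK_PIPELINE_STAGE_VERTEX_SHADER_BIT | VK_PIPELINE_STAGE_FRAGMENT_SHADER_BIT |
                             VK_PIPELINE_STAGE_COMPUTE_SHADER_BIT,
                         // VulkanPipelineStage(OutputAttachment) on a color format
                         VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT,
                         0, 0, nullptr, 0, nullptr, 1, &barrier);

Because Sampled is a single usage bit, the image can sit in SHADER_READ_ONLY_OPTIMAL; a combined usage such as Sampled | Storage would fail HasZeroOrOneBits and force GENERAL on both sides of the transition.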