Vulkan: Implement BindGroups

For now only StorageBuffer and UniformBuffer bindings are supported as
we don't have samplers yet.
This commit is contained in:
Corentin Wallez 2018-02-02 16:49:51 -05:00 committed by Corentin Wallez
parent e7362d0b1b
commit 14e0687f94
12 changed files with 253 additions and 18 deletions

View File

@ -61,6 +61,10 @@ namespace backend {
return reinterpret_cast<TextureViewBase*>(mBindings[binding].Get());
}
// Returns the device that created this bind group. The bind group does not
// store a device pointer itself; it reaches it through its layout.
DeviceBase* BindGroupBase::GetDevice() const {
    return mLayout->GetDevice();
}
// BindGroupBuilder
enum BindGroupSetProperties {

View File

@ -15,6 +15,7 @@
#ifndef BACKEND_BINDGROUP_H_
#define BACKEND_BINDGROUP_H_
#include "backend/BindGroupLayout.h"
#include "backend/Builder.h"
#include "backend/Forward.h"
#include "backend/RefCounted.h"
@ -38,6 +39,8 @@ namespace backend {
SamplerBase* GetBindingAsSampler(size_t binding);
TextureViewBase* GetBindingAsTextureView(size_t binding);
DeviceBase* GetDevice() const;
private:
Ref<BindGroupLayoutBase> mLayout;
nxt::BindGroupUsage mUsage;

View File

@ -288,6 +288,8 @@ if (NXT_ENABLE_VULKAN)
target_include_directories(vulkan_autogen PUBLIC ${SRC_DIR})
list(APPEND BACKEND_SOURCES
${VULKAN_DIR}/BindGroupVk.cpp
${VULKAN_DIR}/BindGroupVk.h
${VULKAN_DIR}/BindGroupLayoutVk.cpp
${VULKAN_DIR}/BindGroupLayoutVk.h
${VULKAN_DIR}/BlendStateVk.cpp

View File

@ -20,21 +20,6 @@ namespace backend { namespace vulkan {
namespace {
// Translates an NXT binding type into the equivalent Vulkan descriptor type.
VkDescriptorType VulkanDescriptorType(nxt::BindingType type) {
    switch (type) {
        case nxt::BindingType::UniformBuffer:
            return VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
        case nxt::BindingType::Sampler:
            return VK_DESCRIPTOR_TYPE_SAMPLER;
        case nxt::BindingType::SampledTexture:
            return VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
        case nxt::BindingType::StorageBuffer:
            return VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
        default:
            // All nxt::BindingType values are covered above.
            UNREACHABLE();
    }
}
VkShaderStageFlags VulkanShaderStageFlags(nxt::ShaderStageBit stages) {
VkShaderStageFlags flags = 0;
@ -53,6 +38,21 @@ namespace backend { namespace vulkan {
} // anonymous namespace
// Translates an NXT binding type into the equivalent Vulkan descriptor type.
// Exposed at namespace scope (declared in BindGroupLayoutVk.h) so that
// BindGroupVk.cpp can reuse it when building descriptor writes.
VkDescriptorType VulkanDescriptorType(nxt::BindingType type) {
    switch (type) {
        case nxt::BindingType::UniformBuffer:
            return VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
        case nxt::BindingType::Sampler:
            return VK_DESCRIPTOR_TYPE_SAMPLER;
        case nxt::BindingType::SampledTexture:
            return VK_DESCRIPTOR_TYPE_SAMPLED_IMAGE;
        case nxt::BindingType::StorageBuffer:
            return VK_DESCRIPTOR_TYPE_STORAGE_BUFFER;
        default:
            // All nxt::BindingType values are covered above.
            UNREACHABLE();
    }
}
BindGroupLayout::BindGroupLayout(BindGroupLayoutBuilder* builder)
: BindGroupLayoutBase(builder) {
const auto& info = GetBindingInfo();
@ -101,4 +101,53 @@ namespace backend { namespace vulkan {
return mHandle;
}
// Computes the VkDescriptorPoolSize entries required to allocate exactly one
// descriptor set with this layout: one entry per descriptor type that appears
// in the layout, whose descriptorCount is the number of bindings of that type.
// On return, *numPoolSizes holds how many leading entries of the returned
// array are valid (the rest are zero-initialized).
BindGroupLayout::PoolSizeSpec BindGroupLayout::ComputePoolSizes(uint32_t* numPoolSizes) const {
    uint32_t numSizes = 0;
    PoolSizeSpec result{};

    // Defines an array and indices into it that will contain for each descriptor type at which
    // position it is in the PoolSizeSpec, or -1 if it isn't present yet.
    enum DescriptorType {
        UNIFORM_BUFFER,
        SAMPLER,
        SAMPLED_IMAGE,
        STORAGE_BUFFER,
        MAX_TYPE,
    };
    // PoolSizeSpec must be able to hold one entry per distinct descriptor type.
    static_assert(MAX_TYPE == kMaxPoolSizesNeeded, "");

    // Maps an NXT binding type onto the compact DescriptorType index above.
    auto ToDescriptorType = [](nxt::BindingType type) -> DescriptorType {
        switch (type) {
            case nxt::BindingType::UniformBuffer:
                return UNIFORM_BUFFER;
            case nxt::BindingType::Sampler:
                return SAMPLER;
            case nxt::BindingType::SampledTexture:
                return SAMPLED_IMAGE;
            case nxt::BindingType::StorageBuffer:
                return STORAGE_BUFFER;
            default:
                UNREACHABLE();
        }
    };

    // descriptorTypeIndex[t] is the position of type t in `result`, or -1 if
    // no binding of that type has been seen yet.
    std::array<int, MAX_TYPE> descriptorTypeIndex;
    descriptorTypeIndex.fill(-1);

    const auto& info = GetBindingInfo();
    for (uint32_t bindingIndex : IterateBitSet(info.mask)) {
        DescriptorType type = ToDescriptorType(info.types[bindingIndex]);

        if (descriptorTypeIndex[type] == -1) {
            // First binding of this type: open a new pool-size entry for it.
            descriptorTypeIndex[type] = numSizes;
            result[numSizes].type = VulkanDescriptorType(info.types[bindingIndex]);
            result[numSizes].descriptorCount = 1;
            numSizes++;
        } else {
            // Type already present: just bump its count.
            result[descriptorTypeIndex[type]].descriptorCount++;
        }
    }

    *numPoolSizes = numSizes;
    return result;
}
}} // namespace backend::vulkan

View File

@ -23,6 +23,8 @@ namespace backend { namespace vulkan {
class Device;
VkDescriptorType VulkanDescriptorType(nxt::BindingType type);
class BindGroupLayout : public BindGroupLayoutBase {
public:
BindGroupLayout(BindGroupLayoutBuilder* builder);
@ -30,6 +32,10 @@ namespace backend { namespace vulkan {
VkDescriptorSetLayout GetHandle() const;
static constexpr size_t kMaxPoolSizesNeeded = 4;
using PoolSizeSpec = std::array<VkDescriptorPoolSize, kMaxPoolSizesNeeded>;
PoolSizeSpec ComputePoolSizes(uint32_t* numPoolSizes) const;
private:
VkDescriptorSetLayout mHandle = VK_NULL_HANDLE;
};

View File

@ -0,0 +1,118 @@
// Copyright 2018 The NXT Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include "backend/vulkan/BindGroupVk.h"
#include "BindGroupLayoutVk.h"
#include "BufferVk.h"
#include "FencedDeleter.h"
#include "VulkanBackend.h"
namespace backend { namespace vulkan {
// Creates the Vulkan backing for a bind group: a dedicated descriptor pool,
// a single descriptor set allocated from it, and one descriptor write per
// binding. Only UniformBuffer and StorageBuffer bindings are supported so far;
// Sampler and SampledTexture bindings hit UNREACHABLE().
BindGroup::BindGroup(BindGroupBuilder* builder) : BindGroupBase(builder) {
    // Create a pool to hold our descriptor set.
    // TODO(cwallez@chromium.org): This is horribly inefficient, find a way to be better, for
    // example by having one pool per bind group layout instead.
    uint32_t numPoolSizes = 0;
    auto poolSizes = ToBackend(GetLayout())->ComputePoolSizes(&numPoolSizes);

    VkDescriptorPoolCreateInfo createInfo;
    createInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
    createInfo.pNext = nullptr;
    createInfo.flags = 0;
    createInfo.maxSets = 1;
    createInfo.poolSizeCount = numPoolSizes;
    createInfo.pPoolSizes = poolSizes.data();

    Device* device = ToBackend(GetDevice());

    if (device->fn.CreateDescriptorPool(device->GetVkDevice(), &createInfo, nullptr, &mPool) !=
        VK_SUCCESS) {
        ASSERT(false);
    }

    // Now do the allocation of one descriptor set, this is very suboptimal too.
    VkDescriptorSetLayout vkLayout = ToBackend(GetLayout())->GetHandle();

    VkDescriptorSetAllocateInfo allocateInfo;
    allocateInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
    allocateInfo.pNext = nullptr;
    allocateInfo.descriptorPool = mPool;
    allocateInfo.descriptorSetCount = 1;
    allocateInfo.pSetLayouts = &vkLayout;

    if (device->fn.AllocateDescriptorSets(device->GetVkDevice(), &allocateInfo, &mHandle) !=
        VK_SUCCESS) {
        ASSERT(false);
    }

    // Now do a write of a single descriptor set with all possible chained data allocated on the
    // stack.
    uint32_t numWrites = 0;
    std::array<VkWriteDescriptorSet, kMaxBindingsPerGroup> writes;
    std::array<VkDescriptorBufferInfo, kMaxBindingsPerGroup> writeBufferInfo;

    const auto& layoutInfo = GetLayout()->GetBindingInfo();
    for (uint32_t bindingIndex : IterateBitSet(layoutInfo.mask)) {
        auto& write = writes[numWrites];
        write.sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
        write.pNext = nullptr;
        write.dstSet = mHandle;
        write.dstBinding = bindingIndex;
        write.dstArrayElement = 0;
        write.descriptorCount = 1;
        write.descriptorType = VulkanDescriptorType(layoutInfo.types[bindingIndex]);
        // The `writes` array is not zero-initialized, so explicitly null the
        // pointer members this code never fills. Vulkan ignores pImageInfo and
        // pTexelBufferView for buffer descriptor types, but leaving them
        // indeterminate trips tooling and is a hazard if more types are added.
        write.pImageInfo = nullptr;
        write.pTexelBufferView = nullptr;

        switch (layoutInfo.types[bindingIndex]) {
            case nxt::BindingType::UniformBuffer:
            case nxt::BindingType::StorageBuffer: {
                BufferViewBase* view = GetBindingAsBufferView(bindingIndex);
                Buffer* buffer = ToBackend(view->GetBuffer());
                writeBufferInfo[numWrites].buffer = buffer->GetHandle();
                writeBufferInfo[numWrites].offset = view->GetOffset();
                writeBufferInfo[numWrites].range = view->GetSize();
                write.pBufferInfo = &writeBufferInfo[numWrites];
            } break;
            case nxt::BindingType::Sampler:
            case nxt::BindingType::SampledTexture:
            default:
                // Samplers and sampled textures aren't implemented yet.
                UNREACHABLE();
        }

        numWrites++;
    }

    device->fn.UpdateDescriptorSets(device->GetVkDevice(), numWrites, writes.data(), 0,
                                    nullptr);
}
BindGroup::~BindGroup() {
    // The descriptor set doesn't need to be deleted because it's done implicitly when the
    // descriptor pool is destroyed.
    mHandle = VK_NULL_HANDLE;

    if (mPool != VK_NULL_HANDLE) {
        // Defer pool destruction until all GPU work using it has completed.
        ToBackend(GetDevice())->GetFencedDeleter()->DeleteWhenUnused(mPool);
        mPool = VK_NULL_HANDLE;
    }
}
// Returns the descriptor set backing this bind group.
VkDescriptorSet BindGroup::GetHandle() const {
    return mHandle;
}
}} // namespace backend::vulkan

View File

@ -0,0 +1,40 @@
// Copyright 2018 The NXT Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#ifndef BACKEND_VULKAN_BINDGROUPVK_H_
#define BACKEND_VULKAN_BINDGROUPVK_H_
#include "backend/BindGroup.h"
#include "common/vulkan_platform.h"
namespace backend { namespace vulkan {
class Device;
// Vulkan implementation of a bind group. Each bind group currently owns its
// own small descriptor pool from which a single descriptor set is allocated.
class BindGroup : public BindGroupBase {
  public:
    BindGroup(BindGroupBuilder* builder);
    ~BindGroup();

    // Returns the descriptor set backing this bind group.
    VkDescriptorSet GetHandle() const;

  private:
    // Owning pool; handed to the FencedDeleter in the destructor.
    VkDescriptorPool mPool = VK_NULL_HANDLE;
    // Freed implicitly when mPool is destroyed.
    VkDescriptorSet mHandle = VK_NULL_HANDLE;
};
}} // namespace backend::vulkan
#endif // BACKEND_VULKAN_BINDGROUPVK_H_

View File

@ -23,6 +23,7 @@ namespace backend { namespace vulkan {
FencedDeleter::~FencedDeleter() {
ASSERT(mBuffersToDelete.Empty());
ASSERT(mDescriptorPoolsToDelete.Empty());
ASSERT(mFramebuffersToDelete.Empty());
ASSERT(mImagesToDelete.Empty());
ASSERT(mImageViewsToDelete.Empty());
@ -40,6 +41,10 @@ namespace backend { namespace vulkan {
mBuffersToDelete.Enqueue(buffer, mDevice->GetSerial());
}
// Queues the descriptor pool for destruction once the GPU work submitted up
// to the current serial has completed.
void FencedDeleter::DeleteWhenUnused(VkDescriptorPool pool) {
    mDescriptorPoolsToDelete.Enqueue(pool, mDevice->GetSerial());
}
void FencedDeleter::DeleteWhenUnused(VkDeviceMemory memory) {
mMemoriesToDelete.Enqueue(memory, mDevice->GetSerial());
}
@ -148,6 +153,11 @@ namespace backend { namespace vulkan {
mDevice->fn.DestroySemaphore(vkDevice, semaphore, nullptr);
}
mSemaphoresToDelete.ClearUpTo(completedSerial);
for (VkDescriptorPool pool : mDescriptorPoolsToDelete.IterateUpTo(completedSerial)) {
mDevice->fn.DestroyDescriptorPool(vkDevice, pool, nullptr);
}
mDescriptorPoolsToDelete.ClearUpTo(completedSerial);
}
}} // namespace backend::vulkan

View File

@ -28,6 +28,7 @@ namespace backend { namespace vulkan {
~FencedDeleter();
void DeleteWhenUnused(VkBuffer buffer);
void DeleteWhenUnused(VkDescriptorPool pool);
void DeleteWhenUnused(VkDeviceMemory memory);
void DeleteWhenUnused(VkFramebuffer framebuffer);
void DeleteWhenUnused(VkImage image);
@ -45,8 +46,9 @@ namespace backend { namespace vulkan {
private:
Device* mDevice = nullptr;
SerialQueue<VkBuffer> mBuffersToDelete;
SerialQueue<VkFramebuffer> mFramebuffersToDelete;
SerialQueue<VkDescriptorPool> mDescriptorPoolsToDelete;
SerialQueue<VkDeviceMemory> mMemoriesToDelete;
SerialQueue<VkFramebuffer> mFramebuffersToDelete;
SerialQueue<VkImage> mImagesToDelete;
SerialQueue<VkImageView> mImageViewsToDelete;
SerialQueue<VkPipeline> mPipelinesToDelete;

View File

@ -13,6 +13,7 @@
// limitations under the License.
#include "backend/vulkan/BindGroupLayoutVk.h"
#include "backend/vulkan/BindGroupVk.h"
#include "backend/vulkan/BlendStateVk.h"
#include "backend/vulkan/BufferVk.h"
#include "backend/vulkan/CommandBufferVk.h"

View File

@ -16,6 +16,7 @@
#include "backend/Commands.h"
#include "backend/vulkan/BindGroupLayoutVk.h"
#include "backend/vulkan/BindGroupVk.h"
#include "backend/vulkan/BlendStateVk.h"
#include "backend/vulkan/BufferUploader.h"
#include "backend/vulkan/BufferVk.h"

View File

@ -17,7 +17,6 @@
#include "nxt/nxtcpp.h"
#include "backend/BindGroup.h"
#include "backend/ComputePipeline.h"
#include "backend/DepthStencilState.h"
#include "backend/Device.h"
@ -34,7 +33,7 @@
namespace backend { namespace vulkan {
using BindGroup = BindGroupBase;
class BindGroup;
class BindGroupLayout;
class BlendState;
class Buffer;