boo/lib/graphicsdev/Vulkan.cpp

#include "boo/graphicsdev/Vulkan.hpp"
#include "boo/IGraphicsContext.hpp"
#include <vector>
#include <array>
#include <cmath>
#include <glslang/Public/ShaderLang.h>
#include <StandAlone/ResourceLimits.h>
#include <SPIRV/GlslangToSpv.h>
#include <SPIRV/disassemble.h>
#include "boo/graphicsdev/GLSLMacros.hpp"
#include "Common.hpp"
#include "xxhash.h"
#define AMD_PAL_HACK 1
#define VMA_IMPLEMENTATION
#define VMA_STATIC_VULKAN_FUNCTIONS 0
#include "vk_mem_alloc.h"
#include "logvisor/logvisor.hpp"
#define BOO_VK_MAX_DESCRIPTOR_SETS 65536
#undef min
#undef max
#undef None
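/* Gamma correction pass shaders. The fragment stage remaps each color channel
 * through a 256x256 gamma LUT: the 16-bit channel value selects the column
 * (value % 256) and row (value / 256); e.g. an input of 300 reads texel (44, 1). */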
static const char* GammaVS =
"#version 330\n"
BOO_GLSL_BINDING_HEAD
"layout(location=0) in vec4 posIn;\n"
"layout(location=1) in vec4 uvIn;\n"
"\n"
"struct VertToFrag\n"
"{\n"
" vec2 uv;\n"
"};\n"
"\n"
"SBINDING(0) out VertToFrag vtf;\n"
"void main()\n"
"{\n"
" vtf.uv = uvIn.xy;\n"
" gl_Position = posIn;\n"
"}\n";
static const char* GammaFS =
"#version 330\n"
BOO_GLSL_BINDING_HEAD
"struct VertToFrag\n"
"{\n"
" vec2 uv;\n"
"};\n"
"\n"
"SBINDING(0) in VertToFrag vtf;\n"
"layout(location=0) out vec4 colorOut;\n"
"TBINDING0 uniform sampler2D screenTex;\n"
"TBINDING1 uniform sampler2D gammaLUT;\n"
"void main()\n"
"{\n"
" ivec4 tex = ivec4(texture(screenTex, vtf.uv) * 65535.0);\n"
" for (int i=0 ; i<3 ; ++i)\n"
" colorOut[i] = texelFetch(gammaLUT, ivec2(tex[i] % 256, tex[i] / 256), 0).r;\n"
"}\n";
namespace boo
{
static logvisor::Module Log("boo::Vulkan");
VulkanContext g_VulkanContext;
class VulkanDataFactoryImpl;
struct VulkanCommandQueue;
struct VulkanDescriptorPool;
struct VulkanShareableShader : IShareableShader<VulkanDataFactoryImpl, VulkanShareableShader>
{
VkShaderModule m_shader;
VulkanShareableShader(VulkanDataFactoryImpl& fac, uint64_t srcKey, uint64_t binKey,
VkShaderModule s)
: IShareableShader(fac, srcKey, binKey), m_shader(s) {}
~VulkanShareableShader() { vk::DestroyShaderModule(g_VulkanContext.m_dev, m_shader, nullptr); }
};
class VulkanDataFactoryImpl : public VulkanDataFactory, public GraphicsDataFactoryHead
{
friend struct VulkanCommandQueue;
friend class VulkanDataFactory::Context;
friend struct VulkanData;
friend struct VulkanPool;
friend struct VulkanDescriptorPool;
friend struct VulkanShaderDataBinding;
IGraphicsContext* m_parent;
VulkanContext* m_ctx;
VulkanDescriptorPool* m_descPoolHead = nullptr;
std::unordered_map<uint64_t, std::unique_ptr<VulkanShareableShader>> m_sharedShaders;
std::unordered_map<uint64_t, uint64_t> m_sourceToBinary;
std::vector<int> m_texUnis;
float m_gamma = 1.f;
ObjToken<IShaderPipeline> m_gammaShader;
ObjToken<ITextureD> m_gammaLUT;
ObjToken<IGraphicsBufferS> m_gammaVBO;
ObjToken<IVertexFormat> m_gammaVFMT;
ObjToken<IShaderDataBinding> m_gammaBinding;
void SetupGammaResources()
{
commitTransaction([this](IGraphicsDataFactory::Context& ctx)
{
const VertexElementDescriptor vfmt[] = {
{nullptr, nullptr, VertexSemantic::Position4},
{nullptr, nullptr, VertexSemantic::UV4}
};
m_gammaVFMT = ctx.newVertexFormat(2, vfmt);
m_gammaShader = static_cast<Context&>(ctx).newShaderPipeline(GammaVS, GammaFS,
m_gammaVFMT, BlendFactor::One, BlendFactor::Zero,
Primitive::TriStrips, ZTest::None, false, true, false, CullMode::None);
m_gammaLUT = ctx.newDynamicTexture(256, 256, TextureFormat::I16, TextureClampMode::ClampToEdge);
setDisplayGamma(1.f);
const struct Vert {
float pos[4];
float uv[4];
} verts[4] = {
{{-1.f, -1.f, 0.f, 1.f}, {0.f, 0.f, 0.f, 0.f}},
{{ 1.f, -1.f, 0.f, 1.f}, {1.f, 0.f, 0.f, 0.f}},
{{-1.f, 1.f, 0.f, 1.f}, {0.f, 1.f, 0.f, 0.f}},
{{ 1.f, 1.f, 0.f, 1.f}, {1.f, 1.f, 0.f, 0.f}}
};
m_gammaVBO = ctx.newStaticBuffer(BufferUse::Vertex, verts, 32, 4);
ObjToken<ITexture> texs[] = {{}, m_gammaLUT.get()};
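/* texs[0] is left null here; the final present pass presumably fills this
 * slot with the resolved screen texture at bind time. */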
m_gammaBinding = ctx.newShaderDataBinding(m_gammaShader, m_gammaVFMT, m_gammaVBO.get(), {}, {},
0, nullptr, nullptr, 2, texs, nullptr, nullptr);
return true;
} BooTrace);
}
void DestroyGammaResources()
{
m_gammaBinding.reset();
m_gammaVFMT.reset();
m_gammaVBO.reset();
m_gammaLUT.reset();
m_gammaShader.reset();
}
public:
VulkanDataFactoryImpl(IGraphicsContext* parent, VulkanContext* ctx);
~VulkanDataFactoryImpl()
{
assert(m_descPoolHead == nullptr && "Dangling descriptor pools detected");
}
Platform platform() const {return Platform::Vulkan;}
const SystemChar* platformName() const {return _S("Vulkan");}
boo::ObjToken<VulkanDescriptorPool> allocateDescriptorSets(VkDescriptorSet* out);
void commitTransaction(const FactoryCommitFunc& __BooTraceArgs);
boo::ObjToken<IGraphicsBufferD> newPoolBuffer(BufferUse use, size_t stride, size_t count __BooTraceArgs);
void _unregisterShareableShader(uint64_t srcKey, uint64_t binKey)
{
if (srcKey)
m_sourceToBinary.erase(srcKey);
m_sharedShaders.erase(binKey);
}
void setDisplayGamma(float gamma)
{
m_gamma = gamma;
if (gamma != 1.f)
UpdateGammaLUT(m_gammaLUT.get(), gamma);
}
bool isTessellationSupported(uint32_t& maxPatchSizeOut)
{
maxPatchSizeOut = 0;
if (!m_ctx->m_features.tessellationShader)
return false;
maxPatchSizeOut = m_ctx->m_gpuProps.limits.maxTessellationPatchSize;
return true;
}
VulkanShareableShader::Token PrepareShaderStage(const char* source, std::vector<unsigned int>* blobOut,
EShLanguage lang);
uint64_t Compile(std::vector<unsigned int>& out, const char* source, uint64_t srcKey, EShLanguage lang);
};
static inline void ThrowIfFailed(VkResult res)
{
if (res != VK_SUCCESS)
Log.report(logvisor::Fatal, "%d\n", res);
}
static inline void ThrowIfFalse(bool res)
{
if (!res)
Log.report(logvisor::Fatal, "operation failed\n");
}
static VKAPI_ATTR VkBool32 VKAPI_CALL
dbgFunc(VkDebugReportFlagsEXT msgFlags, VkDebugReportObjectTypeEXT objType,
uint64_t srcObject, size_t location, int32_t msgCode,
const char *pLayerPrefix, const char *pMsg, void *pUserData)
{
if (msgFlags & VK_DEBUG_REPORT_ERROR_BIT_EXT) {
Log.report(logvisor::Error, "[%s] Code %d : %s", pLayerPrefix, msgCode, pMsg);
} else if (msgFlags & VK_DEBUG_REPORT_WARNING_BIT_EXT) {
Log.report(logvisor::Warning, "[%s] Code %d : %s", pLayerPrefix, msgCode, pMsg);
} else if (msgFlags & VK_DEBUG_REPORT_PERFORMANCE_WARNING_BIT_EXT) {
Log.report(logvisor::Warning, "[%s] Code %d : %s", pLayerPrefix, msgCode, pMsg);
} else if (msgFlags & VK_DEBUG_REPORT_INFORMATION_BIT_EXT) {
Log.report(logvisor::Info, "[%s] Code %d : %s", pLayerPrefix, msgCode, pMsg);
} else if (msgFlags & VK_DEBUG_REPORT_DEBUG_BIT_EXT) {
Log.report(logvisor::Info, "[%s] Code %d : %s", pLayerPrefix, msgCode, pMsg);
}
/*
* Returning false tells the layer not to bail out of an
* API call that had validation failures. This may mean the
* app dies inside the driver due to invalid parameter(s).
* That's what would happen without validation layers, so we
* keep that behavior here.
*/
return VK_FALSE;
}
static void SetImageLayout(VkCommandBuffer cmd, VkImage image,
VkImageAspectFlags aspectMask,
VkImageLayout old_image_layout,
VkImageLayout new_image_layout,
uint32_t mipCount, uint32_t layerCount)
2016-02-21 06:27:54 +00:00
{
VkImageMemoryBarrier imageMemoryBarrier = {};
imageMemoryBarrier.sType = VK_STRUCTURE_TYPE_IMAGE_MEMORY_BARRIER;
imageMemoryBarrier.pNext = NULL;
imageMemoryBarrier.srcAccessMask = 0;
imageMemoryBarrier.dstAccessMask = 0;
imageMemoryBarrier.oldLayout = old_image_layout;
imageMemoryBarrier.newLayout = new_image_layout;
imageMemoryBarrier.image = image;
imageMemoryBarrier.subresourceRange.aspectMask = aspectMask;
imageMemoryBarrier.subresourceRange.baseMipLevel = 0;
imageMemoryBarrier.subresourceRange.levelCount = mipCount;
imageMemoryBarrier.subresourceRange.layerCount = layerCount;
imageMemoryBarrier.srcQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
imageMemoryBarrier.dstQueueFamilyIndex = VK_QUEUE_FAMILY_IGNORED;
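/* Default to a conservative ALL_GRAPHICS dependency; the switches below
 * substitute more precise stage masks for the layouts involved. */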
VkPipelineStageFlags src_stages = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT;
VkPipelineStageFlags dest_stages = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT;
switch (old_image_layout)
{
case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
imageMemoryBarrier.srcAccessMask =
VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT;
src_stages = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT;
break;
case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
imageMemoryBarrier.srcAccessMask = VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
src_stages = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT;
break;
case VK_IMAGE_LAYOUT_PREINITIALIZED:
imageMemoryBarrier.srcAccessMask = VK_ACCESS_HOST_WRITE_BIT;
break;
case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
imageMemoryBarrier.srcAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
src_stages = VK_PIPELINE_STAGE_TRANSFER_BIT;
break;
case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
imageMemoryBarrier.srcAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
src_stages = VK_PIPELINE_STAGE_TRANSFER_BIT;
break;
case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
imageMemoryBarrier.srcAccessMask = VK_ACCESS_SHADER_READ_BIT;
src_stages = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT;
break;
default: break;
}
switch (new_image_layout)
{
case VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL:
imageMemoryBarrier.dstAccessMask = VK_ACCESS_TRANSFER_WRITE_BIT;
dest_stages = VK_PIPELINE_STAGE_TRANSFER_BIT;
break;
case VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL:
imageMemoryBarrier.dstAccessMask = VK_ACCESS_TRANSFER_READ_BIT;
dest_stages = VK_PIPELINE_STAGE_TRANSFER_BIT;
break;
case VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL:
imageMemoryBarrier.dstAccessMask = VK_ACCESS_SHADER_READ_BIT;
dest_stages = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT;
break;
case VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL:
imageMemoryBarrier.dstAccessMask =
VK_ACCESS_COLOR_ATTACHMENT_WRITE_BIT | VK_ACCESS_COLOR_ATTACHMENT_READ_BIT;
dest_stages = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT;
break;
case VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL:
imageMemoryBarrier.dstAccessMask =
VK_ACCESS_DEPTH_STENCIL_ATTACHMENT_WRITE_BIT;
dest_stages = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT;
break;
case VK_IMAGE_LAYOUT_PRESENT_SRC_KHR:
imageMemoryBarrier.dstAccessMask = VK_ACCESS_MEMORY_READ_BIT;
dest_stages = VK_PIPELINE_STAGE_ALL_GRAPHICS_BIT;
break;
default: break;
}
vk::CmdPipelineBarrier(cmd, src_stages, dest_stages, 0, 0, NULL, 0, NULL,
1, &imageMemoryBarrier);
}
static VkResult InitGlobalExtensionProperties(VulkanContext::LayerProperties& layerProps) {
VkExtensionProperties *instance_extensions;
uint32_t instance_extension_count;
VkResult res;
char* layer_name = layerProps.properties.layerName;
do {
res = vk::EnumerateInstanceExtensionProperties(
layer_name, &instance_extension_count, nullptr);
if (res)
return res;
if (instance_extension_count == 0) {
return VK_SUCCESS;
}
layerProps.extensions.resize(instance_extension_count);
instance_extensions = layerProps.extensions.data();
res = vk::EnumerateInstanceExtensionProperties(
layer_name, &instance_extension_count, instance_extensions);
} while (res == VK_INCOMPLETE);
return res;
}
/*
* Verify that all layer names specified in layerNames can be
* found in the given layer properties; report a fatal error
* for any that are missing.
*/
static void demo_check_layers(const std::vector<VulkanContext::LayerProperties>& layerProps,
const std::vector<const char*> &layerNames) {
uint32_t check_count = layerNames.size();
uint32_t layer_count = layerProps.size();
for (uint32_t i = 0; i < check_count; i++) {
VkBool32 found = 0;
for (uint32_t j = 0; j < layer_count; j++) {
if (!strcmp(layerNames[i], layerProps[j].properties.layerName)) {
found = 1;
}
}
if (!found) {
Log.report(logvisor::Fatal, "Cannot find layer: %s", layerNames[i]);
}
}
}
bool VulkanContext::initVulkan(std::string_view appName, PFN_vkGetInstanceProcAddr getVkProc)
{
vk::init_dispatch_table_top(getVkProc);
if (!glslang::InitializeProcess())
{
Log.report(logvisor::Error, "unable to initialize glslang");
return false;
}
uint32_t instanceLayerCount;
VkLayerProperties* vkProps = nullptr;
VkResult res;
/*
* It's possible, though very rare, that the number of
* instance layers could change. For example, installing something
* could include new layers that the loader would pick up
* between the initial query for the count and the
* request for VkLayerProperties. The loader indicates that
* by returning a VK_INCOMPLETE status and will update
* the count parameter with the number of entries loaded
* into the data pointer - in case the number of layers
* went down or is smaller than the size given.
*/
#ifdef _WIN32
char* vkSdkPath = getenv("VK_SDK_PATH");
if (vkSdkPath)
{
std::string str = "VK_LAYER_PATH=";
str += vkSdkPath;
str += "\\Bin";
_putenv(str.c_str());
}
#else
setenv("VK_LAYER_PATH", "/usr/share/vulkan/explicit_layer.d", 1);
#endif
do {
ThrowIfFailed(vk::EnumerateInstanceLayerProperties(&instanceLayerCount, nullptr));
if (instanceLayerCount == 0)
break;
vkProps = (VkLayerProperties *)realloc(vkProps, instanceLayerCount * sizeof(VkLayerProperties));
res = vk::EnumerateInstanceLayerProperties(&instanceLayerCount, vkProps);
} while (res == VK_INCOMPLETE);
/*
* Now gather the extension list for each instance layer.
*/
for (uint32_t i=0 ; i<instanceLayerCount ; ++i)
{
LayerProperties layerProps;
layerProps.properties = vkProps[i];
ThrowIfFailed(InitGlobalExtensionProperties(layerProps));
m_instanceLayerProperties.push_back(layerProps);
}
free(vkProps);
/* need platform surface extensions */
m_instanceExtensionNames.push_back(VK_KHR_SURFACE_EXTENSION_NAME);
#ifdef _WIN32
m_instanceExtensionNames.push_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
#else
m_instanceExtensionNames.push_back(VK_KHR_XCB_SURFACE_EXTENSION_NAME);
#endif
/* need swapchain device extension */
m_deviceExtensionNames.push_back(VK_KHR_SWAPCHAIN_EXTENSION_NAME);
#ifndef NDEBUG
m_layerNames.push_back("VK_LAYER_LUNARG_standard_validation");
//m_layerNames.push_back("VK_LAYER_LUNARG_api_dump");
//m_layerNames.push_back("VK_LAYER_LUNARG_core_validation");
//m_layerNames.push_back("VK_LAYER_LUNARG_object_tracker");
//m_layerNames.push_back("VK_LAYER_LUNARG_parameter_validation");
//m_layerNames.push_back("VK_LAYER_GOOGLE_threading");
#endif
demo_check_layers(m_instanceLayerProperties, m_layerNames);
#ifndef NDEBUG
/* Enable debug callback extension */
m_instanceExtensionNames.push_back(VK_EXT_DEBUG_REPORT_EXTENSION_NAME);
#endif
/* create the instance */
VkApplicationInfo appInfo = {};
appInfo.sType = VK_STRUCTURE_TYPE_APPLICATION_INFO;
appInfo.pNext = nullptr;
appInfo.pApplicationName = appName.data();
appInfo.applicationVersion = 1;
appInfo.pEngineName = "Boo";
appInfo.engineVersion = 1;
appInfo.apiVersion = VK_API_VERSION_1_0;
VkInstanceCreateInfo instInfo = {};
instInfo.sType = VK_STRUCTURE_TYPE_INSTANCE_CREATE_INFO;
instInfo.pNext = nullptr;
instInfo.flags = 0;
instInfo.pApplicationInfo = &appInfo;
instInfo.enabledLayerCount = m_layerNames.size();
instInfo.ppEnabledLayerNames = m_layerNames.size()
? m_layerNames.data()
: nullptr;
instInfo.enabledExtensionCount = m_instanceExtensionNames.size();
instInfo.ppEnabledExtensionNames = m_instanceExtensionNames.data();
VkResult instRes = vk::CreateInstance(&instInfo, nullptr, &m_instance);
if (instRes != VK_SUCCESS)
{
Log.report(logvisor::Error, "The Vulkan runtime is installed, but there are no supported "
"hardware vendor interfaces present");
return false;
}
#ifndef NDEBUG
VkDebugReportCallbackEXT debugReportCallback;
PFN_vkCreateDebugReportCallbackEXT createDebugReportCallback =
(PFN_vkCreateDebugReportCallbackEXT)vk::GetInstanceProcAddr(m_instance, "vkCreateDebugReportCallbackEXT");
if (!createDebugReportCallback)
Log.report(logvisor::Fatal, "GetInstanceProcAddr: Unable to find vkCreateDebugReportCallbackEXT function.");
VkDebugReportCallbackCreateInfoEXT debugCreateInfo = {};
debugCreateInfo.sType = VK_STRUCTURE_TYPE_DEBUG_REPORT_CREATE_INFO_EXT;
debugCreateInfo.pNext = nullptr;
debugCreateInfo.flags = VK_DEBUG_REPORT_ERROR_BIT_EXT | VK_DEBUG_REPORT_WARNING_BIT_EXT;
debugCreateInfo.pfnCallback = dbgFunc;
debugCreateInfo.pUserData = nullptr;
ThrowIfFailed(createDebugReportCallback(m_instance, &debugCreateInfo, nullptr, &debugReportCallback));
#endif
vk::init_dispatch_table_middle(m_instance, false);
return true;
}
bool VulkanContext::enumerateDevices()
{
uint32_t gpuCount = 1;
ThrowIfFailed(vk::EnumeratePhysicalDevices(m_instance, &gpuCount, nullptr));
if (!gpuCount)
return false;
m_gpus.resize(gpuCount);
ThrowIfFailed(vk::EnumeratePhysicalDevices(m_instance, &gpuCount, m_gpus.data()));
if (!gpuCount)
return false;
vk::GetPhysicalDeviceQueueFamilyProperties(m_gpus[0], &m_queueCount, nullptr);
if (!m_queueCount)
return false;
m_queueProps.resize(m_queueCount);
vk::GetPhysicalDeviceQueueFamilyProperties(m_gpus[0], &m_queueCount, m_queueProps.data());
if (!m_queueCount)
return false;
/* This is as good a place as any to do this */
vk::GetPhysicalDeviceMemoryProperties(m_gpus[0], &m_memoryProperties);
vk::GetPhysicalDeviceProperties(m_gpus[0], &m_gpuProps);
return true;
}
void VulkanContext::initDevice()
{
if (m_graphicsQueueFamilyIndex == UINT32_MAX)
Log.report(logvisor::Fatal,
"VulkanContext::m_graphicsQueueFamilyIndex hasn't been initialized");
/* create the device and queues */
VkDeviceQueueCreateInfo queueInfo = {};
float queuePriorities[1] = {0.0};
queueInfo.sType = VK_STRUCTURE_TYPE_DEVICE_QUEUE_CREATE_INFO;
queueInfo.pNext = nullptr;
queueInfo.queueCount = 1;
queueInfo.pQueuePriorities = queuePriorities;
queueInfo.queueFamilyIndex = m_graphicsQueueFamilyIndex;
vk::GetPhysicalDeviceFeatures(m_gpus[0], &m_features);
VkPhysicalDeviceFeatures features = {};
if (m_features.samplerAnisotropy)
features.samplerAnisotropy = VK_TRUE;
if (!m_features.textureCompressionBC)
Log.report(logvisor::Fatal,
"Vulkan device does not support DXT-format textures");
features.textureCompressionBC = VK_TRUE;
VkShaderStageFlagBits tessellationDescriptorBit = VkShaderStageFlagBits(0);
if (m_features.tessellationShader)
{
tessellationDescriptorBit = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
features.tessellationShader = VK_TRUE;
}
uint32_t extCount = 0;
vk::EnumerateDeviceExtensionProperties(m_gpus[0], nullptr, &extCount, nullptr);
std::vector<VkExtensionProperties> extensions(extCount);
vk::EnumerateDeviceExtensionProperties(m_gpus[0], nullptr, &extCount, extensions.data());
bool hasGetMemReq2 = false;
bool hasDedicatedAllocation = false;
for (const VkExtensionProperties& ext : extensions)
{
if (!hasGetMemReq2 && !strcmp(ext.extensionName, VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME))
hasGetMemReq2 = true;
else if (!hasDedicatedAllocation && !strcmp(ext.extensionName, VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME))
hasDedicatedAllocation = true;
}
VmaAllocatorCreateFlags allocFlags = VMA_ALLOCATOR_CREATE_EXTERNALLY_SYNCHRONIZED_BIT;
if (hasGetMemReq2 && hasDedicatedAllocation)
{
m_deviceExtensionNames.push_back(VK_KHR_GET_MEMORY_REQUIREMENTS_2_EXTENSION_NAME);
m_deviceExtensionNames.push_back(VK_KHR_DEDICATED_ALLOCATION_EXTENSION_NAME);
allocFlags |= VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT;
}
VkDeviceCreateInfo deviceInfo = {};
deviceInfo.sType = VK_STRUCTURE_TYPE_DEVICE_CREATE_INFO;
deviceInfo.pNext = nullptr;
deviceInfo.queueCreateInfoCount = 1;
deviceInfo.pQueueCreateInfos = &queueInfo;
deviceInfo.enabledLayerCount = m_layerNames.size();
deviceInfo.ppEnabledLayerNames =
deviceInfo.enabledLayerCount ? m_layerNames.data() : nullptr;
deviceInfo.enabledExtensionCount = m_deviceExtensionNames.size();
deviceInfo.ppEnabledExtensionNames =
deviceInfo.enabledExtensionCount ? m_deviceExtensionNames.data() : nullptr;
deviceInfo.pEnabledFeatures = &features;
ThrowIfFailed(vk::CreateDevice(m_gpus[0], &deviceInfo, nullptr, &m_dev));
vk::init_dispatch_table_bottom(m_instance, m_dev);
/* allocator */
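/* VMA_STATIC_VULKAN_FUNCTIONS is defined to 0 above, so every Vulkan entry
 * point VMA needs must be passed in explicitly through this table. */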
VmaVulkanFunctions vulkanFunctions = {};
vulkanFunctions.vkGetPhysicalDeviceProperties = vk::GetPhysicalDeviceProperties;
vulkanFunctions.vkGetPhysicalDeviceMemoryProperties = vk::GetPhysicalDeviceMemoryProperties;
vulkanFunctions.vkAllocateMemory = vk::AllocateMemory;
vulkanFunctions.vkFreeMemory = vk::FreeMemory;
vulkanFunctions.vkMapMemory = vk::MapMemory;
vulkanFunctions.vkUnmapMemory = vk::UnmapMemory;
vulkanFunctions.vkBindBufferMemory = vk::BindBufferMemory;
vulkanFunctions.vkBindImageMemory = vk::BindImageMemory;
vulkanFunctions.vkGetBufferMemoryRequirements = vk::GetBufferMemoryRequirements;
vulkanFunctions.vkGetImageMemoryRequirements = vk::GetImageMemoryRequirements;
vulkanFunctions.vkCreateBuffer = vk::CreateBuffer;
vulkanFunctions.vkDestroyBuffer = vk::DestroyBuffer;
vulkanFunctions.vkCreateImage = vk::CreateImage;
vulkanFunctions.vkDestroyImage = vk::DestroyImage;
if (hasGetMemReq2 && hasDedicatedAllocation)
{
vulkanFunctions.vkGetBufferMemoryRequirements2KHR = reinterpret_cast<PFN_vkGetBufferMemoryRequirements2KHR>(
vk::GetDeviceProcAddr(m_dev, "vkGetBufferMemoryRequirements2KHR"));
vulkanFunctions.vkGetImageMemoryRequirements2KHR = reinterpret_cast<PFN_vkGetImageMemoryRequirements2KHR>(
vk::GetDeviceProcAddr(m_dev, "vkGetImageMemoryRequirements2KHR"));
}
VmaAllocatorCreateInfo allocatorInfo = {};
allocatorInfo.flags = allocFlags;
allocatorInfo.physicalDevice = m_gpus[0];
allocatorInfo.device = m_dev;
allocatorInfo.pVulkanFunctions = &vulkanFunctions;
ThrowIfFailed(vmaCreateAllocator(&allocatorInfo, &m_allocator));
// A command buffer is needed to submit the memory barriers in
// SetImageLayout, but it could not be created before
// m_graphicsQueueFamilyIndex was known; now that it is, create
// the command pool and buffer.
VkCommandPoolCreateInfo cmdPoolInfo = {};
cmdPoolInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
cmdPoolInfo.pNext = nullptr;
cmdPoolInfo.queueFamilyIndex = m_graphicsQueueFamilyIndex;
cmdPoolInfo.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
ThrowIfFailed(vk::CreateCommandPool(m_dev, &cmdPoolInfo, nullptr, &m_loadPool));
VkCommandBufferAllocateInfo cmd = {};
cmd.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
cmd.pNext = nullptr;
cmd.commandPool = m_loadPool;
cmd.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
cmd.commandBufferCount = 1;
ThrowIfFailed(vk::AllocateCommandBuffers(m_dev, &cmd, &m_loadCmdBuf));
vk::GetDeviceQueue(m_dev, m_graphicsQueueFamilyIndex, 0, &m_queue);
/* Begin load command buffer here */
VkCommandBufferBeginInfo cmdBufBeginInfo = {};
cmdBufBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
cmdBufBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
ThrowIfFailed(vk::BeginCommandBuffer(m_loadCmdBuf, &cmdBufBeginInfo));
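/* Clamp the requested MSAA counts to what the framebuffer supports;
 * flp2 presumably rounds down to the nearest power of two. */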
m_sampleCountColor = flp2(std::min(m_gpuProps.limits.framebufferColorSampleCounts, m_sampleCountColor));
m_sampleCountDepth = flp2(std::min(m_gpuProps.limits.framebufferDepthSampleCounts, m_sampleCountDepth));
if (m_features.samplerAnisotropy)
m_anisotropy = std::min(m_gpuProps.limits.maxSamplerAnisotropy, m_anisotropy);
else
m_anisotropy = 1;
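/* A single descriptor-set layout is shared by all pipelines: bindings
 * [0, BOO_GLSL_MAX_UNIFORM_COUNT) are uniform buffers and the following
 * BOO_GLSL_MAX_TEXTURE_COUNT bindings are combined image samplers. */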
VkDescriptorSetLayoutBinding layoutBindings[BOO_GLSL_MAX_UNIFORM_COUNT + BOO_GLSL_MAX_TEXTURE_COUNT];
for (int i=0 ; i<BOO_GLSL_MAX_UNIFORM_COUNT ; ++i)
{
layoutBindings[i].binding = i;
layoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
layoutBindings[i].descriptorCount = 1;
layoutBindings[i].stageFlags = VK_SHADER_STAGE_VERTEX_BIT | VK_SHADER_STAGE_FRAGMENT_BIT |
tessellationDescriptorBit;
layoutBindings[i].pImmutableSamplers = nullptr;
}
for (int i=BOO_GLSL_MAX_UNIFORM_COUNT ; i<BOO_GLSL_MAX_UNIFORM_COUNT+BOO_GLSL_MAX_TEXTURE_COUNT ; ++i)
{
layoutBindings[i].binding = i;
layoutBindings[i].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
layoutBindings[i].descriptorCount = 1;
layoutBindings[i].stageFlags = VK_SHADER_STAGE_FRAGMENT_BIT | tessellationDescriptorBit;
layoutBindings[i].pImmutableSamplers = nullptr;
}
VkDescriptorSetLayoutCreateInfo descriptorLayout = {};
descriptorLayout.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_LAYOUT_CREATE_INFO;
descriptorLayout.pNext = nullptr;
descriptorLayout.bindingCount = BOO_GLSL_MAX_UNIFORM_COUNT + BOO_GLSL_MAX_TEXTURE_COUNT;
descriptorLayout.pBindings = layoutBindings;
ThrowIfFailed(vk::CreateDescriptorSetLayout(m_dev, &descriptorLayout, nullptr,
&m_descSetLayout));
VkPipelineLayoutCreateInfo pipelineLayout = {};
pipelineLayout.sType = VK_STRUCTURE_TYPE_PIPELINE_LAYOUT_CREATE_INFO;
pipelineLayout.setLayoutCount = 1;
pipelineLayout.pSetLayouts = &m_descSetLayout;
ThrowIfFailed(vk::CreatePipelineLayout(m_dev, &pipelineLayout, nullptr, &m_pipelinelayout));
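/* Vulkan packs apiVersion as major(10).minor(10).patch(12) bits. Driver
 * versions use a vendor-defined encoding; the same decode is applied to
 * them here as a best effort. */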
std::string gpuName = m_gpuProps.deviceName;
Log.report(logvisor::Info, "Initialized %s", gpuName.c_str());
Log.report(logvisor::Info, "Vulkan version %d.%d.%d",
m_gpuProps.apiVersion >> 22,
(m_gpuProps.apiVersion >> 12) & 0b1111111111,
m_gpuProps.apiVersion & 0b111111111111);
Log.report(logvisor::Info, "Driver version %d.%d.%d",
m_gpuProps.driverVersion >> 22,
(m_gpuProps.driverVersion >> 12) & 0b1111111111,
m_gpuProps.driverVersion & 0b111111111111);
}
void VulkanContext::destroyDevice()
{
if (m_passColorOnly)
{
vk::DestroyRenderPass(m_dev, m_passColorOnly, nullptr);
m_passColorOnly = VK_NULL_HANDLE;
}
if (m_pass)
{
vk::DestroyRenderPass(m_dev, m_pass, nullptr);
m_pass = VK_NULL_HANDLE;
}
if (m_pipelinelayout)
{
vk::DestroyPipelineLayout(m_dev, m_pipelinelayout, nullptr);
m_pipelinelayout = VK_NULL_HANDLE;
}
if (m_descSetLayout)
{
vk::DestroyDescriptorSetLayout(m_dev, m_descSetLayout, nullptr);
m_descSetLayout = VK_NULL_HANDLE;
}
if (m_loadPool)
{
vk::DestroyCommandPool(m_dev, m_loadPool, nullptr);
m_loadPool = VK_NULL_HANDLE;
}
if (m_allocator)
{
vmaDestroyAllocator(m_allocator);
m_allocator = VK_NULL_HANDLE;
}
if (m_dev)
{
vk::DestroyDevice(m_dev, nullptr);
m_dev = VK_NULL_HANDLE;
}
if (m_instance)
{
vk::DestroyInstance(m_instance, nullptr);
m_instance = VK_NULL_HANDLE;
}
}
void VulkanContext::Window::SwapChain::Buffer::setImage
(VulkanContext* ctx, VkImage image, uint32_t width, uint32_t height)
{
m_image = image;
if (m_colorView)
vk::DestroyImageView(ctx->m_dev, m_colorView, nullptr);
if (m_framebuffer)
vk::DestroyFramebuffer(ctx->m_dev, m_framebuffer, nullptr);
/* Create resource views */
VkImageViewCreateInfo viewCreateInfo = {};
viewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
viewCreateInfo.pNext = nullptr;
viewCreateInfo.image = m_image;
viewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
viewCreateInfo.format = ctx->m_displayFormat;
viewCreateInfo.components.r = VK_COMPONENT_SWIZZLE_R;
viewCreateInfo.components.g = VK_COMPONENT_SWIZZLE_G;
viewCreateInfo.components.b = VK_COMPONENT_SWIZZLE_B;
viewCreateInfo.components.a = VK_COMPONENT_SWIZZLE_A;
viewCreateInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
viewCreateInfo.subresourceRange.baseMipLevel = 0;
viewCreateInfo.subresourceRange.levelCount = 1;
viewCreateInfo.subresourceRange.baseArrayLayer = 0;
viewCreateInfo.subresourceRange.layerCount = 1;
ThrowIfFailed(vk::CreateImageView(ctx->m_dev, &viewCreateInfo, nullptr, &m_colorView));
/* framebuffer */
VkFramebufferCreateInfo fbCreateInfo = {};
fbCreateInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
fbCreateInfo.pNext = nullptr;
fbCreateInfo.renderPass = ctx->m_passColorOnly;
fbCreateInfo.attachmentCount = 1;
fbCreateInfo.width = width;
fbCreateInfo.height = height;
fbCreateInfo.layers = 1;
fbCreateInfo.pAttachments = &m_colorView;
ThrowIfFailed(vk::CreateFramebuffer(ctx->m_dev, &fbCreateInfo, nullptr, &m_framebuffer));
m_passBeginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
m_passBeginInfo.pNext = nullptr;
m_passBeginInfo.renderPass = ctx->m_passColorOnly;
m_passBeginInfo.framebuffer = m_framebuffer;
m_passBeginInfo.renderArea.offset.x = 0;
m_passBeginInfo.renderArea.offset.y = 0;
m_passBeginInfo.renderArea.extent.width = width;
m_passBeginInfo.renderArea.extent.height = height;
m_passBeginInfo.clearValueCount = 0;
m_passBeginInfo.pClearValues = nullptr;
}
void VulkanContext::Window::SwapChain::Buffer::destroy(VkDevice dev)
{
if (m_colorView)
vk::DestroyImageView(dev, m_colorView, nullptr);
if (m_framebuffer)
vk::DestroyFramebuffer(dev, m_framebuffer, nullptr);
}
void VulkanContext::initSwapChain(VulkanContext::Window& windowCtx, VkSurfaceKHR surface,
VkFormat format, VkColorSpaceKHR colorspace)
{
m_internalFormat = m_displayFormat = format;
if (m_deepColor)
m_internalFormat = VK_FORMAT_R16G16B16A16_UNORM;
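/* With deep color, rendering presumably happens in 16-bit UNORM internally
 * and is mapped to the swapchain's display format by the final gamma pass. */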
/* bootstrap render passes if needed */
if (!m_pass)
{
VkAttachmentDescription attachments[2] = {};
/* color attachment */
attachments[0].format = m_internalFormat;
attachments[0].samples = VkSampleCountFlagBits(m_sampleCountColor);
attachments[0].loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
attachments[0].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
attachments[0].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
attachments[0].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
attachments[0].initialLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
attachments[0].finalLayout = VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL;
VkAttachmentReference colorAttachmentRef = {0, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL};
/* depth attachment */
attachments[1].format = VK_FORMAT_D32_SFLOAT;
attachments[1].samples = VkSampleCountFlagBits(m_sampleCountDepth);
attachments[1].loadOp = VK_ATTACHMENT_LOAD_OP_LOAD;
attachments[1].storeOp = VK_ATTACHMENT_STORE_OP_STORE;
attachments[1].stencilLoadOp = VK_ATTACHMENT_LOAD_OP_DONT_CARE;
attachments[1].stencilStoreOp = VK_ATTACHMENT_STORE_OP_DONT_CARE;
attachments[1].initialLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
attachments[1].finalLayout = VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL;
VkAttachmentReference depthAttachmentRef = {1, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL};
/* render subpass */
VkSubpassDescription subpass = {};
subpass.pipelineBindPoint = VK_PIPELINE_BIND_POINT_GRAPHICS;
subpass.colorAttachmentCount = 1;
subpass.pColorAttachments = &colorAttachmentRef;
subpass.pDepthStencilAttachment = &depthAttachmentRef;
/* render pass */
VkRenderPassCreateInfo renderPass = {};
renderPass.sType = VK_STRUCTURE_TYPE_RENDER_PASS_CREATE_INFO;
renderPass.attachmentCount = 2;
renderPass.pAttachments = attachments;
renderPass.subpassCount = 1;
renderPass.pSubpasses = &subpass;
ThrowIfFailed(vk::CreateRenderPass(m_dev, &renderPass, nullptr, &m_pass));
/* render pass color only */
attachments[0].format = m_displayFormat;
attachments[0].samples = VK_SAMPLE_COUNT_1_BIT;
renderPass.attachmentCount = 1;
subpass.pDepthStencilAttachment = nullptr;
ThrowIfFailed(vk::CreateRenderPass(m_dev, &renderPass, nullptr, &m_passColorOnly));
}
VkSurfaceCapabilitiesKHR surfCapabilities;
ThrowIfFailed(vk::GetPhysicalDeviceSurfaceCapabilitiesKHR(m_gpus[0], surface, &surfCapabilities));
uint32_t presentModeCount;
ThrowIfFailed(vk::GetPhysicalDeviceSurfacePresentModesKHR(m_gpus[0], surface, &presentModeCount, nullptr));
std::unique_ptr<VkPresentModeKHR[]> presentModes(new VkPresentModeKHR[presentModeCount]);
ThrowIfFailed(vk::GetPhysicalDeviceSurfacePresentModesKHR(m_gpus[0], surface, &presentModeCount, presentModes.get()));
VkExtent2D swapChainExtent;
// width and height are either both -1, or both not -1.
if (surfCapabilities.currentExtent.width == (uint32_t)-1)
{
// If the surface size is undefined, fall back to an
// arbitrary default extent.
swapChainExtent.width = 50;
swapChainExtent.height = 50;
}
else
{
// If the surface size is defined, the swap chain size must match
swapChainExtent = surfCapabilities.currentExtent;
}
// If mailbox mode is available, use it, as it is the lowest-latency
// non-tearing mode. If not, try IMMEDIATE, which is usually available
// and is the fastest mode (though it tears). Otherwise fall back to
// FIFO, which is always available.
VkPresentModeKHR swapchainPresentMode = VK_PRESENT_MODE_FIFO_KHR;
for (size_t i=0 ; i<presentModeCount ; ++i)
{
if (presentModes[i] == VK_PRESENT_MODE_MAILBOX_KHR)
{
swapchainPresentMode = VK_PRESENT_MODE_MAILBOX_KHR;
break;
}
if ((swapchainPresentMode != VK_PRESENT_MODE_MAILBOX_KHR) &&
(presentModes[i] == VK_PRESENT_MODE_IMMEDIATE_KHR))
{
swapchainPresentMode = VK_PRESENT_MODE_IMMEDIATE_KHR;
}
}
// Determine the number of VkImages to use in the swap chain (we desire
// to own only one image at a time, besides the images being displayed
// and queued for display):
uint32_t desiredNumberOfSwapChainImages = surfCapabilities.minImageCount + 1;
if ((surfCapabilities.maxImageCount > 0) &&
(desiredNumberOfSwapChainImages > surfCapabilities.maxImageCount))
{
// Application must settle for fewer images than desired:
desiredNumberOfSwapChainImages = surfCapabilities.maxImageCount;
}
VkSurfaceTransformFlagBitsKHR preTransform;
if (surfCapabilities.supportedTransforms & VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR)
preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
else
preTransform = surfCapabilities.currentTransform;
VkSwapchainCreateInfoKHR swapChainInfo = {};
swapChainInfo.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
swapChainInfo.pNext = nullptr;
swapChainInfo.surface = surface;
swapChainInfo.minImageCount = desiredNumberOfSwapChainImages;
swapChainInfo.imageFormat = format;
swapChainInfo.imageExtent.width = swapChainExtent.width;
swapChainInfo.imageExtent.height = swapChainExtent.height;
swapChainInfo.preTransform = preTransform;
swapChainInfo.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
swapChainInfo.imageArrayLayers = 1;
swapChainInfo.presentMode = swapchainPresentMode;
swapChainInfo.oldSwapchain = nullptr;
swapChainInfo.clipped = true;
swapChainInfo.imageColorSpace = colorspace;
swapChainInfo.imageUsage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
swapChainInfo.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
swapChainInfo.queueFamilyIndexCount = 0;
swapChainInfo.pQueueFamilyIndices = nullptr;
Window::SwapChain& sc = windowCtx.m_swapChains[windowCtx.m_activeSwapChain];
ThrowIfFailed(vk::CreateSwapchainKHR(m_dev, &swapChainInfo, nullptr, &sc.m_swapChain));
sc.m_format = format;
uint32_t swapchainImageCount;
ThrowIfFailed(vk::GetSwapchainImagesKHR(m_dev, sc.m_swapChain, &swapchainImageCount, nullptr));
std::unique_ptr<VkImage[]> swapchainImages(new VkImage[swapchainImageCount]);
ThrowIfFailed(vk::GetSwapchainImagesKHR(m_dev, sc.m_swapChain, &swapchainImageCount, swapchainImages.get()));
/* images */
sc.m_bufs.resize(swapchainImageCount);
for (uint32_t i=0 ; i<swapchainImageCount ; ++i)
{
Window::SwapChain::Buffer& buf = sc.m_bufs[i];
buf.setImage(this, swapchainImages[i], swapChainExtent.width, swapChainExtent.height);
}
}
void VulkanContext::resizeSwapChain(VulkanContext::Window& windowCtx, VkSurfaceKHR surface,
VkFormat format, VkColorSpaceKHR colorspace,
const SWindowRect& rect)
{
std::unique_lock<std::mutex> lk(m_resizeLock);
m_deferredResizes.emplace(windowCtx, surface, format, colorspace, rect);
}
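/* Resizes are only queued here; _resizeSwapChains() consumes the queue,
 * presumably on the thread that owns rendering, so swapchain recreation
 * cannot race command submission. */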
bool VulkanContext::_resizeSwapChains()
{
std::unique_lock<std::mutex> lk(m_resizeLock);
if (m_deferredResizes.empty())
return false;
while (m_deferredResizes.size())
{
SwapChainResize& resize = m_deferredResizes.front();
VkSurfaceCapabilitiesKHR surfCapabilities;
ThrowIfFailed(vk::GetPhysicalDeviceSurfaceCapabilitiesKHR(m_gpus[0], resize.m_surface, &surfCapabilities));
uint32_t presentModeCount;
ThrowIfFailed(vk::GetPhysicalDeviceSurfacePresentModesKHR(m_gpus[0], resize.m_surface, &presentModeCount, nullptr));
std::unique_ptr<VkPresentModeKHR[]> presentModes(new VkPresentModeKHR[presentModeCount]);
ThrowIfFailed(vk::GetPhysicalDeviceSurfacePresentModesKHR(m_gpus[0], resize.m_surface, &presentModeCount, presentModes.get()));
VkExtent2D swapChainExtent;
// width and height are either both -1, or both not -1.
if (surfCapabilities.currentExtent.width == (uint32_t)-1)
{
// If the surface size is undefined, fall back to an
// arbitrary default extent.
swapChainExtent.width = 50;
swapChainExtent.height = 50;
}
else
{
// If the surface size is defined, the swap chain size must match
swapChainExtent = surfCapabilities.currentExtent;
}
// If mailbox mode is available, use it, as it is the lowest-latency
// non-tearing mode. If not, try IMMEDIATE, which is usually available
// and is the fastest mode (though it tears). Otherwise fall back to
// FIFO, which is always available.
VkPresentModeKHR swapchainPresentMode = VK_PRESENT_MODE_FIFO_KHR;
for (size_t i=0 ; i<presentModeCount ; ++i)
{
if (presentModes[i] == VK_PRESENT_MODE_MAILBOX_KHR)
{
swapchainPresentMode = VK_PRESENT_MODE_MAILBOX_KHR;
break;
}
if ((swapchainPresentMode != VK_PRESENT_MODE_MAILBOX_KHR) &&
(presentModes[i] == VK_PRESENT_MODE_IMMEDIATE_KHR))
{
swapchainPresentMode = VK_PRESENT_MODE_IMMEDIATE_KHR;
}
}
// Determine the number of VkImages to use in the swap chain (we desire
// to own only one image at a time, besides the images being displayed
// and queued for display):
uint32_t desiredNumberOfSwapChainImages = surfCapabilities.minImageCount + 1;
if ((surfCapabilities.maxImageCount > 0) &&
(desiredNumberOfSwapChainImages > surfCapabilities.maxImageCount))
{
// Application must settle for fewer images than desired:
desiredNumberOfSwapChainImages = surfCapabilities.maxImageCount;
}
VkSurfaceTransformFlagBitsKHR preTransform;
if (surfCapabilities.supportedTransforms & VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR)
preTransform = VK_SURFACE_TRANSFORM_IDENTITY_BIT_KHR;
else
preTransform = surfCapabilities.currentTransform;
Window::SwapChain& oldSc = resize.m_windowCtx.m_swapChains[resize.m_windowCtx.m_activeSwapChain];
VkSwapchainCreateInfoKHR swapChainInfo = {};
swapChainInfo.sType = VK_STRUCTURE_TYPE_SWAPCHAIN_CREATE_INFO_KHR;
swapChainInfo.pNext = nullptr;
swapChainInfo.surface = resize.m_surface;
swapChainInfo.minImageCount = desiredNumberOfSwapChainImages;
swapChainInfo.imageFormat = resize.m_format;
swapChainInfo.imageExtent.width = swapChainExtent.width;
swapChainInfo.imageExtent.height = swapChainExtent.height;
swapChainInfo.preTransform = preTransform;
swapChainInfo.compositeAlpha = VK_COMPOSITE_ALPHA_OPAQUE_BIT_KHR;
swapChainInfo.imageArrayLayers = 1;
swapChainInfo.presentMode = swapchainPresentMode;
swapChainInfo.oldSwapchain = oldSc.m_swapChain;
swapChainInfo.clipped = true;
swapChainInfo.imageColorSpace = resize.m_colorspace;
swapChainInfo.imageUsage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT;
swapChainInfo.imageSharingMode = VK_SHARING_MODE_EXCLUSIVE;
swapChainInfo.queueFamilyIndexCount = 0;
swapChainInfo.pQueueFamilyIndices = nullptr;
resize.m_windowCtx.m_activeSwapChain ^= 1;
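/* Flip to the inactive swapchain slot: the old chain stays alive so it
 * can be passed as oldSwapchain while its replacement is created. */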
Window::SwapChain& sc = resize.m_windowCtx.m_swapChains[resize.m_windowCtx.m_activeSwapChain];
sc.destroy(m_dev);
ThrowIfFailed(vk::CreateSwapchainKHR(m_dev, &swapChainInfo, nullptr, &sc.m_swapChain));
sc.m_format = resize.m_format;
uint32_t swapchainImageCount;
ThrowIfFailed(vk::GetSwapchainImagesKHR(m_dev, sc.m_swapChain, &swapchainImageCount, nullptr));
std::unique_ptr<VkImage[]> swapchainImages(new VkImage[swapchainImageCount]);
ThrowIfFailed(vk::GetSwapchainImagesKHR(m_dev, sc.m_swapChain, &swapchainImageCount, swapchainImages.get()));
/* images */
sc.m_bufs.resize(swapchainImageCount);
for (uint32_t i=0 ; i<swapchainImageCount ; ++i)
{
Window::SwapChain::Buffer& buf = sc.m_bufs[i];
buf.setImage(this, swapchainImages[i], swapChainExtent.width, swapChainExtent.height);
}
m_deferredResizes.pop();
}
return true;
}
struct VulkanDescriptorPool : ListNode<VulkanDescriptorPool, VulkanDataFactoryImpl*>
{
VkDescriptorPool m_descPool;
int m_allocatedSets = 0;
VulkanDescriptorPool(VulkanDataFactoryImpl* factory)
: ListNode<VulkanDescriptorPool, VulkanDataFactoryImpl*>(factory)
{
VkDescriptorPoolSize poolSizes[2] = {};
VkDescriptorPoolCreateInfo descriptorPoolInfo = {};
descriptorPoolInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_POOL_CREATE_INFO;
descriptorPoolInfo.pNext = nullptr;
descriptorPoolInfo.maxSets = BOO_VK_MAX_DESCRIPTOR_SETS;
descriptorPoolInfo.poolSizeCount = 2;
descriptorPoolInfo.pPoolSizes = poolSizes;
poolSizes[0].type = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
poolSizes[0].descriptorCount = BOO_GLSL_MAX_UNIFORM_COUNT * BOO_VK_MAX_DESCRIPTOR_SETS;
poolSizes[1].type = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
poolSizes[1].descriptorCount = BOO_GLSL_MAX_TEXTURE_COUNT * BOO_VK_MAX_DESCRIPTOR_SETS;
ThrowIfFailed(vk::CreateDescriptorPool(factory->m_ctx->m_dev, &descriptorPoolInfo, nullptr, &m_descPool));
}
~VulkanDescriptorPool()
{
vk::DestroyDescriptorPool(m_head->m_ctx->m_dev, m_descPool, nullptr);
}
std::unique_lock<std::recursive_mutex> destructorLock() override
{ return std::unique_lock<std::recursive_mutex>{m_head->m_dataMutex}; }
static std::unique_lock<std::recursive_mutex> _getHeadLock(VulkanDataFactoryImpl* factory)
{ return std::unique_lock<std::recursive_mutex>{factory->m_dataMutex}; }
static VulkanDescriptorPool*& _getHeadPtr(VulkanDataFactoryImpl* factory)
{ return factory->m_descPoolHead; }
};
boo::ObjToken<VulkanDescriptorPool> VulkanDataFactoryImpl::allocateDescriptorSets(VkDescriptorSet* out)
{
std::lock_guard<std::recursive_mutex> lk(m_dataMutex);
boo::ObjToken<VulkanDescriptorPool> pool;
if (!m_descPoolHead || m_descPoolHead->m_allocatedSets == BOO_VK_MAX_DESCRIPTOR_SETS)
pool = new VulkanDescriptorPool(this);
else
pool = m_descPoolHead;
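/* Two descriptor sets are allocated per shader data binding, matching the
 * double-buffered dynamic resources used elsewhere in this backend. */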
VkDescriptorSetLayout layouts[] = {m_ctx->m_descSetLayout, m_ctx->m_descSetLayout};
VkDescriptorSetAllocateInfo descAllocInfo;
descAllocInfo.sType = VK_STRUCTURE_TYPE_DESCRIPTOR_SET_ALLOCATE_INFO;
descAllocInfo.pNext = nullptr;
descAllocInfo.descriptorPool = pool->m_descPool;
descAllocInfo.descriptorSetCount = 2;
descAllocInfo.pSetLayouts = layouts;
ThrowIfFailed(vk::AllocateDescriptorSets(m_ctx->m_dev, &descAllocInfo, out));
pool->m_allocatedSets += 2;
return pool;
}
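/* Thin wrappers over VMA allocations. Buffers are created persistently
 * mapped (VMA_ALLOCATION_CREATE_MAPPED_BIT) and _create returns the
 * mapped pointer; destruction is explicit via destroy(). */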
struct AllocatedBuffer
{
VkBuffer m_buffer = VK_NULL_HANDLE;
VmaAllocation m_allocation;
void* _create(VulkanContext* ctx, const VkBufferCreateInfo* pBufferCreateInfo, VmaMemoryUsage usage)
{
assert(m_buffer == VK_NULL_HANDLE && "create may only be called once");
VmaAllocationCreateInfo bufAllocInfo = {};
bufAllocInfo.flags = VMA_ALLOCATION_CREATE_MAPPED_BIT;
bufAllocInfo.usage = usage;
VmaAllocationInfo allocInfo;
ThrowIfFailed(vmaCreateBuffer(ctx->m_allocator, pBufferCreateInfo, &bufAllocInfo,
&m_buffer, &m_allocation, &allocInfo));
return allocInfo.pMappedData;
}
void* createCPU(VulkanContext* ctx, const VkBufferCreateInfo* pBufferCreateInfo)
{
return _create(ctx, pBufferCreateInfo, VMA_MEMORY_USAGE_CPU_ONLY);
}
void* createCPUtoGPU(VulkanContext* ctx, const VkBufferCreateInfo* pBufferCreateInfo)
{
return _create(ctx, pBufferCreateInfo, VMA_MEMORY_USAGE_CPU_TO_GPU);
}
void destroy(VulkanContext* ctx)
{
if (m_buffer)
{
vmaDestroyBuffer(ctx->m_allocator, m_buffer, m_allocation);
m_buffer = VK_NULL_HANDLE;
}
}
};
struct AllocatedImage
{
VkImage m_image = VK_NULL_HANDLE;
VmaAllocation m_allocation;
void _create(VulkanContext* ctx, const VkImageCreateInfo* pImageCreateInfo, VmaAllocationCreateFlags flags)
{
assert(m_image == VK_NULL_HANDLE && "create may only be called once");
VmaAllocationCreateInfo bufAllocInfo = {};
bufAllocInfo.flags = flags;
bufAllocInfo.usage = VMA_MEMORY_USAGE_GPU_ONLY;
ThrowIfFailed(vmaCreateImage(ctx->m_allocator, pImageCreateInfo, &bufAllocInfo,
&m_image, &m_allocation, nullptr));
}
void create(VulkanContext* ctx, const VkImageCreateInfo* pImageCreateInf)
{
_create(ctx, pImageCreateInf, 0);
}
void createFB(VulkanContext* ctx, const VkImageCreateInfo* pImageCreateInf)
{
_create(ctx, pImageCreateInf, VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT);
}
void destroy(VulkanContext* ctx)
{
if (m_image)
{
vmaDestroyImage(ctx->m_allocator, m_image, m_allocation);
m_image = VK_NULL_HANDLE;
}
}
};
struct VulkanData : BaseGraphicsData
{
VulkanContext* m_ctx;
/* Vertex, Index, Uniform */
AllocatedBuffer m_constantBuffers[3];
AllocatedBuffer m_texStagingBuffer;
explicit VulkanData(VulkanDataFactoryImpl& head __BooTraceArgs)
: BaseGraphicsData(head __BooTraceArgsUse), m_ctx(head.m_ctx) {}
~VulkanData()
{
for (int i=0 ; i<3 ; ++i)
m_constantBuffers[i].destroy(m_ctx);
m_texStagingBuffer.destroy(m_ctx);
}
};
struct VulkanPool : BaseGraphicsPool
{
VulkanContext* m_ctx;
AllocatedBuffer m_constantBuffer;
explicit VulkanPool(VulkanDataFactoryImpl& head __BooTraceArgs)
: BaseGraphicsPool(head __BooTraceArgsUse), m_ctx(head.m_ctx) {}
~VulkanPool()
{
m_constantBuffer.destroy(m_ctx);
}
};
static const VkBufferUsageFlagBits USE_TABLE[] =
{
VkBufferUsageFlagBits(0),
VK_BUFFER_USAGE_VERTEX_BUFFER_BIT,
VK_BUFFER_USAGE_INDEX_BUFFER_BIT,
VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT
};
class VulkanGraphicsBufferS : public GraphicsDataNode<IGraphicsBufferS>
{
friend class VulkanDataFactory;
friend struct VulkanCommandQueue;
VulkanContext* m_ctx;
size_t m_sz;
std::unique_ptr<uint8_t[]> m_stagingBuf;
VulkanGraphicsBufferS(const boo::ObjToken<BaseGraphicsData>& parent, BufferUse use,
VulkanContext* ctx, const void* data, size_t stride, size_t count)
: GraphicsDataNode<IGraphicsBufferS>(parent),
m_ctx(ctx), m_stride(stride), m_count(count), m_sz(stride * count),
m_stagingBuf(new uint8_t[m_sz]), m_use(use)
{
memmove(m_stagingBuf.get(), data, m_sz);
m_bufferInfo.range = m_sz;
}
public:
size_t size() const {return m_sz;}
size_t m_stride;
size_t m_count;
VkDescriptorBufferInfo m_bufferInfo;
BufferUse m_use;
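/* Static buffers are suballocated from one large per-data VkBuffer:
 * sizeForGPU reserves an aligned range, then placeForGPU copies the staged
 * bytes into the mapped allocation and releases the staging copy. */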
VkDeviceSize sizeForGPU(VulkanContext* ctx, VkDeviceSize offset)
{
if (m_use == BufferUse::Uniform)
{
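/* Round offset up to the next multiple of minOffset (a power of two). */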
size_t minOffset = std::max(VkDeviceSize(256),
ctx->m_gpuProps.limits.minUniformBufferOffsetAlignment);
offset = (offset + minOffset - 1) & ~(minOffset - 1);
}
m_bufferInfo.offset = offset;
offset += m_sz;
return offset;
}
void placeForGPU(VkBuffer bufObj, uint8_t* buf)
{
m_bufferInfo.buffer = bufObj;
memmove(buf + m_bufferInfo.offset, m_stagingBuf.get(), m_sz);
m_stagingBuf.reset();
}
};
template <class DataCls>
class VulkanGraphicsBufferD : public GraphicsDataNode<IGraphicsBufferD, DataCls>
{
friend class VulkanDataFactory;
friend class VulkanDataFactoryImpl;
friend struct VulkanCommandQueue;
VulkanContext* m_ctx;
size_t m_cpuSz;
std::unique_ptr<uint8_t[]> m_cpuBuf;
int m_validSlots = 0;
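/* Dynamic buffers keep two GPU copies so one can be written while the
 * other is in flight; m_validSlots appears to track which copies hold
 * the latest CPU data. */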
VulkanGraphicsBufferD(const boo::ObjToken<DataCls>& parent, BufferUse use,
VulkanContext* ctx, size_t stride, size_t count)
: GraphicsDataNode<IGraphicsBufferD, DataCls>(parent),
m_ctx(ctx), m_stride(stride), m_count(count),
m_cpuSz(stride * count), m_cpuBuf(new uint8_t[m_cpuSz]), m_use(use)
{
m_bufferInfo[0].range = m_cpuSz;
m_bufferInfo[1].range = m_cpuSz;
}
void update(int b);
public:
size_t m_stride;
size_t m_count;
VkDescriptorBufferInfo m_bufferInfo[2];
uint8_t* m_bufferPtrs[2] = {};
BufferUse m_use;
void load(const void* data, size_t sz);
void* map(size_t sz);
void unmap();
VkDeviceSize sizeForGPU(VulkanContext* ctx, VkDeviceSize offset)
{
for (int i=0 ; i<2 ; ++i)
{
if (m_use == BufferUse::Uniform)
{
size_t minOffset = std::max(VkDeviceSize(256),
ctx->m_gpuProps.limits.minUniformBufferOffsetAlignment);
offset = (offset + minOffset - 1) & ~(minOffset - 1);
}
m_bufferInfo[i].offset = offset;
offset += m_cpuSz;
}
return offset;
}
void placeForGPU(VkBuffer bufObj, uint8_t* buf)
{
m_bufferInfo[0].buffer = bufObj;
m_bufferInfo[1].buffer = bufObj;
m_bufferPtrs[0] = buf + m_bufferInfo[0].offset;
m_bufferPtrs[1] = buf + m_bufferInfo[1].offset;
}
};
static void MakeSampler(VulkanContext* ctx, VkSampler& sampOut, TextureClampMode mode, int mips)
{
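/* Samplers are cached per (clamp mode, mip count) pair; the key packs the
 * mode into the high 16 bits and the mip count into the low 16. */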
uint32_t key = (uint32_t(mode) << 16) | mips;
auto search = ctx->m_samplers.find(key);
if (search != ctx->m_samplers.end())
{
sampOut = search->second;
return;
}
/* Create linear sampler */
VkSamplerCreateInfo samplerInfo = {};
samplerInfo.sType = VK_STRUCTURE_TYPE_SAMPLER_CREATE_INFO;
samplerInfo.pNext = nullptr;
samplerInfo.magFilter = VK_FILTER_LINEAR;
samplerInfo.minFilter = VK_FILTER_LINEAR;
samplerInfo.mipmapMode = VK_SAMPLER_MIPMAP_MODE_LINEAR;
samplerInfo.anisotropyEnable = ctx->m_features.samplerAnisotropy;
samplerInfo.maxAnisotropy = ctx->m_anisotropy;
samplerInfo.maxLod = mips - 1;
switch (mode)
{
case TextureClampMode::Repeat:
default:
samplerInfo.addressModeU = VK_SAMPLER_ADDRESS_MODE_REPEAT;
samplerInfo.addressModeV = VK_SAMPLER_ADDRESS_MODE_REPEAT;
samplerInfo.addressModeW = VK_SAMPLER_ADDRESS_MODE_REPEAT;
break;
case TextureClampMode::ClampToWhite:
samplerInfo.addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
samplerInfo.addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
samplerInfo.addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
samplerInfo.borderColor = VK_BORDER_COLOR_FLOAT_OPAQUE_WHITE;
break;
case TextureClampMode::ClampToBlack:
samplerInfo.addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
samplerInfo.addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
samplerInfo.addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_BORDER;
samplerInfo.borderColor = VK_BORDER_COLOR_FLOAT_OPAQUE_BLACK;
break;
case TextureClampMode::ClampToEdge:
samplerInfo.addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
samplerInfo.addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
samplerInfo.addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
break;
case TextureClampMode::ClampToEdgeNearest:
samplerInfo.magFilter = VK_FILTER_NEAREST;
samplerInfo.minFilter = VK_FILTER_NEAREST;
samplerInfo.mipmapMode = VK_SAMPLER_MIPMAP_MODE_NEAREST;
samplerInfo.anisotropyEnable = VK_FALSE;
samplerInfo.addressModeU = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
samplerInfo.addressModeV = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
samplerInfo.addressModeW = VK_SAMPLER_ADDRESS_MODE_CLAMP_TO_EDGE;
break;
}
ThrowIfFailed(vk::CreateSampler(ctx->m_dev, &samplerInfo, nullptr, &sampOut));
ctx->m_samplers[key] = sampOut;
}
class VulkanTextureS : public GraphicsDataNode<ITextureS>
{
friend class VulkanDataFactory;
VulkanContext* m_ctx;
TextureFormat m_fmt;
size_t m_sz;
size_t m_width, m_height, m_mips;
TextureClampMode m_clampMode;
VkFormat m_vkFmt;
int m_pixelPitchNum = 1;
int m_pixelPitchDenom = 1;
VulkanTextureS(const boo::ObjToken<BaseGraphicsData>& parent, VulkanContext* ctx,
size_t width, size_t height, size_t mips,
TextureFormat fmt, TextureClampMode clampMode,
const void* data, size_t sz)
: GraphicsDataNode<ITextureS>(parent), m_ctx(ctx), m_fmt(fmt), m_sz(sz),
m_width(width), m_height(height), m_mips(mips), m_clampMode(clampMode)
{
VkFormat pfmt;
switch (fmt)
{
case TextureFormat::RGBA8:
pfmt = VK_FORMAT_R8G8B8A8_UNORM;
m_pixelPitchNum = 4;
break;
case TextureFormat::I8:
pfmt = VK_FORMAT_R8_UNORM;
break;
case TextureFormat::I16:
pfmt = VK_FORMAT_R16_UNORM;
m_pixelPitchNum = 2;
break;
case TextureFormat::DXT1:
pfmt = VK_FORMAT_BC1_RGBA_UNORM_BLOCK;
m_pixelPitchNum = 1;
m_pixelPitchDenom = 2;
break;
default:
Log.report(logvisor::Fatal, "unsupported tex format");
}
m_vkFmt = pfmt;
/* create cpu image buffer */
VkBufferCreateInfo bufCreateInfo = {};
bufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
bufCreateInfo.size = sz;
bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
bufCreateInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
void* mappedData = m_cpuBuf.createCPU(ctx, &bufCreateInfo);
memmove(mappedData, data, sz);
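/* The data now lives in the mapped host-visible staging allocation;
 * placeForGPU() will record the copy into the optimally-tiled GPU image,
 * and the staging buffer is released in deleteUploadObjects(). */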
}
public:
AllocatedBuffer m_cpuBuf;
AllocatedImage m_gpuTex;
VkImageView m_gpuView = VK_NULL_HANDLE;
VkSampler m_sampler = VK_NULL_HANDLE;
VkDescriptorImageInfo m_descInfo;
~VulkanTextureS()
{
vk::DestroyImageView(m_ctx->m_dev, m_gpuView, nullptr);
m_gpuTex.destroy(m_ctx);
m_cpuBuf.destroy(m_ctx);
}
void setClampMode(TextureClampMode mode)
{
m_clampMode = mode;
MakeSampler(m_ctx, m_sampler, mode, m_mips);
m_descInfo.sampler = m_sampler;
}
void deleteUploadObjects()
{
m_cpuBuf.destroy(m_ctx);
}
void placeForGPU(VulkanContext* ctx)
{
/* create gpu image */
VkImageCreateInfo texCreateInfo = {};
texCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
texCreateInfo.imageType = VK_IMAGE_TYPE_2D;
texCreateInfo.format = m_vkFmt;
texCreateInfo.mipLevels = m_mips;
texCreateInfo.arrayLayers = 1;
texCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
texCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
texCreateInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
texCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
texCreateInfo.extent = { uint32_t(m_width), uint32_t(m_height), 1 };
texCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
m_gpuTex.create(m_ctx, &texCreateInfo);
setClampMode(m_clampMode);
m_descInfo.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
/* create image view */
VkImageViewCreateInfo viewInfo = {};
viewInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
viewInfo.pNext = nullptr;
viewInfo.image = m_gpuTex.m_image;
viewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
viewInfo.format = m_vkFmt;
viewInfo.components.r = VK_COMPONENT_SWIZZLE_R;
viewInfo.components.g = VK_COMPONENT_SWIZZLE_G;
viewInfo.components.b = VK_COMPONENT_SWIZZLE_B;
viewInfo.components.a = VK_COMPONENT_SWIZZLE_A;
viewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
viewInfo.subresourceRange.baseMipLevel = 0;
viewInfo.subresourceRange.levelCount = m_mips;
viewInfo.subresourceRange.baseArrayLayer = 0;
viewInfo.subresourceRange.layerCount = 1;
ThrowIfFailed(vk::CreateImageView(ctx->m_dev, &viewInfo, nullptr, &m_gpuView));
m_descInfo.imageView = m_gpuView;
/* Since we're going to copy the staging buffer into the texture image, set
 * its layout to TRANSFER_DST_OPTIMAL */
SetImageLayout(ctx->m_loadCmdBuf, m_gpuTex.m_image, VK_IMAGE_ASPECT_COLOR_BIT,
VK_IMAGE_LAYOUT_UNDEFINED,
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, m_mips, 1);
VkBufferImageCopy copyRegions[16] = {};
size_t width = m_width;
size_t height = m_height;
size_t regionCount = std::min(size_t(16), m_mips);
size_t offset = 0;
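/* Each mip level is packed immediately after the previous one in the staging
 * buffer; regionPitch uses the num/denom pair so block-compressed formats
 * (DXT1 at half a byte per pixel) still compute an exact byte size. */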
for (int i=0 ; i<regionCount ; ++i)
{
size_t regionPitch = width * height * m_pixelPitchNum / m_pixelPitchDenom;
copyRegions[i].imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
copyRegions[i].imageSubresource.mipLevel = i;
copyRegions[i].imageSubresource.baseArrayLayer = 0;
copyRegions[i].imageSubresource.layerCount = 1;
copyRegions[i].imageExtent.width = width;
copyRegions[i].imageExtent.height = height;
copyRegions[i].imageExtent.depth = 1;
copyRegions[i].bufferOffset = offset;
if (width > 1)
width /= 2;
if (height > 1)
height /= 2;
offset += regionPitch;
}
/* Put the copy command into the command buffer */
vk::CmdCopyBufferToImage(ctx->m_loadCmdBuf,
m_cpuBuf.m_buffer,
m_gpuTex.m_image,
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
regionCount,
copyRegions);
/* Transition the texture image from TRANSFER_DST_OPTIMAL to
 * SHADER_READ_ONLY_OPTIMAL for sampling */
SetImageLayout(ctx->m_loadCmdBuf, m_gpuTex.m_image, VK_IMAGE_ASPECT_COLOR_BIT,
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, m_mips, 1);
}
TextureFormat format() const {return m_fmt;}
};
class VulkanTextureSA : public GraphicsDataNode<ITextureSA>
{
friend class VulkanDataFactory;
VulkanContext* m_ctx;
TextureFormat m_fmt;
size_t m_sz;
size_t m_width, m_height, m_layers, m_mips;
TextureClampMode m_clampMode;
VkFormat m_vkFmt;
int m_pixelPitchNum = 1;
int m_pixelPitchDenom = 1;
VulkanTextureSA(const boo::ObjToken<BaseGraphicsData>& parent, VulkanContext* ctx,
size_t width, size_t height, size_t layers,
size_t mips, TextureFormat fmt, TextureClampMode clampMode,
const void* data, size_t sz)
: GraphicsDataNode<ITextureSA>(parent),
m_ctx(ctx), m_fmt(fmt), m_sz(sz), m_width(width), m_height(height),
m_layers(layers), m_mips(mips), m_clampMode(clampMode)
{
VkFormat pfmt;
switch (fmt)
{
case TextureFormat::RGBA8:
pfmt = VK_FORMAT_R8G8B8A8_UNORM;
m_pixelPitchNum = 4;
break;
case TextureFormat::I8:
pfmt = VK_FORMAT_R8_UNORM;
break;
case TextureFormat::I16:
pfmt = VK_FORMAT_R16_UNORM;
m_pixelPitchNum = 2;
break;
default:
Log.report(logvisor::Fatal, "unsupported tex format");
}
m_vkFmt = pfmt;
/* create cpu image buffer */
VkBufferCreateInfo bufCreateInfo = {};
bufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
bufCreateInfo.size = sz;
bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
bufCreateInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
void* mappedData = m_cpuBuf.createCPU(ctx, &bufCreateInfo);
memmove(mappedData, data, sz);
}
public:
AllocatedBuffer m_cpuBuf;
AllocatedImage m_gpuTex;
VkImageView m_gpuView = VK_NULL_HANDLE;
VkSampler m_sampler = VK_NULL_HANDLE;
VkDescriptorImageInfo m_descInfo;
~VulkanTextureSA()
{
vk::DestroyImageView(m_ctx->m_dev, m_gpuView, nullptr);
m_gpuTex.destroy(m_ctx);
m_cpuBuf.destroy(m_ctx);
}
void setClampMode(TextureClampMode mode)
{
m_clampMode = mode;
MakeSampler(m_ctx, m_sampler, mode, m_mips);
m_descInfo.sampler = m_sampler;
}
void deleteUploadObjects()
{
m_cpuBuf.destroy(m_ctx);
}
void placeForGPU(VulkanContext* ctx)
{
/* create gpu image */
VkImageCreateInfo texCreateInfo = {};
texCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
texCreateInfo.imageType = VK_IMAGE_TYPE_2D;
texCreateInfo.format = m_vkFmt;
texCreateInfo.mipLevels = m_mips;
texCreateInfo.arrayLayers = m_layers;
texCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
texCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
texCreateInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
texCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
texCreateInfo.extent = { uint32_t(m_width), uint32_t(m_height), 1 };
texCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
m_gpuTex.create(m_ctx, &texCreateInfo);
setClampMode(m_clampMode);
m_descInfo.imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
/* create image view */
VkImageViewCreateInfo viewInfo = {};
viewInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
viewInfo.pNext = nullptr;
viewInfo.image = m_gpuTex.m_image;
viewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D_ARRAY;
viewInfo.format = m_vkFmt;
viewInfo.components.r = VK_COMPONENT_SWIZZLE_R;
viewInfo.components.g = VK_COMPONENT_SWIZZLE_G;
viewInfo.components.b = VK_COMPONENT_SWIZZLE_B;
viewInfo.components.a = VK_COMPONENT_SWIZZLE_A;
viewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
viewInfo.subresourceRange.baseMipLevel = 0;
viewInfo.subresourceRange.levelCount = m_mips;
viewInfo.subresourceRange.baseArrayLayer = 0;
viewInfo.subresourceRange.layerCount = m_layers;
ThrowIfFailed(vk::CreateImageView(ctx->m_dev, &viewInfo, nullptr, &m_gpuView));
m_descInfo.imageView = m_gpuView;
/* Since we're going to copy the staging buffer into the texture image, set
 * its layout to TRANSFER_DST_OPTIMAL */
SetImageLayout(ctx->m_loadCmdBuf, m_gpuTex.m_image, VK_IMAGE_ASPECT_COLOR_BIT,
VK_IMAGE_LAYOUT_UNDEFINED,
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, m_mips, m_layers);
VkBufferImageCopy copyRegions[16] = {};
size_t width = m_width;
size_t height = m_height;
size_t regionCount = std::min(size_t(16), m_mips);
size_t offset = 0;
for (int i=0 ; i<regionCount ; ++i)
{
size_t regionPitch = width * height * m_layers * m_pixelPitchNum / m_pixelPitchDenom;
copyRegions[i].imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
copyRegions[i].imageSubresource.mipLevel = i;
copyRegions[i].imageSubresource.baseArrayLayer = 0;
copyRegions[i].imageSubresource.layerCount = m_layers;
copyRegions[i].imageExtent.width = width;
copyRegions[i].imageExtent.height = height;
copyRegions[i].imageExtent.depth = 1;
copyRegions[i].bufferOffset = offset;
if (width > 1)
width /= 2;
if (height > 1)
height /= 2;
offset += regionPitch;
}
/* Put the copy command into the command buffer */
vk::CmdCopyBufferToImage(ctx->m_loadCmdBuf,
m_cpuBuf.m_buffer,
m_gpuTex.m_image,
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
regionCount,
copyRegions);
/* Transition the texture image from TRANSFER_DST_OPTIMAL to
 * SHADER_READ_ONLY_OPTIMAL for sampling */
SetImageLayout(ctx->m_loadCmdBuf, m_gpuTex.m_image, VK_IMAGE_ASPECT_COLOR_BIT,
VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, m_mips, m_layers);
}
TextureFormat format() const {return m_fmt;}
size_t layers() const {return m_layers;}
};
class VulkanTextureD : public GraphicsDataNode<ITextureD>
{
friend class VulkanDataFactory;
friend struct VulkanCommandQueue;
size_t m_width;
size_t m_height;
TextureFormat m_fmt;
TextureClampMode m_clampMode;
VulkanCommandQueue* m_q;
std::unique_ptr<uint8_t[]> m_stagingBuf;
size_t m_cpuSz;
VkDeviceSize m_cpuOffsets[2];
VkFormat m_vkFmt;
int m_validSlots = 0;
VulkanTextureD(const boo::ObjToken<BaseGraphicsData>& parent, VulkanCommandQueue* q,
size_t width, size_t height, TextureFormat fmt, TextureClampMode clampMode)
: GraphicsDataNode<ITextureD>(parent), m_width(width), m_height(height), m_fmt(fmt), m_clampMode(clampMode), m_q(q)
{
VkFormat pfmt;
switch (fmt)
{
case TextureFormat::RGBA8:
pfmt = VK_FORMAT_R8G8B8A8_UNORM;
m_cpuSz = width * height * 4;
break;
case TextureFormat::I8:
pfmt = VK_FORMAT_R8_UNORM;
m_cpuSz = width * height;
break;
case TextureFormat::I16:
pfmt = VK_FORMAT_R16_UNORM;
m_cpuSz = width * height * 2;
break;
default:
Log.report(logvisor::Fatal, "unsupported tex format");
}
m_vkFmt = pfmt;
m_stagingBuf.reset(new uint8_t[m_cpuSz]);
}
void update(int b);
public:
VkBuffer m_cpuBuf = VK_NULL_HANDLE; /* Owned externally */
uint8_t* m_cpuBufPtrs[2] = {};
AllocatedImage m_gpuTex[2];
VkImageView m_gpuView[2];
VkSampler m_sampler = VK_NULL_HANDLE;
VkDescriptorImageInfo m_descInfo[2];
~VulkanTextureD();
void setClampMode(TextureClampMode mode);
void load(const void* data, size_t sz);
void* map(size_t sz);
void unmap();
VkDeviceSize sizeForGPU(VulkanContext* ctx, VkDeviceSize offset)
{
for (int i=0 ; i<2 ; ++i)
{
m_cpuOffsets[i] = offset;
offset += m_cpuSz;
}
return offset;
}
void placeForGPU(VulkanContext* ctx, VkBuffer bufObj, uint8_t* buf)
{
m_cpuBuf = bufObj;
m_cpuBufPtrs[0] = buf + m_cpuOffsets[0];
m_cpuBufPtrs[1] = buf + m_cpuOffsets[1];
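/* Dynamic textures are double-buffered: one staging slot is written by the
 * CPU while the other is consumed by the in-flight frame, mirroring the
 * m_validSlots scheme of VulkanGraphicsBufferD above. */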
/* Create images */
VkImageCreateInfo texCreateInfo = {};
texCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
texCreateInfo.pNext = nullptr;
texCreateInfo.imageType = VK_IMAGE_TYPE_2D;
texCreateInfo.format = m_vkFmt;
texCreateInfo.extent.width = m_width;
texCreateInfo.extent.height = m_height;
texCreateInfo.extent.depth = 1;
texCreateInfo.mipLevels = 1;
texCreateInfo.arrayLayers = 1;
texCreateInfo.samples = VK_SAMPLE_COUNT_1_BIT;
texCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
texCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
texCreateInfo.usage = VK_IMAGE_USAGE_TRANSFER_DST_BIT | VK_IMAGE_USAGE_SAMPLED_BIT;
texCreateInfo.queueFamilyIndexCount = 0;
texCreateInfo.pQueueFamilyIndices = nullptr;
texCreateInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
texCreateInfo.flags = 0;
setClampMode(m_clampMode);
for (int i=0 ; i<2 ; ++i)
{
/* create gpu image */
m_gpuTex[i].create(ctx, &texCreateInfo);
m_descInfo[i].sampler = m_sampler;
m_descInfo[i].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
}
VkImageViewCreateInfo viewInfo = {};
viewInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
viewInfo.pNext = nullptr;
viewInfo.image = VK_NULL_HANDLE;
viewInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
viewInfo.format = m_vkFmt;
viewInfo.components.r = VK_COMPONENT_SWIZZLE_R;
viewInfo.components.g = VK_COMPONENT_SWIZZLE_G;
viewInfo.components.b = VK_COMPONENT_SWIZZLE_B;
viewInfo.components.a = VK_COMPONENT_SWIZZLE_A;
viewInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
viewInfo.subresourceRange.baseMipLevel = 0;
viewInfo.subresourceRange.levelCount = 1;
viewInfo.subresourceRange.baseArrayLayer = 0;
viewInfo.subresourceRange.layerCount = 1;
for (int i=0 ; i<2 ; ++i)
{
/* create image view */
viewInfo.image = m_gpuTex[i].m_image;
ThrowIfFailed(vk::CreateImageView(ctx->m_dev, &viewInfo, nullptr, &m_gpuView[i]));
m_descInfo[i].imageView = m_gpuView[i];
}
}
TextureFormat format() const {return m_fmt;}
};
#define MAX_BIND_TEXS 4
class VulkanTextureR : public GraphicsDataNode<ITextureR>
{
friend class VulkanDataFactory;
friend struct VulkanCommandQueue;
size_t m_width = 0;
size_t m_height = 0;
VkSampleCountFlags m_samplesColor, m_samplesDepth;
size_t m_colorBindCount;
size_t m_depthBindCount;
void Setup(VulkanContext* ctx)
{
/* no-ops on first call */
doDestroy();
m_layout = VK_IMAGE_LAYOUT_UNDEFINED;
/* color target */
VkImageCreateInfo texCreateInfo = {};
texCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_CREATE_INFO;
texCreateInfo.pNext = nullptr;
texCreateInfo.imageType = VK_IMAGE_TYPE_2D;
texCreateInfo.format = ctx->m_internalFormat;
texCreateInfo.extent.width = m_width;
texCreateInfo.extent.height = m_height;
texCreateInfo.extent.depth = 1;
texCreateInfo.mipLevels = 1;
texCreateInfo.arrayLayers = 1;
texCreateInfo.samples = VkSampleCountFlagBits(m_samplesColor);
texCreateInfo.tiling = VK_IMAGE_TILING_OPTIMAL;
texCreateInfo.initialLayout = VK_IMAGE_LAYOUT_UNDEFINED;
texCreateInfo.usage = VK_IMAGE_USAGE_COLOR_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
texCreateInfo.queueFamilyIndexCount = 0;
texCreateInfo.pQueueFamilyIndices = nullptr;
texCreateInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
texCreateInfo.flags = 0;
m_colorTex.createFB(ctx, &texCreateInfo);
/* depth target */
texCreateInfo.samples = VkSampleCountFlagBits(m_samplesDepth);
texCreateInfo.format = VK_FORMAT_D32_SFLOAT;
texCreateInfo.usage = VK_IMAGE_USAGE_DEPTH_STENCIL_ATTACHMENT_BIT | VK_IMAGE_USAGE_TRANSFER_SRC_BIT;
m_depthTex.createFB(ctx, &texCreateInfo);
texCreateInfo.samples = VkSampleCountFlagBits(1);
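/* The bind textures are single-sample images the shaders can sample; the
 * render targets above are copied/resolved into them elsewhere in the
 * command queue. */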
for (size_t i=0 ; i<m_colorBindCount ; ++i)
2016-02-21 06:27:54 +00:00
{
m_colorBindLayout[i] = VK_IMAGE_LAYOUT_UNDEFINED;
texCreateInfo.format = ctx->m_internalFormat;
texCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
m_colorBindTex[i].createFB(ctx, &texCreateInfo);
m_colorBindDescInfo[i].sampler = m_sampler;
m_colorBindDescInfo[i].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
}
for (size_t i=0 ; i<m_depthBindCount ; ++i)
{
m_depthBindLayout[i] = VK_IMAGE_LAYOUT_UNDEFINED;
texCreateInfo.format = VK_FORMAT_D32_SFLOAT;
texCreateInfo.usage = VK_IMAGE_USAGE_SAMPLED_BIT | VK_IMAGE_USAGE_TRANSFER_DST_BIT;
m_depthBindTex[i].createFB(ctx, &texCreateInfo);
m_depthBindDescInfo[i].sampler = m_sampler;
m_depthBindDescInfo[i].imageLayout = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
}
/* Create resource views */
VkImageViewCreateInfo viewCreateInfo = {};
viewCreateInfo.sType = VK_STRUCTURE_TYPE_IMAGE_VIEW_CREATE_INFO;
viewCreateInfo.pNext = nullptr;
viewCreateInfo.image = m_colorTex.m_image;
viewCreateInfo.viewType = VK_IMAGE_VIEW_TYPE_2D;
viewCreateInfo.format = ctx->m_internalFormat;
viewCreateInfo.components.r = VK_COMPONENT_SWIZZLE_R;
viewCreateInfo.components.g = VK_COMPONENT_SWIZZLE_G;
viewCreateInfo.components.b = VK_COMPONENT_SWIZZLE_B;
viewCreateInfo.components.a = VK_COMPONENT_SWIZZLE_A;
viewCreateInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
viewCreateInfo.subresourceRange.baseMipLevel = 0;
viewCreateInfo.subresourceRange.levelCount = 1;
viewCreateInfo.subresourceRange.baseArrayLayer = 0;
viewCreateInfo.subresourceRange.layerCount = 1;
ThrowIfFailed(vk::CreateImageView(ctx->m_dev, &viewCreateInfo, nullptr, &m_colorView));
viewCreateInfo.image = m_depthTex.m_image;
viewCreateInfo.format = VK_FORMAT_D32_SFLOAT;
viewCreateInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
ThrowIfFailed(vk::CreateImageView(ctx->m_dev, &viewCreateInfo, nullptr, &m_depthView));
for (size_t i=0 ; i<m_colorBindCount ; ++i)
{
viewCreateInfo.image = m_colorBindTex[i].m_image;
viewCreateInfo.format = ctx->m_internalFormat;
viewCreateInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
ThrowIfFailed(vk::CreateImageView(ctx->m_dev, &viewCreateInfo, nullptr, &m_colorBindView[i]));
m_colorBindDescInfo[i].imageView = m_colorBindView[i];
}
for (size_t i=0 ; i<m_depthBindCount ; ++i)
{
viewCreateInfo.image = m_depthBindTex[i].m_image;
viewCreateInfo.format = VK_FORMAT_D32_SFLOAT;
viewCreateInfo.subresourceRange.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
ThrowIfFailed(vk::CreateImageView(ctx->m_dev, &viewCreateInfo, nullptr, &m_depthBindView[i]));
m_depthBindDescInfo[i].imageView = m_depthBindView[i];
}
/* framebuffer */
VkFramebufferCreateInfo fbCreateInfo = {};
fbCreateInfo.sType = VK_STRUCTURE_TYPE_FRAMEBUFFER_CREATE_INFO;
fbCreateInfo.pNext = nullptr;
fbCreateInfo.renderPass = ctx->m_pass;
fbCreateInfo.attachmentCount = 2;
fbCreateInfo.width = m_width;
fbCreateInfo.height = m_height;
fbCreateInfo.layers = 1;
VkImageView attachments[2] = {m_colorView, m_depthView};
fbCreateInfo.pAttachments = attachments;
ThrowIfFailed(vk::CreateFramebuffer(ctx->m_dev, &fbCreateInfo, nullptr, &m_framebuffer));
m_passBeginInfo.sType = VK_STRUCTURE_TYPE_RENDER_PASS_BEGIN_INFO;
m_passBeginInfo.pNext = nullptr;
m_passBeginInfo.renderPass = ctx->m_pass;
m_passBeginInfo.framebuffer = m_framebuffer;
m_passBeginInfo.renderArea.offset.x = 0;
m_passBeginInfo.renderArea.offset.y = 0;
m_passBeginInfo.renderArea.extent.width = m_width;
m_passBeginInfo.renderArea.extent.height = m_height;
m_passBeginInfo.clearValueCount = 0;
m_passBeginInfo.pClearValues = nullptr;
}
VulkanCommandQueue* m_q;
VulkanTextureR(const boo::ObjToken<BaseGraphicsData>& parent, VulkanCommandQueue* q,
size_t width, size_t height, TextureClampMode clampMode,
size_t colorBindCount, size_t depthBindCount);
public:
AllocatedImage m_colorTex;
VkImageView m_colorView = VK_NULL_HANDLE;
AllocatedImage m_depthTex;
VkImageView m_depthView = VK_NULL_HANDLE;
AllocatedImage m_colorBindTex[MAX_BIND_TEXS] = {};
VkImageView m_colorBindView[MAX_BIND_TEXS] = {};
VkDescriptorImageInfo m_colorBindDescInfo[MAX_BIND_TEXS] = {};
AllocatedImage m_depthBindTex[MAX_BIND_TEXS] = {};
VkImageView m_depthBindView[MAX_BIND_TEXS] = {};
VkDescriptorImageInfo m_depthBindDescInfo[MAX_BIND_TEXS] = {};
VkFramebuffer m_framebuffer = VK_NULL_HANDLE;
VkRenderPassBeginInfo m_passBeginInfo = {};
VkImageLayout m_layout = VK_IMAGE_LAYOUT_UNDEFINED;
VkImageLayout m_colorBindLayout[MAX_BIND_TEXS] = {};
VkImageLayout m_depthBindLayout[MAX_BIND_TEXS] = {};
VkSampler m_sampler = VK_NULL_HANDLE;
void setClampMode(TextureClampMode mode);
void doDestroy();
~VulkanTextureR();
void resize(VulkanContext* ctx, size_t width, size_t height)
{
if (width < 1)
width = 1;
if (height < 1)
height = 1;
m_width = width;
m_height = height;
Setup(ctx);
}
};
static const size_t SEMANTIC_SIZE_TABLE[] =
{
0,
12,
16,
12,
16,
16,
4,
8,
16,
16,
16
};
static const VkFormat SEMANTIC_TYPE_TABLE[] =
{
VK_FORMAT_UNDEFINED,
VK_FORMAT_R32G32B32_SFLOAT,
VK_FORMAT_R32G32B32A32_SFLOAT,
VK_FORMAT_R32G32B32_SFLOAT,
VK_FORMAT_R32G32B32A32_SFLOAT,
VK_FORMAT_R32G32B32A32_SFLOAT,
VK_FORMAT_R8G8B8A8_UNORM,
VK_FORMAT_R32G32_SFLOAT,
VK_FORMAT_R32G32B32A32_SFLOAT,
VK_FORMAT_R32G32B32A32_SFLOAT,
VK_FORMAT_R32G32B32A32_SFLOAT
};
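/* The tables above are indexed by the masked VertexSemantic value:
 * SEMANTIC_SIZE_TABLE gives the element size in bytes (12 for vec3, 16 for
 * vec4, 8 for vec2, 4 for packed UNORM color) and SEMANTIC_TYPE_TABLE the
 * matching VkFormat. */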
struct VulkanVertexFormat : GraphicsDataNode<IVertexFormat>
{
VkVertexInputBindingDescription m_bindings[2];
std::unique_ptr<VkVertexInputAttributeDescription[]> m_attributes;
VkPipelineVertexInputStateCreateInfo m_info;
size_t m_stride = 0;
size_t m_instStride = 0;
VulkanVertexFormat(const boo::ObjToken<BaseGraphicsData>& parent, size_t elementCount,
const VertexElementDescriptor* elements)
: GraphicsDataNode<IVertexFormat>(parent),
m_attributes(new VkVertexInputAttributeDescription[elementCount])
{
m_info.sType = VK_STRUCTURE_TYPE_PIPELINE_VERTEX_INPUT_STATE_CREATE_INFO;
m_info.pNext = nullptr;
m_info.flags = 0;
m_info.vertexBindingDescriptionCount = 0;
m_info.pVertexBindingDescriptions = m_bindings;
m_info.vertexAttributeDescriptionCount = elementCount;
m_info.pVertexAttributeDescriptions = m_attributes.get();
for (size_t i=0 ; i<elementCount ; ++i)
{
const VertexElementDescriptor* elemin = &elements[i];
VkVertexInputAttributeDescription& attribute = m_attributes[i];
int semantic = int(elemin->semantic & boo::VertexSemantic::SemanticMask);
attribute.location = i;
attribute.format = SEMANTIC_TYPE_TABLE[semantic];
if ((elemin->semantic & boo::VertexSemantic::Instanced) != boo::VertexSemantic::None)
{
attribute.binding = 1;
attribute.offset = m_instStride;
m_instStride += SEMANTIC_SIZE_TABLE[semantic];
}
else
{
attribute.binding = 0;
attribute.offset = m_stride;
m_stride += SEMANTIC_SIZE_TABLE[semantic];
}
}
if (m_stride)
{
m_bindings[0].binding = 0;
m_bindings[0].stride = m_stride;
m_bindings[0].inputRate = VK_VERTEX_INPUT_RATE_VERTEX;
++m_info.vertexBindingDescriptionCount;
}
if (m_instStride)
{
m_bindings[m_info.vertexBindingDescriptionCount].binding = 1;
m_bindings[m_info.vertexBindingDescriptionCount].stride = m_instStride;
m_bindings[m_info.vertexBindingDescriptionCount].inputRate = VK_VERTEX_INPUT_RATE_INSTANCE;
++m_info.vertexBindingDescriptionCount;
}
}
};
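/* Worked example of the layout computed above, for a hypothetical element
 * list { position vec3, uv vec2, color vec4 | Instanced }: binding 0 gets
 * stride 20 with per-vertex attributes at offsets 0 and 12; binding 1 gets
 * stride 16 with one per-instance attribute at offset 0. */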
static const VkPrimitiveTopology PRIMITIVE_TABLE[] =
{
VK_PRIMITIVE_TOPOLOGY_TRIANGLE_LIST,
VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP,
VK_PRIMITIVE_TOPOLOGY_PATCH_LIST
};
static const VkBlendFactor BLEND_FACTOR_TABLE[] =
{
VK_BLEND_FACTOR_ZERO,
VK_BLEND_FACTOR_ONE,
VK_BLEND_FACTOR_SRC_COLOR,
VK_BLEND_FACTOR_ONE_MINUS_SRC_COLOR,
VK_BLEND_FACTOR_DST_COLOR,
VK_BLEND_FACTOR_ONE_MINUS_DST_COLOR,
VK_BLEND_FACTOR_SRC_ALPHA,
VK_BLEND_FACTOR_ONE_MINUS_SRC_ALPHA,
VK_BLEND_FACTOR_DST_ALPHA,
VK_BLEND_FACTOR_ONE_MINUS_DST_ALPHA,
VK_BLEND_FACTOR_SRC1_COLOR,
VK_BLEND_FACTOR_ONE_MINUS_SRC1_COLOR
};
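/* The table above is indexed by boo::BlendFactor; BlendFactor::Subtract has
 * no direct VkBlendFactor equivalent and is special-cased in
 * VulkanShaderPipeline::bind() below. */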
class VulkanShaderPipeline : public GraphicsDataNode<IShaderPipeline>
2016-02-21 06:27:54 +00:00
{
protected:
friend class VulkanDataFactory;
friend struct VulkanShaderDataBinding;
VulkanContext* m_ctx;
VkPipelineCache m_pipelineCache;
boo::ObjToken<IVertexFormat> m_vtxFmt;
mutable VulkanShareableShader::Token m_vert;
mutable VulkanShareableShader::Token m_frag;
BlendFactor m_srcFac;
BlendFactor m_dstFac;
Primitive m_prim;
ZTest m_depthTest;
bool m_depthWrite;
bool m_colorWrite;
bool m_alphaWrite;
bool m_overwriteAlpha;
CullMode m_culling;
mutable VkPipeline m_pipeline = VK_NULL_HANDLE;
VulkanShaderPipeline(const boo::ObjToken<BaseGraphicsData>& parent,
VulkanContext* ctx,
VulkanShareableShader::Token&& vert,
VulkanShareableShader::Token&& frag,
VkPipelineCache pipelineCache,
const boo::ObjToken<IVertexFormat>& vtxFmt,
BlendFactor srcFac, BlendFactor dstFac, Primitive prim,
ZTest depthTest, bool depthWrite, bool colorWrite,
bool alphaWrite, bool overwriteAlpha, CullMode culling)
: GraphicsDataNode<IShaderPipeline>(parent),
m_ctx(ctx), m_pipelineCache(pipelineCache), m_vtxFmt(vtxFmt),
m_vert(std::move(vert)), m_frag(std::move(frag)), m_srcFac(srcFac), m_dstFac(dstFac),
m_prim(prim), m_depthTest(depthTest), m_depthWrite(depthWrite), m_colorWrite(colorWrite),
m_alphaWrite(alphaWrite), m_overwriteAlpha(overwriteAlpha), m_culling(culling)
{}
public:
~VulkanShaderPipeline()
{
if (m_pipeline)
vk::DestroyPipeline(m_ctx->m_dev, m_pipeline, nullptr);
if (m_pipelineCache)
vk::DestroyPipelineCache(m_ctx->m_dev, m_pipelineCache, nullptr);
}
VulkanShaderPipeline& operator=(const VulkanShaderPipeline&) = delete;
VulkanShaderPipeline(const VulkanShaderPipeline&) = delete;
virtual uint32_t defineExtraStages(VkPipelineShaderStageCreateInfo* stages) const { return 0; }
virtual const VkPipelineTessellationStateCreateInfo* getTessellationInfo() const { return nullptr; }
virtual void resetExtraStages() const {}
VkPipeline bind(VkRenderPass rPass = VK_NULL_HANDLE) const
{
if (!m_pipeline)
{
if (!rPass)
rPass = m_ctx->m_pass;
VkCullModeFlagBits cullMode;
switch (m_culling)
{
case CullMode::None:
default:
cullMode = VK_CULL_MODE_NONE;
break;
case CullMode::Backface:
cullMode = VK_CULL_MODE_BACK_BIT;
break;
case CullMode::Frontface:
cullMode = VK_CULL_MODE_FRONT_BIT;
break;
}
VkDynamicState dynamicStateEnables[VK_DYNAMIC_STATE_RANGE_SIZE] = {};
VkPipelineDynamicStateCreateInfo dynamicState = {};
dynamicState.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
dynamicState.pNext = nullptr;
dynamicState.pDynamicStates = dynamicStateEnables;
dynamicState.dynamicStateCount = 0;
VkPipelineShaderStageCreateInfo stages[4] = {};
stages[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
stages[0].pNext = nullptr;
stages[0].flags = 0;
stages[0].stage = VK_SHADER_STAGE_VERTEX_BIT;
stages[0].module = m_vert.get().m_shader;
stages[0].pName = "main";
stages[0].pSpecializationInfo = nullptr;
stages[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
stages[1].pNext = nullptr;
stages[1].flags = 0;
stages[1].stage = VK_SHADER_STAGE_FRAGMENT_BIT;
stages[1].module = m_frag.get().m_shader;
stages[1].pName = "main";
stages[1].pSpecializationInfo = nullptr;
uint32_t extraStages = defineExtraStages(&stages[2]);
VkPipelineInputAssemblyStateCreateInfo assemblyInfo = {};
assemblyInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_INPUT_ASSEMBLY_STATE_CREATE_INFO;
assemblyInfo.pNext = nullptr;
assemblyInfo.flags = 0;
assemblyInfo.topology = PRIMITIVE_TABLE[int(m_prim)];
/* Restart indices are only legal with strip topologies; the spec requires
 * this to be VK_FALSE for list and patch topologies */
assemblyInfo.primitiveRestartEnable =
    assemblyInfo.topology == VK_PRIMITIVE_TOPOLOGY_TRIANGLE_STRIP ? VK_TRUE : VK_FALSE;
VkPipelineViewportStateCreateInfo viewportInfo = {};
viewportInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_VIEWPORT_STATE_CREATE_INFO;
viewportInfo.pNext = nullptr;
viewportInfo.flags = 0;
viewportInfo.viewportCount = 1;
viewportInfo.pViewports = nullptr;
viewportInfo.scissorCount = 1;
viewportInfo.pScissors = nullptr;
dynamicStateEnables[dynamicState.dynamicStateCount++] = VK_DYNAMIC_STATE_VIEWPORT;
dynamicStateEnables[dynamicState.dynamicStateCount++] = VK_DYNAMIC_STATE_SCISSOR;
#if AMD_PAL_HACK
dynamicStateEnables[dynamicState.dynamicStateCount++] = VK_DYNAMIC_STATE_BLEND_CONSTANTS;
#endif
VkPipelineRasterizationStateCreateInfo rasterizationInfo = {};
rasterizationInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_RASTERIZATION_STATE_CREATE_INFO;
rasterizationInfo.pNext = nullptr;
rasterizationInfo.flags = 0;
rasterizationInfo.depthClampEnable = VK_FALSE;
rasterizationInfo.rasterizerDiscardEnable = VK_FALSE;
rasterizationInfo.polygonMode = VK_POLYGON_MODE_FILL;
rasterizationInfo.cullMode = cullMode;
rasterizationInfo.frontFace = VK_FRONT_FACE_COUNTER_CLOCKWISE;
rasterizationInfo.depthBiasEnable = VK_FALSE;
rasterizationInfo.lineWidth = 1.f;
VkPipelineMultisampleStateCreateInfo multisampleInfo = {};
multisampleInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_MULTISAMPLE_STATE_CREATE_INFO;
multisampleInfo.pNext = nullptr;
multisampleInfo.flags = 0;
multisampleInfo.rasterizationSamples = VkSampleCountFlagBits(m_ctx->m_sampleCountColor);
VkPipelineDepthStencilStateCreateInfo depthStencilInfo = {};
depthStencilInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_DEPTH_STENCIL_STATE_CREATE_INFO;
depthStencilInfo.pNext = nullptr;
depthStencilInfo.flags = 0;
depthStencilInfo.depthTestEnable = m_depthTest != ZTest::None;
depthStencilInfo.depthWriteEnable = m_depthWrite;
depthStencilInfo.front.compareOp = VK_COMPARE_OP_ALWAYS;
depthStencilInfo.back.compareOp = VK_COMPARE_OP_ALWAYS;
switch (m_depthTest)
{
case ZTest::None:
default:
depthStencilInfo.depthCompareOp = VK_COMPARE_OP_ALWAYS;
break;
case ZTest::LEqual:
depthStencilInfo.depthCompareOp = VK_COMPARE_OP_LESS_OR_EQUAL;
break;
case ZTest::Greater:
depthStencilInfo.depthCompareOp = VK_COMPARE_OP_GREATER;
break;
case ZTest::Equal:
depthStencilInfo.depthCompareOp = VK_COMPARE_OP_EQUAL;
break;
case ZTest::GEqual:
depthStencilInfo.depthCompareOp = VK_COMPARE_OP_GREATER_OR_EQUAL;
break;
}
VkPipelineColorBlendAttachmentState colorAttachment = {};
colorAttachment.blendEnable = m_dstFac != BlendFactor::Zero;
if (m_srcFac == BlendFactor::Subtract || m_dstFac == BlendFactor::Subtract)
{
colorAttachment.srcColorBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA;
colorAttachment.dstColorBlendFactor = VK_BLEND_FACTOR_ONE;
colorAttachment.colorBlendOp = VK_BLEND_OP_REVERSE_SUBTRACT;
if (m_overwriteAlpha)
{
colorAttachment.srcAlphaBlendFactor = VK_BLEND_FACTOR_ONE;
colorAttachment.dstAlphaBlendFactor = VK_BLEND_FACTOR_ZERO;
colorAttachment.alphaBlendOp = VK_BLEND_OP_ADD;
}
else
{
colorAttachment.srcAlphaBlendFactor = VK_BLEND_FACTOR_SRC_ALPHA;
colorAttachment.dstAlphaBlendFactor = VK_BLEND_FACTOR_ONE;
colorAttachment.alphaBlendOp = VK_BLEND_OP_REVERSE_SUBTRACT;
}
}
else
{
colorAttachment.srcColorBlendFactor = BLEND_FACTOR_TABLE[int(m_srcFac)];
colorAttachment.dstColorBlendFactor = BLEND_FACTOR_TABLE[int(m_dstFac)];
colorAttachment.colorBlendOp = VK_BLEND_OP_ADD;
if (m_overwriteAlpha)
{
colorAttachment.srcAlphaBlendFactor = VK_BLEND_FACTOR_ONE;
colorAttachment.dstAlphaBlendFactor = VK_BLEND_FACTOR_ZERO;
}
else
{
colorAttachment.srcAlphaBlendFactor = BLEND_FACTOR_TABLE[int(m_srcFac)];
colorAttachment.dstAlphaBlendFactor = BLEND_FACTOR_TABLE[int(m_dstFac)];
}
colorAttachment.alphaBlendOp = VK_BLEND_OP_ADD;
}
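/* The else branch programs ordinary blending, C = Csrc*srcFactor +
 * Cdst*dstFactor; the Subtract path above instead uses reverse subtract,
 * i.e. C = Cdst - Csrc*srcAlpha. */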
colorAttachment.colorWriteMask =
(m_colorWrite ? (VK_COLOR_COMPONENT_R_BIT |
VK_COLOR_COMPONENT_G_BIT |
VK_COLOR_COMPONENT_B_BIT) : 0) |
(m_alphaWrite ? VK_COLOR_COMPONENT_A_BIT : 0);
VkPipelineColorBlendStateCreateInfo colorBlendInfo = {};
colorBlendInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_COLOR_BLEND_STATE_CREATE_INFO;
colorBlendInfo.pNext = nullptr;
colorBlendInfo.flags = 0;
colorBlendInfo.logicOpEnable = VK_FALSE;
colorBlendInfo.attachmentCount = 1;
colorBlendInfo.pAttachments = &colorAttachment;
VkGraphicsPipelineCreateInfo pipelineCreateInfo = {};
pipelineCreateInfo.sType = VK_STRUCTURE_TYPE_GRAPHICS_PIPELINE_CREATE_INFO;
pipelineCreateInfo.pNext = nullptr;
pipelineCreateInfo.flags = 0;
pipelineCreateInfo.stageCount = 2 + extraStages;
pipelineCreateInfo.pStages = stages;
pipelineCreateInfo.pVertexInputState = &m_vtxFmt.cast<VulkanVertexFormat>()->m_info;
pipelineCreateInfo.pInputAssemblyState = &assemblyInfo;
pipelineCreateInfo.pTessellationState = getTessellationInfo();
pipelineCreateInfo.pViewportState = &viewportInfo;
pipelineCreateInfo.pRasterizationState = &rasterizationInfo;
pipelineCreateInfo.pMultisampleState = &multisampleInfo;
pipelineCreateInfo.pDepthStencilState = &depthStencilInfo;
pipelineCreateInfo.pColorBlendState = &colorBlendInfo;
pipelineCreateInfo.pDynamicState = &dynamicState;
pipelineCreateInfo.layout = m_ctx->m_pipelinelayout;
pipelineCreateInfo.renderPass = rPass;
ThrowIfFailed(vk::CreateGraphicsPipelines(m_ctx->m_dev, m_pipelineCache, 1, &pipelineCreateInfo,
nullptr, &m_pipeline));
m_vert.reset();
m_frag.reset();
resetExtraStages();
}
return m_pipeline;
}
};
class VulkanTessellationShaderPipeline : public VulkanShaderPipeline
{
friend class VulkanDataFactory;
friend struct VulkanShaderDataBinding;
mutable VulkanShareableShader::Token m_control;
mutable VulkanShareableShader::Token m_evaluation;
VkPipelineTessellationStateCreateInfo m_tessInfo;
VulkanTessellationShaderPipeline(const boo::ObjToken<BaseGraphicsData>& parent,
VulkanContext* ctx,
VulkanShareableShader::Token&& vert,
VulkanShareableShader::Token&& frag,
VulkanShareableShader::Token&& control,
VulkanShareableShader::Token&& evaluation,
VkPipelineCache pipelineCache,
const boo::ObjToken<IVertexFormat>& vtxFmt,
BlendFactor srcFac, BlendFactor dstFac, uint32_t patchSize,
ZTest depthTest, bool depthWrite, bool colorWrite,
bool alphaWrite, bool overwriteAlpha, CullMode culling)
: VulkanShaderPipeline(parent, ctx, std::move(vert), std::move(frag), pipelineCache, vtxFmt, srcFac, dstFac,
Primitive::Patches, depthTest, depthWrite, colorWrite, alphaWrite, overwriteAlpha, culling),
m_control(std::move(control)), m_evaluation(std::move(evaluation))
{
m_tessInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_TESSELLATION_STATE_CREATE_INFO;
m_tessInfo.pNext = nullptr;
m_tessInfo.flags = 0;
m_tessInfo.patchControlPoints = patchSize;
}
public:
~VulkanTessellationShaderPipeline() = default;
uint32_t defineExtraStages(VkPipelineShaderStageCreateInfo* stages) const
{
stages[0].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
stages[0].pNext = nullptr;
stages[0].flags = 0;
stages[0].stage = VK_SHADER_STAGE_TESSELLATION_CONTROL_BIT;
stages[0].module = m_control.get().m_shader;
stages[0].pName = "main";
stages[0].pSpecializationInfo = nullptr;
stages[1].sType = VK_STRUCTURE_TYPE_PIPELINE_SHADER_STAGE_CREATE_INFO;
stages[1].pNext = nullptr;
stages[1].flags = 0;
stages[1].stage = VK_SHADER_STAGE_TESSELLATION_EVALUATION_BIT;
stages[1].module = m_evaluation.get().m_shader;
stages[1].pName = "main";
stages[1].pSpecializationInfo = nullptr;
return 2;
}
const VkPipelineTessellationStateCreateInfo* getTessellationInfo() const
{
return &m_tessInfo;
}
void resetExtraStages() const
{
m_control.reset();
m_evaluation.reset();
}
};
static const VkDescriptorBufferInfo* GetBufferGPUResource(const IGraphicsBuffer* buf, int idx)
{
if (buf->dynamic())
{
const VulkanGraphicsBufferD<BaseGraphicsData>* cbuf =
static_cast<const VulkanGraphicsBufferD<BaseGraphicsData>*>(buf);
return &cbuf->m_bufferInfo[idx];
}
else
{
const VulkanGraphicsBufferS* cbuf = static_cast<const VulkanGraphicsBufferS*>(buf);
return &cbuf->m_bufferInfo;
}
}
static const VkDescriptorImageInfo* GetTextureGPUResource(const ITexture* tex, int idx, int bindIdx, bool depth)
{
switch (tex->type())
{
case TextureType::Dynamic:
{
const VulkanTextureD* ctex = static_cast<const VulkanTextureD*>(tex);
return &ctex->m_descInfo[idx];
}
case TextureType::Static:
{
const VulkanTextureS* ctex = static_cast<const VulkanTextureS*>(tex);
return &ctex->m_descInfo;
}
case TextureType::StaticArray:
{
const VulkanTextureSA* ctex = static_cast<const VulkanTextureSA*>(tex);
return &ctex->m_descInfo;
}
case TextureType::Render:
{
const VulkanTextureR* ctex = static_cast<const VulkanTextureR*>(tex);
return depth ? &ctex->m_depthBindDescInfo[bindIdx] : &ctex->m_colorBindDescInfo[bindIdx];
}
default: break;
}
return nullptr;
}
struct VulkanShaderDataBinding : GraphicsDataNode<IShaderDataBinding>
{
VulkanContext* m_ctx;
boo::ObjToken<IShaderPipeline> m_pipeline;
boo::ObjToken<IGraphicsBuffer> m_vbuf;
boo::ObjToken<IGraphicsBuffer> m_instVbuf;
boo::ObjToken<IGraphicsBuffer> m_ibuf;
std::vector<boo::ObjToken<IGraphicsBuffer>> m_ubufs;
std::vector<std::array<VkDescriptorBufferInfo, 2>> m_ubufOffs;
VkImageView m_knownViewHandles[2][BOO_GLSL_MAX_TEXTURE_COUNT] = {};
struct BindTex
{
boo::ObjToken<ITexture> tex;
int idx;
bool depth;
};
std::vector<BindTex> m_texs;
VkBuffer m_vboBufs[2][2] = {{},{}};
VkDeviceSize m_vboOffs[2][2] = {{},{}};
VkBuffer m_iboBufs[2] = {};
VkDeviceSize m_iboOffs[2] = {};
boo::ObjToken<VulkanDescriptorPool> m_descPool;
VkDescriptorSet m_descSets[2] = {};
size_t m_vertOffset;
size_t m_instOffset;
#ifndef NDEBUG
/* Debugging aids */
bool m_committed = false;
#endif
VulkanShaderDataBinding(const boo::ObjToken<BaseGraphicsData>& d,
VulkanDataFactoryImpl& factory,
const boo::ObjToken<IShaderPipeline>& pipeline,
const boo::ObjToken<IGraphicsBuffer>& vbuf,
const boo::ObjToken<IGraphicsBuffer>& instVbuf,
const boo::ObjToken<IGraphicsBuffer>& ibuf,
size_t ubufCount, const boo::ObjToken<IGraphicsBuffer>* ubufs,
const size_t* ubufOffs, const size_t* ubufSizes,
size_t texCount, const boo::ObjToken<ITexture>* texs,
const int* bindIdxs, const bool* depthBinds,
size_t baseVert, size_t baseInst)
: GraphicsDataNode<IShaderDataBinding>(d),
m_ctx(factory.m_ctx),
m_pipeline(pipeline),
m_vbuf(vbuf),
m_instVbuf(instVbuf),
m_ibuf(ibuf)
{
VulkanShaderPipeline* cpipeline = m_pipeline.cast<VulkanShaderPipeline>();
VulkanVertexFormat* vtxFmt = cpipeline->m_vtxFmt.cast<VulkanVertexFormat>();
m_vertOffset = baseVert * vtxFmt->m_stride;
m_instOffset = baseInst * vtxFmt->m_instStride;
if (ubufOffs && ubufSizes)
{
m_ubufOffs.reserve(ubufCount);
for (size_t i=0 ; i<ubufCount ; ++i)
{
#ifndef NDEBUG
if (ubufOffs[i] % 256)
Log.report(logvisor::Fatal, "non-256-byte-aligned uniform-offset %d provided to newShaderDataBinding", int(i));
#endif
std::array<VkDescriptorBufferInfo, 2> fillArr;
fillArr.fill({VK_NULL_HANDLE, ubufOffs[i], (ubufSizes[i] + 255) & ~255});
m_ubufOffs.push_back(fillArr);
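/* The bound range is rounded up to the next 256-byte multiple so it honors
 * the same minimum uniform-buffer alignment assumed by sizeForGPU(). */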
}
}
m_ubufs.reserve(ubufCount);
for (size_t i=0 ; i<ubufCount ; ++i)
{
#ifndef NDEBUG
if (!ubufs[i])
Log.report(logvisor::Fatal, "null uniform-buffer %d provided to newShaderDataBinding", int(i));
#endif
m_ubufs.push_back(ubufs[i]);
}
m_texs.reserve(texCount);
for (size_t i=0 ; i<texCount ; ++i)
{
m_texs.push_back({texs[i], bindIdxs ? bindIdxs[i] : 0, depthBinds ? depthBinds[i] : false});
}
size_t totalDescs = ubufCount + texCount;
if (totalDescs > 0)
m_descPool = factory.allocateDescriptorSets(m_descSets);
}
void commit(VulkanContext* ctx)
{
VkWriteDescriptorSet writes[(BOO_GLSL_MAX_UNIFORM_COUNT + BOO_GLSL_MAX_TEXTURE_COUNT) * 2] = {};
size_t totalWrites = 0;
for (int b=0 ; b<2 ; ++b)
{
if (m_vbuf)
{
const VkDescriptorBufferInfo* vbufInfo = GetBufferGPUResource(m_vbuf.get(), b);
m_vboBufs[b][0] = vbufInfo->buffer;
m_vboOffs[b][0] = vbufInfo->offset + m_vertOffset;
}
if (m_instVbuf)
{
const VkDescriptorBufferInfo* vbufInfo = GetBufferGPUResource(m_instVbuf.get(), b);
m_vboBufs[b][1] = vbufInfo->buffer;
m_vboOffs[b][1] = vbufInfo->offset + m_instOffset;
}
if (m_ibuf)
{
const VkDescriptorBufferInfo* ibufInfo = GetBufferGPUResource(m_ibuf.get(), b);
m_iboBufs[b] = ibufInfo->buffer;
m_iboOffs[b] = ibufInfo->offset;
}
size_t binding = 0;
if (m_ubufOffs.size())
{
for (size_t i=0 ; i<BOO_GLSL_MAX_UNIFORM_COUNT ; ++i)
{
if (i<m_ubufs.size())
{
VkDescriptorBufferInfo& modInfo = m_ubufOffs[i][b];
if (modInfo.range)
{
writes[totalWrites].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
writes[totalWrites].pNext = nullptr;
writes[totalWrites].dstSet = m_descSets[b];
writes[totalWrites].descriptorCount = 1;
writes[totalWrites].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
const VkDescriptorBufferInfo* origInfo = GetBufferGPUResource(m_ubufs[i].get(), b);
modInfo.buffer = origInfo->buffer;
modInfo.offset += origInfo->offset;
writes[totalWrites].pBufferInfo = &modInfo;
writes[totalWrites].dstArrayElement = 0;
writes[totalWrites].dstBinding = binding;
++totalWrites;
}
}
++binding;
}
}
else
{
for (size_t i=0 ; i<BOO_GLSL_MAX_UNIFORM_COUNT ; ++i)
{
if (i<m_ubufs.size())
{
writes[totalWrites].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
writes[totalWrites].pNext = nullptr;
writes[totalWrites].dstSet = m_descSets[b];
writes[totalWrites].descriptorCount = 1;
writes[totalWrites].descriptorType = VK_DESCRIPTOR_TYPE_UNIFORM_BUFFER;
writes[totalWrites].pBufferInfo = GetBufferGPUResource(m_ubufs[i].get(), b);
writes[totalWrites].dstArrayElement = 0;
writes[totalWrites].dstBinding = binding;
++totalWrites;
}
++binding;
}
}
for (size_t i=0 ; i<BOO_GLSL_MAX_TEXTURE_COUNT ; ++i)
{
if (i<m_texs.size() && m_texs[i].tex)
{
writes[totalWrites].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
writes[totalWrites].pNext = nullptr;
writes[totalWrites].dstSet = m_descSets[b];
writes[totalWrites].descriptorCount = 1;
writes[totalWrites].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
writes[totalWrites].pImageInfo = GetTextureGPUResource(m_texs[i].tex.get(), b,
m_texs[i].idx, m_texs[i].depth);
writes[totalWrites].dstArrayElement = 0;
writes[totalWrites].dstBinding = binding;
m_knownViewHandles[b][i] = writes[totalWrites].pImageInfo->imageView;
++totalWrites;
}
++binding;
}
}
if (totalWrites)
vk::UpdateDescriptorSets(ctx->m_dev, totalWrites, writes, 0, nullptr);
#ifndef NDEBUG
m_committed = true;
#endif
}
void bind(VkCommandBuffer cmdBuf, int b, VkRenderPass rPass = VK_NULL_HANDLE)
{
#ifndef NDEBUG
if (!m_committed)
Log.report(logvisor::Fatal,
"attempted to use uncommitted VulkanShaderDataBinding");
#endif
/* Ensure resized texture bindings are re-bound */
size_t binding = BOO_GLSL_MAX_UNIFORM_COUNT;
VkWriteDescriptorSet writes[BOO_GLSL_MAX_TEXTURE_COUNT] = {};
size_t totalWrites = 0;
for (size_t i=0 ; i<BOO_GLSL_MAX_TEXTURE_COUNT ; ++i)
{
if (i<m_texs.size() && m_texs[i].tex)
{
const VkDescriptorImageInfo* resComp = GetTextureGPUResource(m_texs[i].tex.get(), b,
m_texs[i].idx, m_texs[i].depth);
if (resComp->imageView != m_knownViewHandles[b][i])
{
writes[totalWrites].sType = VK_STRUCTURE_TYPE_WRITE_DESCRIPTOR_SET;
writes[totalWrites].pNext = nullptr;
writes[totalWrites].dstSet = m_descSets[b];
writes[totalWrites].descriptorCount = 1;
writes[totalWrites].descriptorType = VK_DESCRIPTOR_TYPE_COMBINED_IMAGE_SAMPLER;
writes[totalWrites].pImageInfo = resComp;
writes[totalWrites].dstArrayElement = 0;
writes[totalWrites].dstBinding = binding;
++totalWrites;
m_knownViewHandles[b][i] = resComp->imageView;
}
}
++binding;
}
if (totalWrites)
vk::UpdateDescriptorSets(m_ctx->m_dev, totalWrites, writes, 0, nullptr);
vk::CmdBindPipeline(cmdBuf, VK_PIPELINE_BIND_POINT_GRAPHICS, m_pipeline.cast<VulkanShaderPipeline>()->bind(rPass));
if (m_descSets[b])
vk::CmdBindDescriptorSets(cmdBuf, VK_PIPELINE_BIND_POINT_GRAPHICS, m_ctx->m_pipelinelayout,
0, 1, &m_descSets[b], 0, nullptr);
if (m_vbuf && m_instVbuf)
vk::CmdBindVertexBuffers(cmdBuf, 0, 2, m_vboBufs[b], m_vboOffs[b]);
else if (m_vbuf)
vk::CmdBindVertexBuffers(cmdBuf, 0, 1, m_vboBufs[b], m_vboOffs[b]);
else if (m_instVbuf)
vk::CmdBindVertexBuffers(cmdBuf, 1, 1, &m_vboBufs[b][1], &m_vboOffs[b][1]);
if (m_ibuf)
vk::CmdBindIndexBuffer(cmdBuf, m_iboBufs[b], m_iboOffs[b], VK_INDEX_TYPE_UINT32);
#if AMD_PAL_HACK
/* The AMD GCN architecture is prone to hanging after binding a new pipeline without also refreshing the
 * device context registers (i.e. viewport, scissor, line width, blend constants). The blend constants
 * are the simplest of these registers to set within the PAL codebase. */
float dummy[4] = {};
vk::CmdSetBlendConstants(cmdBuf, dummy);
#endif
}
};
struct VulkanCommandQueue : IGraphicsCommandQueue
{
Platform platform() const {return IGraphicsDataFactory::Platform::Vulkan;}
const SystemChar* platformName() const {return _S("Vulkan");}
VulkanContext* m_ctx;
VulkanContext::Window* m_windowCtx;
IGraphicsContext* m_parent;
VkCommandPool m_cmdPool;
VkCommandBuffer m_cmdBufs[2];
VkSemaphore m_swapChainReadySem = VK_NULL_HANDLE;
VkSemaphore m_drawCompleteSem = VK_NULL_HANDLE;
VkFence m_drawCompleteFence;
VkCommandPool m_dynamicCmdPool;
VkCommandBuffer m_dynamicCmdBufs[2];
VkFence m_dynamicBufFence;
bool m_running = true;
bool m_dynamicNeedsReset = false;
bool m_submitted = false;
int m_fillBuf = 0;
int m_drawBuf = 0;
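/* Tokens for every resource referenced while filling each command buffer;
 * they keep those resources alive until the buffer has finished executing. */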
std::vector<boo::ObjToken<boo::IObj>> m_drawResTokens[2];
void resetCommandBuffer()
{
ThrowIfFailed(vk::ResetCommandBuffer(m_cmdBufs[m_fillBuf], 0));
VkCommandBufferBeginInfo cmdBufBeginInfo = {};
cmdBufBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
cmdBufBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
ThrowIfFailed(vk::BeginCommandBuffer(m_cmdBufs[m_fillBuf], &cmdBufBeginInfo));
}
void resetDynamicCommandBuffer()
{
ThrowIfFailed(vk::ResetCommandBuffer(m_dynamicCmdBufs[m_fillBuf], 0));
VkCommandBufferBeginInfo cmdBufBeginInfo = {};
cmdBufBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
cmdBufBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
ThrowIfFailed(vk::BeginCommandBuffer(m_dynamicCmdBufs[m_fillBuf], &cmdBufBeginInfo));
m_dynamicNeedsReset = false;
}
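/* Waits for the previously submitted dynamic-upload command buffer to retire
 * (m_dynamicBufFence) before re-opening it for recording, so CPU uploads
 * never race an in-flight GPU copy. */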
void stallDynamicUpload()
{
if (m_dynamicNeedsReset)
{
ThrowIfFailed(vk::WaitForFences(m_ctx->m_dev, 1, &m_dynamicBufFence, VK_FALSE, -1));
resetDynamicCommandBuffer();
}
}
VulkanCommandQueue(VulkanContext* ctx, VulkanContext::Window* windowCtx, IGraphicsContext* parent)
: m_ctx(ctx), m_windowCtx(windowCtx), m_parent(parent)
{
VkCommandPoolCreateInfo poolInfo = {};
poolInfo.sType = VK_STRUCTURE_TYPE_COMMAND_POOL_CREATE_INFO;
poolInfo.flags = VK_COMMAND_POOL_CREATE_RESET_COMMAND_BUFFER_BIT;
poolInfo.queueFamilyIndex = m_ctx->m_graphicsQueueFamilyIndex;
ThrowIfFailed(vk::CreateCommandPool(ctx->m_dev, &poolInfo, nullptr, &m_cmdPool));
ThrowIfFailed(vk::CreateCommandPool(ctx->m_dev, &poolInfo, nullptr, &m_dynamicCmdPool));
VkCommandBufferAllocateInfo allocInfo = {};
allocInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_ALLOCATE_INFO;
allocInfo.commandPool = m_cmdPool;
allocInfo.level = VK_COMMAND_BUFFER_LEVEL_PRIMARY;
allocInfo.commandBufferCount = 2;
VkCommandBufferBeginInfo cmdBufBeginInfo = {};
cmdBufBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
cmdBufBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
ThrowIfFailed(vk::AllocateCommandBuffers(m_ctx->m_dev, &allocInfo, m_cmdBufs));
ThrowIfFailed(vk::BeginCommandBuffer(m_cmdBufs[0], &cmdBufBeginInfo));
2016-02-21 06:27:54 +00:00
2016-06-25 07:04:58 +00:00
allocInfo.commandPool = m_dynamicCmdPool;
2016-06-30 04:54:29 +00:00
ThrowIfFailed(vk::AllocateCommandBuffers(m_ctx->m_dev, &allocInfo, m_dynamicCmdBufs));
ThrowIfFailed(vk::BeginCommandBuffer(m_dynamicCmdBufs[0], &cmdBufBeginInfo));
2016-02-21 06:27:54 +00:00
VkSemaphoreCreateInfo semInfo = {};
semInfo.sType = VK_STRUCTURE_TYPE_SEMAPHORE_CREATE_INFO;
2016-06-30 04:54:29 +00:00
ThrowIfFailed(vk::CreateSemaphore(ctx->m_dev, &semInfo, nullptr, &m_swapChainReadySem));
ThrowIfFailed(vk::CreateSemaphore(ctx->m_dev, &semInfo, nullptr, &m_drawCompleteSem));
2016-02-21 06:27:54 +00:00
VkFenceCreateInfo fenceInfo = {};
fenceInfo.sType = VK_STRUCTURE_TYPE_FENCE_CREATE_INFO;
2016-06-26 00:33:22 +00:00
fenceInfo.flags = VK_FENCE_CREATE_SIGNALED_BIT;
2016-06-30 04:54:29 +00:00
ThrowIfFailed(vk::CreateFence(m_ctx->m_dev, &fenceInfo, nullptr, &m_drawCompleteFence));
ThrowIfFailed(vk::CreateFence(m_ctx->m_dev, &fenceInfo, nullptr, &m_dynamicBufFence));
2016-02-21 06:27:54 +00:00
}
void startRenderer()
{
static_cast<VulkanDataFactoryImpl*>(m_parent->getDataFactory())->SetupGammaResources();
}
    void stopRenderer()
    {
        m_running = false;
        if (m_submitted && vk::GetFenceStatus(m_ctx->m_dev, m_drawCompleteFence) == VK_NOT_READY)
            vk::WaitForFences(m_ctx->m_dev, 1, &m_drawCompleteFence, VK_FALSE, UINT64_MAX);
        stallDynamicUpload();
        static_cast<VulkanDataFactoryImpl*>(m_parent->getDataFactory())->DestroyGammaResources();
        m_drawResTokens[0].clear();
        m_drawResTokens[1].clear();
        m_boundTarget.reset();
        m_resolveDispSource.reset();
    }
    ~VulkanCommandQueue()
    {
        if (m_running)
            stopRenderer();
        vk::DestroyFence(m_ctx->m_dev, m_dynamicBufFence, nullptr);
        vk::DestroyFence(m_ctx->m_dev, m_drawCompleteFence, nullptr);
        vk::DestroySemaphore(m_ctx->m_dev, m_drawCompleteSem, nullptr);
        vk::DestroySemaphore(m_ctx->m_dev, m_swapChainReadySem, nullptr);
        vk::DestroyCommandPool(m_ctx->m_dev, m_dynamicCmdPool, nullptr);
        vk::DestroyCommandPool(m_ctx->m_dev, m_cmdPool, nullptr);
    }
void setShaderDataBinding(const boo::ObjToken<IShaderDataBinding>& binding)
    {
        VulkanShaderDataBinding* cbind = binding.cast<VulkanShaderDataBinding>();
        cbind->bind(m_cmdBufs[m_fillBuf], m_fillBuf);
        m_drawResTokens[m_fillBuf].push_back(binding.get());
}
boo::ObjToken<ITextureR> m_boundTarget;
void setRenderTarget(const boo::ObjToken<ITextureR>& target)
    {
        VulkanTextureR* ctarget = target.cast<VulkanTextureR>();
        VkCommandBuffer cmdBuf = m_cmdBufs[m_fillBuf];
        if (m_boundTarget.get() != ctarget)
        {
            if (m_boundTarget)
            {
                vk::CmdEndRenderPass(cmdBuf);
                VulkanTextureR* btarget = m_boundTarget.cast<VulkanTextureR>();
                SetImageLayout(cmdBuf, btarget->m_colorTex.m_image, VK_IMAGE_ASPECT_COLOR_BIT,
                               VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, 1, 1);
                SetImageLayout(cmdBuf, btarget->m_depthTex.m_image, VK_IMAGE_ASPECT_DEPTH_BIT,
                               VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, 1, 1);
            }

            SetImageLayout(cmdBuf, ctarget->m_colorTex.m_image, VK_IMAGE_ASPECT_COLOR_BIT,
                           ctarget->m_layout, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, 1, 1);
            SetImageLayout(cmdBuf, ctarget->m_depthTex.m_image, VK_IMAGE_ASPECT_DEPTH_BIT,
                           ctarget->m_layout, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, 1, 1);
            ctarget->m_layout = VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL;
            m_boundTarget = target;
            m_drawResTokens[m_fillBuf].push_back(target.get());
        }

        vk::CmdBeginRenderPass(cmdBuf, &ctarget->m_passBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
}
    void setViewport(const SWindowRect& rect, float znear, float zfar)
    {
        if (m_boundTarget)
        {
            VulkanTextureR* ctarget = m_boundTarget.cast<VulkanTextureR>();
            VkViewport vp = {float(rect.location[0]),
                             float(std::max(0, int(ctarget->m_height) - rect.location[1] - rect.size[1])),
                             float(rect.size[0]), float(rect.size[1]), znear, zfar};
            vk::CmdSetViewport(m_cmdBufs[m_fillBuf], 0, 1, &vp);
        }
    }
    void setScissor(const SWindowRect& rect)
    {
        if (m_boundTarget)
        {
            VulkanTextureR* ctarget = m_boundTarget.cast<VulkanTextureR>();
            VkRect2D vkrect =
            {
                {int32_t(rect.location[0]),
                 int32_t(std::max(0, int(ctarget->m_height) - rect.location[1] - rect.size[1]))},
                {uint32_t(rect.size[0]), uint32_t(rect.size[1])}
            };
            vk::CmdSetScissor(m_cmdBufs[m_fillBuf], 0, 1, &vkrect);
        }
}
std::unordered_map<VulkanTextureR*, std::pair<size_t, size_t>> m_texResizes;
void resizeRenderTexture(const boo::ObjToken<ITextureR>& tex, size_t width, size_t height)
    {
        VulkanTextureR* ctex = tex.cast<VulkanTextureR>();
        m_texResizes[ctex] = std::make_pair(width, height);
        m_drawResTokens[m_fillBuf].push_back(tex.get());
    }
    void schedulePostFrameHandler(std::function<void(void)>&& func)
    {
        func();
    }
    float m_clearColor[4] = {0.0,0.0,0.0,0.0};
void setClearColor(const float rgba[4])
{
m_clearColor[0] = rgba[0];
m_clearColor[1] = rgba[1];
m_clearColor[2] = rgba[2];
m_clearColor[3] = rgba[3];
}
void clearTarget(bool render=true, bool depth=true)
{
if (!m_boundTarget)
return;
VulkanTextureR* ctarget = m_boundTarget.cast<VulkanTextureR>();
        VkClearAttachment clr[2] = {};
        VkClearRect rect = {};
        rect.layerCount = 1;
        rect.rect.extent.width = ctarget->m_width;
        rect.rect.extent.height = ctarget->m_height;
        if (render && depth)
        {
            clr[0].clearValue.color.float32[0] = m_clearColor[0];
            clr[0].clearValue.color.float32[1] = m_clearColor[1];
            clr[0].clearValue.color.float32[2] = m_clearColor[2];
            clr[0].clearValue.color.float32[3] = m_clearColor[3];
            clr[0].aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
            clr[1].aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
            clr[1].clearValue.depthStencil.depth = 1.f;
            vk::CmdClearAttachments(m_cmdBufs[m_fillBuf], 2, clr, 1, &rect);
        }
        else if (render)
        {
            clr[0].clearValue.color.float32[0] = m_clearColor[0];
            clr[0].clearValue.color.float32[1] = m_clearColor[1];
            clr[0].clearValue.color.float32[2] = m_clearColor[2];
            clr[0].clearValue.color.float32[3] = m_clearColor[3];
            clr[0].aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
            vk::CmdClearAttachments(m_cmdBufs[m_fillBuf], 1, clr, 1, &rect);
        }
        else if (depth)
        {
            clr[0].aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
            clr[0].clearValue.depthStencil.depth = 1.f;
            vk::CmdClearAttachments(m_cmdBufs[m_fillBuf], 1, clr, 1, &rect);
        }
}
void draw(size_t start, size_t count)
{
        vk::CmdDraw(m_cmdBufs[m_fillBuf], count, 1, start, 0);
    }
    void drawIndexed(size_t start, size_t count)
    {
        vk::CmdDrawIndexed(m_cmdBufs[m_fillBuf], count, 1, start, 0, 0);
    }
    void drawInstances(size_t start, size_t count, size_t instCount)
    {
        vk::CmdDraw(m_cmdBufs[m_fillBuf], count, instCount, start, 0);
    }
    void drawInstancesIndexed(size_t start, size_t count, size_t instCount)
    {
        vk::CmdDrawIndexed(m_cmdBufs[m_fillBuf], count, instCount, start, 0, 0);
}
boo::ObjToken<ITextureR> m_resolveDispSource;
void resolveDisplay(const boo::ObjToken<ITextureR>& source)
    {
        m_resolveDispSource = source;
    }
    bool _resolveDisplay()
    {
        if (!m_resolveDispSource)
            return false;
        VulkanContext::Window::SwapChain& sc = m_windowCtx->m_swapChains[m_windowCtx->m_activeSwapChain];
        if (!sc.m_swapChain)
            return false;
        VkCommandBuffer cmdBuf = m_cmdBufs[m_drawBuf];
        VulkanTextureR* csource = m_resolveDispSource.cast<VulkanTextureR>();
#ifndef NDEBUG
        if (!csource->m_colorBindCount)
            Log.report(logvisor::Fatal,
                       "texture provided to resolveDisplay() must have at least 1 color binding");
#endif

        ThrowIfFailed(vk::AcquireNextImageKHR(m_ctx->m_dev, sc.m_swapChain, UINT64_MAX,
                                              m_swapChainReadySem, nullptr, &sc.m_backBuf));
        VulkanContext::Window::SwapChain::Buffer& dest = sc.m_bufs[sc.m_backBuf];

        VulkanDataFactoryImpl* dataFactory = static_cast<VulkanDataFactoryImpl*>(m_parent->getDataFactory());
        if (dataFactory->m_gamma != 1.f || m_ctx->m_internalFormat != m_ctx->m_displayFormat)
        {
            /* Gamma/format-conversion path: draw the source through the gamma shader */
            SWindowRect rect(0, 0, csource->m_width, csource->m_height);
            _resolveBindTexture(cmdBuf, csource, rect, true, 0, true, false);
            VulkanShaderDataBinding* gammaBinding = dataFactory->m_gammaBinding.cast<VulkanShaderDataBinding>();
            SetImageLayout(cmdBuf, dest.m_image, VK_IMAGE_ASPECT_COLOR_BIT,
                           VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, 1, 1);
            vk::CmdBeginRenderPass(cmdBuf, &dest.m_passBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
            gammaBinding->m_texs[0].tex = m_resolveDispSource.get();
            gammaBinding->bind(cmdBuf, m_drawBuf, m_ctx->m_passColorOnly);
            vk::CmdDraw(cmdBuf, 4, 1, 0, 0);
            gammaBinding->m_texs[0].tex.reset();
            vk::CmdEndRenderPass(cmdBuf);
            SetImageLayout(cmdBuf, dest.m_image, VK_IMAGE_ASPECT_COLOR_BIT,
                           VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, 1, 1);
        }
        else
        {
            /* Direct path: resolve or copy the color target into the swap-chain image */
            SetImageLayout(cmdBuf, dest.m_image, VK_IMAGE_ASPECT_COLOR_BIT,
                           VK_IMAGE_LAYOUT_UNDEFINED, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, 1);
            if (m_resolveDispSource == m_boundTarget)
                SetImageLayout(cmdBuf, csource->m_colorTex.m_image, VK_IMAGE_ASPECT_COLOR_BIT,
                               VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, 1, 1);
            if (csource->m_samplesColor > 1)
            {
                VkImageResolve resolveInfo = {};
                resolveInfo.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
                resolveInfo.srcSubresource.mipLevel = 0;
                resolveInfo.srcSubresource.baseArrayLayer = 0;
                resolveInfo.srcSubresource.layerCount = 1;
                resolveInfo.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
                resolveInfo.dstSubresource.mipLevel = 0;
                resolveInfo.dstSubresource.baseArrayLayer = 0;
                resolveInfo.dstSubresource.layerCount = 1;
                resolveInfo.extent.width = csource->m_width;
                resolveInfo.extent.height = csource->m_height;
                resolveInfo.extent.depth = 1;
                vk::CmdResolveImage(cmdBuf,
                                    csource->m_colorTex.m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                    dest.m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                    1, &resolveInfo);
            }
            else
            {
                VkImageCopy copyInfo = {};
                copyInfo.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
                copyInfo.srcSubresource.mipLevel = 0;
                copyInfo.srcSubresource.baseArrayLayer = 0;
                copyInfo.srcSubresource.layerCount = 1;
                copyInfo.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
                copyInfo.dstSubresource.mipLevel = 0;
                copyInfo.dstSubresource.baseArrayLayer = 0;
                copyInfo.dstSubresource.layerCount = 1;
                copyInfo.extent.width = csource->m_width;
                copyInfo.extent.height = csource->m_height;
                copyInfo.extent.depth = 1;
                vk::CmdCopyImage(cmdBuf,
                                 csource->m_colorTex.m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                 dest.m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                 1, &copyInfo);
            }
            SetImageLayout(cmdBuf, dest.m_image, VK_IMAGE_ASPECT_COLOR_BIT,
                           VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_PRESENT_SRC_KHR, 1, 1);
            if (m_resolveDispSource == m_boundTarget)
                SetImageLayout(cmdBuf, csource->m_colorTex.m_image, VK_IMAGE_ASPECT_COLOR_BIT,
                               VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, 1, 1);
        }
        m_resolveDispSource.reset();
        return true;
}
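
    /* Sketch of the frame flow that feeds _resolveDisplay(), assuming the usual
     * boo client loop (the client-side object names are illustrative only):
     *
     *     queue->setRenderTarget(renderTex);
     *     queue->clearTarget();
     *     // ...draw calls...
     *     queue->resolveDisplay(renderTex);  // latched; consumed by execute()
     *     queue->execute();                  // submits and presents
     *
     * _resolveDisplay() takes the gamma-shader path when a LUT or an
     * internal/display format conversion is required, and otherwise resolves
     * (MSAA) or copies straight into the acquired swap-chain image. */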
void _resolveBindTexture(VkCommandBuffer cmdBuf, VulkanTextureR* ctexture,
const SWindowRect& rect, bool tlOrigin,
int bindIdx, bool color, bool depth)
{
        /* Copy (or MSAA-resolve) the render target's current contents into
         * one of its shader-bindable color/depth textures */
        if (color && ctexture->m_colorBindCount)
        {
            if (ctexture->m_samplesColor <= 1)
            {
                /* Single-sampled: direct image copy */
                VkImageCopy copyInfo = {};
                SWindowRect intersectRect = rect.intersect(SWindowRect(0, 0, ctexture->m_width, ctexture->m_height));
                copyInfo.srcOffset.y = tlOrigin ? intersectRect.location[1] :
                    (ctexture->m_height - intersectRect.size[1] - intersectRect.location[1]);
                copyInfo.srcOffset.x = intersectRect.location[0];
                copyInfo.dstOffset = copyInfo.srcOffset;
                copyInfo.extent.width = intersectRect.size[0];
                copyInfo.extent.height = intersectRect.size[1];
                copyInfo.extent.depth = 1;
                copyInfo.dstSubresource.mipLevel = 0;
                copyInfo.dstSubresource.baseArrayLayer = 0;
                copyInfo.dstSubresource.layerCount = 1;
                copyInfo.srcSubresource.mipLevel = 0;
                copyInfo.srcSubresource.baseArrayLayer = 0;
                copyInfo.srcSubresource.layerCount = 1;
                if (ctexture == m_boundTarget.get())
                    SetImageLayout(cmdBuf, ctexture->m_colorTex.m_image, VK_IMAGE_ASPECT_COLOR_BIT,
                                   VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, 1, 1);
                SetImageLayout(cmdBuf, ctexture->m_colorBindTex[bindIdx].m_image, VK_IMAGE_ASPECT_COLOR_BIT,
                               ctexture->m_colorBindLayout[bindIdx], VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, 1);
                copyInfo.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
                copyInfo.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
                vk::CmdCopyImage(cmdBuf,
                                 ctexture->m_colorTex.m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                 ctexture->m_colorBindTex[bindIdx].m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                 1, &copyInfo);
                if (ctexture == m_boundTarget.get())
                    SetImageLayout(cmdBuf, ctexture->m_colorTex.m_image, VK_IMAGE_ASPECT_COLOR_BIT,
                                   VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, 1, 1);
                SetImageLayout(cmdBuf, ctexture->m_colorBindTex[bindIdx].m_image, VK_IMAGE_ASPECT_COLOR_BIT,
                               VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, 1, 1);
                ctexture->m_colorBindLayout[bindIdx] = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
            }
            else
            {
                /* Multisampled: resolve */
                VkImageResolve resolveInfo = {};
                SWindowRect intersectRect = rect.intersect(SWindowRect(0, 0, ctexture->m_width, ctexture->m_height));
                resolveInfo.srcOffset.y = tlOrigin ? intersectRect.location[1] :
                    (ctexture->m_height - intersectRect.size[1] - intersectRect.location[1]);
                resolveInfo.srcOffset.x = intersectRect.location[0];
                resolveInfo.dstOffset = resolveInfo.srcOffset;
                resolveInfo.extent.width = intersectRect.size[0];
                resolveInfo.extent.height = intersectRect.size[1];
                resolveInfo.extent.depth = 1;
                resolveInfo.dstSubresource.mipLevel = 0;
                resolveInfo.dstSubresource.baseArrayLayer = 0;
                resolveInfo.dstSubresource.layerCount = 1;
                resolveInfo.srcSubresource.mipLevel = 0;
                resolveInfo.srcSubresource.baseArrayLayer = 0;
                resolveInfo.srcSubresource.layerCount = 1;
                if (ctexture == m_boundTarget.get())
                    SetImageLayout(cmdBuf, ctexture->m_colorTex.m_image, VK_IMAGE_ASPECT_COLOR_BIT,
                                   VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, 1, 1);
                SetImageLayout(cmdBuf, ctexture->m_colorBindTex[bindIdx].m_image, VK_IMAGE_ASPECT_COLOR_BIT,
                               ctexture->m_colorBindLayout[bindIdx], VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, 1);
                resolveInfo.srcSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
                resolveInfo.dstSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
                vk::CmdResolveImage(cmdBuf,
                                    ctexture->m_colorTex.m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                    ctexture->m_colorBindTex[bindIdx].m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                    1, &resolveInfo);
                if (ctexture == m_boundTarget.get())
                    SetImageLayout(cmdBuf, ctexture->m_colorTex.m_image, VK_IMAGE_ASPECT_COLOR_BIT,
                                   VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, VK_IMAGE_LAYOUT_COLOR_ATTACHMENT_OPTIMAL, 1, 1);
                SetImageLayout(cmdBuf, ctexture->m_colorBindTex[bindIdx].m_image, VK_IMAGE_ASPECT_COLOR_BIT,
                               VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, 1, 1);
                ctexture->m_colorBindLayout[bindIdx] = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
            }
        }
        if (depth && ctexture->m_depthBindCount)
        {
            if (ctexture->m_samplesDepth <= 1)
            {
                /* Single-sampled: direct image copy */
                VkImageCopy copyInfo = {};
                SWindowRect intersectRect = rect.intersect(SWindowRect(0, 0, ctexture->m_width, ctexture->m_height));
                copyInfo.srcOffset.y = tlOrigin ? intersectRect.location[1] :
                    (ctexture->m_height - intersectRect.size[1] - intersectRect.location[1]);
                copyInfo.srcOffset.x = intersectRect.location[0];
                copyInfo.dstOffset = copyInfo.srcOffset;
                copyInfo.extent.width = intersectRect.size[0];
                copyInfo.extent.height = intersectRect.size[1];
                copyInfo.extent.depth = 1;
                copyInfo.dstSubresource.mipLevel = 0;
                copyInfo.dstSubresource.baseArrayLayer = 0;
                copyInfo.dstSubresource.layerCount = 1;
                copyInfo.srcSubresource.mipLevel = 0;
                copyInfo.srcSubresource.baseArrayLayer = 0;
                copyInfo.srcSubresource.layerCount = 1;
                if (ctexture == m_boundTarget.get())
                    SetImageLayout(cmdBuf, ctexture->m_depthTex.m_image, VK_IMAGE_ASPECT_DEPTH_BIT,
                                   VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, 1, 1);
                SetImageLayout(cmdBuf, ctexture->m_depthBindTex[bindIdx].m_image, VK_IMAGE_ASPECT_DEPTH_BIT,
                               ctexture->m_depthBindLayout[bindIdx], VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, 1);
                copyInfo.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
                copyInfo.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
                vk::CmdCopyImage(cmdBuf,
                                 ctexture->m_depthTex.m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                 ctexture->m_depthBindTex[bindIdx].m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                 1, &copyInfo);
                if (ctexture == m_boundTarget.get())
                    SetImageLayout(cmdBuf, ctexture->m_depthTex.m_image, VK_IMAGE_ASPECT_DEPTH_BIT,
                                   VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, 1, 1);
                SetImageLayout(cmdBuf, ctexture->m_depthBindTex[bindIdx].m_image, VK_IMAGE_ASPECT_DEPTH_BIT,
                               VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, 1, 1);
                ctexture->m_depthBindLayout[bindIdx] = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
            }
            else
            {
                /* Multisampled: resolve */
                VkImageResolve resolveInfo = {};
                SWindowRect intersectRect = rect.intersect(SWindowRect(0, 0, ctexture->m_width, ctexture->m_height));
                resolveInfo.srcOffset.y = tlOrigin ? intersectRect.location[1] :
                    (ctexture->m_height - intersectRect.size[1] - intersectRect.location[1]);
                resolveInfo.srcOffset.x = intersectRect.location[0];
                resolveInfo.dstOffset = resolveInfo.srcOffset;
                resolveInfo.extent.width = intersectRect.size[0];
                resolveInfo.extent.height = intersectRect.size[1];
                resolveInfo.extent.depth = 1;
                resolveInfo.dstSubresource.mipLevel = 0;
                resolveInfo.dstSubresource.baseArrayLayer = 0;
                resolveInfo.dstSubresource.layerCount = 1;
                resolveInfo.srcSubresource.mipLevel = 0;
                resolveInfo.srcSubresource.baseArrayLayer = 0;
                resolveInfo.srcSubresource.layerCount = 1;
                if (ctexture == m_boundTarget.get())
                    SetImageLayout(cmdBuf, ctexture->m_depthTex.m_image, VK_IMAGE_ASPECT_DEPTH_BIT,
                                   VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, 1, 1);
                SetImageLayout(cmdBuf, ctexture->m_depthBindTex[bindIdx].m_image, VK_IMAGE_ASPECT_DEPTH_BIT,
                               ctexture->m_depthBindLayout[bindIdx], VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, 1);
                resolveInfo.srcSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
                resolveInfo.dstSubresource.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
                vk::CmdResolveImage(cmdBuf,
                                    ctexture->m_depthTex.m_image, VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL,
                                    ctexture->m_depthBindTex[bindIdx].m_image, VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                    1, &resolveInfo);
                if (ctexture == m_boundTarget.get())
                    SetImageLayout(cmdBuf, ctexture->m_depthTex.m_image, VK_IMAGE_ASPECT_DEPTH_BIT,
                                   VK_IMAGE_LAYOUT_TRANSFER_SRC_OPTIMAL, VK_IMAGE_LAYOUT_DEPTH_STENCIL_ATTACHMENT_OPTIMAL, 1, 1);
                SetImageLayout(cmdBuf, ctexture->m_depthBindTex[bindIdx].m_image, VK_IMAGE_ASPECT_DEPTH_BIT,
                               VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, 1, 1);
                ctexture->m_depthBindLayout[bindIdx] = VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL;
            }
        }
}
void resolveBindTexture(const boo::ObjToken<ITextureR>& texture,
const SWindowRect& rect, bool tlOrigin,
int bindIdx, bool color, bool depth, bool clearDepth)
{
VkCommandBuffer cmdBuf = m_cmdBufs[m_fillBuf];
VulkanTextureR* ctexture = texture.cast<VulkanTextureR>();
        vk::CmdEndRenderPass(cmdBuf);
        _resolveBindTexture(cmdBuf, ctexture, rect, tlOrigin, bindIdx, color, depth);
        vk::CmdBeginRenderPass(cmdBuf, &m_boundTarget.cast<VulkanTextureR>()->m_passBeginInfo, VK_SUBPASS_CONTENTS_INLINE);
        if (clearDepth)
        {
            VkClearAttachment clr = {};
            /* renamed from 'rect' to avoid shadowing the parameter */
            VkClearRect clrRect = {};
            clrRect.layerCount = 1;
            clrRect.rect.extent.width = ctexture->m_width;
            clrRect.rect.extent.height = ctexture->m_height;
            clr.aspectMask = VK_IMAGE_ASPECT_DEPTH_BIT;
            clr.clearValue.depthStencil.depth = 1.f;
            vk::CmdClearAttachments(cmdBuf, 1, &clr, 1, &clrRect);
        }
}
void execute();
};
void VulkanTextureR::doDestroy()
{
    if (m_framebuffer)
    {
        vk::DestroyFramebuffer(m_q->m_ctx->m_dev, m_framebuffer, nullptr);
        m_framebuffer = VK_NULL_HANDLE;
    }
    if (m_colorView)
    {
        vk::DestroyImageView(m_q->m_ctx->m_dev, m_colorView, nullptr);
        m_colorView = VK_NULL_HANDLE;
    }
    m_colorTex.destroy(m_q->m_ctx);
    if (m_depthView)
    {
        vk::DestroyImageView(m_q->m_ctx->m_dev, m_depthView, nullptr);
        m_depthView = VK_NULL_HANDLE;
    }
    m_depthTex.destroy(m_q->m_ctx);
    for (size_t i=0 ; i<MAX_BIND_TEXS ; ++i)
        if (m_colorBindView[i])
        {
            vk::DestroyImageView(m_q->m_ctx->m_dev, m_colorBindView[i], nullptr);
            m_colorBindView[i] = VK_NULL_HANDLE;
        }
    for (size_t i=0 ; i<MAX_BIND_TEXS ; ++i)
        m_colorBindTex[i].destroy(m_q->m_ctx);
    for (size_t i=0 ; i<MAX_BIND_TEXS ; ++i)
        if (m_depthBindView[i])
        {
            vk::DestroyImageView(m_q->m_ctx->m_dev, m_depthBindView[i], nullptr);
            m_depthBindView[i] = VK_NULL_HANDLE;
        }
    for (size_t i=0 ; i<MAX_BIND_TEXS ; ++i)
        m_depthBindTex[i].destroy(m_q->m_ctx);
}
VulkanTextureR::VulkanTextureR(const boo::ObjToken<BaseGraphicsData>& parent, VulkanCommandQueue* q,
size_t width, size_t height, TextureClampMode clampMode,
size_t colorBindCount, size_t depthBindCount)
: GraphicsDataNode<ITextureR>(parent), m_q(q),
m_width(width), m_height(height),
m_samplesColor(q->m_ctx->m_sampleCountColor),
m_samplesDepth(q->m_ctx->m_sampleCountDepth),
m_colorBindCount(colorBindCount),
m_depthBindCount(depthBindCount)
{
if (colorBindCount > MAX_BIND_TEXS)
Log.report(logvisor::Fatal, "too many color bindings for render texture");
if (depthBindCount > MAX_BIND_TEXS)
Log.report(logvisor::Fatal, "too many depth bindings for render texture");
if (m_samplesColor == 0) m_samplesColor = 1;
if (m_samplesDepth == 0) m_samplesDepth = 1;
setClampMode(clampMode);
Setup(q->m_ctx);
}
VulkanTextureR::~VulkanTextureR()
{
    vk::DestroyFramebuffer(m_q->m_ctx->m_dev, m_framebuffer, nullptr);
    vk::DestroyImageView(m_q->m_ctx->m_dev, m_colorView, nullptr);
    m_colorTex.destroy(m_q->m_ctx);
    vk::DestroyImageView(m_q->m_ctx->m_dev, m_depthView, nullptr);
    m_depthTex.destroy(m_q->m_ctx);
    for (size_t i=0 ; i<MAX_BIND_TEXS ; ++i)
        if (m_colorBindView[i])
            vk::DestroyImageView(m_q->m_ctx->m_dev, m_colorBindView[i], nullptr);
    for (size_t i=0 ; i<MAX_BIND_TEXS ; ++i)
        m_colorBindTex[i].destroy(m_q->m_ctx);
    for (size_t i=0 ; i<MAX_BIND_TEXS ; ++i)
        if (m_depthBindView[i])
            vk::DestroyImageView(m_q->m_ctx->m_dev, m_depthBindView[i], nullptr);
    for (size_t i=0 ; i<MAX_BIND_TEXS ; ++i)
        m_depthBindTex[i].destroy(m_q->m_ctx);
}
void VulkanTextureR::setClampMode(TextureClampMode mode)
{
MakeSampler(m_q->m_ctx, m_sampler, mode, 1);
for (size_t i=0 ; i<m_colorBindCount ; ++i)
m_colorBindDescInfo[i].sampler = m_sampler;
for (size_t i=0 ; i<m_depthBindCount ; ++i)
m_depthBindDescInfo[i].sampler = m_sampler;
}
template <class DataCls>
void VulkanGraphicsBufferD<DataCls>::update(int b)
{
    int slot = 1 << b;
    if ((slot & m_validSlots) == 0)
    {
        memmove(m_bufferPtrs[b], m_cpuBuf.get(), m_cpuSz);
        m_validSlots |= slot;
    }
}
template <class DataCls>
void VulkanGraphicsBufferD<DataCls>::load(const void* data, size_t sz)
{
    size_t bufSz = std::min(sz, m_cpuSz);
    memmove(m_cpuBuf.get(), data, bufSz);
    m_validSlots = 0;
}
template <class DataCls>
void* VulkanGraphicsBufferD<DataCls>::map(size_t sz)
{
    if (sz > m_cpuSz)
        return nullptr;
    return m_cpuBuf.get();
}
template <class DataCls>
void VulkanGraphicsBufferD<DataCls>::unmap()
{
    m_validSlots = 0;
}
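
/* Usage sketch for the dynamic-buffer protocol above (hypothetical client
 * code; 'ubo' stands for any IGraphicsBufferD pointer from this factory):
 *
 *     if (void* p = ubo->map(sizeof(Uniforms)))
 *     {
 *         memcpy(p, &uniforms, sizeof(Uniforms));
 *         ubo->unmap();   // clears m_validSlots, invalidating both slots
 *     }
 *
 * update(b) then lazily re-copies the CPU shadow into per-frame slot 'b'
 * the next time the command queue touches that slot. */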
VulkanTextureD::~VulkanTextureD()
{
    vk::DestroyImageView(m_q->m_ctx->m_dev, m_gpuView[0], nullptr);
    vk::DestroyImageView(m_q->m_ctx->m_dev, m_gpuView[1], nullptr);
    m_gpuTex[0].destroy(m_q->m_ctx);
    m_gpuTex[1].destroy(m_q->m_ctx);
}
void VulkanTextureD::update(int b)
{
    int slot = 1 << b;
    if ((slot & m_validSlots) == 0)
    {
        m_q->stallDynamicUpload();
        VkCommandBuffer cmdBuf = m_q->m_dynamicCmdBufs[b];

        /* Copy staging data into the CPU-visible buffer slot */
        memmove(m_cpuBufPtrs[b], m_stagingBuf.get(), m_cpuSz);

        /* Transition the GPU image for transfer */
        SetImageLayout(cmdBuf, m_gpuTex[b].m_image, VK_IMAGE_ASPECT_COLOR_BIT,
                       VK_IMAGE_LAYOUT_UNDEFINED,
                       VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL, 1, 1);

        /* Put the copy command into the command buffer */
        VkBufferImageCopy copyRegion = {};
        copyRegion.imageSubresource.aspectMask = VK_IMAGE_ASPECT_COLOR_BIT;
        copyRegion.imageSubresource.mipLevel = 0;
        copyRegion.imageSubresource.baseArrayLayer = 0;
        copyRegion.imageSubresource.layerCount = 1;
        copyRegion.imageExtent.width = m_width;
        copyRegion.imageExtent.height = m_height;
        copyRegion.imageExtent.depth = 1;
        copyRegion.bufferOffset = m_cpuOffsets[b];

        vk::CmdCopyBufferToImage(cmdBuf,
                                 m_cpuBuf,
                                 m_gpuTex[b].m_image,
                                 VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                                 1,
                                 &copyRegion);

        /* Transition the texture image from TRANSFER_DST_OPTIMAL to
         * SHADER_READ_ONLY_OPTIMAL */
        SetImageLayout(cmdBuf, m_gpuTex[b].m_image, VK_IMAGE_ASPECT_COLOR_BIT,
                       VK_IMAGE_LAYOUT_TRANSFER_DST_OPTIMAL,
                       VK_IMAGE_LAYOUT_SHADER_READ_ONLY_OPTIMAL, 1, 1);

        m_validSlots |= slot;
}
}
void VulkanTextureD::setClampMode(TextureClampMode mode)
{
    m_clampMode = mode;
    MakeSampler(m_q->m_ctx, m_sampler, mode, 1);
    for (int i=0 ; i<2 ; ++i)
        m_descInfo[i].sampler = m_sampler;
}
void VulkanTextureD::load(const void* data, size_t sz)
{
    size_t bufSz = std::min(sz, m_cpuSz);
    memmove(m_stagingBuf.get(), data, bufSz);
    m_validSlots = 0;
}
void* VulkanTextureD::map(size_t sz)
{
    if (sz > m_cpuSz)
        return nullptr;
    return m_stagingBuf.get();
}
void VulkanTextureD::unmap()
{
m_validSlots = 0;
}
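
/* Dynamic textures follow the same two-slot protocol, but through a staging
 * buffer: load()/map() write m_stagingBuf on the CPU, and update(b) records
 * a buffer-to-image copy plus layout transitions into dynamic command
 * buffer 'b'. Hypothetical sketch (RGBA8 layout assumed):
 *
 *     tex->load(pixels, width * height * 4);
 *     // the next execute() uploads the staging data for the active slot
 */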
VulkanDataFactoryImpl::VulkanDataFactoryImpl(IGraphicsContext* parent, VulkanContext* ctx)
: m_parent(parent), m_ctx(ctx) {}
VulkanShareableShader::Token VulkanDataFactoryImpl::PrepareShaderStage(const char* source,
                                                                       std::vector<unsigned int>* blobOut,
                                                                       EShLanguage lang)
{
uint64_t srcHash = 0;
uint64_t binHash = 0;
XXH64_state_t hashState;
XXH64_reset(&hashState, 0);
if (source)
{
XXH64_update(&hashState, source, strlen(source));
srcHash = XXH64_digest(&hashState);
auto binSearch = m_sourceToBinary.find(srcHash);
if (binSearch != m_sourceToBinary.cend())
binHash = binSearch->second;
}
else if (blobOut && blobOut->size())
    {
        XXH64_update(&hashState, blobOut->data(), blobOut->size() * sizeof(unsigned int));
        binHash = XXH64_digest(&hashState);
    }
    if (blobOut && blobOut->empty())
        binHash = Compile(*blobOut, source, srcHash, lang);
    auto search = binHash ? m_sharedShaders.find(binHash) : m_sharedShaders.end();
    if (search != m_sharedShaders.end())
    {
        return search->second->lock();
    }
    else
    {
        std::vector<unsigned int> blob;
        const std::vector<unsigned int>* useBlob;
        if (blobOut)
        {
            useBlob = blobOut;
        }
        else
        {
            useBlob = &blob;
            binHash = Compile(blob, source, srcHash, lang);
        }
        VkShaderModuleCreateInfo smCreateInfo = {};
        smCreateInfo.sType = VK_STRUCTURE_TYPE_SHADER_MODULE_CREATE_INFO;
        smCreateInfo.pNext = nullptr;
        smCreateInfo.flags = 0;
        VkShaderModule module;
        smCreateInfo.codeSize = useBlob->size() * sizeof(unsigned int);
        smCreateInfo.pCode = useBlob->data();
        ThrowIfFailed(vk::CreateShaderModule(m_ctx->m_dev, &smCreateInfo, nullptr, &module));
        auto it =
            m_sharedShaders.emplace(std::make_pair(binHash,
                std::make_unique<VulkanShareableShader>(*this, srcHash, binHash, module))).first;
        return it->second->lock();
    }
}
uint64_t VulkanDataFactoryImpl::Compile(std::vector<unsigned int>& out, const char* source,
uint64_t srcKey, EShLanguage lang)
{
    const EShMessages messages = EShMessages(EShMsgSpvRules | EShMsgVulkanRules);
    glslang::TShader shader(lang);
    shader.setStrings(&source, 1);
    if (!shader.parse(&glslang::DefaultTBuiltInResource, 110, false, messages))
    {
        printf("%s\n", source);
        Log.report(logvisor::Fatal, "unable to compile shader\n%s", shader.getInfoLog());
    }
    glslang::TProgram prog;
    prog.addShader(&shader);
    if (!prog.link(messages))
    {
        Log.report(logvisor::Fatal, "unable to link shader program\n%s", prog.getInfoLog());
    }
    glslang::GlslangToSpv(*prog.getIntermediate(lang), out);
    //spv::Disassemble(std::cerr, out);
    XXH64_state_t hashState;
    XXH64_reset(&hashState, 0);
    XXH64_update(&hashState, out.data(), out.size() * sizeof(unsigned int));
    uint64_t binKey = XXH64_digest(&hashState);
    m_sourceToBinary[srcKey] = binKey;
    return binKey;
}
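
/* Minimal standalone sketch of the same glslang round-trip, assuming
 * glslang::InitializeProcess() has already been called elsewhere during
 * context setup; 'fail' is a hypothetical error handler:
 *
 *     const EShMessages msgs = EShMessages(EShMsgSpvRules | EShMsgVulkanRules);
 *     glslang::TShader sh(EShLangVertex);
 *     sh.setStrings(&src, 1);
 *     if (!sh.parse(&glslang::DefaultTBuiltInResource, 110, false, msgs))
 *         fail(sh.getInfoLog());
 *     glslang::TProgram prog;
 *     prog.addShader(&sh);
 *     if (!prog.link(msgs))
 *         fail(prog.getInfoLog());
 *     std::vector<unsigned int> spirv;
 *     glslang::GlslangToSpv(*prog.getIntermediate(EShLangVertex), spirv);
 */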
boo::ObjToken<IShaderPipeline> VulkanDataFactory::Context::newShaderPipeline
(const char* vertSource, const char* fragSource,
 std::vector<unsigned int>* vertBlobOut, std::vector<unsigned int>* fragBlobOut,
 std::vector<unsigned char>* pipelineBlob, const boo::ObjToken<IVertexFormat>& vtxFmt,
 BlendFactor srcFac, BlendFactor dstFac, Primitive prim,
 ZTest depthTest, bool depthWrite, bool colorWrite,
 bool alphaWrite, CullMode culling, bool overwriteAlpha)
{
    VulkanDataFactoryImpl& factory = static_cast<VulkanDataFactoryImpl&>(m_parent);
    VulkanShareableShader::Token vertShader = factory.PrepareShaderStage(vertSource, vertBlobOut, EShLangVertex);
    VulkanShareableShader::Token fragShader = factory.PrepareShaderStage(fragSource, fragBlobOut, EShLangFragment);
    VkPipelineCache pipelineCache = VK_NULL_HANDLE;
    if (pipelineBlob)
    {
        VkPipelineCacheCreateInfo cacheDataInfo = {};
        cacheDataInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
        cacheDataInfo.pNext = nullptr;
        cacheDataInfo.initialDataSize = pipelineBlob->size();
        if (cacheDataInfo.initialDataSize)
            cacheDataInfo.pInitialData = pipelineBlob->data();
        ThrowIfFailed(vk::CreatePipelineCache(factory.m_ctx->m_dev, &cacheDataInfo, nullptr, &pipelineCache));
    }
    VulkanShaderPipeline* retval = new VulkanShaderPipeline(m_data, factory.m_ctx, std::move(vertShader),
                                                            std::move(fragShader), pipelineCache, vtxFmt, srcFac,
                                                            dstFac, prim, depthTest, depthWrite, colorWrite,
                                                            alphaWrite, overwriteAlpha, culling);
    if (pipelineBlob && pipelineBlob->empty())
    {
        size_t cacheSz = 0;
        ThrowIfFailed(vk::GetPipelineCacheData(factory.m_ctx->m_dev, pipelineCache, &cacheSz, nullptr));
        if (cacheSz)
        {
            pipelineBlob->resize(cacheSz);
            ThrowIfFailed(vk::GetPipelineCacheData(factory.m_ctx->m_dev, pipelineCache,
                                                   &cacheSz, pipelineBlob->data()));
            pipelineBlob->resize(cacheSz);
        }
    }
    return {retval};
}
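
/* Hypothetical caller of newShaderPipeline(), mirroring the parameter order
 * above; the blob vectors let callers cache SPIR-V and the pipeline cache
 * between runs (the enum spellings follow boo's headers and are assumptions
 * in this sketch):
 *
 *     std::vector<unsigned int> vertBlob, fragBlob;
 *     std::vector<unsigned char> cacheBlob;
 *     auto pipeline = ctx.newShaderPipeline(vsSrc, fsSrc, &vertBlob, &fragBlob,
 *                                           &cacheBlob, vtxFmt,
 *                                           BlendFactor::SrcAlpha,
 *                                           BlendFactor::InvSrcAlpha,
 *                                           Primitive::TriStrips,
 *                                           ZTest::LEqual,
 *                                           true,   // depthWrite
 *                                           true,   // colorWrite
 *                                           false,  // alphaWrite
 *                                           CullMode::Backface,
 *                                           false); // overwriteAlpha
 */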
boo::ObjToken<IShaderPipeline> VulkanDataFactory::Context::newTessellationShaderPipeline
(const char* vertSource, const char* fragSource, const char* controlSource, const char* evaluationSource,
 std::vector<unsigned int>* vertBlobOut, std::vector<unsigned int>* fragBlobOut,
 std::vector<unsigned int>* controlBlobOut, std::vector<unsigned int>* evaluationBlobOut,
 std::vector<unsigned char>* pipelineBlob, const boo::ObjToken<IVertexFormat>& vtxFmt,
 BlendFactor srcFac, BlendFactor dstFac, uint32_t patchSize,
 ZTest depthTest, bool depthWrite, bool colorWrite,
 bool alphaWrite, CullMode culling, bool overwriteAlpha)
{
    VulkanDataFactoryImpl& factory = static_cast<VulkanDataFactoryImpl&>(m_parent);
    if (!factory.m_ctx->m_features.tessellationShader)
        Log.report(logvisor::Fatal, "Device does not support tessellation shaders");
    if (patchSize > factory.m_ctx->m_gpuProps.limits.maxTessellationPatchSize)
        Log.report(logvisor::Fatal, "Device supports %d patch vertices, %d requested",
                   int(factory.m_ctx->m_gpuProps.limits.maxTessellationPatchSize), int(patchSize));
    VulkanShareableShader::Token vertShader = factory.PrepareShaderStage(vertSource, vertBlobOut, EShLangVertex);
    VulkanShareableShader::Token fragShader = factory.PrepareShaderStage(fragSource, fragBlobOut, EShLangFragment);
    VulkanShareableShader::Token controlShader = factory.PrepareShaderStage(controlSource, controlBlobOut, EShLangTessControl);
    VulkanShareableShader::Token evaluationShader = factory.PrepareShaderStage(evaluationSource, evaluationBlobOut, EShLangTessEvaluation);

    VkPipelineCache pipelineCache = VK_NULL_HANDLE;
    if (pipelineBlob)
    {
        VkPipelineCacheCreateInfo cacheDataInfo = {};
        cacheDataInfo.sType = VK_STRUCTURE_TYPE_PIPELINE_CACHE_CREATE_INFO;
        cacheDataInfo.pNext = nullptr;
        cacheDataInfo.initialDataSize = pipelineBlob->size();
        if (cacheDataInfo.initialDataSize)
            cacheDataInfo.pInitialData = pipelineBlob->data();
        ThrowIfFailed(vk::CreatePipelineCache(factory.m_ctx->m_dev, &cacheDataInfo, nullptr, &pipelineCache));
    }
    VulkanShaderPipeline* retval =
        new VulkanTessellationShaderPipeline(m_data, factory.m_ctx, std::move(vertShader),
                                             std::move(fragShader), std::move(controlShader),
                                             std::move(evaluationShader), pipelineCache, vtxFmt, srcFac,
                                             dstFac, patchSize, depthTest, depthWrite, colorWrite,
                                             alphaWrite, overwriteAlpha, culling);
    if (pipelineBlob && pipelineBlob->empty())
    {
        size_t cacheSz = 0;
        ThrowIfFailed(vk::GetPipelineCacheData(factory.m_ctx->m_dev, pipelineCache, &cacheSz, nullptr));
        if (cacheSz)
        {
            pipelineBlob->resize(cacheSz);
            ThrowIfFailed(vk::GetPipelineCacheData(factory.m_ctx->m_dev, pipelineCache,
                                                   &cacheSz, pipelineBlob->data()));
            pipelineBlob->resize(cacheSz);
        }
    }
    return {retval};
}
VulkanDataFactory::Context::Context(VulkanDataFactory& parent __BooTraceArgs)
: m_parent(parent), m_data(new VulkanData(static_cast<VulkanDataFactoryImpl&>(parent) __BooTraceArgsUse)) {}
VulkanDataFactory::Context::~Context() {}
boo::ObjToken<IGraphicsBufferS>
VulkanDataFactory::Context::newStaticBuffer(BufferUse use, const void* data, size_t stride, size_t count)
{
    VulkanDataFactoryImpl& factory = static_cast<VulkanDataFactoryImpl&>(m_parent);
    return {new VulkanGraphicsBufferS(m_data, use, factory.m_ctx, data, stride, count)};
}
boo::ObjToken<IGraphicsBufferD>
VulkanDataFactory::Context::newDynamicBuffer(BufferUse use, size_t stride, size_t count)
{
    VulkanDataFactoryImpl& factory = static_cast<VulkanDataFactoryImpl&>(m_parent);
    /* the command queue is not needed to construct a dynamic buffer */
    return {new VulkanGraphicsBufferD<BaseGraphicsData>(m_data, use, factory.m_ctx, stride, count)};
}
boo::ObjToken<ITextureS>
VulkanDataFactory::Context::newStaticTexture(size_t width, size_t height, size_t mips,
TextureFormat fmt, TextureClampMode clampMode,
const void* data, size_t sz)
{
    VulkanDataFactoryImpl& factory = static_cast<VulkanDataFactoryImpl&>(m_parent);
    return {new VulkanTextureS(m_data, factory.m_ctx, width, height, mips, fmt, clampMode, data, sz)};
}
boo::ObjToken<ITextureSA>
VulkanDataFactory::Context::newStaticArrayTexture(size_t width, size_t height, size_t layers, size_t mips,
TextureFormat fmt, TextureClampMode clampMode,
const void* data, size_t sz)
{
    VulkanDataFactoryImpl& factory = static_cast<VulkanDataFactoryImpl&>(m_parent);
    return {new VulkanTextureSA(m_data, factory.m_ctx, width, height, layers, mips, fmt, clampMode, data, sz)};
}
boo::ObjToken<ITextureD>
VulkanDataFactory::Context::newDynamicTexture(size_t width, size_t height, TextureFormat fmt,
TextureClampMode clampMode)
{
    VulkanDataFactoryImpl& factory = static_cast<VulkanDataFactoryImpl&>(m_parent);
    VulkanCommandQueue* q = static_cast<VulkanCommandQueue*>(factory.m_parent->getCommandQueue());
    return {new VulkanTextureD(m_data, q, width, height, fmt, clampMode)};
}
boo::ObjToken<ITextureR>
VulkanDataFactory::Context::newRenderTexture(size_t width, size_t height, TextureClampMode clampMode,
size_t colorBindCount, size_t depthBindCount)
{
    VulkanDataFactoryImpl& factory = static_cast<VulkanDataFactoryImpl&>(m_parent);
    VulkanCommandQueue* q = static_cast<VulkanCommandQueue*>(factory.m_parent->getCommandQueue());
    return {new VulkanTextureR(m_data, q, width, height, clampMode, colorBindCount, depthBindCount)};
}
boo::ObjToken<IVertexFormat>
VulkanDataFactory::Context::newVertexFormat(size_t elementCount,
const VertexElementDescriptor* elements,
size_t baseVert, size_t baseInst)
{
    /* baseVert/baseInst are ignored here; this backend takes them via newShaderDataBinding() */
    return {new struct VulkanVertexFormat(m_data, elementCount, elements)};
}
boo::ObjToken<IShaderDataBinding>
VulkanDataFactory::Context::newShaderDataBinding(
const boo::ObjToken<IShaderPipeline>& pipeline,
const boo::ObjToken<IVertexFormat>& /*vtxFormat*/,
const boo::ObjToken<IGraphicsBuffer>& vbuf,
const boo::ObjToken<IGraphicsBuffer>& instVbuf,
const boo::ObjToken<IGraphicsBuffer>& ibuf,
size_t ubufCount, const boo::ObjToken<IGraphicsBuffer>* ubufs, const PipelineStage* /*ubufStages*/,
const size_t* ubufOffs, const size_t* ubufSizes,
size_t texCount, const boo::ObjToken<ITexture>* texs,
2017-03-17 23:31:51 +00:00
const int* bindIdxs, const bool* bindDepth,
2016-12-11 01:50:26 +00:00
size_t baseVert, size_t baseInst)
2016-02-21 06:27:54 +00:00
{
2017-03-05 22:59:58 +00:00
VulkanDataFactoryImpl& factory = static_cast<VulkanDataFactoryImpl&>(m_parent);
2018-05-27 21:35:21 +00:00
return {new VulkanShaderDataBinding(m_data, factory, pipeline, vbuf, instVbuf, ibuf,
ubufCount, ubufs, ubufOffs, ubufSizes, texCount, texs,
bindIdxs, bindDepth, baseVert, baseInst)};
2016-02-21 06:27:54 +00:00
}
void VulkanDataFactoryImpl::commitTransaction
(const std::function<bool(IGraphicsDataFactory::Context&)>& trans __BooTraceArgs)
{
    Context ctx(*this __BooTraceArgsUse);
    if (!trans(ctx))
        return;

    VulkanData* data = ctx.m_data.cast<VulkanData>();

    /* size up resources */
    VkDeviceSize constantMemSizes[3] = {};
    VkDeviceSize texMemSize = 0;
    if (data->m_SBufs)
        for (IGraphicsBufferS& buf : *data->m_SBufs)
        {
            auto& cbuf = static_cast<VulkanGraphicsBufferS&>(buf);
            if (cbuf.m_use == BufferUse::Null)
                continue;
            VkDeviceSize& sz = constantMemSizes[int(cbuf.m_use) - 1];
            sz = cbuf.sizeForGPU(m_ctx, sz);
        }
    if (data->m_DBufs)
        for (IGraphicsBufferD& buf : *data->m_DBufs)
        {
            auto& cbuf = static_cast<VulkanGraphicsBufferD<BaseGraphicsData>&>(buf);
            if (cbuf.m_use == BufferUse::Null)
                continue;
            VkDeviceSize& sz = constantMemSizes[int(cbuf.m_use) - 1];
            sz = cbuf.sizeForGPU(m_ctx, sz);
        }
    if (data->m_DTexs)
        for (ITextureD& tex : *data->m_DTexs)
        {
            auto& ctex = static_cast<VulkanTextureD&>(tex);
            texMemSize = ctex.sizeForGPU(m_ctx, texMemSize);
        }

    std::unique_lock<std::mutex> qlk(m_ctx->m_queueLock);

    /* allocate memory and place buffers */
    for (int i=0 ; i<3 ; ++i)
    {
        if (constantMemSizes[i])
        {
            AllocatedBuffer& poolBuf = data->m_constantBuffers[i];

            VkBufferCreateInfo createInfo = {};
            createInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
            createInfo.size = constantMemSizes[i];
            createInfo.usage = USE_TABLE[i+1];
            createInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
            uint8_t* mappedData = reinterpret_cast<uint8_t*>(poolBuf.createCPUtoGPU(m_ctx, &createInfo));

            if (data->m_SBufs)
                for (IGraphicsBufferS& buf : *data->m_SBufs)
                {
                    auto& cbuf = static_cast<VulkanGraphicsBufferS&>(buf);
                    if (int(cbuf.m_use) - 1 != i)
                        continue;
                    cbuf.placeForGPU(poolBuf.m_buffer, mappedData);
                }

            if (data->m_DBufs)
                for (IGraphicsBufferD& buf : *data->m_DBufs)
                {
                    auto& cbuf = static_cast<VulkanGraphicsBufferD<BaseGraphicsData>&>(buf);
                    if (int(cbuf.m_use) - 1 != i)
                        continue;
                    cbuf.placeForGPU(poolBuf.m_buffer, mappedData);
                }
        }
    }

    /* place static textures */
    if (data->m_STexs)
        for (ITextureS& tex : *data->m_STexs)
            static_cast<VulkanTextureS&>(tex).placeForGPU(m_ctx);
    if (data->m_SATexs)
        for (ITextureSA& tex : *data->m_SATexs)
            static_cast<VulkanTextureSA&>(tex).placeForGPU(m_ctx);

    /* allocate memory and place dynamic textures */
    if (texMemSize)
    {
        AllocatedBuffer& poolBuf = data->m_texStagingBuffer;

        VkBufferCreateInfo createInfo = {};
        createInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
        createInfo.size = texMemSize;
        createInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT;
        createInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
        uint8_t* mappedData = reinterpret_cast<uint8_t*>(poolBuf.createCPUtoGPU(m_ctx, &createInfo));

        if (data->m_DTexs)
            for (ITextureD& tex : *data->m_DTexs)
                static_cast<VulkanTextureD&>(tex).placeForGPU(m_ctx, poolBuf.m_buffer, mappedData);
    }

    /* Execute static uploads */
    ThrowIfFailed(vk::EndCommandBuffer(m_ctx->m_loadCmdBuf));
    VkSubmitInfo submitInfo = {};
    submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submitInfo.commandBufferCount = 1;
    submitInfo.pCommandBuffers = &m_ctx->m_loadCmdBuf;

    /* Take exclusive lock here and submit queue */
    ThrowIfFailed(vk::QueueWaitIdle(m_ctx->m_queue));
    ThrowIfFailed(vk::QueueSubmit(m_ctx->m_queue, 1, &submitInfo, VK_NULL_HANDLE));

    /* Commit data bindings (create descriptor sets) */
    if (data->m_SBinds)
        for (IShaderDataBinding& bind : *data->m_SBinds)
            static_cast<VulkanShaderDataBinding&>(bind).commit(m_ctx);

    /* Wait for uploads to complete */
    ThrowIfFailed(vk::QueueWaitIdle(m_ctx->m_queue));
    qlk.unlock();

    /* Reset command buffer */
    ThrowIfFailed(vk::ResetCommandBuffer(m_ctx->m_loadCmdBuf, 0));
    VkCommandBufferBeginInfo cmdBufBeginInfo = {};
    cmdBufBeginInfo.sType = VK_STRUCTURE_TYPE_COMMAND_BUFFER_BEGIN_INFO;
    cmdBufBeginInfo.flags = VK_COMMAND_BUFFER_USAGE_ONE_TIME_SUBMIT_BIT;
    ThrowIfFailed(vk::BeginCommandBuffer(m_ctx->m_loadCmdBuf, &cmdBufBeginInfo));

    /* Delete upload objects */
    if (data->m_STexs)
        for (ITextureS& tex : *data->m_STexs)
            static_cast<VulkanTextureS&>(tex).deleteUploadObjects();
    if (data->m_SATexs)
        for (ITextureSA& tex : *data->m_SATexs)
            static_cast<VulkanTextureSA&>(tex).deleteUploadObjects();
}
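
/* Hypothetical transaction showing how resources built in a Context are
 * sized, placed, uploaded, and committed in one shot by the method above
 * ('vbo'/'tex' and the literal arguments are illustrative):
 *
 *     factory->commitTransaction([&](IGraphicsDataFactory::Context& ctx)
 *     {
 *         vbo = ctx.newStaticBuffer(BufferUse::Vertex, verts, sizeof(Vert), 4);
 *         tex = ctx.newStaticTexture(w, h, 1, TextureFormat::RGBA8,
 *                                    TextureClampMode::Repeat, pixels, w * h * 4);
 *         return true;   // returning false abandons the transaction
 *     } BooTrace);
 */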
boo::ObjToken<IGraphicsBufferD>
VulkanDataFactoryImpl::newPoolBuffer(BufferUse use, size_t stride, size_t count __BooTraceArgs)
{
    boo::ObjToken<BaseGraphicsPool> pool(new VulkanPool(*this __BooTraceArgsUse));
    VulkanPool* cpool = pool.cast<VulkanPool>();
    VulkanGraphicsBufferD<BaseGraphicsPool>* retval =
        new VulkanGraphicsBufferD<BaseGraphicsPool>(pool, use, m_ctx, stride, count);

    VkDeviceSize size = retval->sizeForGPU(m_ctx, 0);

    /* allocate memory */
    if (size)
    {
        AllocatedBuffer& poolBuf = cpool->m_constantBuffer;
        VkBufferCreateInfo createInfo = {};
        createInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
        createInfo.size = size;
        createInfo.usage = USE_TABLE[int(use)];
        createInfo.sharingMode = VK_SHARING_MODE_EXCLUSIVE;
        uint8_t* mappedData = reinterpret_cast<uint8_t*>(poolBuf.createCPUtoGPU(m_ctx, &createInfo));
        retval->placeForGPU(poolBuf.m_buffer, mappedData);
    }
    return {retval};
}
void VulkanCommandQueue::execute()
{
    if (!m_running)
        return;

    /* Stage dynamic uploads */
    VulkanDataFactoryImpl* gfxF = static_cast<VulkanDataFactoryImpl*>(m_parent->getDataFactory());
    std::unique_lock<std::recursive_mutex> datalk(gfxF->m_dataMutex);
    if (gfxF->m_dataHead)
    {
        for (BaseGraphicsData& d : *gfxF->m_dataHead)
        {
            if (d.m_DBufs)
                for (IGraphicsBufferD& b : *d.m_DBufs)
                    static_cast<VulkanGraphicsBufferD<BaseGraphicsData>&>(b).update(m_fillBuf);
            if (d.m_DTexs)
                for (ITextureD& t : *d.m_DTexs)
                    static_cast<VulkanTextureD&>(t).update(m_fillBuf);
        }
    }
    if (gfxF->m_poolHead)
    {
        for (BaseGraphicsPool& p : *gfxF->m_poolHead)
        {
            if (p.m_DBufs)
                for (IGraphicsBufferD& b : *p.m_DBufs)
                    static_cast<VulkanGraphicsBufferD<BaseGraphicsData>&>(b).update(m_fillBuf);
        }
    }
    datalk.unlock();

    /* Perform dynamic uploads */
    std::unique_lock<std::mutex> lk(m_ctx->m_queueLock);
    if (!m_dynamicNeedsReset)
    {
        vk::EndCommandBuffer(m_dynamicCmdBufs[m_fillBuf]);

        vk::WaitForFences(m_ctx->m_dev, 1, &m_dynamicBufFence, VK_FALSE, UINT64_MAX);
        vk::ResetFences(m_ctx->m_dev, 1, &m_dynamicBufFence);

        VkSubmitInfo submitInfo = {};
        submitInfo.pNext = nullptr;
        submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
        submitInfo.waitSemaphoreCount = 0;
        submitInfo.pWaitSemaphores = nullptr;
        submitInfo.pWaitDstStageMask = nullptr;
        submitInfo.commandBufferCount = 1;
        submitInfo.pCommandBuffers = &m_dynamicCmdBufs[m_fillBuf];
        submitInfo.signalSemaphoreCount = 0;
        submitInfo.pSignalSemaphores = nullptr;
        ThrowIfFailed(vk::QueueSubmit(m_ctx->m_queue, 1, &submitInfo, m_dynamicBufFence));
    }

    vk::CmdEndRenderPass(m_cmdBufs[m_fillBuf]);

    /* Check on fence */
    if (m_submitted && vk::GetFenceStatus(m_ctx->m_dev, m_drawCompleteFence) == VK_NOT_READY)
    {
        /* Abandon this list (renderer too slow) */
        resetCommandBuffer();
        m_dynamicNeedsReset = true;
        m_resolveDispSource = nullptr;

        /* Clear dead data */
        m_drawResTokens[m_fillBuf].clear();
        return;
    }
    m_submitted = false;
    vk::ResetFences(m_ctx->m_dev, 1, &m_drawCompleteFence);

    /* Perform texture and swap-chain resizes */
    if (m_ctx->_resizeSwapChains() || m_texResizes.size())
    {
        for (const auto& resize : m_texResizes)
        {
            if (m_boundTarget.get() == resize.first)
                m_boundTarget.reset();
            resize.first->resize(m_ctx, resize.second.first, resize.second.second);
        }
        m_texResizes.clear();
        resetCommandBuffer();
        m_dynamicNeedsReset = true;
        m_resolveDispSource = nullptr;
        return;
    }

    /* Clear dead data */
    m_drawResTokens[m_drawBuf].clear();
    m_drawBuf = m_fillBuf;
    m_fillBuf ^= 1;

    /* Queue the command buffer for execution */
    VkPipelineStageFlags pipeStageFlags = VK_PIPELINE_STAGE_COLOR_ATTACHMENT_OUTPUT_BIT;
    VkSubmitInfo submitInfo = {};
    submitInfo.pNext = nullptr;
    submitInfo.sType = VK_STRUCTURE_TYPE_SUBMIT_INFO;
    submitInfo.waitSemaphoreCount = 0;
    submitInfo.pWaitSemaphores = nullptr;
    submitInfo.pWaitDstStageMask = &pipeStageFlags;
    submitInfo.commandBufferCount = 1;
    submitInfo.pCommandBuffers = &m_cmdBufs[m_drawBuf];
    submitInfo.signalSemaphoreCount = 0;
    submitInfo.pSignalSemaphores = nullptr;
    if (_resolveDisplay())
    {
        submitInfo.waitSemaphoreCount = 1;
        submitInfo.pWaitSemaphores = &m_swapChainReadySem;
        submitInfo.signalSemaphoreCount = 1;
        submitInfo.pSignalSemaphores = &m_drawCompleteSem;
    }
    ThrowIfFailed(vk::EndCommandBuffer(m_cmdBufs[m_drawBuf]));
    ThrowIfFailed(vk::QueueSubmit(m_ctx->m_queue, 1, &submitInfo, m_drawCompleteFence));
    m_submitted = true;

    if (submitInfo.signalSemaphoreCount)
    {
        VulkanContext::Window::SwapChain& thisSc = m_windowCtx->m_swapChains[m_windowCtx->m_activeSwapChain];
        VkPresentInfoKHR present;
        present.sType = VK_STRUCTURE_TYPE_PRESENT_INFO_KHR;
        present.pNext = nullptr;
        present.swapchainCount = 1;
        present.pSwapchains = &thisSc.m_swapChain;
        present.pImageIndices = &thisSc.m_backBuf;
        present.waitSemaphoreCount = 1;
        present.pWaitSemaphores = &m_drawCompleteSem;
        present.pResults = nullptr;
        ThrowIfFailed(vk::QueuePresentKHR(m_ctx->m_queue, &present));
}
resetCommandBuffer();
resetDynamicCommandBuffer();
}
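
/* One client frame against this queue under the double-buffer scheme
 * (illustrative only; 'rt', 'rect', and 'binding' come from the factory):
 *
 *     queue->setRenderTarget(rt);
 *     queue->setViewport(rect, 0.f, 1.f);
 *     queue->setShaderDataBinding(binding);
 *     queue->draw(0, 4);
 *     queue->resolveDisplay(rt);
 *     queue->execute();   // submit fill buffer, present, swap fill/draw
 */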
std::unique_ptr<IGraphicsCommandQueue> _NewVulkanCommandQueue(VulkanContext* ctx, VulkanContext::Window* windowCtx,
                                                              IGraphicsContext* parent)
{
    return std::make_unique<VulkanCommandQueue>(ctx, windowCtx, parent);
}

std::unique_ptr<IGraphicsDataFactory> _NewVulkanDataFactory(IGraphicsContext* parent, VulkanContext* ctx)
{
    return std::make_unique<VulkanDataFactoryImpl>(parent, ctx);
}
}