mirror of https://github.com/AxioDL/metaforce.git
Start of Vulkan backend (not yet working)
This commit is contained in:
parent
42dba1148d
commit
e71baa686a
|
@ -28,22 +28,31 @@ else()
|
||||||
endif()
|
endif()
|
||||||
|
|
||||||
if(APPLE)
|
if(APPLE)
|
||||||
target_sources(visigen PRIVATE MainMac.mm VISIRendererMetal.mm VISIRendererMetal.hh)
|
target_sources(visigen PRIVATE main.cpp metal/VISIRendererMetal.mm metal/VISIRendererMetal.hh)
|
||||||
set_source_files_properties(MainMac.mm PROPERTIES COMPILE_FLAGS -fobjc-arc)
|
set_source_files_properties(metal/VISIRendererMetal.mm PROPERTIES COMPILE_FLAGS -fobjc-arc)
|
||||||
set_source_files_properties(VISIRendererMetal.mm PROPERTIES COMPILE_FLAGS -fobjc-arc)
|
|
||||||
find_library(METAL_LIBRARY Metal REQUIRED)
|
find_library(METAL_LIBRARY Metal REQUIRED)
|
||||||
target_link_libraries(visigen PRIVATE ${METAL_LIBRARY})
|
target_link_libraries(visigen PRIVATE ${METAL_LIBRARY})
|
||||||
elseif(WIN32)
|
elseif(WIN32)
|
||||||
target_sources(visigen PRIVATE
|
target_sources(visigen PRIVATE
|
||||||
MainWin.cpp
|
MainWin.cpp
|
||||||
VISIRendererOpenGL.cpp
|
opengl/VISIRendererOpenGL.cpp
|
||||||
VISIRendererOpenGL.hpp)
|
opengl/VISIRendererOpenGL.hpp
|
||||||
else()
|
vulkan/VISIRendererVulkan.cpp
|
||||||
|
vulkan/VISIRendererVulkan.hpp
|
||||||
|
vulkan/utils.cpp)
|
||||||
|
else ()
|
||||||
|
bintoc(vk_fs.cpp vulkan/shader.frag.spv VK_FRAGMENT_SPV)
|
||||||
|
bintoc(vk_vs.cpp vulkan/shader.vert.spv VK_VERTEX_SPV)
|
||||||
target_sources(visigen PRIVATE
|
target_sources(visigen PRIVATE
|
||||||
MainXlib.cpp
|
main.cpp
|
||||||
VISIRendererOpenGL.cpp
|
opengl/VISIRendererOpenGL.cpp
|
||||||
VISIRendererOpenGL.hpp)
|
opengl/VISIRendererOpenGL.hpp
|
||||||
endif()
|
vulkan/VISIRendererVulkan.cpp
|
||||||
|
vulkan/VISIRendererVulkan.hpp
|
||||||
|
vulkan/utils.cpp
|
||||||
|
vk_fs.cpp
|
||||||
|
vk_vs.cpp)
|
||||||
|
endif ()
|
||||||
|
|
||||||
target_link_libraries(visigen PRIVATE
|
target_link_libraries(visigen PRIVATE
|
||||||
athena-core
|
athena-core
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
#include "VISIRendererOpenGL.hpp"
|
#include "opengl/VISIRendererOpenGL.hpp"
|
||||||
#include <Windows.h>
|
#include <Windows.h>
|
||||||
#include <WinUser.h>
|
#include <WinUser.h>
|
||||||
#include <Shlwapi.h>
|
#include <Shlwapi.h>
|
||||||
|
|
|
@ -1,4 +1,4 @@
|
||||||
#include "VISIRendererOpenGL.hpp"
|
#include "opengl/VISIRendererOpenGL.hpp"
|
||||||
#include <X11/Xlib.h>
|
#include <X11/Xlib.h>
|
||||||
#include <X11/Xatom.h>
|
#include <X11/Xatom.h>
|
||||||
#include <GL/glx.h>
|
#include <GL/glx.h>
|
||||||
|
|
|
@ -1,68 +0,0 @@
|
||||||
#include <metal_stdlib>
|
|
||||||
#include <simd/simd.h>
|
|
||||||
|
|
||||||
#include "ShaderTypes.h"
|
|
||||||
|
|
||||||
using namespace metal;
|
|
||||||
|
|
||||||
/*static const matrix_float4x4 LookMATs[] = {
|
|
||||||
{// Forward
|
|
||||||
{1.f, 0.f, 0.f, 0.f},
|
|
||||||
{0.f, 0.f, 1.f, 0.f},
|
|
||||||
{0.f, -1.f, 0.f, 0.f},
|
|
||||||
{0.f, 0.f, 0.f, 1.f}},
|
|
||||||
{// Backward
|
|
||||||
{-1.f, 0.f, 0.f, 0.f},
|
|
||||||
{0.f, 0.f, 1.f, 0.f},
|
|
||||||
{0.f, 1.f, 0.f, 0.f},
|
|
||||||
{0.f, 0.f, 0.f, 1.f}},
|
|
||||||
{// Up
|
|
||||||
{1.f, 0.f, 0.f, 0.f},
|
|
||||||
{0.f, -1.f, 0.f, 0.f},
|
|
||||||
{0.f, 0.f, -1.f, 0.f},
|
|
||||||
{0.f, 0.f, 0.f, 1.f}},
|
|
||||||
{// Down
|
|
||||||
{1.f, 0.f, 0.f, 0.f},
|
|
||||||
{0.f, 1.f, 0.f, 0.f},
|
|
||||||
{0.f, 0.f, 1.f, 0.f},
|
|
||||||
{0.f, 0.f, 0.f, 1.f}},
|
|
||||||
{// Left
|
|
||||||
{0.f, 1.f, 0.f, 0.f},
|
|
||||||
{0.f, 0.f, 1.f, 0.f},
|
|
||||||
{1.f, 0.f, 0.f, 0.f},
|
|
||||||
{0.f, 0.f, 0.f, 1.f}},
|
|
||||||
{// Right
|
|
||||||
{0.f, -1.f, 0.f, 0.f},
|
|
||||||
{0.f, 0.f, 1.f, 0.f},
|
|
||||||
{-1.f, 0.f, 0.f, 0.f},
|
|
||||||
{0.f, 0.f, 0.f, 1.f}},
|
|
||||||
};*/
|
|
||||||
|
|
||||||
typedef struct
|
|
||||||
{
|
|
||||||
float4 position [[position]];
|
|
||||||
float4 color;
|
|
||||||
} ColorInOut;
|
|
||||||
|
|
||||||
typedef struct
|
|
||||||
{
|
|
||||||
float3 position [[attribute(VertexAttributePosition)]];
|
|
||||||
float4 color [[attribute(VertexAttributeColor)]];
|
|
||||||
} Vertex;
|
|
||||||
|
|
||||||
vertex ColorInOut vertexShader(Vertex in [[stage_in]], constant Uniforms& uniforms [[buffer(BufferIndexUniforms)]])
|
|
||||||
{
|
|
||||||
ColorInOut out;
|
|
||||||
|
|
||||||
float4 position = float4(in.position, 1.0);
|
|
||||||
position.y *= -1.f;
|
|
||||||
out.position = uniforms.projectionMatrix * uniforms.modelViewMatrix * position;
|
|
||||||
out.color = in.color;
|
|
||||||
|
|
||||||
return out;
|
|
||||||
}
|
|
||||||
|
|
||||||
fragment float4 fragmentShader(ColorInOut in [[stage_in]])
|
|
||||||
{
|
|
||||||
return in.color;
|
|
||||||
}
|
|
|
@ -31,15 +31,15 @@ const VISIBuilder::Leaf& VISIBuilder::PVSRenderCache::GetLeaf(const zeus::CVecto
|
||||||
bool needsTransparent = false;
|
bool needsTransparent = false;
|
||||||
m_renderer.RenderPVSOpaque(RGBABuf.get(), needsTransparent);
|
m_renderer.RenderPVSOpaque(RGBABuf.get(), needsTransparent);
|
||||||
|
|
||||||
// size_t outsize;
|
size_t outsize;
|
||||||
// auto* buf = VISIRenderer::makePNGBuffer(reinterpret_cast<unsigned char*>(RGBABuf.get()), 768, 512, &outsize);
|
auto* buf = VISIRenderer::makePNGBuffer(reinterpret_cast<unsigned char*>(RGBABuf.get()), 768, 512, &outsize);
|
||||||
// auto filename = fmt::format(FMT_STRING("outx{}.png"), m_frame++);
|
auto filename = fmt::format(FMT_STRING("/tmp/visigen/outx{}.png"), m_frame++);
|
||||||
// std::cout << "Rendering " << filename << std::endl;
|
std::cout << "Rendering " << filename << std::endl;
|
||||||
// std::ofstream fout;
|
std::ofstream fout;
|
||||||
// fout.open(filename, std::ios::binary | std::ios::out);
|
fout.open(filename, std::ios::binary | std::ios::out);
|
||||||
// fout.write(static_cast<const char*>(buf), outsize);
|
fout.write(static_cast<const char*>(buf), outsize);
|
||||||
// fout.close();
|
fout.close();
|
||||||
// free(buf);
|
free(buf);
|
||||||
|
|
||||||
std::unique_ptr<Leaf> leafOut = std::make_unique<Leaf>();
|
std::unique_ptr<Leaf> leafOut = std::make_unique<Leaf>();
|
||||||
for (unsigned i = 0; i < 768 * 512; ++i) {
|
for (unsigned i = 0; i < 768 * 512; ++i) {
|
||||||
|
|
|
@ -70,7 +70,7 @@ public:
|
||||||
};
|
};
|
||||||
|
|
||||||
VISIRenderer(int argc, const hecl::SystemChar** argv) : m_argc(argc), m_argv(argv) {}
|
VISIRenderer(int argc, const hecl::SystemChar** argv) : m_argc(argc), m_argv(argv) {}
|
||||||
void Run(FPercent updatePercent);
|
virtual void Run(FPercent updatePercent);
|
||||||
void Terminate();
|
void Terminate();
|
||||||
virtual void RenderPVSOpaque(RGBA8* bufOut, bool& needTransparent) = 0;
|
virtual void RenderPVSOpaque(RGBA8* bufOut, bool& needTransparent) = 0;
|
||||||
virtual void RenderPVSTransparent(const std::function<void(int)>& passFunc) = 0;
|
virtual void RenderPVSTransparent(const std::function<void(int)>& passFunc) = 0;
|
||||||
|
|
|
@ -1,13 +1,10 @@
|
||||||
#include "../version.h"
|
#include "../version.h"
|
||||||
#include "VISIRendererMetal.hh"
|
|
||||||
#include "athena/Global.hpp"
|
#include "athena/Global.hpp"
|
||||||
#include "logvisor/logvisor.hpp"
|
#include "logvisor/logvisor.hpp"
|
||||||
#include <AppKit/AppKit.h>
|
#ifdef __APPLE__
|
||||||
#include <MetalKit/MetalKit.h>
|
#include "metal/VISIRendererMetal.hh"
|
||||||
#include <thread>
|
#else
|
||||||
|
#include "vulkan/VISIRendererVulkan.hpp"
|
||||||
#if !__has_feature(objc_arc)
|
|
||||||
#error ARC Required
|
|
||||||
#endif
|
#endif
|
||||||
|
|
||||||
static logvisor::Module AthenaLog("Athena");
|
static logvisor::Module AthenaLog("Athena");
|
||||||
|
@ -25,9 +22,11 @@ int main(int argc, const char **argv) {
|
||||||
logvisor::RegisterStandardExceptions();
|
logvisor::RegisterStandardExceptions();
|
||||||
logvisor::RegisterConsoleLogger();
|
logvisor::RegisterConsoleLogger();
|
||||||
atSetExceptionHandler(AthenaExc);
|
atSetExceptionHandler(AthenaExc);
|
||||||
|
#ifdef __APPLE__
|
||||||
VISIRendererMetal renderer(argc, argv);
|
VISIRendererMetal renderer(argc, argv);
|
||||||
@autoreleasepool {
|
#else
|
||||||
renderer.Run(nullptr);
|
VISIRendererVulkan renderer(argc, argv);
|
||||||
}
|
#endif
|
||||||
|
renderer.Run(nullptr);
|
||||||
return renderer.ReturnVal();
|
return renderer.ReturnVal();
|
||||||
}
|
}
|
|
@ -0,0 +1,34 @@
|
||||||
|
#include <metal_stdlib>
|
||||||
|
#include <simd/simd.h>
|
||||||
|
|
||||||
|
#include "ShaderTypes.h"
|
||||||
|
|
||||||
|
using namespace metal;
|
||||||
|
|
||||||
|
typedef struct
|
||||||
|
{
|
||||||
|
float4 position [[position]];
|
||||||
|
float4 color;
|
||||||
|
} ColorInOut;
|
||||||
|
|
||||||
|
typedef struct
|
||||||
|
{
|
||||||
|
float3 position [[attribute(VertexAttributePosition)]];
|
||||||
|
float4 color [[attribute(VertexAttributeColor)]];
|
||||||
|
} Vertex;
|
||||||
|
|
||||||
|
vertex ColorInOut vertexShader(Vertex in [[stage_in]], constant Uniforms& uniforms [[buffer(BufferIndexUniforms)]])
|
||||||
|
{
|
||||||
|
ColorInOut out;
|
||||||
|
|
||||||
|
float4 position = float4(in.position, 1.0);
|
||||||
|
out.position = uniforms.projectionMatrix * uniforms.modelViewMatrix * position;
|
||||||
|
out.color = in.color;
|
||||||
|
|
||||||
|
return out;
|
||||||
|
}
|
||||||
|
|
||||||
|
fragment float4 fragmentShader(ColorInOut in [[stage_in]])
|
||||||
|
{
|
||||||
|
return in.color;
|
||||||
|
}
|
|
@ -1,9 +1,11 @@
|
||||||
#pragma once
|
#pragma once
|
||||||
|
|
||||||
#include "VISIRenderer.hpp"
|
#include "../VISIRenderer.hpp"
|
||||||
#import <AppKit/AppKit.h>
|
|
||||||
#import <Metal/Metal.h>
|
#import <Metal/Metal.h>
|
||||||
#import <MetalKit/MetalKit.h>
|
|
||||||
|
#if !__has_feature(objc_arc)
|
||||||
|
#error ARC Required
|
||||||
|
#endif
|
||||||
|
|
||||||
@interface MetalRenderer : NSObject
|
@interface MetalRenderer : NSObject
|
||||||
@end
|
@end
|
||||||
|
@ -19,8 +21,9 @@ public:
|
||||||
VISIRendererMetal(int argc, const hecl::SystemChar** argv) : VISIRenderer(argc, argv) {
|
VISIRendererMetal(int argc, const hecl::SystemChar** argv) : VISIRenderer(argc, argv) {
|
||||||
view = [[MetalRenderer alloc] init];
|
view = [[MetalRenderer alloc] init];
|
||||||
}
|
}
|
||||||
|
void Run(FPercent updatePercent) override;
|
||||||
void RenderPVSOpaque(RGBA8* out, bool& needTransparent) override;
|
void RenderPVSOpaque(RGBA8* out, bool& needTransparent) override;
|
||||||
void RenderPVSTransparent(const std::function<void(int)>& passFunc) override;
|
void RenderPVSTransparent(const std::function<void(int)>& passFunc) override;
|
||||||
void RenderPVSEntitiesAndLights(const std::function<void(int)>& passFunc,
|
void RenderPVSEntitiesAndLights(const std::function<void(int)>& passFunc,
|
||||||
const std::function<void(int, EPVSVisSetState)>& lightPassFunc) override;
|
const std::function<void(int, EPVSVisSetState)>& lightPassFunc) override;
|
||||||
};
|
};
|
|
@ -5,8 +5,11 @@
|
||||||
|
|
||||||
static zeus::CMatrix4f g_Proj;
|
static zeus::CMatrix4f g_Proj;
|
||||||
|
|
||||||
constexpr zeus::CMatrix4f VulkanCorrect(1.f, 0.f, 0.f, 0.f, 0.f, -1.f, 0.f, 0.f, 0.f, 0.f, 0.5f, 0.5f + FLT_EPSILON,
|
constexpr zeus::CMatrix4f DepthCorrect(
|
||||||
0.f, 0.f, 0.f, 1.f);
|
1.f, 0.f, 0.f, 0.f,
|
||||||
|
0.f, 1.f, 0.f, 0.f,
|
||||||
|
0.f, 0.f, 0.5f, 0.5f,
|
||||||
|
0.f, 0.f, 0.f, 1.f);
|
||||||
|
|
||||||
static void CalculateProjMatrix() {
|
static void CalculateProjMatrix() {
|
||||||
float znear = 0.2f;
|
float znear = 0.2f;
|
||||||
|
@ -25,9 +28,11 @@ static void CalculateProjMatrix() {
|
||||||
float fmn = zfar - znear;
|
float fmn = zfar - znear;
|
||||||
|
|
||||||
zeus::CMatrix4f mat2{
|
zeus::CMatrix4f mat2{
|
||||||
2.f * znear / rml, 0.f, rpl / rml, 0.f, 0.f, 2.f * znear / tmb, tpb / tmb, 0.f, 0.f, 0.f, -fpn / fmn,
|
2.f * znear / rml, 0.f, rpl / rml, 0.f,
|
||||||
-2.f * zfar * znear / fmn, 0.f, 0.f, -1.f, 0.f};
|
0.f, 2.f * znear / tmb, tpb / tmb, 0.f,
|
||||||
g_Proj = VulkanCorrect * mat2;
|
0.f, 0.f, -fpn / fmn, -2.f * zfar * znear / fmn,
|
||||||
|
0.f, 0.f, -1.f, 0.f};
|
||||||
|
g_Proj = DepthCorrect * mat2;
|
||||||
}
|
}
|
||||||
|
|
||||||
static constexpr std::array<uint16_t, 20> AABBIdxs{0, 1, 2, 3, 4, 5, 6, 7, 0, 1, 1, 7, 3, 5, 5, 0, 0, 2, 6, 4};
|
static constexpr std::array<uint16_t, 20> AABBIdxs{0, 1, 2, 3, 4, 5, 6, 7, 0, 1, 1, 7, 3, 5, 5, 0, 0, 2, 6, 4};
|
||||||
|
@ -238,8 +243,8 @@ using Vertex = VISIRenderer::Model::Vert;
|
||||||
[encoder setVertexBuffer:_uniformBuffer offset:0 atIndex:BufferIndexUniforms];
|
[encoder setVertexBuffer:_uniformBuffer offset:0 atIndex:BufferIndexUniforms];
|
||||||
|
|
||||||
for (int j = 0; j < 6; ++j) {
|
for (int j = 0; j < 6; ++j) {
|
||||||
GLint x = (j % 3) * 256;
|
NSUInteger x = (j % 3) * 256;
|
||||||
GLint y = (j / 3) * 256;
|
NSUInteger y = (j / 3) * 256;
|
||||||
[encoder setViewport:{x, y, 256, 256, 0, 1}];
|
[encoder setViewport:{x, y, 256, 256, 0, 1}];
|
||||||
if (j > 0) {
|
if (j > 0) {
|
||||||
[encoder setVertexBufferOffset:j * sizeof(Uniforms) atIndex:BufferIndexUniforms];
|
[encoder setVertexBufferOffset:j * sizeof(Uniforms) atIndex:BufferIndexUniforms];
|
||||||
|
@ -404,7 +409,7 @@ using Vertex = VISIRenderer::Model::Vert;
|
||||||
[encoder setVisibilityResultMode:MTLVisibilityResultModeBoolean offset:queryCount * sizeof(uint64_t)];
|
[encoder setVisibilityResultMode:MTLVisibilityResultModeBoolean offset:queryCount * sizeof(uint64_t)];
|
||||||
[encoder drawIndexedPrimitives:MTLPrimitiveTypeTriangleStrip
|
[encoder drawIndexedPrimitives:MTLPrimitiveTypeTriangleStrip
|
||||||
indexCount:20
|
indexCount:20
|
||||||
indexType:MTLIndexTypeUInt32
|
indexType:MTLIndexTypeUInt16
|
||||||
indexBuffer:_aabbIndexBuffer
|
indexBuffer:_aabbIndexBuffer
|
||||||
indexBufferOffset:0];
|
indexBufferOffset:0];
|
||||||
}
|
}
|
||||||
|
@ -454,6 +459,12 @@ using Vertex = VISIRenderer::Model::Vert;
|
||||||
}
|
}
|
||||||
@end
|
@end
|
||||||
|
|
||||||
|
void VISIRendererMetal::Run(FPercent updatePercent) {
|
||||||
|
@autoreleasepool {
|
||||||
|
VISIRenderer::Run(updatePercent);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
bool VISIRendererMetal::SetupShaders() { return [view setup]; }
|
bool VISIRendererMetal::SetupShaders() { return [view setup]; }
|
||||||
|
|
||||||
bool VISIRendererMetal::SetupVertexBuffersAndFormats() {
|
bool VISIRendererMetal::SetupVertexBuffersAndFormats() {
|
||||||
|
@ -477,4 +488,4 @@ void VISIRendererMetal::RenderPVSEntitiesAndLights(const std::function<void(int)
|
||||||
lights:m_lights
|
lights:m_lights
|
||||||
lightPassFunc:lightPassFunc
|
lightPassFunc:lightPassFunc
|
||||||
totalAABB:m_totalAABB];
|
totalAABB:m_totalAABB];
|
||||||
}
|
}
|
|
@ -1,6 +1,6 @@
|
||||||
#pragma once
|
#pragma once
|
||||||
|
|
||||||
#include "VISIRenderer.hpp"
|
#include "../VISIRenderer.hpp"
|
||||||
#include "boo/graphicsdev/glew.h"
|
#include "boo/graphicsdev/glew.h"
|
||||||
|
|
||||||
#include <zeus/CFrustum.hpp>
|
#include <zeus/CFrustum.hpp>
|
||||||
|
@ -39,4 +39,4 @@ public:
|
||||||
void RenderPVSTransparent(const std::function<void(int)>& passFunc) override;
|
void RenderPVSTransparent(const std::function<void(int)>& passFunc) override;
|
||||||
void RenderPVSEntitiesAndLights(const std::function<void(int)>& passFunc,
|
void RenderPVSEntitiesAndLights(const std::function<void(int)>& passFunc,
|
||||||
const std::function<void(int, EPVSVisSetState)>& lightPassFunc) override;
|
const std::function<void(int, EPVSVisSetState)>& lightPassFunc) override;
|
||||||
};
|
};
|
|
@ -0,0 +1,342 @@
|
||||||
|
#include "VISIRendererVulkan.hpp"
|
||||||
|
#include "utils.hpp"
|
||||||
|
|
||||||
|
extern "C" {
|
||||||
|
extern const uint8_t VK_FRAGMENT_SPV[];
|
||||||
|
extern const size_t VK_FRAGMENT_SPV_SZ;
|
||||||
|
extern const uint8_t VK_VERTEX_SPV[];
|
||||||
|
extern const size_t VK_VERTEX_SPV_SZ;
|
||||||
|
}
|
||||||
|
|
||||||
|
static char const* AppName = "VISIGen";
|
||||||
|
|
||||||
|
static zeus::CMatrix4f g_Proj;
|
||||||
|
|
||||||
|
constexpr zeus::CMatrix4f DepthCorrect(1.f, 0.f, 0.f, 0.f, 0.f, 1.f, 0.f, 0.f, 0.f, 0.f, 0.5f, 0.5f, 0.f, 0.f, 0.f,
|
||||||
|
1.f);
|
||||||
|
|
||||||
|
static void CalculateProjMatrix() {
|
||||||
|
float znear = 0.2f;
|
||||||
|
float zfar = 1000.f;
|
||||||
|
float tfov = std::tan(zeus::degToRad(90.f * 0.5f));
|
||||||
|
float top = znear * tfov;
|
||||||
|
float bottom = -top;
|
||||||
|
float right = znear * tfov;
|
||||||
|
float left = -right;
|
||||||
|
|
||||||
|
float rml = right - left;
|
||||||
|
float rpl = right + left;
|
||||||
|
float tmb = top - bottom;
|
||||||
|
float tpb = top + bottom;
|
||||||
|
float fpn = zfar + znear;
|
||||||
|
float fmn = zfar - znear;
|
||||||
|
|
||||||
|
zeus::CMatrix4f mat2{
|
||||||
|
2.f * znear / rml, 0.f, rpl / rml, 0.f, 0.f, 2.f * znear / tmb, tpb / tmb, 0.f, 0.f, 0.f, -fpn / fmn,
|
||||||
|
-2.f * zfar * znear / fmn, 0.f, 0.f, -1.f, 0.f};
|
||||||
|
g_Proj = DepthCorrect * mat2;
|
||||||
|
}
|
||||||
|
|
||||||
|
static constexpr std::array<uint16_t, 20> AABBIdxs{0, 1, 2, 3, 4, 5, 6, 7, 0, 1, 1, 7, 3, 5, 5, 0, 0, 2, 6, 4};
|
||||||
|
|
||||||
|
static const zeus::CMatrix4f LookMATs[] = {
|
||||||
|
{// Forward
|
||||||
|
1.f, 0.f, 0.f, 0.f, 0.f, 0.f, 1.f, 0.f, 0.f, -1.f, 0.f, 0.f, 0.f, 0.f, 0.f, 1.f},
|
||||||
|
{// Backward
|
||||||
|
-1.f, 0.f, 0.f, 0.f, 0.f, 0.f, 1.f, 0.f, 0.f, 1.f, 0.f, 0.f, 0.f, 0.f, 0.f, 1.f},
|
||||||
|
{// Up
|
||||||
|
1.f, 0.f, 0.f, 0.f, 0.f, -1.f, 0.f, 0.f, 0.f, 0.f, -1.f, 0.f, 0.f, 0.f, 0.f, 1.f},
|
||||||
|
{// Down
|
||||||
|
1.f, 0.f, 0.f, 0.f, 0.f, 1.f, 0.f, 0.f, 0.f, 0.f, 1.f, 0.f, 0.f, 0.f, 0.f, 1.f},
|
||||||
|
{// Left
|
||||||
|
0.f, 1.f, 0.f, 0.f, 0.f, 0.f, 1.f, 0.f, 1.f, 0.f, 0.f, 0.f, 0.f, 0.f, 0.f, 1.f},
|
||||||
|
{// Right
|
||||||
|
0.f, -1.f, 0.f, 0.f, 0.f, 0.f, 1.f, 0.f, -1.f, 0.f, 0.f, 0.f, 0.f, 0.f, 0.f, 1.f},
|
||||||
|
};
|
||||||
|
|
||||||
|
template <typename T>
|
||||||
|
static vk::UniqueShaderModule createShaderModule(const vk::UniqueDevice& device, const T* code, size_t size) {
|
||||||
|
return vk::su::assertSuccess(device->createShaderModuleUnique(
|
||||||
|
vk::ShaderModuleCreateInfo().setCodeSize(size).setPCode(reinterpret_cast<const uint32_t*>(code))));
|
||||||
|
}
|
||||||
|
|
||||||
|
static int rateDeviceSuitability(const vk::PhysicalDevice& device) {
|
||||||
|
int score = 0;
|
||||||
|
const auto deviceProperties = device.getProperties();
|
||||||
|
if (deviceProperties.deviceType == vk::PhysicalDeviceType::eDiscreteGpu) {
|
||||||
|
score += 1000;
|
||||||
|
} else if (deviceProperties.deviceType == vk::PhysicalDeviceType::eIntegratedGpu) {
|
||||||
|
score += 100;
|
||||||
|
}
|
||||||
|
// const auto deviceFeatures = device.getFeatures();
|
||||||
|
// if (!deviceFeatures.geometryShader) {
|
||||||
|
// return 0;
|
||||||
|
// }
|
||||||
|
return score;
|
||||||
|
}
|
||||||
|
|
||||||
|
static const vk::PhysicalDevice& pickPhysicalDevice(const std::vector<vk::PhysicalDevice>& devices) {
|
||||||
|
std::multimap<int, const vk::PhysicalDevice&> candidates;
|
||||||
|
for (const auto& device : devices) {
|
||||||
|
int score = rateDeviceSuitability(device);
|
||||||
|
candidates.insert(std::make_pair(score, device));
|
||||||
|
}
|
||||||
|
if (candidates.rbegin()->first > 0) {
|
||||||
|
return candidates.rbegin()->second;
|
||||||
|
}
|
||||||
|
assert(false && "failed to find a suitable GPU!");
|
||||||
|
return candidates.begin()->second;
|
||||||
|
}
|
||||||
|
|
||||||
|
bool VISIRendererVulkan::SetupShaders() {
|
||||||
|
instance = vk::su::createInstance(AppName, AppName, {}, vk::su::getInstanceExtensions());
|
||||||
|
physicalDevice = pickPhysicalDevice(vk::su::assertSuccess(instance->enumeratePhysicalDevices()));
|
||||||
|
auto queueFamilyIndex = vk::su::findGraphicsQueueFamilyIndex(physicalDevice.getQueueFamilyProperties());
|
||||||
|
device = vk::su::createDevice(physicalDevice, queueFamilyIndex, {});
|
||||||
|
commandPool = vk::su::createCommandPool(device.get(), queueFamilyIndex);
|
||||||
|
commandBuffer = vk::su::createCommandBuffer(device.get(), commandPool.get());
|
||||||
|
graphicsQueue = device->getQueue(queueFamilyIndex, 0);
|
||||||
|
vertexShader = createShaderModule(device, static_cast<const uint8_t*>(VK_VERTEX_SPV), VK_VERTEX_SPV_SZ);
|
||||||
|
fragmentShader = createShaderModule(device, static_cast<const uint8_t*>(VK_FRAGMENT_SPV), VK_FRAGMENT_SPV_SZ);
|
||||||
|
vk::Format colorFormat = vk::Format::eR8G8B8A8Unorm;
|
||||||
|
vk::Format depthFormat = vk::Format::eD16Unorm;
|
||||||
|
colorRenderPass = vk::su::createRenderPass(device.get(), colorFormat, depthFormat, vk::AttachmentLoadOp::eClear,
|
||||||
|
vk::AttachmentStoreOp::eStore, vk::AttachmentLoadOp::eClear,
|
||||||
|
vk::AttachmentStoreOp::eStore, vk::ImageLayout::eTransferSrcOptimal);
|
||||||
|
depthRenderPass = vk::su::createRenderPass(device.get(), colorFormat, depthFormat, vk::AttachmentLoadOp::eDontCare,
|
||||||
|
vk::AttachmentStoreOp::eDontCare, vk::AttachmentLoadOp::eLoad,
|
||||||
|
vk::AttachmentStoreOp::eStore, vk::ImageLayout::eGeneral);
|
||||||
|
pipelineCache = vk::su::assertSuccess(device->createPipelineCacheUnique(vk::PipelineCacheCreateInfo()));
|
||||||
|
|
||||||
|
descriptorSetLayout = vk::su::createDescriptorSetLayout(
|
||||||
|
device.get(), {{vk::DescriptorType::eUniformBufferDynamic, 1, vk::ShaderStageFlagBits::eVertex}});
|
||||||
|
descriptorPool = vk::su::createDescriptorPool(device.get(),
|
||||||
|
{vk::DescriptorPoolSize(vk::DescriptorType::eUniformBufferDynamic, 1)});
|
||||||
|
pipelineLayout = vk::su::assertSuccess(
|
||||||
|
device->createPipelineLayoutUnique(vk::PipelineLayoutCreateInfo({}, descriptorSetLayout.get())));
|
||||||
|
std::pair<vk::ShaderModule, vk::SpecializationInfo const*> vertexShaderData{vertexShader.get(), nullptr};
|
||||||
|
std::pair<vk::ShaderModule, vk::SpecializationInfo const*> fragmentShaderData{fragmentShader.get(), nullptr};
|
||||||
|
std::vector<std::pair<vk::Format, uint32_t>> vertexInputAttributeFormats{
|
||||||
|
{vk::Format::eR32G32B32Sfloat, offsetof(Vertex, pos)},
|
||||||
|
{vk::Format::eR32G32B32A32Sfloat, offsetof(Vertex, color)},
|
||||||
|
};
|
||||||
|
colorPipeline = vk::su::createGraphicsPipeline(
|
||||||
|
device.get(), pipelineCache.get(), vertexShaderData, fragmentShaderData, sizeof(Vertex),
|
||||||
|
vertexInputAttributeFormats, vk::FrontFace::eClockwise, true, pipelineLayout.get(), colorRenderPass.get());
|
||||||
|
depthPipeline = vk::su::createGraphicsPipeline(
|
||||||
|
device.get(), pipelineCache.get(), vertexShaderData, fragmentShaderData, sizeof(Vertex),
|
||||||
|
vertexInputAttributeFormats, vk::FrontFace::eClockwise, true, pipelineLayout.get(), depthRenderPass.get());
|
||||||
|
m_extent = vk::Extent2D(768, 512);
|
||||||
|
colorAttachment = std::make_unique<vk::su::ImageData>(
|
||||||
|
physicalDevice, device.get(), colorFormat, m_extent, vk::ImageTiling::eOptimal,
|
||||||
|
vk::ImageUsageFlags{vk::ImageUsageFlagBits::eColorAttachment | vk::ImageUsageFlagBits::eTransferSrc},
|
||||||
|
vk::ImageLayout::eUndefined, vk::MemoryPropertyFlags{vk::MemoryPropertyFlagBits::eDeviceLocal},
|
||||||
|
vk::ImageAspectFlags{vk::ImageAspectFlagBits::eColor});
|
||||||
|
colorAttachmentRead = std::make_unique<vk::su::ImageData>(
|
||||||
|
physicalDevice, device.get(), colorFormat, m_extent, vk::ImageTiling::eLinear,
|
||||||
|
vk::ImageUsageFlags{vk::ImageUsageFlagBits::eTransferDst}, vk::ImageLayout::eUndefined,
|
||||||
|
vk::MemoryPropertyFlags{vk::MemoryPropertyFlagBits::eHostVisible},
|
||||||
|
vk::ImageAspectFlags{vk::ImageAspectFlagBits::eColor});
|
||||||
|
// depthAttachment = std::make_unique<vk::su::ImageData>(
|
||||||
|
// physicalDevice, device.get(), vk::Format::eD16Unorm, m_extent, vk::ImageTiling::eOptimal,
|
||||||
|
// vk::ImageUsageFlags{vk::ImageUsageFlagBits::eDepthStencilAttachment}, vk::ImageLayout::eUndefined,
|
||||||
|
// vk::MemoryPropertyFlags{vk::MemoryPropertyFlagBits::eDeviceLocal},
|
||||||
|
// vk::ImageAspectFlags{vk::ImageAspectFlagBits::eDepth});
|
||||||
|
depthAttachment = std::make_unique<vk::su::DepthBufferData>(physicalDevice, device.get(), depthFormat, m_extent);
|
||||||
|
colorFramebuffer = vk::su::createFramebuffer(device.get(), colorRenderPass.get(), colorAttachment->imageView.get(),
|
||||||
|
depthAttachment->imageView.get(), m_extent);
|
||||||
|
depthFramebuffer = vk::su::createFramebuffer(device.get(), depthRenderPass.get(), colorAttachment->imageView.get(),
|
||||||
|
depthAttachment->imageView.get(), m_extent);
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
bool VISIRendererVulkan::SetupVertexBuffersAndFormats() {
|
||||||
|
size_t vertCount = 0;
|
||||||
|
size_t indexCount = 0;
|
||||||
|
for (const auto& model : m_models) {
|
||||||
|
vertCount += model.verts.size();
|
||||||
|
indexCount += model.idxs.size();
|
||||||
|
}
|
||||||
|
m_entityVertStart = vertCount;
|
||||||
|
vertCount += 8 * m_entities.size();
|
||||||
|
vertCount += m_lights.size();
|
||||||
|
vertexBuffer = createBuffer(vertCount * sizeof(Vertex), {vk::BufferUsageFlagBits::eVertexBuffer});
|
||||||
|
indexBuffer = createBuffer(indexCount * sizeof(uint32_t), {vk::BufferUsageFlagBits::eIndexBuffer});
|
||||||
|
|
||||||
|
auto* vertMap = static_cast<Vertex*>(vertexBuffer->map(device.get()));
|
||||||
|
auto* indexMap = static_cast<uint32_t*>(indexBuffer->map(device.get()));
|
||||||
|
for (const auto& model : m_models) {
|
||||||
|
memcpy(vertMap, model.verts.data(), model.verts.size() * sizeof(Vertex));
|
||||||
|
memcpy(indexMap, model.idxs.data(), model.idxs.size() * sizeof(uint32_t));
|
||||||
|
vertMap += model.verts.size();
|
||||||
|
indexMap += model.idxs.size();
|
||||||
|
}
|
||||||
|
auto idx = static_cast<uint32_t>(m_models.size());
|
||||||
|
for (const auto& ent : m_entities) {
|
||||||
|
auto verts = VISIRenderer::AABBToVerts(ent.aabb, VISIRenderer::ColorForIndex(idx++));
|
||||||
|
memcpy(vertMap, verts.data(), verts.size() * sizeof(Vertex));
|
||||||
|
vertMap += verts.size();
|
||||||
|
}
|
||||||
|
for (const auto& light : m_lights) {
|
||||||
|
auto* vert = vertMap++;
|
||||||
|
vert->pos = light.point;
|
||||||
|
vert->color = VISIRenderer::ColorForIndex(idx++);
|
||||||
|
}
|
||||||
|
vertexBuffer->unmap(device.get());
|
||||||
|
indexBuffer->unmap(device.get());
|
||||||
|
|
||||||
|
uniformBuffer = createBuffer(sizeof(Uniforms) * 6, {vk::BufferUsageFlagBits::eUniformBuffer});
|
||||||
|
aabbIndexBuffer = createBuffer(AABBIdxs.size() * sizeof(uint16_t), {vk::BufferUsageFlagBits::eIndexBuffer});
|
||||||
|
aabbIndexBuffer->upload(device.get(), AABBIdxs);
|
||||||
|
|
||||||
|
{
|
||||||
|
const std::array<const vk::DescriptorSetLayout, 1> layouts{descriptorSetLayout.get()};
|
||||||
|
uniformBufferDescriptorSet =
|
||||||
|
std::move(vk::su::assertSuccess(device->allocateDescriptorSetsUnique(
|
||||||
|
vk::DescriptorSetAllocateInfo{descriptorPool.get(), layouts}))
|
||||||
|
.front());
|
||||||
|
}
|
||||||
|
vk::su::updateDescriptorSets(
|
||||||
|
device.get(), uniformBufferDescriptorSet.get(),
|
||||||
|
{{vk::DescriptorType::eUniformBufferDynamic, uniformBuffer->buffer.get(), sizeof(Uniforms)}}, {});
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
void VISIRendererVulkan::SetupRenderPass(const zeus::CVector3f& pos) {
|
||||||
|
auto posMat = zeus::CTransform::Translate(-pos).toMatrix4f();
|
||||||
|
auto* buffer = static_cast<Uniforms*>(uniformBuffer->map(device.get()));
|
||||||
|
for (uint16_t j = 0; j < 6; ++j) {
|
||||||
|
zeus::CMatrix4f modelView = LookMATs[j] * posMat;
|
||||||
|
m_frustums[j].updatePlanes(modelView, g_Proj);
|
||||||
|
buffer->projectionMatrix = g_Proj;
|
||||||
|
buffer->modelViewMatrix = modelView;
|
||||||
|
buffer++;
|
||||||
|
}
|
||||||
|
uniformBuffer->unmap(device.get());
|
||||||
|
}
|
||||||
|
|
||||||
|
void VISIRendererVulkan::RenderPVSOpaque(VISIRenderer::RGBA8* out, bool& needTransparent) {
|
||||||
|
commandBuffer->begin(vk::CommandBufferBeginInfo{});
|
||||||
|
|
||||||
|
{
|
||||||
|
std::array<vk::ClearValue, 2> clearValues{
|
||||||
|
vk::ClearColorValue(std::array<float, 4>{0.f, 0.f, 0.f, 1.f}),
|
||||||
|
vk::ClearDepthStencilValue(1.f, 0),
|
||||||
|
};
|
||||||
|
vk::RenderPassBeginInfo renderPassBeginInfo(colorRenderPass.get(), colorFramebuffer.get(),
|
||||||
|
vk::Rect2D{{0, 0}, m_extent}, clearValues.size(), clearValues.data());
|
||||||
|
commandBuffer->beginRenderPass(renderPassBeginInfo, vk::SubpassContents::eInline);
|
||||||
|
}
|
||||||
|
commandBuffer->bindPipeline(vk::PipelineBindPoint::eGraphics, colorPipeline.get());
|
||||||
|
commandBuffer->setScissor(0, std::array{vk::Rect2D{{}, m_extent}});
|
||||||
|
// commandBuffer->bindVertexBuffers(0, std::array{vertexBuffer->buffer.get()}, std::array<vk::DeviceSize, 1>{0});
|
||||||
|
commandBuffer->bindIndexBuffer(indexBuffer->buffer.get(), 0, vk::IndexType::eUint32);
|
||||||
|
|
||||||
|
for (uint32_t j = 0; j < 6; ++j) {
|
||||||
|
auto x = static_cast<float>((j % 3) * 256);
|
||||||
|
auto y = static_cast<float>((j / 3) * 256); // NOLINT(bugprone-integer-division)
|
||||||
|
commandBuffer->setViewport(0, std::array{vk::Viewport{x, y, 256.f, 256.f, 0.f, 1.f}});
|
||||||
|
commandBuffer->bindDescriptorSets(vk::PipelineBindPoint::eGraphics, pipelineLayout.get(), 0,
|
||||||
|
std::array{uniformBufferDescriptorSet.get()},
|
||||||
|
std::array<uint32_t, 1>{j * sizeof(Uniforms)});
|
||||||
|
size_t vertexBufferOffset = 0;
|
||||||
|
size_t indexBufferOffset = 0;
|
||||||
|
for (const auto& model : m_models) {
|
||||||
|
if (m_frustums[j].aabbFrustumTest(model.aabb)) {
|
||||||
|
commandBuffer->bindVertexBuffers(0, std::array{vertexBuffer->buffer.get()},
|
||||||
|
std::array<vk::DeviceSize, 1>{vertexBufferOffset});
|
||||||
|
for (const auto& surf : model.surfaces) {
|
||||||
|
// Non-transparents first
|
||||||
|
if (surf.transparent) {
|
||||||
|
needTransparent = true;
|
||||||
|
} else {
|
||||||
|
assert(model.topology == hecl::HMDLTopology::TriStrips);
|
||||||
|
commandBuffer->drawIndexed(surf.count, 1, indexBufferOffset + surf.first, 0, 0);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
vertexBufferOffset += model.verts.size() * sizeof(Vertex);
|
||||||
|
indexBufferOffset += model.idxs.size();
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
commandBuffer->endRenderPass();
|
||||||
|
|
||||||
|
{
|
||||||
|
// const vk::ImageMemoryBarrier transferSrcBarrier{
|
||||||
|
// vk::AccessFlagBits::eMemoryRead,
|
||||||
|
// vk::AccessFlagBits::eTransferRead,
|
||||||
|
// vk::ImageLayout::eGeneral,
|
||||||
|
// vk::ImageLayout::eTransferSrcOptimal,
|
||||||
|
// 0,
|
||||||
|
// 0,
|
||||||
|
// colorAttachment->image.get(),
|
||||||
|
// vk::ImageSubresourceRange{vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1},
|
||||||
|
// };
|
||||||
|
const vk::ImageMemoryBarrier transferDstBarrier{
|
||||||
|
vk::AccessFlags{},
|
||||||
|
vk::AccessFlagBits::eTransferWrite,
|
||||||
|
vk::ImageLayout::eUndefined,
|
||||||
|
vk::ImageLayout::eTransferDstOptimal,
|
||||||
|
0,
|
||||||
|
0,
|
||||||
|
colorAttachmentRead->image.get(),
|
||||||
|
vk::ImageSubresourceRange{vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1},
|
||||||
|
};
|
||||||
|
commandBuffer->pipelineBarrier(vk::PipelineStageFlagBits::eColorAttachmentOutput, vk::PipelineStageFlagBits::eTransfer,
|
||||||
|
vk::DependencyFlags{}, std::array<vk::MemoryBarrier, 0>{},
|
||||||
|
std::array<vk::BufferMemoryBarrier, 0>{},
|
||||||
|
// std::array<vk::ImageMemoryBarrier, 2>{transferSrcBarrier, transferDstBarrier}
|
||||||
|
std::array<vk::ImageMemoryBarrier, 1>{transferDstBarrier}
|
||||||
|
);
|
||||||
|
}
|
||||||
|
{
|
||||||
|
const vk::ImageCopy imageCopy{vk::ImageSubresourceLayers{vk::ImageAspectFlagBits::eColor, 0, 0, 1}, vk::Offset3D{},
|
||||||
|
vk::ImageSubresourceLayers{vk::ImageAspectFlagBits::eColor, 0, 0, 1}, vk::Offset3D{},
|
||||||
|
vk::Extent3D{m_extent, 1}};
|
||||||
|
commandBuffer->copyImage(colorAttachment->image.get(), vk::ImageLayout::eTransferSrcOptimal,
|
||||||
|
colorAttachmentRead->image.get(), vk::ImageLayout::eTransferDstOptimal,
|
||||||
|
std::array{imageCopy});
|
||||||
|
}
|
||||||
|
// {
|
||||||
|
//// const vk::ImageMemoryBarrier transferSrcBarrier{
|
||||||
|
//// vk::AccessFlagBits::eTransferRead,
|
||||||
|
//// vk::AccessFlags{},
|
||||||
|
//// vk::ImageLayout::eTransferSrcOptimal,
|
||||||
|
//// vk::ImageLayout::eColorAttachmentOptimal,
|
||||||
|
//// 0,
|
||||||
|
//// 0,
|
||||||
|
//// colorAttachment->image.get(),
|
||||||
|
//// vk::ImageSubresourceRange{vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1},
|
||||||
|
//// };
|
||||||
|
// const vk::ImageMemoryBarrier transferDstBarrier{
|
||||||
|
// vk::AccessFlagBits::eTransferWrite,
|
||||||
|
// vk::AccessFlagBits::eHostRead,
|
||||||
|
// vk::ImageLayout::eTransferDstOptimal,
|
||||||
|
// vk::ImageLayout::eGeneral,
|
||||||
|
// 0,
|
||||||
|
// 0,
|
||||||
|
// colorAttachmentRead->image.get(),
|
||||||
|
// vk::ImageSubresourceRange{vk::ImageAspectFlagBits::eColor, 0, 1, 0, 1},
|
||||||
|
// };
|
||||||
|
// commandBuffer->pipelineBarrier(vk::PipelineStageFlagBits::eTransfer,
|
||||||
|
// vk::PipelineStageFlagBits::eHost, vk::DependencyFlags{},
|
||||||
|
// std::array<vk::MemoryBarrier, 0>{}, std::array<vk::BufferMemoryBarrier, 0>{},
|
||||||
|
//// std::array<vk::ImageMemoryBarrier, 2>{transferSrcBarrier, transferDstBarrier}
|
||||||
|
// std::array<vk::ImageMemoryBarrier, 1>{transferDstBarrier}
|
||||||
|
// );
|
||||||
|
// }
|
||||||
|
commandBuffer->end();
|
||||||
|
vk::su::submitAndWait(device.get(), graphicsQueue, commandBuffer.get());
|
||||||
|
|
||||||
|
size_t size = sizeof(VISIRenderer::RGBA8) * m_extent.height * m_extent.width;
|
||||||
|
assert(size == colorAttachmentRead->deviceSize);
|
||||||
|
void* imageMemory = vk::su::assertSuccess(device->mapMemory(colorAttachmentRead->deviceMemory.get(), 0, size));
|
||||||
|
memcpy(out, imageMemory, size);
|
||||||
|
device->unmapMemory(colorAttachmentRead->deviceMemory.get());
|
||||||
|
}
|
||||||
|
|
||||||
|
void VISIRendererVulkan::RenderPVSTransparent(const std::function<void(int)>& passFunc) {}
|
||||||
|
|
||||||
|
void VISIRendererVulkan::RenderPVSEntitiesAndLights(const std::function<void(int)>& passFunc,
|
||||||
|
const std::function<void(int, EPVSVisSetState)>& lightPassFunc) {}
|
|
@ -0,0 +1,61 @@
|
||||||
|
#pragma once
|
||||||
|
|
||||||
|
#include "../VISIRenderer.hpp"
|
||||||
|
|
||||||
|
#include "utils.hpp"
|
||||||
|
|
||||||
|
#include <zeus/CFrustum.hpp>
|
||||||
|
|
||||||
|
class VISIRendererVulkan : public VISIRenderer {
|
||||||
|
vk::PhysicalDevice physicalDevice;
|
||||||
|
vk::UniqueInstance instance;
|
||||||
|
vk::UniqueDevice device;
|
||||||
|
vk::UniqueCommandPool commandPool;
|
||||||
|
vk::UniqueCommandBuffer commandBuffer;
|
||||||
|
vk::Queue graphicsQueue;
|
||||||
|
vk::UniqueShaderModule vertexShader;
|
||||||
|
vk::UniqueShaderModule fragmentShader;
|
||||||
|
vk::UniqueRenderPass colorRenderPass;
|
||||||
|
vk::UniqueRenderPass depthRenderPass;
|
||||||
|
vk::UniquePipelineCache pipelineCache;
|
||||||
|
vk::UniquePipeline colorPipeline;
|
||||||
|
vk::UniquePipeline depthPipeline;
|
||||||
|
vk::UniqueDescriptorSetLayout descriptorSetLayout;
|
||||||
|
vk::UniqueDescriptorPool descriptorPool;
|
||||||
|
vk::UniqueDescriptorSet uniformBufferDescriptorSet;
|
||||||
|
vk::UniquePipelineLayout pipelineLayout;
|
||||||
|
std::unique_ptr<vk::su::ImageData> colorAttachment;
|
||||||
|
std::unique_ptr<vk::su::ImageData> colorAttachmentRead;
|
||||||
|
std::unique_ptr<vk::su::ImageData> depthAttachment;
|
||||||
|
std::unique_ptr<vk::su::BufferData> uniformBuffer;
|
||||||
|
std::unique_ptr<vk::su::BufferData> vertexBuffer;
|
||||||
|
std::unique_ptr<vk::su::BufferData> indexBuffer;
|
||||||
|
std::unique_ptr<vk::su::BufferData> aabbIndexBuffer;
|
||||||
|
vk::UniqueFramebuffer colorFramebuffer;
|
||||||
|
vk::UniqueFramebuffer depthFramebuffer;
|
||||||
|
|
||||||
|
bool SetupShaders() override;
|
||||||
|
bool SetupVertexBuffersAndFormats() override;
|
||||||
|
void SetupRenderPass(const zeus::CVector3f& pos) override;
|
||||||
|
|
||||||
|
private:
|
||||||
|
using Vertex = VISIRenderer::Model::Vert;
|
||||||
|
using Uniforms = struct {
|
||||||
|
zeus::CMatrix4f projectionMatrix;
|
||||||
|
zeus::CMatrix4f modelViewMatrix;
|
||||||
|
};
|
||||||
|
size_t m_entityVertStart;
|
||||||
|
std::array<zeus::CFrustum, 6> m_frustums;
|
||||||
|
vk::Extent2D m_extent;
|
||||||
|
|
||||||
|
inline std::unique_ptr<vk::su::BufferData> createBuffer(size_t size, vk::BufferUsageFlags usageFlags) {
|
||||||
|
return std::make_unique<vk::su::BufferData>(physicalDevice, device.get(), size, usageFlags);
|
||||||
|
}
|
||||||
|
|
||||||
|
public:
|
||||||
|
VISIRendererVulkan(int argc, const hecl::SystemChar** argv) : VISIRenderer(argc, argv) {}
|
||||||
|
void RenderPVSOpaque(RGBA8* out, bool& needTransparent) override;
|
||||||
|
void RenderPVSTransparent(const std::function<void(int)>& passFunc) override;
|
||||||
|
void RenderPVSEntitiesAndLights(const std::function<void(int)>& passFunc,
|
||||||
|
const std::function<void(int, EPVSVisSetState)>& lightPassFunc) override;
|
||||||
|
};
|
|
@ -0,0 +1,14 @@
|
||||||
|
#version 450
|
||||||
|
#extension GL_ARB_separate_shader_objects : enable
|
||||||
|
|
||||||
|
struct VertToFrag
|
||||||
|
{
|
||||||
|
vec4 color;
|
||||||
|
};
|
||||||
|
|
||||||
|
layout(location=0) in VertToFrag vtf;
|
||||||
|
layout(location=0) out vec4 colorOut;
|
||||||
|
void main()
|
||||||
|
{
|
||||||
|
colorOut = vtf.color;
|
||||||
|
}
|
Binary file not shown.
|
@ -0,0 +1,23 @@
|
||||||
|
#version 450
|
||||||
|
#extension GL_ARB_separate_shader_objects : enable
|
||||||
|
|
||||||
|
layout(location=0) in vec4 posIn;
|
||||||
|
layout(location=1) in vec4 colorIn;
|
||||||
|
|
||||||
|
layout(binding=0) uniform UniformBlock
|
||||||
|
{
|
||||||
|
mat4 projectionMatrix;
|
||||||
|
mat4 modelViewMatrix;
|
||||||
|
};
|
||||||
|
|
||||||
|
struct VertToFrag
|
||||||
|
{
|
||||||
|
vec4 color;
|
||||||
|
};
|
||||||
|
|
||||||
|
layout(location=0) out VertToFrag vtf;
|
||||||
|
void main()
|
||||||
|
{
|
||||||
|
vtf.color = colorIn;
|
||||||
|
gl_Position = projectionMatrix * modelViewMatrix * vec4(posIn.xyz, 1.0);
|
||||||
|
}
|
Binary file not shown.
|
@ -0,0 +1,767 @@
|
||||||
|
// Copyright(c) 2019, NVIDIA CORPORATION. All rights reserved.
|
||||||
|
//
|
||||||
|
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
// you may not use this file except in compliance with the License.
|
||||||
|
// You may obtain a copy of the License at
|
||||||
|
//
|
||||||
|
// http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
//
|
||||||
|
// Unless required by applicable law or agreed to in writing, software
|
||||||
|
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
// See the License for the specific language governing permissions and
|
||||||
|
// limitations under the License.
|
||||||
|
//
|
||||||
|
|
||||||
|
#if defined(_MSC_VER)
|
||||||
|
// no need to ignore any warnings with MSVC
|
||||||
|
#elif defined(__clang__)
|
||||||
|
#pragma clang diagnostic ignored "-Wmissing-braces"
|
||||||
|
#elif defined(__GNUC__)
|
||||||
|
// no need to ignore any warnings with GCC
|
||||||
|
#else
|
||||||
|
// unknow compiler... just ignore the warnings for yourselves ;)
|
||||||
|
#endif
|
||||||
|
|
||||||
|
#include "utils.hpp"
|
||||||
|
|
||||||
|
#include <vulkan/vulkan.hpp>
|
||||||
|
|
||||||
|
#include <iomanip>
|
||||||
|
#include <memory>
|
||||||
|
#include <numeric>
|
||||||
|
#include <utility>
|
||||||
|
|
||||||
|
#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1)
|
||||||
|
VULKAN_HPP_DEFAULT_DISPATCH_LOADER_DYNAMIC_STORAGE
|
||||||
|
#endif
|
||||||
|
|
||||||
|
namespace vk::su {
|
||||||
|
vk::UniqueDeviceMemory allocateDeviceMemory(vk::Device const& device,
|
||||||
|
vk::PhysicalDeviceMemoryProperties const& memoryProperties,
|
||||||
|
vk::MemoryRequirements const& memoryRequirements,
|
||||||
|
vk::MemoryPropertyFlags memoryPropertyFlags) {
|
||||||
|
uint32_t memoryTypeIndex = findMemoryType(memoryProperties, memoryRequirements.memoryTypeBits, memoryPropertyFlags);
|
||||||
|
|
||||||
|
return assertSuccess(device.allocateMemoryUnique(vk::MemoryAllocateInfo(memoryRequirements.size, memoryTypeIndex)));
|
||||||
|
}
|
||||||
|
|
||||||
|
bool contains(std::vector<vk::ExtensionProperties> const& extensionProperties, std::string const& extensionName) {
|
||||||
|
auto propertyIterator =
|
||||||
|
std::find_if(extensionProperties.begin(), extensionProperties.end(),
|
||||||
|
[&extensionName](vk::ExtensionProperties const& ep) { return extensionName == ep.extensionName; });
|
||||||
|
return (propertyIterator != extensionProperties.end());
|
||||||
|
}
|
||||||
|
|
||||||
|
vk::UniqueCommandPool createCommandPool(vk::Device const& device, uint32_t queueFamilyIndex) {
|
||||||
|
vk::CommandPoolCreateInfo commandPoolCreateInfo(vk::CommandPoolCreateFlagBits::eResetCommandBuffer, queueFamilyIndex);
|
||||||
|
return assertSuccess(device.createCommandPoolUnique(commandPoolCreateInfo));
|
||||||
|
}
|
||||||
|
|
||||||
|
vk::UniqueCommandBuffer createCommandBuffer(vk::Device const& device, vk::CommandPool const& commandPool) {
|
||||||
|
const vk::CommandBufferAllocateInfo info(commandPool, vk::CommandBufferLevel::ePrimary, 1);
|
||||||
|
auto commandBuffers = vk::su::assertSuccess(device.allocateCommandBuffersUnique(info));
|
||||||
|
return std::move(commandBuffers.front());
|
||||||
|
}
|
||||||
|
|
||||||
|
vk::DebugUtilsMessengerEXT createDebugUtilsMessengerEXT(vk::Instance const& instance) {
|
||||||
|
return instance.createDebugUtilsMessengerEXT(vk::su::makeDebugUtilsMessengerCreateInfoEXT()).value;
|
||||||
|
}
|
||||||
|
|
||||||
|
vk::UniqueDescriptorPool createDescriptorPool(vk::Device const& device,
|
||||||
|
std::vector<vk::DescriptorPoolSize> const& poolSizes) {
|
||||||
|
assert(!poolSizes.empty());
|
||||||
|
uint32_t maxSets =
|
||||||
|
std::accumulate(poolSizes.begin(), poolSizes.end(), 0,
|
||||||
|
[](uint32_t sum, vk::DescriptorPoolSize const& dps) { return sum + dps.descriptorCount; });
|
||||||
|
assert(0 < maxSets);
|
||||||
|
|
||||||
|
vk::DescriptorPoolCreateInfo descriptorPoolCreateInfo(vk::DescriptorPoolCreateFlagBits::eFreeDescriptorSet, maxSets,
|
||||||
|
poolSizes);
|
||||||
|
return assertSuccess(device.createDescriptorPoolUnique(descriptorPoolCreateInfo));
|
||||||
|
}
|
||||||
|
|
||||||
|
vk::UniqueDescriptorSetLayout createDescriptorSetLayout(
|
||||||
|
vk::Device const& device,
|
||||||
|
std::vector<std::tuple<vk::DescriptorType, uint32_t, vk::ShaderStageFlags>> const& bindingData,
|
||||||
|
vk::DescriptorSetLayoutCreateFlags flags) {
|
||||||
|
std::vector<vk::DescriptorSetLayoutBinding> bindings(bindingData.size());
|
||||||
|
for (size_t i = 0; i < bindingData.size(); i++) {
|
||||||
|
bindings[i] = vk::DescriptorSetLayoutBinding(checked_cast<uint32_t>(i), std::get<0>(bindingData[i]),
|
||||||
|
std::get<1>(bindingData[i]), std::get<2>(bindingData[i]));
|
||||||
|
}
|
||||||
|
return assertSuccess(device.createDescriptorSetLayoutUnique(vk::DescriptorSetLayoutCreateInfo(flags, bindings)));
|
||||||
|
}
|
||||||
|
|
||||||
|
vk::UniqueDevice createDevice(vk::PhysicalDevice const& physicalDevice, uint32_t queueFamilyIndex,
|
||||||
|
std::vector<std::string> const& extensions,
|
||||||
|
vk::PhysicalDeviceFeatures const* physicalDeviceFeatures, void const* pNext) {
|
||||||
|
std::vector<char const*> enabledExtensions;
|
||||||
|
enabledExtensions.reserve(extensions.size());
|
||||||
|
for (auto const& ext : extensions) {
|
||||||
|
enabledExtensions.push_back(ext.data());
|
||||||
|
}
|
||||||
|
|
||||||
|
float queuePriority = 0.0f;
|
||||||
|
vk::DeviceQueueCreateInfo deviceQueueCreateInfo({}, queueFamilyIndex, 1, &queuePriority);
|
||||||
|
vk::DeviceCreateInfo deviceCreateInfo({}, deviceQueueCreateInfo, {}, enabledExtensions, physicalDeviceFeatures);
|
||||||
|
deviceCreateInfo.pNext = pNext;
|
||||||
|
|
||||||
|
vk::UniqueDevice device = assertSuccess(physicalDevice.createDeviceUnique(deviceCreateInfo));
|
||||||
|
#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1)
|
||||||
|
// initialize function pointers for instance
|
||||||
|
VULKAN_HPP_DEFAULT_DISPATCHER.init(device.get());
|
||||||
|
#endif
|
||||||
|
return device;
|
||||||
|
}
|
||||||
|
|
||||||
|
vk::UniqueFramebuffer createFramebuffer(vk::Device const& device, vk::RenderPass& renderPass,
|
||||||
|
vk::ImageView const& colorImageView, vk::ImageView const& depthImageView,
|
||||||
|
vk::Extent2D const& extent) {
|
||||||
|
std::array<vk::ImageView, 2> attachments{colorImageView, depthImageView};
|
||||||
|
vk::FramebufferCreateInfo framebufferCreateInfo(vk::FramebufferCreateFlags(), renderPass, depthImageView ? 2 : 1,
|
||||||
|
attachments.data(), extent.width, extent.height, 1);
|
||||||
|
return assertSuccess(device.createFramebufferUnique(framebufferCreateInfo));
|
||||||
|
}
|
||||||
|
|
||||||
|
vk::UniquePipeline createGraphicsPipeline(
|
||||||
|
vk::Device const& device, vk::PipelineCache const& pipelineCache,
|
||||||
|
std::pair<vk::ShaderModule, vk::SpecializationInfo const*> const& vertexShaderData,
|
||||||
|
std::pair<vk::ShaderModule, vk::SpecializationInfo const*> const& fragmentShaderData, uint32_t vertexStride,
|
||||||
|
std::vector<std::pair<vk::Format, uint32_t>> const& vertexInputAttributeFormatOffset, vk::FrontFace frontFace,
|
||||||
|
bool depthBuffered, vk::PipelineLayout const& pipelineLayout, vk::RenderPass const& renderPass) {
|
||||||
|
std::array<vk::PipelineShaderStageCreateInfo, 2> pipelineShaderStageCreateInfos = {
|
||||||
|
vk::PipelineShaderStageCreateInfo(vk::PipelineShaderStageCreateFlags(), vk::ShaderStageFlagBits::eVertex,
|
||||||
|
vertexShaderData.first, "main", vertexShaderData.second),
|
||||||
|
vk::PipelineShaderStageCreateInfo(vk::PipelineShaderStageCreateFlags(), vk::ShaderStageFlagBits::eFragment,
|
||||||
|
fragmentShaderData.first, "main", fragmentShaderData.second)};
|
||||||
|
|
||||||
|
std::vector<vk::VertexInputAttributeDescription> vertexInputAttributeDescriptions;
|
||||||
|
vk::PipelineVertexInputStateCreateInfo pipelineVertexInputStateCreateInfo;
|
||||||
|
vk::VertexInputBindingDescription vertexInputBindingDescription(0, vertexStride);
|
||||||
|
|
||||||
|
if (0 < vertexStride) {
|
||||||
|
vertexInputAttributeDescriptions.reserve(vertexInputAttributeFormatOffset.size());
|
||||||
|
for (uint32_t i = 0; i < vertexInputAttributeFormatOffset.size(); i++) {
|
||||||
|
vertexInputAttributeDescriptions.emplace_back(i, 0, vertexInputAttributeFormatOffset[i].first,
|
||||||
|
vertexInputAttributeFormatOffset[i].second);
|
||||||
|
}
|
||||||
|
pipelineVertexInputStateCreateInfo.setVertexBindingDescriptions(vertexInputBindingDescription);
|
||||||
|
pipelineVertexInputStateCreateInfo.setVertexAttributeDescriptions(vertexInputAttributeDescriptions);
|
||||||
|
}
|
||||||
|
|
||||||
|
vk::PipelineInputAssemblyStateCreateInfo pipelineInputAssemblyStateCreateInfo(
|
||||||
|
vk::PipelineInputAssemblyStateCreateFlags(), vk::PrimitiveTopology::eTriangleStrip);
|
||||||
|
|
||||||
|
vk::PipelineViewportStateCreateInfo pipelineViewportStateCreateInfo(vk::PipelineViewportStateCreateFlags(), 1,
|
||||||
|
nullptr, 1, nullptr);
|
||||||
|
|
||||||
|
vk::PipelineRasterizationStateCreateInfo pipelineRasterizationStateCreateInfo(
|
||||||
|
vk::PipelineRasterizationStateCreateFlags(), VK_FALSE, VK_FALSE, vk::PolygonMode::eFill,
|
||||||
|
vk::CullModeFlagBits::eBack, frontFace, VK_FALSE, 0.0f, 0.0f, 0.0f, 1.0f);
|
||||||
|
|
||||||
|
vk::PipelineMultisampleStateCreateInfo pipelineMultisampleStateCreateInfo({}, vk::SampleCountFlagBits::e1);
|
||||||
|
|
||||||
|
vk::StencilOpState stencilOpState(vk::StencilOp::eKeep, vk::StencilOp::eKeep, vk::StencilOp::eKeep,
|
||||||
|
vk::CompareOp::eAlways);
|
||||||
|
vk::PipelineDepthStencilStateCreateInfo pipelineDepthStencilStateCreateInfo(
|
||||||
|
vk::PipelineDepthStencilStateCreateFlags(), static_cast<vk::Bool32>(depthBuffered),
|
||||||
|
static_cast<vk::Bool32>(depthBuffered), vk::CompareOp::eLessOrEqual, VK_FALSE, VK_FALSE, stencilOpState,
|
||||||
|
stencilOpState);
|
||||||
|
|
||||||
|
vk::ColorComponentFlags colorComponentFlags(vk::ColorComponentFlagBits::eR | vk::ColorComponentFlagBits::eG |
|
||||||
|
vk::ColorComponentFlagBits::eB | vk::ColorComponentFlagBits::eA);
|
||||||
|
vk::PipelineColorBlendAttachmentState pipelineColorBlendAttachmentState(
|
||||||
|
VK_FALSE, vk::BlendFactor::eZero, vk::BlendFactor::eZero, vk::BlendOp::eAdd, vk::BlendFactor::eZero,
|
||||||
|
vk::BlendFactor::eZero, vk::BlendOp::eAdd, colorComponentFlags);
|
||||||
|
vk::PipelineColorBlendStateCreateInfo pipelineColorBlendStateCreateInfo(
|
||||||
|
vk::PipelineColorBlendStateCreateFlags(), VK_FALSE, vk::LogicOp::eNoOp, pipelineColorBlendAttachmentState,
|
||||||
|
{{1.0f, 1.0f, 1.0f, 1.0f}});
|
||||||
|
|
||||||
|
std::array<vk::DynamicState, 2> dynamicStates = {vk::DynamicState::eViewport, vk::DynamicState::eScissor};
|
||||||
|
vk::PipelineDynamicStateCreateInfo pipelineDynamicStateCreateInfo(vk::PipelineDynamicStateCreateFlags(),
|
||||||
|
dynamicStates);
|
||||||
|
|
||||||
|
vk::GraphicsPipelineCreateInfo graphicsPipelineCreateInfo(
|
||||||
|
vk::PipelineCreateFlags(), pipelineShaderStageCreateInfos, &pipelineVertexInputStateCreateInfo,
|
||||||
|
&pipelineInputAssemblyStateCreateInfo, nullptr, &pipelineViewportStateCreateInfo,
|
||||||
|
&pipelineRasterizationStateCreateInfo, &pipelineMultisampleStateCreateInfo, &pipelineDepthStencilStateCreateInfo,
|
||||||
|
&pipelineColorBlendStateCreateInfo, &pipelineDynamicStateCreateInfo, pipelineLayout, renderPass);
|
||||||
|
|
||||||
|
return assertSuccess(device.createGraphicsPipelineUnique(pipelineCache, graphicsPipelineCreateInfo));
|
||||||
|
}
|
||||||
|
|
||||||
|
std::vector<char const*> gatherExtensions(std::vector<std::string> const& extensions
|
||||||
|
#if !defined(NDEBUG)
|
||||||
|
,
|
||||||
|
std::vector<vk::ExtensionProperties> const& extensionProperties
|
||||||
|
#endif
|
||||||
|
) {
|
||||||
|
std::vector<char const*> enabledExtensions;
|
||||||
|
enabledExtensions.reserve(extensions.size());
|
||||||
|
for (auto const& ext : extensions) {
|
||||||
|
assert(std::find_if(extensionProperties.begin(), extensionProperties.end(),
|
||||||
|
[ext](vk::ExtensionProperties const& ep) { return ext == ep.extensionName; }) !=
|
||||||
|
extensionProperties.end());
|
||||||
|
enabledExtensions.push_back(ext.data());
|
||||||
|
}
|
||||||
|
#if !defined(NDEBUG)
|
||||||
|
if (std::find(extensions.begin(), extensions.end(), VK_EXT_DEBUG_UTILS_EXTENSION_NAME) == extensions.end() &&
|
||||||
|
std::find_if(extensionProperties.begin(), extensionProperties.end(), [](vk::ExtensionProperties const& ep) {
|
||||||
|
return (strcmp(VK_EXT_DEBUG_UTILS_EXTENSION_NAME, ep.extensionName) == 0);
|
||||||
|
}) != extensionProperties.end()) {
|
||||||
|
enabledExtensions.push_back(VK_EXT_DEBUG_UTILS_EXTENSION_NAME);
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
return enabledExtensions;
|
||||||
|
}
|
||||||
|
|
||||||
|
std::vector<char const*> gatherLayers(std::vector<std::string> const& layers
|
||||||
|
#if !defined(NDEBUG)
|
||||||
|
,
|
||||||
|
std::vector<vk::LayerProperties> const& layerProperties
|
||||||
|
#endif
|
||||||
|
) {
|
||||||
|
std::vector<char const*> enabledLayers;
|
||||||
|
enabledLayers.reserve(layers.size());
|
||||||
|
for (auto const& layer : layers) {
|
||||||
|
assert(std::find_if(layerProperties.begin(), layerProperties.end(), [layer](vk::LayerProperties const& lp) {
|
||||||
|
return layer == lp.layerName;
|
||||||
|
}) != layerProperties.end());
|
||||||
|
enabledLayers.push_back(layer.data());
|
||||||
|
}
|
||||||
|
#if !defined(NDEBUG)
|
||||||
|
// Enable standard validation layer to find as much errors as possible!
|
||||||
|
if (std::find(layers.begin(), layers.end(), "VK_LAYER_KHRONOS_validation") == layers.end() &&
|
||||||
|
std::find_if(layerProperties.begin(), layerProperties.end(), [](vk::LayerProperties const& lp) {
|
||||||
|
return (strcmp("VK_LAYER_KHRONOS_validation", lp.layerName) == 0);
|
||||||
|
}) != layerProperties.end()) {
|
||||||
|
enabledLayers.push_back("VK_LAYER_KHRONOS_validation");
|
||||||
|
}
|
||||||
|
#endif
|
||||||
|
return enabledLayers;
|
||||||
|
}
|
||||||
|
|
||||||
|
vk::UniqueInstance createInstance(std::string const& appName, std::string const& engineName,
|
||||||
|
std::vector<std::string> const& layers, std::vector<std::string> const& extensions,
|
||||||
|
uint32_t apiVersion) {
|
||||||
|
#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1)
|
||||||
|
static vk::DynamicLoader dl;
|
||||||
|
auto vkGetInstanceProcAddr = dl.getProcAddress<PFN_vkGetInstanceProcAddr>("vkGetInstanceProcAddr");
|
||||||
|
VULKAN_HPP_DEFAULT_DISPATCHER.init(vkGetInstanceProcAddr);
|
||||||
|
#endif
|
||||||
|
|
||||||
|
vk::ApplicationInfo applicationInfo(appName.c_str(), 1, engineName.c_str(), 1, apiVersion);
|
||||||
|
std::vector<char const*> enabledLayers = vk::su::gatherLayers(layers
|
||||||
|
#if !defined(NDEBUG)
|
||||||
|
, vk::enumerateInstanceLayerProperties().value
|
||||||
|
#endif
|
||||||
|
);
|
||||||
|
std::vector<char const*> enabledExtensions =
|
||||||
|
vk::su::gatherExtensions(extensions
|
||||||
|
#if !defined(NDEBUG)
|
||||||
|
, vk::enumerateInstanceExtensionProperties().value
|
||||||
|
#endif
|
||||||
|
);
|
||||||
|
|
||||||
|
vk::UniqueInstance instance = assertSuccess(vk::createInstanceUnique(
|
||||||
|
makeInstanceCreateInfoChain(applicationInfo, enabledLayers, enabledExtensions).get<vk::InstanceCreateInfo>()));
|
||||||
|
|
||||||
|
#if (VULKAN_HPP_DISPATCH_LOADER_DYNAMIC == 1)
|
||||||
|
// initialize function pointers for instance
|
||||||
|
VULKAN_HPP_DEFAULT_DISPATCHER.init(instance.get());
|
||||||
|
#endif
|
||||||
|
|
||||||
|
return instance;
|
||||||
|
}
|
||||||
|
|
||||||
|
vk::UniqueRenderPass createRenderPass(vk::Device const& device, vk::Format colorFormat, vk::Format depthFormat,
|
||||||
|
vk::AttachmentLoadOp colorLoadOp, vk::AttachmentStoreOp colorStoreOp,
|
||||||
|
vk::AttachmentLoadOp depthLoadOp, vk::AttachmentStoreOp depthStoreOp,
|
||||||
|
vk::ImageLayout colorFinalLayout) {
|
||||||
|
std::vector<vk::AttachmentDescription> attachmentDescriptions;
|
||||||
|
assert(colorFormat != vk::Format::eUndefined);
|
||||||
|
attachmentDescriptions.emplace_back(vk::AttachmentDescriptionFlags(), colorFormat, vk::SampleCountFlagBits::e1,
|
||||||
|
colorLoadOp, colorStoreOp, vk::AttachmentLoadOp::eDontCare,
|
||||||
|
vk::AttachmentStoreOp::eDontCare, vk::ImageLayout::eUndefined, colorFinalLayout);
|
||||||
|
if (depthFormat != vk::Format::eUndefined) {
|
||||||
|
attachmentDescriptions.emplace_back(
|
||||||
|
vk::AttachmentDescriptionFlags(), depthFormat, vk::SampleCountFlagBits::e1, depthLoadOp, depthStoreOp,
|
||||||
|
vk::AttachmentLoadOp::eDontCare, vk::AttachmentStoreOp::eDontCare,
|
||||||
|
vk::ImageLayout::eUndefined, vk::ImageLayout::eDepthStencilAttachmentOptimal);
|
||||||
|
}
|
||||||
|
vk::AttachmentReference colorAttachment(0, vk::ImageLayout::eColorAttachmentOptimal);
|
||||||
|
vk::AttachmentReference depthAttachment(1, vk::ImageLayout::eDepthStencilAttachmentOptimal);
|
||||||
|
vk::SubpassDescription subpassDescription(vk::SubpassDescriptionFlags(), vk::PipelineBindPoint::eGraphics, {},
|
||||||
|
colorAttachment, {},
|
||||||
|
(depthFormat != vk::Format::eUndefined) ? &depthAttachment : nullptr);
|
||||||
|
return assertSuccess(device.createRenderPassUnique(
|
||||||
|
vk::RenderPassCreateInfo(vk::RenderPassCreateFlags(), attachmentDescriptions, subpassDescription)));
|
||||||
|
}
|
||||||
|
|
||||||
|
VKAPI_ATTR VkBool32 VKAPI_CALL debugUtilsMessengerCallback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
                                                           VkDebugUtilsMessageTypeFlagsEXT messageTypes,
                                                           VkDebugUtilsMessengerCallbackDataEXT const* pCallbackData,
                                                           void* /*pUserData*/) {
#if !defined(NDEBUG)
  if (pCallbackData->messageIdNumber == 648835635) {
    // UNASSIGNED-khronos-Validation-debug-build-warning-message
    return VK_FALSE;
  }
  if (pCallbackData->messageIdNumber == 767975156) {
    // UNASSIGNED-BestPractices-vkCreateInstance-specialuse-extension
    return VK_FALSE;
  }
#endif

  std::cerr << vk::to_string(static_cast<vk::DebugUtilsMessageSeverityFlagBitsEXT>(messageSeverity)) << ": "
            << vk::to_string(static_cast<vk::DebugUtilsMessageTypeFlagsEXT>(messageTypes)) << ":\n";
  std::cerr << "\t"
            << "messageIDName = <" << pCallbackData->pMessageIdName << ">\n";
  std::cerr << "\t"
            << "messageIdNumber = " << pCallbackData->messageIdNumber << "\n";
  std::cerr << "\t"
            << "message = <" << pCallbackData->pMessage << ">\n";
  if (0 < pCallbackData->queueLabelCount) {
    std::cerr << "\t"
              << "Queue Labels:\n";
    for (uint8_t i = 0; i < pCallbackData->queueLabelCount; i++) {
      std::cerr << "\t\t"
                << "labelName = <" << pCallbackData->pQueueLabels[i].pLabelName << ">\n";
    }
  }
  if (0 < pCallbackData->cmdBufLabelCount) {
    std::cerr << "\t"
              << "CommandBuffer Labels:\n";
    for (uint8_t i = 0; i < pCallbackData->cmdBufLabelCount; i++) {
      std::cerr << "\t\t"
                << "labelName = <" << pCallbackData->pCmdBufLabels[i].pLabelName << ">\n";
    }
  }
  if (0 < pCallbackData->objectCount) {
    std::cerr << "\t"
              << "Objects:\n";
    for (uint8_t i = 0; i < pCallbackData->objectCount; i++) {
      std::cerr << "\t\t"
                << "Object " << i << "\n";
      std::cerr << "\t\t\t"
                << "objectType = "
                << vk::to_string(static_cast<vk::ObjectType>(pCallbackData->pObjects[i].objectType)) << "\n";
      std::cerr << "\t\t\t"
                << "objectHandle = " << pCallbackData->pObjects[i].objectHandle << "\n";
      if (pCallbackData->pObjects[i].pObjectName != nullptr) {
        std::cerr << "\t\t\t"
                  << "objectName = <" << pCallbackData->pObjects[i].pObjectName << ">\n";
      }
    }
  }
  return VK_TRUE;
}

uint32_t findGraphicsQueueFamilyIndex(std::vector<vk::QueueFamilyProperties> const& queueFamilyProperties) {
  // get the first index into queueFamilyProperties which supports graphics
  const auto graphicsQueueFamilyProperty =
      std::find_if(queueFamilyProperties.begin(), queueFamilyProperties.end(),
                   [](vk::QueueFamilyProperties const& qfp) { return qfp.queueFlags & vk::QueueFlagBits::eGraphics; });
  assert(graphicsQueueFamilyProperty != queueFamilyProperties.end());
  return static_cast<uint32_t>(std::distance(queueFamilyProperties.begin(), graphicsQueueFamilyProperty));
}

std::pair<uint32_t, uint32_t> findGraphicsAndPresentQueueFamilyIndex(vk::PhysicalDevice physicalDevice,
                                                                     vk::SurfaceKHR const& surface) {
  std::vector<vk::QueueFamilyProperties> queueFamilyProperties = physicalDevice.getQueueFamilyProperties();
  assert(queueFamilyProperties.size() < std::numeric_limits<uint32_t>::max());

  uint32_t graphicsQueueFamilyIndex = findGraphicsQueueFamilyIndex(queueFamilyProperties);
  if (physicalDevice.getSurfaceSupportKHR(graphicsQueueFamilyIndex, surface).value == VK_TRUE) {
    return std::make_pair(graphicsQueueFamilyIndex,
                          graphicsQueueFamilyIndex); // the first graphicsQueueFamilyIndex also supports present
  }

  // the graphicsQueueFamilyIndex doesn't support present -> look for another family index that supports both
  // graphics and present
  for (size_t i = 0; i < queueFamilyProperties.size(); i++) {
    if ((queueFamilyProperties[i].queueFlags & vk::QueueFlagBits::eGraphics) &&
        physicalDevice.getSurfaceSupportKHR(static_cast<uint32_t>(i), surface).value == VK_TRUE) {
      return std::make_pair(static_cast<uint32_t>(i), static_cast<uint32_t>(i));
    }
  }

  // no single family index supports both graphics and present -> look for another family
  // index that supports present
  for (size_t i = 0; i < queueFamilyProperties.size(); i++) {
    if (physicalDevice.getSurfaceSupportKHR(static_cast<uint32_t>(i), surface).value == VK_TRUE) {
      return std::make_pair(graphicsQueueFamilyIndex, static_cast<uint32_t>(i));
    }
  }

  assert(false && "Could not find queues for both graphics and present -> terminating");
  return {};
}

uint32_t findMemoryType(vk::PhysicalDeviceMemoryProperties const& memoryProperties, uint32_t typeBits,
                        vk::MemoryPropertyFlags requirementsMask) {
  auto typeIndex = uint32_t(~0);
  for (uint32_t i = 0; i < memoryProperties.memoryTypeCount; i++) {
    if (((typeBits & 1) != 0u) &&
        ((memoryProperties.memoryTypes[i].propertyFlags & requirementsMask) == requirementsMask)) {
      typeIndex = i;
      break;
    }
    typeBits >>= 1;
  }
  assert(typeIndex != uint32_t(~0));
  return typeIndex;
}

std::vector<std::string> getDeviceExtensions() { return {VK_KHR_SWAPCHAIN_EXTENSION_NAME}; }

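// Illustrative usage sketch (an assumption added for clarity, not referenced by the rest of this file): the typical
// pairing of getBufferMemoryRequirements with findMemoryType above when allocating host-visible memory for an
// existing buffer. `physicalDevice`, `device`, and `buffer` are assumed to be valid handles owned by the caller;
// allocateDeviceMemory (declared in the header) wraps essentially the same steps.
inline vk::UniqueDeviceMemory exampleAllocateHostVisibleMemory(vk::PhysicalDevice const& physicalDevice,
                                                               vk::Device const& device, vk::Buffer buffer) {
  vk::MemoryRequirements memoryRequirements = device.getBufferMemoryRequirements(buffer);
  uint32_t typeIndex =
      findMemoryType(physicalDevice.getMemoryProperties(), memoryRequirements.memoryTypeBits,
                     vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent);
  return assertSuccess(device.allocateMemoryUnique(vk::MemoryAllocateInfo(memoryRequirements.size, typeIndex)));
}
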
std::vector<std::string> getInstanceExtensions() {
  std::vector<std::string> extensions;
  extensions.emplace_back(VK_KHR_SURFACE_EXTENSION_NAME);
#if defined(VK_USE_PLATFORM_ANDROID_KHR)
  extensions.emplace_back(VK_KHR_ANDROID_SURFACE_EXTENSION_NAME);
#elif defined(VK_USE_PLATFORM_IOS_MVK)
  extensions.emplace_back(VK_MVK_IOS_SURFACE_EXTENSION_NAME);
#elif defined(VK_USE_PLATFORM_MACOS_MVK)
  extensions.emplace_back(VK_MVK_MACOS_SURFACE_EXTENSION_NAME);
#elif defined(VK_USE_PLATFORM_MIR_KHR)
  extensions.emplace_back(VK_KHR_MIR_SURFACE_EXTENSION_NAME);
#elif defined(VK_USE_PLATFORM_VI_NN)
  extensions.emplace_back(VK_NN_VI_SURFACE_EXTENSION_NAME);
#elif defined(VK_USE_PLATFORM_WAYLAND_KHR)
  extensions.emplace_back(VK_KHR_WAYLAND_SURFACE_EXTENSION_NAME);
#elif defined(VK_USE_PLATFORM_WIN32_KHR)
  extensions.emplace_back(VK_KHR_WIN32_SURFACE_EXTENSION_NAME);
#elif defined(VK_USE_PLATFORM_XCB_KHR)
  extensions.emplace_back(VK_KHR_XCB_SURFACE_EXTENSION_NAME);
#elif defined(VK_USE_PLATFORM_XLIB_KHR)
  extensions.emplace_back(VK_KHR_XLIB_SURFACE_EXTENSION_NAME);
#elif defined(VK_USE_PLATFORM_XLIB_XRANDR_EXT)
  extensions.emplace_back(VK_EXT_ACQUIRE_XLIB_DISPLAY_EXTENSION_NAME);
#endif
  return extensions;
}

vk::Format pickDepthFormat(vk::PhysicalDevice const& physicalDevice) {
  std::vector<vk::Format> candidates = {vk::Format::eD32Sfloat, vk::Format::eD32SfloatS8Uint,
                                        vk::Format::eD24UnormS8Uint};
  for (vk::Format format : candidates) {
    vk::FormatProperties props = physicalDevice.getFormatProperties(format);

    if (props.optimalTilingFeatures & vk::FormatFeatureFlagBits::eDepthStencilAttachment) {
      return format;
    }
  }
  return vk::Format::eUndefined;
}

vk::PresentModeKHR pickPresentMode(std::vector<vk::PresentModeKHR> const& presentModes) {
  vk::PresentModeKHR pickedMode = vk::PresentModeKHR::eFifo;
  for (const auto& presentMode : presentModes) {
    if (presentMode == vk::PresentModeKHR::eMailbox) {
      pickedMode = presentMode;
      break;
    }

    if (presentMode == vk::PresentModeKHR::eImmediate) {
      pickedMode = presentMode;
    }
  }
  return pickedMode;
}

vk::SurfaceFormatKHR pickSurfaceFormat(std::vector<vk::SurfaceFormatKHR> const& formats) {
  assert(!formats.empty());
  vk::SurfaceFormatKHR pickedFormat = formats[0];
  if (formats.size() == 1) {
    if (formats[0].format == vk::Format::eUndefined) {
      pickedFormat.format = vk::Format::eB8G8R8A8Unorm;
      pickedFormat.colorSpace = vk::ColorSpaceKHR::eSrgbNonlinear;
    }
  } else {
    // request several formats, the first found will be used
    vk::Format requestedFormats[] = {vk::Format::eB8G8R8A8Unorm, vk::Format::eR8G8B8A8Unorm, vk::Format::eB8G8R8Unorm,
                                     vk::Format::eR8G8B8Unorm};
    vk::ColorSpaceKHR requestedColorSpace = vk::ColorSpaceKHR::eSrgbNonlinear;
    for (size_t i = 0; i < sizeof(requestedFormats) / sizeof(requestedFormats[0]); i++) {
      vk::Format requestedFormat = requestedFormats[i];
      auto it = std::find_if(formats.begin(), formats.end(),
                             [requestedFormat, requestedColorSpace](vk::SurfaceFormatKHR const& f) {
                               return (f.format == requestedFormat) && (f.colorSpace == requestedColorSpace);
                             });
      if (it != formats.end()) {
        pickedFormat = *it;
        break;
      }
    }
  }
  assert(pickedFormat.colorSpace == vk::ColorSpaceKHR::eSrgbNonlinear);
  return pickedFormat;
}

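// Illustrative usage sketch (hypothetical helper, not used by the renderer): how the pick* helpers above would
// normally feed createRenderPass. `physicalDevice`, `device`, and `surface` are assumed to be valid handles.
inline vk::UniqueRenderPass exampleCreateDefaultRenderPass(vk::PhysicalDevice const& physicalDevice,
                                                           vk::Device const& device, vk::SurfaceKHR const& surface) {
  vk::SurfaceFormatKHR surfaceFormat = pickSurfaceFormat(physicalDevice.getSurfaceFormatsKHR(surface).value);
  vk::Format depthFormat = pickDepthFormat(physicalDevice);
  return createRenderPass(device, surfaceFormat.format, depthFormat, vk::AttachmentLoadOp::eClear,
                          vk::AttachmentStoreOp::eStore, vk::AttachmentLoadOp::eClear,
                          vk::AttachmentStoreOp::eDontCare, vk::ImageLayout::ePresentSrcKHR);
}
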
void setImageLayout(vk::CommandBuffer const& commandBuffer, vk::Image image, vk::Format format,
                    vk::ImageLayout oldImageLayout, vk::ImageLayout newImageLayout) {
  vk::AccessFlags sourceAccessMask;
  switch (oldImageLayout) {
  case vk::ImageLayout::eTransferDstOptimal:
    sourceAccessMask = vk::AccessFlagBits::eTransferWrite;
    break;
  case vk::ImageLayout::ePreinitialized:
    sourceAccessMask = vk::AccessFlagBits::eHostWrite;
    break;
  case vk::ImageLayout::eGeneral: // sourceAccessMask is empty
  case vk::ImageLayout::eUndefined:
    break;
  default:
    assert(false);
    break;
  }

  vk::PipelineStageFlags sourceStage;
  switch (oldImageLayout) {
  case vk::ImageLayout::eGeneral:
  case vk::ImageLayout::ePreinitialized:
    sourceStage = vk::PipelineStageFlagBits::eHost;
    break;
  case vk::ImageLayout::eTransferDstOptimal:
    sourceStage = vk::PipelineStageFlagBits::eTransfer;
    break;
  case vk::ImageLayout::eUndefined:
    sourceStage = vk::PipelineStageFlagBits::eTopOfPipe;
    break;
  default:
    assert(false);
    break;
  }

  vk::AccessFlags destinationAccessMask;
  switch (newImageLayout) {
  case vk::ImageLayout::eColorAttachmentOptimal:
    destinationAccessMask = vk::AccessFlagBits::eColorAttachmentWrite;
    break;
  case vk::ImageLayout::eDepthStencilAttachmentOptimal:
    destinationAccessMask =
        vk::AccessFlagBits::eDepthStencilAttachmentRead | vk::AccessFlagBits::eDepthStencilAttachmentWrite;
    break;
  case vk::ImageLayout::eGeneral: // empty destinationAccessMask
  case vk::ImageLayout::ePresentSrcKHR:
    break;
  case vk::ImageLayout::eShaderReadOnlyOptimal:
    destinationAccessMask = vk::AccessFlagBits::eShaderRead;
    break;
  case vk::ImageLayout::eTransferSrcOptimal:
    destinationAccessMask = vk::AccessFlagBits::eTransferRead;
    break;
  case vk::ImageLayout::eTransferDstOptimal:
    destinationAccessMask = vk::AccessFlagBits::eTransferWrite;
    break;
  default:
    assert(false);
    break;
  }

  vk::PipelineStageFlags destinationStage;
  switch (newImageLayout) {
  case vk::ImageLayout::eColorAttachmentOptimal:
    destinationStage = vk::PipelineStageFlagBits::eColorAttachmentOutput;
    break;
  case vk::ImageLayout::eDepthStencilAttachmentOptimal:
    destinationStage = vk::PipelineStageFlagBits::eEarlyFragmentTests;
    break;
  case vk::ImageLayout::eGeneral:
    destinationStage = vk::PipelineStageFlagBits::eHost;
    break;
  case vk::ImageLayout::ePresentSrcKHR:
    destinationStage = vk::PipelineStageFlagBits::eBottomOfPipe;
    break;
  case vk::ImageLayout::eShaderReadOnlyOptimal:
    destinationStage = vk::PipelineStageFlagBits::eFragmentShader;
    break;
  case vk::ImageLayout::eTransferDstOptimal:
  case vk::ImageLayout::eTransferSrcOptimal:
    destinationStage = vk::PipelineStageFlagBits::eTransfer;
    break;
  default:
    assert(false);
    break;
  }

  vk::ImageAspectFlags aspectMask;
  if (newImageLayout == vk::ImageLayout::eDepthStencilAttachmentOptimal) {
    aspectMask = vk::ImageAspectFlagBits::eDepth;
    if (format == vk::Format::eD32SfloatS8Uint || format == vk::Format::eD24UnormS8Uint) {
      aspectMask |= vk::ImageAspectFlagBits::eStencil;
    }
  } else {
    aspectMask = vk::ImageAspectFlagBits::eColor;
  }

  vk::ImageSubresourceRange imageSubresourceRange(aspectMask, 0, 1, 0, 1);
  vk::ImageMemoryBarrier imageMemoryBarrier(sourceAccessMask, destinationAccessMask, oldImageLayout, newImageLayout,
                                            VK_QUEUE_FAMILY_IGNORED, VK_QUEUE_FAMILY_IGNORED, image,
                                            imageSubresourceRange);
  return commandBuffer.pipelineBarrier(sourceStage, destinationStage, {}, nullptr, nullptr, imageMemoryBarrier);
}

void submitAndWait(vk::Device const& device, vk::Queue const& queue, vk::CommandBuffer const& commandBuffer) {
  vk::Fence fence = device.createFence(vk::FenceCreateInfo()).value;
  queue.submit(vk::SubmitInfo(0, nullptr, nullptr, 1, &commandBuffer), fence);
  while (vk::Result::eTimeout == device.waitForFences(fence, VK_TRUE, vk::su::FenceTimeout)) {}
  device.destroyFence(fence);
}

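// Illustrative usage sketch (hypothetical helper): recording a one-off layout transition with setImageLayout and
// waiting for it via submitAndWait. `device`, `queue`, `commandBuffer`, and `image` are assumed to be valid, with
// the command buffer allocated from a pool of `queue`'s family and not currently recording.
inline void exampleTransitionToTransferDst(vk::Device const& device, vk::Queue const& queue,
                                           vk::CommandBuffer const& commandBuffer, vk::Image image) {
  commandBuffer.begin(vk::CommandBufferBeginInfo(vk::CommandBufferUsageFlagBits::eOneTimeSubmit));
  setImageLayout(commandBuffer, image, vk::Format::eR8G8B8A8Unorm, vk::ImageLayout::eUndefined,
                 vk::ImageLayout::eTransferDstOptimal);
  commandBuffer.end();
  submitAndWait(device, queue, commandBuffer);
}
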
void updateDescriptorSets(
    vk::Device const& device, vk::DescriptorSet const& descriptorSet,
    std::vector<std::tuple<vk::DescriptorType, vk::Buffer const&, vk::DeviceSize>> const& bufferData,
    uint32_t bindingOffset) {
  std::vector<vk::DescriptorBufferInfo> bufferInfos;
  bufferInfos.reserve(bufferData.size());

  std::vector<vk::WriteDescriptorSet> writeDescriptorSets;
  writeDescriptorSets.reserve(bufferData.size() + 1);
  uint32_t dstBinding = bindingOffset;
  for (auto const& bd : bufferData) {
    bufferInfos.emplace_back(std::get<1>(bd), 0, std::get<2>(bd));
    writeDescriptorSets.emplace_back(descriptorSet, dstBinding++, 0, 1, std::get<0>(bd), nullptr, &bufferInfos.back());
  }

  device.updateDescriptorSets(writeDescriptorSets, nullptr);
}

BufferData::BufferData(vk::PhysicalDevice const& physicalDevice, vk::Device const& device, vk::DeviceSize size,
                       vk::BufferUsageFlags usage, vk::MemoryPropertyFlags propertyFlags)
: size(size)
#if !defined(NDEBUG)
, m_usage(usage)
, m_propertyFlags(propertyFlags)
#endif
{
  buffer = assertSuccess(device.createBufferUnique(vk::BufferCreateInfo(vk::BufferCreateFlags(), size, usage)));
  deviceMemory = vk::su::allocateDeviceMemory(device, physicalDevice.getMemoryProperties(),
                                              device.getBufferMemoryRequirements(buffer.get()), propertyFlags);
  device.bindBufferMemory(buffer.get(), deviceMemory.get(), 0);
}

DepthBufferData::DepthBufferData(vk::PhysicalDevice const& physicalDevice, vk::Device const& device, vk::Format format,
                                 vk::Extent2D const& extent)
: ImageData(physicalDevice, device, format, extent, vk::ImageTiling::eOptimal,
            vk::ImageUsageFlagBits::eDepthStencilAttachment, vk::ImageLayout::eUndefined,
            vk::MemoryPropertyFlagBits::eDeviceLocal, vk::ImageAspectFlagBits::eDepth) {}

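// Illustrative usage sketch (hypothetical helper): a small host-visible uniform buffer built from BufferData and
// filled with copyToDevice from the header. The 16-float payload is just a placeholder; `physicalDevice` and
// `device` are assumed to be valid handles.
inline BufferData exampleCreateUniformBuffer(vk::PhysicalDevice const& physicalDevice, vk::Device const& device) {
  float mvp[16] = {};  // placeholder matrix contents
  BufferData uniformBuffer(physicalDevice, device, sizeof(mvp), vk::BufferUsageFlagBits::eUniformBuffer);
  copyToDevice(device, uniformBuffer.deviceMemory.get(), mvp, 16);
  return uniformBuffer;
}
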
ImageData::ImageData(vk::PhysicalDevice const& physicalDevice, vk::Device const& device, vk::Format format_,
                     vk::Extent2D const& extent, vk::ImageTiling tiling, vk::ImageUsageFlags usage,
                     vk::ImageLayout initialLayout, vk::MemoryPropertyFlags memoryProperties,
                     vk::ImageAspectFlags aspectMask)
: format(format_) {
  vk::ImageCreateInfo imageCreateInfo(vk::ImageCreateFlags(), vk::ImageType::e2D, format, vk::Extent3D(extent, 1), 1, 1,
                                      vk::SampleCountFlagBits::e1, tiling, usage | vk::ImageUsageFlagBits::eSampled,
                                      vk::SharingMode::eExclusive, {}, initialLayout);
  image = assertSuccess(device.createImageUnique(imageCreateInfo));

  auto memoryRequirements = device.getImageMemoryRequirements(image.get());
  deviceSize = memoryRequirements.size;
  deviceMemory = vk::su::allocateDeviceMemory(device, physicalDevice.getMemoryProperties(), memoryRequirements,
                                              memoryProperties);

  device.bindImageMemory(image.get(), deviceMemory.get(), 0);

  vk::ComponentMapping componentMapping(vk::ComponentSwizzle::eR, vk::ComponentSwizzle::eG, vk::ComponentSwizzle::eB,
                                        vk::ComponentSwizzle::eA);
  vk::ImageSubresourceRange imageSubresourceRange(aspectMask, 0, 1, 0, 1);
  vk::ImageViewCreateInfo imageViewCreateInfo({}, image.get(), vk::ImageViewType::e2D, format, componentMapping,
                                              imageSubresourceRange);
  imageView = assertSuccess(device.createImageViewUnique(imageViewCreateInfo));
}

TextureData::TextureData(vk::PhysicalDevice const& physicalDevice, vk::Device const& device,
                         vk::Extent2D const& extent_, vk::ImageUsageFlags usageFlags,
                         vk::FormatFeatureFlags formatFeatureFlags, bool anisotropyEnable, bool forceStaging)
: format(vk::Format::eR8G8B8A8Unorm), extent(extent_) {
  vk::FormatProperties formatProperties = physicalDevice.getFormatProperties(format);

  formatFeatureFlags |= vk::FormatFeatureFlagBits::eSampledImage;
  needsStaging = forceStaging || ((formatProperties.linearTilingFeatures & formatFeatureFlags) != formatFeatureFlags);
  vk::ImageTiling imageTiling;
  vk::ImageLayout initialLayout;
  vk::MemoryPropertyFlags requirements;
  if (needsStaging) {
    assert((formatProperties.optimalTilingFeatures & formatFeatureFlags) == formatFeatureFlags);
    stagingBufferData = std::make_unique<BufferData>(physicalDevice, device, extent.width * extent.height * 4,
                                                     vk::BufferUsageFlagBits::eTransferSrc);
    imageTiling = vk::ImageTiling::eOptimal;
    usageFlags |= vk::ImageUsageFlagBits::eTransferDst;
    initialLayout = vk::ImageLayout::eUndefined;
  } else {
    imageTiling = vk::ImageTiling::eLinear;
    initialLayout = vk::ImageLayout::ePreinitialized;
    requirements = vk::MemoryPropertyFlagBits::eHostCoherent | vk::MemoryPropertyFlagBits::eHostVisible;
  }
  imageData = std::make_unique<ImageData>(physicalDevice, device, format, extent, imageTiling,
                                          usageFlags | vk::ImageUsageFlagBits::eSampled, initialLayout, requirements,
                                          vk::ImageAspectFlagBits::eColor);

  sampler = assertSuccess(device.createSamplerUnique(vk::SamplerCreateInfo(
      vk::SamplerCreateFlags(), vk::Filter::eLinear, vk::Filter::eLinear, vk::SamplerMipmapMode::eLinear,
      vk::SamplerAddressMode::eRepeat, vk::SamplerAddressMode::eRepeat, vk::SamplerAddressMode::eRepeat, 0.0f,
      static_cast<vk::Bool32>(anisotropyEnable), 16.0f, VK_FALSE, vk::CompareOp::eNever, 0.0f, 0.0f,
      vk::BorderColor::eFloatOpaqueBlack)));
}

UUID::UUID(uint8_t const data[VK_UUID_SIZE]) { memcpy(m_data, data, VK_UUID_SIZE * sizeof(uint8_t)); }

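// Illustrative usage sketch (hypothetical helper): filling a TextureData with a solid color through its setImage
// generator callback. `device` and `commandBuffer` are assumed valid, the command buffer already recording, and
// `texture` created with the constructor above (defaults to a 256x256 eR8G8B8A8Unorm image); the generator here
// ignores row pitch, which is only safe for tightly packed staging memory.
inline void exampleFillTextureRed(vk::Device const& device, vk::CommandBuffer const& commandBuffer,
                                  TextureData& texture) {
  texture.setImage(device, commandBuffer, [](void* data, vk::Extent2D const& extent) {
    auto* pixels = static_cast<uint8_t*>(data);
    for (uint32_t i = 0; i < extent.width * extent.height; ++i) {
      pixels[4 * i + 0] = 0xff;  // R
      pixels[4 * i + 1] = 0x00;  // G
      pixels[4 * i + 2] = 0x00;  // B
      pixels[4 * i + 3] = 0xff;  // A
    }
  });
}
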
vk::DebugUtilsMessengerCreateInfoEXT makeDebugUtilsMessengerCreateInfoEXT() {
  return {{},
          vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning | vk::DebugUtilsMessageSeverityFlagBitsEXT::eError,
          vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral | vk::DebugUtilsMessageTypeFlagBitsEXT::ePerformance |
              vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation,
          &vk::su::debugUtilsMessengerCallback};
}

#if defined(NDEBUG)
vk::StructureChain<vk::InstanceCreateInfo>
#elif defined(VULKAN_HPP_UTILS_USE_BEST_PRACTICES)
vk::StructureChain<vk::InstanceCreateInfo, vk::DebugUtilsMessengerCreateInfoEXT, vk::ValidationFeaturesEXT>
#else
vk::StructureChain<vk::InstanceCreateInfo, vk::DebugUtilsMessengerCreateInfoEXT>
#endif
makeInstanceCreateInfoChain(vk::ApplicationInfo const& applicationInfo, std::vector<char const*> const& enabledLayers,
                            std::vector<char const*> const& enabledExtensions) {
#if defined(NDEBUG)
  // in non-debug mode just use the InstanceCreateInfo for instance creation
  vk::StructureChain<vk::InstanceCreateInfo> instanceCreateInfo(
      {{}, &applicationInfo, enabledLayers, enabledExtensions});
#else
  // in debug mode, additionally use the debugUtilsMessengerCallback in instance creation!
  vk::DebugUtilsMessageSeverityFlagsEXT severityFlags(vk::DebugUtilsMessageSeverityFlagBitsEXT::eWarning |
                                                      vk::DebugUtilsMessageSeverityFlagBitsEXT::eError);
  vk::DebugUtilsMessageTypeFlagsEXT messageTypeFlags(vk::DebugUtilsMessageTypeFlagBitsEXT::eGeneral |
                                                     vk::DebugUtilsMessageTypeFlagBitsEXT::ePerformance |
                                                     vk::DebugUtilsMessageTypeFlagBitsEXT::eValidation);
#if defined(VULKAN_HPP_UTILS_USE_BEST_PRACTICES)
  vk::ValidationFeatureEnableEXT validationFeatureEnable = vk::ValidationFeatureEnableEXT::eBestPractices;
  vk::StructureChain<vk::InstanceCreateInfo, vk::DebugUtilsMessengerCreateInfoEXT, vk::ValidationFeaturesEXT>
      instanceCreateInfo({{}, &applicationInfo, enabledLayers, enabledExtensions},
                         {{}, severityFlags, messageTypeFlags, &vk::su::debugUtilsMessengerCallback},
                         {validationFeatureEnable});
#else
  vk::StructureChain<vk::InstanceCreateInfo, vk::DebugUtilsMessengerCreateInfoEXT> instanceCreateInfo(
      {{}, &applicationInfo, enabledLayers, enabledExtensions},
      {{}, severityFlags, messageTypeFlags, &vk::su::debugUtilsMessengerCallback});
#endif
#endif
  return instanceCreateInfo;
}

} // namespace vk::su

std::ostream& operator<<(std::ostream& os, vk::su::UUID const& uuid) {
  os << std::setfill('0') << std::hex;
  for (uint32_t j = 0; j < VK_UUID_SIZE; ++j) {
    os << std::setw(2) << static_cast<uint32_t>(uuid.m_data[j]);
    if (j == 3 || j == 5 || j == 7 || j == 9) {
      os << '-';
    }
  }
  os << std::setfill(' ') << std::dec;
  return os;
}

@ -0,0 +1,303 @@
#pragma once

// Copyright(c) 2019, NVIDIA CORPORATION. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
//

#define VULKAN_HPP_NO_EXCEPTIONS 1
#define VULKAN_HPP_ASSERT_ON_RESULT
//#define VULKAN_HPP_UTILS_USE_BEST_PRACTICES 1
#define VULKAN_HPP_DISPATCH_LOADER_DYNAMIC 1

#include <vulkan/vulkan.hpp>

#include <iostream>
#include <map>

namespace vk::su {
template <typename T>
inline static T assertSuccess(vk::ResultValue<T> result) {
  assert(result.result == vk::Result::eSuccess);
  return std::move(result.value);
}

const uint64_t FenceTimeout = 100000000;

template <typename Func>
void oneTimeSubmit(vk::CommandBuffer const& commandBuffer, vk::Queue const& queue, Func const& func) {
  commandBuffer.begin(vk::CommandBufferBeginInfo(vk::CommandBufferUsageFlagBits::eOneTimeSubmit));
  func(commandBuffer);
  commandBuffer.end();
  queue.submit(vk::SubmitInfo(0, nullptr, nullptr, 1, &commandBuffer), nullptr);
  queue.waitIdle();
}

template <typename Func>
void oneTimeSubmit(vk::Device const& device, vk::CommandPool const& commandPool, vk::Queue const& queue,
                   Func const& func) {
  vk::CommandBuffer commandBuffer =
      device.allocateCommandBuffers(vk::CommandBufferAllocateInfo(commandPool, vk::CommandBufferLevel::ePrimary, 1))
          .value.front();
  oneTimeSubmit(commandBuffer, queue, func);
}

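// Illustrative usage sketch (hypothetical helper): oneTimeSubmit with a lambda that records a single buffer copy.
// All handles are assumed to be valid and created by the caller; the copy has completed when this returns because
// oneTimeSubmit waits for the queue to go idle.
inline void exampleOneTimeCopy(vk::Device const& device, vk::CommandPool const& commandPool, vk::Queue const& queue,
                               vk::Buffer src, vk::Buffer dst, vk::DeviceSize size) {
  oneTimeSubmit(device, commandPool, queue, [&](vk::CommandBuffer const& commandBuffer) {
    commandBuffer.copyBuffer(src, dst, vk::BufferCopy(0, 0, size));
  });
}
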
template <class T>
void copyToDevice(vk::Device const& device, vk::DeviceMemory const& deviceMemory, T const* pData, size_t count,
                  vk::DeviceSize stride = sizeof(T)) {
  assert(sizeof(T) <= stride);
  uint8_t* deviceData = static_cast<uint8_t*>(device.mapMemory(deviceMemory, 0, count * stride).value);
  if (stride == sizeof(T)) {
    memcpy(deviceData, pData, count * sizeof(T));
  } else {
    for (size_t i = 0; i < count; i++) {
      memcpy(deviceData, &pData[i], sizeof(T));
      deviceData += stride;
    }
  }
  device.unmapMemory(deviceMemory);
}

template <class T>
void copyToDevice(vk::Device const& device, vk::DeviceMemory const& deviceMemory, T const& data) {
  copyToDevice<T>(device, deviceMemory, &data, 1);
}

template <class T>
VULKAN_HPP_INLINE constexpr const T& clamp(const T& v, const T& lo, const T& hi) {
  return v < lo ? lo : hi < v ? hi : v;
}

void setImageLayout(vk::CommandBuffer const& commandBuffer, vk::Image image, vk::Format format,
                    vk::ImageLayout oldImageLayout, vk::ImageLayout newImageLayout);

struct BufferData {
  BufferData(vk::PhysicalDevice const& physicalDevice, vk::Device const& device, vk::DeviceSize size,
             vk::BufferUsageFlags usage,
             vk::MemoryPropertyFlags propertyFlags = vk::MemoryPropertyFlagBits::eHostVisible |
                                                     vk::MemoryPropertyFlagBits::eHostCoherent);

  template <typename DataType>
  void upload(vk::Device const& device, DataType const& data) const {
    assert((m_propertyFlags & vk::MemoryPropertyFlagBits::eHostCoherent) &&
           (m_propertyFlags & vk::MemoryPropertyFlagBits::eHostVisible));
    assert(sizeof(DataType) <= size);

    void* dataPtr = assertSuccess(device.mapMemory(deviceMemory.get(), 0, sizeof(DataType)));
    memcpy(dataPtr, &data, sizeof(DataType));
    device.unmapMemory(deviceMemory.get());
  }

  template <typename DataType>
  void upload(vk::Device const& device, std::vector<DataType> const& data, size_t stride = 0) const {
    assert(m_propertyFlags & vk::MemoryPropertyFlagBits::eHostVisible);

    size_t elementSize = stride ? stride : sizeof(DataType);
    assert(sizeof(DataType) <= elementSize);

    copyToDevice(device, deviceMemory.get(), data.data(), data.size(), elementSize);
  }

  template <typename DataType>
  void upload(vk::PhysicalDevice const& physicalDevice, vk::Device const& device, vk::CommandPool const& commandPool,
              vk::Queue queue, std::vector<DataType> const& data, size_t stride) const {
    assert(m_usage & vk::BufferUsageFlagBits::eTransferDst);
    assert(m_propertyFlags & vk::MemoryPropertyFlagBits::eDeviceLocal);

    size_t elementSize = stride ? stride : sizeof(DataType);
    assert(sizeof(DataType) <= elementSize);

    size_t dataSize = data.size() * elementSize;
    assert(dataSize <= size);

    vk::su::BufferData stagingBuffer(physicalDevice, device, dataSize, vk::BufferUsageFlagBits::eTransferSrc);
    copyToDevice(device, stagingBuffer.deviceMemory.get(), data.data(), data.size(), elementSize);

    vk::su::oneTimeSubmit(device, commandPool, queue, [&](vk::CommandBuffer const& commandBuffer) {
      commandBuffer.copyBuffer(stagingBuffer.buffer.get(), buffer.get(), vk::BufferCopy(0, 0, dataSize));
    });
  }

  void* map(vk::Device const& device) { return assertSuccess(device.mapMemory(deviceMemory.get(), 0, size)); }

  void unmap(vk::Device const& device) { device.unmapMemory(deviceMemory.get()); }

  vk::UniqueBuffer buffer;
  vk::UniqueDeviceMemory deviceMemory;
  vk::DeviceSize size;
#if !defined(NDEBUG)
private:
  vk::BufferUsageFlags m_usage;
  vk::MemoryPropertyFlags m_propertyFlags;
#endif
};

struct ImageData {
  ImageData(vk::PhysicalDevice const& physicalDevice, vk::Device const& device, vk::Format format,
            vk::Extent2D const& extent, vk::ImageTiling tiling, vk::ImageUsageFlags usage,
            vk::ImageLayout initialLayout, vk::MemoryPropertyFlags memoryProperties, vk::ImageAspectFlags aspectMask);

  vk::Format format;
  vk::UniqueImage image;
  vk::UniqueDeviceMemory deviceMemory;
  vk::DeviceSize deviceSize;
  vk::UniqueImageView imageView;
};

struct DepthBufferData : public ImageData {
  DepthBufferData(vk::PhysicalDevice const& physicalDevice, vk::Device const& device, vk::Format format,
                  vk::Extent2D const& extent);
};

struct TextureData {
  TextureData(vk::PhysicalDevice const& physicalDevice, vk::Device const& device,
              vk::Extent2D const& extent_ = {256, 256}, vk::ImageUsageFlags usageFlags = {},
              vk::FormatFeatureFlags formatFeatureFlags = {}, bool anisotropyEnable = false, bool forceStaging = false);

  template <typename ImageGenerator>
  void setImage(vk::Device const& device, vk::CommandBuffer const& commandBuffer,
                ImageGenerator const& imageGenerator) {
    void* data =
        needsStaging
            ? assertSuccess(device.mapMemory(stagingBufferData->deviceMemory.get(), 0, stagingBufferData->size))
            : assertSuccess(device.mapMemory(imageData->deviceMemory.get(), 0, imageData->deviceSize));
    imageGenerator(data, extent);
    device.unmapMemory(needsStaging ? stagingBufferData->deviceMemory.get() : imageData->deviceMemory.get());

    if (needsStaging) {
      // Since we're going to blit to the texture image, set its layout to eTransferDstOptimal
      vk::su::setImageLayout(commandBuffer, imageData->image.get(), imageData->format, vk::ImageLayout::eUndefined,
                             vk::ImageLayout::eTransferDstOptimal);
      vk::BufferImageCopy copyRegion(0, extent.width, extent.height,
                                     vk::ImageSubresourceLayers(vk::ImageAspectFlagBits::eColor, 0, 0, 1),
                                     vk::Offset3D(0, 0, 0), vk::Extent3D(extent, 1));
      commandBuffer.copyBufferToImage(stagingBufferData->buffer.get(), imageData->image.get(),
                                      vk::ImageLayout::eTransferDstOptimal, copyRegion);
      // Set the layout for the texture image from eTransferDstOptimal to SHADER_READ_ONLY
      vk::su::setImageLayout(commandBuffer, imageData->image.get(), imageData->format,
                             vk::ImageLayout::eTransferDstOptimal, vk::ImageLayout::eShaderReadOnlyOptimal);
    } else {
      // If we can use the linear tiled image as a texture, just do it
      vk::su::setImageLayout(commandBuffer, imageData->image.get(), imageData->format, vk::ImageLayout::ePreinitialized,
                             vk::ImageLayout::eShaderReadOnlyOptimal);
    }
  }

  vk::Format format;
  vk::Extent2D extent;
  bool needsStaging;
  std::unique_ptr<BufferData> stagingBufferData;
  std::unique_ptr<ImageData> imageData;
  vk::UniqueSampler sampler;
};

struct UUID {
public:
  UUID(uint8_t const data[VK_UUID_SIZE]);

  uint8_t m_data[VK_UUID_SIZE];
};

template <typename TargetType, typename SourceType>
VULKAN_HPP_INLINE TargetType checked_cast(SourceType value) {
  static_assert(sizeof(TargetType) <= sizeof(SourceType), "No need to cast from smaller to larger type!");
  static_assert(std::numeric_limits<SourceType>::is_integer, "Only integer types supported!");
  static_assert(!std::numeric_limits<SourceType>::is_signed, "Only unsigned types supported!");
  static_assert(std::numeric_limits<TargetType>::is_integer, "Only integer types supported!");
  static_assert(!std::numeric_limits<TargetType>::is_signed, "Only unsigned types supported!");
  assert(value <= std::numeric_limits<TargetType>::max());
  return static_cast<TargetType>(value);
}

vk::UniqueDeviceMemory allocateDeviceMemory(vk::Device const& device,
                                            vk::PhysicalDeviceMemoryProperties const& memoryProperties,
                                            vk::MemoryRequirements const& memoryRequirements,
                                            vk::MemoryPropertyFlags memoryPropertyFlags);
bool contains(std::vector<vk::ExtensionProperties> const& extensionProperties, std::string const& extensionName);
vk::UniqueCommandPool createCommandPool(vk::Device const& device, uint32_t queueFamilyIndex);
vk::UniqueCommandBuffer createCommandBuffer(vk::Device const& device, vk::CommandPool const& commandPool);
vk::DebugUtilsMessengerEXT createDebugUtilsMessengerEXT(vk::Instance const& instance);
vk::UniqueDescriptorPool createDescriptorPool(vk::Device const& device,
                                              std::vector<vk::DescriptorPoolSize> const& poolSizes);
vk::UniqueDescriptorSetLayout createDescriptorSetLayout(
    vk::Device const& device,
    std::vector<std::tuple<vk::DescriptorType, uint32_t, vk::ShaderStageFlags>> const& bindingData,
    vk::DescriptorSetLayoutCreateFlags flags = {});
vk::UniqueDevice createDevice(vk::PhysicalDevice const& physicalDevice, uint32_t queueFamilyIndex,
                              std::vector<std::string> const& extensions = {},
                              vk::PhysicalDeviceFeatures const* physicalDeviceFeatures = nullptr,
                              void const* pNext = nullptr);
vk::UniqueFramebuffer createFramebuffer(vk::Device const& device, vk::RenderPass& renderPass,
                                        vk::ImageView const& colorImageView, vk::ImageView const& depthImageView,
                                        vk::Extent2D const& extent);
vk::UniquePipeline createGraphicsPipeline(
    vk::Device const& device, vk::PipelineCache const& pipelineCache,
    std::pair<vk::ShaderModule, vk::SpecializationInfo const*> const& vertexShaderData,
    std::pair<vk::ShaderModule, vk::SpecializationInfo const*> const& fragmentShaderData, uint32_t vertexStride,
    std::vector<std::pair<vk::Format, uint32_t>> const& vertexInputAttributeFormatOffset, vk::FrontFace frontFace,
    bool depthBuffered, vk::PipelineLayout const& pipelineLayout, vk::RenderPass const& renderPass);
vk::UniqueInstance createInstance(std::string const& appName, std::string const& engineName,
                                  std::vector<std::string> const& layers = {},
                                  std::vector<std::string> const& extensions = {},
                                  uint32_t apiVersion = VK_API_VERSION_1_0);
vk::UniqueRenderPass createRenderPass(vk::Device const& device, vk::Format colorFormat, vk::Format depthFormat,
                                      vk::AttachmentLoadOp colorLoadOp = vk::AttachmentLoadOp::eDontCare,
                                      vk::AttachmentStoreOp colorStoreOp = vk::AttachmentStoreOp::eStore,
                                      vk::AttachmentLoadOp depthLoadOp = vk::AttachmentLoadOp::eClear,
                                      vk::AttachmentStoreOp depthStoreOp = vk::AttachmentStoreOp::eDontCare,
                                      vk::ImageLayout colorFinalLayout = vk::ImageLayout::ePresentSrcKHR);
VKAPI_ATTR VkBool32 VKAPI_CALL debugUtilsMessengerCallback(VkDebugUtilsMessageSeverityFlagBitsEXT messageSeverity,
                                                           VkDebugUtilsMessageTypeFlagsEXT messageTypes,
                                                           VkDebugUtilsMessengerCallbackDataEXT const* pCallbackData,
                                                           void* /*pUserData*/);
uint32_t findGraphicsQueueFamilyIndex(std::vector<vk::QueueFamilyProperties> const& queueFamilyProperties);
std::pair<uint32_t, uint32_t> findGraphicsAndPresentQueueFamilyIndex(vk::PhysicalDevice physicalDevice,
                                                                     vk::SurfaceKHR const& surface);
uint32_t findMemoryType(vk::PhysicalDeviceMemoryProperties const& memoryProperties, uint32_t typeBits,
                        vk::MemoryPropertyFlags requirementsMask);
std::vector<char const*> gatherExtensions(std::vector<std::string> const& extensions
#if !defined(NDEBUG)
                                          ,
                                          std::vector<vk::ExtensionProperties> const& extensionProperties
#endif
);
std::vector<char const*> gatherLayers(std::vector<std::string> const& layers
#if !defined(NDEBUG)
                                      ,
                                      std::vector<vk::LayerProperties> const& layerProperties
#endif
);
std::vector<std::string> getDeviceExtensions();
std::vector<std::string> getInstanceExtensions();
vk::DebugUtilsMessengerCreateInfoEXT makeDebugUtilsMessengerCreateInfoEXT();
#if defined(NDEBUG)
vk::StructureChain<vk::InstanceCreateInfo>
#elif defined(VULKAN_HPP_UTILS_USE_BEST_PRACTICES)
vk::StructureChain<vk::InstanceCreateInfo, vk::DebugUtilsMessengerCreateInfoEXT, vk::ValidationFeaturesEXT>
#else
vk::StructureChain<vk::InstanceCreateInfo, vk::DebugUtilsMessengerCreateInfoEXT>
#endif
makeInstanceCreateInfoChain(vk::ApplicationInfo const& applicationInfo, std::vector<char const*> const& enabledLayers,
                            std::vector<char const*> const& enabledExtensions);
vk::Format pickDepthFormat(vk::PhysicalDevice const& physicalDevice);
vk::PresentModeKHR pickPresentMode(std::vector<vk::PresentModeKHR> const& presentModes);
vk::SurfaceFormatKHR pickSurfaceFormat(std::vector<vk::SurfaceFormatKHR> const& formats);
void submitAndWait(vk::Device const& device, vk::Queue const& queue, vk::CommandBuffer const& commandBuffer);
void updateDescriptorSets(
    vk::Device const& device, vk::DescriptorSet const& descriptorSet,
    std::vector<std::tuple<vk::DescriptorType, vk::Buffer const&, vk::DeviceSize>> const& bufferData,
    uint32_t bindingOffset = 0);

} // namespace vk::su

std::ostream& operator<<(std::ostream& os, vk::su::UUID const& uuid);