
Start wiring up wgpu+winit

2022-01-31 19:06:54 -05:00
parent 5491fd75cf
commit e48435f11e
209 changed files with 24234 additions and 8085 deletions


@@ -5,7 +5,7 @@
#include "Runtime/Graphics/CGraphics.hpp"
#include <amuse/DSPCodec.hpp>
#include <hecl/Pipeline.hpp>
//#include <hecl/Pipeline.hpp>
#include <turbojpeg.h>
namespace metaforce {
@@ -27,7 +27,7 @@ static const u16 StaticVolumeLookup[] = {
     0x6CA2, 0x6E80, 0x7061, 0x7247, 0x7430, 0x761E, 0x7810, 0x7A06, 0x7C00, 0x7DFE, 0x8000};

 /* shared boo resources */
-static boo::ObjToken<boo::IShaderPipeline> YUVShaderPipeline;
+//static boo::ObjToken<boo::IShaderPipeline> YUVShaderPipeline;
 static tjhandle TjHandle = nullptr;

 /* RSF audio state */
@@ -46,20 +46,20 @@ static float SfxVolume = 1.f;
 static const char* BlockNames[] = {"SpecterViewBlock"};
 static const char* TexNames[] = {"texY", "texU", "texV"};

-void CMoviePlayer::Initialize(boo::IGraphicsDataFactory* factory) {
-  switch (factory->platform()) {
-  case boo::IGraphicsDataFactory::Platform::Vulkan:
-    g_PlatformMatrix.m[1][1] = -1.f;
-    break;
-  default:
-    break;
-  }
-  YUVShaderPipeline = hecl::conv->convert(Shader_CMoviePlayerShader{});
+void CMoviePlayer::Initialize() {
+  // switch (factory->platform()) {
+  // case boo::IGraphicsDataFactory::Platform::Vulkan:
+  //   g_PlatformMatrix.m[1][1] = -1.f;
+  //   break;
+  // default:
+  //   break;
+  // }
+  // YUVShaderPipeline = hecl::conv->convert(Shader_CMoviePlayerShader{});
   TjHandle = tjInitDecompress();
 }

 void CMoviePlayer::Shutdown() {
-  YUVShaderPipeline.reset();
+  // YUVShaderPipeline.reset();
   tjDestroy(TjHandle);
 }
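
Note: with YUVShaderPipeline stubbed out above, nothing performs the YUV-to-RGB conversion until a wgpu pipeline replaces it. For reference, a minimal CPU-side sketch of the conversion a shader like this typically performs, assuming full-range BT.601 coefficients (the usual choice for JPEG-derived YUV; the actual Shader_CMoviePlayerShader source is not part of this diff):

#include <algorithm>
#include <cstdint>

// Hypothetical helper: converts one full-range YUV sample (as turbojpeg
// produces) to RGB with BT.601 coefficients. The real conversion runs
// per-fragment on the GPU; this only documents the math.
inline void YuvToRgb(uint8_t y, uint8_t u, uint8_t v,
                     uint8_t& r, uint8_t& g, uint8_t& b) {
  const float yf = float(y);
  const float uf = float(u) - 128.f;
  const float vf = float(v) - 128.f;
  auto clamp8 = [](float x) { return uint8_t(std::clamp(x, 0.f, 255.f)); };
  r = clamp8(yf + 1.402f * vf);
  g = clamp8(yf - 0.344136f * uf - 0.714136f * vf);
  b = clamp8(yf + 1.772f * uf);
}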
@@ -202,50 +202,50 @@ CMoviePlayer::CMoviePlayer(const char* path, float preLoadSeconds, bool loop, bo
   xa0_bufferQueue.reserve(xf0_preLoadFrames);

   /* All set for GPU resources */
-  CGraphics::CommitResources([&](boo::IGraphicsDataFactory::Context& ctx) {
-    m_blockBuf = ctx.newDynamicBuffer(boo::BufferUse::Uniform, sizeof(m_viewVertBlock), 1);
-    m_vertBuf = ctx.newDynamicBuffer(boo::BufferUse::Vertex, sizeof(TexShaderVert), 4);
-
-    /* Allocate textures here (rather than at decode time) */
-    x80_textures.reserve(3);
-    for (int i = 0; i < 3; ++i) {
-      CTHPTextureSet& set = x80_textures.emplace_back();
-      if (deinterlace) {
-        /* metaforce addition: this way interlaced THPs don't look horrible */
-        set.Y[0] = ctx.newDynamicTexture(x6c_videoInfo.width, x6c_videoInfo.height / 2, boo::TextureFormat::I8,
-                                         boo::TextureClampMode::Repeat);
-        set.Y[1] = ctx.newDynamicTexture(x6c_videoInfo.width, x6c_videoInfo.height / 2, boo::TextureFormat::I8,
-                                         boo::TextureClampMode::Repeat);
-        set.U = ctx.newDynamicTexture(x6c_videoInfo.width / 2, x6c_videoInfo.height / 2, boo::TextureFormat::I8,
-                                      boo::TextureClampMode::Repeat);
-        set.V = ctx.newDynamicTexture(x6c_videoInfo.width / 2, x6c_videoInfo.height / 2, boo::TextureFormat::I8,
-                                      boo::TextureClampMode::Repeat);
-
-        boo::ObjToken<boo::IGraphicsBuffer> bufs[] = {m_blockBuf.get()};
-        for (int j = 0; j < 2; ++j) {
-          boo::ObjToken<boo::ITexture> texs[] = {set.Y[j].get(), set.U.get(), set.V.get()};
-          set.binding[j] = ctx.newShaderDataBinding(YUVShaderPipeline, m_vertBuf.get(), nullptr, nullptr, 1, bufs,
-                                                    nullptr, 3, texs, nullptr, nullptr);
-        }
-      } else {
-        /* normal progressive presentation */
-        set.Y[0] = ctx.newDynamicTexture(x6c_videoInfo.width, x6c_videoInfo.height, boo::TextureFormat::I8,
-                                         boo::TextureClampMode::Repeat);
-        set.U = ctx.newDynamicTexture(x6c_videoInfo.width / 2, x6c_videoInfo.height / 2, boo::TextureFormat::I8,
-                                      boo::TextureClampMode::Repeat);
-        set.V = ctx.newDynamicTexture(x6c_videoInfo.width / 2, x6c_videoInfo.height / 2, boo::TextureFormat::I8,
-                                      boo::TextureClampMode::Repeat);
-
-        boo::ObjToken<boo::IGraphicsBuffer> bufs[] = {m_blockBuf.get()};
-        boo::ObjToken<boo::ITexture> texs[] = {set.Y[0].get(), set.U.get(), set.V.get()};
-        set.binding[0] = ctx.newShaderDataBinding(YUVShaderPipeline, m_vertBuf.get(), nullptr, nullptr, 1, bufs,
-                                                  nullptr, 3, texs, nullptr, nullptr);
-      }
-      if (xf4_25_hasAudio)
-        set.audioBuf.reset(new s16[x28_thpHead.maxAudioSamples * 2]);
-    }
-    return true;
-  } BooTrace);
+  // CGraphics::CommitResources([&](boo::IGraphicsDataFactory::Context& ctx) {
+  //   m_blockBuf = ctx.newDynamicBuffer(boo::BufferUse::Uniform, sizeof(m_viewVertBlock), 1);
+  //   m_vertBuf = ctx.newDynamicBuffer(boo::BufferUse::Vertex, sizeof(TexShaderVert), 4);
+  //
+  //   /* Allocate textures here (rather than at decode time) */
+  //   x80_textures.reserve(3);
+  //   for (int i = 0; i < 3; ++i) {
+  //     CTHPTextureSet& set = x80_textures.emplace_back();
+  //     if (deinterlace) {
+  //       /* metaforce addition: this way interlaced THPs don't look horrible */
+  //       set.Y[0] = ctx.newDynamicTexture(x6c_videoInfo.width, x6c_videoInfo.height / 2, boo::TextureFormat::I8,
+  //                                        boo::TextureClampMode::Repeat);
+  //       set.Y[1] = ctx.newDynamicTexture(x6c_videoInfo.width, x6c_videoInfo.height / 2, boo::TextureFormat::I8,
+  //                                        boo::TextureClampMode::Repeat);
+  //       set.U = ctx.newDynamicTexture(x6c_videoInfo.width / 2, x6c_videoInfo.height / 2, boo::TextureFormat::I8,
+  //                                     boo::TextureClampMode::Repeat);
+  //       set.V = ctx.newDynamicTexture(x6c_videoInfo.width / 2, x6c_videoInfo.height / 2, boo::TextureFormat::I8,
+  //                                     boo::TextureClampMode::Repeat);
+  //
+  //       boo::ObjToken<boo::IGraphicsBuffer> bufs[] = {m_blockBuf.get()};
+  //       for (int j = 0; j < 2; ++j) {
+  //         boo::ObjToken<boo::ITexture> texs[] = {set.Y[j].get(), set.U.get(), set.V.get()};
+  //         set.binding[j] = ctx.newShaderDataBinding(YUVShaderPipeline, m_vertBuf.get(), nullptr, nullptr, 1, bufs,
+  //                                                   nullptr, 3, texs, nullptr, nullptr);
+  //       }
+  //     } else {
+  //       /* normal progressive presentation */
+  //       set.Y[0] = ctx.newDynamicTexture(x6c_videoInfo.width, x6c_videoInfo.height, boo::TextureFormat::I8,
+  //                                        boo::TextureClampMode::Repeat);
+  //       set.U = ctx.newDynamicTexture(x6c_videoInfo.width / 2, x6c_videoInfo.height / 2, boo::TextureFormat::I8,
+  //                                     boo::TextureClampMode::Repeat);
+  //       set.V = ctx.newDynamicTexture(x6c_videoInfo.width / 2, x6c_videoInfo.height / 2, boo::TextureFormat::I8,
+  //                                     boo::TextureClampMode::Repeat);
+  //
+  //       boo::ObjToken<boo::IGraphicsBuffer> bufs[] = {m_blockBuf.get()};
+  //       boo::ObjToken<boo::ITexture> texs[] = {set.Y[0].get(), set.U.get(), set.V.get()};
+  //       set.binding[0] = ctx.newShaderDataBinding(YUVShaderPipeline, m_vertBuf.get(), nullptr, nullptr, 1, bufs,
+  //                                                 nullptr, 3, texs, nullptr, nullptr);
+  //     }
+  //     if (xf4_25_hasAudio)
+  //       set.audioBuf.reset(new s16[x28_thpHead.maxAudioSamples * 2]);
+  //   }
+  //   return true;
+  // } BooTrace);

   /* Temporary planar YUV decode buffer, resulting planes copied to Boo */
   m_yuvBuf.reset(new uint8_t[tjBufSizeYUV(x6c_videoInfo.width, x6c_videoInfo.height, TJ_420)]);
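
Note: the decode buffer allocated above is sliced by fixed offsets in DecodeFromRead further down. A sketch of that 4:2:0 layout math, assuming tightly packed planes (tjBufSizeYUV may include row padding, so treat the offsets as illustrative; the names mirror planeSize/planeSizeHalf/planeSizeQuarter used below):

#include <cstddef>

// For 4:2:0 (TJ_420) the Y plane is full resolution and the U/V planes are
// halved in both axes, so each chroma plane is a quarter of the Y plane.
struct Yuv420Layout {
  size_t planeSize;        // Y plane: width * height bytes
  size_t planeSizeHalf;    // one interlaced field of Y: half the rows
  size_t planeSizeQuarter; // U or V plane: (width / 2) * (height / 2) bytes
};

inline Yuv420Layout ComputeYuv420Layout(size_t width, size_t height) {
  Yuv420Layout l;
  l.planeSize = width * height;
  l.planeSizeHalf = l.planeSize / 2;
  l.planeSizeQuarter = l.planeSize / 4;
  return l;
}
// U then starts at planeSize and V at planeSize + planeSizeQuarter,
// matching the offsets in DecodeFromRead.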
@@ -260,7 +260,7 @@ CMoviePlayer::CMoviePlayer(const char* path, float preLoadSeconds, bool loop, bo
   SetFrame({-0.5f, 0.5f, 0.f}, {-0.5f, -0.5f, 0.f}, {0.5f, -0.5f, 0.f}, {0.5f, 0.5f, 0.f});

   m_viewVertBlock.finalAssign(m_viewVertBlock);
-  m_blockBuf->load(&m_viewVertBlock, sizeof(m_viewVertBlock));
+  // m_blockBuf->load(&m_viewVertBlock, sizeof(m_viewVertBlock));
 }

 void CMoviePlayer::SetStaticAudioVolume(int vol) {
@@ -405,7 +405,7 @@ void CMoviePlayer::SetFrame(const zeus::CVector3f& a, const zeus::CVector3f& b,
   m_frame[1].m_pos = b;
   m_frame[2].m_pos = d;
   m_frame[3].m_pos = c;
-  m_vertBuf->load(m_frame, sizeof(m_frame));
+  // m_vertBuf->load(m_frame, sizeof(m_frame));
 }

 void CMoviePlayer::DrawFrame() {
@@ -415,8 +415,8 @@ void CMoviePlayer::DrawFrame() {
   /* draw appropriate field */
   CTHPTextureSet& tex = x80_textures[xd0_drawTexSlot];
-  CGraphics::SetShaderDataBinding(tex.binding[m_deinterlace ? (xfc_fieldIndex != 0) : 0]);
-  CGraphics::DrawArray(0, 4);
+  // CGraphics::SetShaderDataBinding(tex.binding[m_deinterlace ? (xfc_fieldIndex != 0) : 0]);
+  // CGraphics::DrawArray(0, 4);

   /* ensure second field is being displayed by VI to signal advance
    * (faked in metaforce with continuous xor) */
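
Note: the "continuous xor" mentioned above presumably flips the field index each frame so interlaced playback alternates between the two half-height Y textures. A hypothetical sketch of that selection pattern (the actual update of xfc_fieldIndex is outside this hunk):

// Sketch: pick the binding for the current field, then flip the index so the
// next DrawFrame presents the opposite field (the xor referred to above).
inline int SelectFieldAndAdvance(bool deinterlace, unsigned& fieldIndex) {
  const int binding = deinterlace ? (fieldIndex != 0) : 0;
  fieldIndex ^= 1;
  return binding;
}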
@@ -532,27 +532,27 @@ void CMoviePlayer::DecodeFromRead(const void* data) {
       if (m_deinterlace) {
         /* Deinterlace into 2 discrete 60-fps half-res textures */
-        u8* mappedData = (u8*)tex.Y[0]->map(planeSizeHalf);
-        for (unsigned y = 0; y < x6c_videoInfo.height / 2; ++y) {
-          memmove(mappedData + x6c_videoInfo.width * y, m_yuvBuf.get() + x6c_videoInfo.width * (y * 2),
-                  x6c_videoInfo.width);
-        }
-        tex.Y[0]->unmap();
-
-        mappedData = (u8*)tex.Y[1]->map(planeSizeHalf);
-        for (unsigned y = 0; y < x6c_videoInfo.height / 2; ++y) {
-          memmove(mappedData + x6c_videoInfo.width * y, m_yuvBuf.get() + x6c_videoInfo.width * (y * 2 + 1),
-                  x6c_videoInfo.width);
-        }
-        tex.Y[1]->unmap();
-
-        tex.U->load(m_yuvBuf.get() + planeSize, planeSizeQuarter);
-        tex.V->load(m_yuvBuf.get() + planeSize + planeSizeQuarter, planeSizeQuarter);
+        // u8* mappedData = (u8*)tex.Y[0]->map(planeSizeHalf);
+        // for (unsigned y = 0; y < x6c_videoInfo.height / 2; ++y) {
+        //   memmove(mappedData + x6c_videoInfo.width * y, m_yuvBuf.get() + x6c_videoInfo.width * (y * 2),
+        //           x6c_videoInfo.width);
+        // }
+        // tex.Y[0]->unmap();
+        //
+        // mappedData = (u8*)tex.Y[1]->map(planeSizeHalf);
+        // for (unsigned y = 0; y < x6c_videoInfo.height / 2; ++y) {
+        //   memmove(mappedData + x6c_videoInfo.width * y, m_yuvBuf.get() + x6c_videoInfo.width * (y * 2 + 1),
+        //           x6c_videoInfo.width);
+        // }
+        // tex.Y[1]->unmap();
+        //
+        // tex.U->load(m_yuvBuf.get() + planeSize, planeSizeQuarter);
+        // tex.V->load(m_yuvBuf.get() + planeSize + planeSizeQuarter, planeSizeQuarter);
       } else {
         /* Direct planar load */
-        tex.Y[0]->load(m_yuvBuf.get(), planeSize);
-        tex.U->load(m_yuvBuf.get() + planeSize, planeSizeQuarter);
-        tex.V->load(m_yuvBuf.get() + planeSize + planeSizeQuarter, planeSizeQuarter);
+        // tex.Y[0]->load(m_yuvBuf.get(), planeSize);
+        // tex.U->load(m_yuvBuf.get() + planeSize, planeSizeQuarter);
+        // tex.V->load(m_yuvBuf.get() + planeSize + planeSizeQuarter, planeSizeQuarter);
       }
       break;
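
Note: the deinterlace path disabled above ports directly to any texture-upload API: even source rows feed field 0, odd rows feed field 1, each producing a half-height plane. A self-contained sketch of that row split, with plain buffers standing in for the removed map/unmap and load calls:

#include <cstdint>
#include <cstring>

// Split an interlaced full-height luma plane into two half-height fields.
// field0 receives even rows (y * 2), field1 receives odd rows (y * 2 + 1),
// mirroring the memmove loops in the commented-out code above.
inline void DeinterlacePlane(const uint8_t* src, uint8_t* field0, uint8_t* field1,
                             unsigned width, unsigned height) {
  for (unsigned y = 0; y < height / 2; ++y) {
    std::memcpy(field0 + width * y, src + width * (y * 2), width);
    std::memcpy(field1 + width * y, src + width * (y * 2 + 1), width);
  }
}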