mirror of https://github.com/AxioDL/metaforce.git

Commit a86b5f8c1d: HECL Compiler bug fixes
Parent: 2148bc2459
@@ -558,21 +558,25 @@ BlenderConnection::DataStream::Mesh::Mesh(BlenderConnection& conn, int skinSlotC
 BlenderConnection::DataStream::Mesh::Material::Material
 (BlenderConnection& conn)
 {
-char buf[4096];
-conn._readLine(buf, 4096);
-source.assign(buf);
+uint32_t bufSz;
+conn._readBuf(&bufSz, 4);
+name.assign(bufSz, ' ');
+conn._readBuf(&name[0], bufSz);
+
+conn._readBuf(&bufSz, 4);
+source.assign(bufSz, ' ');
+conn._readBuf(&source[0], bufSz);
 
 uint32_t texCount;
 conn._readBuf(&texCount, 4);
 texs.reserve(texCount);
 for (uint32_t i=0 ; i<texCount ; ++i)
 {
-conn._readLine(buf, 4096);
-#if HECL_UCS2
-SystemString absolute = HECL::UTF8ToWide(buf);
-#else
-SystemString absolute(buf);
-#endif
+conn._readBuf(&bufSz, 4);
+std::string readStr(bufSz, ' ');
+conn._readBuf(&readStr[0], bufSz);
+SystemStringView absolute(readStr);
 SystemString relative =
 conn.m_loadedBlend.getProject().getProjectRootPath().getProjectRelativeFromAbsolute(absolute);
 texs.emplace_back(conn.m_loadedBlend.getProject().getProjectWorkingPath(), relative);
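Note on this hunk: the material strings move from newline-terminated text read with _readLine() to length-prefixed buffers, a native-endian 32-bit size followed by that many raw bytes (the matching struct.pack('I', ...) writes appear in the Blender-side hunks further down). A minimal sketch of the read pattern the constructor now repeats; the helper name is hypothetical and not part of the commit:

    // Sketch only: read one length-prefixed string the way the new
    // Material constructor does (32-bit size, then the raw bytes).
    static std::string _readStdString(BlenderConnection& conn)
    {
        uint32_t bufSz;
        conn._readBuf(&bufSz, 4);
        std::string ret(bufSz, ' ');
        conn._readBuf(&ret[0], bufSz);
        return ret;
    }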
@@ -308,6 +308,7 @@ public:
 /* HECL source of each material */
 struct Material
 {
+std::string name;
 std::string source;
 std::vector<ProjectPath> texs;
 
@@ -354,7 +354,7 @@ def shader(mat_obj, mesh_obj):
 tex_paths = [get_texture_path(name) for name in tex_list]
 
 if mat_obj.game_settings.alpha_blend == 'ALPHA' or mat_obj.game_settings.alpha_blend == 'ALPHA_SORT':
-return "HECLBlend(%s, %s)" % (color_trace_result, alpha_trace_result), tex_paths
+return "HECLAlpha(%s, %s)" % (color_trace_result, alpha_trace_result), tex_paths
 elif mat_obj.game_settings.alpha_blend == 'ADD':
 return "HECLAdditive(%s, %s)" % (color_trace_result, alpha_trace_result), tex_paths
 else:
@@ -95,11 +95,10 @@ def cook(writebuf, mesh_obj, max_skin_banks, max_octant_length=None):
 rna_loops = copy_mesh.loops
 
 # Filter out useless AABB points and send data
-aabb = bytearray()
-for comp in copy_obj.bound_box[0]:
-writebuf(struct.pack('f', comp))
-for comp in copy_obj.bound_box[6]:
-writebuf(struct.pack('f', comp))
+pt = copy_obj.bound_box[0]
+writebuf(struct.pack('fff', pt[0], pt[1], pt[2]))
+pt = copy_obj.bound_box[6]
+writebuf(struct.pack('fff', pt[0], pt[1], pt[2]))
 
 # Create master BMesh and VertPool
 bm_master = bmesh.new()
@@ -128,10 +127,14 @@ def cook(writebuf, mesh_obj, max_skin_banks, max_octant_length=None):
 for mat in bpy.data.materials:
 if mat.name.endswith('_%u_%u' % (grp_idx, mat_idx)):
 hecl_str, texs = HMDLShader.shader(mat, mesh_obj)
-writebuf((hecl_str + '\n').encode())
+writebuf(struct.pack('I', len(mat.name)))
+writebuf(mat.name.encode())
+writebuf(struct.pack('I', len(hecl_str)))
+writebuf(hecl_str.encode())
 writebuf(struct.pack('I', len(texs)))
 for tex in texs:
-writebuf((tex + '\n').encode())
+writebuf(struct.pack('I', len(tex)))
+writebuf(tex.encode())
 found = True
 break
 if not found:
@@ -141,10 +144,14 @@ def cook(writebuf, mesh_obj, max_skin_banks, max_octant_length=None):
 for mat_idx in sorted_material_idxs:
 mat = mesh_obj.data.materials[mat_idx]
 hecl_str, texs = HMDLShader.shader(mat, mesh_obj)
-writebuf((hecl_str + '\n').encode())
+writebuf(struct.pack('I', len(mat.name)))
+writebuf(mat.name.encode())
+writebuf(struct.pack('I', len(hecl_str)))
+writebuf(hecl_str.encode())
 writebuf(struct.pack('I', len(texs)))
 for tex in texs:
-writebuf((tex + '\n').encode())
+writebuf(struct.pack('I', len(tex)))
+writebuf(tex.encode())
 
 # Output vert pool
 vert_pool.write_out(writebuf, mesh_obj.vertex_groups)
@@ -26,5 +26,5 @@ endif()
 
 target_link_libraries(hecl
 ${DATA_SPEC_LIBS}
-HECLDatabase HECLBlender HECLCommon AthenaCore NOD
+HECLDatabase HECLBackend HECLFrontend HECLBlender HECLCommon AthenaCore NOD
 LogVisor AthenaLibYaml ${PNG_LIB} squish xxhash ${ZLIB_LIBRARIES} ${LZO_LIB} ${PLAT_LIBS})
@@ -201,8 +201,8 @@ struct GX : IBackend
 struct TEVStage
 {
 TevOp m_op = TEV_ADD;
-TevColorArg m_color[4] = {CC_ZERO, CC_ZERO, CC_ZERO, CC_CPREV};
-TevAlphaArg m_alpha[4] = {CA_ZERO, CA_ZERO, CA_ZERO, CA_APREV};
+TevColorArg m_color[4] = {CC_ZERO, CC_ZERO, CC_ZERO, CC_ZERO};
+TevAlphaArg m_alpha[4] = {CA_ZERO, CA_ZERO, CA_ZERO, CA_ZERO};
 TevKColorSel m_kColor = TEV_KCSEL_1;
 TevKAlphaSel m_kAlpha = TEV_KASEL_1;
 TevRegID m_regOut = TEVPREV;
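Note on this hunk: the fourth color/alpha argument is the TEV combiner's 'd' input. With the old CC_CPREV/CA_APREV defaults every new stage implicitly accumulated the previous register; defaulting to ZERO makes each stage self-contained, and the trace code in the hunks below now sets CC_CPREV/CA_APREV explicitly only where chaining is intended. For reference, the GX TEV stage is commonly documented as computing (sign chosen by TEV_ADD/TEV_SUB):

    out = (d ± ((1 - c) * a + c * b) + bias) * scale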
@@ -23,12 +23,17 @@ struct SourceLocation
 class Diagnostics
 {
 std::string m_name;
+std::string m_source;
+std::string sourceDiagString(const SourceLocation& l, bool ansi=false) const;
 public:
-void setName(const std::string& name) {m_name = name;}
+void reset(const std::string& name, const std::string& source) {m_name = name; m_source = source;}
 void reportParserErr(const SourceLocation& l, const char* format, ...);
 void reportLexerErr(const SourceLocation& l, const char* format, ...);
 void reportCompileErr(const SourceLocation& l, const char* format, ...);
 void reportBackendErr(const SourceLocation& l, const char* format, ...);
+
+const std::string& getName() const {return m_name;}
+const std::string& getSource() const {return m_source;}
 };
 
 class Parser
@@ -65,6 +70,36 @@ public:
 float m_tokenFloat = 0.0;
 Token() : m_type(TokenNone) {}
 Token(TokenType type, SourceLocation loc) : m_type(type), m_location(loc) {}
+const char* typeString() const
+{
+switch (m_type)
+{
+case TokenNone:
+return "None";
+case TokenSourceBegin:
+return "SourceBegin";
+case TokenSourceEnd:
+return "SourceEnd";
+case TokenNumLiteral:
+return "NumLiteral";
+case TokenVectorSwizzle:
+return "VectorSwizzle";
+case TokenEvalGroupStart:
+return "EvalGroupStart";
+case TokenEvalGroupEnd:
+return "EvalGroupEnd";
+case TokenFunctionStart:
+return "FunctionStart";
+case TokenFunctionEnd:
+return "FunctionEnd";
+case TokenFunctionArgDelim:
+return "FunctionArgDelim";
+case TokenArithmeticOp:
+return "ArithmeticOp";
+default: break;
+}
+return nullptr;
+}
 };
 void reset(const std::string& source);
 Token consumeToken();
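Note on this hunk: typeString() is a debugging aid consumed by the new Lexer::PrintChain/PrintTree further down. A hypothetical sketch of dumping the token stream with it (the loop and the `parser` variable are illustrative, not part of the commit):

    // Illustrative only: print each token until the source-end marker.
    for (Parser::Token tok = parser.consumeToken();
         tok.m_type != Parser::TokenSourceEnd;
         tok = parser.consumeToken())
        printf("%s @%d:%d\n", tok.typeString(), tok.m_location.line, tok.m_location.col);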
@@ -120,7 +155,7 @@ struct IR
 
 struct
 {
-int m_idxs[4] = {-1};
+int m_idxs[4] = {-1, -1, -1, -1};
 size_t m_instIdx;
 } m_swizzle;
 
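Note on this hunk: this is the standard C++ aggregate-initialization pitfall. With `int m_idxs[4] = {-1};` only the first element is -1; the remaining three are value-initialized to 0, so an unused swizzle slot read back as component 0 instead of -1. A minimal illustration:

    int partial[4] = {-1};              // becomes {-1, 0, 0, 0}
    int full[4]    = {-1, -1, -1, -1};  // becomes {-1, -1, -1, -1}, what the swizzle checks expect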
@@ -208,6 +243,9 @@ class Lexer
 void EmitArithmetic(IR& ir, const Lexer::OperationNode* arithNode, IR::RegID target) const;
 void EmitVectorSwizzle(IR& ir, const Lexer::OperationNode* swizNode, IR::RegID target) const;
 
+static void PrintChain(const Lexer::OperationNode* begin, const Lexer::OperationNode* end);
+static void PrintTree(const Lexer::OperationNode* node, int indent=0);
+
 public:
 void reset();
 void consumeAllTokens(Parser& parser);
@@ -224,7 +262,7 @@ class Frontend
 public:
 IR compileSource(const std::string& source, const std::string& diagName)
 {
-m_diag.setName(diagName);
+m_diag.reset(diagName, source);
 m_parser.reset(source);
 m_lexer.consumeAllTokens(m_parser);
 return m_lexer.compileIR();
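Note on this hunk: compileSource() now hands Diagnostics both the display name and the full source text, so later errors can echo the offending line (see the Diagnostics changes below). A hypothetical call site, where `fe`, `path`, and `heclSource` are illustrative names only:

    // Hypothetical usage sketch, not part of this commit.
    HECL::Frontend::IR ir = fe.compileSource(heclSource, path.getLastComponentUTF8());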
@@ -30,6 +30,7 @@
 #include <algorithm>
 #include <regex>
 #include <list>
+#include <map>
 #include <LogVisor/LogVisor.hpp>
 #include "../extern/xxhash/xxhash.h"
 
@@ -658,6 +659,19 @@ public:
 return m_relPath.c_str() + m_relPath.size();
 return m_relPath.c_str() + pos + 1;
 }
+const char* getLastComponentUTF8() const
+{
+size_t pos = m_relPath.rfind(_S('/'));
+#if HECL_UCS2
+if (pos == SystemString::npos)
+return m_utf8RelPath.c_str() + m_utf8RelPath.size();
+return m_utf8RelPath.c_str() + pos + 1;
+#else
+if (pos == SystemString::npos)
+return m_relPath.c_str() + m_relPath.size();
+return m_relPath.c_str() + pos + 1;
+#endif
+}
 
 /**
 * @brief Obtain c-string of extension of final path component (stored within relative path)
|
@ -738,7 +752,7 @@ public:
|
||||||
* @brief Insert directory children into list
|
* @brief Insert directory children into list
|
||||||
* @param outPaths list to append children to
|
* @param outPaths list to append children to
|
||||||
*/
|
*/
|
||||||
void getDirChildren(std::vector<ProjectPath>& outPaths) const;
|
void getDirChildren(std::map<SystemString, ProjectPath>& outPaths) const;
|
||||||
|
|
||||||
/**
|
/**
|
||||||
* @brief Insert glob matches into existing vector
|
* @brief Insert glob matches into existing vector
|
||||||
|
|
|
@@ -169,6 +169,7 @@ GX::TraceResult GX::RecursiveTraceColor(const IR& ir, Diagnostics& diag, const I
 TEVStage* b = bTrace.tevStage;
 if (b->m_prev != a)
 diag.reportBackendErr(inst.m_loc, "TEV stages must have monotonic progression");
+b->m_color[3] = CC_CPREV;
 return TraceResult(b);
 }
 break;
@@ -183,6 +184,7 @@ GX::TraceResult GX::RecursiveTraceColor(const IR& ir, Diagnostics& diag, const I
 if (b->m_prev != a)
 diag.reportBackendErr(inst.m_loc, "TEV stages must have monotonic progression");
 b->m_op = TEV_SUB;
+b->m_color[3] = CC_CPREV;
 return TraceResult(b);
 }
 break;
@@ -204,6 +206,14 @@ GX::TraceResult GX::RecursiveTraceColor(const IR& ir, Diagnostics& diag, const I
 b->m_color[3] = CC_ZERO;
 return TraceResult(b);
 }
+else if (aTrace.type == TraceResult::TraceTEVColorArg &&
+bTrace.type == TraceResult::TraceTEVColorArg)
+{
+TEVStage& stage = addTEVStage(diag, inst.m_loc);
+stage.m_color[1] = aTrace.tevColorArg;
+stage.m_color[2] = bTrace.tevColorArg;
+return TraceResult(&stage);
+}
 else if (aTrace.type == TraceResult::TraceTEVStage &&
 bTrace.type == TraceResult::TraceTEVColorArg)
 {
@@ -262,6 +272,19 @@ GX::TraceResult GX::RecursiveTraceColor(const IR& ir, Diagnostics& diag, const I
 
 diag.reportBackendErr(inst.m_loc, "unable to convert arithmetic to TEV stage");
 }
+case IR::OpSwizzle:
+{
+if (inst.m_swizzle.m_idxs[0] == 3 && inst.m_swizzle.m_idxs[1] == -1 &&
+inst.m_swizzle.m_idxs[2] == -1 && inst.m_swizzle.m_idxs[3] == -1)
+{
+const IR::Instruction& cInst = inst.getChildInst(ir, 0);
+if (cInst.m_op != IR::OpCall || cInst.m_call.m_name.compare("Texture"))
+diag.reportBackendErr(inst.m_loc, "only Texture() accepted for alpha swizzle");
+return TraceResult(CC_TEXA);
+}
+else
+diag.reportBackendErr(inst.m_loc, "only alpha extract may be performed with swizzle operation");
+}
 default:
 diag.reportBackendErr(inst.m_loc, "invalid color op");
 }
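Note on this hunk: the color trace accepts a swizzle only when it is a single alpha extract applied directly to a Texture() call, which it folds into the CC_TEXA argument. Read together with the m_idxs fix above, the index patterns look like this (illustrative values; -1 marks an unused slot):

    // Illustrative only: swizzle index patterns as the check above sees them.
    int alphaExtract[4] = { 3, -1, -1, -1 };  // ".a"  -> accepted, traced as CC_TEXA
    int rgbSwizzle[4]   = { 0,  1,  2, -1 };  // ".rgb" -> rejected ("only alpha extract ...")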
@@ -305,6 +328,7 @@ GX::TraceResult GX::RecursiveTraceAlpha(const IR& ir, Diagnostics& diag, const I
 }
 
 TEVStage& newStage = addTEVStage(diag, inst.m_loc);
+newStage.m_color[3] = CC_CPREV;
 
 newStage.m_texMapIdx = mapIdx;
 newStage.m_alpha[0] = CA_TEXA;
@@ -355,6 +379,7 @@ GX::TraceResult GX::RecursiveTraceAlpha(const IR& ir, Diagnostics& diag, const I
 TEVStage* b = bTrace.tevStage;
 if (b->m_prev != a)
 diag.reportBackendErr(inst.m_loc, "TEV stages must have monotonic progression");
+b->m_alpha[3] = CA_APREV;
 return TraceResult(b);
 }
 break;
@@ -370,6 +395,7 @@ GX::TraceResult GX::RecursiveTraceAlpha(const IR& ir, Diagnostics& diag, const I
 diag.reportBackendErr(inst.m_loc, "TEV stages must have monotonic progression");
 if (b->m_op != TEV_SUB)
 diag.reportBackendErr(inst.m_loc, "unable to integrate alpha subtraction into stage chain");
+b->m_alpha[3] = CA_APREV;
 return TraceResult(b);
 }
 break;
@@ -391,6 +417,15 @@ GX::TraceResult GX::RecursiveTraceAlpha(const IR& ir, Diagnostics& diag, const I
 b->m_alpha[3] = CA_ZERO;
 return TraceResult(b);
 }
+else if (aTrace.type == TraceResult::TraceTEVAlphaArg &&
+bTrace.type == TraceResult::TraceTEVAlphaArg)
+{
+TEVStage& stage = addTEVStage(diag, inst.m_loc);
+stage.m_color[3] = CC_CPREV;
+stage.m_alpha[1] = aTrace.tevAlphaArg;
+stage.m_alpha[2] = bTrace.tevAlphaArg;
+return TraceResult(&stage);
+}
 else if (aTrace.type == TraceResult::TraceTEVStage &&
 bTrace.type == TraceResult::TraceTEVColorArg)
 {
@@ -484,6 +519,12 @@ void GX::reset(const IR& ir, Diagnostics& diag)
 m_blendDst = BL_ONE;
 doAlpha = true;
 }
+else
+{
+diag.reportBackendErr(rootCall.m_loc, "GX backend doesn't handle '%s' root",
+rootCall.m_call.m_name.c_str());
+return;
+}
 
 /* Follow Color Chain */
 const IR::Instruction& colorRoot =
@@ -496,6 +537,11 @@ void GX::reset(const IR& ir, Diagnostics& diag)
 const IR::Instruction& alphaRoot =
 ir.m_instructions.at(rootCall.m_call.m_argInstIdxs.at(1));
 RecursiveTraceAlpha(ir, diag, alphaRoot);
+
+/* Ensure Alpha reaches end of chain */
+if (m_alphaTraceStage >= 0)
+for (int i=m_alphaTraceStage+1 ; i<m_tevCount ; ++i)
+m_tevs[i].m_alpha[3] = CA_APREV;
 }
 }
 
@@ -385,14 +385,14 @@ static void VisitDirectory(const ProjectPath& dir, bool recursive,
 std::vector<SpecInst>& specInsts,
 CookProgress& progress)
 {
-std::vector<ProjectPath> children;
+std::map<SystemString, ProjectPath> children;
 dir.getDirChildren(children);
 
 /* Pass 1: child file count */
 int childFileCount = 0;
-for (ProjectPath& child : children)
+for (auto& child : children)
 {
-switch (child.getPathType())
+switch (child.second.getPathType())
 {
 case ProjectPath::PT_FILE:
 {
@@ -401,7 +401,7 @@ static void VisitDirectory(const ProjectPath& dir, bool recursive,
 }
 case ProjectPath::PT_LINK:
 {
-ProjectPath target = child.resolveLink();
+ProjectPath target = child.second.resolveLink();
 if (target.getPathType() == ProjectPath::PT_FILE)
 ++childFileCount;
 break;
@@ -414,19 +414,19 @@ static void VisitDirectory(const ProjectPath& dir, bool recursive,
 int progNum = 0;
 float progDenom = childFileCount;
 progress.changeDir(dir.getLastComponent());
-for (ProjectPath& child : children)
+for (auto& child : children)
 {
-switch (child.getPathType())
+switch (child.second.getPathType())
 {
 case ProjectPath::PT_FILE:
 {
-progress.changeFile(child.getLastComponent(), progNum++/progDenom);
-VisitFile(child, specInsts, progress);
+progress.changeFile(child.first.c_str(), progNum++/progDenom);
+VisitFile(child.second, specInsts, progress);
 break;
 }
 case ProjectPath::PT_LINK:
 {
-ProjectPath target = child.resolveLink();
+ProjectPath target = child.second.resolveLink();
 if (target.getPathType() == ProjectPath::PT_FILE)
 {
 progress.changeFile(target.getLastComponent(), progNum++/progDenom);
@@ -442,13 +442,13 @@ static void VisitDirectory(const ProjectPath& dir, bool recursive,
 /* Pass 3: child directories */
 if (recursive)
 {
-for (ProjectPath& child : children)
+for (auto& child : children)
 {
-switch (child.getPathType())
+switch (child.second.getPathType())
 {
 case ProjectPath::PT_DIRECTORY:
 {
-VisitDirectory(child, recursive, specInsts, progress);
+VisitDirectory(child.second, recursive, specInsts, progress);
 break;
 }
 default: break;
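Note on these hunks: children moves from std::vector<ProjectPath> to std::map<SystemString, ProjectPath> keyed by entry name, so the cook passes visit directory entries in sorted, deterministic order regardless of what readdir() returns, and the key doubles as the display name passed to progress.changeFile(). A minimal sketch of the ordering property:

    #include <cstdio>
    #include <map>
    #include <string>

    int main()
    {
        std::map<std::string, int> children;
        children["zebra.blend"] = 0;
        children["alpha.blend"] = 1;
        for (auto& child : children)                   // iterates in key order
            std::printf("%s\n", child.first.c_str()); // prints alpha.blend, then zebra.blend
    }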
@@ -17,6 +17,32 @@ namespace HECL
 namespace Frontend
 {
 
+std::string Diagnostics::sourceDiagString(const SourceLocation& l, bool ansi) const
+{
+std::string::const_iterator it = m_source.begin();
+for (int i=1 ; i<l.line ; ++i)
+{
+while (*it != '\n' && it != m_source.end())
+++it;
+if (*it == '\n')
+++it;
+}
+std::string::const_iterator begin = it;
+while (*it != '\n' && it != m_source.end())
+++it;
+std::string::const_iterator end = it;
+
+std::string retval(begin, end);
+retval += '\n';
+for (int i=1 ; i<l.col ; ++i)
+retval += ' ';
+if (ansi)
+retval += GREEN "^" NORMAL;
+else
+retval += '^';
+return retval;
+}
+
 void Diagnostics::reportParserErr(const SourceLocation& l, const char* fmt, ...)
 {
 va_list ap;
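Note on this hunk: sourceDiagString() echoes the source line containing the error and draws a caret under the reported column; the report*Err functions below append it to their log output. For an illustrative one-line source (the HECL expression is made up for this example) and an error at line 1, column 11, the non-ANSI variant would return:

    HECLAlpha(Texture(0), 0.5)
              ^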
@@ -31,11 +57,11 @@ void Diagnostics::reportParserErr(const SourceLocation& l, const char* fmt, ...)
 #endif
 va_end(ap);
 if (LogVisor::XtermColor)
-LogModule.report(LogVisor::FatalError, RED "Error parsing" NORMAL " '%s' " YELLOW "@%d:%d " NORMAL "\n%s",
-m_name.c_str(), l.line, l.col, result);
+LogModule.report(LogVisor::FatalError, CYAN "[Parser]" NORMAL " %s " YELLOW "@%d:%d " NORMAL "\n%s\n%s",
+m_name.c_str(), l.line, l.col, result, sourceDiagString(l, true).c_str());
 else
-LogModule.report(LogVisor::FatalError, "Error parsing '%s' @%d:%d\n%s",
-m_name.c_str(), l.line, l.col, result);
+LogModule.report(LogVisor::FatalError, "[Parser] %s @%d:%d\n%s\n%s",
+m_name.c_str(), l.line, l.col, result, sourceDiagString(l, false).c_str());
 free(result);
 }
 
@@ -53,11 +79,11 @@ void Diagnostics::reportLexerErr(const SourceLocation& l, const char* fmt, ...)
 #endif
 va_end(ap);
 if (LogVisor::XtermColor)
-LogModule.report(LogVisor::FatalError, RED "Error lexing" NORMAL " '%s' " YELLOW "@%d:%d " NORMAL "\n%s",
-m_name.c_str(), l.line, l.col, result);
+LogModule.report(LogVisor::FatalError, CYAN "[Lexer]" NORMAL " %s " YELLOW "@%d:%d " NORMAL "\n%s\n%s",
+m_name.c_str(), l.line, l.col, result, sourceDiagString(l, true).c_str());
 else
-LogModule.report(LogVisor::FatalError, "Error lexing '%s' @%d:%d\n%s",
-m_name.c_str(), l.line, l.col, result);
+LogModule.report(LogVisor::FatalError, "[Lexer] %s @%d:%d\n%s\n%s",
+m_name.c_str(), l.line, l.col, result, sourceDiagString(l, false).c_str());
 free(result);
 }
 
@@ -75,11 +101,11 @@ void Diagnostics::reportCompileErr(const SourceLocation& l, const char* fmt, ...
 #endif
 va_end(ap);
 if (LogVisor::XtermColor)
-LogModule.report(LogVisor::FatalError, RED "Error compiling" NORMAL " '%s' " YELLOW "@%d:%d " NORMAL "\n%s",
-m_name.c_str(), l.line, l.col, result);
+LogModule.report(LogVisor::FatalError, CYAN "[Compiler]" NORMAL " %s " YELLOW "@%d:%d " NORMAL "\n%s\n%s",
+m_name.c_str(), l.line, l.col, result, sourceDiagString(l, true).c_str());
 else
-LogModule.report(LogVisor::FatalError, "Error compiling '%s' @%d:%d\n%s",
-m_name.c_str(), l.line, l.col, result);
+LogModule.report(LogVisor::FatalError, "[Compiler] %s @%d:%d\n%s\n%s",
+m_name.c_str(), l.line, l.col, result, sourceDiagString(l, false).c_str());
 free(result);
 }
 
@@ -97,11 +123,11 @@ void Diagnostics::reportBackendErr(const SourceLocation& l, const char* fmt, ...
 #endif
 va_end(ap);
 if (LogVisor::XtermColor)
-LogModule.report(LogVisor::FatalError, RED "Backend error" NORMAL " in '%s' " YELLOW "@%d:%d " NORMAL "\n%s",
-m_name.c_str(), l.line, l.col, result);
+LogModule.report(LogVisor::FatalError, CYAN "[Backend]" NORMAL " %s " YELLOW "@%d:%d " NORMAL "\n%s\n%s",
+m_name.c_str(), l.line, l.col, result, sourceDiagString(l, true).c_str());
 else
-LogModule.report(LogVisor::FatalError, "Backend error in '%s' @%d:%d\n%s",
-m_name.c_str(), l.line, l.col, result);
+LogModule.report(LogVisor::FatalError, "[Backend] %s @%d:%d\n%s\n%s",
+m_name.c_str(), l.line, l.col, result, sourceDiagString(l, false).c_str());
 free(result);
 }
 
@@ -47,6 +47,28 @@ void Lexer::ReconnectArithmetic(OperationNode* sn, OperationNode** lastSub, Oper
 *newSub = sn;
 }
 
+void Lexer::PrintChain(const Lexer::OperationNode* begin, const Lexer::OperationNode* end)
+{
+for (const Lexer::OperationNode* n = begin ; n != end ; n = n->m_next)
+{
+printf("%3d %s %s\n", n->m_tok.m_location.col, n->m_tok.typeString(),
+n->m_tok.m_tokenString.c_str());
+}
+}
+
+void Lexer::PrintTree(const Lexer::OperationNode* node, int indent)
+{
+for (const Lexer::OperationNode* n = node ; n ; n = n->m_next)
+{
+for (int i=0 ; i<indent ; ++i)
+printf(" ");
+printf("%3d %s %s\n", n->m_tok.m_location.col, n->m_tok.typeString(),
+n->m_tok.m_tokenString.c_str());
+if (n->m_sub)
+PrintTree(n->m_sub, indent + 1);
+}
+}
+
 void Lexer::reset()
 {
 m_root = nullptr;
@@ -157,7 +179,7 @@ void Lexer::consumeAllTokens(Parser& parser)
 }
 else if (n->m_tok.m_type == Parser::TokenFunctionEnd)
 {
-if (n->m_prev->m_tok.m_type != Parser::TokenEvalGroupStart)
+if (n->m_prev->m_tok.m_type != Parser::TokenFunctionStart)
 {
 m_pool.emplace_front(std::move(
 Parser::Token(Parser::TokenEvalGroupEnd, n->m_tok.m_location)));
@@ -230,10 +252,15 @@ void Lexer::consumeAllTokens(Parser& parser)
 if (sn->m_tok.m_type == Parser::TokenFunctionEnd)
 {
 n.m_sub = n.m_next;
+if (n.m_next == sn)
+n.m_sub = nullptr;
 n.m_next = sn->m_next;
-sn->m_next->m_prev = &n;
-n.m_sub->m_prev = nullptr;
-sn->m_prev->m_next = nullptr;
+if (sn->m_next)
+sn->m_next->m_prev = &n;
+if (n.m_sub)
+n.m_sub->m_prev = nullptr;
+if (sn->m_prev)
+sn->m_prev->m_next = nullptr;
 break;
 }
 }
@@ -310,6 +337,13 @@ void Lexer::consumeAllTokens(Parser& parser)
 }
 }
 
+if (HECL::VerbosityLevel)
+{
+printf("%s\n", m_diag.getSource().c_str());
+PrintTree(firstNode);
+printf("\n");
+}
+
 /* Done! */
 m_root = firstNode->m_next;
 }
@@ -286,7 +286,7 @@ static void _recursiveGlob(Database::Project& proj,
 #endif
 }
 
-void ProjectPath::getDirChildren(std::vector<ProjectPath>& outPaths) const
+void ProjectPath::getDirChildren(std::map<SystemString, ProjectPath>& outPaths) const
 {
 #if _WIN32
 #else
@@ -298,18 +298,6 @@ void ProjectPath::getDirChildren(std::vector<ProjectPath>& outPaths) const
 return;
 }
 
-/* Count elements */
-size_t count = 0;
-while ((de = readdir(dir)))
-{
-if (!strcmp(de->d_name, "."))
-continue;
-if (!strcmp(de->d_name, ".."))
-continue;
-++count;
-}
-outPaths.reserve(outPaths.size() + count);
-
 /* Add elements */
 rewinddir(dir);
 while ((de = readdir(dir)))
@@ -318,7 +306,7 @@ void ProjectPath::getDirChildren(std::vector<ProjectPath>& outPaths) const
 continue;
 if (!strcmp(de->d_name, ".."))
 continue;
-outPaths.emplace_back(*this, de->d_name);
+outPaths[de->d_name] = ProjectPath(*this, de->d_name);
 }
 
 closedir(dir);