Initial HECL language frontend

This commit is contained in:
Jack Andersen 2015-10-08 16:08:10 -10:00
parent 135a7ced5d
commit 32bd32b9dd
12 changed files with 750 additions and 26 deletions

View File

@ -458,6 +458,7 @@ void BlenderConnection::PyOutStream::linkBlend(const std::string& target,
}
BlenderConnection::DataStream::Mesh::Mesh(BlenderConnection& conn, int skinSlotCount)
: aabbMin(conn), aabbMax(conn)
{
uint32_t matSetCount;
conn._readBuf(&matSetCount, 4);

View File

@ -19,6 +19,7 @@
#include <functional>
#include "HECL/HECL.hpp"
#include <Athena/Types.hpp>
namespace HECL
{
@ -283,6 +284,26 @@ public:
/* Intermediate mesh representation prepared by blender from a single mesh object */
struct Mesh
{
struct Vector2f
{
atVec2f val;
Vector2f(BlenderConnection& conn) {conn._readBuf(&val, 8);}
};
struct Vector3f
{
atVec3f val;
Vector3f(BlenderConnection& conn) {conn._readBuf(&val, 12);}
};
struct Index
{
uint32_t val;
Index(BlenderConnection& conn) {conn._readBuf(&val, 4);}
};
/* Cumulative AABB */
Vector3f aabbMin;
Vector3f aabbMax;
/* HECL source of each material */
struct Material
{
@ -294,21 +315,6 @@ public:
std::vector<std::vector<Material>> materialSets;
/* Vertex buffer data */
struct Vector2f
{
float val[2];
Vector2f(BlenderConnection& conn) {conn._readBuf(val, 8);}
};
struct Vector3f
{
float val[3];
Vector3f(BlenderConnection& conn) {conn._readBuf(val, 12);}
};
struct Index
{
uint32_t val;
Index(BlenderConnection& conn) {conn._readBuf(&val, 4);}
};
std::vector<Vector3f> pos;
std::vector<Vector3f> norm;
uint32_t colorLayerCount = 0;

View File

@ -94,6 +94,13 @@ def cook(writebuf, mesh_obj, max_skin_banks, max_octant_length=None):
copy_mesh.calc_normals_split()
rna_loops = copy_mesh.loops
# Filter out useless AABB points and send data
aabb = bytearray()
for comp in copy_obj.bound_box[0]:
writebuf(struct.pack('f', comp))
for comp in copy_obj.bound_box[6]:
writebuf(struct.pack('f', comp))
# Create master BMesh and VertPool
bm_master = bmesh.new()
bm_master.from_mesh(copy_obj.data)
@ -177,13 +184,6 @@ def cook(writebuf, mesh_obj, max_skin_banks, max_octant_length=None):
# No more surfaces
writebuf(struct.pack('B', 0))
# Filter out useless AABB points and generate data array
#aabb = bytearray()
#for comp in copy_obj.bound_box[0]:
# aabb += struct.pack('f', comp)
#for comp in copy_obj.bound_box[6]:
# aabb += struct.pack('f', comp)
# Delete copied mesh from scene
bm_master.free()
bpy.context.scene.objects.unlink(copy_obj)

2
hecl/extern/Athena vendored

@ -1 +1 @@
Subproject commit f4716070dd33f910e4854997a91a249e40922229
Subproject commit e6dedd0e6cd72117cc3b06ec1b42aec0371d5790

@ -1 +1 @@
Subproject commit 8b9dd56955f0ada6cb89e77eb7ea65fb15efdb0c
Subproject commit 189e047977b138b711259ad84d94471f5d006ffb

View File

@ -1,4 +1,195 @@
#ifndef HECLFRONTEND_HPP
#define HECLFRONTEND_HPP
#include <string>
#include <vector>
#include <forward_list>
#include <Athena/Types.hpp>
namespace HECL
{
namespace Frontend
{
/* Line/column position within a HECL source string.
 * Parser::getLocation() produces 1-based values; the default -1/-1
 * means "no/unknown location". */
struct SourceLocation
{
    int line = -1;  /* -1 = unset */
    int col = -1;   /* -1 = unset */
    SourceLocation() = default;
    SourceLocation(int l, int c) : line(l), col(c) {}
};
/* Error sink shared by Parser and Lexer.
 * Both report methods take a printf-style format plus varargs and emit
 * the message (with the unit name and @line:col) at
 * LogVisor::FatalError severity. */
class Diagnostics
{
    std::string m_name;  /* display name of the source unit being compiled */
public:
    void setName(const std::string& name) {m_name = name;}
    void reportParserErr(const SourceLocation& l, const char* format, ...);
    void reportLexerErr(const SourceLocation& l, const char* format, ...);
};
/* Tokenizer for HECL source. After reset(), repeated consumeToken()
 * calls yield a flat token stream bracketed by TokenSourceBegin and
 * TokenSourceEnd; errors are routed through Diagnostics. */
class Parser
{
public:
    enum TokenType
    {
        TokenNone,             /* invalid / error token */
        TokenSourceBegin,      /* emitted once, immediately after reset() */
        TokenSourceEnd,        /* end of source reached */
        TokenNumLiteral,       /* float literal; value in m_tokenFloat */
        TokenVectorSwizzle,    /* ".xyzw"/".rgba" swizzle; text in m_tokenString */
        TokenEvalGroupStart,   /* '(' opening an evaluation group */
        TokenEvalGroupEnd,     /* ')' closing an evaluation group */
        TokenFunctionStart,    /* identifier followed by '('; name in m_tokenString */
        TokenFunctionEnd,      /* ')' closing a function call */
        TokenFunctionArgDelim, /* ',' between function arguments */
        TokenArithmeticOp,     /* one of + - * /; the character in m_tokenInt */
    };
private:
    Diagnostics& m_diag;
    const std::string* m_source = nullptr;   /* non-owning; must outlive parsing */
    std::string::const_iterator m_sourceIt;  /* current scan position */
    /* Records, per open paren, whether the matching ')' closes a group
     * or a function call */
    std::vector<TokenType> m_parenStack;
    bool m_reset = false;  /* true until the TokenSourceBegin token is emitted */
    void skipWhitespace(std::string::const_iterator& it);
public:
    struct Token
    {
        TokenType m_type;
        SourceLocation m_location;
        std::string m_tokenString;  /* identifier / swizzle text where applicable */
        int m_tokenInt = 0;         /* arithmetic-op character where applicable */
        float m_tokenFloat = 0.0;   /* numeric-literal value where applicable */
        Token() : m_type(TokenNone) {}
        Token(TokenType type, SourceLocation loc) : m_type(type), m_location(loc) {}
    };
    void reset(const std::string& source);
    Token consumeToken();
    SourceLocation getLocation() const;
    Parser(Diagnostics& diag) : m_diag(diag) {}
};
/* Register-based intermediate representation compiled from the lexed
 * operation tree; consumed by HECL backends. */
struct IR
{
    enum OpType
    {
        OpNone,       /**< NOP */
        OpCall,       /**< Deferred function insertion for HECL backend using specified I/O regs */
        OpLoadImm,    /**< Load a constant (numeric literal) into register */
        OpArithmetic, /**< Perform binary arithmetic between registers */
        OpSwizzle     /**< Vector insertion/extraction/swizzling operation */
    };
    enum RegType
    {
        RegNone,
        RegFloat,
        RegVec3,
        RegVec4
    };
    /* Typed reference to a virtual register */
    struct RegID
    {
        RegType m_type = RegNone;
        unsigned m_idx = 0;
    };
    struct Instruction
    {
        OpType m_op = OpNone;
        /* Operands when m_op == OpCall */
        struct
        {
            std::vector<RegID> m_callRegs;
            RegID m_target;
        } m_call;
        /* Operands when m_op == OpLoadImm */
        struct
        {
            atVec4f m_immVec;
            RegID m_target;
        } m_loadImm;
        /* Operands when m_op == OpArithmetic */
        struct
        {
            enum ArithmeticOpType
            {
                ArithmeticOpNone,
                ArithmeticOpAdd,
                ArithmeticOpSubtract,
                ArithmeticOpMultiply,
                ArithmeticOpDivide
            } m_op = ArithmeticOpNone;
            RegID m_a;
            RegID m_b;
            RegID m_target;
        } m_arithmetic;
        /* Operands when m_op == OpSwizzle; -1 presumably marks an unused
         * component slot. FIX: the original initializer `{-1}` set only
         * element 0 to -1 and zero-initialized the rest, making component
         * index 0 indistinguishable from "unused" for slots 1-3. */
        struct
        {
            RegID m_source;
            int m_sourceIdxs[4] = {-1, -1, -1, -1};
            RegID m_target;
            int m_targetIdxs[4] = {-1, -1, -1, -1};
        } m_swizzle;
    };
    unsigned m_floatRegCount = 0;  /* virtual registers used, per type */
    unsigned m_vec3RegCount = 0;
    unsigned m_vec4RegCount = 0;
    std::vector<Instruction> m_instructions;
};
/* Consumes the parser's flat token stream and organizes it into a tree
 * of operation nodes (m_root), then compiles that tree to IR. */
class Lexer
{
    friend class OperationNode;
    Diagnostics& m_diag;
    /* Intermediate tree-node for organizing tokens into operations */
    struct OperationNode
    {
        Parser::Token m_tok;
        OperationNode* m_prev = nullptr;  /* previous sibling */
        OperationNode* m_next = nullptr;  /* next sibling */
        OperationNode* m_sub = nullptr;   /* first child (group/function contents) */
        OperationNode() {}
        OperationNode(Parser::Token&& tok) : m_tok(std::move(tok)) {}
    };
    /* Pool of nodes to keep ownership (forward_list so pointers aren't invalidated) */
    std::forward_list<OperationNode> m_pool;
    /* Final lexed root function (IR comes from this); null until a
     * successful consumeAllTokens() */
    OperationNode* m_root = nullptr;
public:
    void reset();
    void consumeAllTokens(Parser& parser);
    IR compileIR() const;
    Lexer(Diagnostics& diag) : m_diag(diag) {}
};
/* Top-level facade wiring Diagnostics, Parser and Lexer together.
 * Members are declared in dependency order: m_parser and m_lexer are
 * constructed from m_diag, so m_diag must come first. */
class Frontend
{
    Diagnostics m_diag;
    Parser m_parser;
    Lexer m_lexer;
public:
    /* Tokenize, lex and IR-compile `source`; `diagName` labels any
     * diagnostics emitted along the way. */
    IR compileSource(const std::string& source, const std::string& diagName)
    {
        /* Name must be set before any error can be reported */
        m_diag.setName(diagName);
        m_parser.reset(source);
        m_lexer.consumeAllTokens(m_parser);
        return m_lexer.compileIR();
    }
    Frontend() : m_parser(m_diag), m_lexer(m_diag) {}
};
}
}
#endif // HECLFRONTEND_HPP

View File

@ -1,3 +1,4 @@
add_library(HECLFrontend
CHECLIR.cpp
CHECLLexer.cpp)
Parser.cpp
Lexer.cpp
Diagnostics.cpp)

View File

@ -0,0 +1,65 @@
#include "HECL/HECL.hpp"
#include "HECL/Frontend.hpp"
#include <stdarg.h>
/* ANSI sequences */
#define RED "\x1b[1;31m"
#define YELLOW "\x1b[1;33m"
#define GREEN "\x1b[1;32m"
#define MAGENTA "\x1b[1;35m"
#define CYAN "\x1b[1;36m"
#define BOLD "\x1b[1m"
#define NORMAL "\x1b[0m"
namespace HECL
{
namespace Frontend
{
/* Report a fatal parse-phase error at location `l`; `fmt` + varargs are
 * printf-style. Emits with ANSI color when the terminal supports it.
 * Fixes vs. original: (1) `ap` was passed to _vscprintf and then reused
 * by vsnprintf — a va_list may only be traversed once, so format from a
 * va_copy; (2) _vscprintf's return excludes the NUL terminator, so the
 * buffer was one byte short; (3) vasprintf leaves `result` undefined on
 * failure, so normalize to null and guard the %s argument. */
void Diagnostics::reportParserErr(const SourceLocation& l, const char* fmt, ...)
{
    va_list ap;
    va_start(ap, fmt);
    char* result = nullptr;
#ifdef _WIN32
    va_list ap2;
    va_copy(ap2, ap);
    int length = _vscprintf(fmt, ap) + 1;  /* +1 for NUL terminator */
    result = (char*)malloc(length);
    if (result)
        vsnprintf(result, length, fmt, ap2);
    va_end(ap2);
#else
    if (vasprintf(&result, fmt, ap) < 0)
        result = nullptr;
#endif
    va_end(ap);
    if (LogVisor::XtermColor)
        LogModule.report(LogVisor::FatalError, RED "Error parsing" NORMAL " '%s' " YELLOW "@%d:%d " NORMAL "\n%s",
                         m_name.c_str(), l.line, l.col, result ? result : "");
    else
        LogModule.report(LogVisor::FatalError, "Error parsing '%s' @%d:%d\n%s",
                         m_name.c_str(), l.line, l.col, result ? result : "");
    free(result);
}
/* Report a fatal lex-phase error at location `l`; `fmt` + varargs are
 * printf-style. Emits with ANSI color when the terminal supports it.
 * Fixes vs. original: (1) `ap` was passed to _vscprintf and then reused
 * by vsnprintf — a va_list may only be traversed once, so format from a
 * va_copy; (2) _vscprintf's return excludes the NUL terminator, so the
 * buffer was one byte short; (3) vasprintf leaves `result` undefined on
 * failure, so normalize to null and guard the %s argument. */
void Diagnostics::reportLexerErr(const SourceLocation& l, const char* fmt, ...)
{
    va_list ap;
    va_start(ap, fmt);
    char* result = nullptr;
#ifdef _WIN32
    va_list ap2;
    va_copy(ap2, ap);
    int length = _vscprintf(fmt, ap) + 1;  /* +1 for NUL terminator */
    result = (char*)malloc(length);
    if (result)
        vsnprintf(result, length, fmt, ap2);
    va_end(ap2);
#else
    if (vasprintf(&result, fmt, ap) < 0)
        result = nullptr;
#endif
    va_end(ap);
    if (LogVisor::XtermColor)
        LogModule.report(LogVisor::FatalError, RED "Error lexing" NORMAL " '%s' " YELLOW "@%d:%d " NORMAL "\n%s",
                         m_name.c_str(), l.line, l.col, result ? result : "");
    else
        LogModule.report(LogVisor::FatalError, "Error lexing '%s' @%d:%d\n%s",
                         m_name.c_str(), l.line, l.col, result ? result : "");
    free(result);
}
}
}

268
hecl/lib/Frontend/Lexer.cpp Normal file
View File

@ -0,0 +1,268 @@
#include "HECL/HECL.hpp"
#include "HECL/Frontend.hpp"
namespace HECL
{
namespace Frontend
{
/* Discard any previously-lexed operation tree so the lexer can be
 * reused for a fresh token stream. */
void Lexer::reset()
{
    /* Releasing the pool destroys every node; clear the root pointer so
     * compileIR() treats the lexer as holding no valid source. */
    m_pool.clear();
    m_root = nullptr;
}
/* Drain `parser`'s entire token stream and organize it into the
 * operation tree rooted at m_root. Runs as a sequence of passes over an
 * intrusive doubly-linked list of pool-owned nodes:
 *   1. append every token to a flat list, balance-checking ()/functions
 *   2. wrap function arguments in implicit evaluation groups
 *   3. fold group contents under the group-start node's m_sub
 *   4. fold function contents under the function-start node's m_sub
 *   5. re-parent vector swizzles above the function they follow
 *   6. validate alternating operand/operator order inside groups
 * On any error a diagnostic is reported and m_root stays null. */
void Lexer::consumeAllTokens(Parser& parser)
{
    reset();
    Parser::Token firstTok = parser.consumeToken();
    if (firstTok.m_type != Parser::TokenSourceBegin)
    {
        m_diag.reportLexerErr(firstTok.m_location, "expected start token");
        return;
    }
    m_pool.emplace_front(std::move(firstTok));
    Lexer::OperationNode* firstNode = &m_pool.front();
    Lexer::OperationNode* lastNode = firstNode;
    /* Build linked-list of nodes parsed in-order */
    {
        /* Locations of currently-open constructs, for unclosed-at-EOF errors */
        std::vector<SourceLocation> funcStack;
        std::vector<SourceLocation> groupStack;
        while (lastNode->m_tok.m_type != Parser::TokenSourceEnd)
        {
            Parser::Token tok = parser.consumeToken();
            switch (tok.m_type)
            {
            case Parser::TokenEvalGroupStart:
                groupStack.push_back(tok.m_location);
                break;
            case Parser::TokenEvalGroupEnd:
                if (groupStack.empty())
                {
                    m_diag.reportLexerErr(tok.m_location, "unbalanced group detected");
                    return;
                }
                groupStack.pop_back();
                break;
            case Parser::TokenFunctionStart:
                funcStack.push_back(tok.m_location);
                break;
            case Parser::TokenFunctionEnd:
                if (funcStack.empty())
                {
                    m_diag.reportLexerErr(tok.m_location, "unbalanced function detected");
                    return;
                }
                funcStack.pop_back();
                break;
            case Parser::TokenSourceEnd:
            case Parser::TokenNumLiteral:
            case Parser::TokenVectorSwizzle:
            case Parser::TokenFunctionArgDelim:
            case Parser::TokenArithmeticOp:
                break;
            default:
                m_diag.reportLexerErr(tok.m_location, "invalid token");
                return;
            }
            /* emplace_front never invalidates existing node addresses,
             * so the raw prev/next pointers stay valid */
            m_pool.emplace_front(std::move(tok));
            lastNode->m_next = &m_pool.front();
            m_pool.front().m_prev = lastNode;
            lastNode = &m_pool.front();
        }
        /* Ensure functions and groups are balanced */
        if (funcStack.size())
        {
            m_diag.reportLexerErr(funcStack.back(), "unclosed function detected");
            return;
        }
        if (groupStack.size())
        {
            m_diag.reportLexerErr(groupStack.back(), "unclosed group detected");
            return;
        }
    }
    /* Ensure first non-start node is a function */
    if (firstNode->m_next->m_tok.m_type != Parser::TokenFunctionStart)
    {
        m_diag.reportLexerErr(firstNode->m_tok.m_location, "expected root function");
        return;
    }
    /* Organize marked function args into implicit groups */
    for (Lexer::OperationNode* n = firstNode ; n != lastNode ; n = n->m_next)
    {
        if (n->m_tok.m_type == Parser::TokenFunctionStart)
        {
            /* Insert a synthetic group-start after '(' for the first arg */
            if (n->m_next->m_tok.m_type != Parser::TokenFunctionEnd)
            {
                if (n->m_next->m_tok.m_type == Parser::TokenFunctionArgDelim)
                {
                    m_diag.reportLexerErr(n->m_next->m_tok.m_location, "empty function arg");
                    return;
                }
                m_pool.emplace_front(std::move(
                    Parser::Token(Parser::TokenEvalGroupStart, n->m_next->m_tok.m_location)));
                Lexer::OperationNode* grp = &m_pool.front();
                grp->m_next = n->m_next;
                grp->m_prev = n;
                n->m_next->m_prev = grp;
                n->m_next = grp;
            }
        }
        else if (n->m_tok.m_type == Parser::TokenFunctionEnd)
        {
            /* Insert a synthetic group-end before ')' for the last arg */
            if (n->m_prev->m_tok.m_type != Parser::TokenEvalGroupStart)
            {
                m_pool.emplace_front(std::move(
                    Parser::Token(Parser::TokenEvalGroupEnd, n->m_tok.m_location)));
                Lexer::OperationNode* grp = &m_pool.front();
                grp->m_next = n;
                grp->m_prev = n->m_prev;
                n->m_prev->m_next = grp;
                n->m_prev = grp;
            }
        }
        else if (n->m_tok.m_type == Parser::TokenFunctionArgDelim)
        {
            /* Replace ',' with group-end + group-start, closing the
             * previous arg's group and opening the next arg's */
            if (n->m_next->m_tok.m_type == Parser::TokenFunctionArgDelim ||
                n->m_next->m_tok.m_type == Parser::TokenFunctionEnd)
            {
                m_diag.reportLexerErr(n->m_next->m_tok.m_location, "empty function arg");
                return;
            }
            m_pool.emplace_front(std::move(
                Parser::Token(Parser::TokenEvalGroupEnd, n->m_tok.m_location)));
            Lexer::OperationNode* egrp = &m_pool.front();
            m_pool.emplace_front(std::move(
                Parser::Token(Parser::TokenEvalGroupStart, n->m_next->m_tok.m_location)));
            Lexer::OperationNode* sgrp = &m_pool.front();
            egrp->m_next = sgrp;
            sgrp->m_prev = egrp;
            sgrp->m_next = n->m_next;
            egrp->m_prev = n->m_prev;
            n->m_next->m_prev = sgrp;
            n->m_prev->m_next = egrp;
            /* NOTE: the delimiter node itself is unlinked but remains
             * owned by m_pool */
        }
    }
    /* Organize marked groups into tree-hierarchy */
    {
        std::vector<Lexer::OperationNode*> groupStack;
        for (Lexer::OperationNode* n = firstNode ; n != lastNode ; n = n->m_next)
        {
            if (n->m_tok.m_type == Parser::TokenEvalGroupStart)
                groupStack.push_back(n);
            else if (n->m_tok.m_type == Parser::TokenEvalGroupEnd)
            {
                /* Splice [start+1, end-1] under start->m_sub and bridge
                 * start directly to the node after the group-end */
                Lexer::OperationNode* start = groupStack.back();
                groupStack.pop_back();
                if (n->m_prev == start)
                {
                    m_diag.reportLexerErr(start->m_tok.m_location, "empty group");
                    return;
                }
                start->m_sub = start->m_next;
                start->m_next = n->m_next;
                if (n->m_next)
                    n->m_next->m_prev = start;
                n->m_prev->m_next = nullptr;
            }
        }
    }
    /* Organize functions into tree-hierarchy */
    for (Lexer::OperationNode& n : m_pool)
    {
        if (n.m_tok.m_type == Parser::TokenFunctionStart)
        {
            /* Find the matching function-end at this sibling level and
             * fold the contents under m_sub, same as groups above */
            for (Lexer::OperationNode* sn = n.m_next ; sn ; sn = sn->m_next)
            {
                if (sn->m_tok.m_type == Parser::TokenFunctionEnd)
                {
                    n.m_sub = n.m_next;
                    n.m_next = sn->m_next;
                    sn->m_next->m_prev = &n;
                    n.m_sub->m_prev = nullptr;
                    sn->m_prev->m_next = nullptr;
                    break;
                }
            }
        }
    }
    /* Organize vector swizzles into tree-hierarchy */
    for (Lexer::OperationNode& n : m_pool)
    {
        if (n.m_tok.m_type == Parser::TokenVectorSwizzle)
        {
            if (n.m_prev->m_tok.m_type != Parser::TokenFunctionStart)
            {
                m_diag.reportLexerErr(n.m_tok.m_location,
                                      "vector swizzles may only follow functions");
                return;
            }
            /* Re-parent: the swizzle node takes the function's place in
             * the sibling list and adopts it as its sole child */
            Lexer::OperationNode* func = n.m_prev;
            n.m_sub = func;
            n.m_prev = func->m_prev;
            func->m_prev->m_next = &n;
            func->m_next = nullptr;
            func->m_prev = nullptr;
        }
    }
    /* Ensure evaluation groups have proper arithmetic usage */
    for (Lexer::OperationNode& n : m_pool)
    {
        if (n.m_tok.m_type == Parser::TokenEvalGroupStart)
        {
            /* Children must alternate operand, operator, operand, ... */
            int idx = 0;
            for (Lexer::OperationNode* sn = n.m_sub ; sn ; sn = sn->m_next, ++idx)
            {
                if ((sn->m_tok.m_type == Parser::TokenArithmeticOp && !(idx & 1)) ||
                    (sn->m_tok.m_type != Parser::TokenArithmeticOp && (idx & 1)))
                {
                    m_diag.reportLexerErr(sn->m_tok.m_location, "improper arithmetic expression");
                    return;
                }
            }
        }
    }
    /* Done! */
    m_root = firstNode->m_next;
}
/* Compile the lexed operation tree into HECL IR. Aborts (FatalError)
 * if consumeAllTokens() did not produce a valid root.
 * NOTE(review): clearly a work-in-progress — the pass below walks the
 * tree but its body is empty, so the returned IR currently carries no
 * instructions and zero register counts. */
IR Lexer::compileIR() const
{
    if (!m_root)
        LogModule.report(LogVisor::FatalError, "unable to compile HECL-IR for invalid source");
    IR ir;
    /* Determine maximum float regs */
    for (const Lexer::OperationNode& n : m_pool)
    {
        if (n.m_tok.m_type == Parser::TokenFunctionStart)
        {
            for (Lexer::OperationNode* sn = n.m_sub ; sn ; sn = sn->m_next)
            {
                /* TODO: register allocation / instruction emission not
                 * yet implemented */
            }
        }
    }
    return ir;
}
}
}

View File

@ -0,0 +1,192 @@
#include "HECL/HECL.hpp"
#include "HECL/Frontend.hpp"
#include <math.h>
namespace HECL
{
namespace Frontend
{
/* Advance `it` past whitespace and '#'-to-end-of-line comments, leaving
 * it at the next significant character or at m_source->cend().
 * Fix vs. original: every `*it` dereference was performed BEFORE the
 * `it != cend()` bounds check (in all three loops/tests), dereferencing
 * the end iterator on input that ends in whitespace or a comment — UB.
 * Also casts to unsigned char for isspace(), whose argument must be
 * representable as unsigned char. */
void Parser::skipWhitespace(std::string::const_iterator& it)
{
    while (true)
    {
        /* Bounds check first, then dereference */
        while (it != m_source->cend() && isspace((unsigned char)*it))
            ++it;
        /* Skip comment line */
        if (it != m_source->cend() && *it == '#')
        {
            while (it != m_source->cend() && *it != '\n')
                ++it;
            if (it != m_source->cend() && *it == '\n')
                ++it;
            continue;
        }
        break;
    }
}
/* Bind a new source string (non-owning; caller keeps it alive) and
 * rewind all scanning state so the next consumeToken() emits
 * TokenSourceBegin. */
void Parser::reset(const std::string& source)
{
    m_reset = true;
    m_parenStack.clear();
    m_source = &source;
    m_sourceIt = source.cbegin();
}
/* Produce the next token from the bound source. Returns TokenNone when
 * no source is bound or on a reported parse error; otherwise emits
 * TokenSourceBegin once after reset(), then scans candidate token kinds
 * in a fixed priority order (number, swizzle, arithmetic op, ')', '(',
 * function start, ','), and TokenSourceEnd when the input is exhausted. */
Parser::Token Parser::consumeToken()
{
    if (!m_source)
        return Parser::Token(TokenNone, SourceLocation());
    /* If parser has just been reset, emit begin token */
    if (m_reset)
    {
        m_reset = false;
        return Parser::Token(TokenSourceBegin, getLocation());
    }
    /* Skip whitespace */
    skipWhitespace(m_sourceIt);
    /* Check for source end */
    if (m_sourceIt == m_source->cend())
        return Parser::Token(TokenSourceEnd, getLocation());
    /* Check for numeric literal */
    {
        /* std::string storage is NUL-terminated (C++11), so strtof may
         * scan from &*m_sourceIt safely */
        char* strEnd;
        float val = std::strtof(&*m_sourceIt, &strEnd);
        if (&*m_sourceIt != strEnd)
        {
            Parser::Token tok(TokenNumLiteral, getLocation());
            tok.m_tokenFloat = val;
            /* Advance past however many characters strtof consumed */
            m_sourceIt += (strEnd - &*m_sourceIt);
            return tok;
        }
    }
    /* Check for swizzle op */
    if (*m_sourceIt == '.')
    {
        /* Accept up to 4 of [wxyz] or [rgba] (case-insensitive) after '.' */
        int count = 0;
        std::string::const_iterator tmp = m_sourceIt + 1;
        if (tmp != m_source->cend())
        {
            for (int i=0 ; i<4 ; ++i)
            {
                std::string::const_iterator tmp2 = tmp + i;
                if (tmp2 == m_source->cend())
                    break;
                char ch = tolower(*tmp2);
                if (ch >= 'w' && ch <= 'z')
                    ++count;
                else if (ch == 'r' || ch == 'g' || ch == 'b' || ch == 'a')
                    ++count;
                else
                    break;
            }
        }
        if (count)
        {
            /* Token string holds the lowercased component letters */
            Parser::Token tok(TokenVectorSwizzle, getLocation());
            for (int i=0 ; i<count ; ++i)
            {
                std::string::const_iterator tmp2 = tmp + i;
                tok.m_tokenString += tolower(*tmp2);
            }
            m_sourceIt = tmp + count;
            return tok;
        }
    }
    /* Check for arithmetic op */
    if (*m_sourceIt == '+' || *m_sourceIt == '-' || *m_sourceIt == '*' || *m_sourceIt == '/')
    {
        /* The operator character itself is carried in m_tokenInt */
        Parser::Token tok(TokenArithmeticOp, getLocation());
        tok.m_tokenInt = *m_sourceIt;
        ++m_sourceIt;
        return tok;
    }
    /* Check for parenthesis end (group or function call) */
    if (*m_sourceIt == ')')
    {
        if (m_parenStack.empty())
        {
            m_diag.reportParserErr(getLocation(), "unexpected ')' while parsing");
            return Parser::Token(TokenNone, SourceLocation());
        }
        /* m_parenStack recorded whether this ')' closes a group or a
         * function; emit the matching end token */
        Parser::Token tok(m_parenStack.back(), getLocation());
        ++m_sourceIt;
        m_parenStack.pop_back();
        return tok;
    }
    /* Check for group start */
    if (*m_sourceIt == '(')
    {
        m_parenStack.push_back(TokenEvalGroupEnd);
        Parser::Token tok(TokenEvalGroupStart, getLocation());
        ++m_sourceIt;
        return tok;
    }
    /* Check for function start */
    if (isalpha(*m_sourceIt) || *m_sourceIt == '_')
    {
        /* Scan the identifier, then (skipping whitespace) require '(' —
         * a bare identifier with no call syntax falls through to the
         * error below */
        std::string::const_iterator tmp = m_sourceIt + 1;
        while (tmp != m_source->cend() && (isalnum(*tmp) || *tmp == '_') && *tmp != '(')
            ++tmp;
        std::string::const_iterator nameEnd = tmp;
        skipWhitespace(tmp);
        if (*tmp == '(')
        {
            Parser::Token tok(TokenFunctionStart, getLocation());
            tok.m_tokenString.assign(m_sourceIt, nameEnd);
            m_sourceIt = tmp + 1;
            m_parenStack.push_back(TokenFunctionEnd);
            return tok;
        }
    }
    /* Check for function arg delimitation */
    if (*m_sourceIt == ',')
    {
        /* ',' is only legal directly inside a function's parens */
        if (m_parenStack.empty() || m_parenStack.back() != TokenFunctionEnd)
        {
            m_diag.reportParserErr(getLocation(), "unexpected ',' while parsing");
            return Parser::Token(TokenNone, SourceLocation());
        }
        Parser::Token tok(TokenFunctionArgDelim, getLocation());
        ++m_sourceIt;
        return tok;
    }
    /* Error condition if reached */
    m_diag.reportParserErr(getLocation(), "unexpected token while parsing");
    return Parser::Token(TokenNone, SourceLocation());
}
/* Resolve the current scan position into a 1-based line/column pair by
 * counting newlines from the start of the bound source. Returns a
 * default (invalid) location when no source is bound. */
SourceLocation Parser::getLocation() const
{
    if (!m_source)
        return SourceLocation();
    int line = 0;
    int col = 0;
    /* Walk [begin, m_sourceIt): each newline bumps the line counter and
     * restarts the column; any other character advances the column. */
    for (std::string::const_iterator ch = m_source->cbegin() ; ch != m_sourceIt ; ++ch)
    {
        if (*ch == '\n')
        {
            ++line;
            col = 0;
        }
        else
            ++col;
    }
    return {line+1, col+1};
}
}
}