Add File & Range information to tint::Source
This is the first step in improving the error messages produced while parsing.
The `line` and `column` information of `Source` has been moved to
`Source::Location`. `Source::Range` has been added, which contains a `Location`
interval, allowing error messages to highlight the full region of the error.
The `File` information provides an optional file path and pre-splits the
content into lines. These lines can be used to print the full line containing
an error.

This CL contains a few temporary changes that help split up this work and ease
integration with Tint.

Bug: tint:282
Change-Id: I7aa501b0a9631f286e8e93fd7396bdbe38175727
Reviewed-on: https://dawn-review.googlesource.com/c/tint/+/31420
Reviewed-by: dan sinclair <dsinclair@chromium.org>
Reviewed-by: David Neto <dneto@google.com>
Commit-Queue: David Neto <dneto@google.com>
parent 17e0deaeba
commit 5bee67fced
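Before the per-file changes, a rough usage sketch of the API this change introduces: a Source::File owns the (optional) path and the content, and the WGSL Parser is now constructed from a pointer to it. This is an illustration only; the include paths, the file name "example.wgsl", and the error-reporting line are assumptions, not code from this change.

#include <iostream>
#include <string>

#include "src/context.h"             // assumed include paths; see the
#include "src/reader/wgsl/parser.h"  // constructor changes in the diff below
#include "src/source.h"

int main() {
  // Deliberately malformed WGSL so the parse fails and an error is produced.
  std::string wgsl = "fn main() -> { return; }";

  // File keeps the path, the content, and the content pre-split into lines
  // for later diagnostic printing.
  tint::Source::File file("example.wgsl", wgsl);

  tint::Context ctx;
  tint::reader::wgsl::Parser parser(&ctx, &file);
  if (!parser.Parse()) {
    std::cerr << file.path << ": " << parser.error() << std::endl;
  }
  return 0;
}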
BUILD.gn
@@ -386,6 +386,7 @@ source_set("libtint_core_src") {
     "src/reader/reader.cc",
     "src/reader/reader.h",
     "src/scope_stack.h",
+    "src/source.cc",
     "src/source.h",
     "src/transform/bound_array_accessors_transform.cc",
     "src/transform/bound_array_accessors_transform.h",
@@ -19,8 +19,10 @@
 extern "C" int LLVMFuzzerTestOneInput(const uint8_t* data, size_t size) {
   std::string str(reinterpret_cast<const char*>(data), size);
 
+  tint::Source::File file("test.wgsl", str);
+
   tint::Context ctx;
-  tint::reader::wgsl::Parser parser(&ctx, str);
+  tint::reader::wgsl::Parser parser(&ctx, &file);
   parser.Parse();
 
   return 0;
@@ -421,6 +421,7 @@ int main(int argc, const char** argv) {
   tint::Context ctx;
 
   std::unique_ptr<tint::reader::Reader> reader;
+  std::unique_ptr<tint::Source::File> source_file;
 #if TINT_BUILD_WGSL_READER
   if (options.input_filename.size() > 5 &&
       options.input_filename.substr(options.input_filename.size() - 5) ==
@@ -429,8 +430,10 @@ int main(int argc, const char** argv) {
     if (!ReadFile<uint8_t>(options.input_filename, &data)) {
       return 1;
     }
-    reader = std::make_unique<tint::reader::wgsl::Parser>(
-        &ctx, std::string(data.begin(), data.end()));
+    source_file = std::make_unique<tint::Source::File>(
+        options.input_filename, std::string(data.begin(), data.end()));
+    reader =
+        std::make_unique<tint::reader::wgsl::Parser>(&ctx, source_file.get());
   }
 #endif  // TINT_BUILD_WGSL_READER
 
@@ -207,6 +207,7 @@ set(TINT_LIB_SRCS
   reader/reader.cc
   reader/reader.h
   scope_stack.h
+  source.cc
   source.h
   transform/bound_array_accessors_transform.cc
   transform/bound_array_accessors_transform.h
@@ -40,8 +40,8 @@ TEST_F(ArrayAccessorExpressionTest, CreateWithSource) {
 
   ArrayAccessorExpression exp(Source{20, 2}, std::move(ary), std::move(idx));
   auto src = exp.source();
-  EXPECT_EQ(src.line, 20u);
-  EXPECT_EQ(src.column, 2u);
+  EXPECT_EQ(src.range.begin.line, 20u);
+  EXPECT_EQ(src.range.begin.column, 2u);
 }
 
 TEST_F(ArrayAccessorExpressionTest, IsArrayAccessor) {
@@ -41,8 +41,8 @@ TEST_F(AssignmentStatementTest, CreationWithSource) {
 
   AssignmentStatement stmt(Source{20, 2}, std::move(lhs), std::move(rhs));
   auto src = stmt.source();
-  EXPECT_EQ(src.line, 20u);
-  EXPECT_EQ(src.column, 2u);
+  EXPECT_EQ(src.range.begin.line, 20u);
+  EXPECT_EQ(src.range.begin.column, 2u);
 }
 
 TEST_F(AssignmentStatementTest, IsAssign) {
@@ -45,8 +45,8 @@ TEST_F(BinaryExpressionTest, Creation_WithSource) {
   BinaryExpression r(Source{20, 2}, BinaryOp::kEqual, std::move(lhs),
                      std::move(rhs));
   auto src = r.source();
-  EXPECT_EQ(src.line, 20u);
-  EXPECT_EQ(src.column, 2u);
+  EXPECT_EQ(src.range.begin.line, 20u);
+  EXPECT_EQ(src.range.begin.column, 2u);
 }
 
 TEST_F(BinaryExpressionTest, IsBinaryal) {
@@ -41,8 +41,8 @@ TEST_F(BitcastExpressionTest, CreateWithSource) {
 
   BitcastExpression exp(Source{20, 2}, &f32, std::move(expr));
   auto src = exp.source();
-  EXPECT_EQ(src.line, 20u);
-  EXPECT_EQ(src.column, 2u);
+  EXPECT_EQ(src.range.begin.line, 20u);
+  EXPECT_EQ(src.range.begin.column, 2u);
 }
 
 TEST_F(BitcastExpressionTest, IsBitcast) {
@@ -62,8 +62,8 @@ TEST_F(BlockStatementTest, Creation_WithInsert) {
 TEST_F(BlockStatementTest, Creation_WithSource) {
   BlockStatement b(Source{20, 2});
   auto src = b.source();
-  EXPECT_EQ(src.line, 20u);
-  EXPECT_EQ(src.column, 2u);
+  EXPECT_EQ(src.range.begin.line, 20u);
+  EXPECT_EQ(src.range.begin.column, 2u);
 }
 
 TEST_F(BlockStatementTest, IsBlock) {
@@ -25,8 +25,8 @@ using BreakStatementTest = testing::Test;
 TEST_F(BreakStatementTest, Creation_WithSource) {
   BreakStatement stmt(Source{20, 2});
   auto src = stmt.source();
-  EXPECT_EQ(src.line, 20u);
-  EXPECT_EQ(src.column, 2u);
+  EXPECT_EQ(src.range.begin.line, 20u);
+  EXPECT_EQ(src.range.begin.column, 2u);
 }
 
 TEST_F(BreakStatementTest, IsBreak) {
@@ -46,8 +46,8 @@ TEST_F(CallExpressionTest, Creation_WithSource) {
   auto func = std::make_unique<IdentifierExpression>("func");
   CallExpression stmt(Source{20, 2}, std::move(func), {});
   auto src = stmt.source();
-  EXPECT_EQ(src.line, 20u);
-  EXPECT_EQ(src.column, 2u);
+  EXPECT_EQ(src.range.begin.line, 20u);
+  EXPECT_EQ(src.range.begin.column, 2u);
 }
 
 TEST_F(CallExpressionTest, IsCall) {
@@ -76,8 +76,8 @@ TEST_F(CaseStatementTest, Creation_WithSource) {
 
   CaseStatement c(Source{20, 2}, std::move(b), std::move(body));
   auto src = c.source();
-  EXPECT_EQ(src.line, 20u);
-  EXPECT_EQ(src.column, 2u);
+  EXPECT_EQ(src.range.begin.line, 20u);
+  EXPECT_EQ(src.range.begin.column, 2u);
 }
 
 TEST_F(CaseStatementTest, IsDefault_WithoutSelectors) {
@@ -25,8 +25,8 @@ using ContinueStatementTest = testing::Test;
 TEST_F(ContinueStatementTest, Creation_WithSource) {
   ContinueStatement stmt(Source{20, 2});
   auto src = stmt.source();
-  EXPECT_EQ(src.line, 20u);
-  EXPECT_EQ(src.column, 2u);
+  EXPECT_EQ(src.range.begin.line, 20u);
+  EXPECT_EQ(src.range.begin.column, 2u);
 }
 
 TEST_F(ContinueStatementTest, IsContinue) {
@@ -46,8 +46,8 @@ TEST_F(ElseStatementTest, Creation) {
 TEST_F(ElseStatementTest, Creation_WithSource) {
   ElseStatement e(Source{20, 2}, std::make_unique<BlockStatement>());
   auto src = e.source();
-  EXPECT_EQ(src.line, 20u);
-  EXPECT_EQ(src.column, 2u);
+  EXPECT_EQ(src.range.begin.line, 20u);
+  EXPECT_EQ(src.range.begin.column, 2u);
 }
 
 TEST_F(ElseStatementTest, IsElse) {
@@ -31,8 +31,8 @@ TEST_F(FallthroughStatementTest, Creation) {
 TEST_F(FallthroughStatementTest, Creation_WithSource) {
   FallthroughStatement stmt(Source{20, 2});
   auto src = stmt.source();
-  EXPECT_EQ(src.line, 20u);
-  EXPECT_EQ(src.column, 2u);
+  EXPECT_EQ(src.range.begin.line, 20u);
+  EXPECT_EQ(src.range.begin.column, 2u);
 }
 
 TEST_F(FallthroughStatementTest, IsFallthrough) {
@@ -58,8 +58,8 @@ TEST_F(FunctionTest, Creation_WithSource) {
 
   Function f(Source{20, 2}, "func", std::move(params), &void_type);
   auto src = f.source();
-  EXPECT_EQ(src.line, 20u);
-  EXPECT_EQ(src.column, 2u);
+  EXPECT_EQ(src.range.begin.line, 20u);
+  EXPECT_EQ(src.range.begin.column, 2u);
 }
 
 TEST_F(FunctionTest, AddDuplicateReferencedVariables) {
@@ -32,8 +32,8 @@ TEST_F(IdentifierExpressionTest, Creation_WithSource) {
   EXPECT_EQ(i.name(), "ident");
 
   auto src = i.source();
-  EXPECT_EQ(src.line, 20u);
-  EXPECT_EQ(src.column, 2u);
+  EXPECT_EQ(src.range.begin.line, 20u);
+  EXPECT_EQ(src.range.begin.column, 2u);
 }
 
 TEST_F(IdentifierExpressionTest, IsIdentifier) {
@@ -45,8 +45,8 @@ TEST_F(IfStatementTest, Creation_WithSource) {
 
   IfStatement stmt(Source{20, 2}, std::move(cond), std::move(body));
   auto src = stmt.source();
-  EXPECT_EQ(src.line, 20u);
-  EXPECT_EQ(src.column, 2u);
+  EXPECT_EQ(src.range.begin.line, 20u);
+  EXPECT_EQ(src.range.begin.column, 2u);
 }
 
 TEST_F(IfStatementTest, IsIf) {
@@ -52,8 +52,8 @@ TEST_F(LoopStatementTest, Creation_WithSource) {
 
   LoopStatement l(Source{20, 2}, std::move(body), std::move(continuing));
   auto src = l.source();
-  EXPECT_EQ(src.line, 20u);
-  EXPECT_EQ(src.column, 2u);
+  EXPECT_EQ(src.range.begin.line, 20u);
+  EXPECT_EQ(src.range.begin.column, 2u);
 }
 
 TEST_F(LoopStatementTest, IsLoop) {
@@ -43,8 +43,8 @@ TEST_F(MemberAccessorExpressionTest, Creation_WithSource) {
 
   MemberAccessorExpression stmt(Source{20, 2}, std::move(str), std::move(mem));
   auto src = stmt.source();
-  EXPECT_EQ(src.line, 20u);
-  EXPECT_EQ(src.column, 2u);
+  EXPECT_EQ(src.range.begin.line, 20u);
+  EXPECT_EQ(src.range.begin.column, 2u);
 }
 
 TEST_F(MemberAccessorExpressionTest, IsMemberAccessor) {
@@ -34,10 +34,11 @@ class Node {
   /// @param source the source data
   void set_source(const Source& source) { source_ = source; }
 
+  // TODO(bclayton): Deprecate - use source().range.begin instead
   /// @returns the line the node was declared on
-  size_t line() const { return source_.line; }
+  size_t line() const { return source_.range.begin.line; }
   /// @returns the column the node was declared on
-  size_t column() const { return source_.column; }
+  size_t column() const { return source_.range.begin.column; }
 
   /// @returns true if the node is valid
   virtual bool IsValid() const = 0;
@@ -36,8 +36,8 @@ TEST_F(ReturnStatementTest, Creation) {
 TEST_F(ReturnStatementTest, Creation_WithSource) {
   ReturnStatement r(Source{20, 2});
   auto src = r.source();
-  EXPECT_EQ(src.line, 20u);
-  EXPECT_EQ(src.column, 2u);
+  EXPECT_EQ(src.range.begin.line, 20u);
+  EXPECT_EQ(src.range.begin.column, 2u);
 }
 
 TEST_F(ReturnStatementTest, IsReturn) {
@@ -37,8 +37,8 @@ TEST_F(ScalarConstructorExpressionTest, Creation_WithSource) {
   auto b = std::make_unique<BoolLiteral>(&bool_type, true);
   ScalarConstructorExpression c(Source{20, 2}, std::move(b));
   auto src = c.source();
-  EXPECT_EQ(src.line, 20u);
-  EXPECT_EQ(src.column, 2u);
+  EXPECT_EQ(src.range.begin.line, 20u);
+  EXPECT_EQ(src.range.begin.column, 2u);
 }
 
 TEST_F(ScalarConstructorExpressionTest, IsValid) {
@@ -53,8 +53,8 @@ TEST_F(SwitchStatementTest, Creation_WithSource) {
 
   SwitchStatement stmt(Source{20, 2}, std::move(ident), CaseStatementList());
   auto src = stmt.source();
-  EXPECT_EQ(src.line, 20u);
-  EXPECT_EQ(src.column, 2u);
+  EXPECT_EQ(src.range.begin.line, 20u);
+  EXPECT_EQ(src.range.begin.column, 2u);
 }
 
 TEST_F(SwitchStatementTest, IsSwitch) {
@@ -48,8 +48,8 @@ TEST_F(TypeConstructorExpressionTest, Creation_WithSource) {
 
   TypeConstructorExpression t(Source{20, 2}, &f32, std::move(expr));
   auto src = t.source();
-  EXPECT_EQ(src.line, 20u);
-  EXPECT_EQ(src.column, 2u);
+  EXPECT_EQ(src.range.begin.line, 20u);
+  EXPECT_EQ(src.range.begin.column, 2u);
 }
 
 TEST_F(TypeConstructorExpressionTest, IsTypeConstructor) {
@@ -38,8 +38,8 @@ TEST_F(UnaryOpExpressionTest, Creation_WithSource) {
   auto ident = std::make_unique<IdentifierExpression>("ident");
   UnaryOpExpression u(Source{20, 2}, UnaryOp::kNot, std::move(ident));
   auto src = u.source();
-  EXPECT_EQ(src.line, 20u);
-  EXPECT_EQ(src.column, 2u);
+  EXPECT_EQ(src.range.begin.line, 20u);
+  EXPECT_EQ(src.range.begin.column, 2u);
 }
 
 TEST_F(UnaryOpExpressionTest, IsUnaryOp) {
@@ -39,8 +39,8 @@ TEST_F(VariableDeclStatementTest, Creation_WithSource) {
 
   VariableDeclStatement stmt(Source{20, 2}, std::move(var));
   auto src = stmt.source();
-  EXPECT_EQ(src.line, 20u);
-  EXPECT_EQ(src.column, 2u);
+  EXPECT_EQ(src.range.begin.line, 20u);
+  EXPECT_EQ(src.range.begin.column, 2u);
 }
 
 TEST_F(VariableDeclStatementTest, IsVariableDecl) {
@@ -429,7 +429,7 @@ class StructuredTraverser {
   /// @param src a source record
   /// @returns true if |src| is a non-default Source
   bool HasSource(const Source& src) {
-    return src.line != 0 || src.column != 0;
+    return src.range.begin.line > 0 || src.range.begin.column != 0;
   }
 
 }  // namespace
@@ -474,12 +474,12 @@ bool ParserImpl::ParseInternalModule() {
 }
 
 void ParserImpl::RegisterLineNumbers() {
-  Source instruction_number{0, 0};
+  Source::Location instruction_number{0, 0};
 
   // Has there been an OpLine since the last OpNoLine or start of the module?
   bool in_op_line_scope = false;
   // The source location provided by the most recent OpLine instruction.
-  Source op_line_source{0, 0};
+  Source::Location op_line_source{0, 0};
   const bool run_on_debug_insts = true;
   module_->ForEachInst(
       [this, &in_op_line_scope, &op_line_source,
@@ -515,7 +515,7 @@ Source ParserImpl::GetSourceForInst(
   if (where == inst_source_.end()) {
     return {};
   }
-  return where->second;
+  return Source{where->second};
 }
 
 bool ParserImpl::ParseInternalModuleExceptFunctions() {
@@ -454,7 +454,8 @@ class ParserImpl : Reader {
   // is in effect for the instruction, map the instruction to its position
   // in the SPIR-V module, counting by instructions, where the first
   // instruction is line 1.
-  std::unordered_map<const spvtools::opt::Instruction*, Source> inst_source_;
+  std::unordered_map<const spvtools::opt::Instruction*, Source::Location>
+      inst_source_;
 
   // The set of IDs that are imports of the GLSL.std.450 extended instruction
   // sets.
@@ -139,14 +139,14 @@ TEST_F(SpvParserTest, Impl_Source_NoOpLine) {
   EXPECT_TRUE(p->error().empty());
   // Use instruction counting.
   auto s5 = p->GetSourceForResultIdForTest(5);
-  EXPECT_EQ(7u, s5.line);
-  EXPECT_EQ(0u, s5.column);
+  EXPECT_EQ(7u, s5.range.begin.line);
+  EXPECT_EQ(0u, s5.range.begin.column);
   auto s60 = p->GetSourceForResultIdForTest(60);
-  EXPECT_EQ(8u, s60.line);
-  EXPECT_EQ(0u, s60.column);
+  EXPECT_EQ(8u, s60.range.begin.line);
+  EXPECT_EQ(0u, s60.range.begin.column);
   auto s1 = p->GetSourceForResultIdForTest(1);
-  EXPECT_EQ(10u, s1.line);
-  EXPECT_EQ(0u, s1.column);
+  EXPECT_EQ(10u, s1.range.begin.line);
+  EXPECT_EQ(0u, s1.range.begin.column);
 }
 
 TEST_F(SpvParserTest, Impl_Source_WithOpLine_WithOpNoLine) {
@@ -172,15 +172,15 @@ TEST_F(SpvParserTest, Impl_Source_WithOpLine_WithOpNoLine) {
   EXPECT_TRUE(p->error().empty());
   // Use the information from the OpLine that is still in scope.
   auto s5 = p->GetSourceForResultIdForTest(5);
-  EXPECT_EQ(42u, s5.line);
-  EXPECT_EQ(53u, s5.column);
+  EXPECT_EQ(42u, s5.range.begin.line);
+  EXPECT_EQ(53u, s5.range.begin.column);
   auto s60 = p->GetSourceForResultIdForTest(60);
-  EXPECT_EQ(42u, s60.line);
-  EXPECT_EQ(53u, s60.column);
+  EXPECT_EQ(42u, s60.range.begin.line);
+  EXPECT_EQ(53u, s60.range.begin.column);
   // After OpNoLine, revert back to instruction counting.
   auto s1 = p->GetSourceForResultIdForTest(1);
-  EXPECT_EQ(13u, s1.line);
-  EXPECT_EQ(0u, s1.column);
+  EXPECT_EQ(13u, s1.range.begin.line);
+  EXPECT_EQ(0u, s1.range.begin.column);
 }
 
 TEST_F(SpvParserTest, Impl_Source_InvalidId) {
@@ -201,8 +201,8 @@ TEST_F(SpvParserTest, Impl_Source_InvalidId) {
   EXPECT_TRUE(p->Parse());
   EXPECT_TRUE(p->error().empty());
   auto s99 = p->GetSourceForResultIdForTest(99);
-  EXPECT_EQ(0u, s99.line);
-  EXPECT_EQ(0u, s99.column);
+  EXPECT_EQ(0u, s99.range.begin.line);
+  EXPECT_EQ(0u, s99.range.begin.column);
 }
 
 TEST_F(SpvParserTest, Impl_IsValidIdentifier) {
@@ -32,8 +32,10 @@ bool is_whitespace(char c) {
 
 }  // namespace
 
-Lexer::Lexer(const std::string& input)
-    : input_(input), len_(static_cast<uint32_t>(input.size())) {}
+Lexer::Lexer(Source::File const* file)
+    : file_(file),
+      len_(static_cast<uint32_t>(file->content.size())),
+      location_{1, 1} {}
 
 Lexer::~Lexer() = default;
 
@@ -79,7 +81,11 @@ Token Lexer::next() {
 }
 
 Source Lexer::make_source() const {
-  return Source{line_, column_};
+  Source src{};
+  src.file = file_;
+  src.range.begin = location_;
+  src.range.end = location_;
+  return src;
 }
 
 bool Lexer::is_eof() const {
@@ -103,24 +109,24 @@ bool Lexer::is_hex(char ch) const {
 }
 
 bool Lexer::matches(size_t pos, const std::string& substr) {
-  if (pos >= input_.size())
+  if (pos >= len_)
     return false;
-  return input_.substr(pos, substr.size()) == substr;
+  return file_->content.substr(pos, substr.size()) == substr;
 }
 
 void Lexer::skip_whitespace() {
   for (;;) {
     auto pos = pos_;
-    while (!is_eof() && is_whitespace(input_[pos_])) {
+    while (!is_eof() && is_whitespace(file_->content[pos_])) {
       if (matches(pos_, "\n")) {
         pos_++;
-        line_++;
-        column_ = 1;
+        location_.line++;
+        location_.column = 1;
         continue;
       }
 
       pos_++;
-      column_++;
+      location_.column++;
     }
 
     skip_comments();
@@ -139,7 +145,7 @@ void Lexer::skip_comments() {
 
   while (!is_eof() && !matches(pos_, "\n")) {
     pos_++;
-    column_++;
+    location_.column++;
   }
 }
 
@@ -152,7 +158,7 @@ Token Lexer::try_float() {
   if (matches(end, "-")) {
     end++;
   }
-  while (end < len_ && is_digit(input_[end])) {
+  while (end < len_ && is_digit(file_->content[end])) {
     end++;
   }
 
@@ -161,7 +167,7 @@ Token Lexer::try_float() {
   }
   end++;
 
-  while (end < len_ && is_digit(input_[end])) {
+  while (end < len_ && is_digit(file_->content[end])) {
     end++;
   }
 
@@ -173,7 +179,7 @@ Token Lexer::try_float() {
   }
 
   auto exp_start = end;
-  while (end < len_ && isdigit(input_[end])) {
+  while (end < len_ && isdigit(file_->content[end])) {
     end++;
   }
 
@@ -182,14 +188,14 @@ Token Lexer::try_float() {
     return {};
   }
 
-  auto str = input_.substr(start, end - start);
+  auto str = file_->content.substr(start, end - start);
   if (str == "." || str == "-.")
     return {};
 
   pos_ = end;
-  column_ += (end - start);
+  location_.column += (end - start);
 
-  auto res = strtod(input_.c_str() + start, nullptr);
+  auto res = strtod(file_->content.c_str() + start, nullptr);
   // This handles if the number is a really small in the exponent
   if (res > 0 && res < static_cast<double>(std::numeric_limits<float>::min())) {
     return {Token::Type::kError, source, "f32 (" + str + " too small"};
@@ -205,28 +211,31 @@ Token Lexer::try_float() {
   return {source, static_cast<float>(res)};
 }
 
-Token Lexer::build_token_from_int_if_possible(const Source& source,
+Token Lexer::build_token_from_int_if_possible(Source source,
                                               size_t start,
                                               size_t end,
                                               int32_t base) {
-  auto res = strtoll(input_.c_str() + start, nullptr, base);
+  auto res = strtoll(file_->content.c_str() + start, nullptr, base);
   if (matches(pos_, "u")) {
     if (static_cast<uint64_t>(res) >
         static_cast<uint64_t>(std::numeric_limits<uint32_t>::max())) {
-      return {Token::Type::kError, source,
-              "u32 (" + input_.substr(start, end - start) + ") too large"};
+      return {
+          Token::Type::kError, source,
+          "u32 (" + file_->content.substr(start, end - start) + ") too large"};
     }
     pos_ += 1;
     return {source, static_cast<uint32_t>(res)};
   }
 
   if (res < static_cast<int64_t>(std::numeric_limits<int32_t>::min())) {
-    return {Token::Type::kError, source,
-            "i32 (" + input_.substr(start, end - start) + ") too small"};
+    return {
+        Token::Type::kError, source,
+        "i32 (" + file_->content.substr(start, end - start) + ") too small"};
  }
  if (res > static_cast<int64_t>(std::numeric_limits<int32_t>::max())) {
-    return {Token::Type::kError, source,
-            "i32 (" + input_.substr(start, end - start) + ") too large"};
+    return {
+        Token::Type::kError, source,
+        "i32 (" + file_->content.substr(start, end - start) + ") too large"};
   }
   return {source, static_cast<int32_t>(res)};
 }
@@ -245,12 +254,12 @@ Token Lexer::try_hex_integer() {
   }
   end += 2;
 
-  while (!is_eof() && is_hex(input_[end])) {
+  while (!is_eof() && is_hex(file_->content[end])) {
     end += 1;
   }
 
   pos_ = end;
-  column_ += (end - start);
+  location_.column += (end - start);
 
   return build_token_from_int_if_possible(source, start, end, 16);
 }
@@ -264,41 +273,41 @@ Token Lexer::try_integer() {
   if (matches(end, "-")) {
     end++;
   }
-  if (end >= len_ || !is_digit(input_[end])) {
+  if (end >= len_ || !is_digit(file_->content[end])) {
     return {};
   }
 
   auto first = end;
-  while (end < len_ && is_digit(input_[end])) {
+  while (end < len_ && is_digit(file_->content[end])) {
     end++;
   }
 
   // If the first digit is a zero this must only be zero as leading zeros
   // are not allowed.
-  if (input_[first] == '0' && (end - first != 1))
+  if (file_->content[first] == '0' && (end - first != 1))
     return {};
 
   pos_ = end;
-  column_ += (end - start);
+  location_.column += (end - start);
 
   return build_token_from_int_if_possible(source, start, end, 10);
 }
 
 Token Lexer::try_ident() {
   // Must begin with an a-zA-Z_
-  if (!is_alpha(input_[pos_])) {
+  if (!is_alpha(file_->content[pos_])) {
     return {};
   }
 
   auto source = make_source();
 
   auto s = pos_;
-  while (!is_eof() && is_alphanum(input_[pos_])) {
+  while (!is_eof() && is_alphanum(file_->content[pos_])) {
     pos_++;
-    column_++;
+    location_.column++;
   }
 
-  auto str = input_.substr(s, pos_ - s);
+  auto str = file_->content.substr(s, pos_ - s);
   auto t = check_reserved(source, str);
   if (!t.IsUninitialized()) {
     return t;
@@ -325,10 +334,10 @@ Token Lexer::try_string() {
   }
   auto end = pos_;
   pos_++;
-  column_ += (pos_ - start) + 1;
+  location_.column += (pos_ - start) + 1;
 
   return {Token::Type::kStringLiteral, source,
-          input_.substr(start, end - start)};
+          file_->content.substr(start, end - start)};
 }
 
 Token Lexer::try_punctuation() {
@@ -338,131 +347,131 @@ Token Lexer::try_punctuation() {
   if (matches(pos_, "[[")) {
     type = Token::Type::kAttrLeft;
     pos_ += 2;
-    column_ += 2;
+    location_.column += 2;
   } else if (matches(pos_, "]]")) {
     type = Token::Type::kAttrRight;
     pos_ += 2;
-    column_ += 2;
+    location_.column += 2;
   } else if (matches(pos_, "(")) {
     type = Token::Type::kParenLeft;
     pos_ += 1;
-    column_ += 1;
+    location_.column += 1;
   } else if (matches(pos_, ")")) {
     type = Token::Type::kParenRight;
    pos_ += 1;
-    column_ += 1;
+    location_.column += 1;
   } else if (matches(pos_, "[")) {
     type = Token::Type::kBracketLeft;
     pos_ += 1;
-    column_ += 1;
+    location_.column += 1;
   } else if (matches(pos_, "]")) {
     type = Token::Type::kBracketRight;
     pos_ += 1;
-    column_ += 1;
+    location_.column += 1;
   } else if (matches(pos_, "{")) {
     type = Token::Type::kBraceLeft;
     pos_ += 1;
-    column_ += 1;
+    location_.column += 1;
   } else if (matches(pos_, "}")) {
     type = Token::Type::kBraceRight;
     pos_ += 1;
-    column_ += 1;
+    location_.column += 1;
   } else if (matches(pos_, "&&")) {
     type = Token::Type::kAndAnd;
     pos_ += 2;
-    column_ += 2;
+    location_.column += 2;
   } else if (matches(pos_, "&")) {
     type = Token::Type::kAnd;
     pos_ += 1;
-    column_ += 1;
+    location_.column += 1;
   } else if (matches(pos_, "/")) {
     type = Token::Type::kForwardSlash;
     pos_ += 1;
-    column_ += 1;
+    location_.column += 1;
   } else if (matches(pos_, "!=")) {
     type = Token::Type::kNotEqual;
     pos_ += 2;
-    column_ += 2;
+    location_.column += 2;
   } else if (matches(pos_, "!")) {
     type = Token::Type::kBang;
     pos_ += 1;
-    column_ += 1;
+    location_.column += 1;
   } else if (matches(pos_, "::")) {
     type = Token::Type::kNamespace;
     pos_ += 2;
-    column_ += 2;
+    location_.column += 2;
   } else if (matches(pos_, ":")) {
     type = Token::Type::kColon;
     pos_ += 1;
-    column_ += 1;
+    location_.column += 1;
  } else if (matches(pos_, ",")) {
     type = Token::Type::kComma;
     pos_ += 1;
-    column_ += 1;
+    location_.column += 1;
   } else if (matches(pos_, "==")) {
     type = Token::Type::kEqualEqual;
     pos_ += 2;
-    column_ += 2;
+    location_.column += 2;
   } else if (matches(pos_, "=")) {
     type = Token::Type::kEqual;
     pos_ += 1;
-    column_ += 1;
+    location_.column += 1;
   } else if (matches(pos_, ">=")) {
     type = Token::Type::kGreaterThanEqual;
     pos_ += 2;
-    column_ += 2;
+    location_.column += 2;
   } else if (matches(pos_, ">")) {
     type = Token::Type::kGreaterThan;
     pos_ += 1;
-    column_ += 1;
+    location_.column += 1;
   } else if (matches(pos_, "<=")) {
     type = Token::Type::kLessThanEqual;
     pos_ += 2;
-    column_ += 2;
+    location_.column += 2;
   } else if (matches(pos_, "<")) {
     type = Token::Type::kLessThan;
     pos_ += 1;
-    column_ += 1;
+    location_.column += 1;
   } else if (matches(pos_, "%")) {
     type = Token::Type::kMod;
     pos_ += 1;
-    column_ += 1;
+    location_.column += 1;
   } else if (matches(pos_, "->")) {
     type = Token::Type::kArrow;
     pos_ += 2;
-    column_ += 2;
+    location_.column += 2;
   } else if (matches(pos_, "-")) {
     type = Token::Type::kMinus;
     pos_ += 1;
-    column_ += 1;
+    location_.column += 1;
   } else if (matches(pos_, ".")) {
     type = Token::Type::kPeriod;
     pos_ += 1;
-    column_ += 1;
+    location_.column += 1;
   } else if (matches(pos_, "+")) {
     type = Token::Type::kPlus;
     pos_ += 1;
-    column_ += 1;
+    location_.column += 1;
   } else if (matches(pos_, "||")) {
     type = Token::Type::kOrOr;
     pos_ += 2;
-    column_ += 2;
+    location_.column += 2;
   } else if (matches(pos_, "|")) {
     type = Token::Type::kOr;
     pos_ += 1;
-    column_ += 1;
+    location_.column += 1;
   } else if (matches(pos_, ";")) {
     type = Token::Type::kSemicolon;
     pos_ += 1;
-    column_ += 1;
+    location_.column += 1;
   } else if (matches(pos_, "*")) {
     type = Token::Type::kStar;
     pos_ += 1;
-    column_ += 1;
+    location_.column += 1;
   } else if (matches(pos_, "^")) {
     type = Token::Type::kXor;
     pos_ += 1;
-    column_ += 1;
+    location_.column += 1;
   }
 
   return {type, source};
@@ -28,8 +28,8 @@ namespace wgsl {
 class Lexer {
  public:
   /// Creates a new Lexer
-  /// @param input the input to parse
-  explicit Lexer(const std::string& input);
+  /// @param file the input file to parse
+  explicit Lexer(Source::File const* file);
   ~Lexer();
 
   /// Returns the next token in the input stream
@@ -40,7 +40,7 @@ class Lexer {
   void skip_whitespace();
   void skip_comments();
 
-  Token build_token_from_int_if_possible(const Source& source,
+  Token build_token_from_int_if_possible(Source source,
                                          size_t start,
                                          size_t end,
                                          int32_t base);
@@ -63,15 +63,13 @@ class Lexer {
   bool matches(size_t pos, const std::string& substr);
 
   /// The source to parse
-  std::string input_;
+  Source::File const* file_;
   /// The length of the input
   uint32_t len_ = 0;
   /// The current position within the input
   uint32_t pos_ = 0;
-  /// The current line within the input
-  uint32_t line_ = 1;
-  /// The current column within the input
-  uint32_t column_ = 1;
+  /// The current location within the input
+  Source::Location location_;
 };
 
 }  // namespace wgsl
@@ -26,13 +26,15 @@ namespace {
 using LexerTest = testing::Test;
 
 TEST_F(LexerTest, Empty) {
-  Lexer l("");
+  Source::File file("test.wgsl", "");
+  Lexer l(&file);
   auto t = l.next();
   EXPECT_TRUE(t.IsEof());
 }
 
 TEST_F(LexerTest, Skips_Whitespace) {
-  Lexer l("\t\r\n\t ident\t\n\t \r ");
+  Source::File file("test.wgsl", "\t\r\n\t ident\t\n\t \r ");
+  Lexer l(&file);
 
   auto t = l.next();
   EXPECT_TRUE(t.IsIdentifier());
@@ -45,10 +47,11 @@ TEST_F(LexerTest, Skips_Whitespace) {
 }
 
 TEST_F(LexerTest, Skips_Comments) {
-  Lexer l(R"(#starts with comment
+  Source::File file("test.wgsl", R"(#starts with comment
 ident1 #ends with comment
  # blank line
  ident2)");
+  Lexer l(&file);
 
   auto t = l.next();
   EXPECT_TRUE(t.IsIdentifier());
@@ -67,7 +70,8 @@ ident1 #ends with comment
 }
 
 TEST_F(LexerTest, StringTest_Parse) {
-  Lexer l(R"(id "this is string content" id2)");
+  Source::File file("test.wgsl", R"(id "this is string content" id2)");
+  Lexer l(&file);
 
   auto t = l.next();
   EXPECT_TRUE(t.IsIdentifier());
@@ -89,7 +93,8 @@ TEST_F(LexerTest, StringTest_Parse) {
 }
 
 TEST_F(LexerTest, StringTest_Unterminated) {
-  Lexer l(R"(id "this is string content)");
+  Source::File file("test.wgsl", R"(id "this is string content)");
+  Lexer l(&file);
 
   auto t = l.next();
   EXPECT_TRUE(t.IsIdentifier());
@@ -116,7 +121,8 @@ inline std::ostream& operator<<(std::ostream& out, FloatData data) {
 using FloatTest = testing::TestWithParam<FloatData>;
 TEST_P(FloatTest, Parse) {
   auto params = GetParam();
-  Lexer l(std::string(params.input));
+  Source::File file("test.wgsl", params.input);
+  Lexer l(&file);
 
   auto t = l.next();
   EXPECT_TRUE(t.IsFloatLiteral());
@@ -149,7 +155,8 @@ INSTANTIATE_TEST_SUITE_P(LexerTest,
 
 using FloatTest_Invalid = testing::TestWithParam<const char*>;
 TEST_P(FloatTest_Invalid, Handles) {
-  Lexer l(GetParam());
+  Source::File file("test.wgsl", GetParam());
+  Lexer l(&file);
 
   auto t = l.next();
   EXPECT_FALSE(t.IsFloatLiteral());
@@ -166,7 +173,8 @@ INSTANTIATE_TEST_SUITE_P(LexerTest,
 
 using IdentifierTest = testing::TestWithParam<const char*>;
 TEST_P(IdentifierTest, Parse) {
-  Lexer l(GetParam());
+  Source::File file("test.wgsl", GetParam());
+  Lexer l(&file);
 
   auto t = l.next();
   EXPECT_TRUE(t.IsIdentifier());
@@ -180,7 +188,8 @@ INSTANTIATE_TEST_SUITE_P(
     testing::Values("test01", "_test_", "test_", "_test", "_01", "_test01"));
 
 TEST_F(LexerTest, IdentifierTest_DoesNotStartWithNumber) {
-  Lexer l("01test");
+  Source::File file("test.wgsl", "01test");
+  Lexer l(&file);
 
   auto t = l.next();
   EXPECT_FALSE(t.IsIdentifier());
@@ -198,7 +207,8 @@ inline std::ostream& operator<<(std::ostream& out, HexSignedIntData data) {
 using IntegerTest_HexSigned = testing::TestWithParam<HexSignedIntData>;
 TEST_P(IntegerTest_HexSigned, Matches) {
   auto params = GetParam();
-  Lexer l(std::string(params.input));
+  Source::File file("test.wgsl", params.input);
+  Lexer l(&file);
 
   auto t = l.next();
   EXPECT_TRUE(t.IsSintLiteral());
@@ -218,14 +228,16 @@ INSTANTIATE_TEST_SUITE_P(
         HexSignedIntData{"0x7FFFFFFF", std::numeric_limits<int32_t>::max()}));
 
 TEST_F(LexerTest, IntegerTest_HexSignedTooLarge) {
-  Lexer l("0x80000000");
+  Source::File file("test.wgsl", "0x80000000");
+  Lexer l(&file);
   auto t = l.next();
   ASSERT_TRUE(t.IsError());
   EXPECT_EQ(t.to_str(), "i32 (0x80000000) too large");
 }
 
 TEST_F(LexerTest, IntegerTest_HexSignedTooSmall) {
-  Lexer l("-0x8000000F");
+  Source::File file("test.wgsl", "-0x8000000F");
+  Lexer l(&file);
   auto t = l.next();
   ASSERT_TRUE(t.IsError());
   EXPECT_EQ(t.to_str(), "i32 (-0x8000000F) too small");
@@ -242,7 +254,8 @@ inline std::ostream& operator<<(std::ostream& out, HexUnsignedIntData data) {
 using IntegerTest_HexUnsigned = testing::TestWithParam<HexUnsignedIntData>;
 TEST_P(IntegerTest_HexUnsigned, Matches) {
   auto params = GetParam();
-  Lexer l(std::string(params.input));
+  Source::File file("test.wgsl", params.input);
+  Lexer l(&file);
 
   auto t = l.next();
   EXPECT_TRUE(t.IsUintLiteral());
@@ -265,7 +278,8 @@ INSTANTIATE_TEST_SUITE_P(
                             std::numeric_limits<uint32_t>::max()}));
 
 TEST_F(LexerTest, IntegerTest_HexUnsignedTooLarge) {
-  Lexer l("0xffffffffffu");
+  Source::File file("test.wgsl", "0xffffffffffu");
+  Lexer l(&file);
   auto t = l.next();
   ASSERT_TRUE(t.IsError());
   EXPECT_EQ(t.to_str(), "u32 (0xffffffffff) too large");
@@ -282,7 +296,8 @@ inline std::ostream& operator<<(std::ostream& out, UnsignedIntData data) {
 using IntegerTest_Unsigned = testing::TestWithParam<UnsignedIntData>;
 TEST_P(IntegerTest_Unsigned, Matches) {
   auto params = GetParam();
-  Lexer l(params.input);
+  Source::File file("test.wgsl", params.input);
+  Lexer l(&file);
 
   auto t = l.next();
   EXPECT_TRUE(t.IsUintLiteral());
@@ -308,7 +323,8 @@ inline std::ostream& operator<<(std::ostream& out, SignedIntData data) {
 using IntegerTest_Signed = testing::TestWithParam<SignedIntData>;
 TEST_P(IntegerTest_Signed, Matches) {
   auto params = GetParam();
-  Lexer l(params.input);
+  Source::File file("test.wgsl", params.input);
+  Lexer l(&file);
 
   auto t = l.next();
   EXPECT_TRUE(t.IsSintLiteral());
@@ -328,7 +344,8 @@ INSTANTIATE_TEST_SUITE_P(
 
 using IntegerTest_Invalid = testing::TestWithParam<const char*>;
 TEST_P(IntegerTest_Invalid, Parses) {
-  Lexer l(GetParam());
+  Source::File file("test.wgsl", GetParam());
+  Lexer l(&file);
 
   auto t = l.next();
   EXPECT_FALSE(t.IsSintLiteral());
@@ -349,7 +366,8 @@ inline std::ostream& operator<<(std::ostream& out, TokenData data) {
 using PunctuationTest = testing::TestWithParam<TokenData>;
 TEST_P(PunctuationTest, Parses) {
   auto params = GetParam();
-  Lexer l(params.input);
+  Source::File file("test.wgsl", params.input);
+  Lexer l(&file);
 
   auto t = l.next();
   EXPECT_TRUE(t.Is(params.type));
@@ -398,7 +416,8 @@ INSTANTIATE_TEST_SUITE_P(
 using KeywordTest = testing::TestWithParam<TokenData>;
 TEST_P(KeywordTest, Parses) {
   auto params = GetParam();
-  Lexer l(params.input);
+  Source::File file("test.wgsl", params.input);
+  Lexer l(&file);
 
   auto t = l.next();
   EXPECT_TRUE(t.Is(params.type)) << params.input;
@@ -547,7 +566,8 @@ INSTANTIATE_TEST_SUITE_P(
 using KeywordTest_Reserved = testing::TestWithParam<const char*>;
 TEST_P(KeywordTest_Reserved, Parses) {
   auto* keyword = GetParam();
-  Lexer l(keyword);
+  Source::File file("test.wgsl", keyword);
+  Lexer l(&file);
 
   auto t = l.next();
   EXPECT_TRUE(t.IsReservedKeyword());
@@ -20,8 +20,14 @@ namespace tint {
 namespace reader {
 namespace wgsl {
 
-Parser::Parser(Context* ctx, const std::string& input)
-    : Reader(ctx), impl_(std::make_unique<ParserImpl>(ctx, input)) {}
+Parser::Parser(Context* ctx, Source::File const* file)
+    : Reader(ctx), impl_(std::make_unique<ParserImpl>(ctx, file, false)) {}
+
+Parser::Parser(Context* ctx, const std::string& content)
+    : Reader(ctx),
+      impl_(std::make_unique<ParserImpl>(ctx,
+                                         new Source::File("", content),
+                                         true)) {}
 
 Parser::~Parser() = default;
 
@@ -19,6 +19,7 @@
 #include <string>
 
 #include "src/reader/reader.h"
+#include "src/source.h"
 
 namespace tint {
 namespace reader {
@@ -29,10 +30,18 @@ class ParserImpl;
 /// Parser for WGSL source data
 class Parser : public Reader {
  public:
-  /// Creates a new parser
+  /// Creates a new parser from the given file.
   /// @param ctx the non-null context object
-  /// @param input the input string to parse
-  Parser(Context* ctx, const std::string& input);
+  /// @param file the input source file to parse
+  Parser(Context* ctx, Source::File const* file);
+
+  /// Creates a new parser from the given file content.
+  /// @param ctx the non-null context object
+  /// @param content the input string to parse
+  /// TODO(bclayton): Remove this constructor.
+  /// It purely exists to break up changes into bite sized pieces.
+  Parser(Context* ctx, const std::string& content);
 
   ~Parser() override;
 
   /// Run the parser
@@ -119,10 +119,17 @@ bool IsFunctionDecoration(Token t) {
 
 }  // namespace
 
-ParserImpl::ParserImpl(Context* ctx, const std::string& input)
-    : ctx_(*ctx), lexer_(std::make_unique<Lexer>(input)) {}
+ParserImpl::ParserImpl(Context* ctx, Source::File const* file, bool owns_file)
+    : ctx_(*ctx),
+      lexer_(std::make_unique<Lexer>(file)),
+      file_(file),
+      owns_file_(owns_file) {}
 
-ParserImpl::~ParserImpl() = default;
+ParserImpl::~ParserImpl() {
+  if (owns_file_) {
+    delete file_;
+  }
+}
 
 void ParserImpl::set_error(const Token& t, const std::string& err) {
   auto prefix =
@@ -76,17 +76,21 @@ struct ForHeader {
 /// ParserImpl for WGSL source data
 class ParserImpl {
  public:
-  /// Creates a new parser
+  /// Creates a new parser using the given file
   /// @param ctx the non-null context object
-  /// @param input the input string to parse
-  ParserImpl(Context* ctx, const std::string& input);
+  /// @param file the input source file to parse
+  /// @param owns_file if true, the file will be deleted on parser destruction.
+  /// TODO(bclayton): Remove owns_file.
+  /// It purely exists to break up changes into bite sized pieces.
+  ParserImpl(Context* ctx, Source::File const* file, bool owns_file = false);
 
   ~ParserImpl();
 
   /// Run the parser
   /// @returns true if the parse was successful, false otherwise.
   bool Parse();
 
-  /// @returns true if an error was encountered
+  /// @returns true if an error was encountered.
   bool has_error() const { return error_.size() > 0; }
   /// @returns the parser error string
   const std::string& error() const { return error_; }
@@ -411,6 +415,9 @@ class ParserImpl {
   std::deque<Token> token_queue_;
   std::unordered_map<std::string, ast::type::Type*> registered_constructs_;
   ast::Module module_;
+
+  Source::File const* file_;
+  bool owns_file_;
 };
 
 }  // namespace wgsl
@@ -27,6 +27,7 @@ void ParserImplTest::SetUp() {
 
 void ParserImplTest::TearDown() {
   impl_ = nullptr;
+  files_.clear();
 }
 
 }  // namespace wgsl
@@ -43,7 +43,9 @@ class ParserImplTest : public testing::Test {
   /// @param str the string to parse
   /// @returns the parser implementation
   ParserImpl* parser(const std::string& str) {
-    impl_ = std::make_unique<ParserImpl>(&ctx_, str);
+    auto file = std::make_unique<Source::File>("test.wgsl", str);
+    impl_ = std::make_unique<ParserImpl>(&ctx_, file.get());
+    files_.emplace_back(std::move(file));
     return impl_.get();
   }
 
@@ -51,6 +53,7 @@ class ParserImplTest : public testing::Test {
   TypeManager* tm() { return &(ctx_.type_mgr()); }
 
  private:
+  std::vector<std::unique_ptr<Source::File>> files_;
   std::unique_ptr<ParserImpl> impl_;
   Context ctx_;
 };
@@ -67,13 +70,18 @@ class ParserImplTestWithParam : public testing::TestWithParam<T> {
   void SetUp() override { ctx_.Reset(); }
 
   /// Tears down the test helper
-  void TearDown() override { impl_ = nullptr; }
+  void TearDown() override {
+    impl_ = nullptr;
+    files_.clear();
+  }
 
   /// Retrieves the parser from the helper
   /// @param str the string to parse
   /// @returns the parser implementation
   ParserImpl* parser(const std::string& str) {
-    impl_ = std::make_unique<ParserImpl>(&ctx_, str);
+    auto file = std::make_unique<Source::File>("test.wgsl", str);
+    impl_ = std::make_unique<ParserImpl>(&ctx_, file.get());
+    files_.emplace_back(std::move(file));
     return impl_.get();
   }
 
@@ -81,6 +89,7 @@ class ParserImplTestWithParam : public testing::TestWithParam<T> {
   TypeManager* tm() { return &(ctx_.type_mgr()); }
 
  private:
+  std::vector<std::unique_ptr<Source::File>> files_;
   std::unique_ptr<ParserImpl> impl_;
   Context ctx_;
 };
@@ -29,8 +29,8 @@ TEST_F(ParserImplTest, VariableDecl_Parses) {
   ASSERT_NE(var, nullptr);
   ASSERT_EQ(var->name(), "my_var");
   ASSERT_NE(var->type(), nullptr);
-  ASSERT_EQ(var->source().line, 1u);
-  ASSERT_EQ(var->source().column, 1u);
+  ASSERT_EQ(var->source().range.begin.line, 1u);
+  ASSERT_EQ(var->source().range.begin.column, 1u);
   ASSERT_TRUE(var->type()->IsF32());
 }
 
@@ -26,14 +26,14 @@ using ParserTest = testing::Test;
 
 TEST_F(ParserTest, Empty) {
   Context ctx;
-  Parser p(&ctx, "");
+  Source::File file("test.wgsl", "");
+  Parser p(&ctx, &file);
   ASSERT_TRUE(p.Parse()) << p.error();
 }
 
 TEST_F(ParserTest, Parses) {
   Context ctx;
 
-  Parser p(&ctx, R"(
+  Source::File file("test.wgsl", R"(
 [[location(0)]] var<out> gl_FragColor : vec4<f32>;
 
 [[stage(vertex)]]
@@ -41,6 +41,7 @@ fn main() -> void {
   gl_FragColor = vec4<f32>(.4, .2, .3, 1);
 }
 )");
+  Parser p(&ctx, &file);
   ASSERT_TRUE(p.Parse()) << p.error();
 
   auto m = p.module();
@@ -50,10 +51,11 @@ fn main() -> void {
 
 TEST_F(ParserTest, HandlesError) {
   Context ctx;
-  Parser p(&ctx, R"(
+  Source::File file("test.wgsl", R"(
 fn main() -> { # missing return type
   return;
 })");
+  Parser p(&ctx, &file);
 
   ASSERT_FALSE(p.Parse());
   ASSERT_TRUE(p.has_error());
@@ -778,10 +778,12 @@ class Token {
   /// @returns true if token is a 'workgroup_size'
   bool IsWorkgroupSize() const { return type_ == Type::kWorkgroupSize; }
 
+  // TODO(bclayton): Deprecate - use source().range.begin instead
   /// @returns the source line of the token
-  size_t line() const { return source_.line; }
+  size_t line() const { return source_.range.begin.line; }
   /// @returns the source column of the token
-  size_t column() const { return source_.column; }
+  size_t column() const { return source_.range.begin.column; }
 
   /// @returns the source information for this token
   Source source() const { return source_; }
 
@@ -0,0 +1,38 @@
+// Copyright 2020 The Tint Authors.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//     http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+#include <sstream>
+
+#include "source.h"
+
+namespace tint {
+namespace {
+std::vector<std::string> split_lines(const std::string& str) {
+  std::stringstream stream(str);
+  std::string line;
+  std::vector<std::string> lines;
+  while (std::getline(stream, line, '\n')) {
+    lines.emplace_back(std::move(line));
+  }
+  return lines;
+}
+}  // namespace
+
+Source::File::File(const std::string& file_path,
+                   const std::string& file_content)
+    : path(file_path), content(file_content), lines(split_lines(content)) {}
+
+Source::File::~File() = default;
+
+}  // namespace tint
src/source.h
@@ -18,14 +18,71 @@
 
 #include <stddef.h>
 
+#include <string>
+#include <vector>
+
 namespace tint {
 
-/// Represents a line and column position
-struct Source {
-  /// The line the token appeared on
-  size_t line = 0;
-  /// The column the token appeared in
-  size_t column = 0;
+/// Source describes a range of characters within a source file.
+class Source {
+ public:
+  /// File describes a source file, including path and content.
+  class File {
+   public:
+    /// Constructs the File with the given file path and content.
+    File(const std::string& file_path, const std::string& file_content);
+    ~File();
+
+    const std::string path;                /// file path (optional)
+    const std::string content;             /// file content
+    const std::vector<std::string> lines;  /// |content| split by lines
+  };
+
+  /// Location holds a 1-based line and column index.
+  /// 0's for |line| or |column| represent invalid values.
+  class Location {
+   public:
+    size_t line = 0;
+    size_t column = 0;
+  };
+
+  /// Range holds a Location interval described by [begin, end).
+  class Range {
+   public:
+    /// Constructs a zero initialized Range.
+    inline Range() = default;
+
+    /// Constructs a zero-length Range starting at |loc|.
+    inline explicit Range(const Location& loc) : begin(loc), end(loc) {}
+
+    /// Constructs the Range beginning at |b| and ending at |e|.
+    inline Range(const Location& b, const Location& e) : begin(b), end(e) {}
+
+    Location begin;  /// The location of the first character in the range.
+    Location end;  /// The location of one-past the last character in the range.
+  };
+
+  /// Constructs the Source with an zero initialized Range and null File.
+  inline Source() = default;
+
+  /// Constructs the Source with the Range |rng| and a null File.
+  inline explicit Source(const Range& rng) : range(rng) {}
+
+  /// Constructs the Source with the Range |loc| and a null File.
+  inline explicit Source(const Location& loc) : range(Range(loc)) {}
+
+  /// Constructs the Source with the Range |rng| and File |f|.
+  inline Source(const Range& rng, File const* f) : range(rng), file(f) {}
+
+  /// Constructs the Source with the zero-length range starting at |line| and
+  /// |column| with a null File.
+  /// TODO(bclayton): Remove this constructor.
+  /// It purely exists to break up changes into bite sized pieces.
+  inline explicit Source(size_t line, size_t column)
+      : Source(Location{line, column}) {}
+
+  Range range;
+  File const* file = nullptr;
 };
 
 }  // namespace tint
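The commit message notes that File pre-splits its content into lines so the full line containing an error can be printed. The sketch below shows one hypothetical way a diagnostic printer could combine File::lines with Source::Range; it is not part of this change, and the function name, output formatting, and include path are illustrative assumptions.

#include <iostream>
#include <string>

#include "src/source.h"  // assumed include path

// Hypothetical helper: prints "path:line:column: message", echoes the
// offending line from File::lines, then underlines the Range with carets.
void PrintDiagnostic(const tint::Source& src, const std::string& msg) {
  const auto& begin = src.range.begin;
  const std::string path =
      src.file != nullptr ? src.file->path : std::string("<unknown>");
  std::cerr << path << ":" << begin.line << ":" << begin.column << ": " << msg
            << std::endl;

  // Locations are 1-based; 0 means "no location". |lines| is 0-indexed.
  if (src.file == nullptr || begin.line == 0 || begin.column == 0 ||
      begin.line > src.file->lines.size()) {
    return;
  }
  const std::string& line = src.file->lines[begin.line - 1];
  const size_t len = src.range.end.column > begin.column
                         ? src.range.end.column - begin.column
                         : 1;
  std::cerr << line << "\n"
            << std::string(begin.column - 1, ' ') << std::string(len, '^')
            << std::endl;
}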
@@ -62,9 +62,9 @@ TypeDeterminer::~TypeDeterminer() = default;
 
 void TypeDeterminer::set_error(const Source& src, const std::string& msg) {
   error_ = "";
-  if (src.line > 0) {
-    error_ +=
-        std::to_string(src.line) + ":" + std::to_string(src.column) + ": ";
+  if (src.range.begin.line > 0) {
+    error_ += std::to_string(src.range.begin.line) + ":" +
+              std::to_string(src.range.begin.column) + ": ";
   }
   error_ += msg;
 }
@@ -36,8 +36,8 @@ ValidatorImpl::ValidatorImpl() = default;
 ValidatorImpl::~ValidatorImpl() = default;
 
 void ValidatorImpl::set_error(const Source& src, const std::string& msg) {
-  error_ +=
-      std::to_string(src.line) + ":" + std::to_string(src.column) + ": " + msg;
+  error_ += std::to_string(src.range.begin.line) + ":" +
+            std::to_string(src.range.begin.column) + ": " + msg;
 }
 
 bool ValidatorImpl::Validate(const ast::Module* module) {