Mirror of https://github.com/encounter/dawn-cmake.git
Synced 2025-07-03 19:55:56 +00:00
Source: Restructure Source::File
Add Source::FileContent to hold the file source content and per-line data.

Have Source hold an optional pointer to a FileContent, and add a file_path
field. This allows us to kill the `FreeInternalCompilerErrors()` filth as
we're now able to construct Sources that hold a file path without file
content.

Change-Id: I03556795d7d4161c3d34cef32cb685c45ad04a3d
Reviewed-on: https://dawn-review.googlesource.com/c/tint/+/42026
Reviewed-by: Austin Eng <enga@chromium.org>
Reviewed-by: dan sinclair <dsinclair@chromium.org>
Commit-Queue: Ben Clayton <bclayton@google.com>
This commit is contained in:
parent 41e58d89ea
commit 1d98236770
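As an illustration of what the restructure enables — a minimal sketch, not code from this commit; it only uses the constructors and fields introduced in the src/source.h hunks below, and the paths and WGSL text are invented:

  #include "src/source.h"

  void example() {
    // A Source backed by real file content, as the WGSL lexer builds it:
    tint::Source::File file("shader.wgsl", "fn main() {}");
    tint::Source with_content(
        tint::Source::Range{tint::Source::Location{1, 1}}, &file);

    // A Source carrying only a file path and no content, as the
    // internal-compiler-error path now does - nothing is heap-allocated,
    // so nothing needs to be tracked and freed later:
    tint::Source path_only(
        tint::Source::Range{tint::Source::Location{42}}, "src/debug.cc");
  }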
@@ -280,13 +280,6 @@ TEST(CloneContext, CloneWithReplace_WithNotANode) {
         ctx.Clone(original_root);
       },
       "internal compiler error");
-
-  // Ensure that this test does not leak memory.
-  // This will be automatically called by main() in src/test_main.cc, but
-  // chromium uses it's own test entry point.
-  // TODO(ben-clayton): Add this call to the end of Chromium's main(), and we
-  // can remove this call.
-  FreeInternalCompilerErrors();
 }

 }  // namespace
src/debug.cc (43 changed lines)
@@ -27,45 +27,8 @@ namespace {

 InternalCompilerErrorReporter* ice_reporter = nullptr;

-/// Note - this class is _not_ thread safe. If we have multiple internal
-/// compiler errors occurring at the same time on different threads, then
-/// we're in serious trouble.
-class SourceFileToDelete {
-  static SourceFileToDelete* instance;
-
- public:
-  /// Adds file to the list that will be deleted on call to Free()
-  static void Add(Source::File* file) {
-    if (!instance) {
-      instance = new SourceFileToDelete();
-    }
-    instance->files.emplace_back(file);
-  }
-
-  /// Free deletes all the source files added by calls to Add() and then this
-  /// SourceFileToDelete object.
-  static void Free() {
-    if (instance) {
-      for (auto* file : instance->files) {
-        delete file;
-      }
-      delete instance;
-      instance = nullptr;
-    }
-  }
-
- private:
-  std::vector<Source::File*> files;
-};
-
-SourceFileToDelete* SourceFileToDelete::instance = nullptr;
-
 }  // namespace

-void FreeInternalCompilerErrors() {
-  SourceFileToDelete::Free();
-}
-
 void SetInternalCompilerErrorReporter(InternalCompilerErrorReporter* reporter) {
   ice_reporter = reporter;
 }
@@ -76,11 +39,7 @@ InternalCompilerError::InternalCompilerError(const char* file,
     : file_(file), line_(line), diagnostics_(diagnostics) {}

 InternalCompilerError::~InternalCompilerError() {
-  auto* file = new Source::File(file_, "");
-
-  SourceFileToDelete::Add(file);
-
-  Source source{Source::Range{Source::Location{line_}}, file};
+  Source source{Source::Range{Source::Location{line_}}, file_};
   diagnostics_.add_ice(msg_.str(), source);

   if (ice_reporter) {
@@ -27,12 +27,6 @@ namespace tint {
 /// Function type used for registering an internal compiler error reporter
 using InternalCompilerErrorReporter = void(const diag::List&);

-/// Frees any memory allocated for reporting internal compiler errors.
-/// Must only be called on application termination.
-/// If an internal compiler error is raised and this function is not called,
-/// then memory will leak.
-void FreeInternalCompilerErrors();
-
 /// Sets the global error reporter to be called in case of internal compiler
 /// errors.
 /// @param reporter the error reporter
@@ -26,13 +26,6 @@ TEST(DebugTest, Unreachable) {
         TINT_UNREACHABLE(diagnostics);
       },
       "internal compiler error");
-
-  // Ensure that this test does not leak memory.
-  // This will be automatically called by main() in src/test_main.cc, but
-  // chromium uses it's own test entry point.
-  // TODO(ben-clayton): Add this call to the end of Chromium's main(), and we
-  // can remove this call.
-  FreeInternalCompilerErrors();
 }

 }  // namespace
@@ -160,12 +160,12 @@ void Formatter::format(const Diagnostic& diag, State& state) const {
   std::vector<TextAndColor> prefix;
   prefix.reserve(6);

-  if (style_.print_file && src.file != nullptr && !src.file->path.empty()) {
+  if (style_.print_file && !src.file_path.empty()) {
     if (rng.begin.line > 0) {
-      prefix.emplace_back(TextAndColor{src.file->path + ":" + to_str(rng.begin),
+      prefix.emplace_back(TextAndColor{src.file_path + ":" + to_str(rng.begin),
                                        Color::kDefault});
     } else {
-      prefix.emplace_back(TextAndColor{src.file->path, Color::kDefault});
+      prefix.emplace_back(TextAndColor{src.file_path, Color::kDefault});
     }
   } else if (rng.begin.line > 0) {
     prefix.emplace_back(TextAndColor{to_str(rng.begin), Color::kDefault});
@@ -208,15 +208,15 @@ void Formatter::format(const Diagnostic& diag, State& state) const {
   }
   state << diag.message;

-  if (style_.print_line && src.file != nullptr && rng.begin.line > 0) {
+  if (style_.print_line && src.file_content != nullptr && rng.begin.line > 0) {
     state.newline();
     state.set_style({Color::kDefault, false});

     for (size_t line = rng.begin.line; line <= rng.end.line; line++) {
-      if (line < src.file->lines.size() + 1) {
-        auto len = src.file->lines[line - 1].size();
+      if (line < src.file_content->lines.size() + 1) {
+        auto len = src.file_content->lines[line - 1].size();

-        state << src.file->lines[line - 1];
+        state << src.file_content->lines[line - 1];

         state.newline();
         state.set_style({Color::kCyan, false});
@@ -32,9 +32,10 @@ bool is_whitespace(char c) {

 }  // namespace

-Lexer::Lexer(Source::File const* file)
-    : file_(file),
-      len_(static_cast<uint32_t>(file->content.size())),
+Lexer::Lexer(const std::string& file_path, const Source::FileContent* content)
+    : file_path_(file_path),
+      content_(content),
+      len_(static_cast<uint32_t>(content->data.size())),
       location_{1, 1} {}

 Lexer::~Lexer() = default;
@@ -82,7 +83,8 @@ Token Lexer::next() {

 Source Lexer::begin_source() const {
   Source src{};
-  src.file = file_;
+  src.file_path = file_path_;
+  src.file_content = content_;
   src.range.begin = location_;
   src.range.end = location_;
   return src;
@@ -115,13 +117,13 @@ bool Lexer::is_hex(char ch) const {
 bool Lexer::matches(size_t pos, const std::string& substr) {
   if (pos >= len_)
     return false;
-  return file_->content.substr(pos, substr.size()) == substr;
+  return content_->data.substr(pos, substr.size()) == substr;
 }

 void Lexer::skip_whitespace() {
   for (;;) {
     auto pos = pos_;
-    while (!is_eof() && is_whitespace(file_->content[pos_])) {
+    while (!is_eof() && is_whitespace(content_->data[pos_])) {
       if (matches(pos_, "\n")) {
         pos_++;
         location_.line++;
@@ -162,7 +164,7 @@ Token Lexer::try_float() {
   if (matches(end, "-")) {
     end++;
   }
-  while (end < len_ && is_digit(file_->content[end])) {
+  while (end < len_ && is_digit(content_->data[end])) {
     end++;
   }

@@ -171,7 +173,7 @@ Token Lexer::try_float() {
   }
   end++;

-  while (end < len_ && is_digit(file_->content[end])) {
+  while (end < len_ && is_digit(content_->data[end])) {
     end++;
   }

@@ -183,7 +185,7 @@ Token Lexer::try_float() {
   }

   auto exp_start = end;
-  while (end < len_ && isdigit(file_->content[end])) {
+  while (end < len_ && isdigit(content_->data[end])) {
     end++;
   }

@@ -192,7 +194,7 @@ Token Lexer::try_float() {
     return {};
   }

-  auto str = file_->content.substr(start, end - start);
+  auto str = content_->data.substr(start, end - start);
   if (str == "." || str == "-.")
     return {};

@@ -201,7 +203,7 @@ Token Lexer::try_float() {

   end_source(source);

-  auto res = strtod(file_->content.c_str() + start, nullptr);
+  auto res = strtod(content_->data.c_str() + start, nullptr);
   // This handles if the number is a really small in the exponent
   if (res > 0 && res < static_cast<double>(std::numeric_limits<float>::min())) {
     return {Token::Type::kError, source, "f32 (" + str + " too small"};
@@ -221,13 +223,13 @@ Token Lexer::build_token_from_int_if_possible(Source source,
                                               size_t start,
                                               size_t end,
                                               int32_t base) {
-  auto res = strtoll(file_->content.c_str() + start, nullptr, base);
+  auto res = strtoll(content_->data.c_str() + start, nullptr, base);
   if (matches(pos_, "u")) {
     if (static_cast<uint64_t>(res) >
         static_cast<uint64_t>(std::numeric_limits<uint32_t>::max())) {
       return {
           Token::Type::kError, source,
-          "u32 (" + file_->content.substr(start, end - start) + ") too large"};
+          "u32 (" + content_->data.substr(start, end - start) + ") too large"};
     }
     pos_ += 1;
     location_.column += 1;
@@ -238,12 +240,12 @@ Token Lexer::build_token_from_int_if_possible(Source source,
   if (res < static_cast<int64_t>(std::numeric_limits<int32_t>::min())) {
     return {
         Token::Type::kError, source,
-        "i32 (" + file_->content.substr(start, end - start) + ") too small"};
+        "i32 (" + content_->data.substr(start, end - start) + ") too small"};
   }
   if (res > static_cast<int64_t>(std::numeric_limits<int32_t>::max())) {
     return {
         Token::Type::kError, source,
-        "i32 (" + file_->content.substr(start, end - start) + ") too large"};
+        "i32 (" + content_->data.substr(start, end - start) + ") too large"};
   }
   end_source(source);
   return {source, static_cast<int32_t>(res)};
@@ -263,7 +265,7 @@ Token Lexer::try_hex_integer() {
   }
   end += 2;

-  while (!is_eof() && is_hex(file_->content[end])) {
+  while (!is_eof() && is_hex(content_->data[end])) {
     end += 1;
   }

@@ -282,18 +284,18 @@ Token Lexer::try_integer() {
   if (matches(end, "-")) {
     end++;
   }
-  if (end >= len_ || !is_digit(file_->content[end])) {
+  if (end >= len_ || !is_digit(content_->data[end])) {
     return {};
   }

   auto first = end;
-  while (end < len_ && is_digit(file_->content[end])) {
+  while (end < len_ && is_digit(content_->data[end])) {
     end++;
   }

   // If the first digit is a zero this must only be zero as leading zeros
   // are not allowed.
-  if (file_->content[first] == '0' && (end - first != 1))
+  if (content_->data[first] == '0' && (end - first != 1))
     return {};

   pos_ = end;
@@ -304,19 +306,19 @@ Token Lexer::try_integer() {

 Token Lexer::try_ident() {
   // Must begin with an a-zA-Z_
-  if (!is_alpha(file_->content[pos_])) {
+  if (!is_alpha(content_->data[pos_])) {
     return {};
   }

   auto source = begin_source();

   auto s = pos_;
-  while (!is_eof() && is_alphanum(file_->content[pos_])) {
+  while (!is_eof() && is_alphanum(content_->data[pos_])) {
     pos_++;
     location_.column++;
   }

-  auto str = file_->content.substr(s, pos_ - s);
+  auto str = content_->data.substr(s, pos_ - s);
   auto t = check_reserved(source, str);
   if (!t.IsUninitialized()) {
     return t;
@@ -352,7 +354,7 @@ Token Lexer::try_string() {
   end_source(source);

   return {Token::Type::kStringLiteral, source,
-          file_->content.substr(start, end - start)};
+          content_->data.substr(start, end - start)};
 }

 Token Lexer::try_punctuation() {
@@ -28,8 +28,9 @@ namespace wgsl {
 class Lexer {
  public:
   /// Creates a new Lexer
-  /// @param file the input file to parse
-  explicit Lexer(Source::File const* file);
+  /// @param file_path the path to the file containing the source
+  /// @param content the source content
+  Lexer(const std::string& file_path, const Source::FileContent* content);
   ~Lexer();

   /// Returns the next token in the input stream
@@ -63,8 +64,10 @@ class Lexer {
   bool is_alphanum(char ch) const;
   bool matches(size_t pos, const std::string& substr);

-  /// The source to parse
-  Source::File const* file_;
+  /// The source file path
+  std::string const file_path_;
+  /// The source file content
+  Source::FileContent const* const content_;
   /// The length of the input
   uint32_t len_ = 0;
   /// The current position within the input
@@ -26,15 +26,15 @@ namespace {
 using LexerTest = testing::Test;

 TEST_F(LexerTest, Empty) {
-  Source::File file("test.wgsl", "");
-  Lexer l(&file);
+  Source::FileContent content("");
+  Lexer l("test.wgsl", &content);
   auto t = l.next();
   EXPECT_TRUE(t.IsEof());
 }

 TEST_F(LexerTest, Skips_Whitespace) {
-  Source::File file("test.wgsl", "\t\r\n\t ident\t\n\t \r ");
-  Lexer l(&file);
+  Source::FileContent content("\t\r\n\t ident\t\n\t \r ");
+  Lexer l("test.wgsl", &content);

   auto t = l.next();
   EXPECT_TRUE(t.IsIdentifier());
@@ -49,11 +49,11 @@ TEST_F(LexerTest, Skips_Whitespace) {
 }

 TEST_F(LexerTest, Skips_Comments) {
-  Source::File file("test.wgsl", R"(//starts with comment
+  Source::FileContent content(R"(//starts with comment
 ident1 //ends with comment
 // blank line
 ident2)");
-  Lexer l(&file);
+  Lexer l("test.wgsl", &content);

   auto t = l.next();
   EXPECT_TRUE(t.IsIdentifier());
@@ -76,8 +76,8 @@ ident1 //ends with comment
 }

 TEST_F(LexerTest, StringTest_Parse) {
-  Source::File file("test.wgsl", R"(id "this is string content" id2)");
-  Lexer l(&file);
+  Source::FileContent content(R"(id "this is string content" id2)");
+  Lexer l("test.wgsl", &content);

   auto t = l.next();
   EXPECT_TRUE(t.IsIdentifier());
@@ -105,8 +105,8 @@ TEST_F(LexerTest, StringTest_Parse) {
 }

 TEST_F(LexerTest, StringTest_Unterminated) {
-  Source::File file("test.wgsl", R"(id "this is string content)");
-  Lexer l(&file);
+  Source::FileContent content(R"(id "this is string content)");
+  Lexer l("test.wgsl", &content);

   auto t = l.next();
   EXPECT_TRUE(t.IsIdentifier());
@@ -139,8 +139,8 @@ inline std::ostream& operator<<(std::ostream& out, FloatData data) {
 using FloatTest = testing::TestWithParam<FloatData>;
 TEST_P(FloatTest, Parse) {
   auto params = GetParam();
-  Source::File file("test.wgsl", params.input);
-  Lexer l(&file);
+  Source::FileContent content(params.input);
+  Lexer l("test.wgsl", &content);

   auto t = l.next();
   EXPECT_TRUE(t.IsFloatLiteral());
@@ -175,8 +175,8 @@ INSTANTIATE_TEST_SUITE_P(LexerTest,

 using FloatTest_Invalid = testing::TestWithParam<const char*>;
 TEST_P(FloatTest_Invalid, Handles) {
-  Source::File file("test.wgsl", GetParam());
-  Lexer l(&file);
+  Source::FileContent content(GetParam());
+  Lexer l("test.wgsl", &content);

   auto t = l.next();
   EXPECT_FALSE(t.IsFloatLiteral());
@@ -193,8 +193,8 @@ INSTANTIATE_TEST_SUITE_P(LexerTest,

 using IdentifierTest = testing::TestWithParam<const char*>;
 TEST_P(IdentifierTest, Parse) {
-  Source::File file("test.wgsl", GetParam());
-  Lexer l(&file);
+  Source::FileContent content(GetParam());
+  Lexer l("test.wgsl", &content);

   auto t = l.next();
   EXPECT_TRUE(t.IsIdentifier());
@@ -210,8 +210,8 @@ INSTANTIATE_TEST_SUITE_P(
     testing::Values("test01", "_test_", "test_", "_test", "_01", "_test01"));

 TEST_F(LexerTest, IdentifierTest_DoesNotStartWithNumber) {
-  Source::File file("test.wgsl", "01test");
-  Lexer l(&file);
+  Source::FileContent content("01test");
+  Lexer l("test.wgsl", &content);

   auto t = l.next();
   EXPECT_FALSE(t.IsIdentifier());
@@ -229,8 +229,8 @@ inline std::ostream& operator<<(std::ostream& out, HexSignedIntData data) {
 using IntegerTest_HexSigned = testing::TestWithParam<HexSignedIntData>;
 TEST_P(IntegerTest_HexSigned, Matches) {
   auto params = GetParam();
-  Source::File file("test.wgsl", params.input);
-  Lexer l(&file);
+  Source::FileContent content(params.input);
+  Lexer l("test.wgsl", &content);

   auto t = l.next();
   EXPECT_TRUE(t.IsSintLiteral());
@@ -252,16 +252,18 @@ INSTANTIATE_TEST_SUITE_P(
         HexSignedIntData{"0x7FFFFFFF", std::numeric_limits<int32_t>::max()}));

 TEST_F(LexerTest, IntegerTest_HexSignedTooLarge) {
-  Source::File file("test.wgsl", "0x80000000");
-  Lexer l(&file);
+  Source::FileContent content("0x80000000");
+  Lexer l("test.wgsl", &content);
+
   auto t = l.next();
   ASSERT_TRUE(t.IsError());
   EXPECT_EQ(t.to_str(), "i32 (0x80000000) too large");
 }

 TEST_F(LexerTest, IntegerTest_HexSignedTooSmall) {
-  Source::File file("test.wgsl", "-0x8000000F");
-  Lexer l(&file);
+  Source::FileContent content("-0x8000000F");
+  Lexer l("test.wgsl", &content);
+
   auto t = l.next();
   ASSERT_TRUE(t.IsError());
   EXPECT_EQ(t.to_str(), "i32 (-0x8000000F) too small");
@@ -278,8 +280,8 @@ inline std::ostream& operator<<(std::ostream& out, HexUnsignedIntData data) {
 using IntegerTest_HexUnsigned = testing::TestWithParam<HexUnsignedIntData>;
 TEST_P(IntegerTest_HexUnsigned, Matches) {
   auto params = GetParam();
-  Source::File file("test.wgsl", params.input);
-  Lexer l(&file);
+  Source::FileContent content(params.input);
+  Lexer l("test.wgsl", &content);

   auto t = l.next();
   EXPECT_TRUE(t.IsUintLiteral());
@@ -304,8 +306,9 @@ INSTANTIATE_TEST_SUITE_P(
                            std::numeric_limits<uint32_t>::max()}));

 TEST_F(LexerTest, IntegerTest_HexUnsignedTooLarge) {
-  Source::File file("test.wgsl", "0xffffffffffu");
-  Lexer l(&file);
+  Source::FileContent content("0xffffffffffu");
+  Lexer l("test.wgsl", &content);
+
   auto t = l.next();
   ASSERT_TRUE(t.IsError());
   EXPECT_EQ(t.to_str(), "u32 (0xffffffffff) too large");
@@ -322,8 +325,8 @@ inline std::ostream& operator<<(std::ostream& out, UnsignedIntData data) {
 using IntegerTest_Unsigned = testing::TestWithParam<UnsignedIntData>;
 TEST_P(IntegerTest_Unsigned, Matches) {
   auto params = GetParam();
-  Source::File file("test.wgsl", params.input);
-  Lexer l(&file);
+  Source::FileContent content(params.input);
+  Lexer l("test.wgsl", &content);

   auto t = l.next();
   EXPECT_TRUE(t.IsUintLiteral());
@@ -351,8 +354,8 @@ inline std::ostream& operator<<(std::ostream& out, SignedIntData data) {
 using IntegerTest_Signed = testing::TestWithParam<SignedIntData>;
 TEST_P(IntegerTest_Signed, Matches) {
   auto params = GetParam();
-  Source::File file("test.wgsl", params.input);
-  Lexer l(&file);
+  Source::FileContent content(params.input);
+  Lexer l("test.wgsl", &content);

   auto t = l.next();
   EXPECT_TRUE(t.IsSintLiteral());
@@ -374,8 +377,8 @@ INSTANTIATE_TEST_SUITE_P(

 using IntegerTest_Invalid = testing::TestWithParam<const char*>;
 TEST_P(IntegerTest_Invalid, Parses) {
-  Source::File file("test.wgsl", GetParam());
-  Lexer l(&file);
+  Source::FileContent content(GetParam());
+  Lexer l("test.wgsl", &content);

   auto t = l.next();
   EXPECT_FALSE(t.IsSintLiteral());
@@ -396,8 +399,8 @@ inline std::ostream& operator<<(std::ostream& out, TokenData data) {
 using PunctuationTest = testing::TestWithParam<TokenData>;
 TEST_P(PunctuationTest, Parses) {
   auto params = GetParam();
-  Source::File file("test.wgsl", params.input);
-  Lexer l(&file);
+  Source::FileContent content(params.input);
+  Lexer l("test.wgsl", &content);

   auto t = l.next();
   EXPECT_TRUE(t.Is(params.type));
@@ -450,8 +453,8 @@ INSTANTIATE_TEST_SUITE_P(
 using KeywordTest = testing::TestWithParam<TokenData>;
 TEST_P(KeywordTest, Parses) {
   auto params = GetParam();
-  Source::File file("test.wgsl", params.input);
-  Lexer l(&file);
+  Source::FileContent content(params.input);
+  Lexer l("test.wgsl", &content);

   auto t = l.next();
   EXPECT_TRUE(t.Is(params.type)) << params.input;
@@ -582,8 +585,8 @@ INSTANTIATE_TEST_SUITE_P(
 using KeywordTest_Reserved = testing::TestWithParam<const char*>;
 TEST_P(KeywordTest_Reserved, Parses) {
   auto* keyword = GetParam();
-  Source::File file("test.wgsl", keyword);
-  Lexer l(&file);
+  Source::FileContent content(keyword);
+  Lexer l("test.wgsl", &content);

   auto t = l.next();
   EXPECT_TRUE(t.IsReservedKeyword());
@@ -216,7 +216,7 @@ ParserImpl::FunctionHeader& ParserImpl::FunctionHeader::operator=(
     const FunctionHeader& rhs) = default;

 ParserImpl::ParserImpl(Source::File const* file)
-    : lexer_(std::make_unique<Lexer>(file)) {}
+    : lexer_(std::make_unique<Lexer>(file->path, &file->content)) {}

 ParserImpl::~ParserImpl() = default;

@@ -294,7 +294,7 @@ void ParserImpl::translation_unit() {
   }
   expect_global_decl();
   if (diags_.error_count() >= max_errors_) {
-    add_error(Source{{}, p.source().file},
+    add_error(Source{{}, p.source().file_path},
               "stopping after " + std::to_string(max_errors_) + " errors");
     break;
   }
@@ -67,9 +67,7 @@ TEST_F(TokenTest, ReturnsMaxU32) {
 }

 TEST_F(TokenTest, Source) {
-  Source::File file("", "");
   Source src;
-  src.file = &file;
   src.range.begin = Source::Location{3, 9};
   src.range.end = Source::Location{4, 3};

@@ -78,7 +76,6 @@ TEST_F(TokenTest, Source) {
   EXPECT_EQ(t.source().range.begin.column, 9u);
   EXPECT_EQ(t.source().range.end.line, 4u);
   EXPECT_EQ(t.source().range.end.column, 3u);
-  EXPECT_EQ(t.source().file, &file);
 }

 }  // namespace
@@ -30,9 +30,10 @@ std::vector<std::string> split_lines(const std::string& str) {
 }
 }  // namespace

-Source::File::File(const std::string& file_path,
-                   const std::string& file_content)
-    : path(file_path), content(file_content), lines(split_lines(content)) {}
+Source::FileContent::FileContent(const std::string& body)
+    : data(body), lines(split_lines(body)) {}
+
+Source::FileContent::~FileContent() = default;

 Source::File::~File() = default;

src/source.h (66 changed lines)
@@ -18,6 +18,7 @@

 #include <stddef.h>

+#include <ostream>
 #include <string>
 #include <vector>

@@ -26,21 +27,37 @@ namespace tint {
 /// Source describes a range of characters within a source file.
 class Source {
  public:
+  /// FileContent describes the content of a source file.
+  class FileContent {
+   public:
+    /// Constructs the FileContent with the given file content.
+    /// @param data the file contents
+    explicit FileContent(const std::string& data);
+
+    /// Destructor
+    ~FileContent();
+
+    /// un-split file content
+    const std::string data;
+    /// #data split by lines
+    const std::vector<std::string> lines;
+  };
+
   /// File describes a source file, including path and content.
   class File {
    public:
     /// Constructs the File with the given file path and content.
-    /// @param file_path the path for this file
-    /// @param file_content the file contents
-    File(const std::string& file_path, const std::string& file_content);
+    /// @param p the path for this file
+    /// @param c the file contents
+    inline File(const std::string& p, const std::string& c)
+        : path(p), content(c) {}
+
     ~File();

     /// file path (optional)
     const std::string path;
     /// file content
-    const std::string content;
-    /// #content split by lines
-    const std::vector<std::string> lines;
+    const FileContent content;
   };

   /// Location holds a 1-based line and column index.
@@ -74,7 +91,7 @@ class Source {
   };

   /// Constructs the Source with an zero initialized Range and null File.
-  inline Source() = default;
+  inline Source() : range() {}

   /// Constructs the Source with the Range `rng` and a null File
   /// @param rng the source range
@@ -84,17 +101,40 @@ class Source {
   /// @param loc the start and end location for the source range
   inline explicit Source(const Location& loc) : range(Range(loc)) {}

-  /// Constructs the Source with the Range `rng` and File `f`
+  /// Constructs the Source with the Range `rng` and File `file`
   /// @param rng the source range
-  /// @param f the source file
-  inline Source(const Range& rng, File const* f) : range(rng), file(f) {}
+  /// @param file the source file
+  inline Source(const Range& rng, File const* file)
+      : range(rng), file_path(file->path), file_content(&file->content) {}

-  /// range is the span of text this source refers to in #file
+  /// Constructs the Source with the Range `rng`, file path `path` and content
+  /// `content`
+  /// @param rng the source range
+  /// @param path the source file path
+  /// @param content the source file content
+  inline Source(const Range& rng,
+                const std::string& path,
+                FileContent* content = nullptr)
+      : range(rng), file_path(path), file_content(content) {}
+
+  /// range is the span of text this source refers to in #file_path
   Range range;
-  /// file is the source file this source refers to
-  File const* file = nullptr;
+  /// file is the optional file path this source refers to
+  std::string file_path;
+  /// file is the optional source content this source refers to
+  const FileContent* file_content = nullptr;
 };

+/// Writes the Source::FileContent to the std::ostream.
+/// @param out the std::ostream to write to
+/// @param content the file content to write
+/// @returns out so calls can be chained
+inline std::ostream& operator<<(std::ostream& out,
+                                const Source::FileContent& content) {
+  out << content.data;
+  return out;
+}
+
 }  // namespace tint

 #endif  // SRC_SOURCE_H_
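One practical consequence of the header above: Source::File::content is now a Source::FileContent rather than a std::string, so callers that used to read the text straight off the File go through content.data (or content.lines), and the new operator<< keeps streaming a FileContent working. A minimal sketch of how calling code adapts (the helper below is illustrative, not part of this change):

  #include <iostream>

  #include "src/source.h"

  // Prints a summary of a source file; shows where the text now lives.
  void DumpFile(const tint::Source::File& file) {
    const std::string& text = file.content.data;    // raw, un-split source text
    size_t line_count = file.content.lines.size();  // per-line view
    std::cout << file.path << ": " << line_count << " lines, "
              << text.size() << " bytes\n";
    std::cout << file.content;  // operator<< streams content.data
  }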
@@ -32,7 +32,5 @@ int main(int argc, char** argv) {

   auto res = RUN_ALL_TESTS();

-  tint::FreeInternalCompilerErrors();
-
   return res;
 }