reader/wgsl: Remove old token methods.
This CL removes the old token `Is<Name>()` methods, which are no longer used by the parser or lexer. (Some of them were still used in tests, but the generic `Is(<type>)` check is used there instead.) New `peek_is` helpers are added which check whether the given token type comes either next or at a given index.

Change-Id: I8d15f2a42889f981d273b6459d20c4202db1ae32
Reviewed-on: https://dawn-review.googlesource.com/c/tint/+/57540
Auto-Submit: dan sinclair <dan.sinclair@gmail.com>
Kokoro: Kokoro <noreply+kokoro@google.com>
Reviewed-by: Ben Clayton <bclayton@chromium.org>
Reviewed-by: Ben Clayton <bclayton@google.com>
Commit-Queue: Ben Clayton <bclayton@chromium.org>
Parent: 9569e2c790
Commit: bf1ec305db
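For context, the sketch below illustrates the call patterns this change converges on. It is not code from the CL: the `maybe_semicolon_terminated_ident()` helper and its body are hypothetical, but `Token::Is()`, `peek_is()`, `peek()` and `next()` match the declarations shown in the parser_impl.h hunk further down.

    // Hypothetical helper, for illustration only -- not part of this CL.
    // Old style:  t.IsIdentifier(), peek().IsColon(), peek(1).IsSemicolon()
    // New style:  t.Is(Token::Type::kIdentifier),
    //             peek_is(Token::Type::kColon),
    //             peek_is(Token::Type::kSemicolon, 1)
    Maybe<std::string> ParserImpl::maybe_semicolon_terminated_ident() {
      // Two-token look-ahead: an identifier immediately followed by ';'.
      if (!peek_is(Token::Type::kIdentifier) ||
          !peek_is(Token::Type::kSemicolon, 1)) {
        return Failure::kNoMatch;
      }

      auto t = next();    // Consume the identifier...
      next();             // ...and the ';'.
      return t.to_str();  // The identifier's text.
    }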
@@ -90,7 +90,7 @@ TEST_P(FloatTest, Parse) {
Lexer l("test.wgsl", &content);
auto t = l.next();
EXPECT_TRUE(t.IsFloatLiteral());
EXPECT_TRUE(t.Is(Token::Type::kFloatLiteral));
EXPECT_EQ(t.to_f32(), params.result);
EXPECT_EQ(t.source().range.begin.line, 1u);
EXPECT_EQ(t.source().range.begin.column, 1u);
@@ -126,7 +126,7 @@ TEST_P(FloatTest_Invalid, Handles) {
Lexer l("test.wgsl", &content);
auto t = l.next();
EXPECT_FALSE(t.IsFloatLiteral());
EXPECT_FALSE(t.Is(Token::Type::kFloatLiteral));
}
INSTANTIATE_TEST_SUITE_P(LexerTest,
FloatTest_Invalid,
@@ -180,7 +180,7 @@ TEST_P(IntegerTest_HexSigned, Matches) {
Lexer l("test.wgsl", &content);
auto t = l.next();
EXPECT_TRUE(t.IsSintLiteral());
EXPECT_TRUE(t.Is(Token::Type::kSintLiteral));
EXPECT_EQ(t.source().range.begin.line, 1u);
EXPECT_EQ(t.source().range.begin.column, 1u);
EXPECT_EQ(t.source().range.end.line, 1u);
@@ -203,7 +203,7 @@ TEST_F(LexerTest, IntegerTest_HexSignedTooLarge) {
Lexer l("test.wgsl", &content);
auto t = l.next();
ASSERT_TRUE(t.IsError());
ASSERT_TRUE(t.Is(Token::Type::kError));
EXPECT_EQ(t.to_str(), "i32 (0x80000000) too large");
}
@@ -212,7 +212,7 @@ TEST_F(LexerTest, IntegerTest_HexSignedTooSmall) {
Lexer l("test.wgsl", &content);
auto t = l.next();
ASSERT_TRUE(t.IsError());
ASSERT_TRUE(t.Is(Token::Type::kError));
EXPECT_EQ(t.to_str(), "i32 (-0x8000000F) too small");
}
@@ -231,7 +231,7 @@ TEST_P(IntegerTest_HexUnsigned, Matches) {
Lexer l("test.wgsl", &content);
auto t = l.next();
EXPECT_TRUE(t.IsUintLiteral());
EXPECT_TRUE(t.Is(Token::Type::kUintLiteral));
EXPECT_EQ(t.source().range.begin.line, 1u);
EXPECT_EQ(t.source().range.begin.column, 1u);
EXPECT_EQ(t.source().range.end.line, 1u);
@@ -257,7 +257,7 @@ TEST_F(LexerTest, IntegerTest_HexUnsignedTooLarge) {
Lexer l("test.wgsl", &content);
auto t = l.next();
ASSERT_TRUE(t.IsError());
ASSERT_TRUE(t.Is(Token::Type::kError));
EXPECT_EQ(t.to_str(), "u32 (0xffffffffff) too large");
}
@@ -276,7 +276,7 @@ TEST_P(IntegerTest_Unsigned, Matches) {
Lexer l("test.wgsl", &content);
auto t = l.next();
EXPECT_TRUE(t.IsUintLiteral());
EXPECT_TRUE(t.Is(Token::Type::kUintLiteral));
EXPECT_EQ(t.to_u32(), params.result);
EXPECT_EQ(t.source().range.begin.line, 1u);
EXPECT_EQ(t.source().range.begin.column, 1u);
@@ -305,7 +305,7 @@ TEST_P(IntegerTest_Signed, Matches) {
Lexer l("test.wgsl", &content);
auto t = l.next();
EXPECT_TRUE(t.IsSintLiteral());
EXPECT_TRUE(t.Is(Token::Type::kSintLiteral));
EXPECT_EQ(t.to_i32(), params.result);
EXPECT_EQ(t.source().range.begin.line, 1u);
EXPECT_EQ(t.source().range.begin.column, 1u);
@@ -328,8 +328,8 @@ TEST_P(IntegerTest_Invalid, Parses) {
Lexer l("test.wgsl", &content);
auto t = l.next();
EXPECT_FALSE(t.IsSintLiteral());
EXPECT_FALSE(t.IsUintLiteral());
EXPECT_FALSE(t.Is(Token::Type::kSintLiteral));
EXPECT_FALSE(t.Is(Token::Type::kUintLiteral));
}
INSTANTIATE_TEST_SUITE_P(LexerTest,
IntegerTest_Invalid,
@@ -309,8 +309,8 @@ Token ParserImpl::peek(size_t idx) {
return token_queue_[idx];
}
Token ParserImpl::peek() {
return peek(0);
bool ParserImpl::peek_is(Token::Type tok, size_t idx) {
return peek(idx).Is(tok);
}
Token ParserImpl::last_token() const {
@@ -892,7 +892,7 @@ Expect<ParserImpl::TypedIdentifier> ParserImpl::expect_variable_ident_decl(
if (ident.errored)
return Failure::kErrored;
if (allow_inferred && !peek().Is(Token::Type::kColon)) {
if (allow_inferred && !peek_is(Token::Type::kColon)) {
return TypedIdentifier{nullptr, ident.value, ident.source};
}
@@ -934,7 +934,7 @@ Expect<ast::Access> ParserImpl::expect_access(const std::string& use) {
// variable_qualifier
// : LESS_THAN storage_class (COMMA access_mode)? GREATER_THAN
Maybe<ParserImpl::VariableQualifier> ParserImpl::variable_qualifier() {
if (!peek().IsLessThan()) {
if (!peek_is(Token::Type::kLessThan)) {
return Failure::kNoMatch;
}
@@ -966,12 +966,10 @@ Maybe<ParserImpl::VariableQualifier> ParserImpl::variable_qualifier() {
// type_alias
// : TYPE IDENT EQUAL type_decl
Maybe<ast::Alias*> ParserImpl::type_alias() {
auto t = peek();
if (!t.IsType())
if (!peek_is(Token::Type::kType))
return Failure::kNoMatch;
next(); // Consume the peek
auto t = next();
const char* use = "type alias";
auto name = expect_ident(use);
@@ -1059,7 +1057,7 @@ Maybe<ast::Type*> ParserImpl::type_decl(ast::DecorationList& decos) {
if (match(Token::Type::kU32, &source))
return builder_.ty.u32(source);
if (t.IsVec2() || t.IsVec3() || t.IsVec4()) {
if (t.IsVector()) {
next(); // Consume the peek
return expect_type_decl_vector(t);
}
@@ -1076,9 +1074,7 @@ Maybe<ast::Type*> ParserImpl::type_decl(ast::DecorationList& decos) {
return expect_type_decl_array(t, std::move(decos));
}
if (t.IsMat2x2() || t.IsMat2x3() || t.IsMat2x4() || t.IsMat3x2() ||
t.IsMat3x3() || t.IsMat3x4() || t.IsMat4x2() || t.IsMat4x3() ||
t.IsMat4x4()) {
if (t.IsMatrix()) {
next(); // Consume the peek
return expect_type_decl_matrix(t);
}
@@ -1155,9 +1151,9 @@ Expect<ast::Type*> ParserImpl::expect_type_decl_atomic(Token t) {
Expect<ast::Type*> ParserImpl::expect_type_decl_vector(Token t) {
uint32_t count = 2;
if (t.IsVec3())
if (t.Is(Token::Type::kVec3))
count = 3;
else if (t.IsVec4())
else if (t.Is(Token::Type::kVec4))
count = 4;
const char* use = "vector";
@@ -1203,14 +1199,14 @@ Expect<ast::Type*> ParserImpl::expect_type_decl_array(
Expect<ast::Type*> ParserImpl::expect_type_decl_matrix(Token t) {
uint32_t rows = 2;
uint32_t columns = 2;
if (t.IsMat3x2() || t.IsMat3x3() || t.IsMat3x4()) {
if (t.IsMat3xN()) {
columns = 3;
} else if (t.IsMat4x2() || t.IsMat4x3() || t.IsMat4x4()) {
} else if (t.IsMat4xN()) {
columns = 4;
}
if (t.IsMat2x3() || t.IsMat3x3() || t.IsMat4x3()) {
if (t.IsMatNx3()) {
rows = 3;
} else if (t.IsMat2x4() || t.IsMat3x4() || t.IsMat4x4()) {
} else if (t.IsMatNx4()) {
rows = 4;
}
@@ -1289,7 +1285,8 @@ Expect<ast::StructMemberList> ParserImpl::expect_struct_body_decl() {
ast::StructMemberList members;
while (synchronized_ && !peek().IsBraceRight() && !peek().IsEof()) {
while (synchronized_ && !peek_is(Token::Type::kBraceRight) &&
!peek_is(Token::Type::kEOF)) {
auto member = sync(Token::Type::kSemicolon,
[&]() -> Expect<ast::StructMember*> {
auto decos = decoration_list();
@@ -1443,7 +1440,7 @@ Expect<ast::VariableList> ParserImpl::expect_param_list() {
while (synchronized_) {
// Check for the end of the list.
auto t = peek();
if (!t.IsIdentifier() && !t.IsAttrLeft()) {
if (!t.IsIdentifier() && !t.Is(Token::Type::kAttrLeft)) {
break;
}
@@ -1625,7 +1622,7 @@ Maybe<ast::Statement*> ParserImpl::statement() {
if (stmt_for.matched)
return stmt_for.value;
if (peek().IsBraceLeft()) {
if (peek_is(Token::Type::kBraceLeft)) {
auto body = expect_body_stmt();
if (body.errored)
return Failure::kErrored;
@@ -1701,7 +1698,7 @@ Maybe<ast::ReturnStatement*> ParserImpl::return_stmt() {
if (!match(Token::Type::kReturn, &source))
return Failure::kNoMatch;
if (peek().IsSemicolon())
if (peek_is(Token::Type::kSemicolon))
return create<ast::ReturnStatement>(source, nullptr);
auto expr = logical_or_expression();
@@ -1885,15 +1882,14 @@ Maybe<ast::SwitchStatement*> ParserImpl::switch_stmt() {
// : CASE case_selectors COLON BRACKET_LEFT case_body BRACKET_RIGHT
// | DEFAULT COLON BRACKET_LEFT case_body BRACKET_RIGHT
Maybe<ast::CaseStatement*> ParserImpl::switch_body() {
auto t = peek();
if (!t.IsCase() && !t.IsDefault())
if (!peek_is(Token::Type::kCase) && !peek_is(Token::Type::kDefault))
return Failure::kNoMatch;
auto t = next();
auto source = t.source();
next(); // Consume the peek
ast::CaseSelectorList selector_list;
if (t.IsCase()) {
if (t.Is(Token::Type::kCase)) {
auto selectors = expect_case_selectors();
if (selectors.errored)
return Failure::kErrored;
@@ -2096,7 +2092,7 @@ Maybe<ast::ForLoopStatement*> ParserImpl::for_stmt() {
Maybe<ast::CallStatement*> ParserImpl::func_call_stmt() {
auto t = peek();
auto t2 = peek(1);
if (!t.IsIdentifier() || !t2.IsParenLeft())
if (!t.IsIdentifier() || !t2.Is(Token::Type::kParenLeft))
return Failure::kNoMatch;
next(); // Consume the first peek
@@ -2161,7 +2157,7 @@ Maybe<ast::Expression*> ParserImpl::primary_expression() {
if (lit.matched)
return create<ast::ScalarConstructorExpression>(source, lit.value);
if (t.IsParenLeft()) {
if (t.Is(Token::Type::kParenLeft)) {
auto paren = expect_paren_rhs_stmt();
if (paren.errored)
return Failure::kErrored;
@@ -2189,7 +2185,7 @@ Maybe<ast::Expression*> ParserImpl::primary_expression() {
auto* ident = create<ast::IdentifierExpression>(
t.source(), builder_.Symbols().Register(t.to_str()));
if (peek().IsParenLeft()) {
if (peek_is(Token::Type::kParenLeft)) {
auto params = expect_argument_expression_list("function call");
if (params.errored)
return Failure::kErrored;
@@ -2345,21 +2341,19 @@ Maybe<ast::Expression*> ParserImpl::unary_expression() {
Expect<ast::Expression*> ParserImpl::expect_multiplicative_expr(
ast::Expression* lhs) {
while (synchronized_) {
auto t = peek();
ast::BinaryOp op = ast::BinaryOp::kNone;
if (t.IsStar())
if (peek_is(Token::Type::kStar))
op = ast::BinaryOp::kMultiply;
else if (t.IsForwardSlash())
else if (peek_is(Token::Type::kForwardSlash))
op = ast::BinaryOp::kDivide;
else if (t.IsMod())
else if (peek_is(Token::Type::kMod))
op = ast::BinaryOp::kModulo;
else
return lhs;
auto t = next();
auto source = t.source();
auto name = t.to_name();
next(); // Consume the peek
auto rhs = unary_expression();
if (rhs.errored)
@@ -2393,18 +2387,16 @@ Maybe<ast::Expression*> ParserImpl::multiplicative_expression() {
Expect<ast::Expression*> ParserImpl::expect_additive_expr(
ast::Expression* lhs) {
while (synchronized_) {
auto t = peek();
ast::BinaryOp op = ast::BinaryOp::kNone;
if (t.IsPlus())
if (peek_is(Token::Type::kPlus))
op = ast::BinaryOp::kAdd;
else if (t.IsMinus())
else if (peek_is(Token::Type::kMinus))
op = ast::BinaryOp::kSubtract;
else
return lhs;
auto t = next();
auto source = t.source();
next(); // Consume the peek
auto rhs = multiplicative_expression();
if (rhs.errored)
@@ -2435,23 +2427,20 @@ Maybe<ast::Expression*> ParserImpl::additive_expression() {
// | SHIFT_RIGHT additive_expression shift_expr
Expect<ast::Expression*> ParserImpl::expect_shift_expr(ast::Expression* lhs) {
while (synchronized_) {
auto t = peek();
auto source = t.source();
auto* name = "";
ast::BinaryOp op = ast::BinaryOp::kNone;
if (t.IsShiftLeft()) {
next(); // Consume the peek
if (peek_is(Token::Type::kShiftLeft)) {
op = ast::BinaryOp::kShiftLeft;
name = "<<";
} else if (t.IsShiftRight()) {
next(); // Consume the peek
} else if (peek_is(Token::Type::kShiftRight)) {
op = ast::BinaryOp::kShiftRight;
name = ">>";
} else {
return lhs;
}
auto t = next();
auto source = t.source();
auto rhs = additive_expression();
if (rhs.errored)
return Failure::kErrored;
@@ -2486,22 +2475,21 @@ Maybe<ast::Expression*> ParserImpl::shift_expression() {
Expect<ast::Expression*> ParserImpl::expect_relational_expr(
ast::Expression* lhs) {
while (synchronized_) {
auto t = peek();
ast::BinaryOp op = ast::BinaryOp::kNone;
if (t.IsLessThan())
if (peek_is(Token::Type::kLessThan))
op = ast::BinaryOp::kLessThan;
else if (t.IsGreaterThan())
else if (peek_is(Token::Type::kGreaterThan))
op = ast::BinaryOp::kGreaterThan;
else if (t.IsLessThanEqual())
else if (peek_is(Token::Type::kLessThanEqual))
op = ast::BinaryOp::kLessThanEqual;
else if (t.IsGreaterThanEqual())
else if (peek_is(Token::Type::kGreaterThanEqual))
op = ast::BinaryOp::kGreaterThanEqual;
else
return lhs;
auto t = next();
auto source = t.source();
auto name = t.to_name();
next(); // Consume the peek
auto rhs = shift_expression();
if (rhs.errored)
@@ -2535,18 +2523,17 @@ Maybe<ast::Expression*> ParserImpl::relational_expression() {
Expect<ast::Expression*> ParserImpl::expect_equality_expr(
ast::Expression* lhs) {
while (synchronized_) {
auto t = peek();
ast::BinaryOp op = ast::BinaryOp::kNone;
if (t.IsEqualEqual())
if (peek_is(Token::Type::kEqualEqual))
op = ast::BinaryOp::kEqual;
else if (t.IsNotEqual())
else if (peek_is(Token::Type::kNotEqual))
op = ast::BinaryOp::kNotEqual;
else
return lhs;
auto t = next();
auto source = t.source();
auto name = t.to_name();
next(); // Consume the peek
auto rhs = relational_expression();
if (rhs.errored)
@@ -2578,12 +2565,11 @@ Maybe<ast::Expression*> ParserImpl::equality_expression() {
// | AND equality_expression and_expr
Expect<ast::Expression*> ParserImpl::expect_and_expr(ast::Expression* lhs) {
while (synchronized_) {
auto t = peek();
if (!t.IsAnd())
if (!peek_is(Token::Type::kAnd))
return lhs;
auto t = next();
auto source = t.source();
next(); // Consume the peek
auto rhs = equality_expression();
if (rhs.errored)
@@ -2683,12 +2669,11 @@ Maybe<ast::Expression*> ParserImpl::inclusive_or_expression() {
Expect<ast::Expression*> ParserImpl::expect_logical_and_expr(
ast::Expression* lhs) {
while (synchronized_) {
auto t = peek();
if (!t.IsAndAnd())
if (!peek_is(Token::Type::kAndAnd))
return lhs;
auto t = next();
auto source = t.source();
next(); // Consume the peek
auto rhs = inclusive_or_expression();
if (rhs.errored)
@@ -2757,7 +2742,7 @@ Maybe<ast::AssignmentStatement*> ParserImpl::assignment_stmt() {
// tint:295 - Test for `ident COLON` - this is invalid grammar, and without
// special casing will error as "missing = for assignment", which is less
// helpful than this error message:
if (peek(0).IsIdentifier() && peek(1).IsColon()) {
if (peek_is(Token::Type::kIdentifier) && peek_is(Token::Type::kColon, 1)) {
return add_error(peek(0).source(),
"expected 'var' for variable declaration");
}
@@ -2827,7 +2812,8 @@ Expect<ast::ConstructorExpression*> ParserImpl::expect_const_expr() {
[&]() -> Expect<ast::ExpressionList> {
ast::ExpressionList list;
while (synchronized_) {
if (peek().IsParenRight()) {
if (peek_is(
Token::Type::kParenRight)) {
break;
}
@@ -3114,7 +3100,7 @@ Maybe<ast::Decoration*> ParserImpl::decoration() {
if (s == kOverrideDecoration) {
const char* use = "override decoration";
if (peek().IsParenLeft()) {
if (peek_is(Token::Type::kParenLeft)) {
// [[override(x)]]
return expect_paren_block(use, [&]() -> Result {
auto val = expect_positive_sint(use);
@@ -3162,15 +3148,16 @@ bool ParserImpl::expect(const std::string& use, Token::Type tok) {
// Special case to split `>>` and `>=` tokens if we are looking for a `>`.
if (tok == Token::Type::kGreaterThan &&
(t.IsShiftRight() || t.IsGreaterThanEqual())) {
(t.Is(Token::Type::kShiftRight) ||
t.Is(Token::Type::kGreaterThanEqual))) {
next();
// Push the second character to the token queue.
auto source = t.source();
source.range.begin.column++;
if (t.IsShiftRight()) {
if (t.Is(Token::Type::kShiftRight)) {
token_queue_.push_front(Token(Token::Type::kGreaterThan, source));
} else if (t.IsGreaterThanEqual()) {
} else if (t.Is(Token::Type::kGreaterThanEqual)) {
token_queue_.push_front(Token(Token::Type::kEqual, source));
}
@@ -3179,7 +3166,7 @@ bool ParserImpl::expect(const std::string& use, Token::Type tok) {
// Handle the case when `]` is expected but the actual token is `]]`.
// For example, in `arr1[arr2[0]]`.
if (tok == Token::Type::kBracketRight && t.IsAttrRight()) {
if (tok == Token::Type::kBracketRight && t.Is(Token::Type::kAttrRight)) {
next();
auto source = t.source();
source.range.begin.column++;
@@ -3199,7 +3186,7 @@ bool ParserImpl::expect(const std::string& use, Token::Type tok) {
Expect<int32_t> ParserImpl::expect_sint(const std::string& use) {
auto t = peek();
if (!t.IsSintLiteral())
if (!t.Is(Token::Type::kSintLiteral))
return add_error(t.source(), "expected signed integer literal", use);
next();
@@ -336,12 +336,16 @@ class ParserImpl {
/// @returns the next token
Token next();
/// @returns the next token without advancing
Token peek();
/// Peeks ahead and returns the token at `idx` head of the current position
/// Peeks ahead and returns the token at `idx` ahead of the current position
/// @param idx the index of the token to return
/// @returns the token `idx` positions ahead without advancing
Token peek(size_t idx);
Token peek(size_t idx = 0);
/// Peeks ahead and returns true if the token at `idx` ahead of the current
/// position is |tok|
/// @param idx the index of the token to return
/// @param tok the token to look for
/// @returns true if the token `idx` positions ahead is |tok|
bool peek_is(Token::Type tok, size_t idx = 0);
/// @returns the last token that was returned by `next()`
Token last_token() const;
/// Appends an error at `t` with the message `msg`
@@ -97,7 +97,7 @@ TEST_F(ParserImplTest, VariableQualifier_MissingLessThan) {
EXPECT_FALSE(sc.matched);
auto t = p->next();
ASSERT_TRUE(t.IsPrivate());
ASSERT_TRUE(t.Is(Token::Type::kPrivate));
}
TEST_F(ParserImplTest, VariableQualifier_MissingLessThan_AfterSC) {
@@ -108,7 +108,7 @@ TEST_F(ParserImplTest, VariableQualifier_MissingLessThan_AfterSC) {
EXPECT_FALSE(sc.matched);
auto t = p->next();
ASSERT_TRUE(t.IsPrivate());
ASSERT_TRUE(t.Is(Token::Type::kPrivate));
}
TEST_F(ParserImplTest, VariableQualifier_MissingGreaterThan) {
@@ -364,309 +364,44 @@ class Token {
/// @returns true if the token is uninitialized
bool IsUninitialized() const { return type_ == Type::kUninitialized; }
/// @returns true if the token is reserved
/// @returns true if the token is an error
bool IsError() const { return type_ == Type::kError; }
/// @returns true if the token is EOF
bool IsEof() const { return type_ == Type::kEOF; }
/// @returns true if the token is an identifier
bool IsIdentifier() const { return type_ == Type::kIdentifier; }
/// @returns true if the token is a float
bool IsFloatLiteral() const { return type_ == Type::kFloatLiteral; }
/// @returns true if the token is an signed int
bool IsSintLiteral() const { return type_ == Type::kSintLiteral; }
/// @returns true if the token is a unsigned int
bool IsUintLiteral() const { return type_ == Type::kUintLiteral; }
/// @returns true if token is a '&'
bool IsAnd() const { return type_ == Type::kAnd; }
/// @returns true if token is a '&&'
bool IsAndAnd() const { return type_ == Type::kAndAnd; }
/// @returns true if token is a '->'
bool IsArrow() const { return type_ == Type::kArrow; }
/// @returns true if token is a '[['
bool IsAttrLeft() const { return type_ == Type::kAttrLeft; }
/// @returns true if token is a ']]'
bool IsAttrRight() const { return type_ == Type::kAttrRight; }
/// @returns true if token is a '/'
bool IsForwardSlash() const { return type_ == Type::kForwardSlash; }
/// @returns true if token is a '!'
bool IsBang() const { return type_ == Type::kBang; }
/// @returns true if token is a '['
bool IsBracketLeft() const { return type_ == Type::kBracketLeft; }
/// @returns true if token is a ']'
bool IsBracketRight() const { return type_ == Type::kBracketRight; }
/// @returns true if token is a '{'
bool IsBraceLeft() const { return type_ == Type::kBraceLeft; }
/// @returns true if token is a '}'
bool IsBraceRight() const { return type_ == Type::kBraceRight; }
/// @returns true if token is a ':'
bool IsColon() const { return type_ == Type::kColon; }
/// @returns true if token is a ','
bool IsComma() const { return type_ == Type::kComma; }
/// @returns true if token is a '='
bool IsEqual() const { return type_ == Type::kEqual; }
/// @returns true if token is a '=='
bool IsEqualEqual() const { return type_ == Type::kEqualEqual; }
/// @returns true if token is a '>'
bool IsGreaterThan() const { return type_ == Type::kGreaterThan; }
/// @returns true if token is a '>='
bool IsGreaterThanEqual() const { return type_ == Type::kGreaterThanEqual; }
/// @returns true if token is a '>>'
bool IsShiftRight() const { return type_ == Type::kShiftRight; }
/// @returns true if token is a '<'
bool IsLessThan() const { return type_ == Type::kLessThan; }
/// @returns true if token is a '<='
bool IsLessThanEqual() const { return type_ == Type::kLessThanEqual; }
/// @returns true if token is a '<<'
bool IsShiftLeft() const { return type_ == Type::kShiftLeft; }
/// @returns true if token is a '%'
bool IsMod() const { return type_ == Type::kMod; }
/// @returns true if token is a '-'
bool IsMinus() const { return type_ == Type::kMinus; }
/// @returns true if token is a '!='
bool IsNotEqual() const { return type_ == Type::kNotEqual; }
/// @returns true if token is a '.'
bool IsPeriod() const { return type_ == Type::kPeriod; }
/// @returns true if token is a '+'
bool IsPlus() const { return type_ == Type::kPlus; }
/// @returns true if token is a '|'
bool IsOr() const { return type_ == Type::kOr; }
/// @returns true if token is a '||'
bool IsOrOr() const { return type_ == Type::kOrOr; }
/// @returns true if token is a '('
bool IsParenLeft() const { return type_ == Type::kParenLeft; }
/// @returns true if token is a ')'
bool IsParenRight() const { return type_ == Type::kParenRight; }
/// @returns true if token is a ';'
bool IsSemicolon() const { return type_ == Type::kSemicolon; }
/// @returns true if token is a '*'
bool IsStar() const { return type_ == Type::kStar; }
/// @returns true if token is a '~'
bool IsTilde() const { return type_ == Type::kTilde; }
/// @returns true if token is a '^'
bool IsXor() const { return type_ == Type::kXor; }
/// @returns true if token is a 'matNxM'
bool IsMatrix() const {
return type_ == Type::kMat2x2 || type_ == Type::kMat2x3 ||
type_ == Type::kMat2x4 || type_ == Type::kMat3x2 ||
type_ == Type::kMat3x3 || type_ == Type::kMat3x4 ||
type_ == Type::kMat4x2 || type_ == Type::kMat4x3 ||
type_ == Type::kMat4x4;
}
/// @returns true if token is a 'mat3xM'
bool IsMat3xN() const {
return type_ == Type::kMat3x2 || type_ == Type::kMat3x3 ||
type_ == Type::kMat3x4;
}
/// @returns true if token is a 'mat4xM'
bool IsMat4xN() const {
return type_ == Type::kMat4x2 || type_ == Type::kMat4x3 ||
type_ == Type::kMat4x4;
}
/// @returns true if token is a 'matNx3'
bool IsMatNx3() const {
return type_ == Type::kMat2x3 || type_ == Type::kMat3x3 ||
type_ == Type::kMat4x3;
}
/// @returns true if token is a 'matNx4'
bool IsMatNx4() const {
return type_ == Type::kMat2x4 || type_ == Type::kMat3x4 ||
type_ == Type::kMat4x4;
}
/// @returns true if token is a 'array'
bool IsArray() const { return type_ == Type::kArray; }
/// @returns true if token is a 'bitcast'
bool IsBitcast() const { return type_ == Type::kBitcast; }
/// @returns true if token is a 'bool'
bool IsBool() const { return type_ == Type::kBool; }
/// @returns true if token is a 'break'
bool IsBreak() const { return type_ == Type::kBreak; }
/// @returns true if token is a 'case'
bool IsCase() const { return type_ == Type::kCase; }
/// @returns true if token is a 'sampler_comparison'
bool IsComparisonSampler() const { return type_ == Type::kComparisonSampler; }
/// @returns true if token is a 'continue'
bool IsContinue() const { return type_ == Type::kContinue; }
/// @returns true if token is a 'continuing'
bool IsContinuing() const { return type_ == Type::kContinuing; }
/// @returns true if token is a 'discard'
bool IsDiscard() const { return type_ == Type::kDiscard; }
/// @returns true if token is a 'default'
bool IsDefault() const { return type_ == Type::kDefault; }
/// @returns true if token is a 'else'
bool IsElse() const { return type_ == Type::kElse; }
/// @returns true if token is a 'elseif'
bool IsElseIf() const { return type_ == Type::kElseIf; }
/// @returns true if token is a 'f32'
bool IsF32() const { return type_ == Type::kF32; }
/// @returns true if token is a 'fallthrough'
bool IsFallthrough() const { return type_ == Type::kFallthrough; }
/// @returns true if token is a 'false'
bool IsFalse() const { return type_ == Type::kFalse; }
/// @returns true if token is a 'fn'
bool IsFn() const { return type_ == Type::kFn; }
/// @returns true if token is a 'for'
bool IsFor() const { return type_ == Type::kFor; }
/// @returns true if token is a 'Bgra8Unorm' format
bool IsFormatBgra8Unorm() const { return type_ == Type::kFormatBgra8Unorm; }
/// @returns true if token is a 'Bgra8UnormSrgb' format
bool IsFormatBgra8UnormSrgb() const {
return type_ == Type::kFormatBgra8UnormSrgb;
/// @returns true if token is a 'vecN'
bool IsVector() const {
return type_ == Type::kVec2 || type_ == Type::kVec3 || type_ == Type::kVec4;
}
/// @returns true if token is a 'R16Float' format
bool IsFormatR16Float() const { return type_ == Type::kFormatR16Float; }
/// @returns true if token is a 'R16Sint' format
bool IsFormatR16Sint() const { return type_ == Type::kFormatR16Sint; }
/// @returns true if token is a 'R16Uint' format
bool IsFormatR16Uint() const { return type_ == Type::kFormatR16Uint; }
/// @returns true if token is a 'R32Float' format
bool IsFormatR32Float() const { return type_ == Type::kFormatR32Float; }
/// @returns true if token is a 'R32Sint' format
bool IsFormatR32Sint() const { return type_ == Type::kFormatR32Sint; }
/// @returns true if token is a 'R32Uint' format
bool IsFormatR32Uint() const { return type_ == Type::kFormatR32Uint; }
/// @returns true if token is a 'R8Sint' format
bool IsFormatR8Sint() const { return type_ == Type::kFormatR8Sint; }
/// @returns true if token is a 'R8Snorm' format
bool IsFormatR8Snorm() const { return type_ == Type::kFormatR8Snorm; }
/// @returns true if token is a 'R8Uint' format
bool IsFormatR8Uint() const { return type_ == Type::kFormatR8Uint; }
/// @returns true if token is a 'R8Unorm' format
bool IsFormatR8Unorm() const { return type_ == Type::kFormatR8Unorm; }
/// @returns true if token is a 'Rg11B10Float' format
bool IsFormatRg11B10Float() const {
return type_ == Type::kFormatRg11B10Float;
}
/// @returns true if token is a 'Rg16Float' format
bool IsFormatRg16Float() const { return type_ == Type::kFormatRg16Float; }
/// @returns true if token is a 'Rg16Sint' format
bool IsFormatRg16Sint() const { return type_ == Type::kFormatRg16Sint; }
/// @returns true if token is a 'Rg16Uint' format
bool IsFormatRg16Uint() const { return type_ == Type::kFormatRg16Uint; }
/// @returns true if token is a 'Rg32Float' format
bool IsFormatRg32Float() const { return type_ == Type::kFormatRg32Float; }
/// @returns true if token is a 'Rg32Sint' format
bool IsFormatRg32Sint() const { return type_ == Type::kFormatRg32Sint; }
/// @returns true if token is a 'Rg32Uint' format
bool IsFormatRg32Uint() const { return type_ == Type::kFormatRg32Uint; }
/// @returns true if token is a 'Rg8Sint' format
bool IsFormatRg8Sint() const { return type_ == Type::kFormatRg8Sint; }
/// @returns true if token is a 'Rg8Snorm' format
bool IsFormatRg8Snorm() const { return type_ == Type::kFormatRg8Snorm; }
/// @returns true if token is a 'Rg8Uint' format
bool IsFormatRg8Uint() const { return type_ == Type::kFormatRg8Uint; }
/// @returns true if token is a 'Rg8Unorm' format
bool IsFormatRg8Unorm() const { return type_ == Type::kFormatRg8Unorm; }
/// @returns true if token is a 'Rgb10A2Unorm' format
bool IsFormatRgb10A2Unorm() const {
return type_ == Type::kFormatRgb10A2Unorm;
}
/// @returns true if token is a 'Rgba16Float' format
bool IsFormatRgba16Float() const { return type_ == Type::kFormatRgba16Float; }
/// @returns true if token is a 'Rgba16Sint' format
bool IsFormatRgba16Sint() const { return type_ == Type::kFormatRgba16Sint; }
/// @returns true if token is a 'Rgba16Uint' format
bool IsFormatRgba16Uint() const { return type_ == Type::kFormatRgba16Uint; }
/// @returns true if token is a 'Rgba32Float' format
bool IsFormatRgba32Float() const { return type_ == Type::kFormatRgba32Float; }
/// @returns true if token is a 'Rgba32Sint' format
bool IsFormatRgba32Sint() const { return type_ == Type::kFormatRgba32Sint; }
/// @returns true if token is a 'Rgba32Uint' format
bool IsFormatRgba32Uint() const { return type_ == Type::kFormatRgba32Uint; }
/// @returns true if token is a 'Rgba8Sint' format
bool IsFormatRgba8Sint() const { return type_ == Type::kFormatRgba8Sint; }
/// @returns true if token is a 'Rgba8Snorm' format
bool IsFormatRgba8Snorm() const { return type_ == Type::kFormatRgba8Snorm; }
/// @returns true if token is a 'Rgba8Uint' format
bool IsFormatRgba8Uint() const { return type_ == Type::kFormatRgba8Uint; }
/// @returns true if token is a 'Rgba8Unorm' format
bool IsFormatRgba8Unorm() const { return type_ == Type::kFormatRgba8Unorm; }
/// @returns true if token is a 'Rgba8UnormSrgb' format
bool IsFormatRgba8UnormSrgb() const {
return type_ == Type::kFormatRgba8UnormSrgb;
}
/// @returns true if token is a 'function'
bool IsFunction() const { return type_ == Type::kFunction; }
/// @returns true if token is a 'i32'
bool IsI32() const { return type_ == Type::kI32; }
/// @returns true if token is a 'if'
bool IsIf() const { return type_ == Type::kIf; }
/// @returns true if token is a 'image'
bool IsImage() const { return type_ == Type::kImage; }
/// @returns true if token is a 'import'
bool IsImport() const { return type_ == Type::kImport; }
/// @returns true if token is a 'let'
bool IsLet() const { return type_ == Type::kLet; }
/// @returns true if token is a 'loop'
bool IsLoop() const { return type_ == Type::kLoop; }
/// @returns true if token is a 'mat2x2'
bool IsMat2x2() const { return type_ == Type::kMat2x2; }
/// @returns true if token is a 'mat2x3'
bool IsMat2x3() const { return type_ == Type::kMat2x3; }
/// @returns true if token is a 'mat2x4'
bool IsMat2x4() const { return type_ == Type::kMat2x4; }
/// @returns true if token is a 'mat3x2'
bool IsMat3x2() const { return type_ == Type::kMat3x2; }
/// @returns true if token is a 'mat3x3'
bool IsMat3x3() const { return type_ == Type::kMat3x3; }
/// @returns true if token is a 'mat3x4'
bool IsMat3x4() const { return type_ == Type::kMat3x4; }
/// @returns true if token is a 'mat4x2'
bool IsMat4x2() const { return type_ == Type::kMat4x2; }
/// @returns true if token is a 'mat4x3'
bool IsMat4x3() const { return type_ == Type::kMat4x3; }
/// @returns true if token is a 'mat4x4'
bool IsMat4x4() const { return type_ == Type::kMat4x4; }
/// @returns true if token is a 'private'
bool IsPrivate() const { return type_ == Type::kPrivate; }
/// @returns true if token is a 'ptr'
bool IsPtr() const { return type_ == Type::kPtr; }
/// @returns true if token is a 'return'
bool IsReturn() const { return type_ == Type::kReturn; }
/// @returns true if token is a 'sampler'
bool IsSampler() const { return type_ == Type::kSampler; }
/// @returns true if token is a 'storage'
bool IsStorage() const { return type_ == Type::kStorage; }
/// @returns true if token is a 'struct'
bool IsStruct() const { return type_ == Type::kStruct; }
/// @returns true if token is a 'switch'
bool IsSwitch() const { return type_ == Type::kSwitch; }
/// @returns true if token is a 'texture_depth_2d'
bool IsTextureDepth2d() const { return type_ == Type::kTextureDepth2d; }
/// @returns true if token is a 'texture_depth_2d_array'
bool IsTextureDepth2dArray() const {
return type_ == Type::kTextureDepth2dArray;
}
/// @returns true if token is a 'texture_depth_cube'
bool IsTextureDepthCube() const { return type_ == Type::kTextureDepthCube; }
/// @returns true if token is a 'texture_depth_cube_array'
bool IsTextureDepthCubeArray() const {
return type_ == Type::kTextureDepthCubeArray;
}
/// @returns true if the token is a 'texture_multisample_2d'
bool IsTextureMultisampled2d() const {
return type_ == Type::kTextureMultisampled2d;
}
/// @returns true if token is a 'texture_storage_1d'
bool IsTextureStorage1d() const { return type_ == Type::kTextureStorage1d; }
/// @returns true if token is a 'texture_storage_2d'
bool IsTextureStorage2d() const { return type_ == Type::kTextureStorage2d; }
/// @returns true if token is a 'texture_storage_2d_array'
bool IsTextureStorage2dArray() const {
return type_ == Type::kTextureStorage2dArray;
}
/// @returns true if token is a 'texture_storage_3d'
bool IsTextureStorage3d() const { return type_ == Type::kTextureStorage3d; }
/// @returns true if token is a 'texture_1d'
bool IsTextureSampled1d() const { return type_ == Type::kTextureSampled1d; }
/// @returns true if token is a 'texture_2d'
bool IsTextureSampled2d() const { return type_ == Type::kTextureSampled2d; }
/// @returns true if token is a 'texture_2d_array'
bool IsTextureSampled2dArray() const {
return type_ == Type::kTextureSampled2dArray;
}
/// @returns true if token is a 'texture_3d'
bool IsTextureSampled3d() const { return type_ == Type::kTextureSampled3d; }
/// @returns true if token is a 'texture_cube'
bool IsTextureSampledCube() const {
return type_ == Type::kTextureSampledCube;
}
/// @returns true if token is a 'texture_cube_array'
bool IsTextureSampledCubeArray() const {
return type_ == Type::kTextureSampledCubeArray;
}
/// @returns true if token is a 'true'
bool IsTrue() const { return type_ == Type::kTrue; }
/// @returns true if token is a 'type'
bool IsType() const { return type_ == Type::kType; }
/// @returns true if token is a 'u32'
bool IsU32() const { return type_ == Type::kU32; }
/// @returns true if token is a 'uniform'
bool IsUniform() const { return type_ == Type::kUniform; }
/// @returns true if token is a 'var'
bool IsVar() const { return type_ == Type::kVar; }
/// @returns true if token is a 'vec2'
bool IsVec2() const { return type_ == Type::kVec2; }
/// @returns true if token is a 'vec3'
bool IsVec3() const { return type_ == Type::kVec3; }
/// @returns true if token is a 'vec4'
bool IsVec4() const { return type_ == Type::kVec4; }
/// @returns true if token is a 'workgroup'
bool IsWorkgroup() const { return type_ == Type::kWorkgroup; }
/// @returns the source information for this token
Source source() const { return source_; }
@@ -60,8 +60,7 @@ TEST_F(ResolverAtomicValidationTest, Local) {
WrapInFunction(Var("a", ty.atomic(Source{{12, 34}}, ty.i32())));
EXPECT_FALSE(r()->Resolve());
EXPECT_EQ(r()->error(),
"12:34 error: atomic var requires workgroup storage");
EXPECT_EQ(r()->error(), "12:34 error: atomic var requires workgroup storage");
}
TEST_F(ResolverAtomicValidationTest, NoAtomicExpr) {