wgsl: float literals can have 'f' suffix
Fixes: tint:1307
Change-Id: Ie21a2c24e5aedf0353f95e7a66c41e6177e8a168
Reviewed-on: https://dawn-review.googlesource.com/c/tint/+/69760
Auto-Submit: David Neto <dneto@google.com>
Reviewed-by: Antonio Maiorano <amaiorano@google.com>
Commit-Queue: David Neto <dneto@google.com>
Kokoro: Kokoro <noreply+kokoro@google.com>
parent 8085671ed2
commit be11f9f9ca
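For orientation before the diff hunks: the rule being added boils down to "a run of digits is still an integer unless it has a decimal point, an exponent, or a trailing 'f', and the suffix is consumed as part of the float token". Below is a minimal standalone C++ sketch of that idea. It is not Tint's actual Lexer; the function name TryParseDecimalFloat and the use of strtof are stand-ins chosen here purely for illustration.

#include <cstdlib>
#include <iostream>
#include <optional>
#include <string>

// Illustrative sketch only, not Tint's lexer. A numeric literal counts as a
// float if it has a '.', an 'e'/'E' exponent, or a trailing 'f'; otherwise it
// is left for the integer path (std::nullopt).
std::optional<float> TryParseDecimalFloat(const std::string& lit) {
  if (lit.empty()) return std::nullopt;
  const bool has_point = lit.find('.') != std::string::npos;
  const bool has_exponent = lit.find_first_of("eE") != std::string::npos;
  const bool has_f_suffix = lit.back() == 'f';
  if (!has_point && !has_exponent && !has_f_suffix) {
    return std::nullopt;  // digits only: an integer literal
  }
  // Strip the suffix before converting; strtof does not accept it.
  const std::string body = has_f_suffix ? lit.substr(0, lit.size() - 1) : lit;
  return std::strtof(body.c_str(), nullptr);
}

int main() {
  const char* samples[] = {"1f", "-5.7f", "1e-5f", "123"};
  for (const char* s : samples) {
    auto v = TryParseDecimalFloat(s);
    std::cout << s << " -> " << (v ? std::to_string(*v) : "not a float") << "\n";
  }
}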
@@ -256,11 +256,18 @@ Token Lexer::try_float() {
     }
   }
 
-  if (!has_point && !has_exponent) {
+  bool has_f_suffix = false;
+  if (end < len_ && matches(end, "f")) {
+    end++;
+    has_f_suffix = true;
+  }
+
+  if (!has_point && !has_exponent && !has_f_suffix) {
     // If it only has digits then it's an integer.
     return {};
   }
 
   // Save the error string, for use by diagnostics.
   const auto str = content_->data.substr(start, end - start);
 
   pos_ = end;
@@ -488,6 +495,14 @@ Token Lexer::try_hex_float() {
     }
     end++;
   }
+
+  // Parse optional 'f' suffix. For a hex float, it can only exist
+  // when the exponent is present. Otherwise it will look like
+  // one of the mantissa digits.
+  if (end < len_ && matches(end, "f")) {
+    end++;
+  }
+
   if (!has_exponent_digits) {
     return {Token::Type::kError, source,
             "expected an exponent value for hex float"};
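The comment in the hunk above calls out the subtle case: in a hex float without an exponent, a trailing 'f' is indistinguishable from a mantissa hex digit, so the suffix can only be recognized once a 'p'/'P' binary exponent has been seen. A small standalone sketch of that distinction, not Tint code (strtof stands in here for the real mantissa/exponent accumulation):

#include <cstdlib>
#include <iostream>
#include <string>

// Illustrative only: strip a trailing 'f' from a hex-float literal, but only
// when a binary exponent ('p' or 'P') is present; otherwise the 'f' must be
// read as a mantissa hex digit ("0x2f" reads as 47, not as 2.0 with a suffix).
float ParseHexFloat(std::string lit) {
  const bool has_exponent = lit.find_first_of("pP") != std::string::npos;
  if (has_exponent && !lit.empty() && lit.back() == 'f') {
    lit.pop_back();  // drop the suffix before conversion
  }
  return std::strtof(lit.c_str(), nullptr);  // strtof accepts C99 hex floats
}

int main() {
  std::cout << ParseHexFloat("0x1.8p-1f") << "\n";  // 0.75
  std::cout << ParseHexFloat("0x2p-2f") << "\n";    // 0.5
  std::cout << ParseHexFloat("0x2f") << "\n";       // 47: 'f' is a digit here
}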
@@ -137,29 +137,65 @@ TEST_P(FloatTest, Parse) {
 }
 INSTANTIATE_TEST_SUITE_P(LexerTest,
                          FloatTest,
-                         testing::Values(FloatData{"0.0", 0.0f},
-                                         FloatData{"0.", 0.0f},
-                                         FloatData{".0", 0.0f},
-                                         FloatData{"5.7", 5.7f},
-                                         FloatData{"5.", 5.f},
-                                         FloatData{".7", .7f},
-                                         FloatData{"-0.0", 0.0f},
-                                         FloatData{"-.0", 0.0f},
-                                         FloatData{"-0.", 0.0f},
-                                         FloatData{"-5.7", -5.7f},
-                                         FloatData{"-5.", -5.f},
-                                         FloatData{"-.7", -.7f},
-                                         // No decimal, with exponent
-                                         FloatData{"1e5", 1e5f},
-                                         FloatData{"1E5", 1e5f},
-                                         FloatData{"1e-5", 1e-5f},
-                                         FloatData{"1E-5", 1e-5f},
-                                         // With decimal and exponents
-                                         FloatData{"0.2e+12", 0.2e12f},
-                                         FloatData{"1.2e-5", 1.2e-5f},
-                                         FloatData{"2.57e23", 2.57e23f},
-                                         FloatData{"2.5e+0", 2.5f},
-                                         FloatData{"2.5e-0", 2.5f}));
+                         testing::Values(
+                             // No decimal, with 'f' suffix
+                             FloatData{"0f", 0.0f},
+                             FloatData{"1f", 1.0f},
+                             FloatData{"-0f", 0.0f},
+                             FloatData{"-1f", -1.0f},
+
+                             // Zero, with decimal.
+                             FloatData{"0.0", 0.0f},
+                             FloatData{"0.", 0.0f},
+                             FloatData{".0", 0.0f},
+                             FloatData{"-0.0", 0.0f},
+                             FloatData{"-0.", 0.0f},
+                             FloatData{"-.0", 0.0f},
+                             // Zero, with decimal and 'f' suffix
+                             FloatData{"0.0f", 0.0f},
+                             FloatData{"0.f", 0.0f},
+                             FloatData{".0f", 0.0f},
+                             FloatData{"-0.0f", 0.0f},
+                             FloatData{"-0.f", 0.0f},
+                             FloatData{"-.0", 0.0f},
+
+                             // Non-zero with decimal
+                             FloatData{"5.7", 5.7f},
+                             FloatData{"5.", 5.f},
+                             FloatData{".7", .7f},
+                             FloatData{"-5.7", -5.7f},
+                             FloatData{"-5.", -5.f},
+                             FloatData{"-.7", -.7f},
+                             // Non-zero with decimal and 'f' suffix
+                             FloatData{"5.7f", 5.7f},
+                             FloatData{"5.f", 5.f},
+                             FloatData{".7f", .7f},
+                             FloatData{"-5.7f", -5.7f},
+                             FloatData{"-5.f", -5.f},
+                             FloatData{"-.7f", -.7f},
+
+                             // No decimal, with exponent
+                             FloatData{"1e5", 1e5f},
+                             FloatData{"1E5", 1e5f},
+                             FloatData{"1e-5", 1e-5f},
+                             FloatData{"1E-5", 1e-5f},
+                             // No decimal, with exponent and 'f' suffix
+                             FloatData{"1e5f", 1e5f},
+                             FloatData{"1E5f", 1e5f},
+                             FloatData{"1e-5f", 1e-5f},
+                             FloatData{"1E-5f", 1e-5f},
+                             // With decimal and exponents
+                             FloatData{"0.2e+12", 0.2e12f},
+                             FloatData{"1.2e-5", 1.2e-5f},
+                             FloatData{"2.57e23", 2.57e23f},
+                             FloatData{"2.5e+0", 2.5f},
+                             FloatData{"2.5e-0", 2.5f},
+                             // With decimal and exponents and 'f' suffix
+                             FloatData{"0.2e+12f", 0.2e12f},
+                             FloatData{"1.2e-5f", 1.2e-5f},
+                             FloatData{"2.57e23f", 2.57e23f},
+                             FloatData{"2.5e+0f", 2.5f},
+                             FloatData{"2.5e-0f", 2.5f}));
 
 using FloatTest_Invalid = testing::TestWithParam<const char*>;
 TEST_P(FloatTest_Invalid, Handles) {
@@ -2844,12 +2844,6 @@ Maybe<const ast::LiteralExpression*> ParserImpl::const_literal() {
     return create<ast::UintLiteralExpression>(t.source(), t.to_u32());
   }
   if (match(Token::Type::kFloatLiteral)) {
-    auto p = peek();
-    if (p.IsIdentifier() && p.to_str() == "f") {
-      next();  // Consume 'f'
-      return add_error(p.source(),
-                       "float literals must not be suffixed with 'f'");
-    }
     return create<ast::FloatLiteralExpression>(t.source(), t.to_f32());
   }
   return Failure::kNoMatch;
@@ -312,6 +312,16 @@ FloatLiteralTestCase hexfloat_literal_test_cases[] = {
     {"-0x1.", -1.0f},
     {"-0x.8", -0.5f},
     {"-0x1.8", -1.5f},
+
+    // Examples with a binary exponent and a 'f' suffix.
+    {"0x1.p0f", 1.0f},
+    {"0x.8p2f", 2.0f},
+    {"0x1.8p-1f", 0.75f},
+    {"0x2p-2f", 0.5f},  // No binary point
+    {"-0x1.p0f", -1.0f},
+    {"-0x.8p2f", -2.0f},
+    {"-0x1.8p-1f", -0.75f},
+    {"-0x2p-2f", -0.5f},  // No binary point
 };
 INSTANTIATE_TEST_SUITE_P(ParserImplFloatLiteralTest_HexFloat,
                          ParserImplFloatLiteralTest,
@@ -226,13 +226,6 @@ TEST_F(ParserImplErrorTest, EqualityInvalidExpr) {
          " ^\n");
 }
 
-TEST_F(ParserImplErrorTest, FloatLiteralSuffixedWithF) {
-  EXPECT("var f : f32 = 1.23f;",
-         "test.wgsl:1:19 error: float literals must not be suffixed with 'f'\n"
-         "var f : f32 = 1.23f;\n"
-         "                  ^\n");
-}
-
 TEST_F(ParserImplErrorTest, ForLoopInitializerMissingSemicolon) {
   EXPECT("fn f() { for (var i : i32 = 0 i < 8; i=i+1) {} }",
          "test.wgsl:1:31 error: expected ';' for initializer in for loop\n"