Mirror of https://github.com/encounter/dawn-cmake.git, synced 2025-12-21 02:39:11 +00:00
Move ast/address_space to type/
This CL moves the ast/address_space to type/address_space. This breaks the type dependency on ast for AddressSpace.

Change-Id: Icb48e7423e18904865ec735024eb3b9864c947c2
Reviewed-on: https://dawn-review.googlesource.com/c/dawn/+/117604
Kokoro: Kokoro <noreply+kokoro@google.com>
Reviewed-by: Ben Clayton <bclayton@google.com>
Commit-Queue: Dan Sinclair <dsinclair@chromium.org>
committed by Dawn LUCI CQ
parent 3cbf3fc4c5
commit 18b2158b4e
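The change is mechanical at call sites: every use of the AddressSpace enum swaps the ast:: namespace for type::. A minimal illustrative sketch follows (the header path is assumed from the CL description and IsBufferAddressSpace is a hypothetical helper, not code from this CL):

// Illustrative sketch only; the include path is assumed from the CL description
// and IsBufferAddressSpace is a hypothetical helper, not part of the CL.
#include "src/tint/type/address_space.h"  // previously src/tint/ast/address_space.h

namespace tint {

// Returns true for the host-visible buffer address spaces.
bool IsBufferAddressSpace(type::AddressSpace sc) {
    // Before this CL these enumerators were spelled ast::AddressSpace::k*.
    return sc == type::AddressSpace::kUniform || sc == type::AddressSpace::kStorage;
}

}  // namespace tint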
@@ -47,7 +47,7 @@ Transform::ApplyResult AddBlockAttribute::Apply(const Program* src,
 bool made_changes = false;
 for (auto* global : src->AST().GlobalVariables()) {
 auto* var = sem.Get(global);
-if (!ast::IsHostShareable(var->AddressSpace())) {
+if (!type::IsHostShareable(var->AddressSpace())) {
 // Not declared in a host-sharable address space
 continue;
 }

@@ -107,7 +107,7 @@ struct ArrayLengthFromUniform::State {
 u32((max_buffer_size_index / 4) + 1))),
 });
 buffer_size_ubo =
-b.GlobalVar(b.Sym(), b.ty.Of(buffer_size_struct), ast::AddressSpace::kUniform,
+b.GlobalVar(b.Sym(), b.ty.Of(buffer_size_struct), type::AddressSpace::kUniform,
 b.Group(AInt(cfg->ubo_binding.group)),
 b.Binding(AInt(cfg->ubo_binding.binding)));
 }

@@ -130,7 +130,7 @@ Transform::ApplyResult BindingRemapper::Apply(const Program* src,
 return Program(std::move(b));
 }
 auto* sem = src->Sem().Get(var);
-if (sem->AddressSpace() != ast::AddressSpace::kStorage) {
+if (sem->AddressSpace() != type::AddressSpace::kStorage) {
 b.Diagnostics().add_error(
 diag::System::Transform,
 "cannot apply access control to variable with address space " +

@@ -679,7 +679,7 @@ struct BuiltinPolyfill::State {
 auto name = b.Symbols().New("tint_workgroupUniformLoad");
 b.Func(name,
 utils::Vector{
-b.Param("p", b.ty.pointer(T(type), ast::AddressSpace::kWorkgroup)),
+b.Param("p", b.ty.pointer(T(type), type::AddressSpace::kWorkgroup)),
 },
 T(type),
 utils::Vector{

@@ -112,7 +112,7 @@ Transform::ApplyResult CalculateArrayLength::Apply(const Program* src,
 b.Param("buffer",
 b.ty.pointer(type, buffer_type->AddressSpace(), buffer_type->Access()),
 utils::Vector{disable_validation}),
-b.Param("result", b.ty.pointer(b.ty.u32(), ast::AddressSpace::kFunction)),
+b.Param("result", b.ty.pointer(b.ty.u32(), type::AddressSpace::kFunction)),
 },
 b.ty.void_(), nullptr,
 utils::Vector{

@@ -234,7 +234,7 @@ struct CanonicalizeEntryPointIO::State {
 auto* builtin = ast::GetAttribute<ast::BuiltinAttribute>(attributes);
 if (cfg.shader_style == ShaderStyle::kGlsl && builtin) {
 name = GLSLBuiltinToString(builtin->builtin, func_ast->PipelineStage(),
-ast::AddressSpace::kIn);
+type::AddressSpace::kIn);
 }
 auto symbol = ctx.dst->Symbols().New(name);

@@ -251,7 +251,7 @@ struct CanonicalizeEntryPointIO::State {
 value = ctx.dst->IndexAccessor(value, 0_i);
 }
 }
-ctx.dst->GlobalVar(symbol, ast_type, ast::AddressSpace::kIn, std::move(attributes));
+ctx.dst->GlobalVar(symbol, ast_type, type::AddressSpace::kIn, std::move(attributes));
 return value;
 } else if (cfg.shader_style == ShaderStyle::kMsl &&
 ast::HasAttribute<ast::BuiltinAttribute>(attributes)) {

@@ -300,7 +300,7 @@ struct CanonicalizeEntryPointIO::State {
 if (cfg.shader_style == ShaderStyle::kGlsl) {
 if (auto* b = ast::GetAttribute<ast::BuiltinAttribute>(attributes)) {
 name = GLSLBuiltinToString(b->builtin, func_ast->PipelineStage(),
-ast::AddressSpace::kOut);
+type::AddressSpace::kOut);
 value = ToGLSLBuiltin(b->builtin, value, type);
 }
 }

@@ -530,7 +530,7 @@ struct CanonicalizeEntryPointIO::State {
 type = ctx.dst->ty.array(type, 1_u);
 lhs = ctx.dst->IndexAccessor(lhs, 0_i);
 }
-ctx.dst->GlobalVar(name, type, ast::AddressSpace::kOut, std::move(attributes));
+ctx.dst->GlobalVar(name, type, type::AddressSpace::kOut, std::move(attributes));
 wrapper_body.Push(ctx.dst->Assign(lhs, outval.value));
 }
 }

@@ -674,7 +674,7 @@ struct CanonicalizeEntryPointIO::State {
 /// @returns the gl_ string corresponding to that builtin
 const char* GLSLBuiltinToString(ast::BuiltinValue builtin,
 ast::PipelineStage stage,
-ast::AddressSpace address_space) {
+type::AddressSpace address_space) {
 switch (builtin) {
 case ast::BuiltinValue::kPosition:
 switch (stage) {

@@ -706,7 +706,7 @@ struct CanonicalizeEntryPointIO::State {
 case ast::BuiltinValue::kSampleIndex:
 return "gl_SampleID";
 case ast::BuiltinValue::kSampleMask:
-if (address_space == ast::AddressSpace::kIn) {
+if (address_space == type::AddressSpace::kIn) {
 return "gl_SampleMaskIn";
 } else {
 return "gl_SampleMask";

@@ -89,7 +89,7 @@ Transform::ApplyResult ClampFragDepth::Apply(const Program* src, const DataMap&,
 // Abort on any use of push constants in the module.
 for (auto* global : src->AST().GlobalVariables()) {
 if (auto* var = global->As<ast::Var>()) {
-if (TINT_UNLIKELY(var->declared_address_space == ast::AddressSpace::kPushConstant)) {
+if (TINT_UNLIKELY(var->declared_address_space == type::AddressSpace::kPushConstant)) {
 TINT_ICE(Transform, b.Diagnostics())
 << "ClampFragDepth doesn't know how to handle module that already use push "
 "constants.";

@@ -124,7 +124,7 @@ Transform::ApplyResult ClampFragDepth::Apply(const Program* src, const DataMap&,
 utils::Vector{b.Member("min", b.ty.f32()), b.Member("max", b.ty.f32())});

 auto args_sym = b.Symbols().New("frag_depth_clamp_args");
-b.GlobalVar(args_sym, b.ty.type_name("FragDepthClampArgs"), ast::AddressSpace::kPushConstant);
+b.GlobalVar(args_sym, b.ty.type_name("FragDepthClampArgs"), type::AddressSpace::kPushConstant);

 auto base_fn_sym = b.Symbols().New("clamp_frag_depth");
 b.Func(base_fn_sym, utils::Vector{b.Param("v", b.ty.f32())}, b.ty.f32(),

@@ -50,8 +50,8 @@ namespace {
 bool ShouldRun(const Program* program) {
 for (auto* decl : program->AST().GlobalDeclarations()) {
 if (auto* var = program->Sem().Get<sem::Variable>(decl)) {
-if (var->AddressSpace() == ast::AddressSpace::kStorage ||
-var->AddressSpace() == ast::AddressSpace::kUniform) {
+if (var->AddressSpace() == type::AddressSpace::kStorage ||
+var->AddressSpace() == type::AddressSpace::kUniform) {
 return true;
 }
 }

@@ -109,7 +109,7 @@ struct OffsetBinOp : Offset {

 /// LoadStoreKey is the unordered map key to a load or store intrinsic.
 struct LoadStoreKey {
-ast::AddressSpace const address_space; // buffer address space
+type::AddressSpace const address_space; // buffer address space
 ast::Access const access; // buffer access
 type::Type const* buf_ty = nullptr; // buffer type
 type::Type const* el_ty = nullptr; // element type

@@ -223,7 +223,7 @@ bool IntrinsicDataTypeFor(const type::Type* ty, DecomposeMemoryAccess::Intrinsic
 /// @returns a DecomposeMemoryAccess::Intrinsic attribute that can be applied
 /// to a stub function to load the type `ty`.
 DecomposeMemoryAccess::Intrinsic* IntrinsicLoadFor(ProgramBuilder* builder,
-ast::AddressSpace address_space,
+type::AddressSpace address_space,
 const type::Type* ty) {
 DecomposeMemoryAccess::Intrinsic::DataType type;
 if (!IntrinsicDataTypeFor(ty, type)) {

@@ -237,7 +237,7 @@ DecomposeMemoryAccess::Intrinsic* IntrinsicLoadFor(ProgramBuilder* builder,
 /// @returns a DecomposeMemoryAccess::Intrinsic attribute that can be applied
 /// to a stub function to store the type `ty`.
 DecomposeMemoryAccess::Intrinsic* IntrinsicStoreFor(ProgramBuilder* builder,
-ast::AddressSpace address_space,
+type::AddressSpace address_space,
 const type::Type* ty) {
 DecomposeMemoryAccess::Intrinsic::DataType type;
 if (!IntrinsicDataTypeFor(ty, type)) {

@@ -300,7 +300,7 @@ DecomposeMemoryAccess::Intrinsic* IntrinsicAtomicFor(ProgramBuilder* builder,
 return nullptr;
 }
 return builder->ASTNodes().Create<DecomposeMemoryAccess::Intrinsic>(
-builder->ID(), builder->AllocateNodeID(), op, ast::AddressSpace::kStorage, type);
+builder->ID(), builder->AllocateNodeID(), op, type::AddressSpace::kStorage, type);
 }

 /// BufferAccess describes a single storage or uniform buffer access

@@ -466,7 +466,7 @@ struct DecomposeMemoryAccess::State {
 const sem::VariableUser* var_user) {
 auto address_space = var_user->Variable()->AddressSpace();
 auto access = var_user->Variable()->Access();
-if (address_space != ast::AddressSpace::kStorage) {
+if (address_space != type::AddressSpace::kStorage) {
 access = ast::Access::kUndefined;
 }
 return utils::GetOrCreate(

@@ -565,7 +565,7 @@ struct DecomposeMemoryAccess::State {
 const sem::VariableUser* var_user) {
 auto address_space = var_user->Variable()->AddressSpace();
 auto access = var_user->Variable()->Access();
-if (address_space != ast::AddressSpace::kStorage) {
+if (address_space != type::AddressSpace::kStorage) {
 access = ast::Access::kUndefined;
 }
 return utils::GetOrCreate(

@@ -678,7 +678,7 @@ struct DecomposeMemoryAccess::State {
 auto op = intrinsic->Type();
 auto address_space = var_user->Variable()->AddressSpace();
 auto access = var_user->Variable()->Access();
-if (address_space != ast::AddressSpace::kStorage) {
+if (address_space != type::AddressSpace::kStorage) {
 access = ast::Access::kUndefined;
 }
 return utils::GetOrCreate(atomic_funcs, AtomicKey{access, buf_ty, el_ty, op}, [&] {

@@ -686,7 +686,7 @@ struct DecomposeMemoryAccess::State {
 // atomic. This is replaced with two parameters: the buffer and offset.
 utils::Vector params{
 b.Param("buffer",
-b.ty.pointer(CreateASTTypeFor(ctx, buf_ty), ast::AddressSpace::kStorage,
+b.ty.pointer(CreateASTTypeFor(ctx, buf_ty), type::AddressSpace::kStorage,
 access),
 utils::Vector{b.Disable(ast::DisabledValidation::kFunctionParameter)}),
 b.Param("offset", b.ty.u32()),

@@ -744,7 +744,7 @@ struct DecomposeMemoryAccess::State {
 DecomposeMemoryAccess::Intrinsic::Intrinsic(ProgramID pid,
 ast::NodeID nid,
 Op o,
-ast::AddressSpace sc,
+type::AddressSpace sc,
 DataType ty)
 : Base(pid, nid), op(o), address_space(sc), type(ty) {}
 DecomposeMemoryAccess::Intrinsic::~Intrinsic() = default;

@@ -883,8 +883,8 @@ Transform::ApplyResult DecomposeMemoryAccess::Apply(const Program* src,
 // X
 if (auto* sem_ident = sem.Get(ident)) {
 if (auto* var = sem_ident->UnwrapLoad()->As<sem::VariableUser>()) {
-if (var->Variable()->AddressSpace() == ast::AddressSpace::kStorage ||
-var->Variable()->AddressSpace() == ast::AddressSpace::kUniform) {
+if (var->Variable()->AddressSpace() == type::AddressSpace::kStorage ||
+var->Variable()->AddressSpace() == type::AddressSpace::kUniform) {
 // Variable to a storage or uniform buffer
 state.AddAccess(ident, {
 var,

@@ -81,7 +81,7 @@ class DecomposeMemoryAccess final : public Castable<DecomposeMemoryAccess, Trans
 /// @param o the op of the intrinsic
 /// @param sc the address space of the buffer
 /// @param ty the data type of the intrinsic
-Intrinsic(ProgramID pid, ast::NodeID nid, Op o, ast::AddressSpace sc, DataType ty);
+Intrinsic(ProgramID pid, ast::NodeID nid, Op o, type::AddressSpace sc, DataType ty);
 /// Destructor
 ~Intrinsic() override;

@@ -101,7 +101,7 @@ class DecomposeMemoryAccess final : public Castable<DecomposeMemoryAccess, Trans
 const Op op;

 /// The address space of the buffer this intrinsic operates on
-ast::AddressSpace const address_space;
+type::AddressSpace const address_space;

 /// The type of the intrinsic
 const DataType type;

@@ -39,7 +39,7 @@ TEST_F(DecomposeStridedArrayTest, ShouldRunNonStridedArray) {
 // var<private> arr : array<f32, 4u>

 ProgramBuilder b;
-b.GlobalVar("arr", b.ty.array<f32, 4u>(), ast::AddressSpace::kPrivate);
+b.GlobalVar("arr", b.ty.array<f32, 4u>(), type::AddressSpace::kPrivate);
 EXPECT_FALSE(ShouldRun<DecomposeStridedArray>(Program(std::move(b))));
 }

@@ -47,7 +47,7 @@ TEST_F(DecomposeStridedArrayTest, ShouldRunDefaultStridedArray) {
 // var<private> arr : @stride(4) array<f32, 4u>

 ProgramBuilder b;
-b.GlobalVar("arr", b.ty.array<f32, 4u>(4), ast::AddressSpace::kPrivate);
+b.GlobalVar("arr", b.ty.array<f32, 4u>(4), type::AddressSpace::kPrivate);
 EXPECT_TRUE(ShouldRun<DecomposeStridedArray>(Program(std::move(b))));
 }

@@ -55,7 +55,7 @@ TEST_F(DecomposeStridedArrayTest, ShouldRunExplicitStridedArray) {
 // var<private> arr : @stride(16) array<f32, 4u>

 ProgramBuilder b;
-b.GlobalVar("arr", b.ty.array<f32, 4u>(16), ast::AddressSpace::kPrivate);
+b.GlobalVar("arr", b.ty.array<f32, 4u>(16), type::AddressSpace::kPrivate);
 EXPECT_TRUE(ShouldRun<DecomposeStridedArray>(Program(std::move(b))));
 }

@@ -78,7 +78,7 @@ TEST_F(DecomposeStridedArrayTest, PrivateDefaultStridedArray) {
 // }

 ProgramBuilder b;
-b.GlobalVar("arr", b.ty.array<f32, 4u>(4), ast::AddressSpace::kPrivate);
+b.GlobalVar("arr", b.ty.array<f32, 4u>(4), type::AddressSpace::kPrivate);
 b.Func("f", utils::Empty, b.ty.void_(),
 utils::Vector{
 b.Decl(b.Let("a", b.ty.array<f32, 4u>(4), b.Expr("arr"))),

@@ -114,7 +114,7 @@ TEST_F(DecomposeStridedArrayTest, PrivateStridedArray) {
 // }

 ProgramBuilder b;
-b.GlobalVar("arr", b.ty.array<f32, 4u>(32), ast::AddressSpace::kPrivate);
+b.GlobalVar("arr", b.ty.array<f32, 4u>(32), type::AddressSpace::kPrivate);
 b.Func("f", utils::Empty, b.ty.void_(),
 utils::Vector{
 b.Decl(b.Let("a", b.ty.array<f32, 4u>(32), b.Expr("arr"))),

@@ -158,7 +158,7 @@ TEST_F(DecomposeStridedArrayTest, ReadUniformStridedArray) {
 // }
 ProgramBuilder b;
 auto* S = b.Structure("S", utils::Vector{b.Member("a", b.ty.array<f32, 4u>(32))});
-b.GlobalVar("s", b.ty.Of(S), ast::AddressSpace::kUniform, b.Group(0_a), b.Binding(0_a));
+b.GlobalVar("s", b.ty.Of(S), type::AddressSpace::kUniform, b.Group(0_a), b.Binding(0_a));
 b.Func("f", utils::Empty, b.ty.void_(),
 utils::Vector{
 b.Decl(b.Let("a", b.ty.array<f32, 4u>(32), b.MemberAccessor("s", "a"))),

@@ -206,7 +206,7 @@ TEST_F(DecomposeStridedArrayTest, ReadUniformDefaultStridedArray) {
 // }
 ProgramBuilder b;
 auto* S = b.Structure("S", utils::Vector{b.Member("a", b.ty.array(b.ty.vec4<f32>(), 4_u, 16))});
-b.GlobalVar("s", b.ty.Of(S), ast::AddressSpace::kUniform, b.Group(0_a), b.Binding(0_a));
+b.GlobalVar("s", b.ty.Of(S), type::AddressSpace::kUniform, b.Group(0_a), b.Binding(0_a));
 b.Func(
 "f", utils::Empty, b.ty.void_(),
 utils::Vector{

@@ -252,7 +252,7 @@ TEST_F(DecomposeStridedArrayTest, ReadStorageStridedArray) {
 // }
 ProgramBuilder b;
 auto* S = b.Structure("S", utils::Vector{b.Member("a", b.ty.array<f32, 4u>(32))});
-b.GlobalVar("s", b.ty.Of(S), ast::AddressSpace::kStorage, b.Group(0_a), b.Binding(0_a));
+b.GlobalVar("s", b.ty.Of(S), type::AddressSpace::kStorage, b.Group(0_a), b.Binding(0_a));
 b.Func("f", utils::Empty, b.ty.void_(),
 utils::Vector{
 b.Decl(b.Let("a", b.ty.array<f32, 4u>(32), b.MemberAccessor("s", "a"))),

@@ -300,7 +300,7 @@ TEST_F(DecomposeStridedArrayTest, ReadStorageDefaultStridedArray) {
 // }
 ProgramBuilder b;
 auto* S = b.Structure("S", utils::Vector{b.Member("a", b.ty.array<f32, 4u>(4))});
-b.GlobalVar("s", b.ty.Of(S), ast::AddressSpace::kStorage, b.Group(0_a), b.Binding(0_a));
+b.GlobalVar("s", b.ty.Of(S), type::AddressSpace::kStorage, b.Group(0_a), b.Binding(0_a));
 b.Func("f", utils::Empty, b.ty.void_(),
 utils::Vector{
 b.Decl(b.Let("a", b.ty.array<f32, 4u>(4), b.MemberAccessor("s", "a"))),

@@ -344,8 +344,8 @@ TEST_F(DecomposeStridedArrayTest, WriteStorageStridedArray) {
 // }
 ProgramBuilder b;
 auto* S = b.Structure("S", utils::Vector{b.Member("a", b.ty.array<f32, 4u>(32))});
-b.GlobalVar("s", b.ty.Of(S), ast::AddressSpace::kStorage, ast::Access::kReadWrite, b.Group(0_a),
-b.Binding(0_a));
+b.GlobalVar("s", b.ty.Of(S), type::AddressSpace::kStorage, ast::Access::kReadWrite,
+b.Group(0_a), b.Binding(0_a));
 b.Func("f", utils::Empty, b.ty.void_(),
 utils::Vector{
 b.Assign(b.MemberAccessor("s", "a"), b.Construct(b.ty.array<f32, 4u>(32))),

@@ -398,8 +398,8 @@ TEST_F(DecomposeStridedArrayTest, WriteStorageDefaultStridedArray) {
 // }
 ProgramBuilder b;
 auto* S = b.Structure("S", utils::Vector{b.Member("a", b.ty.array<f32, 4u>(4))});
-b.GlobalVar("s", b.ty.Of(S), ast::AddressSpace::kStorage, ast::Access::kReadWrite, b.Group(0_a),
-b.Binding(0_a));
+b.GlobalVar("s", b.ty.Of(S), type::AddressSpace::kStorage, ast::Access::kReadWrite,
+b.Group(0_a), b.Binding(0_a));
 b.Func("f", utils::Empty, b.ty.void_(),
 utils::Vector{
 b.Assign(b.MemberAccessor("s", "a"), b.Construct(b.ty.array<f32, 4u>(4))),

@@ -450,8 +450,8 @@ TEST_F(DecomposeStridedArrayTest, ReadWriteViaPointerLets) {
 // }
 ProgramBuilder b;
 auto* S = b.Structure("S", utils::Vector{b.Member("a", b.ty.array<f32, 4u>(32))});
-b.GlobalVar("s", b.ty.Of(S), ast::AddressSpace::kStorage, ast::Access::kReadWrite, b.Group(0_a),
-b.Binding(0_a));
+b.GlobalVar("s", b.ty.Of(S), type::AddressSpace::kStorage, ast::Access::kReadWrite,
+b.Group(0_a), b.Binding(0_a));
 b.Func("f", utils::Empty, b.ty.void_(),
 utils::Vector{
 b.Decl(b.Let("a", b.AddressOf(b.MemberAccessor("s", "a")))),

@@ -511,8 +511,8 @@ TEST_F(DecomposeStridedArrayTest, PrivateAliasedStridedArray) {
 ProgramBuilder b;
 b.Alias("ARR", b.ty.array<f32, 4u>(32));
 auto* S = b.Structure("S", utils::Vector{b.Member("a", b.ty.type_name("ARR"))});
-b.GlobalVar("s", b.ty.Of(S), ast::AddressSpace::kStorage, ast::Access::kReadWrite, b.Group(0_a),
-b.Binding(0_a));
+b.GlobalVar("s", b.ty.Of(S), type::AddressSpace::kStorage, ast::Access::kReadWrite,
+b.Group(0_a), b.Binding(0_a));
 b.Func("f", utils::Empty, b.ty.void_(),
 utils::Vector{
 b.Decl(b.Let("a", b.ty.type_name("ARR"), b.MemberAccessor("s", "a"))),

@@ -581,8 +581,8 @@ TEST_F(DecomposeStridedArrayTest, PrivateNestedStridedArray) {
 b.ty.array(b.ty.type_name("ARR_A"), 3_u, 16), //
 4_u, 128));
 auto* S = b.Structure("S", utils::Vector{b.Member("a", b.ty.type_name("ARR_B"))});
-b.GlobalVar("s", b.ty.Of(S), ast::AddressSpace::kStorage, ast::Access::kReadWrite, b.Group(0_a),
-b.Binding(0_a));
+b.GlobalVar("s", b.ty.Of(S), type::AddressSpace::kStorage, ast::Access::kReadWrite,
+b.Group(0_a), b.Binding(0_a));
 b.Func("f", utils::Empty, b.ty.void_(),
 utils::Vector{
 b.Decl(b.Let("a", b.ty.type_name("ARR_B"), b.MemberAccessor("s", "a"))),

@@ -72,8 +72,8 @@ Transform::ApplyResult DecomposeStridedMatrix::Apply(const Program* src,
 for (auto* node : src->ASTNodes().Objects()) {
 if (auto* str = node->As<ast::Struct>()) {
 auto* str_ty = src->Sem().Get(str);
-if (!str_ty->UsedAs(ast::AddressSpace::kUniform) &&
-!str_ty->UsedAs(ast::AddressSpace::kStorage)) {
+if (!str_ty->UsedAs(type::AddressSpace::kUniform) &&
+!str_ty->UsedAs(type::AddressSpace::kStorage)) {
 continue;
 }
 for (auto* member : str_ty->Members()) {

@@ -76,7 +76,7 @@ TEST_F(DecomposeStridedMatrixTest, ReadUniformMatrix) {
 b.Disable(ast::DisabledValidation::kIgnoreStrideAttribute),
 }),
 });
-b.GlobalVar("s", b.ty.Of(S), ast::AddressSpace::kUniform, b.Group(0_a), b.Binding(0_a));
+b.GlobalVar("s", b.ty.Of(S), type::AddressSpace::kUniform, b.Group(0_a), b.Binding(0_a));
 b.Func("f", utils::Empty, b.ty.void_(),
 utils::Vector{
 b.Decl(b.Let("x", b.ty.mat2x2<f32>(), b.MemberAccessor("s", "m"))),

@@ -133,7 +133,7 @@ TEST_F(DecomposeStridedMatrixTest, ReadUniformColumn) {
 b.Disable(ast::DisabledValidation::kIgnoreStrideAttribute),
 }),
 });
-b.GlobalVar("s", b.ty.Of(S), ast::AddressSpace::kUniform, b.Group(0_a), b.Binding(0_a));
+b.GlobalVar("s", b.ty.Of(S), type::AddressSpace::kUniform, b.Group(0_a), b.Binding(0_a));
 b.Func(
 "f", utils::Empty, b.ty.void_(),
 utils::Vector{

@@ -187,7 +187,7 @@ TEST_F(DecomposeStridedMatrixTest, ReadUniformMatrix_DefaultStride) {
 b.Disable(ast::DisabledValidation::kIgnoreStrideAttribute),
 }),
 });
-b.GlobalVar("s", b.ty.Of(S), ast::AddressSpace::kUniform, b.Group(0_a), b.Binding(0_a));
+b.GlobalVar("s", b.ty.Of(S), type::AddressSpace::kUniform, b.Group(0_a), b.Binding(0_a));
 b.Func("f", utils::Empty, b.ty.void_(),
 utils::Vector{
 b.Decl(b.Let("x", b.ty.mat2x2<f32>(), b.MemberAccessor("s", "m"))),

@@ -241,8 +241,8 @@ TEST_F(DecomposeStridedMatrixTest, ReadStorageMatrix) {
 b.Disable(ast::DisabledValidation::kIgnoreStrideAttribute),
 }),
 });
-b.GlobalVar("s", b.ty.Of(S), ast::AddressSpace::kStorage, ast::Access::kReadWrite, b.Group(0_a),
-b.Binding(0_a));
+b.GlobalVar("s", b.ty.Of(S), type::AddressSpace::kStorage, ast::Access::kReadWrite,
+b.Group(0_a), b.Binding(0_a));
 b.Func("f", utils::Empty, b.ty.void_(),
 utils::Vector{
 b.Decl(b.Let("x", b.ty.mat2x2<f32>(), b.MemberAccessor("s", "m"))),

@@ -299,8 +299,8 @@ TEST_F(DecomposeStridedMatrixTest, ReadStorageColumn) {
 b.Disable(ast::DisabledValidation::kIgnoreStrideAttribute),
 }),
 });
-b.GlobalVar("s", b.ty.Of(S), ast::AddressSpace::kStorage, ast::Access::kReadWrite, b.Group(0_a),
-b.Binding(0_a));
+b.GlobalVar("s", b.ty.Of(S), type::AddressSpace::kStorage, ast::Access::kReadWrite,
+b.Group(0_a), b.Binding(0_a));
 b.Func(
 "f", utils::Empty, b.ty.void_(),
 utils::Vector{

@@ -354,8 +354,8 @@ TEST_F(DecomposeStridedMatrixTest, WriteStorageMatrix) {
 b.Disable(ast::DisabledValidation::kIgnoreStrideAttribute),
 }),
 });
-b.GlobalVar("s", b.ty.Of(S), ast::AddressSpace::kStorage, ast::Access::kReadWrite, b.Group(0_a),
-b.Binding(0_a));
+b.GlobalVar("s", b.ty.Of(S), type::AddressSpace::kStorage, ast::Access::kReadWrite,
+b.Group(0_a), b.Binding(0_a));
 b.Func("f", utils::Empty, b.ty.void_(),
 utils::Vector{
 b.Assign(b.MemberAccessor("s", "m"),

@@ -413,8 +413,8 @@ TEST_F(DecomposeStridedMatrixTest, WriteStorageColumn) {
 b.Disable(ast::DisabledValidation::kIgnoreStrideAttribute),
 }),
 });
-b.GlobalVar("s", b.ty.Of(S), ast::AddressSpace::kStorage, ast::Access::kReadWrite, b.Group(0_a),
-b.Binding(0_a));
+b.GlobalVar("s", b.ty.Of(S), type::AddressSpace::kStorage, ast::Access::kReadWrite,
+b.Group(0_a), b.Binding(0_a));
 b.Func("f", utils::Empty, b.ty.void_(),
 utils::Vector{
 b.Assign(b.IndexAccessor(b.MemberAccessor("s", "m"), 1_i), b.vec2<f32>(1_f, 2_f)),

@@ -473,8 +473,8 @@ TEST_F(DecomposeStridedMatrixTest, ReadWriteViaPointerLets) {
 b.Disable(ast::DisabledValidation::kIgnoreStrideAttribute),
 }),
 });
-b.GlobalVar("s", b.ty.Of(S), ast::AddressSpace::kStorage, ast::Access::kReadWrite, b.Group(0_a),
-b.Binding(0_a));
+b.GlobalVar("s", b.ty.Of(S), type::AddressSpace::kStorage, ast::Access::kReadWrite,
+b.Group(0_a), b.Binding(0_a));
 b.Func("f", utils::Empty, b.ty.void_(),
 utils::Vector{
 b.Decl(b.Let("a", b.AddressOf(b.MemberAccessor("s", "m")))),

@@ -545,7 +545,7 @@ TEST_F(DecomposeStridedMatrixTest, ReadPrivateMatrix) {
 b.Disable(ast::DisabledValidation::kIgnoreStrideAttribute),
 }),
 });
-b.GlobalVar("s", b.ty.Of(S), ast::AddressSpace::kPrivate);
+b.GlobalVar("s", b.ty.Of(S), type::AddressSpace::kPrivate);
 b.Func("f", utils::Empty, b.ty.void_(),
 utils::Vector{
 b.Decl(b.Let("x", b.ty.mat2x2<f32>(), b.MemberAccessor("s", "m"))),

@@ -599,7 +599,7 @@ TEST_F(DecomposeStridedMatrixTest, WritePrivateMatrix) {
 b.Disable(ast::DisabledValidation::kIgnoreStrideAttribute),
 }),
 });
-b.GlobalVar("s", b.ty.Of(S), ast::AddressSpace::kPrivate);
+b.GlobalVar("s", b.ty.Of(S), type::AddressSpace::kPrivate);
 b.Func("f", utils::Empty, b.ty.void_(),
 utils::Vector{
 b.Assign(b.MemberAccessor("s", "m"),

@@ -81,7 +81,7 @@ Transform::ApplyResult DemoteToHelper::Apply(const Program* src, const DataMap&,

 // Create a module-scope flag that indicates whether the current invocation has been discarded.
 auto flag = b.Symbols().New("tint_discarded");
-b.GlobalVar(flag, ast::AddressSpace::kPrivate, b.Expr(false));
+b.GlobalVar(flag, type::AddressSpace::kPrivate, b.Expr(false));

 // Replace all discard statements with a statement that marks the invocation as discarded.
 ctx.ReplaceAll([&](const ast::DiscardStatement*) -> const ast::Statement* {

@@ -125,12 +125,12 @@ Transform::ApplyResult DemoteToHelper::Apply(const Program* src, const DataMap&,
 // Skip writes to invocation-private address spaces.
 auto* ref = sem.Get(assign->lhs)->Type()->As<type::Reference>();
 switch (ref->AddressSpace()) {
-case ast::AddressSpace::kStorage:
+case type::AddressSpace::kStorage:
 // Need to mask these.
 break;
-case ast::AddressSpace::kFunction:
-case ast::AddressSpace::kPrivate:
-case ast::AddressSpace::kOut:
+case type::AddressSpace::kFunction:
+case type::AddressSpace::kPrivate:
+case type::AddressSpace::kOut:
 // Skip these.
 return;
 default:

@@ -50,7 +50,7 @@ struct AccessRoot {
 /// function-scope variable ('function'), or pointer parameter in the source program.
 tint::sem::Variable const* variable = nullptr;
 /// The address space of the variable or pointer type.
-tint::ast::AddressSpace address_space = tint::ast::AddressSpace::kUndefined;
+tint::type::AddressSpace address_space = tint::type::AddressSpace::kUndefined;
 };

 /// Inequality operator for AccessRoot

@@ -450,7 +450,7 @@ struct DirectVariableAccess::State {
 Switch(
 variable->Declaration(),
 [&](const ast::Var*) {
-if (variable->AddressSpace() != ast::AddressSpace::kHandle) {
+if (variable->AddressSpace() != type::AddressSpace::kHandle) {
 // Start a new access chain for the non-handle 'var' access
 create_new_chain();
 }

@@ -749,15 +749,15 @@ struct DirectVariableAccess::State {

 /// @returns true if the address space @p address_space requires transforming given the
 /// transform's options.
-bool AddressSpaceRequiresTransform(ast::AddressSpace address_space) const {
+bool AddressSpaceRequiresTransform(type::AddressSpace address_space) const {
 switch (address_space) {
-case ast::AddressSpace::kUniform:
-case ast::AddressSpace::kStorage:
-case ast::AddressSpace::kWorkgroup:
+case type::AddressSpace::kUniform:
+case type::AddressSpace::kStorage:
+case type::AddressSpace::kWorkgroup:
 return true;
-case ast::AddressSpace::kPrivate:
+case type::AddressSpace::kPrivate:
 return opts.transform_private;
-case ast::AddressSpace::kFunction:
+case type::AddressSpace::kFunction:
 return opts.transform_function;
 default:
 return false;

@@ -1180,9 +1180,9 @@ struct DirectVariableAccess::State {
 for (auto* param : fn->Parameters()) {
 if (auto* ptr = param->Type()->As<type::Pointer>()) {
 switch (ptr->AddressSpace()) {
-case ast::AddressSpace::kUniform:
-case ast::AddressSpace::kStorage:
-case ast::AddressSpace::kWorkgroup:
+case type::AddressSpace::kUniform:
+case type::AddressSpace::kStorage:
+case type::AddressSpace::kWorkgroup:
 return true;
 default:
 return false;

@@ -1193,8 +1193,8 @@ struct DirectVariableAccess::State {
 }

 /// @returns true if the given address space is 'private' or 'function'.
-static bool IsPrivateOrFunction(const ast::AddressSpace sc) {
-return sc == ast::AddressSpace::kPrivate || sc == ast::AddressSpace::kFunction;
+static bool IsPrivateOrFunction(const type::AddressSpace sc) {
+return sc == type::AddressSpace::kPrivate || sc == type::AddressSpace::kFunction;
 }
 };

@@ -130,7 +130,7 @@ Transform::ApplyResult FirstIndexOffset::Apply(const Program* src,

 // Create a global to hold the uniform buffer
 Symbol buffer_name = b.Sym();
-b.GlobalVar(buffer_name, b.ty.Of(struct_), ast::AddressSpace::kUniform,
+b.GlobalVar(buffer_name, b.ty.Of(struct_), type::AddressSpace::kUniform,
 utils::Vector{
 b.Binding(AInt(ub_binding)),
 b.Group(AInt(ub_group)),

@@ -60,8 +60,8 @@ struct LocalizeStructArrayAssignment::State {
 continue;
 }
 auto og = GetOriginatingTypeAndAddressSpace(assign_stmt);
-if (!(og.first->Is<sem::Struct>() && (og.second == ast::AddressSpace::kFunction ||
-og.second == ast::AddressSpace::kPrivate))) {
+if (!(og.first->Is<sem::Struct>() && (og.second == type::AddressSpace::kFunction ||
+og.second == type::AddressSpace::kPrivate))) {
 continue;
 }

@@ -184,7 +184,7 @@ struct LocalizeStructArrayAssignment::State {
 // Returns the type and address space of the originating variable of the lhs
 // of the assignment statement.
 // See https://www.w3.org/TR/WGSL/#originating-variable-section
-std::pair<const type::Type*, ast::AddressSpace> GetOriginatingTypeAndAddressSpace(
+std::pair<const type::Type*, type::AddressSpace> GetOriginatingTypeAndAddressSpace(
 const ast::AssignmentStatement* assign_stmt) {
 auto* root_ident = src->Sem().Get(assign_stmt->lhs)->RootIdentifier();
 if (TINT_UNLIKELY(!root_ident)) {

@@ -206,7 +206,7 @@ struct LocalizeStructArrayAssignment::State {
 TINT_ICE(Transform, b.Diagnostics())
 << "Expecting to find variable of type pointer or reference on lhs "
 "of assignment statement";
-return std::pair<const type::Type*, ast::AddressSpace>{};
+return std::pair<const type::Type*, type::AddressSpace>{};
 });
 }
 };

@@ -123,9 +123,9 @@ struct ModuleScopeVarToEntryPointParam::State {
 // Helper to create an AST node for the store type of the variable.
 auto store_type = [&]() { return CreateASTTypeFor(ctx, ty); };

-ast::AddressSpace sc = var->AddressSpace();
+type::AddressSpace sc = var->AddressSpace();
 switch (sc) {
-case ast::AddressSpace::kHandle: {
+case type::AddressSpace::kHandle: {
 // For a texture or sampler variable, redeclare it as an entry point parameter.
 // Disable entry point parameter validation.
 auto* disable_validation =

@@ -137,8 +137,8 @@ struct ModuleScopeVarToEntryPointParam::State {

 break;
 }
-case ast::AddressSpace::kStorage:
-case ast::AddressSpace::kUniform: {
+case type::AddressSpace::kStorage:
+case type::AddressSpace::kUniform: {
 // Variables into the Storage and Uniform address spaces are redeclared as entry
 // point parameters with a pointer type.
 auto attributes = ctx.Clone(var->Declaration()->attributes);

@@ -167,7 +167,7 @@ struct ModuleScopeVarToEntryPointParam::State {

 break;
 }
-case ast::AddressSpace::kWorkgroup: {
+case type::AddressSpace::kWorkgroup: {
 if (ContainsMatrix(var->Type())) {
 // Due to a bug in the MSL compiler, we use a threadgroup memory argument for
 // any workgroup allocation that contains a matrix. See crbug.com/tint/938.

@@ -183,7 +183,7 @@ struct ModuleScopeVarToEntryPointParam::State {
 ctx.dst->MemberAccessor(ctx.dst->Deref(workgroup_param()), member));
 auto* local_var = ctx.dst->Let(
 new_var_symbol,
-ctx.dst->ty.pointer(store_type(), ast::AddressSpace::kWorkgroup),
+ctx.dst->ty.pointer(store_type(), type::AddressSpace::kWorkgroup),
 member_ptr);
 ctx.InsertFront(func->body->statements, ctx.dst->Decl(local_var));
 is_pointer = true;

@@ -192,7 +192,7 @@ struct ModuleScopeVarToEntryPointParam::State {
 }
 [[fallthrough]];
 }
-case ast::AddressSpace::kPrivate: {
+case type::AddressSpace::kPrivate: {
 // Variables in the Private and Workgroup address spaces are redeclared at function
 // scope. Disable address space validation on this variable.
 auto* disable_validation =

@@ -204,7 +204,7 @@ struct ModuleScopeVarToEntryPointParam::State {

 break;
 }
-case ast::AddressSpace::kPushConstant: {
+case type::AddressSpace::kPushConstant: {
 ctx.dst->Diagnostics().add_error(
 diag::System::Transform,
 "unhandled module-scope address space (" + utils::ToString(sc) + ")");

@@ -233,13 +233,13 @@ struct ModuleScopeVarToEntryPointParam::State {
 auto* param_type = CreateASTTypeFor(ctx, ty);
 auto sc = var->AddressSpace();
 switch (sc) {
-case ast::AddressSpace::kPrivate:
-case ast::AddressSpace::kStorage:
-case ast::AddressSpace::kUniform:
-case ast::AddressSpace::kHandle:
-case ast::AddressSpace::kWorkgroup:
+case type::AddressSpace::kPrivate:
+case type::AddressSpace::kStorage:
+case type::AddressSpace::kUniform:
+case type::AddressSpace::kHandle:
+case type::AddressSpace::kWorkgroup:
 break;
-case ast::AddressSpace::kPushConstant: {
+case type::AddressSpace::kPushConstant: {
 ctx.dst->Diagnostics().add_error(
 diag::System::Transform,
 "unhandled module-scope address space (" + utils::ToString(sc) + ")");

@@ -321,7 +321,7 @@ struct ModuleScopeVarToEntryPointParam::State {

 bool needs_processing = false;
 for (auto* var : func_sem->TransitivelyReferencedGlobals()) {
-if (var->AddressSpace() != ast::AddressSpace::kNone) {
+if (var->AddressSpace() != type::AddressSpace::kNone) {
 needs_processing = true;
 break;
 }

@@ -378,7 +378,7 @@ struct ModuleScopeVarToEntryPointParam::State {

 // Process and redeclare all variables referenced by the function.
 for (auto* var : func_sem->TransitivelyReferencedGlobals()) {
-if (var->AddressSpace() == ast::AddressSpace::kNone) {
+if (var->AddressSpace() == type::AddressSpace::kNone) {
 continue;
 }
 if (local_private_vars_.count(var)) {

@@ -396,7 +396,7 @@ struct ModuleScopeVarToEntryPointParam::State {

 // Check if this is a private variable that is only referenced by this function.
 bool local_private = false;
-if (var->AddressSpace() == ast::AddressSpace::kPrivate) {
+if (var->AddressSpace() == type::AddressSpace::kPrivate) {
 local_private = true;
 for (auto* user : var->Users()) {
 auto* stmt = user->Stmt();

@@ -414,7 +414,7 @@ struct ModuleScopeVarToEntryPointParam::State {
 auto* initializer = ctx.Clone(var->Declaration()->initializer);
 auto* local_var = ctx.dst->Var(new_var_symbol,
 CreateASTTypeFor(ctx, var->Type()->UnwrapRef()),
-ast::AddressSpace::kPrivate, initializer,
+type::AddressSpace::kPrivate, initializer,
 utils::Vector{disable_validation});
 ctx.InsertFront(func_ast->body->statements, ctx.dst->Decl(local_var));
 local_private_vars_.insert(var);

@@ -426,7 +426,7 @@ struct ModuleScopeVarToEntryPointParam::State {
 is_wrapped);
 } else {
 ProcessVariableInUserFunction(func_ast, var, new_var_symbol, is_pointer);
-if (var->AddressSpace() == ast::AddressSpace::kWorkgroup) {
+if (var->AddressSpace() == type::AddressSpace::kWorkgroup) {
 needs_pointer_aliasing = true;
 }
 }

@@ -451,7 +451,7 @@ struct ModuleScopeVarToEntryPointParam::State {
 auto* str =
 ctx.dst->Structure(ctx.dst->Sym(), std::move(workgroup_parameter_members));
 auto* param_type =
-ctx.dst->ty.pointer(ctx.dst->ty.Of(str), ast::AddressSpace::kWorkgroup);
+ctx.dst->ty.pointer(ctx.dst->ty.Of(str), type::AddressSpace::kWorkgroup);
 auto* param = ctx.dst->Param(
 workgroup_param(), param_type,
 utils::Vector{

@@ -470,7 +470,7 @@ struct ModuleScopeVarToEntryPointParam::State {
 // For entry points, pass non-handle types as pointers.
 for (auto* target_var : target_sem->TransitivelyReferencedGlobals()) {
 auto sc = target_var->AddressSpace();
-if (sc == ast::AddressSpace::kNone) {
+if (sc == type::AddressSpace::kNone) {
 continue;
 }

@@ -501,7 +501,7 @@ struct ModuleScopeVarToEntryPointParam::State {
 // Now remove all module-scope variables with these address spaces.
 for (auto* var_ast : ctx.src->AST().GlobalVariables()) {
 auto* var_sem = ctx.src->Sem().Get(var_ast);
-if (var_sem->AddressSpace() != ast::AddressSpace::kNone) {
+if (var_sem->AddressSpace() != type::AddressSpace::kNone) {
 ctx.Remove(ctx.src->AST().GlobalDeclarations(), var_ast);
 }
 }

@@ -145,7 +145,7 @@ struct MultiplanarExternalTexture::State {
 b.Group(AInt(bps.plane_1.group)), b.Binding(AInt(bps.plane_1.binding)));
 syms.params = b.Symbols().New("ext_tex_params");
 b.GlobalVar(syms.params, b.ty.type_name("ExternalTextureParams"),
-ast::AddressSpace::kUniform, b.Group(AInt(bps.params.group)),
+type::AddressSpace::kUniform, b.Group(AInt(bps.params.group)),
 b.Binding(AInt(bps.params.binding)));

 // Replace the original texture_external binding with a texture_2d<f32> binding.

@@ -159,7 +159,7 @@ Transform::ApplyResult NumWorkgroupsFromUniform::Apply(const Program* src,
 }

 num_workgroups_ubo =
-b.GlobalVar(b.Sym(), b.ty.Of(num_workgroups_struct), ast::AddressSpace::kUniform,
+b.GlobalVar(b.Sym(), b.ty.Of(num_workgroups_struct), type::AddressSpace::kUniform,
 b.Group(AInt(group)), b.Binding(AInt(binding)));
 }
 return num_workgroups_ubo;

@@ -80,7 +80,7 @@ Transform::ApplyResult PadStructs::Apply(const Program* src, const DataMap&, Dat
 new_members.Push(b.Member(name, type));

 uint32_t size = ty->Size();
-if (ty->Is<sem::Struct>() && str->UsedAs(ast::AddressSpace::kUniform)) {
+if (ty->Is<sem::Struct>() && str->UsedAs(type::AddressSpace::kUniform)) {
 // std140 structs should be padded out to 16 bytes.
 size = utils::RoundUp(16u, size);
 } else if (auto* array_ty = ty->As<type::Array>()) {

@@ -93,7 +93,7 @@ Transform::ApplyResult PadStructs::Apply(const Program* src, const DataMap&, Dat

 // Add any required padding after the last member, if it's not a runtime-sized array.
 uint32_t struct_size = str->Size();
-if (str->UsedAs(ast::AddressSpace::kUniform)) {
+if (str->UsedAs(type::AddressSpace::kUniform)) {
 struct_size = utils::RoundUp(16u, struct_size);
 }
 if (offset < struct_size && !has_runtime_sized_array) {

@@ -53,7 +53,7 @@ struct PreservePadding::State {
 // Ignore phony assignment.
 return;
 }
-if (ty->As<type::Reference>()->AddressSpace() != ast::AddressSpace::kStorage) {
+if (ty->As<type::Reference>()->AddressSpace() != type::AddressSpace::kStorage) {
 // We only care about assignments that write to variables in the storage
 // address space, as nothing else is host-visible.
 return;

@@ -120,7 +120,7 @@ struct PreservePadding::State {
 auto helper_name = b.Symbols().New("assign_and_preserve_padding");
 utils::Vector<const ast::Parameter*, 2> params = {
 b.Param(kDestParamName,
-b.ty.pointer(CreateASTTypeFor(ctx, ty), ast::AddressSpace::kStorage,
+b.ty.pointer(CreateASTTypeFor(ctx, ty), type::AddressSpace::kStorage,
 ast::Access::kReadWrite)),
 b.Param(kValueParamName, CreateASTTypeFor(ctx, ty)),
 };

@@ -38,7 +38,7 @@ struct Robustness::State {
 /// Constructor
 /// @param program the source program
 /// @param omitted the omitted address spaces
-State(const Program* program, std::unordered_set<ast::AddressSpace>&& omitted)
+State(const Program* program, std::unordered_set<type::AddressSpace>&& omitted)
 : src(program), omitted_address_spaces(std::move(omitted)) {}

 /// Runs the transform

@@ -60,7 +60,7 @@ struct Robustness::State {
 CloneContext ctx = {&b, src, /* auto_clone_symbols */ true};

 /// Set of address spaces to not apply the transform to
-std::unordered_set<ast::AddressSpace> omitted_address_spaces;
+std::unordered_set<type::AddressSpace> omitted_address_spaces;

 /// Apply bounds clamping to array, vector and matrix indexing
 /// @param expr the array, vector or matrix index expression

@@ -294,14 +294,14 @@ Transform::ApplyResult Robustness::Apply(const Program* src,
 cfg = *cfg_data;
 }

-std::unordered_set<ast::AddressSpace> omitted_address_spaces;
+std::unordered_set<type::AddressSpace> omitted_address_spaces;
 for (auto sc : cfg.omitted_address_spaces) {
 switch (sc) {
 case AddressSpace::kUniform:
-omitted_address_spaces.insert(ast::AddressSpace::kUniform);
+omitted_address_spaces.insert(type::AddressSpace::kUniform);
 break;
 case AddressSpace::kStorage:
-omitted_address_spaces.insert(ast::AddressSpace::kStorage);
+omitted_address_spaces.insert(type::AddressSpace::kStorage);
 break;
 }
 }

@@ -144,7 +144,7 @@ struct Std140::State {
 // Scan structures for members that need forking
 for (auto* ty : src->Types()) {
 if (auto* str = ty->As<sem::Struct>()) {
-if (str->UsedAs(ast::AddressSpace::kUniform)) {
+if (str->UsedAs(type::AddressSpace::kUniform)) {
 for (auto* member : str->Members()) {
 if (needs_fork(member->Type())) {
 return true;

@@ -157,7 +157,7 @@ struct Std140::State {
 // Scan uniform variables that have types that need forking
 for (auto* decl : src->AST().GlobalVariables()) {
 auto* global = src->Sem().Get(decl);
-if (global->AddressSpace() == ast::AddressSpace::kUniform) {
+if (global->AddressSpace() == type::AddressSpace::kUniform) {
 if (needs_fork(global->Type()->UnwrapRef())) {
 return true;
 }

@@ -280,7 +280,7 @@ struct Std140::State {
 for (auto* global : src->Sem().Module()->DependencyOrderedDeclarations()) {
 // Check to see if this is a structure used by a uniform buffer...
 auto* str = sem.Get<sem::Struct>(global);
-if (str && str->UsedAs(ast::AddressSpace::kUniform)) {
+if (str && str->UsedAs(type::AddressSpace::kUniform)) {
 // Should this uniform buffer be forked for std140 usage?
 bool fork_std140 = false;
 utils::Vector<const ast::StructMember*, 8> members;

@@ -350,7 +350,7 @@ struct Std140::State {
 void ReplaceUniformVarTypes() {
 for (auto* global : src->AST().GlobalVariables()) {
 if (auto* var = global->As<ast::Var>()) {
-if (var->declared_address_space == ast::AddressSpace::kUniform) {
+if (var->declared_address_space == type::AddressSpace::kUniform) {
 auto* v = sem.Get(var);
 if (auto* std140_ty = Std140Type(v->Type()->UnwrapRef())) {
 ctx.Replace(global->type, std140_ty);

@@ -321,7 +321,7 @@ struct VertexPulling::State {
 });
 for (uint32_t i = 0; i < cfg.vertex_state.size(); ++i) {
 // The decorated variable with struct type
-b.GlobalVar(GetVertexBufferName(i), b.ty.Of(struct_type), ast::AddressSpace::kStorage,
+b.GlobalVar(GetVertexBufferName(i), b.ty.Of(struct_type), type::AddressSpace::kStorage,
 ast::Access::kRead, b.Binding(AInt(i)), b.Group(AInt(cfg.pulling_group)));
 }
 }

@@ -36,7 +36,7 @@ namespace {
 bool ShouldRun(const Program* program) {
 for (auto* global : program->AST().GlobalVariables()) {
 if (auto* var = global->As<ast::Var>()) {
-if (var->declared_address_space == ast::AddressSpace::kWorkgroup) {
+if (var->declared_address_space == type::AddressSpace::kWorkgroup) {
 return true;
 }
 }

@@ -139,7 +139,7 @@ struct ZeroInitWorkgroupMemory::State {
 // workgroup storage variables used by `fn`. This will populate #statements.
 auto* func = sem.Get(fn);
 for (auto* var : func->TransitivelyReferencedGlobals()) {
-if (var->AddressSpace() == ast::AddressSpace::kWorkgroup) {
+if (var->AddressSpace() == type::AddressSpace::kWorkgroup) {
 auto get_expr = [&](uint32_t num_values) {
 auto var_name = ctx.Clone(var->Declaration()->symbol);
 return Expression{b.Expr(var_name), num_values, ArrayIndices{}};