diff options
| author | 2021-05-30 17:27:00 -0400 | |
|---|---|---|
| committer | 2021-07-22 21:51:37 -0400 | |
| commit | 1269a0cf8b3844c1a9bb06c843a7698b0a9643d5 (patch) | |
| tree | a0716589fa3952bdeb0f1d19b4bb455d9cdd86e5 /src/shader_recompiler/backend/glsl/emit_glsl_image.cpp | |
| parent | glsl: Fix ATOM and implement ATOMS (diff) | |
| download | yuzu-1269a0cf8b3844c1a9bb06c843a7698b0a9643d5.tar.gz yuzu-1269a0cf8b3844c1a9bb06c843a7698b0a9643d5.tar.xz yuzu-1269a0cf8b3844c1a9bb06c843a7698b0a9643d5.zip | |
glsl: Rework variable allocator to allow for variable reuse
Diffstat (limited to 'src/shader_recompiler/backend/glsl/emit_glsl_image.cpp')
| -rw-r--r-- | src/shader_recompiler/backend/glsl/emit_glsl_image.cpp | 26 |
1 file changed, 13 insertions(+), 13 deletions(-)
diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_image.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_image.cpp index e63e3f2bd..eb427d8b5 100644 --- a/src/shader_recompiler/backend/glsl/emit_glsl_image.cpp +++ b/src/shader_recompiler/backend/glsl/emit_glsl_image.cpp | |||
| @@ -104,12 +104,12 @@ void EmitImageSampleImplicitLod([[maybe_unused]] EmitContext& ctx, [[maybe_unuse | |||
| 104 | } | 104 | } |
| 105 | const auto texture{Texture(ctx, info, index)}; | 105 | const auto texture{Texture(ctx, info, index)}; |
| 106 | const auto bias{info.has_bias ? fmt::format(",{}", bias_lc) : ""}; | 106 | const auto bias{info.has_bias ? fmt::format(",{}", bias_lc) : ""}; |
| 107 | const auto texel{ctx.reg_alloc.Define(inst, Type::F32x4)}; | 107 | const auto texel{ctx.var_alloc.Define(inst, GlslVarType::F32x4)}; |
| 108 | const auto sparse_inst{PrepareSparse(inst)}; | 108 | const auto sparse_inst{PrepareSparse(inst)}; |
| 109 | if (!sparse_inst) { | 109 | if (!sparse_inst) { |
| 110 | if (!offset.IsEmpty()) { | 110 | if (!offset.IsEmpty()) { |
| 111 | ctx.Add("{}=textureOffset({},{},{}{});", texel, texture, coords, | 111 | ctx.Add("{}=textureOffset({},{},{}{});", texel, texture, coords, |
| 112 | CastToIntVec(ctx.reg_alloc.Consume(offset), info), bias); | 112 | CastToIntVec(ctx.var_alloc.Consume(offset), info), bias); |
| 113 | } else { | 113 | } else { |
| 114 | if (ctx.stage == Stage::Fragment) { | 114 | if (ctx.stage == Stage::Fragment) { |
| 115 | ctx.Add("{}=texture({},{}{});", texel, texture, coords, bias); | 115 | ctx.Add("{}=texture({},{}{});", texel, texture, coords, bias); |
| @@ -122,7 +122,7 @@ void EmitImageSampleImplicitLod([[maybe_unused]] EmitContext& ctx, [[maybe_unuse | |||
| 122 | // TODO: Query sparseTexels extension support | 122 | // TODO: Query sparseTexels extension support |
| 123 | if (!offset.IsEmpty()) { | 123 | if (!offset.IsEmpty()) { |
| 124 | ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureOffsetARB({},{},{},{}{}));", | 124 | ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureOffsetARB({},{},{},{}{}));", |
| 125 | *sparse_inst, texture, coords, CastToIntVec(ctx.reg_alloc.Consume(offset), info), | 125 | *sparse_inst, texture, coords, CastToIntVec(ctx.var_alloc.Consume(offset), info), |
| 126 | texel, bias); | 126 | texel, bias); |
| 127 | } else { | 127 | } else { |
| 128 | ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureARB({},{},{}{}));", *sparse_inst, | 128 | ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureARB({},{},{}{}));", *sparse_inst, |
| @@ -143,12 +143,12 @@ void EmitImageSampleExplicitLod([[maybe_unused]] EmitContext& ctx, [[maybe_unuse | |||
| 143 | throw NotImplementedException("Lod clamp samples"); | 143 | throw NotImplementedException("Lod clamp samples"); |
| 144 | } | 144 | } |
| 145 | const auto texture{Texture(ctx, info, index)}; | 145 | const auto texture{Texture(ctx, info, index)}; |
| 146 | const auto texel{ctx.reg_alloc.Define(inst, Type::F32x4)}; | 146 | const auto texel{ctx.var_alloc.Define(inst, GlslVarType::F32x4)}; |
| 147 | const auto sparse_inst{PrepareSparse(inst)}; | 147 | const auto sparse_inst{PrepareSparse(inst)}; |
| 148 | if (!sparse_inst) { | 148 | if (!sparse_inst) { |
| 149 | if (!offset.IsEmpty()) { | 149 | if (!offset.IsEmpty()) { |
| 150 | ctx.Add("{}=textureLodOffset({},{},{},{});", texel, texture, coords, lod_lc, | 150 | ctx.Add("{}=textureLodOffset({},{},{},{});", texel, texture, coords, lod_lc, |
| 151 | CastToIntVec(ctx.reg_alloc.Consume(offset), info)); | 151 | CastToIntVec(ctx.var_alloc.Consume(offset), info)); |
| 152 | } else { | 152 | } else { |
| 153 | ctx.Add("{}=textureLod({},{},{});", texel, texture, coords, lod_lc); | 153 | ctx.Add("{}=textureLod({},{},{});", texel, texture, coords, lod_lc); |
| 154 | } | 154 | } |
| @@ -158,7 +158,7 @@ void EmitImageSampleExplicitLod([[maybe_unused]] EmitContext& ctx, [[maybe_unuse | |||
| 158 | if (!offset.IsEmpty()) { | 158 | if (!offset.IsEmpty()) { |
| 159 | ctx.AddU1("{}=sparseTexelsResidentARB(sparseTexelFetchOffsetARB({},{},int({}),{},{}));", | 159 | ctx.AddU1("{}=sparseTexelsResidentARB(sparseTexelFetchOffsetARB({},{},int({}),{},{}));", |
| 160 | *sparse_inst, texture, CastToIntVec(coords, info), lod_lc, | 160 | *sparse_inst, texture, CastToIntVec(coords, info), lod_lc, |
| 161 | CastToIntVec(ctx.reg_alloc.Consume(offset), info), texel); | 161 | CastToIntVec(ctx.var_alloc.Consume(offset), info), texel); |
| 162 | } else { | 162 | } else { |
| 163 | ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureLodARB({},{},{},{}));", *sparse_inst, | 163 | ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureLodARB({},{},{},{}));", *sparse_inst, |
| 164 | texture, coords, lod_lc, texel); | 164 | texture, coords, lod_lc, texel); |
| @@ -232,7 +232,7 @@ void EmitImageGather([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Ins | |||
| 232 | [[maybe_unused]] const IR::Value& offset2) { | 232 | [[maybe_unused]] const IR::Value& offset2) { |
| 233 | const auto info{inst.Flags<IR::TextureInstInfo>()}; | 233 | const auto info{inst.Flags<IR::TextureInstInfo>()}; |
| 234 | const auto texture{Texture(ctx, info, index)}; | 234 | const auto texture{Texture(ctx, info, index)}; |
| 235 | const auto texel{ctx.reg_alloc.Define(inst, Type::F32x4)}; | 235 | const auto texel{ctx.var_alloc.Define(inst, GlslVarType::F32x4)}; |
| 236 | const auto sparse_inst{PrepareSparse(inst)}; | 236 | const auto sparse_inst{PrepareSparse(inst)}; |
| 237 | if (!sparse_inst) { | 237 | if (!sparse_inst) { |
| 238 | if (offset.IsEmpty()) { | 238 | if (offset.IsEmpty()) { |
| @@ -242,7 +242,7 @@ void EmitImageGather([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Ins | |||
| 242 | } | 242 | } |
| 243 | if (offset2.IsEmpty()) { | 243 | if (offset2.IsEmpty()) { |
| 244 | ctx.Add("{}=textureGatherOffset({},{},{},int({}));", texel, texture, coords, | 244 | ctx.Add("{}=textureGatherOffset({},{},{},int({}));", texel, texture, coords, |
| 245 | CastToIntVec(ctx.reg_alloc.Consume(offset), info), info.gather_component); | 245 | CastToIntVec(ctx.var_alloc.Consume(offset), info), info.gather_component); |
| 246 | return; | 246 | return; |
| 247 | } | 247 | } |
| 248 | // PTP | 248 | // PTP |
| @@ -259,7 +259,7 @@ void EmitImageGather([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Ins | |||
| 259 | if (offset2.IsEmpty()) { | 259 | if (offset2.IsEmpty()) { |
| 260 | ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherOffsetARB({},{},{},{},int({})));", | 260 | ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherOffsetARB({},{},{},{},int({})));", |
| 261 | *sparse_inst, texture, CastToIntVec(coords, info), | 261 | *sparse_inst, texture, CastToIntVec(coords, info), |
| 262 | CastToIntVec(ctx.reg_alloc.Consume(offset), info), texel, info.gather_component); | 262 | CastToIntVec(ctx.var_alloc.Consume(offset), info), texel, info.gather_component); |
| 263 | } | 263 | } |
| 264 | // PTP | 264 | // PTP |
| 265 | const auto offsets{PtpOffsets(offset, offset2)}; | 265 | const auto offsets{PtpOffsets(offset, offset2)}; |
| @@ -276,7 +276,7 @@ void EmitImageGatherDref([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR: | |||
| 276 | [[maybe_unused]] std::string_view dref) { | 276 | [[maybe_unused]] std::string_view dref) { |
| 277 | const auto info{inst.Flags<IR::TextureInstInfo>()}; | 277 | const auto info{inst.Flags<IR::TextureInstInfo>()}; |
| 278 | const auto texture{Texture(ctx, info, index)}; | 278 | const auto texture{Texture(ctx, info, index)}; |
| 279 | const auto texel{ctx.reg_alloc.Define(inst, Type::F32x4)}; | 279 | const auto texel{ctx.var_alloc.Define(inst, GlslVarType::F32x4)}; |
| 280 | const auto sparse_inst{PrepareSparse(inst)}; | 280 | const auto sparse_inst{PrepareSparse(inst)}; |
| 281 | if (!sparse_inst) { | 281 | if (!sparse_inst) { |
| 282 | if (offset.IsEmpty()) { | 282 | if (offset.IsEmpty()) { |
| @@ -285,7 +285,7 @@ void EmitImageGatherDref([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR: | |||
| 285 | } | 285 | } |
| 286 | if (offset2.IsEmpty()) { | 286 | if (offset2.IsEmpty()) { |
| 287 | ctx.Add("{}=textureGatherOffset({},{},{},{});", texel, texture, coords, dref, | 287 | ctx.Add("{}=textureGatherOffset({},{},{},{});", texel, texture, coords, dref, |
| 288 | CastToIntVec(ctx.reg_alloc.Consume(offset), info)); | 288 | CastToIntVec(ctx.var_alloc.Consume(offset), info)); |
| 289 | return; | 289 | return; |
| 290 | } | 290 | } |
| 291 | // PTP | 291 | // PTP |
| @@ -301,7 +301,7 @@ void EmitImageGatherDref([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR: | |||
| 301 | if (offset2.IsEmpty()) { | 301 | if (offset2.IsEmpty()) { |
| 302 | ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherOffsetARB({},{},{},,{},{}));", | 302 | ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherOffsetARB({},{},{},,{},{}));", |
| 303 | *sparse_inst, texture, CastToIntVec(coords, info), dref, | 303 | *sparse_inst, texture, CastToIntVec(coords, info), dref, |
| 304 | CastToIntVec(ctx.reg_alloc.Consume(offset), info), texel); | 304 | CastToIntVec(ctx.var_alloc.Consume(offset), info), texel); |
| 305 | } | 305 | } |
| 306 | // PTP | 306 | // PTP |
| 307 | const auto offsets{PtpOffsets(offset, offset2)}; | 307 | const auto offsets{PtpOffsets(offset, offset2)}; |
| @@ -323,7 +323,7 @@ void EmitImageFetch([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst | |||
| 323 | } | 323 | } |
| 324 | const auto texture{Texture(ctx, info, index)}; | 324 | const auto texture{Texture(ctx, info, index)}; |
| 325 | const auto sparse_inst{PrepareSparse(inst)}; | 325 | const auto sparse_inst{PrepareSparse(inst)}; |
| 326 | const auto texel{ctx.reg_alloc.Define(inst, Type::F32x4)}; | 326 | const auto texel{ctx.var_alloc.Define(inst, GlslVarType::F32x4)}; |
| 327 | if (!sparse_inst) { | 327 | if (!sparse_inst) { |
| 328 | if (!offset.empty()) { | 328 | if (!offset.empty()) { |
| 329 | ctx.Add("{}=texelFetchOffset({},{},int({}),{});", texel, texture, | 329 | ctx.Add("{}=texelFetchOffset({},{},int({}),{});", texel, texture, |