Diffstat (limited to 'src/shader_recompiler/backend/glsl/emit_glsl_image.cpp')
-rw-r--r--  src/shader_recompiler/backend/glsl/emit_glsl_image.cpp | 101
1 file changed, 71 insertions, 30 deletions
diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_image.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_image.cpp
index 3de19cdfe..c62451e23 100644
--- a/src/shader_recompiler/backend/glsl/emit_glsl_image.cpp
+++ b/src/shader_recompiler/backend/glsl/emit_glsl_image.cpp
@@ -100,7 +100,7 @@ void EmitImageSampleImplicitLod([[maybe_unused]] EmitContext& ctx, [[maybe_unuse
                                 [[maybe_unused]] const IR::Value& offset) {
     const auto info{inst.Flags<IR::TextureInstInfo>()};
     if (info.has_lod_clamp) {
-        throw NotImplementedException("Lod clamp samples");
+        throw NotImplementedException("EmitImageSampleImplicitLod Lod clamp samples");
     }
     const auto texture{Texture(ctx, info, index)};
     const auto bias{info.has_bias ? fmt::format(",{}", bias_lc) : ""};
@@ -108,8 +108,12 @@ void EmitImageSampleImplicitLod([[maybe_unused]] EmitContext& ctx, [[maybe_unuse
     const auto sparse_inst{PrepareSparse(inst)};
     if (!sparse_inst) {
         if (!offset.IsEmpty()) {
-            ctx.Add("{}=textureOffset({},{},{}{});", texel, texture, coords,
-                    CastToIntVec(ctx.var_alloc.Consume(offset), info), bias);
+            const auto offset_str{CastToIntVec(ctx.var_alloc.Consume(offset), info)};
+            if (ctx.stage == Stage::Fragment) {
+                ctx.Add("{}=textureOffset({},{},{}{});", texel, texture, coords, offset_str, bias);
+            } else {
+                ctx.Add("{}=textureLodOffset({},{},0.0,{});", texel, texture, coords, offset_str);
+            }
         } else {
             if (ctx.stage == Stage::Fragment) {
                 ctx.Add("{}=texture({},{}{});", texel, texture, coords, bias);
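For reference, GLSL only defines implicit-derivative sampling (and the optional bias argument) in fragment shaders, which is why the hunk above keeps textureOffset for the fragment stage but pins every other stage to LOD 0 with textureLodOffset. A rough sketch of the GLSL the two branches emit, assuming a 2D sampler and illustrative variable names rather than the allocator's real output:

    // Fragment stage: implicit-LOD lookup with the texel offset; the trailing bias
    // is appended only when the instruction actually carries one.
    texel_0 = textureOffset(tex_2d, coords, ivec2(1, -1), bias_0);
    // Any other stage: no implicit derivatives are available, so the LOD is forced to 0.0.
    texel_0 = textureLodOffset(tex_2d, coords, 0.0, ivec2(1, -1));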
@@ -137,10 +141,10 @@ void EmitImageSampleExplicitLod([[maybe_unused]] EmitContext& ctx, [[maybe_unuse
                                 [[maybe_unused]] const IR::Value& offset) {
     const auto info{inst.Flags<IR::TextureInstInfo>()};
     if (info.has_bias) {
-        throw NotImplementedException("Bias texture samples");
+        throw NotImplementedException("EmitImageSampleExplicitLod Bias texture samples");
     }
     if (info.has_lod_clamp) {
-        throw NotImplementedException("Lod clamp samples");
+        throw NotImplementedException("EmitImageSampleExplicitLod Lod clamp samples");
     }
     const auto texture{Texture(ctx, info, index)};
     const auto texel{ctx.var_alloc.Define(inst, GlslVarType::F32x4)};
@@ -175,24 +179,32 @@ void EmitImageSampleDrefImplicitLod([[maybe_unused]] EmitContext& ctx,
     const auto info{inst.Flags<IR::TextureInstInfo>()};
     const auto sparse_inst{PrepareSparse(inst)};
     if (sparse_inst) {
-        throw NotImplementedException("Sparse texture samples");
+        throw NotImplementedException("EmitImageSampleDrefImplicitLod Sparse texture samples");
     }
     if (info.has_bias) {
-        throw NotImplementedException("Bias texture samples");
+        throw NotImplementedException("EmitImageSampleDrefImplicitLod Bias texture samples");
     }
     if (info.has_lod_clamp) {
-        throw NotImplementedException("Lod clamp samples");
-    }
-    if (!offset.IsEmpty()) {
-        throw NotImplementedException("textureLodOffset");
+        throw NotImplementedException("EmitImageSampleDrefImplicitLod Lod clamp samples");
     }
     const auto texture{Texture(ctx, info, index)};
     const auto bias{info.has_bias ? fmt::format(",{}", bias_lc) : ""};
     const auto cast{ShadowSamplerVecCast(info.type)};
-    if (ctx.stage == Stage::Fragment) {
-        ctx.AddF32("{}=texture({},{}({},{}){});", inst, texture, cast, coords, dref, bias);
+    if (!offset.IsEmpty()) {
+        const auto offset_str{CastToIntVec(ctx.var_alloc.Consume(offset), info)};
+        if (ctx.stage == Stage::Fragment) {
+            ctx.AddF32("{}=textureOffset({},{}({},{}),{}{});", inst, texture, cast, coords, dref,
+                       offset_str, bias);
+        } else {
+            ctx.AddF32("{}=textureLodOffset({},{}({},{}),0.0,{});", inst, texture, cast, coords,
+                       dref, offset_str);
+        }
     } else {
-        ctx.AddF32("{}=textureLod({},{}({},{}),0.0);", inst, texture, cast, coords, dref);
+        if (ctx.stage == Stage::Fragment) {
+            ctx.AddF32("{}=texture({},{}({},{}){});", inst, texture, cast, coords, dref, bias);
+        } else {
+            ctx.AddF32("{}=textureLod({},{}({},{}),0.0);", inst, texture, cast, coords, dref);
+        }
     }
 }
 
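The depth-compare (Dref) variant above follows the same fragment/non-fragment split; the reference value is folded into the coordinate vector through the ShadowSamplerVecCast helper. A minimal sketch for a 2D shadow sampler, again with illustrative names and the optional bias omitted:

    // Fragment stage, offset present: shadow comparison with a texel offset.
    result_0 = textureOffset(shadow_2d, vec3(coords, dref), ivec2(1, 1));
    // Other stages: the same comparison forced to LOD 0.
    result_0 = textureLodOffset(shadow_2d, vec3(coords, dref), 0.0, ivec2(1, 1));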
@@ -206,22 +218,30 @@ void EmitImageSampleDrefExplicitLod([[maybe_unused]] EmitContext& ctx,
     const auto info{inst.Flags<IR::TextureInstInfo>()};
     const auto sparse_inst{PrepareSparse(inst)};
     if (sparse_inst) {
-        throw NotImplementedException("Sparse texture samples");
+        throw NotImplementedException("EmitImageSampleDrefExplicitLod Sparse texture samples");
     }
     if (info.has_bias) {
-        throw NotImplementedException("Bias texture samples");
+        throw NotImplementedException("EmitImageSampleDrefExplicitLod Bias texture samples");
     }
     if (info.has_lod_clamp) {
-        throw NotImplementedException("Lod clamp samples");
-    }
-    if (!offset.IsEmpty()) {
-        throw NotImplementedException("textureLodOffset");
+        throw NotImplementedException("EmitImageSampleDrefExplicitLod Lod clamp samples");
     }
     const auto texture{Texture(ctx, info, index)};
-    if (info.type == TextureType::ColorArrayCube) {
-        ctx.AddF32("{}=textureLod({},{},{},{});", inst, texture, coords, dref, lod_lc);
+    if (!offset.IsEmpty()) {
+        const auto offset_str{CastToIntVec(ctx.var_alloc.Consume(offset), info)};
+        if (info.type == TextureType::ColorArrayCube) {
+            ctx.AddF32("{}=textureLodOffset({},{},{},{},{});", inst, texture, coords, dref, lod_lc,
+                       offset_str);
+        } else {
+            ctx.AddF32("{}=textureLodOffset({},vec3({},{}),{},{});", inst, texture, coords, dref,
+                       lod_lc, offset_str);
+        }
     } else {
-        ctx.AddF32("{}=textureLod({},vec3({},{}),{});", inst, texture, coords, dref, lod_lc);
+        if (info.type == TextureType::ColorArrayCube) {
+            ctx.AddF32("{}=textureLod({},{},{},{});", inst, texture, coords, dref, lod_lc);
+        } else {
+            ctx.AddF32("{}=textureLod({},vec3({},{}),{});", inst, texture, coords, dref, lod_lc);
+        }
     }
 }
 
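In the explicit-LOD depth-compare path the LOD comes from the instruction itself, so both branches of the new offset path go through textureLodOffset; the only difference is how dref is passed (the cube-array branch keeps it as a separate argument, as shown in the hunk, while every other type packs it into the coordinate vector). A sketch of the non-cube-array case, names illustrative:

    // dref packed into the coordinate vector, LOD and texel offset taken from the instruction.
    result_0 = textureLodOffset(shadow_2d, vec3(coords, dref), lod_0, ivec2(-1, 2));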
@@ -316,10 +336,10 @@ void EmitImageFetch([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst
                     [[maybe_unused]] std::string_view ms) {
     const auto info{inst.Flags<IR::TextureInstInfo>()};
     if (info.has_bias) {
-        throw NotImplementedException("Bias texture samples");
+        throw NotImplementedException("EmitImageFetch Bias texture samples");
     }
     if (info.has_lod_clamp) {
-        throw NotImplementedException("Lod clamp samples");
+        throw NotImplementedException("EmitImageFetch Lod clamp samples");
     }
     const auto texture{Texture(ctx, info, index)};
     const auto sparse_inst{PrepareSparse(inst)};
@@ -368,7 +388,7 @@ void EmitImageQueryDimensions([[maybe_unused]] EmitContext& ctx, [[maybe_unused]
             "{}=uvec4(uvec3(textureSize({},int({}))),uint(textureQueryLevels({})));", inst, texture,
             lod, texture);
     case TextureType::Buffer:
-        throw NotImplementedException("Texture buffers");
+        throw NotImplementedException("EmitImageQueryDimensions Texture buffers");
     }
     throw LogicError("Unspecified image type {}", info.type.Value());
 }
@@ -384,10 +404,31 @@ void EmitImageQueryLod([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::I
 void EmitImageGradient([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
                        [[maybe_unused]] const IR::Value& index,
                        [[maybe_unused]] std::string_view coords,
-                       [[maybe_unused]] std::string_view derivates,
-                       [[maybe_unused]] std::string_view offset,
-                       [[maybe_unused]] std::string_view lod_clamp) {
-    NotImplemented();
+                       [[maybe_unused]] const IR::Value& derivatives,
+                       [[maybe_unused]] const IR::Value& offset,
+                       [[maybe_unused]] const IR::Value& lod_clamp) {
+    const auto info{inst.Flags<IR::TextureInstInfo>()};
+    if (info.has_lod_clamp) {
+        throw NotImplementedException("EmitImageGradient Lod clamp samples");
+    }
+    const auto sparse_inst{PrepareSparse(inst)};
+    if (sparse_inst) {
+        throw NotImplementedException("EmitImageGradient Sparse");
+    }
+    if (!offset.IsEmpty()) {
+        throw NotImplementedException("EmitImageGradient offset");
+    }
+    const auto texture{Texture(ctx, info, index)};
+    const auto texel{ctx.var_alloc.Define(inst, GlslVarType::F32x4)};
+    const bool multi_component{info.num_derivates > 1 || info.has_lod_clamp};
+    const auto derivatives_vec{ctx.var_alloc.Consume(derivatives)};
+    if (multi_component) {
+        ctx.Add("{}=textureGrad({},{},vec2({}.xz),vec2({}.yz));", texel, texture, coords,
+                derivatives_vec, derivatives_vec);
+    } else {
+        ctx.Add("{}=textureGrad({},{},float({}.x),float({}.y));", texel, texture, coords,
+                derivatives_vec, derivatives_vec);
+    }
 }
 
 void EmitImageRead([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
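The EmitImageGradient hunk above lowers gradient samples to textureGrad, splitting the packed derivative operand into the dPdx/dPdy arguments: .xz/.yz swizzles when more than one derivative component is present (info.num_derivates > 1), scalars otherwise. A sketch of the emitted GLSL with illustrative sampler and variable names:

    // num_derivates > 1: the packed derivative vector is split into two vec2 gradients.
    texel_0 = textureGrad(tex_2d, coords, vec2(derivs.xz), vec2(derivs.yz));
    // Single-component case, e.g. a 1D lookup: scalar derivatives.
    texel_1 = textureGrad(tex_1d, coord, float(derivs.x), float(derivs.y));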