Diffstat (limited to 'src/shader_recompiler')
-rw-r--r--  src/shader_recompiler/backend/glsl/emit_glsl_image.cpp  |  45
1 file changed, 33 insertions(+), 12 deletions(-)
diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_image.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_image.cpp
index ce3a82656..a62e2b181 100644
--- a/src/shader_recompiler/backend/glsl/emit_glsl_image.cpp
+++ b/src/shader_recompiler/backend/glsl/emit_glsl_image.cpp
@@ -78,6 +78,28 @@ std::string ShadowSamplerVecCast(TextureType type) {
     }
 }
 
+std::string GetOffsetVec(EmitContext& ctx, const IR::Value& offset) {
+    if (offset.IsImmediate()) {
+        return fmt::format("int({})", offset.U32());
+    }
+    IR::Inst* const inst{offset.InstRecursive()};
+    if (inst->AreAllArgsImmediates()) {
+        switch (inst->GetOpcode()) {
+        case IR::Opcode::CompositeConstructU32x2:
+            return fmt::format("ivec2({},{})", inst->Arg(0).U32(), inst->Arg(1).U32());
+        case IR::Opcode::CompositeConstructU32x3:
+            return fmt::format("ivec3({},{},{})", inst->Arg(0).U32(), inst->Arg(1).U32(),
+                               inst->Arg(2).U32());
+        case IR::Opcode::CompositeConstructU32x4:
+            return fmt::format("ivec4({},{},{},{})", inst->Arg(0).U32(), inst->Arg(1).U32(),
+                               inst->Arg(2).U32(), inst->Arg(3).U32());
+        default:
+            break;
+        }
+    }
+    return ctx.var_alloc.Consume(offset);
+}
+
 std::string PtpOffsets(const IR::Value& offset, const IR::Value& offset2) {
     const std::array values{offset.InstRecursive(), offset2.InstRecursive()};
     if (!values[0]->AreAllArgsImmediates() || !values[1]->AreAllArgsImmediates()) {
@@ -119,7 +141,7 @@ void EmitImageSampleImplicitLod([[maybe_unused]] EmitContext& ctx, [[maybe_unuse
     const auto sparse_inst{PrepareSparse(inst)};
     if (!sparse_inst) {
         if (!offset.IsEmpty()) {
-            const auto offset_str{CastToIntVec(ctx.var_alloc.Consume(offset), info)};
+            const auto offset_str{GetOffsetVec(ctx, offset)};
             if (ctx.stage == Stage::Fragment) {
                 ctx.Add("{}=textureOffset({},{},{}{});", texel, texture, coords, offset_str, bias);
             } else {
@@ -137,8 +159,7 @@ void EmitImageSampleImplicitLod([[maybe_unused]] EmitContext& ctx, [[maybe_unuse
     // TODO: Query sparseTexels extension support
     if (!offset.IsEmpty()) {
         ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureOffsetARB({},{},{},{}{}));",
-                  *sparse_inst, texture, coords, CastToIntVec(ctx.var_alloc.Consume(offset), info),
-                  texel, bias);
+                  *sparse_inst, texture, coords, GetOffsetVec(ctx, offset), texel, bias);
     } else {
         ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureARB({},{},{}{}));", *sparse_inst,
                   texture, coords, texel, bias);
@@ -163,7 +184,7 @@ void EmitImageSampleExplicitLod([[maybe_unused]] EmitContext& ctx, [[maybe_unuse
     if (!sparse_inst) {
         if (!offset.IsEmpty()) {
             ctx.Add("{}=textureLodOffset({},{},{},{});", texel, texture, coords, lod_lc,
-                    CastToIntVec(ctx.var_alloc.Consume(offset), info));
+                    GetOffsetVec(ctx, offset));
         } else {
             ctx.Add("{}=textureLod({},{},{});", texel, texture, coords, lod_lc);
         }
@@ -173,7 +194,7 @@ void EmitImageSampleExplicitLod([[maybe_unused]] EmitContext& ctx, [[maybe_unuse
     if (!offset.IsEmpty()) {
         ctx.AddU1("{}=sparseTexelsResidentARB(sparseTexelFetchOffsetARB({},{},int({}),{},{}));",
                   *sparse_inst, texture, CastToIntVec(coords, info), lod_lc,
-                  CastToIntVec(ctx.var_alloc.Consume(offset), info), texel);
+                  GetOffsetVec(ctx, offset), texel);
     } else {
         ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureLodARB({},{},{},{}));", *sparse_inst,
                   texture, coords, lod_lc, texel);
@@ -202,7 +223,7 @@ void EmitImageSampleDrefImplicitLod([[maybe_unused]] EmitContext& ctx,
     const auto bias{info.has_bias ? fmt::format(",{}", bias_lc) : ""};
     const auto cast{ShadowSamplerVecCast(info.type)};
     if (!offset.IsEmpty()) {
-        const auto offset_str{CastToIntVec(ctx.var_alloc.Consume(offset), info)};
+        const auto offset_str{GetOffsetVec(ctx, offset)};
         if (ctx.stage == Stage::Fragment) {
             ctx.AddF32("{}=textureOffset({},{}({},{}),{}{});", inst, texture, cast, coords, dref,
                        offset_str, bias);
@@ -244,7 +265,7 @@ void EmitImageSampleDrefExplicitLod([[maybe_unused]] EmitContext& ctx,
     const auto texture{Texture(ctx, info, index)};
     const auto cast{ShadowSamplerVecCast(info.type)};
     if (!offset.IsEmpty()) {
-        const auto offset_str{CastToIntVec(ctx.var_alloc.Consume(offset), info)};
+        const auto offset_str{GetOffsetVec(ctx, offset)};
         if (info.type == TextureType::ColorArrayCube) {
             ctx.AddF32("{}=textureLodOffset({},{},{},{},{});", inst, texture, coords, dref, lod_lc,
                        offset_str);
@@ -279,7 +300,7 @@ void EmitImageGather([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Ins
     }
     if (offset2.IsEmpty()) {
         ctx.Add("{}=textureGatherOffset({},{},{},int({}));", texel, texture, coords,
-                CastToIntVec(ctx.var_alloc.Consume(offset), info), info.gather_component);
+                GetOffsetVec(ctx, offset), info.gather_component);
         return;
     }
     // PTP
@@ -295,8 +316,8 @@ void EmitImageGather([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Ins
     }
     if (offset2.IsEmpty()) {
         ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherOffsetARB({},{},{},{},int({})));",
-                  *sparse_inst, texture, CastToIntVec(coords, info),
-                  CastToIntVec(ctx.var_alloc.Consume(offset), info), texel, info.gather_component);
+                  *sparse_inst, texture, CastToIntVec(coords, info), GetOffsetVec(ctx, offset),
+                  texel, info.gather_component);
     }
     // PTP
     const auto offsets{PtpOffsets(offset, offset2)};
@@ -322,7 +343,7 @@ void EmitImageGatherDref([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR:
     }
     if (offset2.IsEmpty()) {
         ctx.Add("{}=textureGatherOffset({},{},{},{});", texel, texture, coords, dref,
-                CastToIntVec(ctx.var_alloc.Consume(offset), info));
+                GetOffsetVec(ctx, offset));
         return;
     }
     // PTP
@@ -338,7 +359,7 @@ void EmitImageGatherDref([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR:
     if (offset2.IsEmpty()) {
         ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherOffsetARB({},{},{},,{},{}));",
                   *sparse_inst, texture, CastToIntVec(coords, info), dref,
-                  CastToIntVec(ctx.var_alloc.Consume(offset), info), texel);
+                  GetOffsetVec(ctx, offset), texel);
     }
     // PTP
     const auto offsets{PtpOffsets(offset, offset2)};
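
The practical effect of the new GetOffsetVec helper on the generated GLSL: an offset that is an immediate, or a CompositeConstructU32xN whose arguments are all immediates, is folded into an int/ivec2/ivec3/ivec4 literal, while anything else still resolves to the allocated variable (now without the CastToIntVec wrapper). A minimal sketch of the two output shapes, using hypothetical names (tex, coord, offset_var, bias):

    vec4 texel;
    // Offset known at compile time: emitted as a literal, so it remains a
    // constant expression, which the textureOffset family formally requires in core GLSL.
    texel = textureOffset(tex, coord, ivec2(1,2), bias);
    // Offset only known at run time: the allocated variable is passed through
    // unchanged (strictly out of spec for textureOffset, but accepted by many drivers).
    texel = textureOffset(tex, coord, offset_var, bias);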