Diffstat (limited to 'src/shader_recompiler/backend/glsl')
-rw-r--r--  src/shader_recompiler/backend/glsl/emit_glsl_image.cpp  91
1 file changed, 89 insertions(+), 2 deletions(-)
diff --git a/src/shader_recompiler/backend/glsl/emit_glsl_image.cpp b/src/shader_recompiler/backend/glsl/emit_glsl_image.cpp
index 78e2d5bac..e12d7b850 100644
--- a/src/shader_recompiler/backend/glsl/emit_glsl_image.cpp
+++ b/src/shader_recompiler/backend/glsl/emit_glsl_image.cpp
@@ -67,6 +67,23 @@ std::string ShadowSamplerVecCast(TextureType type) {
     }
 }

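+// Builds the constant ivec2[4] array literal consumed by textureGatherOffsets (PTP).
+// Both offsets must be fully immediate CompositeConstructU32x4 values; otherwise the
+// call is stubbed with an empty string.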
+std::string PtpOffsets(const IR::Value& offset, const IR::Value& offset2) {
+    const std::array values{offset.InstRecursive(), offset2.InstRecursive()};
+    if (!values[0]->AreAllArgsImmediates() || !values[1]->AreAllArgsImmediates()) {
+        // LOG_WARNING("Not all arguments in PTP are immediate, STUBBING");
+        return "";
+    }
+    const IR::Opcode opcode{values[0]->GetOpcode()};
+    if (opcode != values[1]->GetOpcode() || opcode != IR::Opcode::CompositeConstructU32x4) {
+        throw LogicError("Invalid PTP arguments");
+    }
+    auto read{[&](unsigned int a, unsigned int b) { return values[a]->Arg(b).U32(); }};
+
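+    // read(a, b): the b-th immediate component of the a-th composite.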
+    return fmt::format("ivec2[](ivec2({},{}),ivec2({},{}),ivec2({},{}),ivec2({},{}))", read(0, 0),
+                       read(0, 1), read(0, 2), read(0, 3), read(1, 0), read(1, 1), read(1, 2),
+                       read(1, 3));
+}
+
 IR::Inst* PrepareSparse(IR::Inst& inst) {
     const auto sparse_inst{inst.GetAssociatedPseudoOperation(IR::Opcode::GetSparseFromOp)};
     if (sparse_inst) {
@@ -213,7 +230,45 @@ void EmitImageGather([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Ins
                      [[maybe_unused]] std::string_view coords,
                      [[maybe_unused]] const IR::Value& offset,
                      [[maybe_unused]] const IR::Value& offset2) {
-    throw NotImplementedException("GLSL Instruction");
+    const auto info{inst.Flags<IR::TextureInstInfo>()};
+    const auto texture{Texture(ctx, info, index)};
+    const auto texel{ctx.reg_alloc.Define(inst, Type::F32x4)};
+    const auto sparse_inst{PrepareSparse(inst)};
+    if (!offset2.IsEmpty()) {
+        ctx.Add("/*OFFSET 2 IS {}*/", ctx.reg_alloc.Consume(offset2));
+    }
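+    // Non-sparse path: plain textureGather/textureGatherOffset(s).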
+    if (!sparse_inst) {
+        if (offset.IsEmpty()) {
+            ctx.Add("{}=textureGather({},{},int({}));", texel, texture, coords,
+                    info.gather_component);
+            return;
+        }
+        if (offset2.IsEmpty()) {
+            ctx.Add("{}=textureGatherOffset({},{},{},int({}));", texel, texture, coords,
+                    CastToIntVec(ctx.reg_alloc.Consume(offset), info), info.gather_component);
+            return;
+        }
+        // PTP
+        const auto offsets{PtpOffsets(offset, offset2)};
+        ctx.Add("{}=textureGatherOffsets({},{},{},int({}));", texel, texture, coords, offsets,
+                info.gather_component);
+        return;
+    }
+    // TODO: Query sparseTexels extension support
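+    // ARB_sparse_texture2 gathers return a residency code and write the texel through an
+    // out parameter; sparseTexelsResidentARB converts that code to a bool.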
+    if (offset.IsEmpty()) {
+        ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherARB({},{},{},int({})));",
+                  *sparse_inst, texture, coords, texel, info.gather_component);
+        return;
+    }
+    if (offset2.IsEmpty()) {
+        ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherOffsetARB({},{},{},{},int({})));",
+                  *sparse_inst, texture, CastToIntVec(coords, info),
+                  CastToIntVec(ctx.reg_alloc.Consume(offset), info), texel, info.gather_component);
+        return;
+    }
+    // PTP
+    const auto offsets{PtpOffsets(offset, offset2)};
+    ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherOffsetsARB({},{},{},{},int({})));",
+              *sparse_inst, texture, CastToIntVec(coords, info), offsets, texel,
+              info.gather_component);
 }

 void EmitImageGatherDref([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,
@@ -222,7 +277,39 @@ void EmitImageGatherDref([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR:
                          [[maybe_unused]] const IR::Value& offset,
                          [[maybe_unused]] const IR::Value& offset2,
                          [[maybe_unused]] std::string_view dref) {
-    throw NotImplementedException("GLSL Instruction");
+    const auto info{inst.Flags<IR::TextureInstInfo>()};
+    const auto texture{Texture(ctx, info, index)};
+    const auto texel{ctx.reg_alloc.Define(inst, Type::F32x4)};
+    const auto sparse_inst{PrepareSparse(inst)};
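+    // Same structure as EmitImageGather above, with dref passed as the depth reference
+    // (refZ) instead of a gather component.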
+    if (!sparse_inst) {
+        if (offset.IsEmpty()) {
+            ctx.Add("{}=textureGather({},{},{});", texel, texture, coords, dref);
+            return;
+        }
+        if (offset2.IsEmpty()) {
+            ctx.Add("{}=textureGatherOffset({},{},{},{});", texel, texture, coords, dref,
+                    CastToIntVec(ctx.reg_alloc.Consume(offset), info));
+            return;
+        }
+        // PTP
+        const auto offsets{PtpOffsets(offset, offset2)};
+        ctx.Add("{}=textureGatherOffsets({},{},{},{});", texel, texture, coords, dref, offsets);
+        return;
+    }
+    // TODO: Query sparseTexels extension support
+    if (offset.IsEmpty()) {
+        ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherARB({},{},{},{}));", *sparse_inst,
+                  texture, coords, dref, texel);
+        return;
+    }
+    if (offset2.IsEmpty()) {
+        ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherOffsetARB({},{},{},{},{}));",
+                  *sparse_inst, texture, CastToIntVec(coords, info), dref,
+                  CastToIntVec(ctx.reg_alloc.Consume(offset), info), texel);
+        return;
+    }
+    // PTP
+    const auto offsets{PtpOffsets(offset, offset2)};
+    ctx.AddU1("{}=sparseTexelsResidentARB(sparseTextureGatherOffsetsARB({},{},{},{},{}));",
+              *sparse_inst, texture, CastToIntVec(coords, info), dref, offsets, texel);
 }

 void EmitImageFetch([[maybe_unused]] EmitContext& ctx, [[maybe_unused]] IR::Inst& inst,