author     ReinUsesLisp  2020-06-22 03:03:27 -0300
committer  ReinUsesLisp  2020-06-26 20:57:22 -0300
commit     c94b398f14c05697dc9cbb79aa31601b2ad0d70e (patch)
tree       c7002ea49c330e03ad6c5dae7311d6450a4ea2ae /src
parent     renderer_vulkan/wrapper: Add VK_EXT_extended_dynamic_state functions (diff)
vk_rasterizer: Use VK_EXT_extended_dynamic_state
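
VK_EXT_extended_dynamic_state lets cull mode, front face, primitive topology, vertex binding strides and the depth/stencil test toggles be set on the command buffer instead of being baked into the pipeline, so fewer pipeline permutations have to be created. As background only (not part of this commit), here is a minimal sketch of how the extension is typically queried and driven in raw Vulkan; the handles and helper names below are placeholders, and yuzu routes the equivalent calls through its VKDevice and vk::CommandBuffer wrappers:

    #include <vulkan/vulkan.h>

    // Sketch only: query whether the implementation reports the extension's feature bit.
    // 'physical_device' is a placeholder handle created elsewhere.
    bool SupportsExtendedDynamicState(VkPhysicalDevice physical_device) {
        VkPhysicalDeviceExtendedDynamicStateFeaturesEXT dynamic_state{};
        dynamic_state.sType =
            VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_EXTENDED_DYNAMIC_STATE_FEATURES_EXT;
        VkPhysicalDeviceFeatures2 features{};
        features.sType = VK_STRUCTURE_TYPE_PHYSICAL_DEVICE_FEATURES_2;
        features.pNext = &dynamic_state;
        vkGetPhysicalDeviceFeatures2(physical_device, &features);
        return dynamic_state.extendedDynamicState == VK_TRUE;
    }

    // Sketch only: at draw time the state is recorded on the command buffer. The pipeline
    // must have listed the matching VK_DYNAMIC_STATE_*_EXT values as dynamic, and the
    // vkCmdSet*EXT entry points are normally loaded through vkGetDeviceProcAddr.
    void RecordDynamicRasterState(VkCommandBuffer cmdbuf) {
        vkCmdSetCullModeEXT(cmdbuf, VK_CULL_MODE_BACK_BIT);
        vkCmdSetFrontFaceEXT(cmdbuf, VK_FRONT_FACE_COUNTER_CLOCKWISE);
        vkCmdSetDepthTestEnableEXT(cmdbuf, VK_TRUE);
        vkCmdSetDepthWriteEnableEXT(cmdbuf, VK_TRUE);
        vkCmdSetDepthCompareOpEXT(cmdbuf, VK_COMPARE_OP_GREATER_OR_EQUAL);
    }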
Diffstat (limited to 'src')
-rw-r--r--  src/video_core/renderer_vulkan/vk_graphics_pipeline.cpp   20
-rw-r--r--  src/video_core/renderer_vulkan/vk_rasterizer.cpp         254
-rw-r--r--  src/video_core/renderer_vulkan/vk_rasterizer.h            10
-rw-r--r--  src/video_core/renderer_vulkan/vk_state_tracker.cpp       68
-rw-r--r--  src/video_core/renderer_vulkan/vk_state_tracker.h         50
5 files changed, 356 insertions, 46 deletions
diff --git a/src/video_core/renderer_vulkan/vk_graphics_pipeline.cpp b/src/video_core/renderer_vulkan/vk_graphics_pipeline.cpp
index b892df412..739801f5d 100644
--- a/src/video_core/renderer_vulkan/vk_graphics_pipeline.cpp
+++ b/src/video_core/renderer_vulkan/vk_graphics_pipeline.cpp
@@ -354,11 +354,27 @@ vk::Pipeline VKGraphicsPipeline::CreatePipeline(const RenderPassParams& renderpa
     color_blend_ci.pAttachments = cb_attachments.data();
     std::memset(color_blend_ci.blendConstants, 0, sizeof(color_blend_ci.blendConstants));
 
-    static constexpr std::array dynamic_states = {
+    std::vector dynamic_states = {
         VK_DYNAMIC_STATE_VIEWPORT, VK_DYNAMIC_STATE_SCISSOR,
         VK_DYNAMIC_STATE_DEPTH_BIAS, VK_DYNAMIC_STATE_BLEND_CONSTANTS,
         VK_DYNAMIC_STATE_DEPTH_BOUNDS, VK_DYNAMIC_STATE_STENCIL_COMPARE_MASK,
-        VK_DYNAMIC_STATE_STENCIL_WRITE_MASK, VK_DYNAMIC_STATE_STENCIL_REFERENCE};
+        VK_DYNAMIC_STATE_STENCIL_WRITE_MASK, VK_DYNAMIC_STATE_STENCIL_REFERENCE,
+    };
+    if (device.IsExtExtendedDynamicStateSupported()) {
+        static constexpr std::array extended = {
+            VK_DYNAMIC_STATE_CULL_MODE_EXT,
+            VK_DYNAMIC_STATE_FRONT_FACE_EXT,
+            VK_DYNAMIC_STATE_PRIMITIVE_TOPOLOGY_EXT,
+            VK_DYNAMIC_STATE_VERTEX_INPUT_BINDING_STRIDE_EXT,
+            VK_DYNAMIC_STATE_DEPTH_TEST_ENABLE_EXT,
+            VK_DYNAMIC_STATE_DEPTH_WRITE_ENABLE_EXT,
+            VK_DYNAMIC_STATE_DEPTH_COMPARE_OP_EXT,
+            VK_DYNAMIC_STATE_DEPTH_BOUNDS_TEST_ENABLE_EXT,
+            VK_DYNAMIC_STATE_STENCIL_TEST_ENABLE_EXT,
+            VK_DYNAMIC_STATE_STENCIL_OP_EXT,
+        };
+        dynamic_states.insert(dynamic_states.end(), extended.begin(), extended.end());
+    }
 
     VkPipelineDynamicStateCreateInfo dynamic_state_ci;
     dynamic_state_ci.sType = VK_STRUCTURE_TYPE_PIPELINE_DYNAMIC_STATE_CREATE_INFO;
diff --git a/src/video_core/renderer_vulkan/vk_rasterizer.cpp b/src/video_core/renderer_vulkan/vk_rasterizer.cpp
index a42f8c564..40d8df8c7 100644
--- a/src/video_core/renderer_vulkan/vk_rasterizer.cpp
+++ b/src/video_core/renderer_vulkan/vk_rasterizer.cpp
@@ -186,13 +186,22 @@ bool HasToPreserveDepthContents(bool is_clear, const Maxwell& regs) {
            scissor.max_y < regs.zeta_height;
 }
 
+template <std::size_t N>
+std::array<VkDeviceSize, N> ExpandStrides(const std::array<u16, N>& strides) {
+    std::array<VkDeviceSize, N> expanded;
+    std::copy(strides.begin(), strides.end(), expanded.begin());
+    return expanded;
+}
+
 } // Anonymous namespace
 
 class BufferBindings final {
 public:
-    void AddVertexBinding(VkBuffer buffer, VkDeviceSize offset) {
+    void AddVertexBinding(VkBuffer buffer, VkDeviceSize offset, VkDeviceSize size, u32 stride) {
         vertex.buffers[vertex.num_buffers] = buffer;
         vertex.offsets[vertex.num_buffers] = offset;
+        vertex.sizes[vertex.num_buffers] = size;
+        vertex.strides[vertex.num_buffers] = static_cast<u16>(stride);
         ++vertex.num_buffers;
     }
 
@@ -202,76 +211,76 @@ public:
         index.type = type;
     }
 
-    void Bind(VKScheduler& scheduler) const {
+    void Bind(const VKDevice& device, VKScheduler& scheduler) const {
         // Use this large switch case to avoid dispatching more memory in the record lambda than
         // what we need. It looks horrible, but it's the best we can do on standard C++.
         switch (vertex.num_buffers) {
         case 0:
-            return BindStatic<0>(scheduler);
+            return BindStatic<0>(device, scheduler);
         case 1:
-            return BindStatic<1>(scheduler);
+            return BindStatic<1>(device, scheduler);
         case 2:
-            return BindStatic<2>(scheduler);
+            return BindStatic<2>(device, scheduler);
         case 3:
-            return BindStatic<3>(scheduler);
+            return BindStatic<3>(device, scheduler);
         case 4:
-            return BindStatic<4>(scheduler);
+            return BindStatic<4>(device, scheduler);
         case 5:
-            return BindStatic<5>(scheduler);
+            return BindStatic<5>(device, scheduler);
         case 6:
-            return BindStatic<6>(scheduler);
+            return BindStatic<6>(device, scheduler);
         case 7:
-            return BindStatic<7>(scheduler);
+            return BindStatic<7>(device, scheduler);
         case 8:
-            return BindStatic<8>(scheduler);
+            return BindStatic<8>(device, scheduler);
         case 9:
-            return BindStatic<9>(scheduler);
+            return BindStatic<9>(device, scheduler);
         case 10:
-            return BindStatic<10>(scheduler);
+            return BindStatic<10>(device, scheduler);
         case 11:
-            return BindStatic<11>(scheduler);
+            return BindStatic<11>(device, scheduler);
         case 12:
-            return BindStatic<12>(scheduler);
+            return BindStatic<12>(device, scheduler);
         case 13:
-            return BindStatic<13>(scheduler);
+            return BindStatic<13>(device, scheduler);
         case 14:
-            return BindStatic<14>(scheduler);
+            return BindStatic<14>(device, scheduler);
         case 15:
-            return BindStatic<15>(scheduler);
+            return BindStatic<15>(device, scheduler);
         case 16:
-            return BindStatic<16>(scheduler);
+            return BindStatic<16>(device, scheduler);
         case 17:
-            return BindStatic<17>(scheduler);
+            return BindStatic<17>(device, scheduler);
         case 18:
-            return BindStatic<18>(scheduler);
+            return BindStatic<18>(device, scheduler);
         case 19:
-            return BindStatic<19>(scheduler);
+            return BindStatic<19>(device, scheduler);
         case 20:
-            return BindStatic<20>(scheduler);
+            return BindStatic<20>(device, scheduler);
         case 21:
-            return BindStatic<21>(scheduler);
+            return BindStatic<21>(device, scheduler);
         case 22:
-            return BindStatic<22>(scheduler);
+            return BindStatic<22>(device, scheduler);
         case 23:
-            return BindStatic<23>(scheduler);
+            return BindStatic<23>(device, scheduler);
         case 24:
-            return BindStatic<24>(scheduler);
+            return BindStatic<24>(device, scheduler);
         case 25:
-            return BindStatic<25>(scheduler);
+            return BindStatic<25>(device, scheduler);
         case 26:
-            return BindStatic<26>(scheduler);
+            return BindStatic<26>(device, scheduler);
         case 27:
-            return BindStatic<27>(scheduler);
+            return BindStatic<27>(device, scheduler);
         case 28:
-            return BindStatic<28>(scheduler);
+            return BindStatic<28>(device, scheduler);
         case 29:
-            return BindStatic<29>(scheduler);
+            return BindStatic<29>(device, scheduler);
         case 30:
-            return BindStatic<30>(scheduler);
+            return BindStatic<30>(device, scheduler);
         case 31:
-            return BindStatic<31>(scheduler);
+            return BindStatic<31>(device, scheduler);
         case 32:
-            return BindStatic<32>(scheduler);
+            return BindStatic<32>(device, scheduler);
         }
         UNREACHABLE();
     }
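An aside, not part of the diff: the comment in the hunk above describes turning the runtime vertex-buffer count into a compile-time template parameter, so the lambda handed to the scheduler captures arrays sized exactly to that count rather than to Maxwell::NumVertexArrays. A simplified, self-contained C++ sketch of the same dispatch idea; every name in it is invented for illustration:

    #include <algorithm>
    #include <array>
    #include <cstddef>
    #include <iostream>

    constexpr std::size_t MaxBuffers = 4;

    // Stand-in for VKScheduler::Record: here the closure just runs immediately.
    template <typename F>
    void Record(F&& closure) {
        closure();
    }

    // The runtime count has become the compile-time N, so only N handles are copied
    // into the closure instead of the worst-case MaxBuffers.
    template <std::size_t N>
    void BindStatic(const std::array<int, MaxBuffers>& all_handles) {
        std::array<int, N> handles{};
        std::copy(all_handles.begin(), all_handles.begin() + N, handles.begin());
        Record([handles] {
            for (const int handle : handles) {
                std::cout << "bind buffer " << handle << '\n';
            }
        });
    }

    // One case per possible count maps the runtime value onto a template instantiation.
    void Bind(std::size_t num_buffers, const std::array<int, MaxBuffers>& handles) {
        switch (num_buffers) {
        case 0: return BindStatic<0>(handles);
        case 1: return BindStatic<1>(handles);
        case 2: return BindStatic<2>(handles);
        case 3: return BindStatic<3>(handles);
        case 4: return BindStatic<4>(handles);
        }
    }

    int main() {
        Bind(2, {7, 9, 0, 0}); // prints "bind buffer 7" and "bind buffer 9"
    }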
@@ -282,6 +291,8 @@ private:
         std::size_t num_buffers = 0;
         std::array<VkBuffer, Maxwell::NumVertexArrays> buffers;
         std::array<VkDeviceSize, Maxwell::NumVertexArrays> offsets;
+        std::array<VkDeviceSize, Maxwell::NumVertexArrays> sizes;
+        std::array<u16, Maxwell::NumVertexArrays> strides;
     } vertex;
 
     struct {
@@ -291,15 +302,23 @@ private:
     } index;
 
     template <std::size_t N>
-    void BindStatic(VKScheduler& scheduler) const {
-        if (index.buffer) {
-            BindStatic<N, true>(scheduler);
+    void BindStatic(const VKDevice& device, VKScheduler& scheduler) const {
+        if (device.IsExtExtendedDynamicStateSupported()) {
+            if (index.buffer) {
+                BindStatic<N, true, true>(scheduler);
+            } else {
+                BindStatic<N, false, true>(scheduler);
+            }
         } else {
-            BindStatic<N, false>(scheduler);
+            if (index.buffer) {
+                BindStatic<N, true, false>(scheduler);
+            } else {
+                BindStatic<N, false, false>(scheduler);
+            }
         }
     }
 
-    template <std::size_t N, bool is_indexed>
+    template <std::size_t N, bool is_indexed, bool has_extended_dynamic_state>
     void BindStatic(VKScheduler& scheduler) const {
         static_assert(N <= Maxwell::NumVertexArrays);
         if constexpr (N == 0) {
@@ -311,6 +330,31 @@ private:
         std::copy(vertex.buffers.begin(), vertex.buffers.begin() + N, buffers.begin());
         std::copy(vertex.offsets.begin(), vertex.offsets.begin() + N, offsets.begin());
 
+        if constexpr (has_extended_dynamic_state) {
+            // With extended dynamic states we can specify the length and stride of a vertex buffer
+            std::array<VkDeviceSize, N> sizes;
+            std::array<u16, N> strides;
+            std::copy(vertex.sizes.begin(), vertex.sizes.begin() + N, sizes.begin());
+            std::copy(vertex.strides.begin(), vertex.strides.begin() + N, strides.begin());
+
+            if constexpr (is_indexed) {
+                scheduler.Record(
+                    [buffers, offsets, sizes, strides, index = index](vk::CommandBuffer cmdbuf) {
+                        cmdbuf.BindIndexBuffer(index.buffer, index.offset, index.type);
+                        cmdbuf.BindVertexBuffers2EXT(0, static_cast<u32>(N), buffers.data(),
+                                                     offsets.data(), sizes.data(),
+                                                     ExpandStrides(strides).data());
+                    });
+            } else {
+                scheduler.Record([buffers, offsets, sizes, strides](vk::CommandBuffer cmdbuf) {
+                    cmdbuf.BindVertexBuffers2EXT(0, static_cast<u32>(N), buffers.data(),
+                                                 offsets.data(), sizes.data(),
+                                                 ExpandStrides(strides).data());
+                });
+            }
+            return;
+        }
+
         if constexpr (is_indexed) {
             // Indexed draw
             scheduler.Record([buffers, offsets, index = index](vk::CommandBuffer cmdbuf) {
@@ -402,7 +446,7 @@ void RasterizerVulkan::Draw(bool is_indexed, bool is_instanced) {
 
     UpdateDynamicStates();
 
-    buffer_bindings.Bind(scheduler);
+    buffer_bindings.Bind(device, scheduler);
 
     BeginTransformFeedback();
 
@@ -893,6 +937,17 @@ void RasterizerVulkan::UpdateDynamicStates() {
     UpdateBlendConstants(regs);
     UpdateDepthBounds(regs);
     UpdateStencilFaces(regs);
+    if (device.IsExtExtendedDynamicStateSupported()) {
+        UpdateCullMode(regs);
+        UpdateDepthBoundsTestEnable(regs);
+        UpdateDepthTestEnable(regs);
+        UpdateDepthWriteEnable(regs);
+        UpdateDepthCompareOp(regs);
+        UpdateFrontFace(regs);
+        UpdatePrimitiveTopology(regs);
+        UpdateStencilOp(regs);
+        UpdateStencilTestEnable(regs);
+    }
 }
 
 void RasterizerVulkan::BeginTransformFeedback() {
@@ -952,13 +1007,13 @@ void RasterizerVulkan::SetupVertexArrays(BufferBindings& buffer_bindings) {
         const GPUVAddr end{regs.vertex_array_limit[index].LimitAddress()};
 
         ASSERT(end >= start);
-        const std::size_t size{end - start};
+        const std::size_t size = end - start;
         if (size == 0) {
-            buffer_bindings.AddVertexBinding(DefaultBuffer(), 0);
+            buffer_bindings.AddVertexBinding(DefaultBuffer(), 0, DEFAULT_BUFFER_SIZE, 0);
             continue;
         }
         const auto info = buffer_cache.UploadMemory(start, size);
-        buffer_bindings.AddVertexBinding(info.handle, info.offset);
+        buffer_bindings.AddVertexBinding(info.handle, info.offset, size, vertex_array.stride);
     }
 }
 
@@ -1310,6 +1365,117 @@ void RasterizerVulkan::UpdateStencilFaces(Tegra::Engines::Maxwell3D::Regs& regs)
     }
 }
 
+void RasterizerVulkan::UpdateCullMode(Tegra::Engines::Maxwell3D::Regs& regs) {
+    if (!state_tracker.TouchCullMode()) {
+        return;
+    }
+    scheduler.Record(
+        [enabled = regs.cull_test_enabled, cull_face = regs.cull_face](vk::CommandBuffer cmdbuf) {
+            cmdbuf.SetCullModeEXT(enabled ? MaxwellToVK::CullFace(cull_face) : VK_CULL_MODE_NONE);
+        });
+}
+
+void RasterizerVulkan::UpdateDepthBoundsTestEnable(Tegra::Engines::Maxwell3D::Regs& regs) {
+    if (!state_tracker.TouchDepthBoundsTestEnable()) {
+        return;
+    }
+    scheduler.Record([enable = regs.depth_bounds_enable](vk::CommandBuffer cmdbuf) {
+        cmdbuf.SetDepthBoundsTestEnableEXT(enable);
+    });
+}
+
+void RasterizerVulkan::UpdateDepthTestEnable(Tegra::Engines::Maxwell3D::Regs& regs) {
+    if (!state_tracker.TouchDepthTestEnable()) {
+        return;
+    }
+    scheduler.Record([enable = regs.depth_test_enable](vk::CommandBuffer cmdbuf) {
+        cmdbuf.SetDepthTestEnableEXT(enable);
+    });
+}
+
+void RasterizerVulkan::UpdateDepthWriteEnable(Tegra::Engines::Maxwell3D::Regs& regs) {
+    if (!state_tracker.TouchDepthWriteEnable()) {
+        return;
+    }
+    scheduler.Record([enable = regs.depth_write_enabled](vk::CommandBuffer cmdbuf) {
+        cmdbuf.SetDepthWriteEnableEXT(enable);
+    });
+}
+
+void RasterizerVulkan::UpdateDepthCompareOp(Tegra::Engines::Maxwell3D::Regs& regs) {
+    if (!state_tracker.TouchDepthCompareOp()) {
+        return;
+    }
+    scheduler.Record([func = regs.depth_test_func](vk::CommandBuffer cmdbuf) {
+        cmdbuf.SetDepthCompareOpEXT(MaxwellToVK::ComparisonOp(func));
+    });
+}
+
+void RasterizerVulkan::UpdateFrontFace(Tegra::Engines::Maxwell3D::Regs& regs) {
+    if (!state_tracker.TouchFrontFace()) {
+        return;
+    }
+
+    VkFrontFace front_face = MaxwellToVK::FrontFace(regs.front_face);
+    if (regs.screen_y_control.triangle_rast_flip != 0) {
+        front_face = front_face == VK_FRONT_FACE_CLOCKWISE ? VK_FRONT_FACE_COUNTER_CLOCKWISE
+                                                           : VK_FRONT_FACE_CLOCKWISE;
+    }
+    scheduler.Record(
+        [front_face](vk::CommandBuffer cmdbuf) { cmdbuf.SetFrontFaceEXT(front_face); });
+}
+
+void RasterizerVulkan::UpdatePrimitiveTopology(Tegra::Engines::Maxwell3D::Regs& regs) {
+    if (!state_tracker.TouchPrimitiveTopology()) {
+        return;
+    }
+    const Maxwell::PrimitiveTopology primitive_topology = regs.draw.topology.Value();
+    scheduler.Record([this, primitive_topology](vk::CommandBuffer cmdbuf) {
+        cmdbuf.SetPrimitiveTopologyEXT(MaxwellToVK::PrimitiveTopology(device, primitive_topology));
+    });
+}
+
+void RasterizerVulkan::UpdateStencilOp(Tegra::Engines::Maxwell3D::Regs& regs) {
+    if (!state_tracker.TouchStencilOp()) {
+        return;
+    }
+    const Maxwell::StencilOp fail = regs.stencil_front_op_fail;
+    const Maxwell::StencilOp zfail = regs.stencil_front_op_zfail;
+    const Maxwell::StencilOp zpass = regs.stencil_front_op_zpass;
+    const Maxwell::ComparisonOp compare = regs.stencil_front_func_func;
+    if (regs.stencil_two_side_enable) {
+        scheduler.Record([fail, zfail, zpass, compare](vk::CommandBuffer cmdbuf) {
+            cmdbuf.SetStencilOpEXT(VK_STENCIL_FACE_FRONT_AND_BACK, MaxwellToVK::StencilOp(fail),
+                                   MaxwellToVK::StencilOp(zpass), MaxwellToVK::StencilOp(zfail),
+                                   MaxwellToVK::ComparisonOp(compare));
+        });
+    } else {
+        const Maxwell::StencilOp back_fail = regs.stencil_back_op_fail;
+        const Maxwell::StencilOp back_zfail = regs.stencil_back_op_zfail;
+        const Maxwell::StencilOp back_zpass = regs.stencil_back_op_zpass;
+        const Maxwell::ComparisonOp back_compare = regs.stencil_back_func_func;
+        scheduler.Record([fail, zfail, zpass, compare, back_fail, back_zfail, back_zpass,
+                          back_compare](vk::CommandBuffer cmdbuf) {
+            cmdbuf.SetStencilOpEXT(VK_STENCIL_FACE_FRONT_BIT, MaxwellToVK::StencilOp(fail),
+                                   MaxwellToVK::StencilOp(zpass), MaxwellToVK::StencilOp(zfail),
+                                   MaxwellToVK::ComparisonOp(compare));
+            cmdbuf.SetStencilOpEXT(VK_STENCIL_FACE_BACK_BIT, MaxwellToVK::StencilOp(back_fail),
+                                   MaxwellToVK::StencilOp(back_zpass),
+                                   MaxwellToVK::StencilOp(back_zfail),
+                                   MaxwellToVK::ComparisonOp(back_compare));
+        });
+    }
+}
+
+void RasterizerVulkan::UpdateStencilTestEnable(Tegra::Engines::Maxwell3D::Regs& regs) {
+    if (!state_tracker.TouchStencilTestEnable()) {
+        return;
+    }
+    scheduler.Record([enable = regs.stencil_enable](vk::CommandBuffer cmdbuf) {
+        cmdbuf.SetStencilTestEnableEXT(enable);
+    });
+}
+
 std::size_t RasterizerVulkan::CalculateGraphicsStreamBufferSize(bool is_indexed) const {
     std::size_t size = CalculateVertexArraysSize();
     if (is_indexed) {
diff --git a/src/video_core/renderer_vulkan/vk_rasterizer.h b/src/video_core/renderer_vulkan/vk_rasterizer.h
index e1cd1e392..923178b0b 100644
--- a/src/video_core/renderer_vulkan/vk_rasterizer.h
+++ b/src/video_core/renderer_vulkan/vk_rasterizer.h
@@ -245,6 +245,16 @@ private:
     void UpdateDepthBounds(Tegra::Engines::Maxwell3D::Regs& regs);
     void UpdateStencilFaces(Tegra::Engines::Maxwell3D::Regs& regs);
 
+    void UpdateCullMode(Tegra::Engines::Maxwell3D::Regs& regs);
+    void UpdateDepthBoundsTestEnable(Tegra::Engines::Maxwell3D::Regs& regs);
+    void UpdateDepthTestEnable(Tegra::Engines::Maxwell3D::Regs& regs);
+    void UpdateDepthWriteEnable(Tegra::Engines::Maxwell3D::Regs& regs);
+    void UpdateDepthCompareOp(Tegra::Engines::Maxwell3D::Regs& regs);
+    void UpdateFrontFace(Tegra::Engines::Maxwell3D::Regs& regs);
+    void UpdatePrimitiveTopology(Tegra::Engines::Maxwell3D::Regs& regs);
+    void UpdateStencilOp(Tegra::Engines::Maxwell3D::Regs& regs);
+    void UpdateStencilTestEnable(Tegra::Engines::Maxwell3D::Regs& regs);
+
     std::size_t CalculateGraphicsStreamBufferSize(bool is_indexed) const;
 
     std::size_t CalculateComputeStreamBufferSize() const;
diff --git a/src/video_core/renderer_vulkan/vk_state_tracker.cpp b/src/video_core/renderer_vulkan/vk_state_tracker.cpp
index 94a89e388..e5a583dd5 100644
--- a/src/video_core/renderer_vulkan/vk_state_tracker.cpp
+++ b/src/video_core/renderer_vulkan/vk_state_tracker.cpp
@@ -36,6 +36,15 @@ Flags MakeInvalidationFlags() {
     flags[BlendConstants] = true;
     flags[DepthBounds] = true;
     flags[StencilProperties] = true;
+    flags[CullMode] = true;
+    flags[DepthBoundsEnable] = true;
+    flags[DepthTestEnable] = true;
+    flags[DepthWriteEnable] = true;
+    flags[DepthCompareOp] = true;
+    flags[FrontFace] = true;
+    flags[PrimitiveTopology] = true;
+    flags[StencilOp] = true;
+    flags[StencilTestEnable] = true;
     return flags;
 }
 
@@ -75,6 +84,57 @@ void SetupDirtyStencilProperties(Tables& tables) {
     table[OFF(stencil_back_func_mask)] = StencilProperties;
 }
 
+void SetupDirtyCullMode(Tables& tables) {
+    auto& table = tables[0];
+    table[OFF(cull_face)] = CullMode;
+    table[OFF(cull_test_enabled)] = CullMode;
+}
+
+void SetupDirtyDepthBoundsEnable(Tables& tables) {
+    tables[0][OFF(depth_bounds_enable)] = DepthBoundsEnable;
+}
+
+void SetupDirtyDepthTestEnable(Tables& tables) {
+    tables[0][OFF(depth_test_enable)] = DepthTestEnable;
+}
+
+void SetupDirtyDepthWriteEnable(Tables& tables) {
+    tables[0][OFF(depth_write_enabled)] = DepthWriteEnable;
+}
+
+void SetupDirtyDepthCompareOp(Tables& tables) {
+    tables[0][OFF(depth_test_func)] = DepthCompareOp;
+}
+
+void SetupDirtyFrontFace(Tables& tables) {
+    auto& table = tables[0];
+    table[OFF(front_face)] = FrontFace;
+    table[OFF(screen_y_control)] = FrontFace;
+}
+
+void SetupDirtyPrimitiveTopology(Tables& tables) {
+    tables[0][OFF(draw.topology)] = PrimitiveTopology;
+}
+
+void SetupDirtyStencilOp(Tables& tables) {
+    auto& table = tables[0];
+    table[OFF(stencil_front_op_fail)] = StencilOp;
+    table[OFF(stencil_front_op_zfail)] = StencilOp;
+    table[OFF(stencil_front_op_zpass)] = StencilOp;
+    table[OFF(stencil_front_func_func)] = StencilOp;
+    table[OFF(stencil_back_op_fail)] = StencilOp;
+    table[OFF(stencil_back_op_zfail)] = StencilOp;
+    table[OFF(stencil_back_op_zpass)] = StencilOp;
+    table[OFF(stencil_back_func_func)] = StencilOp;
+
+    // Table 0 is used by StencilProperties
+    tables[1][OFF(stencil_two_side_enable)] = StencilOp;
+}
+
+void SetupDirtyStencilTestEnable(Tables& tables) {
+    tables[0][OFF(stencil_enable)] = StencilTestEnable;
+}
+
 } // Anonymous namespace
 
 StateTracker::StateTracker(Core::System& system)
@@ -90,6 +150,14 @@ void StateTracker::Initialize() {
     SetupDirtyBlendConstants(tables);
     SetupDirtyDepthBounds(tables);
     SetupDirtyStencilProperties(tables);
+    SetupDirtyCullMode(tables);
+    SetupDirtyDepthBoundsEnable(tables);
+    SetupDirtyDepthTestEnable(tables);
+    SetupDirtyDepthWriteEnable(tables);
+    SetupDirtyDepthCompareOp(tables);
+    SetupDirtyFrontFace(tables);
+    SetupDirtyPrimitiveTopology(tables);
+    SetupDirtyStencilOp(tables);
 }
 
 void StateTracker::InvalidateCommandBufferState() {
diff --git a/src/video_core/renderer_vulkan/vk_state_tracker.h b/src/video_core/renderer_vulkan/vk_state_tracker.h
index 03bc415b2..54ca0d6c6 100644
--- a/src/video_core/renderer_vulkan/vk_state_tracker.h
+++ b/src/video_core/renderer_vulkan/vk_state_tracker.h
@@ -26,6 +26,16 @@ enum : u8 {
     DepthBounds,
     StencilProperties,
 
+    CullMode,
+    DepthBoundsEnable,
+    DepthTestEnable,
+    DepthWriteEnable,
+    DepthCompareOp,
+    FrontFace,
+    PrimitiveTopology,
+    StencilOp,
+    StencilTestEnable,
+
     Last
 };
 static_assert(Last <= std::numeric_limits<u8>::max());
@@ -64,6 +74,46 @@ public:
         return Exchange(Dirty::StencilProperties, false);
     }
 
+    bool TouchCullMode() {
+        return Exchange(Dirty::CullMode, false);
+    }
+
+    bool TouchDepthBoundsTestEnable() {
+        return Exchange(Dirty::DepthBoundsEnable, false);
+    }
+
+    bool TouchDepthTestEnable() {
+        return Exchange(Dirty::DepthTestEnable, false);
+    }
+
+    bool TouchDepthBoundsEnable() {
+        return Exchange(Dirty::DepthBoundsEnable, false);
+    }
+
+    bool TouchDepthWriteEnable() {
+        return Exchange(Dirty::DepthWriteEnable, false);
+    }
+
+    bool TouchDepthCompareOp() {
+        return Exchange(Dirty::DepthCompareOp, false);
+    }
+
+    bool TouchFrontFace() {
+        return Exchange(Dirty::FrontFace, false);
+    }
+
+    bool TouchPrimitiveTopology() {
+        return Exchange(Dirty::PrimitiveTopology, false);
+    }
+
+    bool TouchStencilOp() {
+        return Exchange(Dirty::StencilOp, false);
+    }
+
+    bool TouchStencilTestEnable() {
+        return Exchange(Dirty::StencilTestEnable, false);
+    }
+
 private:
     bool Exchange(std::size_t id, bool new_value) const noexcept {
         auto& flags = system.GPU().Maxwell3D().dirty.flags;