author     ReinUsesLisp    2021-01-23 17:59:32 -0300
committer  ReinUsesLisp    2021-02-13 02:18:38 -0300
commit     025fe458aef87d28c68dc0d4c0f2c43492b62c18 (patch)
tree       fbd93f27fb86a5230fae6168a6ee791f52009856
parent     vk_buffer_cache: Add support for null index buffers (diff)
vk_staging_buffer_pool: Fix softlock when stream buffer overflows
There was still a code path that could wait on a timeline semaphore tick that would never be signalled. While we are at it, make use of more STL algorithms.
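
To make the failure mode concrete, here is a minimal standalone sketch of the scenario described above. The ToyScheduler type, the region count, and all numbers are made up for illustration and are not yuzu's real scheduler or staging pool API; only the shape of the logic follows the commit.

#include <algorithm>
#include <array>
#include <cstddef>
#include <cstdint>
#include <cstdio>

// Toy stand-ins for the real scheduler and staging pool; only the shape of
// the logic is taken from the commit, not the actual yuzu classes.
constexpr std::size_t NUM_SYNCS = 16;

struct ToyScheduler {
    std::uint64_t current_tick = 1; // tick of the command buffer still being recorded
    std::uint64_t gpu_tick = 0;     // highest tick the GPU has signalled so far

    std::uint64_t CurrentTick() const { return current_tick; }
    bool IsFree(std::uint64_t tick) const { return tick <= gpu_tick; }
    // The removed overflow path effectively did this; with tick == current_tick
    // and no submission coming afterwards, the loop would never exit:
    //   void Wait(std::uint64_t tick) { while (!IsFree(tick)) { /* softlock */ } }
};

int main() {
    ToyScheduler scheduler;
    std::array<std::uint64_t, NUM_SYNCS> sync_ticks{};

    // The stream buffer wraps around: its tail regions were just stamped with
    // the tick of work that has not been submitted (or signalled) yet.
    std::fill(sync_ticks.begin() + 12, sync_ticks.end(), scheduler.CurrentTick());

    // Fixed behaviour: instead of blocking on those ticks, check whether any
    // region in the wanted range is still in flight and fall back to a
    // dedicated staging buffer if so.
    const auto regions_active = [&](std::size_t begin, std::size_t end) {
        return std::any_of(sync_ticks.begin() + begin, sync_ticks.begin() + end,
                           [&](std::uint64_t tick) { return !scheduler.IsFree(tick); });
    };
    if (regions_active(12, NUM_SYNCS)) {
        std::puts("regions still in flight -> hand out a one-off staging buffer, no wait");
    } else {
        std::puts("regions free -> reuse the stream buffer");
    }
    return 0;
}

The diff below applies the same idea inside StagingBufferPool::GetStreamBuffer through the new AreRegionsActive helper, instead of calling scheduler.Wait on the overflow path.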
-rw-r--r--  src/video_core/renderer_vulkan/vk_staging_buffer_pool.cpp   37
-rw-r--r--  src/video_core/renderer_vulkan/vk_staging_buffer_pool.h      2
2 files changed, 20 insertions, 19 deletions
diff --git a/src/video_core/renderer_vulkan/vk_staging_buffer_pool.cpp b/src/video_core/renderer_vulkan/vk_staging_buffer_pool.cpp
index 4baf791d4..dfd8c8e5a 100644
--- a/src/video_core/renderer_vulkan/vk_staging_buffer_pool.cpp
+++ b/src/video_core/renderer_vulkan/vk_staging_buffer_pool.cpp
@@ -142,33 +142,27 @@ void StagingBufferPool::TickFrame() {
 }
 
 StagingBufferRef StagingBufferPool::GetStreamBuffer(size_t size) {
-    for (size_t region = Region(free_iterator) + 1,
-                region_end = std::min(Region(iterator + size) + 1, NUM_SYNCS);
-         region < region_end; ++region) {
-        // If we'd have to wait, get a staging buffer to avoid waiting
-        if (!scheduler.IsFree(sync_ticks[region])) {
-            return GetStagingBuffer(size, MemoryUsage::Upload);
-        }
-    }
-    for (size_t region = Region(used_iterator), region_end = Region(iterator); region < region_end;
-         ++region) {
-        sync_ticks[region] = scheduler.CurrentTick();
+    if (AreRegionsActive(Region(free_iterator) + 1,
+                         std::min(Region(iterator + size) + 1, NUM_SYNCS))) {
+        // Avoid waiting for the previous usages to be free
+        return GetStagingBuffer(size, MemoryUsage::Upload);
     }
+    const u64 current_tick = scheduler.CurrentTick();
+    std::fill(sync_ticks.begin() + Region(used_iterator), sync_ticks.begin() + Region(iterator),
+              current_tick);
     used_iterator = iterator;
+    free_iterator = std::max(free_iterator, iterator + size);
 
-    if (iterator + size > free_iterator) {
-        free_iterator = iterator + size;
-    }
     if (iterator + size > STREAM_BUFFER_SIZE) {
-        for (size_t region = Region(used_iterator); region < NUM_SYNCS; ++region) {
-            sync_ticks[region] = scheduler.CurrentTick();
-        }
+        std::fill(sync_ticks.begin() + Region(used_iterator), sync_ticks.begin() + NUM_SYNCS,
+                  current_tick);
         used_iterator = 0;
         iterator = 0;
         free_iterator = size;
 
-        for (size_t region = 0, region_end = Region(size); region <= region_end; ++region) {
-            scheduler.Wait(sync_ticks[region]);
+        if (AreRegionsActive(0, Region(size) + 1)) {
+            // Avoid waiting for the previous usages to be free
+            return GetStagingBuffer(size, MemoryUsage::Upload);
         }
     }
     const size_t offset = iterator;
@@ -180,6 +174,11 @@ StagingBufferRef StagingBufferPool::GetStreamBuffer(size_t size) {
     };
 }
 
+bool StagingBufferPool::AreRegionsActive(size_t region_begin, size_t region_end) const {
+    return std::any_of(sync_ticks.begin() + region_begin, sync_ticks.begin() + region_end,
+                       [this](u64 sync_tick) { return !scheduler.IsFree(sync_tick); });
+};
+
 StagingBufferRef StagingBufferPool::GetStagingBuffer(size_t size, MemoryUsage usage) {
     if (const std::optional<StagingBufferRef> ref = TryGetReservedBuffer(size, usage)) {
         return *ref;
diff --git a/src/video_core/renderer_vulkan/vk_staging_buffer_pool.h b/src/video_core/renderer_vulkan/vk_staging_buffer_pool.h
index 8e4cadf9b..69f7618de 100644
--- a/src/video_core/renderer_vulkan/vk_staging_buffer_pool.h
+++ b/src/video_core/renderer_vulkan/vk_staging_buffer_pool.h
@@ -67,6 +67,8 @@ private:
 
     StagingBufferRef GetStreamBuffer(size_t size);
 
+    bool AreRegionsActive(size_t region_begin, size_t region_end) const;
+
     StagingBufferRef GetStagingBuffer(size_t size, MemoryUsage usage);
 
     std::optional<StagingBufferRef> TryGetReservedBuffer(size_t size, MemoryUsage usage);
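
As a side note on the "more STL algorithms" part of the message: the removed index loops and the new std::fill / std::any_of calls cover the same half-open region ranges. The throwaway check below uses toy values only, independent of the yuzu code, and merely illustrates that the two styles are equivalent.

#include <algorithm>
#include <array>
#include <cassert>
#include <cstdint>

int main() {
    std::array<std::uint64_t, 16> a{};
    std::array<std::uint64_t, 16> b{};
    const std::uint64_t tick = 42;
    const std::size_t begin = 3;
    const std::size_t end = 11;

    // Old style: stamp each region index by hand.
    for (std::size_t region = begin; region < end; ++region) {
        a[region] = tick;
    }
    // New style: one std::fill over the same half-open range.
    std::fill(b.begin() + begin, b.begin() + end, tick);
    assert(a == b);

    // Old style: scan the regions and remember whether any was busy.
    const auto is_busy = [](std::uint64_t t) { return t != 0; };
    bool any_manual = false;
    for (std::size_t region = begin; region < end; ++region) {
        any_manual = any_manual || is_busy(a[region]);
    }
    // New style: std::any_of over the same range.
    const bool any_stl = std::any_of(b.begin() + begin, b.begin() + end, is_busy);
    assert(any_manual == any_stl);
    return 0;
}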