Diffstat (limited to 'src')
 src/video_core/renderer_vulkan/vk_buffer_cache.cpp  |  4
 src/video_core/renderer_vulkan/vk_buffer_cache.h    |  2
 src/video_core/renderer_vulkan/vk_scheduler.cpp     | 16
 src/video_core/renderer_vulkan/vk_scheduler.h       | 78
 src/video_core/renderer_vulkan/vk_stream_buffer.cpp |  8
 src/video_core/renderer_vulkan/vk_stream_buffer.h   |  2
 6 files changed, 60 insertions, 50 deletions
diff --git a/src/video_core/renderer_vulkan/vk_buffer_cache.cpp b/src/video_core/renderer_vulkan/vk_buffer_cache.cpp
index 02a9f5ecb..d2e9f4031 100644
--- a/src/video_core/renderer_vulkan/vk_buffer_cache.cpp
+++ b/src/video_core/renderer_vulkan/vk_buffer_cache.cpp
@@ -109,8 +109,8 @@ void VKBufferCache::Reserve(std::size_t max_size) {
     }
 }
 
-VKExecutionContext VKBufferCache::Send(VKExecutionContext exctx) {
-    return stream_buffer->Send(exctx, buffer_offset - buffer_offset_base);
+void VKBufferCache::Send() {
+    stream_buffer->Send(buffer_offset - buffer_offset_base);
 }
 
 void VKBufferCache::AlignBuffer(std::size_t alignment) {
diff --git a/src/video_core/renderer_vulkan/vk_buffer_cache.h b/src/video_core/renderer_vulkan/vk_buffer_cache.h
index 3edf460df..49f13bcdc 100644
--- a/src/video_core/renderer_vulkan/vk_buffer_cache.h
+++ b/src/video_core/renderer_vulkan/vk_buffer_cache.h
@@ -77,7 +77,7 @@ public:
     void Reserve(std::size_t max_size);
 
     /// Ensures that the set data is sent to the device.
-    [[nodiscard]] VKExecutionContext Send(VKExecutionContext exctx);
+    void Send();
 
     /// Returns the buffer cache handle.
     vk::Buffer GetBuffer() const {
diff --git a/src/video_core/renderer_vulkan/vk_scheduler.cpp b/src/video_core/renderer_vulkan/vk_scheduler.cpp
index f1fea1871..0f8116458 100644
--- a/src/video_core/renderer_vulkan/vk_scheduler.cpp
+++ b/src/video_core/renderer_vulkan/vk_scheduler.cpp
@@ -19,23 +19,19 @@ VKScheduler::VKScheduler(const VKDevice& device, VKResourceManager& resource_man
 
 VKScheduler::~VKScheduler() = default;
 
-VKExecutionContext VKScheduler::GetExecutionContext() const {
-    return VKExecutionContext(current_fence, current_cmdbuf);
-}
-
-VKExecutionContext VKScheduler::Flush(vk::Semaphore semaphore) {
+void VKScheduler::Flush(bool release_fence, vk::Semaphore semaphore) {
     SubmitExecution(semaphore);
-    current_fence->Release();
+    if (release_fence)
+        current_fence->Release();
     AllocateNewContext();
-    return GetExecutionContext();
 }
 
-VKExecutionContext VKScheduler::Finish(vk::Semaphore semaphore) {
+void VKScheduler::Finish(bool release_fence, vk::Semaphore semaphore) {
     SubmitExecution(semaphore);
     current_fence->Wait();
-    current_fence->Release();
+    if (release_fence)
+        current_fence->Release();
     AllocateNewContext();
-    return GetExecutionContext();
 }
 
 void VKScheduler::SubmitExecution(vk::Semaphore semaphore) {
diff --git a/src/video_core/renderer_vulkan/vk_scheduler.h b/src/video_core/renderer_vulkan/vk_scheduler.h
index cfaf5376f..0e5b49c7f 100644
--- a/src/video_core/renderer_vulkan/vk_scheduler.h
+++ b/src/video_core/renderer_vulkan/vk_scheduler.h
@@ -10,10 +10,43 @@
 namespace Vulkan {
 
 class VKDevice;
-class VKExecutionContext;
 class VKFence;
 class VKResourceManager;
 
+class VKFenceView {
+public:
+    VKFenceView() = default;
+    VKFenceView(VKFence* const& fence) : fence{fence} {}
+
+    VKFence* operator->() const noexcept {
+        return fence;
+    }
+
+    operator VKFence&() const noexcept {
+        return *fence;
+    }
+
+private:
+    VKFence* const& fence;
+};
+
+class VKCommandBufferView {
+public:
+    VKCommandBufferView() = default;
+    VKCommandBufferView(const vk::CommandBuffer& cmdbuf) : cmdbuf{cmdbuf} {}
+
+    const vk::CommandBuffer* operator->() const noexcept {
+        return &cmdbuf;
+    }
+
+    operator vk::CommandBuffer() const noexcept {
+        return cmdbuf;
+    }
+
+private:
+    const vk::CommandBuffer& cmdbuf;
+};
+
 /// The scheduler abstracts command buffer and fence management with an interface that's able to do
 /// OpenGL-like operations on Vulkan command buffers.
 class VKScheduler {
@@ -21,16 +54,21 @@ public:
     explicit VKScheduler(const VKDevice& device, VKResourceManager& resource_manager);
     ~VKScheduler();
 
-    /// Gets the current execution context.
-    [[nodiscard]] VKExecutionContext GetExecutionContext() const;
+    /// Gets a reference to the current fence.
+    VKFenceView GetFence() const {
+        return current_fence;
+    }
+
+    /// Gets a reference to the current command buffer.
+    VKCommandBufferView GetCommandBuffer() const {
+        return current_cmdbuf;
+    }
 
-    /// Sends the current execution context to the GPU. It invalidates the current execution context
-    /// and returns a new one.
-    VKExecutionContext Flush(vk::Semaphore semaphore = nullptr);
+    /// Sends the current execution context to the GPU.
+    void Flush(bool release_fence = true, vk::Semaphore semaphore = nullptr);
 
-    /// Sends the current execution context to the GPU and waits for it to complete. It invalidates
-    /// the current execution context and returns a new one.
-    VKExecutionContext Finish(vk::Semaphore semaphore = nullptr);
+    /// Sends the current execution context to the GPU and waits for it to complete.
+    void Finish(bool release_fence = true, vk::Semaphore semaphore = nullptr);
 
 private:
     void SubmitExecution(vk::Semaphore semaphore);
@@ -44,26 +82,4 @@ private:
     VKFence* next_fence = nullptr;
 };
 
-class VKExecutionContext {
-    friend class VKScheduler;
-
-public:
-    VKExecutionContext() = default;
-
-    VKFence& GetFence() const {
-        return *fence;
-    }
-
-    vk::CommandBuffer GetCommandBuffer() const {
-        return cmdbuf;
-    }
-
-private:
-    explicit VKExecutionContext(VKFence* fence, vk::CommandBuffer cmdbuf)
-        : fence{fence}, cmdbuf{cmdbuf} {}
-
-    VKFence* fence{};
-    vk::CommandBuffer cmdbuf;
-};
-
 } // namespace Vulkan
diff --git a/src/video_core/renderer_vulkan/vk_stream_buffer.cpp b/src/video_core/renderer_vulkan/vk_stream_buffer.cpp
index 58ffa42f2..62f1427f5 100644
--- a/src/video_core/renderer_vulkan/vk_stream_buffer.cpp
+++ b/src/video_core/renderer_vulkan/vk_stream_buffer.cpp
@@ -46,12 +46,12 @@ std::tuple<u8*, u64, bool> VKStreamBuffer::Reserve(u64 size) {
     return {mapped_pointer + offset, offset, invalidation_mark.has_value()};
 }
 
-VKExecutionContext VKStreamBuffer::Send(VKExecutionContext exctx, u64 size) {
+void VKStreamBuffer::Send(u64 size) {
     ASSERT_MSG(size <= mapped_size, "Reserved size is too small");
 
     if (invalidation_mark) {
         // TODO(Rodrigo): Find a better way to invalidate than waiting for all watches to finish.
-        exctx = scheduler.Flush();
+        scheduler.Flush();
         std::for_each(watches.begin(), watches.begin() + *invalidation_mark,
                       [&](auto& resource) { resource->Wait(); });
         invalidation_mark = std::nullopt;
@@ -62,11 +62,9 @@ VKExecutionContext VKStreamBuffer::Send(VKExecutionContext exctx, u64 size) {
         ReserveWatches(WATCHES_RESERVE_CHUNK);
     }
     // Add a watch for this allocation.
-    watches[used_watches++]->Watch(exctx.GetFence());
+    watches[used_watches++]->Watch(scheduler.GetFence());
 
     offset += size;
-
-    return exctx;
 }
 
 void VKStreamBuffer::CreateBuffers(VKMemoryManager& memory_manager, vk::BufferUsageFlags usage) {
diff --git a/src/video_core/renderer_vulkan/vk_stream_buffer.h b/src/video_core/renderer_vulkan/vk_stream_buffer.h
index 69d036ccd..842e54162 100644
--- a/src/video_core/renderer_vulkan/vk_stream_buffer.h
+++ b/src/video_core/renderer_vulkan/vk_stream_buffer.h
@@ -37,7 +37,7 @@ public:
     std::tuple<u8*, u64, bool> Reserve(u64 size);
 
     /// Ensures that "size" bytes of memory are available to the GPU, potentially recording a copy.
-    [[nodiscard]] VKExecutionContext Send(VKExecutionContext exctx, u64 size);
+    void Send(u64 size);
 
     vk::Buffer GetBuffer() const {
         return *buffer;
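
Overall, the diff replaces the pattern of threading a VKExecutionContext through every call (and relying on [[nodiscard]] so callers keep the returned value) with direct queries on the scheduler. The following standalone sketch uses invented stand-in types, none of which exist in yuzu, purely to contrast the two call-site shapes:

#include <iostream>

struct Fence {};

struct CommandBuffer {
    void Draw() const { std::cout << "draw\n"; }
};

// Old shape: the execution context travels through every helper and must be
// returned, otherwise the caller keeps working with a stale fence/command buffer.
struct ExecutionContext {
    Fence* fence = nullptr;
    CommandBuffer cmdbuf;
};

ExecutionContext UploadOldStyle(ExecutionContext exctx) {
    exctx.cmdbuf.Draw();
    return exctx; // every caller has to capture this
}

// New shape: the scheduler owns the current fence/command buffer and hands out
// lightweight views on demand, so helpers take no context and return nothing.
struct Scheduler {
    Fence* GetFence() const { return current_fence; }
    const CommandBuffer& GetCommandBuffer() const { return current_cmdbuf; }

    Fence* current_fence = nullptr;
    CommandBuffer current_cmdbuf;
};

void UploadNewStyle(const Scheduler& scheduler) {
    scheduler.GetCommandBuffer().Draw(); // nothing to thread through or return
}

int main() {
    Fence fence;
    UploadOldStyle(ExecutionContext{&fence, CommandBuffer{}});

    Scheduler scheduler{&fence, CommandBuffer{}};
    UploadNewStyle(scheduler);
    return 0;
}
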