summaryrefslogtreecommitdiff
path: root/src/video_core/buffer_cache
diff options
context:
space:
mode:
author: ReinUsesLisp 2020-05-20 23:55:40 -0300
committer: ReinUsesLisp 2020-05-21 16:44:00 -0300
commit 19d4f28001d3a8e28b41187a7940d14d0a8d708c (patch)
tree 4d37b3407d89641cac1a1621792dc97bbcead2fe /src/video_core/buffer_cache
parent buffer_cache: Use boost::intrusive::set for caching (diff)
download: yuzu-19d4f28001d3a8e28b41187a7940d14d0a8d708c.tar.gz
yuzu-19d4f28001d3a8e28b41187a7940d14d0a8d708c.tar.xz
yuzu-19d4f28001d3a8e28b41187a7940d14d0a8d708c.zip
buffer_cache: Use boost::container::small_vector for maps in range
Most overlaps in the buffer cache only contain one mapped address. We can avoid close to all heap allocations once the buffer cache is warmed up by using a small_vector with a stack size of one.
Diffstat (limited to 'src/video_core/buffer_cache')
-rw-r--r--  src/video_core/buffer_cache/buffer_cache.h  28
1 file changed, 15 insertions(+), 13 deletions(-)
diff --git a/src/video_core/buffer_cache/buffer_cache.h b/src/video_core/buffer_cache/buffer_cache.h
index fb12af9d8..0c8500c04 100644
--- a/src/video_core/buffer_cache/buffer_cache.h
+++ b/src/video_core/buffer_cache/buffer_cache.h
@@ -12,6 +12,7 @@
12#include <utility> 12#include <utility>
13#include <vector> 13#include <vector>
14 14
15#include <boost/container/small_vector.hpp>
15#include <boost/icl/interval_map.hpp> 16#include <boost/icl/interval_map.hpp>
16#include <boost/icl/interval_set.hpp> 17#include <boost/icl/interval_set.hpp>
17#include <boost/intrusive/set.hpp> 18#include <boost/intrusive/set.hpp>
@@ -33,6 +34,10 @@ namespace VideoCommon {
33 34
34template <typename OwnerBuffer, typename BufferType, typename StreamBuffer> 35template <typename OwnerBuffer, typename BufferType, typename StreamBuffer>
35class BufferCache { 36class BufferCache {
37 using IntervalSet = boost::icl::interval_set<VAddr>;
38 using IntervalType = typename IntervalSet::interval_type;
39 using VectorMapInterval = boost::container::small_vector<MapInterval*, 1>;
40
36public: 41public:
37 using BufferInfo = std::pair<BufferType, u64>; 42 using BufferInfo = std::pair<BufferType, u64>;
38 43
@@ -133,7 +138,7 @@ public:
133 void FlushRegion(VAddr addr, std::size_t size) { 138 void FlushRegion(VAddr addr, std::size_t size) {
134 std::lock_guard lock{mutex}; 139 std::lock_guard lock{mutex};
135 140
136 std::vector<MapInterval*> objects = GetMapsInRange(addr, size); 141 VectorMapInterval objects = GetMapsInRange(addr, size);
137 std::sort(objects.begin(), objects.end(), 142 std::sort(objects.begin(), objects.end(),
138 [](MapInterval* lhs, MapInterval* rhs) { return lhs->ticks < rhs->ticks; }); 143 [](MapInterval* lhs, MapInterval* rhs) { return lhs->ticks < rhs->ticks; });
139 for (MapInterval* object : objects) { 144 for (MapInterval* object : objects) {
@@ -148,7 +153,7 @@ public:
148 bool MustFlushRegion(VAddr addr, std::size_t size) { 153 bool MustFlushRegion(VAddr addr, std::size_t size) {
149 std::lock_guard lock{mutex}; 154 std::lock_guard lock{mutex};
150 155
151 const std::vector<MapInterval*> objects = GetMapsInRange(addr, size); 156 const VectorMapInterval objects = GetMapsInRange(addr, size);
152 return std::any_of(objects.cbegin(), objects.cend(), [](const MapInterval* map) { 157 return std::any_of(objects.cbegin(), objects.cend(), [](const MapInterval* map) {
153 return map->is_modified && map->is_registered; 158 return map->is_modified && map->is_registered;
154 }); 159 });
@@ -158,8 +163,7 @@ public:
158 void InvalidateRegion(VAddr addr, u64 size) { 163 void InvalidateRegion(VAddr addr, u64 size) {
159 std::lock_guard lock{mutex}; 164 std::lock_guard lock{mutex};
160 165
161 std::vector<MapInterval*> objects = GetMapsInRange(addr, size); 166 for (auto& object : GetMapsInRange(addr, size)) {
162 for (auto& object : objects) {
163 if (object->is_registered) { 167 if (object->is_registered) {
164 Unregister(object); 168 Unregister(object);
165 } 169 }
@@ -314,7 +318,7 @@ protected:
314private: 318private:
315 MapInterval* MapAddress(const OwnerBuffer& block, GPUVAddr gpu_addr, VAddr cpu_addr, 319 MapInterval* MapAddress(const OwnerBuffer& block, GPUVAddr gpu_addr, VAddr cpu_addr,
316 std::size_t size) { 320 std::size_t size) {
317 std::vector<MapInterval*> overlaps = GetMapsInRange(cpu_addr, size); 321 const VectorMapInterval overlaps = GetMapsInRange(cpu_addr, size);
318 if (overlaps.empty()) { 322 if (overlaps.empty()) {
319 auto& memory_manager = system.GPU().MemoryManager(); 323 auto& memory_manager = system.GPU().MemoryManager();
320 const VAddr cpu_addr_end = cpu_addr + size; 324 const VAddr cpu_addr_end = cpu_addr + size;
@@ -368,7 +372,7 @@ private:
368 } 372 }
369 373
370 void UpdateBlock(const OwnerBuffer& block, VAddr start, VAddr end, 374 void UpdateBlock(const OwnerBuffer& block, VAddr start, VAddr end,
371 std::vector<MapInterval*>& overlaps) { 375 const VectorMapInterval& overlaps) {
372 const IntervalType base_interval{start, end}; 376 const IntervalType base_interval{start, end};
373 IntervalSet interval_set{}; 377 IntervalSet interval_set{};
374 interval_set.add(base_interval); 378 interval_set.add(base_interval);
@@ -387,14 +391,13 @@ private:
387 } 391 }
388 } 392 }
389 393
390 std::vector<MapInterval*> GetMapsInRange(VAddr addr, std::size_t size) { 394 VectorMapInterval GetMapsInRange(VAddr addr, std::size_t size) {
395 VectorMapInterval result;
391 if (size == 0) { 396 if (size == 0) {
392 return {}; 397 return result;
393 } 398 }
394 399
395 std::vector<MapInterval*> result;
396 const VAddr addr_end = addr + size; 400 const VAddr addr_end = addr + size;
397
398 auto it = mapped_addresses.lower_bound(addr); 401 auto it = mapped_addresses.lower_bound(addr);
399 if (it != mapped_addresses.begin()) { 402 if (it != mapped_addresses.begin()) {
400 --it; 403 --it;
@@ -574,10 +577,9 @@ private:
574 u64 buffer_offset = 0; 577 u64 buffer_offset = 0;
575 u64 buffer_offset_base = 0; 578 u64 buffer_offset_base = 0;
576 579
577 using IntervalSet = boost::icl::interval_set<VAddr>;
578 using IntervalType = typename IntervalSet::interval_type;
579 std::list<MapInterval> mapped_addresses_storage; // Temporary hack 580 std::list<MapInterval> mapped_addresses_storage; // Temporary hack
580 boost::intrusive::set<MapInterval, boost::intrusive::compare<MapIntervalCompare>> mapped_addresses; 581 boost::intrusive::set<MapInterval, boost::intrusive::compare<MapIntervalCompare>>
582 mapped_addresses;
581 583
582 static constexpr u64 write_page_bit = 11; 584 static constexpr u64 write_page_bit = 11;
583 std::unordered_map<u64, u32> written_pages; 585 std::unordered_map<u64, u32> written_pages;