| author | 2020-05-17 16:56:08 -0300 |
|---|---|
| committer | 2020-05-21 16:44:00 -0300 |
| commit | 891236124caaed34cdefac61cf90896a5b66b267 (patch) |
| tree | 68c53427967586052ef6e39eab2a48beba58f1e4 /src/video_core/buffer_cache |
| parent | buffer_cache: Remove shared pointers (diff) |
buffer_cache: Use boost::intrusive::set for caching
Instead of using boost::icl::interval_map for caching, use
boost::intrusive::set. interval_map is intended as a container whose
keys can overlap with one another; we don't need that for caching
buffers, and a std::set-like data structure that lets us search with
lower_bound is enough.
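
For context, the sketch below shows the pattern this commit moves to: map entries hook themselves into a `boost::intrusive::set` ordered by start address, and an overlap query is a `lower_bound` followed by a short forward walk. This is a minimal, hypothetical example, not code from the commit; the `Interval`, `IntervalCompare`, and `FindOverlaps` names are made up for illustration.

```cpp
#include <cstdint>
#include <list>
#include <vector>

#include <boost/intrusive/set.hpp>

using VAddr = std::uint64_t;

// Illustrative node type: carries its own set hook and is ordered by start address.
struct Interval : boost::intrusive::set_base_hook<boost::intrusive::optimize_size<true>> {
    explicit Interval(VAddr start_, VAddr end_ = 0) : start{start_}, end{end_} {}

    bool Overlaps(VAddr other_start, VAddr other_end) const {
        return start < other_end && other_start < end;
    }

    VAddr start = 0;
    VAddr end = 0;
};

struct IntervalCompare {
    bool operator()(const Interval& lhs, const Interval& rhs) const {
        return lhs.start < rhs.start;
    }
};

using IntervalSet = boost::intrusive::set<Interval, boost::intrusive::compare<IntervalCompare>>;

// Collect all intervals that overlap [addr, addr + size).
std::vector<Interval*> FindOverlaps(IntervalSet& set, VAddr addr, VAddr size) {
    std::vector<Interval*> result;
    const VAddr addr_end = addr + size;
    auto it = set.lower_bound(Interval{addr});
    // Step back once: the interval starting just before addr may still cover it.
    if (it != set.begin()) {
        --it;
    }
    while (it != set.end() && it->start < addr_end) {
        if (it->Overlaps(addr, addr_end)) {
            result.push_back(&*it);
        }
        ++it;
    }
    return result;
}

int main() {
    // Intrusive containers do not own their elements, so keep them alive elsewhere.
    std::list<Interval> storage;
    IntervalSet set;
    storage.emplace_back(0x1000, 0x2000);
    storage.emplace_back(0x3000, 0x4000);
    for (Interval& interval : storage) {
        set.insert(interval);
    }
    const auto hits = FindOverlaps(set, 0x1800, 0x100); // overlaps the first interval only
    set.clear();                                        // unlink before the nodes are destroyed
    return hits.size() == 1 ? 0 : 1;
}
```

The same lower_bound-then-walk-forward shape appears in the rewritten GetMapsInRange in the diff below, and the `std::list` backing storage mirrors the commit's temporary ownership workaround.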
Diffstat (limited to 'src/video_core/buffer_cache')
| -rw-r--r-- | src/video_core/buffer_cache/buffer_cache.h | 42 |
|---|---|---|
| -rw-r--r-- | src/video_core/buffer_cache/map_interval.h | 32 |
2 files changed, 44 insertions, 30 deletions
```diff
diff --git a/src/video_core/buffer_cache/buffer_cache.h b/src/video_core/buffer_cache/buffer_cache.h
index eb03879c4..fb12af9d8 100644
--- a/src/video_core/buffer_cache/buffer_cache.h
+++ b/src/video_core/buffer_cache/buffer_cache.h
@@ -14,9 +14,11 @@
 
 #include <boost/icl/interval_map.hpp>
 #include <boost/icl/interval_set.hpp>
+#include <boost/intrusive/set.hpp>
 #include <boost/range/iterator_range.hpp>
 
 #include "common/alignment.h"
+#include "common/assert.h"
 #include "common/common_types.h"
 #include "common/logging/log.h"
 #include "core/core.h"
@@ -73,7 +75,7 @@ public:
             }
         }
 
-        auto block = GetBlock(cpu_addr, size);
+        OwnerBuffer block = GetBlock(cpu_addr, size);
         MapInterval* const map = MapAddress(block, gpu_addr, cpu_addr, size);
         if (!map) {
             return {GetEmptyBuffer(size), 0};
@@ -272,16 +274,16 @@ protected:
         }
         const std::size_t size = new_map.end - new_map.start;
         new_map.is_registered = true;
-        const IntervalType interval{new_map.start, new_map.end};
         rasterizer.UpdatePagesCachedCount(cpu_addr, size, 1);
         new_map.is_memory_marked = true;
         if (inherit_written) {
             MarkRegionAsWritten(new_map.start, new_map.end - 1);
             new_map.is_written = true;
         }
-        mapped_addresses.insert({interval, new_map});
-        // Temporary hack until this is replaced with boost::intrusive::rbtree
-        return const_cast<MapInterval*>(&mapped_addresses.find(interval)->second);
+        // Temporary hack, leaks memory and it's not cache local
+        MapInterval* const storage = &mapped_addresses_storage.emplace_back(new_map);
+        mapped_addresses.insert(*storage);
+        return storage;
     }
 
     void UnmarkMemory(MapInterval* map) {
@@ -304,8 +306,9 @@ protected:
         if (map->is_written) {
             UnmarkRegionAsWritten(map->start, map->end - 1);
         }
-        const IntervalType delete_interval{map->start, map->end};
-        mapped_addresses.erase(delete_interval);
+        const auto it = mapped_addresses.find(*map);
+        ASSERT(it != mapped_addresses.end());
+        mapped_addresses.erase(it);
     }
 
 private:
@@ -389,13 +392,20 @@ private:
             return {};
         }
 
-        std::vector<MapInterval*> objects;
-        const IntervalType interval{addr, addr + size};
-        for (auto& pair : boost::make_iterator_range(mapped_addresses.equal_range(interval))) {
-            objects.push_back(&pair.second);
-        }
+        std::vector<MapInterval*> result;
+        const VAddr addr_end = addr + size;
 
-        return objects;
+        auto it = mapped_addresses.lower_bound(addr);
+        if (it != mapped_addresses.begin()) {
+            --it;
+        }
+        while (it != mapped_addresses.end() && it->start < addr_end) {
+            if (it->Overlaps(addr, addr_end)) {
+                result.push_back(&*it);
+            }
+            ++it;
+        }
+        return result;
     }
 
     /// Returns a ticks counter used for tracking when cached objects were last modified
@@ -565,9 +575,9 @@ private:
     u64 buffer_offset_base = 0;
 
     using IntervalSet = boost::icl::interval_set<VAddr>;
-    using IntervalCache = boost::icl::interval_map<VAddr, MapInterval>;
-    using IntervalType = typename IntervalCache::interval_type;
-    IntervalCache mapped_addresses;
+    using IntervalType = typename IntervalSet::interval_type;
+    std::list<MapInterval> mapped_addresses_storage; // Temporary hack
+    boost::intrusive::set<MapInterval, boost::intrusive::compare<MapIntervalCompare>> mapped_addresses;
 
     static constexpr u64 write_page_bit = 11;
     std::unordered_map<u64, u32> written_pages;
```
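
One non-obvious consequence of the switch, visible in the new `mapped_addresses_storage` member above, is that intrusive containers never own their elements: `insert` only links a hook embedded in the object, so every `MapInterval` needs a stable home elsewhere. The sketch below shows the shape of that contract; the `Register`/`Unregister` free functions are hypothetical helpers, not the actual cache code.

```cpp
#include <list>

#include <boost/intrusive/set.hpp>

#include "video_core/buffer_cache/map_interval.h"

using MapSet = boost::intrusive::set<VideoCommon::MapInterval,
                                     boost::intrusive::compare<VideoCommon::MapIntervalCompare>>;

// Inserting into an intrusive set performs no allocation and no copy: it only
// links the hook embedded in the node, so the node must outlive its membership.
VideoCommon::MapInterval* Register(std::list<VideoCommon::MapInterval>& storage, MapSet& set,
                                   const VideoCommon::MapInterval& new_map) {
    VideoCommon::MapInterval* const node = &storage.emplace_back(new_map);
    set.insert(*node);
    return node;
}

void Unregister(MapSet& set, VideoCommon::MapInterval* node) {
    set.erase(set.iterator_to(*node));
    // The node is now unlinked but still sits in the std::list; nothing frees
    // it, which is the leak the "Temporary hack" comment refers to.
}
```

The commit parks the nodes in a `std::list` for now, which is why the in-diff comment calls it a temporary, memory-leaking, cache-unfriendly workaround. The actual Unregister in the diff also looks the node up with `find(*map)` plus an `ASSERT` rather than `iterator_to`, but the ownership story is the same.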
```diff
diff --git a/src/video_core/buffer_cache/map_interval.h b/src/video_core/buffer_cache/map_interval.h
index ad4db0135..45705cccf 100644
--- a/src/video_core/buffer_cache/map_interval.h
+++ b/src/video_core/buffer_cache/map_interval.h
@@ -4,38 +4,36 @@
 
 #pragma once
 
+#include <boost/intrusive/set_hook.hpp>
+
 #include "common/common_types.h"
 #include "video_core/gpu.h"
 
 namespace VideoCommon {
 
-struct MapInterval {
-    constexpr explicit MapInterval() noexcept = default;
-
-    constexpr explicit MapInterval(VAddr start, VAddr end, GPUVAddr gpu_addr) noexcept
-        : start{start}, end{end}, gpu_addr{gpu_addr} {}
+struct MapInterval : public boost::intrusive::set_base_hook<boost::intrusive::optimize_size<true>> {
+    /*implicit*/ MapInterval(VAddr start_) noexcept : start{start_} {}
 
-    constexpr bool IsInside(VAddr other_start, VAddr other_end) const noexcept {
-        return (start <= other_start && other_end <= end);
-    }
+    explicit MapInterval(VAddr start_, VAddr end_, GPUVAddr gpu_addr_) noexcept
+        : start{start_}, end{end_}, gpu_addr{gpu_addr_} {}
 
-    constexpr bool operator==(const MapInterval& rhs) const noexcept {
-        return start == rhs.start && end == rhs.end;
-    }
+    bool IsInside(VAddr other_start, VAddr other_end) const noexcept {
+        return start <= other_start && other_end <= end;
+    }
 
-    constexpr bool operator!=(const MapInterval& rhs) const noexcept {
-        return !operator==(rhs);
-    }
+    bool Overlaps(VAddr other_start, VAddr other_end) const noexcept {
+        return start < other_end && other_start < end;
+    }
 
-    constexpr void MarkAsModified(bool is_modified_, u64 ticks_) noexcept {
+    void MarkAsModified(bool is_modified_, u64 ticks_) noexcept {
         is_modified = is_modified_;
         ticks = ticks_;
     }
 
+    boost::intrusive::set_member_hook<> member_hook_;
     VAddr start = 0;
     VAddr end = 0;
     GPUVAddr gpu_addr = 0;
-    VAddr cpu_addr = 0;
     u64 ticks = 0;
     bool is_written = false;
     bool is_modified = false;
@@ -44,4 +42,10 @@ struct MapInterval {
     bool is_sync_pending = false;
 };
 
+struct MapIntervalCompare {
+    constexpr bool operator()(const MapInterval& lhs, const MapInterval& rhs) const noexcept {
+        return lhs.start < rhs.start;
+    }
+};
+
 } // namespace VideoCommon
```
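
A small detail worth noting in the new MapInterval: the single-argument constructor is deliberately left implicit (and annotated `/*implicit*/`). `boost::intrusive::set`'s default key type is the value type itself, so a lookup such as `mapped_addresses.lower_bound(addr)` in GetMapsInRange relies on a plain VAddr converting to a MapInterval key, and MapIntervalCompare then only inspects `start`. A hedged sketch of that conversion path follows; the `FindLowerBound` helper is illustrative, not part of the commit.

```cpp
#include <boost/intrusive/set.hpp>

#include "video_core/buffer_cache/map_interval.h"

using MapSet = boost::intrusive::set<VideoCommon::MapInterval,
                                     boost::intrusive::compare<VideoCommon::MapIntervalCompare>>;

// First interval whose start is >= addr, or nullptr when there is none.
VideoCommon::MapInterval* FindLowerBound(MapSet& mapped_addresses, VAddr addr) {
    // The raw address converts through MapInterval(VAddr) into a throwaway key;
    // only `start` takes part in the comparison, so the other fields can stay
    // default-initialized.
    const auto it = mapped_addresses.lower_bound(addr);
    return it != mapped_addresses.end() ? &*it : nullptr;
}
```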