author     bunnei  2018-08-23 15:38:57 -0400
committer  bunnei  2018-08-27 18:26:45 -0400
commit     382852418be0fd14fb0290d7b0ff86644685b59b
tree       72294f2832a24c559a47c3b296a636001378e28a  /src/video_core/rasterizer_cache.h
parent     Merge pull request #1169 from Lakumakkara/sel
video_core: Add RasterizerCache class for common cache management code.
Diffstat (limited to 'src/video_core/rasterizer_cache.h')
-rw-r--r--  src/video_core/rasterizer_cache.h  116
1 file changed, 116 insertions(+), 0 deletions(-)
diff --git a/src/video_core/rasterizer_cache.h b/src/video_core/rasterizer_cache.h
new file mode 100644
index 000000000..7a0492a4e
--- /dev/null
+++ b/src/video_core/rasterizer_cache.h
@@ -0,0 +1,116 @@
// Copyright 2018 yuzu Emulator Project
// Licensed under GPLv2 or any later version
// Refer to the license.txt file included.

#pragma once

#include <unordered_map>
#include <boost/icl/interval_map.hpp>
#include <boost/range/iterator_range.hpp>

#include "common/common_types.h"
#include "core/memory.h"
#include "video_core/memory_manager.h"

template <class T>
class RasterizerCache : NonCopyable {
public:
    /// Mark the specified region as being invalidated
    void InvalidateRegion(Tegra::GPUVAddr region_addr, size_t region_size) {
        for (auto iter = cached_objects.cbegin(); iter != cached_objects.cend();) {
            const auto& object{iter->second};

            // Advance the iterator first; Unregister below erases the current entry
            // from cached_objects, which would otherwise invalidate it
            ++iter;

            if (object->GetAddr() <= (region_addr + region_size) &&
                region_addr <= (object->GetAddr() + object->GetSizeInBytes())) {
                // Regions overlap, so invalidate
                Unregister(object);
            }
        }
    }

protected:
    /// Tries to get an object from the cache with the specified address
    T TryGet(Tegra::GPUVAddr addr) const {
        const auto& search{cached_objects.find(addr)};
        if (search != cached_objects.end()) {
            return search->second;
        }

        return nullptr;
    }

    /// Gets a reference to the cache
    const std::unordered_map<Tegra::GPUVAddr, T>& GetCache() const {
        return cached_objects;
    }

    /// Registers an object into the cache
    void Register(const T& object) {
        const auto& search{cached_objects.find(object->GetAddr())};
        if (search != cached_objects.end()) {
            // Registered already
            return;
        }

        cached_objects[object->GetAddr()] = object;
        UpdatePagesCachedCount(object->GetAddr(), object->GetSizeInBytes(), 1);
    }

    /// Unregisters an object from the cache
    void Unregister(const T& object) {
        const auto& search{cached_objects.find(object->GetAddr())};
        if (search == cached_objects.end()) {
            // Unregistered already
            return;
        }

        UpdatePagesCachedCount(object->GetAddr(), object->GetSizeInBytes(), -1);
        cached_objects.erase(search);
    }

private:
    using PageMap = boost::icl::interval_map<u64, int>;

    /// Returns an iterator range over the map segments that overlap the given interval
    template <typename Map, typename Interval>
    constexpr auto RangeFromInterval(Map& map, const Interval& interval) {
        return boost::make_iterator_range(map.equal_range(interval));
    }

    /// Increases/decreases the number of objects in pages touching the specified region
    void UpdatePagesCachedCount(Tegra::GPUVAddr addr, u64 size, int delta) {
        const u64 page_start{addr >> Tegra::MemoryManager::PAGE_BITS};
        const u64 page_end{(addr + size) >> Tegra::MemoryManager::PAGE_BITS};

        // Interval maps will erase segments if count reaches 0, so if delta is negative we have to
        // subtract after iterating
        const auto pages_interval = PageMap::interval_type::right_open(page_start, page_end);
        if (delta > 0)
            cached_pages.add({pages_interval, delta});

        for (const auto& pair : RangeFromInterval(cached_pages, pages_interval)) {
            const auto interval = pair.first & pages_interval;
            const int count = pair.second;

            const Tegra::GPUVAddr interval_start_addr = boost::icl::first(interval)
                                                        << Tegra::MemoryManager::PAGE_BITS;
            const Tegra::GPUVAddr interval_end_addr = boost::icl::last_next(interval)
                                                      << Tegra::MemoryManager::PAGE_BITS;
            const u64 interval_size = interval_end_addr - interval_start_addr;

            if (delta > 0 && count == delta)
                Memory::RasterizerMarkRegionCached(interval_start_addr, interval_size, true);
            else if (delta < 0 && count == -delta)
                Memory::RasterizerMarkRegionCached(interval_start_addr, interval_size, false);
            else
                ASSERT(count >= 0);
        }

        if (delta < 0)
            cached_pages.add({pages_interval, delta});
    }

    std::unordered_map<Tegra::GPUVAddr, T> cached_objects;
    PageMap cached_pages;
};
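
For context, a minimal sketch of how a concrete cache might build on this template. The names Surface, SurfaceCache, and GetOrCreateSurface are hypothetical, used only for illustration and not part of this commit; the template merely requires that T behave like a pointer (e.g. a std::shared_ptr) to an object exposing GetAddr() and GetSizeInBytes().

#include <memory>

#include "video_core/rasterizer_cache.h"

// Hypothetical cached object; any type exposing GetAddr()/GetSizeInBytes() works
class Surface {
public:
    Surface(Tegra::GPUVAddr addr, u64 size) : addr{addr}, size{size} {}

    Tegra::GPUVAddr GetAddr() const {
        return addr;
    }
    u64 GetSizeInBytes() const {
        return size;
    }

private:
    Tegra::GPUVAddr addr;
    u64 size;
};

// Hypothetical derived cache using the protected TryGet/Register helpers
class SurfaceCache final : public RasterizerCache<std::shared_ptr<Surface>> {
public:
    std::shared_ptr<Surface> GetOrCreateSurface(Tegra::GPUVAddr addr, u64 size) {
        // Reuse an object already registered at this address, if any
        if (auto surface = TryGet(addr)) {
            return surface;
        }

        // Otherwise create and register one, which also bumps the cached-page
        // counts for the pages the object touches
        auto surface = std::make_shared<Surface>(addr, size);
        Register(surface);
        return surface;
    }
};

Because cached_objects stores T by value, a shared_ptr (or comparable handle) keeps the object alive while it is registered, and InvalidateRegion can safely drop entries whose backing memory has been written.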