summaryrefslogtreecommitdiff
path: root/src/video_core/shader_cache.h
diff options
context:
space:
mode:
authorGravatar ReinUsesLisp2020-06-28 04:58:58 -0300
committerGravatar ReinUsesLisp2020-07-01 18:16:53 -0300
commitf6cb128eac864484d15688811239e8d5206526c8 (patch)
tree5b6562709d97fe2268b4c56fdcb816aed63f6bdd /src/video_core/shader_cache.h
parentMerge pull request #4208 from jbeich/freebsd (diff)
downloadyuzu-f6cb128eac864484d15688811239e8d5206526c8.tar.gz
yuzu-f6cb128eac864484d15688811239e8d5206526c8.tar.xz
yuzu-f6cb128eac864484d15688811239e8d5206526c8.zip
shader_cache: Fix use-after-free and orphan invalidation cache entries
This fixes some cases where entries could have been removed multiple times, reading freed memory. To address this issue, this commit removes duplicates from the entries marked for removal and reworks the removal process to fix another use-after-free situation. Another issue fixed in this commit is orphan invalidation cache entries. Previously, only the entries invalidated in the current operation had their references removed. This led to more use-after-free situations when those entries were later invalidated but referenced an object that no longer existed.
Diffstat (limited to 'src/video_core/shader_cache.h')
-rw-r--r--src/video_core/shader_cache.h70
1 file changed, 41 insertions, 29 deletions
diff --git a/src/video_core/shader_cache.h b/src/video_core/shader_cache.h
index 2dd270e99..b7608fc7b 100644
--- a/src/video_core/shader_cache.h
+++ b/src/video_core/shader_cache.h
@@ -20,6 +20,7 @@ namespace VideoCommon {
20template <class T> 20template <class T>
21class ShaderCache { 21class ShaderCache {
22 static constexpr u64 PAGE_BITS = 14; 22 static constexpr u64 PAGE_BITS = 14;
23 static constexpr u64 PAGE_SIZE = u64(1) << PAGE_BITS;
23 24
24 struct Entry { 25 struct Entry {
25 VAddr addr_start; 26 VAddr addr_start;
@@ -87,8 +88,8 @@ protected:
87 const VAddr addr_end = addr + size; 88 const VAddr addr_end = addr + size;
88 Entry* const entry = NewEntry(addr, addr_end, data.get()); 89 Entry* const entry = NewEntry(addr, addr_end, data.get());
89 90
90 const u64 page_end = addr_end >> PAGE_BITS; 91 const u64 page_end = (addr_end + PAGE_SIZE - 1) >> PAGE_BITS;
91 for (u64 page = addr >> PAGE_BITS; page <= page_end; ++page) { 92 for (u64 page = addr >> PAGE_BITS; page < page_end; ++page) {
92 invalidation_cache[page].push_back(entry); 93 invalidation_cache[page].push_back(entry);
93 } 94 }
94 95
@@ -108,20 +109,13 @@ private:
108 /// @pre invalidation_mutex is locked 109 /// @pre invalidation_mutex is locked
109 void InvalidatePagesInRegion(VAddr addr, std::size_t size) { 110 void InvalidatePagesInRegion(VAddr addr, std::size_t size) {
110 const VAddr addr_end = addr + size; 111 const VAddr addr_end = addr + size;
111 const u64 page_end = addr_end >> PAGE_BITS; 112 const u64 page_end = (addr_end + PAGE_SIZE - 1) >> PAGE_BITS;
112 for (u64 page = addr >> PAGE_BITS; page <= page_end; ++page) { 113 for (u64 page = addr >> PAGE_BITS; page < page_end; ++page) {
113 const auto it = invalidation_cache.find(page); 114 auto it = invalidation_cache.find(page);
114 if (it == invalidation_cache.end()) { 115 if (it == invalidation_cache.end()) {
115 continue; 116 continue;
116 } 117 }
117 118 InvalidatePageEntries(it->second, addr, addr_end);
118 std::vector<Entry*>& entries = it->second;
119 InvalidatePageEntries(entries, addr, addr_end);
120
121 // If there's nothing else in this page, remove it to avoid overpopulating the hash map.
122 if (entries.empty()) {
123 invalidation_cache.erase(it);
124 }
125 } 119 }
126 } 120 }
127 121
@@ -131,15 +125,22 @@ private:
131 if (marked_for_removal.empty()) { 125 if (marked_for_removal.empty()) {
132 return; 126 return;
133 } 127 }
134 std::scoped_lock lock{lookup_mutex}; 128 // Remove duplicates
129 std::sort(marked_for_removal.begin(), marked_for_removal.end());
130 marked_for_removal.erase(std::unique(marked_for_removal.begin(), marked_for_removal.end()),
131 marked_for_removal.end());
135 132
136 std::vector<T*> removed_shaders; 133 std::vector<T*> removed_shaders;
137 removed_shaders.reserve(marked_for_removal.size()); 134 removed_shaders.reserve(marked_for_removal.size());
138 135
136 std::scoped_lock lock{lookup_mutex};
137
139 for (Entry* const entry : marked_for_removal) { 138 for (Entry* const entry : marked_for_removal) {
140 if (lookup_cache.erase(entry->addr_start) > 0) { 139 removed_shaders.push_back(entry->data);
141 removed_shaders.push_back(entry->data); 140
142 } 141 const auto it = lookup_cache.find(entry->addr_start);
142 ASSERT(it != lookup_cache.end());
143 lookup_cache.erase(it);
143 } 144 }
144 marked_for_removal.clear(); 145 marked_for_removal.clear();
145 146
@@ -154,17 +155,33 @@ private:
154 /// @param addr_end Non-inclusive end address of the invalidation 155 /// @param addr_end Non-inclusive end address of the invalidation
155 /// @pre invalidation_mutex is locked 156 /// @pre invalidation_mutex is locked
156 void InvalidatePageEntries(std::vector<Entry*>& entries, VAddr addr, VAddr addr_end) { 157 void InvalidatePageEntries(std::vector<Entry*>& entries, VAddr addr, VAddr addr_end) {
157 auto it = entries.begin(); 158 std::size_t index = 0;
158 while (it != entries.end()) { 159 while (index < entries.size()) {
159 Entry* const entry = *it; 160 Entry* const entry = entries[index];
160 if (!entry->Overlaps(addr, addr_end)) { 161 if (!entry->Overlaps(addr, addr_end)) {
161 ++it; 162 ++index;
162 continue; 163 continue;
163 } 164 }
165
164 UnmarkMemory(entry); 166 UnmarkMemory(entry);
167 RemoveEntryFromInvalidationCache(entry);
165 marked_for_removal.push_back(entry); 168 marked_for_removal.push_back(entry);
169 }
170 }
166 171
167 it = entries.erase(it); 172 /// @brief Removes all references to an entry in the invalidation cache
173 /// @param entry Entry to remove from the invalidation cache
174 /// @pre invalidation_mutex is locked
175 void RemoveEntryFromInvalidationCache(const Entry* entry) {
176 const u64 page_end = (entry->addr_end + PAGE_SIZE - 1) >> PAGE_BITS;
177 for (u64 page = entry->addr_start >> PAGE_BITS; page < page_end; ++page) {
178 const auto entries_it = invalidation_cache.find(page);
179 ASSERT(entries_it != invalidation_cache.end());
180 std::vector<Entry*>& entries = entries_it->second;
181
182 const auto entry_it = std::find(entries.begin(), entries.end(), entry);
183 ASSERT(entry_it != entries.end());
184 entries.erase(entry_it);
168 } 185 }
169 } 186 }
170 187
@@ -182,16 +199,11 @@ private:
182 } 199 }
183 200
184 /// @brief Removes a vector of shaders from a list 201 /// @brief Removes a vector of shaders from a list
185 /// @param removed_shaders Shaders to be removed from the storage, it can contain duplicates 202 /// @param removed_shaders Shaders to be removed from the storage
186 /// @pre invalidation_mutex is locked 203 /// @pre invalidation_mutex is locked
187 /// @pre lookup_mutex is locked 204 /// @pre lookup_mutex is locked
188 void RemoveShadersFromStorage(std::vector<T*> removed_shaders) { 205 void RemoveShadersFromStorage(std::vector<T*> removed_shaders) {
189 // Remove duplicates 206 // Notify removals
190 std::sort(removed_shaders.begin(), removed_shaders.end());
191 removed_shaders.erase(std::unique(removed_shaders.begin(), removed_shaders.end()),
192 removed_shaders.end());
193
194 // Now that there are no duplicates, we can notify removals
195 for (T* const shader : removed_shaders) { 207 for (T* const shader : removed_shaders) {
196 OnShaderRemoval(shader); 208 OnShaderRemoval(shader);
197 } 209 }