author     bunnei    2020-03-31 15:10:44 -0400
committer  bunnei    2020-04-17 00:59:28 -0400
commit     4caff51710a793c6c2c1069ddd6e92185aa731fe (patch)
tree       9770a5cdbfc40f6bddab093d5010f80ddad5bd26 /src/video_core/rasterizer_accelerated.cpp
parent     common: alignment: Add a helper function for generic alignment checking. (diff)
core: memory: Move to Core::Memory namespace.
- Helpful to disambiguate from the Kernel::Memory namespace.
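
The point of the rename is name lookup: inside kernel code, an unqualified Memory:: can resolve to Kernel::Memory rather than the CPU memory namespace. A minimal sketch of that ambiguity, using hypothetical, simplified declarations rather than the real yuzu headers:

    #include <cstddef>

    // Hypothetical contents, for illustration only.
    namespace Core::Memory {
    constexpr std::size_t PAGE_BITS = 12; // illustrative value
    }

    namespace Kernel::Memory {
    struct PageTable {}; // kernel-side memory management lives here
    }

    namespace Kernel {
    // Inside Kernel, an unqualified "Memory::PAGE_BITS" would find
    // Kernel::Memory first; fully qualifying as Core::Memory::PAGE_BITS
    // always names the CPU-memory constant.
    }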
Diffstat (limited to 'src/video_core/rasterizer_accelerated.cpp')
-rw-r--r--  src/video_core/rasterizer_accelerated.cpp  |  10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/src/video_core/rasterizer_accelerated.cpp b/src/video_core/rasterizer_accelerated.cpp
index d01db97da..53622ca05 100644
--- a/src/video_core/rasterizer_accelerated.cpp
+++ b/src/video_core/rasterizer_accelerated.cpp
@@ -23,15 +23,15 @@ constexpr auto RangeFromInterval(Map& map, const Interval& interval) {
 
 } // Anonymous namespace
 
-RasterizerAccelerated::RasterizerAccelerated(Memory::Memory& cpu_memory_)
+RasterizerAccelerated::RasterizerAccelerated(Core::Memory::Memory& cpu_memory_)
     : cpu_memory{cpu_memory_} {}
 
 RasterizerAccelerated::~RasterizerAccelerated() = default;
 
 void RasterizerAccelerated::UpdatePagesCachedCount(VAddr addr, u64 size, int delta) {
     std::lock_guard lock{pages_mutex};
-    const u64 page_start{addr >> Memory::PAGE_BITS};
-    const u64 page_end{(addr + size + Memory::PAGE_SIZE - 1) >> Memory::PAGE_BITS};
+    const u64 page_start{addr >> Core::Memory::PAGE_BITS};
+    const u64 page_end{(addr + size + Core::Memory::PAGE_SIZE - 1) >> Core::Memory::PAGE_BITS};
 
     // Interval maps will erase segments if count reaches 0, so if delta is negative we have to
     // subtract after iterating
@@ -44,8 +44,8 @@ void RasterizerAccelerated::UpdatePagesCachedCount(VAddr addr, u64 size, int delta) {
         const auto interval = pair.first & pages_interval;
         const int count = pair.second;
 
-        const VAddr interval_start_addr = boost::icl::first(interval) << Memory::PAGE_BITS;
-        const VAddr interval_end_addr = boost::icl::last_next(interval) << Memory::PAGE_BITS;
+        const VAddr interval_start_addr = boost::icl::first(interval) << Core::Memory::PAGE_BITS;
+        const VAddr interval_end_addr = boost::icl::last_next(interval) << Core::Memory::PAGE_BITS;
         const u64 interval_size = interval_end_addr - interval_start_addr;
 
         if (delta > 0 && count == delta) {
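
For context on the logic the commit touches (only the namespace qualification changes): the two shifted expressions round the byte range [addr, addr + size) outward to whole pages, and the boost::icl::interval_map drops any segment whose aggregated count returns to zero, which is why the in-code comment says negative deltas must be applied after iterating. A standalone sketch of both behaviors, assuming 4 KiB pages (PAGE_BITS = 12); the names and values are illustrative, not the actual Core::Memory constants:

    #include <cstdint>
    #include <iostream>
    #include <utility>
    #include <boost/icl/interval_map.hpp>

    using u64 = std::uint64_t;

    // Illustrative page geometry; the real values come from Core::Memory.
    constexpr u64 PAGE_BITS = 12;
    constexpr u64 PAGE_SIZE = u64{1} << PAGE_BITS;

    int main() {
        boost::icl::interval_map<u64, int> cached_pages;

        const u64 addr = 0x1234;
        const u64 size = 0x2000;

        // Round down to the first touched page, round up past the last one.
        const u64 page_start = addr >> PAGE_BITS;
        const u64 page_end = (addr + size + PAGE_SIZE - 1) >> PAGE_BITS;
        const auto pages = boost::icl::interval<u64>::right_open(page_start, page_end);

        // A positive delta creates (or bumps) a segment over the touched pages.
        cached_pages += std::make_pair(pages, 1);
        std::cout << "segments after +1: " << boost::icl::interval_count(cached_pages) << '\n'; // 1

        // Bringing the count back to zero erases the segment entirely, so code
        // like the above must read the affected intervals before applying a
        // negative delta.
        cached_pages += std::make_pair(pages, -1);
        std::cout << "segments after -1: " << boost::icl::interval_count(cached_pages) << '\n'; // 0
    }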