summaryrefslogtreecommitdiff
path: root/src/core/memory.cpp
diff options
context:
space:
mode:
authorGravatar FernandoS272021-06-06 09:57:24 +0200
committerGravatar Markus Wick2021-06-11 17:27:17 +0200
commit5ba28325b262d44fcd7721aa00074955bd794015 (patch)
tree1c388684ccc0ef52b124145cf3dfe98d5d427980 /src/core/memory.cpp
parentcommon/host_memory: Optimize for huge tables. (diff)
downloadyuzu-5ba28325b262d44fcd7721aa00074955bd794015.tar.gz
yuzu-5ba28325b262d44fcd7721aa00074955bd794015.tar.xz
yuzu-5ba28325b262d44fcd7721aa00074955bd794015.zip
General: Add settings for fastmem and disabling address space check.
Diffstat (limited to 'src/core/memory.cpp')
-rw-r--r--src/core/memory.cpp14
1 file changed, 10 insertions(+), 4 deletions(-)
diff --git a/src/core/memory.cpp b/src/core/memory.cpp
index 79468e4dc..f285c6f63 100644
--- a/src/core/memory.cpp
+++ b/src/core/memory.cpp
@@ -47,7 +47,9 @@ struct Memory::Impl {
47 "Out of bounds target: {:016X}", target); 47 "Out of bounds target: {:016X}", target);
48 MapPages(page_table, base / PAGE_SIZE, size / PAGE_SIZE, target, Common::PageType::Memory); 48 MapPages(page_table, base / PAGE_SIZE, size / PAGE_SIZE, target, Common::PageType::Memory);
49 49
50 system.DeviceMemory().buffer.Map(base, target - DramMemoryMap::Base, size); 50 if (Settings::IsFastmemEnabled()) {
51 system.DeviceMemory().buffer.Map(base, target - DramMemoryMap::Base, size);
52 }
51 } 53 }
52 54
53 void UnmapRegion(Common::PageTable& page_table, VAddr base, u64 size) { 55 void UnmapRegion(Common::PageTable& page_table, VAddr base, u64 size) {
@@ -55,7 +57,9 @@ struct Memory::Impl {
55 ASSERT_MSG((base & PAGE_MASK) == 0, "non-page aligned base: {:016X}", base); 57 ASSERT_MSG((base & PAGE_MASK) == 0, "non-page aligned base: {:016X}", base);
56 MapPages(page_table, base / PAGE_SIZE, size / PAGE_SIZE, 0, Common::PageType::Unmapped); 58 MapPages(page_table, base / PAGE_SIZE, size / PAGE_SIZE, 0, Common::PageType::Unmapped);
57 59
58 system.DeviceMemory().buffer.Unmap(base, size); 60 if (Settings::IsFastmemEnabled()) {
61 system.DeviceMemory().buffer.Unmap(base, size);
62 }
59 } 63 }
60 64
61 bool IsValidVirtualAddress(const Kernel::KProcess& process, const VAddr vaddr) const { 65 bool IsValidVirtualAddress(const Kernel::KProcess& process, const VAddr vaddr) const {
@@ -475,8 +479,10 @@ struct Memory::Impl {
475 return; 479 return;
476 } 480 }
477 481
478 const bool is_read_enable = Settings::IsGPULevelHigh() || !cached; 482 if (Settings::IsFastmemEnabled()) {
479 system.DeviceMemory().buffer.Protect(vaddr, size, is_read_enable, !cached); 483 const bool is_read_enable = Settings::IsGPULevelHigh() || !cached;
484 system.DeviceMemory().buffer.Protect(vaddr, size, is_read_enable, !cached);
485 }
480 486
481 // Iterate over a contiguous CPU address space, which corresponds to the specified GPU 487 // Iterate over a contiguous CPU address space, which corresponds to the specified GPU
482 // address space, marking the region as un/cached. The region is marked un/cached at a 488 // address space, marking the region as un/cached. The region is marked un/cached at a