author     Markus Wick  2020-01-12 17:04:15 +0100
committer  Markus Wick  2020-01-18 08:38:47 +0100
commit     56672b8c9809d8f5585d208f5b85549bc3fe2a0e (patch)
tree       9ac5f757fb4dbb26272c2b169e0a33bc6164d4e9 /src/core/hle/kernel
parent     core/hle: Simplify PhysicalMemory usage in vm_manager. (diff)
download   yuzu-56672b8c9809d8f5585d208f5b85549bc3fe2a0e.tar.gz
           yuzu-56672b8c9809d8f5585d208f5b85549bc3fe2a0e.tar.xz
           yuzu-56672b8c9809d8f5585d208f5b85549bc3fe2a0e.zip
core/memory: Create a special MapMemoryRegion for physical memory.
This allows us to create a fastmem arena within the memory.cpp helpers.
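For context, a minimal sketch of the two mapping overloads the memory.cpp helpers would expose after this change. The class name, parameter names, and exact types are assumptions inferred from the member-style call sites in the diff below, not copied from the actual header:

// Sketch only: signatures inferred from the call sites in this diff.
class Memory {
public:
    // Pre-existing overload: maps an already-resolved host pointer into the guest page table.
    void MapMemoryRegion(Common::PageTable& page_table, VAddr base, u64 size, u8* target);

    // Overload added by this commit: takes the owning PhysicalMemory block plus an offset,
    // so the helper can resolve the host pointer itself and later register the range
    // with a fastmem arena.
    void MapMemoryRegion(Common::PageTable& page_table, VAddr base, u64 size,
                         Kernel::PhysicalMemory& memory, VAddr offset);
};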
Diffstat (limited to 'src/core/hle/kernel')
-rw-r--r--  src/core/hle/kernel/physical_memory.h  5
-rw-r--r--  src/core/hle/kernel/vm_manager.cpp     3
2 files changed, 5 insertions, 3 deletions
diff --git a/src/core/hle/kernel/physical_memory.h b/src/core/hle/kernel/physical_memory.h
index 090565310..b689e8e8b 100644
--- a/src/core/hle/kernel/physical_memory.h
+++ b/src/core/hle/kernel/physical_memory.h
@@ -14,6 +14,9 @@ namespace Kernel {
 // - Second to ensure all host backing memory used is aligned to 256 bytes due
 // to strict alignment restrictions on GPU memory.
 
-using PhysicalMemory = std::vector<u8, Common::AlignmentAllocator<u8, 256>>;
+using PhysicalMemoryVector = std::vector<u8, Common::AlignmentAllocator<u8, 256>>;
+class PhysicalMemory final : public PhysicalMemoryVector {
+    using PhysicalMemoryVector::PhysicalMemoryVector;
+};
 
 } // namespace Kernel
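A likely motivation for wrapping the alias in a class, sketched below: a class type can be forward-declared, whereas an alias for std::vector with a custom allocator cannot, so consumers such as core/memory.h can name Kernel::PhysicalMemory without pulling in the allocator header, and overloads can target it as a distinct type. This reasoning is inferred from the change, not stated in the commit.

// Sketch: a consumer header can now forward-declare the type instead of
// including physical_memory.h and the AlignmentAllocator machinery.
namespace Kernel {
class PhysicalMemory;
}

// Declaration compiles with only the forward declaration above.
void UseBlock(Kernel::PhysicalMemory& block);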
diff --git a/src/core/hle/kernel/vm_manager.cpp b/src/core/hle/kernel/vm_manager.cpp
index d223b4ecc..0b3500fce 100644
--- a/src/core/hle/kernel/vm_manager.cpp
+++ b/src/core/hle/kernel/vm_manager.cpp
@@ -780,8 +780,7 @@ void VMManager::UpdatePageTableForVMA(const VirtualMemoryArea& vma) {
         memory.UnmapRegion(page_table, vma.base, vma.size);
         break;
     case VMAType::AllocatedMemoryBlock:
-        memory.MapMemoryRegion(page_table, vma.base, vma.size,
-                               vma.backing_block->data() + vma.offset);
+        memory.MapMemoryRegion(page_table, vma.base, vma.size, *vma.backing_block, vma.offset);
         break;
     case VMAType::BackingMemory:
         memory.MapMemoryRegion(page_table, vma.base, vma.size, vma.backing_memory);
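The call site above now hands the whole PhysicalMemory block and an offset to the helper instead of a precomputed host pointer. A hypothetical implementation sketch of the new overload inside memory.cpp follows; the real code may differ, but the point is that the helper, not the caller, resolves the host pointer, which is also where a fastmem arena mapping could be established later:

// Sketch only: assumes the Memory class and raw-pointer overload from the earlier sketch.
void Memory::MapMemoryRegion(Common::PageTable& page_table, VAddr base, u64 size,
                             Kernel::PhysicalMemory& memory, VAddr offset) {
    ASSERT_MSG(offset + size <= memory.size(), "mapping outside the backing block");
    // Delegate to the raw-pointer path for the software page table...
    MapMemoryRegion(page_table, base, size, memory.data() + offset);
    // ...and this is the single place where a fastmem arena could mirror the mapping.
}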