diff options
| author | 2014-04-27 21:25:16 -0400 | |
|---|---|---|
| committer | 2014-04-27 21:25:16 -0400 | |
| commit | 438dba40c1def91e9de252ef05f8650464e5c0c2 (patch) | |
| tree | 8f323d6095dfefe9d00f34cc4d7229be58a9f409 /src/core/mem_map_funcs.cpp | |
| parent | Merge pull request #4 from cpp3ds/master (diff) | |
| parent | removed DISALLOW_COPY_AND_ASSIGN in favor of NonCopyable class (diff) | |
| download | yuzu-438dba40c1def91e9de252ef05f8650464e5c0c2.tar.gz yuzu-438dba40c1def91e9de252ef05f8650464e5c0c2.tar.xz yuzu-438dba40c1def91e9de252ef05f8650464e5c0c2.zip | |
Merge branch 'hle-interface-updates'
Diffstat (limited to 'src/core/mem_map_funcs.cpp')
| -rw-r--r-- | src/core/mem_map_funcs.cpp | 111 |
1 file changed, 91 insertions, 20 deletions
diff --git a/src/core/mem_map_funcs.cpp b/src/core/mem_map_funcs.cpp index af4cfacbd..c8daf0df5 100644 --- a/src/core/mem_map_funcs.cpp +++ b/src/core/mem_map_funcs.cpp | |||
| @@ -12,15 +12,25 @@ | |||
| 12 | 12 | ||
| 13 | namespace Memory { | 13 | namespace Memory { |
| 14 | 14 | ||
| 15 | std::map<u32, HeapBlock> g_heap_gsp_map; | 15 | std::map<u32, MemoryBlock> g_heap_map; |
| 16 | std::map<u32, MemoryBlock> g_heap_gsp_map; | ||
| 17 | std::map<u32, MemoryBlock> g_shared_map; | ||
| 16 | 18 | ||
| 17 | /// Convert a physical address to virtual address | 19 | /// Convert a physical address to virtual address |
| 18 | u32 _AddressPhysicalToVirtual(const u32 addr) { | 20 | u32 _AddressPhysicalToVirtual(const u32 addr) { |
| 19 | // Our memory interface read/write functions assume virtual addresses. Put any physical address | 21 | // Our memory interface read/write functions assume virtual addresses. Put any physical address |
| 20 | // to virtual address translations here. This is obviously quite hacky... But we're not doing | 22 | // to virtual address translations here. This is obviously quite hacky... But we're not doing |
| 21 | // any MMU emulation yet or anything | 23 | // any MMU emulation yet or anything |
| 22 | if ((addr >= FCRAM_PADDR) && (addr < (FCRAM_PADDR_END))) { | 24 | if ((addr >= FCRAM_PADDR) && (addr < FCRAM_PADDR_END)) { |
| 23 | return (addr & FCRAM_MASK) | FCRAM_VADDR; | 25 | return VirtualAddressFromPhysical_FCRAM(addr); |
| 26 | |||
| 27 | // Hardware IO | ||
| 28 | // TODO(bunnei): FixMe | ||
| 29 | // This isn't going to work... The physical address of HARDWARE_IO conflicts with the virtual | ||
| 30 | // address of shared memory. | ||
| 31 | //} else if ((addr >= HARDWARE_IO_PADDR) && (addr < HARDWARE_IO_PADDR_END)) { | ||
| 32 | // return (addr + 0x0EB00000); | ||
| 33 | |||
| 24 | } | 34 | } |
| 25 | return addr; | 35 | return addr; |
| 26 | } | 36 | } |
| @@ -41,19 +51,24 @@ inline void _Read(T &var, const u32 addr) { | |||
| 41 | 51 | ||
| 42 | // Hardware I/O register reads | 52 | // Hardware I/O register reads |
| 43 | // 0x10XXXXXX- is physical address space, 0x1EXXXXXX is virtual address space | 53 | // 0x10XXXXXX- is physical address space, 0x1EXXXXXX is virtual address space |
| 44 | } else if ((vaddr & 0xFF000000) == 0x10000000 || (vaddr & 0xFF000000) == 0x1E000000) { | 54 | } else if ((vaddr >= HARDWARE_IO_VADDR) && (vaddr < HARDWARE_IO_VADDR_END)) { |
| 45 | HW::Read<T>(var, vaddr); | 55 | HW::Read<T>(var, vaddr); |
| 46 | 56 | ||
| 47 | // FCRAM - GSP heap | 57 | // FCRAM - GSP heap |
| 48 | } else if ((vaddr > HEAP_GSP_VADDR) && (vaddr < HEAP_GSP_VADDR_END)) { | 58 | } else if ((vaddr >= HEAP_GSP_VADDR) && (vaddr < HEAP_GSP_VADDR_END)) { |
| 49 | var = *((const T*)&g_heap_gsp[vaddr & HEAP_GSP_MASK]); | 59 | var = *((const T*)&g_heap_gsp[vaddr & HEAP_GSP_MASK]); |
| 50 | 60 | ||
| 51 | // FCRAM - application heap | 61 | // FCRAM - application heap |
| 52 | } else if ((vaddr > HEAP_VADDR) && (vaddr < HEAP_VADDR_END)) { | 62 | } else if ((vaddr >= HEAP_VADDR) && (vaddr < HEAP_VADDR_END)) { |
| 53 | var = *((const T*)&g_heap[vaddr & HEAP_MASK]); | 63 | var = *((const T*)&g_heap[vaddr & HEAP_MASK]); |
| 54 | 64 | ||
| 55 | /*else if ((vaddr & 0x3F800000) == 0x04000000) { | 65 | // Shared memory |
| 56 | var = *((const T*)&m_pVRAM[vaddr & VRAM_MASK]);*/ | 66 | } else if ((vaddr >= SHARED_MEMORY_VADDR) && (vaddr < SHARED_MEMORY_VADDR_END)) { |
| 67 | var = *((const T*)&g_shared_mem[vaddr & SHARED_MEMORY_MASK]); | ||
| 68 | |||
| 69 | // VRAM | ||
| 70 | } else if ((vaddr >= VRAM_VADDR) && (vaddr < VRAM_VADDR_END)) { | ||
| 71 | var = *((const T*)&g_vram[vaddr & VRAM_MASK]); | ||
| 57 | 72 | ||
| 58 | } else { | 73 | } else { |
| 59 | //_assert_msg_(MEMMAP, false, "unknown Read%d @ 0x%08X", sizeof(var) * 8, vaddr); | 74 | //_assert_msg_(MEMMAP, false, "unknown Read%d @ 0x%08X", sizeof(var) * 8, vaddr); |
| @@ -72,23 +87,25 @@ inline void _Write(u32 addr, const T data) { | |||
| 72 | 87 | ||
| 73 | // Hardware I/O register writes | 88 | // Hardware I/O register writes |
| 74 | // 0x10XXXXXX- is physical address space, 0x1EXXXXXX is virtual address space | 89 | // 0x10XXXXXX- is physical address space, 0x1EXXXXXX is virtual address space |
| 75 | } else if ((vaddr & 0xFF000000) == 0x10000000 || (vaddr & 0xFF000000) == 0x1E000000) { | 90 | } else if ((vaddr >= HARDWARE_IO_VADDR) && (vaddr < HARDWARE_IO_VADDR_END)) { |
| 76 | HW::Write<T>(vaddr, data); | 91 | HW::Write<T>(vaddr, data); |
| 77 | 92 | ||
| 78 | // FCRAM - GSP heap | 93 | // FCRAM - GSP heap |
| 79 | } else if ((vaddr > HEAP_GSP_VADDR) && (vaddr < HEAP_GSP_VADDR_END)) { | 94 | } else if ((vaddr >= HEAP_GSP_VADDR) && (vaddr < HEAP_GSP_VADDR_END)) { |
| 80 | *(T*)&g_heap_gsp[vaddr & HEAP_GSP_MASK] = data; | 95 | *(T*)&g_heap_gsp[vaddr & HEAP_GSP_MASK] = data; |
| 81 | 96 | ||
| 82 | // FCRAM - application heap | 97 | // FCRAM - application heap |
| 83 | } else if ((vaddr > HEAP_VADDR) && (vaddr < HEAP_VADDR_END)) { | 98 | } else if ((vaddr >= HEAP_VADDR) && (vaddr < HEAP_VADDR_END)) { |
| 84 | *(T*)&g_heap[vaddr & HEAP_MASK] = data; | 99 | *(T*)&g_heap[vaddr & HEAP_MASK] = data; |
| 85 | 100 | ||
| 86 | } else if ((vaddr & 0xFF000000) == 0x14000000) { | 101 | // Shared memory |
| 87 | _assert_msg_(MEMMAP, false, "umimplemented write to GSP heap"); | 102 | } else if ((vaddr >= SHARED_MEMORY_VADDR) && (vaddr < SHARED_MEMORY_VADDR_END)) { |
| 88 | } else if ((vaddr & 0xFFF00000) == 0x1EC00000) { | 103 | *(T*)&g_shared_mem[vaddr & SHARED_MEMORY_MASK] = data; |
| 89 | _assert_msg_(MEMMAP, false, "umimplemented write to IO registers"); | 104 | |
| 90 | } else if ((vaddr & 0xFF000000) == 0x1F000000) { | 105 | // VRAM |
| 91 | _assert_msg_(MEMMAP, false, "umimplemented write to VRAM"); | 106 | } else if ((vaddr >= VRAM_VADDR) && (vaddr < VRAM_VADDR_END)) { |
| 107 | *(T*)&g_vram[vaddr & VRAM_MASK] = data; | ||
| 108 | |||
| 92 | } else if ((vaddr & 0xFFF00000) == 0x1FF00000) { | 109 | } else if ((vaddr & 0xFFF00000) == 0x1FF00000) { |
| 93 | _assert_msg_(MEMMAP, false, "umimplemented write to DSP memory"); | 110 | _assert_msg_(MEMMAP, false, "umimplemented write to DSP memory"); |
| 94 | } else if ((vaddr & 0xFFFF0000) == 0x1FF80000) { | 111 | } else if ((vaddr & 0xFFFF0000) == 0x1FF80000) { |
| @@ -114,19 +131,73 @@ u8 *GetPointer(const u32 addr) { | |||
| 114 | } else if ((vaddr >= HEAP_VADDR) && (vaddr < HEAP_VADDR_END)) { | 131 | } else if ((vaddr >= HEAP_VADDR) && (vaddr < HEAP_VADDR_END)) { |
| 115 | return g_heap + (vaddr & HEAP_MASK); | 132 | return g_heap + (vaddr & HEAP_MASK); |
| 116 | 133 | ||
| 134 | // Shared memory | ||
| 135 | } else if ((vaddr > SHARED_MEMORY_VADDR) && (vaddr < SHARED_MEMORY_VADDR_END)) { | ||
| 136 | return g_shared_mem + (vaddr & SHARED_MEMORY_MASK); | ||
| 137 | |||
| 138 | // VRAM | ||
| 139 | } else if ((vaddr > VRAM_VADDR) && (vaddr < VRAM_VADDR_END)) { | ||
| 140 | return g_vram + (vaddr & VRAM_MASK); | ||
| 141 | |||
| 117 | } else { | 142 | } else { |
| 118 | ERROR_LOG(MEMMAP, "Unknown GetPointer @ 0x%08x", vaddr); | 143 | ERROR_LOG(MEMMAP, "unknown GetPointer @ 0x%08x", vaddr); |
| 119 | return 0; | 144 | return 0; |
| 120 | } | 145 | } |
| 121 | } | 146 | } |
| 122 | 147 | ||
| 123 | /** | 148 | /** |
| 149 | * Maps a block of memory in shared memory | ||
| 150 | * @param handle Handle to map memory block for | ||
| 151 | * @param addr Address to map memory block to | ||
| 152 | * @param permissions Memory map permissions | ||
| 153 | */ | ||
| 154 | u32 MapBlock_Shared(u32 handle, u32 addr,u32 permissions) { | ||
| 155 | MemoryBlock block; | ||
| 156 | |||
| 157 | block.handle = handle; | ||
| 158 | block.base_address = addr; | ||
| 159 | block.permissions = permissions; | ||
| 160 | |||
| 161 | if (g_shared_map.size() > 0) { | ||
| 162 | const MemoryBlock last_block = g_shared_map.rbegin()->second; | ||
| 163 | block.address = last_block.address + last_block.size; | ||
| 164 | } | ||
| 165 | g_shared_map[block.GetVirtualAddress()] = block; | ||
| 166 | |||
| 167 | return block.GetVirtualAddress(); | ||
| 168 | } | ||
| 169 | |||
| 170 | /** | ||
| 171 | * Maps a block of memory on the heap | ||
| 172 | * @param size Size of block in bytes | ||
| 173 | * @param operation Memory map operation type | ||
| 174 | * @param flags Memory allocation flags | ||
| 175 | */ | ||
| 176 | u32 MapBlock_Heap(u32 size, u32 operation, u32 permissions) { | ||
| 177 | MemoryBlock block; | ||
| 178 | |||
| 179 | block.base_address = HEAP_VADDR; | ||
| 180 | block.size = size; | ||
| 181 | block.operation = operation; | ||
| 182 | block.permissions = permissions; | ||
| 183 | |||
| 184 | if (g_heap_map.size() > 0) { | ||
| 185 | const MemoryBlock last_block = g_heap_map.rbegin()->second; | ||
| 186 | block.address = last_block.address + last_block.size; | ||
| 187 | } | ||
| 188 | g_heap_map[block.GetVirtualAddress()] = block; | ||
| 189 | |||
| 190 | return block.GetVirtualAddress(); | ||
| 191 | } | ||
| 192 | |||
| 193 | /** | ||
| 124 | * Maps a block of memory on the GSP heap | 194 | * Maps a block of memory on the GSP heap |
| 125 | * @param size Size of block in bytes | 195 | * @param size Size of block in bytes |
| 196 | * @param operation Memory map operation type | ||
| 126 | * @param flags Memory allocation flags | 197 | * @param flags Memory allocation flags |
| 127 | */ | 198 | */ |
| 128 | u32 MapBlock_HeapGSP(u32 size, u32 operation, u32 permissions) { | 199 | u32 MapBlock_HeapGSP(u32 size, u32 operation, u32 permissions) { |
| 129 | HeapBlock block; | 200 | MemoryBlock block; |
| 130 | 201 | ||
| 131 | block.base_address = HEAP_GSP_VADDR; | 202 | block.base_address = HEAP_GSP_VADDR; |
| 132 | block.size = size; | 203 | block.size = size; |
| @@ -134,7 +205,7 @@ u32 MapBlock_HeapGSP(u32 size, u32 operation, u32 permissions) { | |||
| 134 | block.permissions = permissions; | 205 | block.permissions = permissions; |
| 135 | 206 | ||
| 136 | if (g_heap_gsp_map.size() > 0) { | 207 | if (g_heap_gsp_map.size() > 0) { |
| 137 | const HeapBlock last_block = g_heap_gsp_map.rbegin()->second; | 208 | const MemoryBlock last_block = g_heap_gsp_map.rbegin()->second; |
| 138 | block.address = last_block.address + last_block.size; | 209 | block.address = last_block.address + last_block.size; |
| 139 | } | 210 | } |
| 140 | g_heap_gsp_map[block.GetVirtualAddress()] = block; | 211 | g_heap_gsp_map[block.GetVirtualAddress()] = block; |