Diffstat (limited to 'src/core/hle/kernel/kernel.cpp')
-rw-r--r--  src/core/hle/kernel/kernel.cpp  108
1 file changed, 57 insertions, 51 deletions
diff --git a/src/core/hle/kernel/kernel.cpp b/src/core/hle/kernel/kernel.cpp
index 98ecaf12f..29809b2c5 100644
--- a/src/core/hle/kernel/kernel.cpp
+++ b/src/core/hle/kernel/kernel.cpp
@@ -271,9 +271,9 @@ struct KernelCore::Impl {
         system.CoreTiming().ScheduleLoopingEvent(time_interval, time_interval, preemption_event);
     }
 
-    void InitializeResourceManagers(KernelCore& kernel, VAddr address, size_t size) {
+    void InitializeResourceManagers(KernelCore& kernel, KVirtualAddress address, size_t size) {
         // Ensure that the buffer is suitable for our use.
-        ASSERT(Common::IsAligned(address, PageSize));
+        ASSERT(Common::IsAligned(GetInteger(address), PageSize));
         ASSERT(Common::IsAligned(size, PageSize));
 
         // Ensure that we have space for our reference counts.
@@ -462,29 +462,30 @@ struct KernelCore::Impl {
             KernelPhysicalAddressSpaceBase + KernelPhysicalAddressSpaceSize - 1);
 
         // Save start and end for ease of use.
-        const VAddr code_start_virt_addr = KernelVirtualAddressCodeBase;
-        const VAddr code_end_virt_addr = KernelVirtualAddressCodeEnd;
+        constexpr KVirtualAddress code_start_virt_addr = KernelVirtualAddressCodeBase;
+        constexpr KVirtualAddress code_end_virt_addr = KernelVirtualAddressCodeEnd;
 
         // Setup the containing kernel region.
         constexpr size_t KernelRegionSize = 1_GiB;
         constexpr size_t KernelRegionAlign = 1_GiB;
-        constexpr VAddr kernel_region_start =
-            Common::AlignDown(code_start_virt_addr, KernelRegionAlign);
+        constexpr KVirtualAddress kernel_region_start =
+            Common::AlignDown(GetInteger(code_start_virt_addr), KernelRegionAlign);
         size_t kernel_region_size = KernelRegionSize;
         if (!(kernel_region_start + KernelRegionSize - 1 <= KernelVirtualAddressSpaceLast)) {
-            kernel_region_size = KernelVirtualAddressSpaceEnd - kernel_region_start;
+            kernel_region_size = KernelVirtualAddressSpaceEnd - GetInteger(kernel_region_start);
         }
         ASSERT(memory_layout->GetVirtualMemoryRegionTree().Insert(
-            kernel_region_start, kernel_region_size, KMemoryRegionType_Kernel));
+            GetInteger(kernel_region_start), kernel_region_size, KMemoryRegionType_Kernel));
 
         // Setup the code region.
         constexpr size_t CodeRegionAlign = PageSize;
-        constexpr VAddr code_region_start =
-            Common::AlignDown(code_start_virt_addr, CodeRegionAlign);
-        constexpr VAddr code_region_end = Common::AlignUp(code_end_virt_addr, CodeRegionAlign);
+        constexpr KVirtualAddress code_region_start =
+            Common::AlignDown(GetInteger(code_start_virt_addr), CodeRegionAlign);
+        constexpr KVirtualAddress code_region_end =
+            Common::AlignUp(GetInteger(code_end_virt_addr), CodeRegionAlign);
         constexpr size_t code_region_size = code_region_end - code_region_start;
         ASSERT(memory_layout->GetVirtualMemoryRegionTree().Insert(
-            code_region_start, code_region_size, KMemoryRegionType_KernelCode));
+            GetInteger(code_region_start), code_region_size, KMemoryRegionType_KernelCode));
 
         // Setup board-specific device physical regions.
         Init::SetupDevicePhysicalMemoryRegions(*memory_layout);
@@ -520,11 +521,11 @@ struct KernelCore::Impl {
         ASSERT(misc_region_size > 0);
 
         // Setup the misc region.
-        const VAddr misc_region_start =
+        const KVirtualAddress misc_region_start =
             memory_layout->GetVirtualMemoryRegionTree().GetRandomAlignedRegion(
                 misc_region_size, MiscRegionAlign, KMemoryRegionType_Kernel);
         ASSERT(memory_layout->GetVirtualMemoryRegionTree().Insert(
-            misc_region_start, misc_region_size, KMemoryRegionType_KernelMisc));
+            GetInteger(misc_region_start), misc_region_size, KMemoryRegionType_KernelMisc));
 
         // Determine if we'll use extra thread resources.
         const bool use_extra_resources = KSystemControl::Init::ShouldIncreaseThreadResourceLimit();
@@ -532,11 +533,11 @@ struct KernelCore::Impl {
         // Setup the stack region.
         constexpr size_t StackRegionSize = 14_MiB;
         constexpr size_t StackRegionAlign = KernelAslrAlignment;
-        const VAddr stack_region_start =
+        const KVirtualAddress stack_region_start =
             memory_layout->GetVirtualMemoryRegionTree().GetRandomAlignedRegion(
                 StackRegionSize, StackRegionAlign, KMemoryRegionType_Kernel);
         ASSERT(memory_layout->GetVirtualMemoryRegionTree().Insert(
-            stack_region_start, StackRegionSize, KMemoryRegionType_KernelStack));
+            GetInteger(stack_region_start), StackRegionSize, KMemoryRegionType_KernelStack));
 
         // Determine the size of the resource region.
         const size_t resource_region_size =
@@ -548,29 +549,29 @@ struct KernelCore::Impl {
         ASSERT(slab_region_size <= resource_region_size);
 
         // Setup the slab region.
-        const PAddr code_start_phys_addr = KernelPhysicalAddressCodeBase;
-        const PAddr code_end_phys_addr = code_start_phys_addr + code_region_size;
-        const PAddr slab_start_phys_addr = code_end_phys_addr;
-        const PAddr slab_end_phys_addr = slab_start_phys_addr + slab_region_size;
+        const KPhysicalAddress code_start_phys_addr = KernelPhysicalAddressCodeBase;
+        const KPhysicalAddress code_end_phys_addr = code_start_phys_addr + code_region_size;
+        const KPhysicalAddress slab_start_phys_addr = code_end_phys_addr;
+        const KPhysicalAddress slab_end_phys_addr = slab_start_phys_addr + slab_region_size;
         constexpr size_t SlabRegionAlign = KernelAslrAlignment;
         const size_t slab_region_needed_size =
-            Common::AlignUp(code_end_phys_addr + slab_region_size, SlabRegionAlign) -
-            Common::AlignDown(code_end_phys_addr, SlabRegionAlign);
-        const VAddr slab_region_start =
+            Common::AlignUp(GetInteger(code_end_phys_addr) + slab_region_size, SlabRegionAlign) -
+            Common::AlignDown(GetInteger(code_end_phys_addr), SlabRegionAlign);
+        const KVirtualAddress slab_region_start =
             memory_layout->GetVirtualMemoryRegionTree().GetRandomAlignedRegion(
                 slab_region_needed_size, SlabRegionAlign, KMemoryRegionType_Kernel) +
-            (code_end_phys_addr % SlabRegionAlign);
+            (GetInteger(code_end_phys_addr) % SlabRegionAlign);
         ASSERT(memory_layout->GetVirtualMemoryRegionTree().Insert(
-            slab_region_start, slab_region_size, KMemoryRegionType_KernelSlab));
+            GetInteger(slab_region_start), slab_region_size, KMemoryRegionType_KernelSlab));
 
         // Setup the temp region.
         constexpr size_t TempRegionSize = 128_MiB;
         constexpr size_t TempRegionAlign = KernelAslrAlignment;
-        const VAddr temp_region_start =
+        const KVirtualAddress temp_region_start =
             memory_layout->GetVirtualMemoryRegionTree().GetRandomAlignedRegion(
                 TempRegionSize, TempRegionAlign, KMemoryRegionType_Kernel);
-        ASSERT(memory_layout->GetVirtualMemoryRegionTree().Insert(temp_region_start, TempRegionSize,
-                                                                  KMemoryRegionType_KernelTemp));
+        ASSERT(memory_layout->GetVirtualMemoryRegionTree().Insert(
+            GetInteger(temp_region_start), TempRegionSize, KMemoryRegionType_KernelTemp));
 
         // Automatically map in devices that have auto-map attributes.
         for (auto& region : memory_layout->GetPhysicalMemoryRegionTree()) {
@@ -596,35 +597,37 @@ struct KernelCore::Impl {
             region.SetTypeAttribute(KMemoryRegionAttr_DidKernelMap);
 
             // Create a virtual pair region and insert it into the tree.
-            const PAddr map_phys_addr = Common::AlignDown(region.GetAddress(), PageSize);
+            const KPhysicalAddress map_phys_addr = Common::AlignDown(region.GetAddress(), PageSize);
             const size_t map_size =
-                Common::AlignUp(region.GetEndAddress(), PageSize) - map_phys_addr;
-            const VAddr map_virt_addr =
+                Common::AlignUp(region.GetEndAddress(), PageSize) - GetInteger(map_phys_addr);
+            const KVirtualAddress map_virt_addr =
                 memory_layout->GetVirtualMemoryRegionTree().GetRandomAlignedRegionWithGuard(
                     map_size, PageSize, KMemoryRegionType_KernelMisc, PageSize);
             ASSERT(memory_layout->GetVirtualMemoryRegionTree().Insert(
-                map_virt_addr, map_size, KMemoryRegionType_KernelMiscMappedDevice));
-            region.SetPairAddress(map_virt_addr + region.GetAddress() - map_phys_addr);
+                GetInteger(map_virt_addr), map_size, KMemoryRegionType_KernelMiscMappedDevice));
+            region.SetPairAddress(GetInteger(map_virt_addr) + region.GetAddress() -
+                                  GetInteger(map_phys_addr));
         }
 
         Init::SetupDramPhysicalMemoryRegions(*memory_layout);
 
         // Insert a physical region for the kernel code region.
         ASSERT(memory_layout->GetPhysicalMemoryRegionTree().Insert(
-            code_start_phys_addr, code_region_size, KMemoryRegionType_DramKernelCode));
+            GetInteger(code_start_phys_addr), code_region_size, KMemoryRegionType_DramKernelCode));
 
         // Insert a physical region for the kernel slab region.
         ASSERT(memory_layout->GetPhysicalMemoryRegionTree().Insert(
-            slab_start_phys_addr, slab_region_size, KMemoryRegionType_DramKernelSlab));
+            GetInteger(slab_start_phys_addr), slab_region_size, KMemoryRegionType_DramKernelSlab));
 
         // Determine size available for kernel page table heaps, requiring > 8 MB.
-        const PAddr resource_end_phys_addr = slab_start_phys_addr + resource_region_size;
+        const KPhysicalAddress resource_end_phys_addr = slab_start_phys_addr + resource_region_size;
         const size_t page_table_heap_size = resource_end_phys_addr - slab_end_phys_addr;
         ASSERT(page_table_heap_size / 4_MiB > 2);
 
         // Insert a physical region for the kernel page table heap region
         ASSERT(memory_layout->GetPhysicalMemoryRegionTree().Insert(
-            slab_end_phys_addr, page_table_heap_size, KMemoryRegionType_DramKernelPtHeap));
+            GetInteger(slab_end_phys_addr), page_table_heap_size,
+            KMemoryRegionType_DramKernelPtHeap));
 
         // All DRAM regions that we haven't tagged by this point will be mapped under the linear
         // mapping. Tag them.
@@ -646,20 +649,21 @@ struct KernelCore::Impl {
 
         // Setup the linear mapping region.
         constexpr size_t LinearRegionAlign = 1_GiB;
-        const PAddr aligned_linear_phys_start =
+        const KPhysicalAddress aligned_linear_phys_start =
             Common::AlignDown(linear_extents.GetAddress(), LinearRegionAlign);
         const size_t linear_region_size =
             Common::AlignUp(linear_extents.GetEndAddress(), LinearRegionAlign) -
-            aligned_linear_phys_start;
-        const VAddr linear_region_start =
+            GetInteger(aligned_linear_phys_start);
+        const KVirtualAddress linear_region_start =
             memory_layout->GetVirtualMemoryRegionTree().GetRandomAlignedRegionWithGuard(
                 linear_region_size, LinearRegionAlign, KMemoryRegionType_None, LinearRegionAlign);
 
-        const u64 linear_region_phys_to_virt_diff = linear_region_start - aligned_linear_phys_start;
+        const u64 linear_region_phys_to_virt_diff =
+            GetInteger(linear_region_start) - GetInteger(aligned_linear_phys_start);
 
         // Map and create regions for all the linearly-mapped data.
         {
-            PAddr cur_phys_addr = 0;
+            KPhysicalAddress cur_phys_addr = 0;
             u64 cur_size = 0;
             for (auto& region : memory_layout->GetPhysicalMemoryRegionTree()) {
                 if (!region.HasTypeAttribute(KMemoryRegionAttr_LinearMapped)) {
@@ -678,15 +682,16 @@ struct KernelCore::Impl {
                     cur_size = region.GetSize();
                 }
 
-                const VAddr region_virt_addr =
+                const KVirtualAddress region_virt_addr =
                     region.GetAddress() + linear_region_phys_to_virt_diff;
                 ASSERT(memory_layout->GetVirtualMemoryRegionTree().Insert(
-                    region_virt_addr, region.GetSize(),
+                    GetInteger(region_virt_addr), region.GetSize(),
                     GetTypeForVirtualLinearMapping(region.GetType())));
-                region.SetPairAddress(region_virt_addr);
+                region.SetPairAddress(GetInteger(region_virt_addr));
 
                 KMemoryRegion* virt_region =
-                    memory_layout->GetVirtualMemoryRegionTree().FindModifiable(region_virt_addr);
+                    memory_layout->GetVirtualMemoryRegionTree().FindModifiable(
+                        GetInteger(region_virt_addr));
                 ASSERT(virt_region != nullptr);
                 virt_region->SetPairAddress(region.GetAddress());
             }
@@ -694,10 +699,11 @@ struct KernelCore::Impl {
 
         // Insert regions for the initial page table region.
         ASSERT(memory_layout->GetPhysicalMemoryRegionTree().Insert(
-            resource_end_phys_addr, KernelPageTableHeapSize, KMemoryRegionType_DramKernelInitPt));
+            GetInteger(resource_end_phys_addr), KernelPageTableHeapSize,
+            KMemoryRegionType_DramKernelInitPt));
         ASSERT(memory_layout->GetVirtualMemoryRegionTree().Insert(
-            resource_end_phys_addr + linear_region_phys_to_virt_diff, KernelPageTableHeapSize,
-            KMemoryRegionType_VirtualDramKernelInitPt));
+            GetInteger(resource_end_phys_addr) + linear_region_phys_to_virt_diff,
+            KernelPageTableHeapSize, KMemoryRegionType_VirtualDramKernelInitPt));
 
         // All linear-mapped DRAM regions that we haven't tagged by this point will be allocated to
         // some pool partition. Tag them.
@@ -969,12 +975,12 @@ void KernelCore::InvalidateAllInstructionCaches() {
     }
 }
 
-void KernelCore::InvalidateCpuInstructionCacheRange(VAddr addr, std::size_t size) {
+void KernelCore::InvalidateCpuInstructionCacheRange(KProcessAddress addr, std::size_t size) {
     for (auto& physical_core : impl->cores) {
         if (!physical_core->IsInitialized()) {
             continue;
         }
-        physical_core->ArmInterface().InvalidateCacheRange(addr, size);
+        physical_core->ArmInterface().InvalidateCacheRange(GetInteger(addr), size);
     }
 }
 
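The whole diff follows a single pattern: raw VAddr/PAddr integers become the strongly typed KVirtualAddress/KPhysicalAddress/KProcessAddress wrappers, and GetInteger() unwraps a typed address back to a plain integer only where one is genuinely required (alignment helpers, region-tree Insert calls, the ARM interface). As a rough illustration of how such a wrapper still composes with integer-based code, here is a minimal, hypothetical sketch; the names below are illustrative only and are not yuzu's actual typed-address implementation.

// Hypothetical sketch of a strongly typed address wrapper in the spirit of
// KVirtualAddress/KPhysicalAddress. Not yuzu's actual implementation.
#include <cstdint>

template <bool IsVirtual>
class TypedAddress {
public:
    constexpr TypedAddress() = default;
    constexpr TypedAddress(std::uint64_t address) : m_address{address} {}

    // Arithmetic with plain offsets stays in the typed domain.
    constexpr TypedAddress operator+(std::uint64_t offset) const {
        return TypedAddress{m_address + offset};
    }

    // The one sanctioned escape hatch back to a raw integer, used at call
    // sites such as Common::IsAligned(GetInteger(address), PageSize).
    friend constexpr std::uint64_t GetInteger(TypedAddress address) {
        return address.m_address;
    }

private:
    std::uint64_t m_address{};
};

using VirtualAddress = TypedAddress<true>;   // analogue of KVirtualAddress
using PhysicalAddress = TypedAddress<false>; // analogue of KPhysicalAddress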