author    bunnei  2021-08-06 23:27:33 -0700
committer bunnei  2021-12-06 16:39:16 -0800
commit    284015dfd7d0b963b9ad0d196ee283ef2287b812 (patch)
tree      6a3bcb829075d98d63317242e5617dd7ea006958 /src/core/hle/kernel
parent    core: cpu_manager: Use KScopedDisableDispatch. (diff)
core: hle: kernel: k_scheduler: Improve DisableScheduling and EnableScheduling.
Diffstat (limited to 'src/core/hle/kernel')
-rw-r--r--  src/core/hle/kernel/k_scheduler.cpp  23
1 file changed, 9 insertions(+), 14 deletions(-)
diff --git a/src/core/hle/kernel/k_scheduler.cpp b/src/core/hle/kernel/k_scheduler.cpp
index f5236dfea..6ddbae52c 100644
--- a/src/core/hle/kernel/k_scheduler.cpp
+++ b/src/core/hle/kernel/k_scheduler.cpp
@@ -376,20 +376,18 @@ void KScheduler::ClearSchedulerUpdateNeeded(KernelCore& kernel) {
 }
 
 void KScheduler::DisableScheduling(KernelCore& kernel) {
-    if (auto* scheduler = kernel.CurrentScheduler(); scheduler) {
-        ASSERT(scheduler->GetCurrentThread()->GetDisableDispatchCount() >= 0);
-        scheduler->GetCurrentThread()->DisableDispatch();
-    }
+    ASSERT(GetCurrentThreadPointer(kernel)->GetDisableDispatchCount() >= 0);
+    GetCurrentThreadPointer(kernel)->DisableDispatch();
 }
 
 void KScheduler::EnableScheduling(KernelCore& kernel, u64 cores_needing_scheduling) {
-    if (auto* scheduler = kernel.CurrentScheduler(); scheduler) {
-        ASSERT(scheduler->GetCurrentThread()->GetDisableDispatchCount() >= 1);
-        if (scheduler->GetCurrentThread()->GetDisableDispatchCount() >= 1) {
-            scheduler->GetCurrentThread()->EnableDispatch();
-        }
+    ASSERT(GetCurrentThreadPointer(kernel)->GetDisableDispatchCount() >= 1);
+
+    if (GetCurrentThreadPointer(kernel)->GetDisableDispatchCount() > 1) {
+        GetCurrentThreadPointer(kernel)->EnableDispatch();
+    } else {
+        RescheduleCores(kernel, cores_needing_scheduling);
     }
-    RescheduleCores(kernel, cores_needing_scheduling);
 }
 
 u64 KScheduler::UpdateHighestPriorityThreads(KernelCore& kernel) {
@@ -646,6 +644,7 @@ void KScheduler::RescheduleCurrentCore() {
     if (phys_core.IsInterrupted()) {
         phys_core.ClearInterrupt();
     }
+
     guard.Lock();
     if (state.needs_scheduling.load()) {
         Schedule();
@@ -662,10 +661,6 @@ void KScheduler::OnThreadStart() {
 void KScheduler::Unload(KThread* thread) {
     ASSERT(thread);
 
-    if (!thread) {
-        return;
-    }
-
     LOG_TRACE(Kernel, "core {}, unload thread {}", core_id, thread ? thread->GetName() : "nullptr");
 
     if (thread->IsCallingSvc()) {
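
Note on the first hunk: RescheduleCores() now runs only when the outermost EnableScheduling() call unwinds the current thread's disable-dispatch count, rather than unconditionally on every call. Below is a minimal standalone sketch of that counting scheme, not yuzu code: the two function names mirror the commit, everything else (the thread-local counter, the stub RescheduleCores(), main()) is hypothetical, and the outermost decrement is folded into EnableScheduling() purely to keep the model self-contained, whereas the real scheduler handles that final unwind elsewhere.

// Standalone sketch of the disable-dispatch counting behind this commit.
// Names other than DisableScheduling/EnableScheduling are illustrative only.
#include <cassert>
#include <cstdint>
#include <cstdio>

namespace sketch {

// Per-thread nesting counter, analogous to KThread's disable-dispatch count.
thread_local std::int32_t disable_dispatch_count = 0;

// Stub: the real function interrupts every core whose bit is set in the mask.
void RescheduleCores(std::uint64_t cores_needing_scheduling) {
    std::printf("reschedule cores, mask=0x%llx\n",
                static_cast<unsigned long long>(cores_needing_scheduling));
}

void DisableScheduling() {
    assert(disable_dispatch_count >= 0);
    ++disable_dispatch_count;
}

void EnableScheduling(std::uint64_t cores_needing_scheduling) {
    assert(disable_dispatch_count >= 1);
    if (disable_dispatch_count > 1) {
        // Nested caller: only unwind one nesting level.
        --disable_dispatch_count;
    } else {
        // Outermost caller: perform the deferred reschedule, then unwind.
        RescheduleCores(cores_needing_scheduling);
        --disable_dispatch_count;
    }
}

} // namespace sketch

int main() {
    sketch::DisableScheduling();
    sketch::DisableScheduling();     // nested critical section
    sketch::EnableScheduling(0x2);   // inner call: decrement only
    sketch::EnableScheduling(0x2);   // outermost call: triggers the reschedule
}

Deferring the reschedule to the outermost level avoids redundant core interrupts when scheduling is disabled in nested scopes, such as the KScopedDisableDispatch usage introduced in the parent commit.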