Diffstat (limited to 'src/core/hle/kernel/thread.cpp')
-rw-r--r--  src/core/hle/kernel/thread.cpp | 50
1 file changed, 13 insertions, 37 deletions
diff --git a/src/core/hle/kernel/thread.cpp b/src/core/hle/kernel/thread.cpp
index 38b4a0987..804e07f2b 100644
--- a/src/core/hle/kernel/thread.cpp
+++ b/src/core/hle/kernel/thread.cpp
@@ -17,10 +17,10 @@
 #include "core/hardware_properties.h"
 #include "core/hle/kernel/errors.h"
 #include "core/hle/kernel/handle_table.h"
+#include "core/hle/kernel/k_scheduler.h"
 #include "core/hle/kernel/kernel.h"
 #include "core/hle/kernel/object.h"
 #include "core/hle/kernel/process.h"
-#include "core/hle/kernel/scheduler.h"
 #include "core/hle/kernel/thread.h"
 #include "core/hle/kernel/time_manager.h"
 #include "core/hle/result.h"
@@ -186,9 +186,11 @@ ResultVal<std::shared_ptr<Thread>> Thread::Create(Core::System& system, ThreadTy
     thread->status = ThreadStatus::Dormant;
     thread->entry_point = entry_point;
     thread->stack_top = stack_top;
+    thread->disable_count = 1;
     thread->tpidr_el0 = 0;
     thread->nominal_priority = thread->current_priority = priority;
-    thread->last_running_ticks = 0;
+    thread->schedule_count = -1;
+    thread->last_scheduled_tick = 0;
     thread->processor_id = processor_id;
     thread->ideal_core = processor_id;
     thread->affinity_mask.SetAffinity(processor_id, true);
@@ -201,7 +203,7 @@ ResultVal<std::shared_ptr<Thread>> Thread::Create(Core::System& system, ThreadTy
     thread->owner_process = owner_process;
     thread->type = type_flags;
     if ((type_flags & THREADTYPE_IDLE) == 0) {
-        auto& scheduler = kernel.GlobalScheduler();
+        auto& scheduler = kernel.GlobalSchedulerContext();
         scheduler.AddThread(thread);
     }
     if (owner_process) {
@@ -402,39 +404,12 @@ ResultCode Thread::Sleep(s64 nanoseconds) {
     return RESULT_SUCCESS;
 }
 
-std::pair<ResultCode, bool> Thread::YieldSimple() {
-    bool is_redundant = false;
-    {
-        SchedulerLock lock(kernel);
-        is_redundant = kernel.GlobalScheduler().YieldThread(this);
-    }
-    return {RESULT_SUCCESS, is_redundant};
-}
-
-std::pair<ResultCode, bool> Thread::YieldAndBalanceLoad() {
-    bool is_redundant = false;
-    {
-        SchedulerLock lock(kernel);
-        is_redundant = kernel.GlobalScheduler().YieldThreadAndBalanceLoad(this);
-    }
-    return {RESULT_SUCCESS, is_redundant};
-}
-
-std::pair<ResultCode, bool> Thread::YieldAndWaitForLoadBalancing() {
-    bool is_redundant = false;
-    {
-        SchedulerLock lock(kernel);
-        is_redundant = kernel.GlobalScheduler().YieldThreadAndWaitForLoadBalancing(this);
-    }
-    return {RESULT_SUCCESS, is_redundant};
-}
-
 void Thread::AddSchedulingFlag(ThreadSchedFlags flag) {
     const u32 old_state = scheduling_state;
     pausing_state |= static_cast<u32>(flag);
     const u32 base_scheduling = static_cast<u32>(GetSchedulingStatus());
     scheduling_state = base_scheduling | pausing_state;
-    kernel.GlobalScheduler().AdjustSchedulingOnStatus(this, old_state);
+    KScheduler::OnThreadStateChanged(kernel, this, old_state);
 }
 
 void Thread::RemoveSchedulingFlag(ThreadSchedFlags flag) {
@@ -442,19 +417,20 @@ void Thread::RemoveSchedulingFlag(ThreadSchedFlags flag) {
     pausing_state &= ~static_cast<u32>(flag);
     const u32 base_scheduling = static_cast<u32>(GetSchedulingStatus());
     scheduling_state = base_scheduling | pausing_state;
-    kernel.GlobalScheduler().AdjustSchedulingOnStatus(this, old_state);
+    KScheduler::OnThreadStateChanged(kernel, this, old_state);
 }
 
 void Thread::SetSchedulingStatus(ThreadSchedStatus new_status) {
     const u32 old_state = scheduling_state;
     scheduling_state = (scheduling_state & static_cast<u32>(ThreadSchedMasks::HighMask)) |
                        static_cast<u32>(new_status);
-    kernel.GlobalScheduler().AdjustSchedulingOnStatus(this, old_state);
+    KScheduler::OnThreadStateChanged(kernel, this, old_state);
 }
 
 void Thread::SetCurrentPriority(u32 new_priority) {
     const u32 old_priority = std::exchange(current_priority, new_priority);
-    kernel.GlobalScheduler().AdjustSchedulingOnPriority(this, old_priority);
+    KScheduler::OnThreadPriorityChanged(kernel, this, kernel.CurrentScheduler()->GetCurrentThread(),
+                                        old_priority);
 }
 
 ResultCode Thread::SetCoreAndAffinityMask(s32 new_core, u64 new_affinity_mask) {
@@ -480,10 +456,10 @@ ResultCode Thread::SetCoreAndAffinityMask(s32 new_core, u64 new_affinity_mask) {
     if (use_override) {
         ideal_core_override = new_core;
     } else {
-        const auto old_affinity_mask = affinity_mask.GetAffinityMask();
+        const auto old_affinity_mask = affinity_mask;
         affinity_mask.SetAffinityMask(new_affinity_mask);
         ideal_core = new_core;
-        if (old_affinity_mask != new_affinity_mask) {
+        if (old_affinity_mask.GetAffinityMask() != new_affinity_mask) {
             const s32 old_core = processor_id;
             if (processor_id >= 0 && !affinity_mask.GetAffinity(processor_id)) {
                 if (static_cast<s32>(ideal_core) < 0) {
@@ -493,7 +469,7 @@ ResultCode Thread::SetCoreAndAffinityMask(s32 new_core, u64 new_affinity_mask) {
                     processor_id = ideal_core;
                 }
             }
-            kernel.GlobalScheduler().AdjustSchedulingOnAffinity(this, old_affinity_mask, old_core);
+            KScheduler::OnThreadAffinityMaskChanged(kernel, this, old_affinity_mask, old_core);
         }
     }
     return RESULT_SUCCESS;
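
The pattern running through every hunk above is that Thread no longer reaches into kernel.GlobalScheduler() to adjust scheduling; it notifies the scheduler through static KScheduler hooks (OnThreadStateChanged, OnThreadPriorityChanged, OnThreadAffinityMaskChanged), passing the kernel reference together with the value the field held before the mutation. The sketch below is a minimal, self-contained illustration of that call shape only; KernelCore, Thread, and KScheduler here are stand-in stubs, not yuzu's real classes, and the hook body is invented purely to show where a reschedule decision would hang off the old/new state pair.

#include <cstdint>
#include <iostream>

// Stand-in stubs -- not yuzu's real types. Only the call pattern from the diff is modeled.
struct KernelCore {};
struct Thread;

struct KScheduler {
    // Static hook, mirroring KScheduler::OnThreadStateChanged(kernel, thread, old_state):
    // the scheduler receives the owning kernel and the state the thread had before the change.
    static void OnThreadStateChanged(KernelCore& kernel, Thread* thread, std::uint32_t old_state);
};

struct Thread {
    explicit Thread(KernelCore& k) : kernel{k} {}

    void SetSchedulingStatus(std::uint32_t new_status) {
        const std::uint32_t old_state = scheduling_state; // capture *before* mutating
        scheduling_state = new_status;
        KScheduler::OnThreadStateChanged(kernel, this, old_state);
    }

    KernelCore& kernel;
    std::uint32_t scheduling_state = 0;
};

void KScheduler::OnThreadStateChanged(KernelCore&, Thread* thread, std::uint32_t old_state) {
    // A real scheduler would compare the old and new state and requeue the thread if needed.
    std::cout << "state change: " << old_state << " -> " << thread->scheduling_state << '\n';
}

int main() {
    KernelCore kernel;
    Thread thread{kernel};
    thread.SetSchedulingStatus(1); // prints "state change: 0 -> 1"
}

Passing the kernel explicitly is what lets the hooks be static: Thread no longer needs a handle to a particular scheduler instance, and the same callback shape extends to the priority and affinity variants in the diff, which additionally carry the old priority and the old affinity mask.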