yuzu/src/core/hle/kernel/k_scheduler.h


// SPDX-FileCopyrightText: Copyright 2020 yuzu Emulator Project
// SPDX-License-Identifier: GPL-2.0-or-later

#pragma once

#include <atomic>

#include "common/common_types.h"
#include "core/hle/kernel/global_scheduler_context.h"
#include "core/hle/kernel/k_priority_queue.h"
#include "core/hle/kernel/k_scheduler_lock.h"
#include "core/hle/kernel/k_scoped_lock.h"
#include "core/hle/kernel/k_spin_lock.h"
#include "core/hle/kernel/k_thread.h"

namespace Common {
class Fiber;
}

namespace Core {
class System;
}

namespace Kernel {

class KernelCore;
class KInterruptTaskManager;
class KProcess;
class KThread;
class KScopedDisableDispatch;
class KScopedSchedulerLock;
class KScopedSchedulerLockAndSleep;
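
// Per-core thread scheduler, modeled on the Horizon OS kernel's KScheduler.
// Each emulated CPU core owns one instance, which selects the highest-priority
// runnable thread for that core and performs the switch to it.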
class KScheduler final {
public:
    YUZU_NON_COPYABLE(KScheduler);
    YUZU_NON_MOVEABLE(KScheduler);

    using LockType = KAbstractSchedulerLock<KScheduler>;

    explicit KScheduler(KernelCore& kernel);
    ~KScheduler();

    void Initialize(KThread* main_thread, KThread* idle_thread, s32 core_id);
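
    // Lifecycle and context-switch hooks: Activate() begins scheduling on this
    // core, and Unload()/Reload() save and restore a thread's context around a
    // switch.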
    void Activate();
    void OnThreadStart();
    void Unload(KThread* thread);
    void Reload(KThread* thread);

    void SetInterruptTaskRunnable();
    void RequestScheduleOnInterrupt();
    void PreemptSingleCore();

    u64 GetIdleCount() {
        return m_state.idle_count;
    }

    KThread* GetIdleThread() const {
        return m_idle_thread;
    }

    bool IsIdle() const {
        return m_current_thread.load() == m_idle_thread;
    }

    KThread* GetPreviousThread() const {
        return m_state.prev_thread;
    }

    KThread* GetSchedulerCurrentThread() const {
        return m_current_thread.load();
    }

    s64 GetLastContextSwitchTime() const {
        return m_last_context_switch_time;
    }

    // Static public API.
    static bool CanSchedule(KernelCore& kernel) {
        return GetCurrentThread(kernel).GetDisableDispatchCount() == 0;
    }
    static bool IsSchedulerLockedByCurrentThread(KernelCore& kernel) {
        return kernel.GlobalSchedulerContext().m_scheduler_lock.IsLockedByCurrentThread();
    }

    static bool IsSchedulerUpdateNeeded(KernelCore& kernel) {
        return kernel.GlobalSchedulerContext().m_scheduler_update_needed;
    }
    static void SetSchedulerUpdateNeeded(KernelCore& kernel) {
        kernel.GlobalSchedulerContext().m_scheduler_update_needed = true;
    }
    static void ClearSchedulerUpdateNeeded(KernelCore& kernel) {
        kernel.GlobalSchedulerContext().m_scheduler_update_needed = false;
    }
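
    // Dispatch control: scheduling is suppressed while the current thread's
    // disable-dispatch count is nonzero; EnableScheduling() lowers the count
    // and reschedules any cores set in the cores_needing_scheduling bitmask.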
    static void DisableScheduling(KernelCore& kernel);
    static void EnableScheduling(KernelCore& kernel, u64 cores_needing_scheduling);

    static u64 UpdateHighestPriorityThreads(KernelCore& kernel);
    static void ClearPreviousThread(KernelCore& kernel, KThread* thread);

    static void OnThreadStateChanged(KernelCore& kernel, KThread* thread, ThreadState old_state);
    static void OnThreadPriorityChanged(KernelCore& kernel, KThread* thread, s32 old_priority);
    static void OnThreadAffinityMaskChanged(KernelCore& kernel, KThread* thread,
                                            const KAffinityMask& old_affinity, s32 old_core);

    static void RotateScheduledQueue(KernelCore& kernel, s32 core_id, s32 priority);
    static void RescheduleCores(KernelCore& kernel, u64 cores_needing_scheduling);
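
    // Cooperative yield variants: stay within the current core's priority
    // queue, allow migration to another core, or yield to any runnable thread.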
    static void YieldWithoutCoreMigration(KernelCore& kernel);
    static void YieldWithCoreMigration(KernelCore& kernel);
    static void YieldToAnyThread(KernelCore& kernel);

private:
    // Static private API.
    static KSchedulerPriorityQueue& GetPriorityQueue(KernelCore& kernel) {
        return kernel.GlobalSchedulerContext().m_priority_queue;
    }
    static u64 UpdateHighestPriorityThreadsImpl(KernelCore& kernel);

    static void RescheduleCurrentHLEThread(KernelCore& kernel);

    // Instanced private API.
    void ScheduleImpl();
    void ScheduleImplFiber();

    void SwitchThread(KThread* next_thread);

    void Schedule();
    void ScheduleOnInterrupt();

    void RescheduleOtherCores(u64 cores_needing_scheduling);
    void RescheduleCurrentCore();
    void RescheduleCurrentCoreImpl();

    u64 UpdateHighestPriorityThread(KThread* thread);

private:
    friend class KScopedDisableDispatch;
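
    // Per-core scheduling state. needs_scheduling flags that this core should
    // reschedule; prev_thread records the thread most recently unloaded from
    // the core.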
    struct SchedulingState {
        std::atomic<bool> needs_scheduling{false};
        bool interrupt_task_runnable{false};
        bool should_count_idle{false};
        u64 idle_count{0};
        KThread* highest_priority_thread{nullptr};
        void* idle_thread_stack{nullptr};
        std::atomic<KThread*> prev_thread{nullptr};
        KInterruptTaskManager* interrupt_task_manager{nullptr};
    };

    KernelCore& m_kernel;
    SchedulingState m_state;
    bool m_is_active{false};
    s32 m_core_id{0};
    s64 m_last_context_switch_time{0};
    KThread* m_idle_thread{nullptr};
    std::atomic<KThread*> m_current_thread{nullptr};
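
    // Context-switch scratch state used by the fiber-based switch path
    // (ScheduleImplFiber): these fields pass the switch arguments across the
    // host-fiber boundary.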
    std::shared_ptr<Common::Fiber> m_switch_fiber{};
    KThread* m_switch_cur_thread{};
    KThread* m_switch_highest_priority_thread{};
    bool m_switch_from_schedule{};
};
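
// RAII guard that holds the global scheduler lock for the current scope.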
class KScopedSchedulerLock : public KScopedLock<KScheduler::LockType> {
public:
    explicit KScopedSchedulerLock(KernelCore& kernel)
        : KScopedLock(kernel.GlobalSchedulerContext().m_scheduler_lock) {}
    ~KScopedSchedulerLock() = default;
};
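
// Illustrative usage (hypothetical helper, not part of this header):
//
//     void WakeThreadExample(KernelCore& kernel, KThread* thread) {
//         // Take the global scheduler lock before touching thread state.
//         KScopedSchedulerLock lk{kernel};
//         // ... mutate thread state here; the lock is released (and any
//         // pending reschedule performed) when lk goes out of scope.
//     }
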
} // namespace Kernel