path: root/src/core/hle/kernel/k_scheduler.h
author     Liam <byteslice@airmail.cc>    2022-07-06 05:27:25 +0200
committer  Liam <byteslice@airmail.cc>    2022-07-15 04:47:18 +0200
commit     21945ae127480c8332c1110ceada2df4a42a5848 (patch)
tree       a385c64a14b0d8e8dd71410eaa47575462f8f368 /src/core/hle/kernel/k_scheduler.h
parent     kernel: use KScheduler from mesosphere (diff)
Diffstat (limited to 'src/core/hle/kernel/k_scheduler.h')
-rw-r--r--  src/core/hle/kernel/k_scheduler.h  24
1 file changed, 18 insertions(+), 6 deletions(-)
diff --git a/src/core/hle/kernel/k_scheduler.h b/src/core/hle/kernel/k_scheduler.h
index 8f4eebf6a..91e870933 100644
--- a/src/core/hle/kernel/k_scheduler.h
+++ b/src/core/hle/kernel/k_scheduler.h
@@ -41,8 +41,11 @@ public:
     explicit KScheduler(KernelCore& kernel);
     ~KScheduler();
 
-    void Initialize(KThread* idle_thread);
+    void Initialize(KThread* main_thread, KThread* idle_thread, s32 core_id);
     void Activate();
+    void OnThreadStart();
+    void Unload(KThread* thread);
+    void Reload(KThread* thread);
 
     void SetInterruptTaskRunnable();
     void RequestScheduleOnInterrupt();
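
The hunk above widens Initialize() to take the core's main thread and core index alongside the idle thread, and adds the OnThreadStart()/Unload()/Reload() entry points. The self-contained sketch below is illustrative only: ToyScheduler, its printed messages, and the assumption that Unload()/Reload() bracket saving and restoring a thread's state are inventions for this example, not yuzu's implementation.

// Toy model of the widened scheduler lifecycle; KThread here is a stand-in type.
#include <cstdint>
#include <cstdio>

struct KThread {
    const char* name;
};

class ToyScheduler {
public:
    // Mirrors the new signature: main thread, idle thread, and owning core index.
    void Initialize(KThread* main_thread, KThread* idle_thread, std::int32_t core_id) {
        m_current_thread = main_thread;
        m_idle_thread = idle_thread;
        m_core_id = core_id;
    }
    void OnThreadStart() {
        std::printf("core %d: first thread running\n", static_cast<int>(m_core_id));
    }
    void Unload(KThread* t) {
        std::printf("saving state of %s\n", t->name);  // outgoing thread
    }
    void Reload(KThread* t) {
        std::printf("loading state of %s%s\n", t->name, t == m_idle_thread ? " (idle)" : "");
        m_current_thread = t;                           // incoming thread
    }

private:
    KThread* m_current_thread{};
    KThread* m_idle_thread{};
    std::int32_t m_core_id{-1};
};

int main() {
    KThread main_thread{"main"}, idle_thread{"idle"};
    ToyScheduler sched;
    sched.Initialize(&main_thread, &idle_thread, 0);
    sched.OnThreadStart();
    sched.Unload(&main_thread);  // switch away from the main thread...
    sched.Reload(&idle_thread);  // ...and onto the idle thread
}
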
@@ -55,6 +58,14 @@ public:
         return m_idle_thread;
     }
 
+    bool IsIdle() const {
+        return m_current_thread.load() == m_idle_thread;
+    }
+
+    std::shared_ptr<Common::Fiber> GetSwitchFiber() {
+        return m_switch_fiber;
+    }
+
     KThread* GetPreviousThread() const {
         return m_state.prev_thread;
     }
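
IsIdle() above reduces to a pointer comparison between the atomically published current thread and the core's dedicated idle thread. A minimal stand-alone sketch of that pattern, using toy types rather than yuzu's:

#include <atomic>
#include <cassert>

struct KThread {};

struct ToyScheduler {
    KThread* idle_thread{};
    std::atomic<KThread*> current_thread{};

    bool IsIdle() const {
        // "Idle" simply means the published current thread is the idle thread.
        return current_thread.load() == idle_thread;
    }
};

int main() {
    KThread idle, worker;
    ToyScheduler s;
    s.idle_thread = &idle;

    s.current_thread.store(&idle);
    assert(s.IsIdle());

    s.current_thread.store(&worker);
    assert(!s.IsIdle());
}
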
@@ -69,7 +80,7 @@ public:
 
     // Static public API.
     static bool CanSchedule(KernelCore& kernel) {
-        return kernel.GetCurrentEmuThread()->GetDisableDispatchCount() == 0;
+        return GetCurrentThread(kernel).GetDisableDispatchCount() == 0;
     }
     static bool IsSchedulerLockedByCurrentThread(KernelCore& kernel) {
         return kernel.GlobalSchedulerContext().scheduler_lock.IsLockedByCurrentThread();
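
CanSchedule() now reads the disable-dispatch counter through GetCurrentThread(kernel) instead of kernel.GetCurrentEmuThread(). The sketch below models only the underlying guard idea, namely that rescheduling is legal only while the current thread's dispatch-disable count is zero; the RAII helper and thread-local stand-in are inventions for this example, not KScopedDisableDispatch.

#include <cassert>

struct ToyThread {
    int disable_dispatch_count{0};
};

// Stand-in for "the thread currently running on this core".
thread_local ToyThread g_current_thread{};

static ToyThread& GetCurrentThread() {
    return g_current_thread;
}

static bool CanSchedule() {
    return GetCurrentThread().disable_dispatch_count == 0;
}

// RAII guard that keeps the current thread pinned while it is alive.
struct ScopedDisableDispatch {
    ScopedDisableDispatch() { ++GetCurrentThread().disable_dispatch_count; }
    ~ScopedDisableDispatch() { --GetCurrentThread().disable_dispatch_count; }
};

int main() {
    assert(CanSchedule());
    {
        ScopedDisableDispatch dd;
        assert(!CanSchedule());  // dispatch disabled: any reschedule must be deferred
    }
    assert(CanSchedule());       // guard destroyed, scheduling allowed again
}
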
@@ -113,7 +124,7 @@ private:
 
     // Instanced private API.
     void ScheduleImpl();
-    void ScheduleImplOffStack();
+    void ScheduleImplFiber();
    void SwitchThread(KThread* next_thread);
 
     void Schedule();
@@ -147,9 +158,10 @@ private:
     KThread* m_idle_thread{nullptr};
     std::atomic<KThread*> m_current_thread{nullptr};
 
-    std::shared_ptr<Common::Fiber> m_idle_stack{};
-    KThread* m_idle_cur_thread{};
-    KThread* m_idle_highest_priority_thread{};
+    std::shared_ptr<Common::Fiber> m_switch_fiber{};
+    KThread* m_switch_cur_thread{};
+    KThread* m_switch_highest_priority_thread{};
+    bool m_switch_from_schedule{};
 };
 
 class KScopedSchedulerLock : public KScopedLock<KScheduler::LockType> {
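
The renames in the last two hunks (ScheduleImplOffStack to ScheduleImplFiber, m_idle_* to m_switch_*) reflect that thread selection runs on a dedicated switch fiber with its own stack rather than borrowing the idle thread's. The sketch below illustrates only that control-flow idea; it uses POSIX ucontext instead of Common::Fiber, and every name in it is invented for the example.

#include <ucontext.h>

#include <array>
#include <cstdio>

struct ToyThread {
    const char* name;
    int priority;  // lower value = higher priority, as on Horizon
};

static std::array<ToyThread, 3> g_threads{{{"idle", 63}, {"worker", 44}, {"audio", 12}}};
static ToyThread* g_selected_thread{};  // plays the role of m_switch_highest_priority_thread
static ucontext_t g_caller_ctx, g_switch_ctx;
alignas(16) static char g_switch_stack[64 * 1024];

// Runs on the dedicated switch stack: pick the next thread, then hop back.
static void SwitchFiberMain() {
    g_selected_thread = &g_threads[0];
    for (auto& t : g_threads) {
        if (t.priority < g_selected_thread->priority) {
            g_selected_thread = &t;
        }
    }
    swapcontext(&g_switch_ctx, &g_caller_ctx);  // selection done, return to the caller
}

int main() {
    getcontext(&g_switch_ctx);
    g_switch_ctx.uc_stack.ss_sp = g_switch_stack;
    g_switch_ctx.uc_stack.ss_size = sizeof(g_switch_stack);
    g_switch_ctx.uc_link = &g_caller_ctx;
    makecontext(&g_switch_ctx, SwitchFiberMain, 0);

    swapcontext(&g_caller_ctx, &g_switch_ctx);  // jump onto the switch stack
    std::printf("next thread: %s\n", g_selected_thread->name);  // prints "audio"
}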