author      bunnei <bunneidev@gmail.com>    2021-04-03 03:02:10 +0200
committer   bunnei <bunneidev@gmail.com>    2021-05-06 01:40:50 +0200
commit      34bed1ab41a296f8ccccc47d7c06ab03de2018b5 (patch)
tree        68ef2cba05f74fba14ec8c0e80b492a5fa587da5 /src/core
parent      core: Defer CoreTiming initialization. (diff)
Diffstat (limited to '')
-rw-r--r--  src/core/hle/kernel/client_session.cpp            |  3
-rw-r--r--  src/core/hle/kernel/client_session.h              |  2
-rw-r--r--  src/core/hle/kernel/global_scheduler_context.cpp  |  6
-rw-r--r--  src/core/hle/kernel/global_scheduler_context.h    |  8
-rw-r--r--  src/core/hle/kernel/hle_ipc.cpp                   | 10
-rw-r--r--  src/core/hle/kernel/hle_ipc.h                     | 10
-rw-r--r--  src/core/hle/kernel/kernel.cpp                    |  4
-rw-r--r--  src/core/hle/kernel/server_session.cpp            | 23
-rw-r--r--  src/core/hle/kernel/server_session.h              | 14
-rw-r--r--  src/core/hle/service/prepo/prepo.cpp              |  8
10 files changed, 30 insertions, 58 deletions
diff --git a/src/core/hle/kernel/client_session.cpp b/src/core/hle/kernel/client_session.cpp
index e230f365a..13c55abe4 100644
--- a/src/core/hle/kernel/client_session.cpp
+++ b/src/core/hle/kernel/client_session.cpp
@@ -38,8 +38,7 @@ ResultVal<std::shared_ptr<ClientSession>> ClientSession::Create(KernelCore& kern
     return MakeResult(std::move(client_session));
 }
 
-ResultCode ClientSession::SendSyncRequest(std::shared_ptr<KThread> thread,
-                                          Core::Memory::Memory& memory,
+ResultCode ClientSession::SendSyncRequest(KThread* thread, Core::Memory::Memory& memory,
                                           Core::Timing::CoreTiming& core_timing) {
     // Keep ServerSession alive until we're done working with it.
     if (!parent->Server()) {
diff --git a/src/core/hle/kernel/client_session.h b/src/core/hle/kernel/client_session.h
index 85aafeaf4..7a1d15d0c 100644
--- a/src/core/hle/kernel/client_session.h
+++ b/src/core/hle/kernel/client_session.h
@@ -46,7 +46,7 @@ public:
         return HANDLE_TYPE;
     }
 
-    ResultCode SendSyncRequest(std::shared_ptr<KThread> thread, Core::Memory::Memory& memory,
+    ResultCode SendSyncRequest(KThread* thread, Core::Memory::Memory& memory,
                                Core::Timing::CoreTiming& core_timing);
 
     bool IsSignaled() const override;
diff --git a/src/core/hle/kernel/global_scheduler_context.cpp b/src/core/hle/kernel/global_scheduler_context.cpp
index c6838649f..7c87cbada 100644
--- a/src/core/hle/kernel/global_scheduler_context.cpp
+++ b/src/core/hle/kernel/global_scheduler_context.cpp
@@ -17,12 +17,12 @@ GlobalSchedulerContext::GlobalSchedulerContext(KernelCore& kernel)
 
 GlobalSchedulerContext::~GlobalSchedulerContext() = default;
 
-void GlobalSchedulerContext::AddThread(std::shared_ptr<KThread> thread) {
+void GlobalSchedulerContext::AddThread(KThread* thread) {
     std::scoped_lock lock{global_list_guard};
-    thread_list.push_back(std::move(thread));
+    thread_list.push_back(thread);
 }
 
-void GlobalSchedulerContext::RemoveThread(std::shared_ptr<KThread> thread) {
+void GlobalSchedulerContext::RemoveThread(KThread* thread) {
     std::scoped_lock lock{global_list_guard};
     thread_list.erase(std::remove(thread_list.begin(), thread_list.end(), thread),
                       thread_list.end());
diff --git a/src/core/hle/kernel/global_scheduler_context.h b/src/core/hle/kernel/global_scheduler_context.h
index 11592843e..ba8b67fd1 100644
--- a/src/core/hle/kernel/global_scheduler_context.h
+++ b/src/core/hle/kernel/global_scheduler_context.h
@@ -38,13 +38,13 @@ public:
     ~GlobalSchedulerContext();
 
     /// Adds a new thread to the scheduler
-    void AddThread(std::shared_ptr<KThread> thread);
+    void AddThread(KThread* thread);
 
     /// Removes a thread from the scheduler
-    void RemoveThread(std::shared_ptr<KThread> thread);
+    void RemoveThread(KThread* thread);
 
     /// Returns a list of all threads managed by the scheduler
-    [[nodiscard]] const std::vector<std::shared_ptr<KThread>>& GetThreadList() const {
+    [[nodiscard]] const std::vector<KThread*>& GetThreadList() const {
         return thread_list;
     }
 
@@ -79,7 +79,7 @@ private:
     LockType scheduler_lock;
 
     /// Lists all thread ids that aren't deleted/etc.
-    std::vector<std::shared_ptr<KThread>> thread_list;
+    std::vector<KThread*> thread_list;
 
     Common::SpinLock global_list_guard{};
 };
diff --git a/src/core/hle/kernel/hle_ipc.cpp b/src/core/hle/kernel/hle_ipc.cpp
index 2b363b1d9..6ffe6ac41 100644
--- a/src/core/hle/kernel/hle_ipc.cpp
+++ b/src/core/hle/kernel/hle_ipc.cpp
@@ -46,11 +46,11 @@ void SessionRequestHandler::ClientDisconnected(
     boost::range::remove_erase(connected_sessions, server_session);
 }
 
-HLERequestContext::HLERequestContext(KernelCore& kernel, Core::Memory::Memory& memory,
-                                     std::shared_ptr<ServerSession> server_session,
-                                     std::shared_ptr<KThread> thread)
-    : server_session(std::move(server_session)),
-      thread(std::move(thread)), kernel{kernel}, memory{memory} {
+HLERequestContext::HLERequestContext(KernelCore& kernel_, Core::Memory::Memory& memory_,
+                                     std::shared_ptr<ServerSession> server_session_,
+                                     KThread* thread_)
+    : server_session(std::move(server_session_)),
+      thread(thread_), kernel{kernel_}, memory{memory_} {
     cmd_buf[0] = 0;
 }
 
diff --git a/src/core/hle/kernel/hle_ipc.h b/src/core/hle/kernel/hle_ipc.h
index 6fba42615..75617bff0 100644
--- a/src/core/hle/kernel/hle_ipc.h
+++ b/src/core/hle/kernel/hle_ipc.h
@@ -109,8 +109,7 @@ protected:
 class HLERequestContext {
 public:
     explicit HLERequestContext(KernelCore& kernel, Core::Memory::Memory& memory,
-                               std::shared_ptr<ServerSession> session,
-                               std::shared_ptr<KThread> thread);
+                               std::shared_ptr<ServerSession> session, KThread* thread);
     ~HLERequestContext();
 
     /// Returns a pointer to the IPC command buffer for this request.
@@ -276,10 +275,6 @@ public:
         return *thread;
     }
 
-    const KThread& GetThread() const {
-        return *thread;
-    }
-
     bool IsThreadWaiting() const {
         return is_thread_waiting;
    }
@@ -291,7 +286,8 @@ private:
     std::array<u32, IPC::COMMAND_BUFFER_LENGTH> cmd_buf;
 
     std::shared_ptr<Kernel::ServerSession> server_session;
-    std::shared_ptr<KThread> thread;
+    KThread* thread;
+
     // TODO(yuriks): Check common usage of this and optimize size accordingly
     boost::container::small_vector<Handle, 8> move_handles;
     boost::container::small_vector<Handle, 8> copy_handles;
diff --git a/src/core/hle/kernel/kernel.cpp b/src/core/hle/kernel/kernel.cpp
index 5c4f45ab4..17fe1d59f 100644
--- a/src/core/hle/kernel/kernel.cpp
+++ b/src/core/hle/kernel/kernel.cpp
@@ -60,8 +60,6 @@ struct KernelCore::Impl {
     void Initialize(KernelCore& kernel) {
         global_scheduler_context = std::make_unique<Kernel::GlobalSchedulerContext>(kernel);
 
-        RegisterHostThread();
-
         service_thread_manager =
             std::make_unique<Common::ThreadWorker>(1, "yuzu:ServiceThreadManager");
         is_phantom_mode_for_singlecore = false;
@@ -77,6 +75,8 @@ struct KernelCore::Impl {
         InitializeSchedulers();
         InitializeSuspendThreads();
         InitializePreemption(kernel);
+
+        RegisterHostThread();
     }
 
     void InitializeCores() {
diff --git a/src/core/hle/kernel/server_session.cpp b/src/core/hle/kernel/server_session.cpp
index 790dbb998..bb247959c 100644
--- a/src/core/hle/kernel/server_session.cpp
+++ b/src/core/hle/kernel/server_session.cpp
@@ -44,12 +44,7 @@ ResultVal<std::shared_ptr<ServerSession>> ServerSession::Create(KernelCore& kern
 
 bool ServerSession::IsSignaled() const {
     // Closed sessions should never wait, an error will be returned from svcReplyAndReceive.
-    if (!parent->Client()) {
-        return true;
-    }
-
-    // Wait if we have no pending requests, or if we're currently handling a request.
-    return !pending_requesting_threads.empty() && currently_handling == nullptr;
+    return !parent->Client();
 }
 
 void ServerSession::ClientDisconnected() {
@@ -62,11 +57,6 @@ void ServerSession::ClientDisconnected() {
         // invalidated (set to null).
         handler->ClientDisconnected(SharedFrom(this));
     }
-
-    // Clean up the list of client threads with pending requests, they are unneeded now that the
-    // client endpoint is closed.
-    pending_requesting_threads.clear();
-    currently_handling = nullptr;
 }
 
 void ServerSession::AppendDomainRequestHandler(std::shared_ptr<SessionRequestHandler> handler) {
@@ -116,11 +106,9 @@ ResultCode ServerSession::HandleDomainSyncRequest(Kernel::HLERequestContext& con
     return RESULT_SUCCESS;
 }
 
-ResultCode ServerSession::QueueSyncRequest(std::shared_ptr<KThread> thread,
-                                           Core::Memory::Memory& memory) {
+ResultCode ServerSession::QueueSyncRequest(KThread* thread, Core::Memory::Memory& memory) {
     u32* cmd_buf{reinterpret_cast<u32*>(memory.GetPointer(thread->GetTLSAddress()))};
 
-    auto context =
-        std::make_shared<HLERequestContext>(kernel, memory, SharedFrom(this), std::move(thread));
+    auto context = std::make_shared<HLERequestContext>(kernel, memory, SharedFrom(this), thread);
 
     context->PopulateFromIncomingCommandBuffer(kernel.CurrentProcess()->GetHandleTable(), cmd_buf);
@@ -161,10 +149,9 @@ ResultCode ServerSession::CompleteSyncRequest(HLERequestContext& context) {
     return result;
 }
 
-ResultCode ServerSession::HandleSyncRequest(std::shared_ptr<KThread> thread,
-                                            Core::Memory::Memory& memory,
+ResultCode ServerSession::HandleSyncRequest(KThread* thread, Core::Memory::Memory& memory,
                                             Core::Timing::CoreTiming& core_timing) {
-    return QueueSyncRequest(std::move(thread), memory);
+    return QueueSyncRequest(thread, memory);
 }
 
 } // namespace Kernel
diff --git a/src/core/hle/kernel/server_session.h b/src/core/hle/kernel/server_session.h
index c42d5ee59..77ed18c60 100644
--- a/src/core/hle/kernel/server_session.h
+++ b/src/core/hle/kernel/server_session.h
@@ -95,7 +95,7 @@ public:
      *
      * @returns ResultCode from the operation.
      */
-    ResultCode HandleSyncRequest(std::shared_ptr<KThread> thread, Core::Memory::Memory& memory,
+    ResultCode HandleSyncRequest(KThread* thread, Core::Memory::Memory& memory,
                                  Core::Timing::CoreTiming& core_timing);
 
     /// Called when a client disconnection occurs.
@@ -130,7 +130,7 @@ public:
 
 private:
     /// Queues a sync request from the emulated application.
-    ResultCode QueueSyncRequest(std::shared_ptr<KThread> thread, Core::Memory::Memory& memory);
+    ResultCode QueueSyncRequest(KThread* thread, Core::Memory::Memory& memory);
 
     /// Completes a sync request from the emulated application.
     ResultCode CompleteSyncRequest(HLERequestContext& context);
@@ -148,16 +148,6 @@ private:
     /// This is the list of domain request handlers (after conversion to a domain)
     std::vector<std::shared_ptr<SessionRequestHandler>> domain_request_handlers;
 
-    /// List of threads that are pending a response after a sync request. This list is processed in
-    /// a LIFO manner, thus, the last request will be dispatched first.
-    /// TODO(Subv): Verify if this is indeed processed in LIFO using a hardware test.
-    std::vector<std::shared_ptr<KThread>> pending_requesting_threads;
-
-    /// Thread whose request is currently being handled. A request is considered "handled" when a
-    /// response is sent via svcReplyAndReceive.
-    /// TODO(Subv): Find a better name for this.
-    std::shared_ptr<KThread> currently_handling;
-
     /// When set to True, converts the session to a domain at the end of the command
     bool convert_to_domain{};
 
diff --git a/src/core/hle/service/prepo/prepo.cpp b/src/core/hle/service/prepo/prepo.cpp
index d5b3b17a5..449e8d63b 100644
--- a/src/core/hle/service/prepo/prepo.cpp
+++ b/src/core/hle/service/prepo/prepo.cpp
@@ -60,7 +60,7 @@ private:
         const auto process_id = rp.PopRaw<u64>();
 
         const auto data1 = ctx.ReadBuffer(0);
-        const auto data2 = [ctx] {
+        const auto data2 = [&ctx] {
             if (ctx.CanReadBuffer(1)) {
                 return ctx.ReadBuffer(1);
             }
@@ -87,7 +87,7 @@ private:
         const auto process_id = rp.PopRaw<u64>();
 
         const auto data1 = ctx.ReadBuffer(0);
-        const auto data2 = [ctx] {
+        const auto data2 = [&ctx] {
             if (ctx.CanReadBuffer(1)) {
                 return ctx.ReadBuffer(1);
             }
@@ -139,7 +139,7 @@ private:
         const auto title_id = rp.PopRaw<u64>();
 
         const auto data1 = ctx.ReadBuffer(0);
-        const auto data2 = [ctx] {
+        const auto data2 = [&ctx] {
             if (ctx.CanReadBuffer(1)) {
                 return ctx.ReadBuffer(1);
             }
@@ -163,7 +163,7 @@ private:
         const auto title_id = rp.PopRaw<u64>();
 
         const auto data1 = ctx.ReadBuffer(0);
-        const auto data2 = [ctx] {
+        const auto data2 = [&ctx] {
            if (ctx.CanReadBuffer(1)) {
                 return ctx.ReadBuffer(1);
             }
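Note: the common thread running through these hunks is that KThread is no longer passed around by std::shared_ptr; the global scheduler's thread list and the IPC entry points now take non-owning KThread* pointers, with the thread's lifetime managed elsewhere in the kernel. A minimal, self-contained sketch of that pattern follows (hypothetical ThreadRegistry name, std::mutex standing in for Common::SpinLock; this is an illustration, not yuzu's actual classes):

    #include <algorithm>
    #include <mutex>
    #include <vector>

    class KThread; // opaque here; created and destroyed by its owner, not by the registry

    class ThreadRegistry {
    public:
        void AddThread(KThread* thread) {
            std::scoped_lock lock{guard};
            thread_list.push_back(thread); // raw pointer, no std::move needed
        }

        void RemoveThread(KThread* thread) {
            std::scoped_lock lock{guard};
            thread_list.erase(std::remove(thread_list.begin(), thread_list.end(), thread),
                              thread_list.end());
        }

        const std::vector<KThread*>& GetThreadList() const {
            return thread_list;
        }

    private:
        std::vector<KThread*> thread_list; // non-owning
        std::mutex guard;
    };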
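The HLERequestContext constructor hunk also renames its parameters with trailing underscores (kernel_, memory_, server_session_, thread_). A small illustrative sketch of why that convention helps (hypothetical RequestContext/Kernel/Memory types, not the real classes): the parameter names no longer shadow the members of the same name, so each member initializer clearly reads "member{parameter}" and avoids shadowing warnings.

    struct Kernel {};
    struct Memory {};

    class RequestContext {
    public:
        // Trailing underscores keep the parameters distinct from the members below.
        explicit RequestContext(Kernel& kernel_, Memory& memory_)
            : kernel{kernel_}, memory{memory_} {}

    private:
        Kernel& kernel;
        Memory& memory;
    };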
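Finally, the prepo.cpp hunks change the immediately-invoked lambdas that fetch the optional second input buffer from capturing ctx by value ([ctx]) to capturing it by reference ([&ctx]), so the request context is no longer copied just to read one buffer. A standalone illustration of the pattern (toy Context type with CanReadBuffer/ReadBuffer stand-ins; not the real HLERequestContext API):

    #include <cstddef>
    #include <cstdint>
    #include <vector>

    // Toy stand-in for the request context; only mimics the two calls used here.
    struct Context {
        bool CanReadBuffer(std::size_t index) const {
            return index < buffers.size();
        }
        std::vector<std::uint8_t> ReadBuffer(std::size_t index) const {
            return buffers[index];
        }
        std::vector<std::vector<std::uint8_t>> buffers;
    };

    std::vector<std::uint8_t> ReadOptionalSecondBuffer(const Context& ctx) {
        // Capture by reference: the lambda reads through ctx instead of copying it.
        const auto data2 = [&ctx] {
            if (ctx.CanReadBuffer(1)) {
                return ctx.ReadBuffer(1);
            }
            return std::vector<std::uint8_t>{};
        }();
        return data2;
    }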