author     Fernando S <fsahmkow27@gmail.com>   2023-12-19 16:12:56 +0100
committer  GitHub <noreply@github.com>         2023-12-19 16:12:56 +0100
commit     0589a32f755d4c127193b4ba5143635ed3ba8f3a (patch)
tree       948631e7d5391978b9a9fd7a84d03e75197c1e25 /src/core/hle/service/nvnflinger/buffer_queue_consumer.cpp
parent     Merge pull request #12402 from german77/lang (diff)
parent     nvnflinger: mark buffer as acquired when acquired (diff)
Diffstat (limited to 'src/core/hle/service/nvnflinger/buffer_queue_consumer.cpp')
-rw-r--r--  src/core/hle/service/nvnflinger/buffer_queue_consumer.cpp  35
1 file changed, 32 insertions(+), 3 deletions(-)
diff --git a/src/core/hle/service/nvnflinger/buffer_queue_consumer.cpp b/src/core/hle/service/nvnflinger/buffer_queue_consumer.cpp
index d91886bed..bbe8e06d4 100644
--- a/src/core/hle/service/nvnflinger/buffer_queue_consumer.cpp
+++ b/src/core/hle/service/nvnflinger/buffer_queue_consumer.cpp
@@ -90,6 +90,18 @@ Status BufferQueueConsumer::AcquireBuffer(BufferItem* out_buffer,

         LOG_DEBUG(Service_Nvnflinger, "acquiring slot={}", slot);

+        // If the front buffer is still being tracked, update its slot state
+        if (core->StillTracking(*front)) {
+            slots[slot].acquire_called = true;
+            slots[slot].needs_cleanup_on_release = false;
+            slots[slot].buffer_state = BufferState::Acquired;
+
+            // TODO: for now, avoid resetting the fence, so that when we next return this
+            // slot to the producer, it will wait for the fence to pass. We should fix this
+            // by properly waiting for the fence in the BufferItemConsumer.
+            // slots[slot].fence = Fence::NoFence();
+        }
+
         // If the buffer has previously been acquired by the consumer, set graphic_buffer to nullptr to
         // avoid unnecessarily remapping this buffer on the consumer side.
         if (out_buffer->acquire_called) {
@@ -132,11 +144,28 @@ Status BufferQueueConsumer::ReleaseBuffer(s32 slot, u64 frame_number, const Fenc
             ++current;
         }

-        slots[slot].buffer_state = BufferState::Free;
+        if (slots[slot].buffer_state == BufferState::Acquired) {
+            // TODO: for now, avoid resetting the fence, so that when we next return this
+            // slot to the producer, it can wait for its own fence to pass. We should fix this
+            // by properly waiting for the fence in the BufferItemConsumer.
+            // slots[slot].fence = release_fence;
+            slots[slot].buffer_state = BufferState::Free;

-        listener = core->connected_producer_listener;
+            listener = core->connected_producer_listener;

-        LOG_DEBUG(Service_Nvnflinger, "releasing slot {}", slot);
+            LOG_DEBUG(Service_Nvnflinger, "releasing slot {}", slot);
+        } else if (slots[slot].needs_cleanup_on_release) {
+            LOG_DEBUG(Service_Nvnflinger, "releasing a stale buffer slot {} (state = {})", slot,
+                      slots[slot].buffer_state);
+            slots[slot].needs_cleanup_on_release = false;
+            return Status::StaleBufferSlot;
+        } else {
+            LOG_ERROR(Service_Nvnflinger,
+                      "attempted to release buffer slot {} but its state was {}", slot,
+                      slots[slot].buffer_state);
+
+            return Status::BadValue;
+        }

         core->SignalDequeueCondition();
     }
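
The patch above tightens the slot state machine: AcquireBuffer now explicitly marks the slot as Acquired, and ReleaseBuffer only returns a slot to Free when it really was Acquired, reporting StaleBufferSlot or BadValue otherwise. Below is a minimal, self-contained sketch of that transition logic, not yuzu code: MiniSlot, MiniQueue and the trimmed-down BufferState/Status enums are hypothetical stand-ins for the real BufferQueueCore/BufferQueueConsumer types, and the fence handling discussed in the TODO comments is left out entirely.

// Minimal sketch of the acquire/release state transitions this patch enforces.
// All names here are simplified stand-ins, not yuzu's actual types.
#include <array>
#include <cstdio>

enum class BufferState { Free, Queued, Acquired };
enum class Status { NoError, StaleBufferSlot, BadValue };

struct MiniSlot {
    BufferState buffer_state{BufferState::Free};
    bool acquire_called{false};          // lets a later acquire skip re-sending the buffer
    bool needs_cleanup_on_release{false};
};

struct MiniQueue {
    std::array<MiniSlot, 4> slots{};

    // Mirrors the AcquireBuffer change: the slot is explicitly marked Acquired.
    void Acquire(int slot) {
        slots[slot].acquire_called = true;
        slots[slot].needs_cleanup_on_release = false;
        slots[slot].buffer_state = BufferState::Acquired;
    }

    // Mirrors the ReleaseBuffer change: only an Acquired slot goes back to Free;
    // anything else is either a stale slot or a caller error.
    Status Release(int slot) {
        if (slots[slot].buffer_state == BufferState::Acquired) {
            slots[slot].buffer_state = BufferState::Free;
            return Status::NoError;
        }
        if (slots[slot].needs_cleanup_on_release) {
            slots[slot].needs_cleanup_on_release = false;
            return Status::StaleBufferSlot;
        }
        return Status::BadValue;
    }
};

int main() {
    MiniQueue queue;
    queue.Acquire(0);
    std::printf("first release:  %d\n", static_cast<int>(queue.Release(0))); // NoError
    std::printf("second release: %d\n", static_cast<int>(queue.Release(0))); // BadValue
}

Before this change, ReleaseBuffer unconditionally set the slot to Free (the single removed line in the second hunk), so releasing a slot twice, or releasing one that was never acquired, went unnoticed; with the check in place such calls now surface as StaleBufferSlot or BadValue instead of silently succeeding.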