From 312068eebf08db27caa24d531f8324db1bd716be Mon Sep 17 00:00:00 2001
From: Wunk
Date: Sun, 12 Nov 2023 14:17:38 -0800
Subject: [PATCH] renderer_vulkan: Optimize descriptor binding (#7142)

For each draw, Citra currently rebinds every descriptor set slot, which
redundantly re-binds descriptor sets that are already bound. Instead, it
should bind only the descriptor sets whose contents or buffer offsets have
changed. This also allows entire calls to `vkCmdBindDescriptorSets` to be
elided when nothing has changed between draw calls.
---
 .../renderer_vulkan/vk_pipeline_cache.cpp     | 64 +++++++++++++++----
 1 file changed, 53 insertions(+), 11 deletions(-)

diff --git a/src/video_core/renderer_vulkan/vk_pipeline_cache.cpp b/src/video_core/renderer_vulkan/vk_pipeline_cache.cpp
index bb347e3bfd..84d0a747e7 100644
--- a/src/video_core/renderer_vulkan/vk_pipeline_cache.cpp
+++ b/src/video_core/renderer_vulkan/vk_pipeline_cache.cpp
@@ -205,19 +205,55 @@ bool PipelineCache::BindPipeline(const PipelineInfo& info, bool wait_built) {
         return false;
     }
 
-    for (u32 i = 0; i < NUM_RASTERIZER_SETS; i++) {
-        if (!set_dirty[i]) {
-            continue;
-        }
-        bound_descriptor_sets[i] = descriptor_set_providers[i].Acquire(update_data[i]);
-        set_dirty[i] = false;
+    u32 new_descriptors_start = 0;
+    std::span<vk::DescriptorSet> new_descriptors_span{};
+    std::span<u32> new_offsets_span{};
+
+    // Ensure all the descriptor sets are set at least once at the beginning.
+    if (scheduler.IsStateDirty(StateFlags::DescriptorSets)) {
+        set_dirty.set();
     }
 
+    if (set_dirty.any()) {
+        for (u32 i = 0; i < NUM_RASTERIZER_SETS; i++) {
+            if (!set_dirty.test(i)) {
+                continue;
+            }
+            bound_descriptor_sets[i] = descriptor_set_providers[i].Acquire(update_data[i]);
+        }
+        new_descriptors_span = bound_descriptor_sets;
+
+        // Only send new offsets if the buffer descriptor-set changed.
+        if (set_dirty.test(0)) {
+            new_offsets_span = offsets;
+        }
+
+        // Try to compact the number of updated descriptor-set slots to the ones that have actually
+        // changed
+        if (!set_dirty.all()) {
+            const u64 dirty_mask = set_dirty.to_ulong();
+            new_descriptors_start = static_cast<u32>(std::countr_zero(dirty_mask));
+            const u32 new_descriptors_end = 64u - static_cast<u32>(std::countl_zero(dirty_mask));
+            const u32 new_descriptors_size = new_descriptors_end - new_descriptors_start;
+
+            new_descriptors_span =
+                new_descriptors_span.subspan(new_descriptors_start, new_descriptors_size);
+        }
+
+        set_dirty.reset();
+    }
+
+    boost::container::static_vector<vk::DescriptorSet, NUM_RASTERIZER_SETS> new_descriptors(
+        new_descriptors_span.begin(), new_descriptors_span.end());
+    boost::container::static_vector<u32, NUM_DYNAMIC_OFFSETS> new_offsets(new_offsets_span.begin(),
+                                                                          new_offsets_span.end());
+
     const bool is_dirty = scheduler.IsStateDirty(StateFlags::Pipeline);
     const bool pipeline_dirty = (current_pipeline != pipeline) || is_dirty;
     scheduler.Record([this, is_dirty, pipeline_dirty, pipeline,
                       current_dynamic = current_info.dynamic, dynamic = info.dynamic,
-                      descriptor_sets = bound_descriptor_sets, offsets = offsets,
+                      new_descriptors_start, descriptor_sets = std::move(new_descriptors),
+                      offsets = std::move(new_offsets),
                       current_rasterization = current_info.rasterization,
                       current_depth_stencil = current_info.depth_stencil,
                       rasterization = info.rasterization,
@@ -318,13 +354,16 @@ bool PipelineCache::BindPipeline(const PipelineInfo& info, bool wait_built) {
             }
             cmdbuf.bindPipeline(vk::PipelineBindPoint::eGraphics, pipeline->Handle());
         }
-        cmdbuf.bindDescriptorSets(vk::PipelineBindPoint::eGraphics, *pipeline_layout, 0,
-                                  descriptor_sets, offsets);
+
+        if (descriptor_sets.size()) {
+            cmdbuf.bindDescriptorSets(vk::PipelineBindPoint::eGraphics, *pipeline_layout,
+                                      new_descriptors_start, descriptor_sets, offsets);
+        }
     });
 
     current_info = info;
     current_pipeline = pipeline;
-    scheduler.MarkStateNonDirty(StateFlags::Pipeline);
+    scheduler.MarkStateNonDirty(StateFlags::Pipeline | StateFlags::DescriptorSets);
 
     return true;
 }
@@ -497,7 +536,10 @@ void PipelineCache::BindTexelBuffer(u32 binding, vk::BufferView buffer_view) {
 }
 
 void PipelineCache::SetBufferOffset(u32 binding, size_t offset) {
-    offsets[binding] = static_cast<u32>(offset);
+    if (offsets[binding] != static_cast<u32>(offset)) {
+        offsets[binding] = static_cast<u32>(offset);
+        set_dirty[0] = true;
+    }
 }
 
 bool PipelineCache::IsCacheValid(std::span<const u8> data) const {
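
Note on the approach (not part of the patch): the heart of the change is the dirty-range compaction in BindPipeline, which shrinks the rebind to the contiguous run of slots between the first and last dirty descriptor set. Below is a minimal standalone sketch of the same idea; it uses plain standard-library types, and the NUM_SETS constant and int-valued "sets" are illustrative stand-ins for Citra's NUM_RASTERIZER_SETS and vk::DescriptorSet, not the project's actual definitions.

// Standalone sketch of the dirty-range compaction, assuming C++20 (<bit>, <span>).
#include <array>
#include <bit>
#include <bitset>
#include <cstdint>
#include <cstdio>
#include <span>

constexpr std::uint32_t NUM_SETS = 3; // stand-in for NUM_RASTERIZER_SETS

struct BindRange {
    std::uint32_t first_set;   // firstSet argument for vkCmdBindDescriptorSets
    std::span<const int> sets; // contiguous run to rebind; empty => skip the call
};

// Returns the smallest contiguous range of descriptor-set slots covering every
// dirty slot. Clean slots inside the range are rebound as well, because the
// Vulkan binding interface only takes a firstSet plus a contiguous array of sets.
BindRange CompactDirtyRange(const std::bitset<NUM_SETS>& dirty,
                            std::span<const int> all_sets) {
    if (dirty.none()) {
        return {0, {}};
    }
    const std::uint64_t mask = dirty.to_ullong();
    const auto first = static_cast<std::uint32_t>(std::countr_zero(mask));
    const auto end = 64u - static_cast<std::uint32_t>(std::countl_zero(mask));
    return {first, all_sets.subspan(first, end - first)};
}

int main() {
    const std::array<int, NUM_SETS> sets{100, 200, 300};
    std::bitset<NUM_SETS> dirty;
    dirty.set(1); // only the middle set changed this draw

    const BindRange range = CompactDirtyRange(dirty, sets);
    // Prints "firstSet = 1, count = 1": one set is rebound instead of all three.
    std::printf("firstSet = %u, count = %zu\n", range.first_set, range.sets.size());
    return 0;
}

As in the patch, an all-clear dirty mask yields an empty range, which is what allows the vkCmdBindDescriptorSets call to be skipped entirely for draws where neither the descriptor sets nor the dynamic buffer offsets changed.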