|
|
|
@ -3,7 +3,6 @@
|
|
|
|
|
// Refer to the license.txt file included.
|
|
|
|
|
|
|
|
|
|
#include <memory>
|
|
|
|
|
#include <thread>
|
|
|
|
|
|
|
|
|
|
#include "video_core/renderer_vulkan/vk_buffer_cache.h"
|
|
|
|
|
#include "video_core/renderer_vulkan/vk_fence_manager.h"
|
|
|
|
@ -14,13 +13,11 @@
|
|
|
|
|
|
|
|
|
|
namespace Vulkan {
|
|
|
|
|
|
|
|
|
|
// Constructs an address-less fence. The device is kept to create the Vulkan
// event used for signalling; the scheduler is kept to record/flush commands.
InnerFence::InnerFence(const Device& device_, VKScheduler& scheduler_, u32 payload_,
                       bool is_stubbed_)
    : FenceBase{payload_, is_stubbed_}, device{device_}, scheduler{scheduler_} {}
|
|
|
|
|
// Payload-only overload that does not take the Vulkan device.
InnerFence::InnerFence(VKScheduler& scheduler_, u32 payload_, bool is_stubbed_)
    : FenceBase{payload_, is_stubbed_}, scheduler{scheduler_} {}
|
|
|
|
|
|
|
|
|
|
// Constructs a fence tied to a GPU virtual address with the given payload.
InnerFence::InnerFence(const Device& device_, VKScheduler& scheduler_, GPUVAddr address_,
                       u32 payload_, bool is_stubbed_)
    : FenceBase{address_, payload_, is_stubbed_}, device{device_}, scheduler{scheduler_} {}
|
|
|
|
|
// Address-carrying overload that does not take the Vulkan device.
InnerFence::InnerFence(VKScheduler& scheduler_, GPUVAddr address_, u32 payload_, bool is_stubbed_)
    : FenceBase{address_, payload_, is_stubbed_}, scheduler{scheduler_} {}
|
|
|
|
|
|
|
|
|
|
// Defaulted destructor; members release their resources automatically.
InnerFence::~InnerFence() = default;
|
|
|
|
|
|
|
|
|
@ -28,63 +25,38 @@ void InnerFence::Queue() {
|
|
|
|
|
if (is_stubbed) {
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
ASSERT(!event);
|
|
|
|
|
|
|
|
|
|
event = device.GetLogical().CreateEvent();
|
|
|
|
|
ticks = scheduler.CurrentTick();
|
|
|
|
|
|
|
|
|
|
scheduler.RequestOutsideRenderPassOperationContext();
|
|
|
|
|
scheduler.Record([event = *event](vk::CommandBuffer cmdbuf) {
|
|
|
|
|
cmdbuf.SetEvent(event, VK_PIPELINE_STAGE_ALL_COMMANDS_BIT);
|
|
|
|
|
});
|
|
|
|
|
// Get the current tick so we can wait for it
|
|
|
|
|
wait_tick = scheduler.CurrentTick();
|
|
|
|
|
scheduler.Flush();
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
bool InnerFence::IsSignaled() const {
|
|
|
|
|
if (is_stubbed) {
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
ASSERT(event);
|
|
|
|
|
return IsEventSignalled();
|
|
|
|
|
return scheduler.IsFree(wait_tick);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
void InnerFence::Wait() {
|
|
|
|
|
if (is_stubbed) {
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
ASSERT(event);
|
|
|
|
|
|
|
|
|
|
if (ticks >= scheduler.CurrentTick()) {
|
|
|
|
|
scheduler.Flush();
|
|
|
|
|
}
|
|
|
|
|
while (!IsEventSignalled()) {
|
|
|
|
|
std::this_thread::yield();
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
bool InnerFence::IsEventSignalled() const {
|
|
|
|
|
switch (const VkResult result = event.GetStatus()) {
|
|
|
|
|
case VK_EVENT_SET:
|
|
|
|
|
return true;
|
|
|
|
|
case VK_EVENT_RESET:
|
|
|
|
|
return false;
|
|
|
|
|
default:
|
|
|
|
|
throw vk::Exception(result);
|
|
|
|
|
}
|
|
|
|
|
scheduler.Wait(wait_tick);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Constructs the Vulkan fence manager, forwarding the caches to the generic
// base and keeping the device/scheduler to build InnerFence objects.
// NOTE(review): memory_manager_ is accepted for interface compatibility but is
// not used here — confirm against the header declaration.
VKFenceManager::VKFenceManager(VideoCore::RasterizerInterface& rasterizer_, Tegra::GPU& gpu_,
                               Tegra::MemoryManager& memory_manager_, TextureCache& texture_cache_,
                               VKBufferCache& buffer_cache_, VKQueryCache& query_cache_,
                               const Device& device_, VKScheduler& scheduler_)
    : GenericFenceManager{rasterizer_, gpu_, texture_cache_, buffer_cache_, query_cache_},
      device{device_}, scheduler{scheduler_} {}
|
|
|
|
|
|
|
|
|
|
// Creates an address-less fence with the given payload value.
Fence VKFenceManager::CreateFence(u32 value, bool is_stubbed) {
    return std::make_shared<InnerFence>(device, scheduler, value, is_stubbed);
}
|
|
|
|
|
|
|
|
|
|
// Creates a fence tied to a GPU virtual address with the given payload value.
Fence VKFenceManager::CreateFence(GPUVAddr addr, u32 value, bool is_stubbed) {
    return std::make_shared<InnerFence>(device, scheduler, addr, value, is_stubbed);
}
|
|
|
|
|
|
|
|
|
|
void VKFenceManager::QueueFence(Fence& fence) {
|
|
|
|
|