@@ -287,12 +287,13 @@ RasterizerVulkan::RasterizerVulkan(Core::System& system, Core::Frontend::EmuWind
       screen_info{screen_info}, device{device}, resource_manager{resource_manager},
       memory_manager{memory_manager}, state_tracker{state_tracker}, scheduler{scheduler},
       staging_pool(device, memory_manager, scheduler), descriptor_pool(device),
-      update_descriptor_queue(device, scheduler),
+      update_descriptor_queue(device, scheduler), renderpass_cache(device),
       quad_array_pass(device, scheduler, descriptor_pool, staging_pool, update_descriptor_queue),
       uint8_pass(device, scheduler, descriptor_pool, staging_pool, update_descriptor_queue),
       texture_cache(system, *this, device, resource_manager, memory_manager, scheduler,
                     staging_pool),
-      pipeline_cache(system, *this, device, scheduler, descriptor_pool, update_descriptor_queue),
+      pipeline_cache(system, *this, device, scheduler, descriptor_pool, update_descriptor_queue,
+                     renderpass_cache),
       buffer_cache(*this, system, device, memory_manager, scheduler, staging_pool),
       sampler_cache(device), query_cache(system, *this, device, scheduler) {
     scheduler.SetQueryCache(query_cache);
@@ -365,13 +366,16 @@ void RasterizerVulkan::Draw(bool is_indexed, bool is_instanced) {
 void RasterizerVulkan::Clear() {
     MICROPROFILE_SCOPE(Vulkan_Clearing);
 
-    query_cache.UpdateCounters();
-
     const auto& gpu = system.GPU().Maxwell3D();
     if (!system.GPU().Maxwell3D().ShouldExecute()) {
         return;
     }
 
+    sampled_views.clear();
+    image_views.clear();
+
+    query_cache.UpdateCounters();
+
     const auto& regs = gpu.regs;
     const bool use_color = regs.clear_buffers.R || regs.clear_buffers.G || regs.clear_buffers.B ||
                            regs.clear_buffers.A;
@@ -380,52 +384,54 @@ void RasterizerVulkan::Clear() {
     if (!use_color && !use_depth && !use_stencil) {
         return;
     }
 
-    // Clearing images requires to be out of a renderpass
-    scheduler.RequestOutsideRenderPassOperationContext();
-
-    // TODO(Rodrigo): Implement clears rendering a quad or using beginning a renderpass.
+    [[maybe_unused]] const auto texceptions = UpdateAttachments();
+    DEBUG_ASSERT(texceptions.none());
+    SetupImageTransitions(0, color_attachments, zeta_attachment);
+
+    const vk::RenderPass renderpass = renderpass_cache.GetRenderPass(GetRenderPassParams(0));
+    const auto [framebuffer, render_area] = ConfigureFramebuffers(renderpass);
+    scheduler.RequestRenderpass({renderpass, framebuffer, {{0, 0}, render_area}, 0, nullptr});
+
+    const auto& scissor = regs.scissor_test[0];
+    const vk::Offset2D scissor_offset(scissor.min_x, scissor.min_y);
+    vk::Extent2D scissor_extent{scissor.max_x - scissor.min_x, scissor.max_y - scissor.min_y};
+    scissor_extent.width = std::min(scissor_extent.width, render_area.width);
+    scissor_extent.height = std::min(scissor_extent.height, render_area.height);
+
+    // TODO(Rodrigo): Implement layer clears
+    const vk::ClearRect clear_rect({scissor_offset, scissor_extent}, 0, 1);
+
     if (use_color) {
-        View color_view;
-        {
-            MICROPROFILE_SCOPE(Vulkan_RenderTargets);
-            color_view = texture_cache.GetColorBufferSurface(regs.clear_buffers.RT.Value(), false);
-        }
-
-        color_view->Transition(vk::ImageLayout::eTransferDstOptimal,
-                               vk::PipelineStageFlagBits::eTransfer,
-                               vk::AccessFlagBits::eTransferWrite);
-
         const std::array clear_color = {regs.clear_color[0], regs.clear_color[1],
                                         regs.clear_color[2], regs.clear_color[3]};
-        const vk::ClearColorValue clear(clear_color);
-        scheduler.Record([image = color_view->GetImage(),
-                          subresource = color_view->GetImageSubresourceRange(),
-                          clear](auto cmdbuf, auto& dld) {
-            cmdbuf.clearColorImage(image, vk::ImageLayout::eTransferDstOptimal, clear, subresource,
-                                   dld);
+        const vk::ClearValue clear_value{clear_color};
+        const u32 color_attachment = regs.clear_buffers.RT;
+        scheduler.Record([color_attachment, clear_value, clear_rect](auto cmdbuf, auto& dld) {
+            const vk::ClearAttachment attachment(vk::ImageAspectFlagBits::eColor, color_attachment,
+                                                 clear_value);
+            cmdbuf.clearAttachments(1, &attachment, 1, &clear_rect, dld);
         });
     }
-    if (use_depth || use_stencil) {
-        View zeta_surface;
-        {
-            MICROPROFILE_SCOPE(Vulkan_RenderTargets);
-            zeta_surface = texture_cache.GetDepthBufferSurface(false);
-        }
-
-        zeta_surface->Transition(vk::ImageLayout::eTransferDstOptimal,
-                                 vk::PipelineStageFlagBits::eTransfer,
-                                 vk::AccessFlagBits::eTransferWrite);
-
-        const vk::ClearDepthStencilValue clear(regs.clear_depth,
-                                               static_cast<u32>(regs.clear_stencil));
-        scheduler.Record([image = zeta_surface->GetImage(),
-                          subresource = zeta_surface->GetImageSubresourceRange(),
-                          clear](auto cmdbuf, auto& dld) {
-            cmdbuf.clearDepthStencilImage(image, vk::ImageLayout::eTransferDstOptimal, clear,
-                                          subresource, dld);
-        });
-    }
+
+    if (!use_depth && !use_stencil) {
+        return;
+    }
+    vk::ImageAspectFlags aspect_flags;
+    if (use_depth) {
+        aspect_flags |= vk::ImageAspectFlagBits::eDepth;
+    }
+    if (use_stencil) {
+        aspect_flags |= vk::ImageAspectFlagBits::eStencil;
+    }
+
+    scheduler.Record([clear_depth = regs.clear_depth, clear_stencil = regs.clear_stencil,
+                      clear_rect, aspect_flags](auto cmdbuf, auto& dld) {
+        const vk::ClearDepthStencilValue clear_zeta(clear_depth, clear_stencil);
+        const vk::ClearValue clear_value{clear_zeta};
+        const vk::ClearAttachment attachment(aspect_flags, 0, clear_value);
+        cmdbuf.clearAttachments(1, &attachment, 1, &clear_rect, dld);
+    });
 }
 
 void RasterizerVulkan::DispatchCompute(GPUVAddr code_addr) {
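Note for reviewers unfamiliar with the API change: the old path cleared with vkCmdClearColorImage / vkCmdClearDepthStencilImage, which must be recorded outside a render pass and need an image layout transition to eTransferDstOptimal; the new path records vkCmdClearAttachments inside the active render pass, so no transition or render pass break is required. The sketch below is illustration only and not part of the patch; it shows the same clear-attachments pattern against plain Vulkan-Hpp, and the function name RecordColorClear and its parameters are hypothetical.

// Standalone illustrative sketch, not part of the patch.
#include <array>
#include <cstdint>
#include <vulkan/vulkan.hpp>

// Records a clear of one color attachment of the currently bound subpass.
// 'cmdbuf' must be recording inside a render pass whose subpass references the
// attachment at 'attachment_index'; no image layout transition is needed.
void RecordColorClear(vk::CommandBuffer cmdbuf, std::uint32_t attachment_index,
                      vk::Rect2D render_area, const std::array<float, 4>& color) {
    const vk::ClearValue clear_value{vk::ClearColorValue(color)};
    const vk::ClearAttachment attachment(vk::ImageAspectFlagBits::eColor, attachment_index,
                                         clear_value);
    // Clear layer 0 only, mirroring the "Implement layer clears" TODO in the patch.
    const vk::ClearRect clear_rect(render_area, 0, 1);
    cmdbuf.clearAttachments(1, &attachment, 1, &clear_rect);
}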