Merge pull request #11896 from liamwhite/crop

renderer_vulkan: fix cropping for presentation
Branch: master
Authored by liamwhite, committed by GitHub on 2023-11-06 12:08:03 +07:00
commit 40357098a2
4 changed files with 94 additions and 67 deletions

@@ -3,6 +3,7 @@
#pragma once
#include "common/common_funcs.h"
#include "common/common_types.h"
namespace Service::android {
@@ -21,5 +22,6 @@ enum class BufferTransformFlags : u32 {
/// Rotate source image 270 degrees clockwise
Rotate270 = 0x07,
};
DECLARE_ENUM_FLAG_OPERATORS(BufferTransformFlags);
} // namespace Service::android
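Note: the new common_funcs.h include pulls in DECLARE_ENUM_FLAG_OPERATORS, which generates the bitwise operators that the NormalizeCrop helper below relies on (flags & FlipH, flags &= ~FlipH, True(flags)). A rough sketch of what this provides, assuming the usual definitions in common/common_funcs.h (the exact macro may differ):

#include <type_traits>

// Approximation only; the real macro and the True() helper live in common/common_funcs.h.
enum class BufferTransformFlags : unsigned { FlipH = 0x01, FlipV = 0x02 };

constexpr BufferTransformFlags operator&(BufferTransformFlags a, BufferTransformFlags b) {
    using T = std::underlying_type_t<BufferTransformFlags>;
    return static_cast<BufferTransformFlags>(static_cast<T>(a) & static_cast<T>(b));
}
constexpr BufferTransformFlags operator~(BufferTransformFlags a) {
    using T = std::underlying_type_t<BufferTransformFlags>;
    return static_cast<BufferTransformFlags>(~static_cast<T>(a));
}
constexpr BufferTransformFlags& operator&=(BufferTransformFlags& a, BufferTransformFlags b) {
    return a = a & b;
}
constexpr bool True(BufferTransformFlags f) {
    return static_cast<std::underlying_type_t<BufferTransformFlags>>(f) != 0;
}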

@@ -137,6 +137,56 @@ BlitScreen::BlitScreen(Core::Memory::Memory& cpu_memory_, Core::Frontend::EmuWin
BlitScreen::~BlitScreen() = default;
static Common::Rectangle<f32> NormalizeCrop(const Tegra::FramebufferConfig& framebuffer,
const ScreenInfo& screen_info) {
f32 left, top, right, bottom;
if (!framebuffer.crop_rect.IsEmpty()) {
// If crop rectangle is not empty, apply properties from rectangle.
left = static_cast<f32>(framebuffer.crop_rect.left);
top = static_cast<f32>(framebuffer.crop_rect.top);
right = static_cast<f32>(framebuffer.crop_rect.right);
bottom = static_cast<f32>(framebuffer.crop_rect.bottom);
} else {
// Otherwise, fall back to framebuffer dimensions.
left = 0;
top = 0;
right = static_cast<f32>(framebuffer.width);
bottom = static_cast<f32>(framebuffer.height);
}
// Apply transformation flags.
auto framebuffer_transform_flags = framebuffer.transform_flags;
if (True(framebuffer_transform_flags & Service::android::BufferTransformFlags::FlipH)) {
// Switch left and right.
std::swap(left, right);
}
if (True(framebuffer_transform_flags & Service::android::BufferTransformFlags::FlipV)) {
// Switch top and bottom.
std::swap(top, bottom);
}
framebuffer_transform_flags &= ~Service::android::BufferTransformFlags::FlipH;
framebuffer_transform_flags &= ~Service::android::BufferTransformFlags::FlipV;
if (True(framebuffer_transform_flags)) {
UNIMPLEMENTED_MSG("Unsupported framebuffer_transform_flags={}",
static_cast<u32>(framebuffer_transform_flags));
}
// Get the screen properties.
const f32 screen_width = static_cast<f32>(screen_info.width);
const f32 screen_height = static_cast<f32>(screen_info.height);
// Normalize coordinate space.
left /= screen_width;
top /= screen_height;
right /= screen_width;
bottom /= screen_height;
return Common::Rectangle<f32>(left, top, right, bottom);
}
void BlitScreen::Recreate() {
present_manager.WaitPresent();
scheduler.Finish();
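A worked example of the NormalizeCrop helper added above, using assumed values for the common handheld-mode case (the game renders 1280x720 into a 1920x1080 framebuffer texture, so screen_info is 1920x1080 and crop_rect is {0, 0, 1280, 720}):

// Illustrative numbers only, not part of the change.
const float left = 0.0f / 1920.0f;      // 0.0
const float top = 0.0f / 1080.0f;       // 0.0
const float right = 1280.0f / 1920.0f;  // ~0.667
const float bottom = 720.0f / 1080.0f;  // ~0.667
// If FlipH were set, left and right would be swapped before normalizing,
// giving {~0.667, 0.0, 0.0, ~0.667}, i.e. a horizontally mirrored rectangle.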
@@ -354,17 +404,10 @@ void BlitScreen::Draw(const Tegra::FramebufferConfig& framebuffer,
source_image_view = smaa->Draw(scheduler, image_index, source_image, source_image_view);
}
if (fsr) {
auto crop_rect = framebuffer.crop_rect;
if (crop_rect.GetWidth() == 0) {
crop_rect.right = framebuffer.width;
}
if (crop_rect.GetHeight() == 0) {
crop_rect.bottom = framebuffer.height;
}
crop_rect = crop_rect.Scale(Settings::values.resolution_info.up_factor);
VkExtent2D fsr_input_size{
.width = Settings::values.resolution_info.ScaleUp(framebuffer.width),
.height = Settings::values.resolution_info.ScaleUp(framebuffer.height),
const auto crop_rect = NormalizeCrop(framebuffer, screen_info);
const VkExtent2D fsr_input_size{
.width = Settings::values.resolution_info.ScaleUp(screen_info.width),
.height = Settings::values.resolution_info.ScaleUp(screen_info.height),
};
VkImageView fsr_image_view =
fsr->Draw(scheduler, image_index, source_image_view, fsr_input_size, crop_rect);
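With the crop now resolution-independent, the FSR input extent is derived from the guest texture size (screen_info) rather than the framebuffer dimensions, and the crop no longer has to be pre-scaled by the upscaling factor as the removed code did. For illustration (assumed values: 2x resolution scaling, 1920x1080 guest texture):

// Assumed values for illustration only.
const VkExtent2D fsr_input_size{.width = 3840, .height = 2160};  // 1920x1080 scaled up 2x
// fsr->Draw() receives this extent plus the normalized crop {0.0, 0.0, ~0.667, ~0.667};
// the old path instead scaled an integer crop_rect by up_factor by hand.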
@@ -1397,61 +1440,37 @@ void BlitScreen::SetUniformData(BufferData& data, const Layout::FramebufferLayou
void BlitScreen::SetVertexData(BufferData& data, const Tegra::FramebufferConfig& framebuffer,
const Layout::FramebufferLayout layout) const {
const auto& framebuffer_transform_flags = framebuffer.transform_flags;
const auto& framebuffer_crop_rect = framebuffer.crop_rect;
f32 left, top, right, bottom;
static constexpr Common::Rectangle<f32> texcoords{0.f, 0.f, 1.f, 1.f};
auto left = texcoords.left;
auto right = texcoords.right;
if (fsr) {
// FSR has already applied the crop, so we just want to render the image
// it has produced.
left = 0;
top = 0;
right = 1;
bottom = 1;
} else {
// Get the normalized crop rectangle.
const auto crop = NormalizeCrop(framebuffer, screen_info);
switch (framebuffer_transform_flags) {
case Service::android::BufferTransformFlags::Unset:
break;
case Service::android::BufferTransformFlags::FlipV:
// Flip the framebuffer vertically
left = texcoords.right;
right = texcoords.left;
break;
default:
UNIMPLEMENTED_MSG("Unsupported framebuffer_transform_flags={}",
static_cast<u32>(framebuffer_transform_flags));
break;
}
UNIMPLEMENTED_IF(framebuffer_crop_rect.left != 0);
f32 left_start{};
if (framebuffer_crop_rect.Top() > 0) {
left_start = static_cast<f32>(framebuffer_crop_rect.Top()) /
static_cast<f32>(framebuffer_crop_rect.Bottom());
}
f32 scale_u = static_cast<f32>(framebuffer.width) / static_cast<f32>(screen_info.width);
f32 scale_v = static_cast<f32>(framebuffer.height) / static_cast<f32>(screen_info.height);
// Scale the output by the crop width/height. This is commonly used with 1280x720 rendering
// (e.g. handheld mode) on a 1920x1080 framebuffer.
if (!fsr) {
if (framebuffer_crop_rect.GetWidth() > 0) {
scale_u = static_cast<f32>(framebuffer_crop_rect.GetWidth()) /
static_cast<f32>(screen_info.width);
}
if (framebuffer_crop_rect.GetHeight() > 0) {
scale_v = static_cast<f32>(framebuffer_crop_rect.GetHeight()) /
static_cast<f32>(screen_info.height);
}
// Apply the crop.
left = crop.left;
top = crop.top;
right = crop.right;
bottom = crop.bottom;
}
// Map the coordinates to the screen.
const auto& screen = layout.screen;
const auto x = static_cast<f32>(screen.left);
const auto y = static_cast<f32>(screen.top);
const auto w = static_cast<f32>(screen.GetWidth());
const auto h = static_cast<f32>(screen.GetHeight());
data.vertices[0] = ScreenRectVertex(x, y, texcoords.top * scale_u, left_start + left * scale_v);
data.vertices[1] =
ScreenRectVertex(x + w, y, texcoords.bottom * scale_u, left_start + left * scale_v);
data.vertices[2] =
ScreenRectVertex(x, y + h, texcoords.top * scale_u, left_start + right * scale_v);
data.vertices[3] =
ScreenRectVertex(x + w, y + h, texcoords.bottom * scale_u, left_start + right * scale_v);
data.vertices[0] = ScreenRectVertex(x, y, left, top);
data.vertices[1] = ScreenRectVertex(x + w, y, right, top);
data.vertices[2] = ScreenRectVertex(x, y + h, left, bottom);
data.vertices[3] = ScreenRectVertex(x + w, y + h, right, bottom);
}
void BlitScreen::CreateSMAA(VkExtent2D smaa_size) {
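In the rewritten SetVertexData, the normalized crop corners are used directly as texture coordinates, so the flip handling that used to live in the removed switch statement now falls out of NormalizeCrop. Continuing the assumed FlipH example from above:

// Assumed FlipH crop from the earlier example: {left ~0.667, top 0.0, right 0.0, bottom ~0.667}.
// Plugging it into the four vertices gives:
//   vertex 0 (screen top-left)     -> uv (0.667, 0.0)
//   vertex 1 (screen top-right)    -> uv (0.0,   0.0)
//   vertex 2 (screen bottom-left)  -> uv (0.667, 0.667)
//   vertex 3 (screen bottom-right) -> uv (0.0,   0.667)
// i.e. the presented image is mirrored horizontally with no special casing here.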

@@ -34,7 +34,7 @@ FSR::FSR(const Device& device_, MemoryAllocator& memory_allocator_, size_t image
}
VkImageView FSR::Draw(Scheduler& scheduler, size_t image_index, VkImageView image_view,
VkExtent2D input_image_extent, const Common::Rectangle<int>& crop_rect) {
VkExtent2D input_image_extent, const Common::Rectangle<f32>& crop_rect) {
UpdateDescriptorSet(image_index, image_view);
@@ -61,15 +61,21 @@ VkImageView FSR::Draw(Scheduler& scheduler, size_t image_index, VkImageView imag
cmdbuf.BindPipeline(VK_PIPELINE_BIND_POINT_COMPUTE, *easu_pipeline);
std::array<u32, 4 * 4> push_constants;
FsrEasuConOffset(
push_constants.data() + 0, push_constants.data() + 4, push_constants.data() + 8,
push_constants.data() + 12,
const f32 input_image_width = static_cast<f32>(input_image_extent.width);
const f32 input_image_height = static_cast<f32>(input_image_extent.height);
const f32 output_image_width = static_cast<f32>(output_size.width);
const f32 output_image_height = static_cast<f32>(output_size.height);
const f32 viewport_width = (crop_rect.right - crop_rect.left) * input_image_width;
const f32 viewport_x = crop_rect.left * input_image_width;
const f32 viewport_height = (crop_rect.bottom - crop_rect.top) * input_image_height;
const f32 viewport_y = crop_rect.top * input_image_height;
static_cast<f32>(crop_rect.GetWidth()), static_cast<f32>(crop_rect.GetHeight()),
static_cast<f32>(input_image_extent.width), static_cast<f32>(input_image_extent.height),
static_cast<f32>(output_size.width), static_cast<f32>(output_size.height),
static_cast<f32>(crop_rect.left), static_cast<f32>(crop_rect.top));
std::array<u32, 4 * 4> push_constants;
FsrEasuConOffset(push_constants.data() + 0, push_constants.data() + 4,
push_constants.data() + 8, push_constants.data() + 12,
viewport_width, viewport_height, input_image_width, input_image_height,
output_image_width, output_image_height, viewport_x, viewport_y);
cmdbuf.PushConstants(*pipeline_layout, VK_SHADER_STAGE_COMPUTE_BIT, push_constants);
{
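Worked numbers for the viewport values fed to FsrEasuConOffset above (assumed: the 3840x2160 input extent and ~0.667 normalized crop from the earlier example):

// Illustration only.
const float viewport_width = (0.667f - 0.0f) * 3840.0f;   // ~2560 px
const float viewport_height = (0.667f - 0.0f) * 2160.0f;  // ~1440 px
const float viewport_x = 0.0f * 3840.0f;                   // 0 px
const float viewport_y = 0.0f * 2160.0f;                   // 0 px
// EASU therefore upscales only the 2560x1440 region the guest actually rendered,
// rather than the full 3840x2160 image.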

@@ -17,7 +17,7 @@ public:
explicit FSR(const Device& device, MemoryAllocator& memory_allocator, size_t image_count,
VkExtent2D output_size);
VkImageView Draw(Scheduler& scheduler, size_t image_index, VkImageView image_view,
VkExtent2D input_image_extent, const Common::Rectangle<int>& crop_rect);
VkExtent2D input_image_extent, const Common::Rectangle<f32>& crop_rect);
private:
void CreateDescriptorPool();