@ -3,6 +3,7 @@
// Refer to the license.txt file included.
#include <algorithm>
#include <array>
#include <bit>
#include <memory>
#include <optional>
#include <span>
#include <tuple>
#include <utility>
#include <vector>
@ -16,92 +17,93 @@
# include "video_core/vulkan_common/vulkan_wrapper.h"
namespace Vulkan {
namespace {
struct Range {
u64 begin ;
u64 end ;
u64 GetAllocationChunkSize ( u64 required_size ) {
static constexpr u64 sizes [ ] = { 16ULL < < 20 , 32ULL < < 20 , 64ULL < < 20 , 128ULL < < 20 } ;
auto it = std : : lower_bound ( std : : begin ( sizes ) , std : : end ( sizes ) , required_size ) ;
return it ! = std : : end ( sizes ) ? * it : Common : : AlignUp ( required_size , 256ULL < < 20 ) ;
[[nodiscard]] bool Contains ( u64 iterator , u64 size ) const noexcept {
return iterator < end & & begin < iterator + size ;
}
} ;
/// Returns the preferred allocation chunk size for a required size.
/// Picks the smallest entry of a sorted table of common chunk sizes that fits the
/// request; requests larger than the table are aligned up to 4 MiB instead.
[[nodiscard]] u64 GetAllocationChunkSize(u64 required_size) {
    static constexpr std::array sizes{
        0x1000ULL << 10, 0x1400ULL << 10, 0x1800ULL << 10, 0x1c00ULL << 10, 0x2000ULL << 10,
        0x3200ULL << 10, 0x4000ULL << 10, 0x6000ULL << 10, 0x8000ULL << 10, 0xA000ULL << 10,
        0x10000ULL << 10, 0x18000ULL << 10, 0x20000ULL << 10,
    };
    // lower_bound below relies on the table being sorted.
    static_assert(std::is_sorted(sizes.begin(), sizes.end()));

    const auto it = std::ranges::lower_bound(sizes, required_size);
    return it != sizes.end() ? *it : Common::AlignUp(required_size, 4ULL << 20);
}

} // Anonymous namespace
class VKMemoryAllocation final {
class MemoryAllocation {
public :
explicit VKMemoryAllocation ( const Device & device_ , vk : : DeviceMemory memory_ ,
VkMemoryPropertyFlags properties_ , u64 allocation_size_ , u32 type_ )
explicit MemoryAllocation( const Device & device_ , vk : : DeviceMemory memory_ ,
VkMemoryPropertyFlags properties_ , u64 allocation_size_ , u32 type_ )
: device { device_ } , memory { std : : move ( memory_ ) } , properties { properties_ } ,
allocation_size { allocation_size_ } , shifted_type { ShiftType ( type_ ) } { }
VKMemoryCommit Commit ( VkDeviceSize commit_size , VkDeviceSize alignment ) {
auto found = TryFindFreeSection ( free_iterator , allocation_size ,
static_cast < u64 > ( commit_size ) , static_cast < u64 > ( alignment ) ) ;
if ( ! found ) {
found = TryFindFreeSection ( 0 , free_iterator , static_cast < u64 > ( commit_size ) ,
static_cast < u64 > ( alignment ) ) ;
if ( ! found ) {
// Signal out of memory, it'll try to do more allocations.
return nullptr ;
}
[[nodiscard]] std : : optional < MemoryCommit > Commit ( VkDeviceSize size , VkDeviceSize alignment ) {
const std : : optional < u64 > alloc = FindFreeRegion ( size , alignment ) ;
if ( ! alloc ) {
// Signal out of memory, it'll try to do more allocations.
return std : : nullopt ;
}
auto commit = std : : make_unique < VKMemoryCommitImpl > ( device , this , memory , * found ,
* found + commit_size ) ;
commits . push_back ( commit . get ( ) ) ;
// Last commit's address is highly probable to be free.
free_iterator = * found + commit_size ;
return commit ;
const Range range {
. begin = * alloc ,
. end = * alloc + size ,
} ;
commits . insert ( std : : ranges : : upper_bound ( commits , * alloc , { } , & Range : : begin ) , range ) ;
return std : : make_optional < MemoryCommit > ( device , this , * memory , * alloc , * alloc + size ) ;
}
void Free ( const VKMemoryCommitImpl * commit ) {
ASSERT ( commit ) ;
const auto it = std : : find ( std : : begin ( commits ) , std : : end ( commits ) , commit ) ;
if ( it = = commits . end ( ) ) {
UNREACHABLE_MSG ( " Freeing unallocated commit! " ) ;
return ;
}
void Free ( u64 begin ) {
const auto it = std : : ranges : : find ( commits , begin , & Range : : begin ) ;
ASSERT_MSG ( it ! = commits . end ( ) , " Invalid commit " ) ;
commits . erase ( it ) ;
}
[[nodiscard]] std : : span < u8 > Map ( ) {
if ( ! memory_mapped_span . empty ( ) ) {
return memory_mapped_span ;
}
u8 * const raw_pointer = memory . Map ( 0 , allocation_size ) ;
memory_mapped_span = std : : span < u8 > ( raw_pointer , allocation_size ) ;
return memory_mapped_span ;
}
/// Returns whether this allocation is compatible with the arguments.
bool IsCompatible ( VkMemoryPropertyFlags wanted_properties , u32 type_mask ) const {
[[nodiscard]] bool IsCompatible( VkMemoryPropertyFlags wanted_properties , u32 type_mask ) const {
return ( wanted_properties & properties ) & & ( type_mask & shifted_type ) ! = 0 ;
}
private :
static constexpr u32 ShiftType ( u32 type ) {
[[nodiscard]] static constexpr u32 ShiftType( u32 type ) {
return 1U < < type ;
}
/// A memory allocator, it may return a free region between "start" and "end" with the solicited
/// requirements.
std : : optional < u64 > TryFindFreeSection ( u64 start , u64 end , u64 size , u64 alignment ) const {
u64 iterator = Common : : AlignUp ( start , alignment ) ;
while ( iterator + size < = end ) {
const u64 try_left = iterator ;
const u64 try_right = try_left + size ;
bool overlap = false ;
for ( const auto & commit : commits ) {
const auto [ commit_left , commit_right ] = commit - > interval ;
if ( try_left < commit_right & & commit_left < try_right ) {
// There's an overlap, continue the search where the overlapping commit ends.
iterator = Common : : AlignUp ( commit_right , alignment ) ;
overlap = true ;
break ;
}
[[nodiscard]] std : : optional < u64 > FindFreeRegion ( u64 size , u64 alignment ) noexcept {
ASSERT ( std : : has_single_bit ( alignment ) ) ;
const u64 alignment_log2 = std : : countr_zero ( alignment ) ;
std : : optional < u64 > candidate ;
u64 iterator = 0 ;
auto commit = commits . begin ( ) ;
while ( iterator + size < = allocation_size ) {
candidate = candidate . value_or ( iterator ) ;
if ( commit = = commits . end ( ) ) {
break ;
}
if ( ! overlap ) {
// A free address has been found.
return try_left ;
if ( commit - > Contains ( * candidate , size ) ) {
candidate = std : : nullopt ;
}
iterator = Common : : AlignUpLog2 ( commit - > end , alignment_log2 ) ;
+ + commit ;
}
// No free regions where found, return an empty optional.
return std : : nullopt ;
return candidate ;
}
const Device & device ; ///< Vulkan device.
@ -109,21 +111,52 @@ private:
const VkMemoryPropertyFlags properties ; ///< Vulkan properties.
const u64 allocation_size ; ///< Size of this allocation.
const u32 shifted_type ; ///< Stored Vulkan type of this allocation, shifted.
/// Hints where the next free region is likely going to be.
u64 free_iterator { } ;
/// Stores all commits done from this allocation.
std : : vector < const VKMemoryCommitImpl * > commits ;
std : : vector < Range > commits ; ///< All commit ranges done from this allocation.
std : : span < u8 > memory_mapped_span ; ///< Memory mapped span. Empty if not queried before.
} ;
MemoryCommit::MemoryCommit(const Device& device_, MemoryAllocation* allocation_,
                           VkDeviceMemory memory_, u64 begin, u64 end) noexcept
    : device{&device_}, allocation{allocation_}, memory{memory_}, interval{begin, end} {}

MemoryCommit::~MemoryCommit() {
    // Returns the committed range back to the owning allocation.
    Release();
}
MemoryCommit& MemoryCommit::operator=(MemoryCommit&& rhs) noexcept {
    // Guard against self-move: Release() would free our range before reading it back.
    if (this != &rhs) {
        Release();
        device = rhs.device;
        allocation = std::exchange(rhs.allocation, nullptr);
        memory = rhs.memory;
        interval = rhs.interval;
        span = std::exchange(rhs.span, std::span<u8>{});
    }
    return *this;
}

MemoryCommit::MemoryCommit(MemoryCommit&& rhs) noexcept
    : device{rhs.device}, allocation{std::exchange(rhs.allocation, nullptr)}, memory{rhs.memory},
      interval{rhs.interval}, span{std::exchange(rhs.span, std::span<u8>{})} {}
std : : span < u8 > MemoryCommit : : Map ( ) {
if ( ! span . empty ( ) ) {
return span ;
}
span = allocation - > Map ( ) . subspan ( interval . first , interval . second - interval . first ) ;
return span ;
}
void MemoryCommit : : Release ( ) {
if ( allocation ) {
allocation - > Free ( interval . first ) ;
}
}
VKMemoryManager::VKMemoryManager(const Device& device_)
    : device{device_}, properties{device_.GetPhysical().GetMemoryProperties()} {}

VKMemoryManager::~VKMemoryManager() = default;
VKMemoryCommit VKMemoryManager : : Commit ( const VkMemoryRequirements & requirements ,
bool host_visible ) {
MemoryCommit VKMemoryManager : : Commit ( const VkMemoryRequirements & requirements , bool host_visible ) {
const u64 chunk_size = GetAllocationChunkSize ( requirements . size ) ;
// When a host visible commit is asked, search for host visible and coherent, otherwise search
@ -131,39 +164,31 @@ VKMemoryCommit VKMemoryManager::Commit(const VkMemoryRequirements& requirements,
const VkMemoryPropertyFlags wanted_properties =
host_visible ? VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT
: VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT ;
if ( auto commit = TryAllocCommit ( requirements , wanted_properties ) ) {
return commit ;
if ( std : : optional < MemoryCommit > commit = TryAllocCommit ( requirements , wanted_properties ) ) {
return std : : move ( * commit ) ;
}
// Commit has failed, allocate more memory.
if ( ! AllocMemory ( wanted_properties , requirements . memoryTypeBits , chunk_size ) ) {
// TODO(Rodrigo): Handle these situations in some way like flushing to guest memory.
// Allocation has failed, panic.
UNREACHABLE_MSG ( " Ran out of VRAM! " ) ;
return { } ;
}
// TODO(Rodrigo): Handle out of memory situations in some way like flushing to guest memory.
AllocMemory ( wanted_properties , requirements . memoryTypeBits , chunk_size ) ;
// Commit again, this time it won't fail since there's a fresh allocation above. If it does,
// there's a bug.
auto commit = TryAllocCommit ( requirements , wanted_properties ) ;
ASSERT ( commit ) ;
return commit ;
// Commit again, this time it won't fail since there's a fresh allocation above.
// If it does, there's a bug.
return TryAllocCommit ( requirements , wanted_properties ) . value ( ) ;
}
VK MemoryCommit VKMemoryManager : : Commit ( const vk : : Buffer & buffer , bool host_visible ) {
MemoryCommit VKMemoryManager : : Commit ( const vk : : Buffer & buffer , bool host_visible ) {
auto commit = Commit ( device . GetLogical ( ) . GetBufferMemoryRequirements ( * buffer ) , host_visible ) ;
buffer . BindMemory ( commit - > GetMemory ( ) , commit - > Get Offset( ) ) ;
buffer . BindMemory ( commit . Memory ( ) , commit . Offset( ) ) ;
return commit ;
}
VK MemoryCommit VKMemoryManager : : Commit ( const vk : : Image & image , bool host_visible ) {
MemoryCommit VKMemoryManager : : Commit ( const vk : : Image & image , bool host_visible ) {
auto commit = Commit ( device . GetLogical ( ) . GetImageMemoryRequirements ( * image ) , host_visible ) ;
image . BindMemory ( commit - > GetMemory ( ) , commit - > Get Offset( ) ) ;
image . BindMemory ( commit . Memory ( ) , commit . Offset( ) ) ;
return commit ;
}
bool VKMemoryManager : : AllocMemory ( VkMemoryPropertyFlags wanted_properties , u32 type_mask ,
void VKMemoryManager : : AllocMemory ( VkMemoryPropertyFlags wanted_properties , u32 type_mask ,
u64 size ) {
const u32 type = [ & ] {
for ( u32 type_index = 0 ; type_index < properties . memoryTypeCount ; + + type_index ) {
@ -176,26 +201,18 @@ bool VKMemoryManager::AllocMemory(VkMemoryPropertyFlags wanted_properties, u32 t
UNREACHABLE_MSG ( " Couldn't find a compatible memory type! " ) ;
return 0U ;
} ( ) ;
// Try to allocate found type.
vk : : DeviceMemory memory = device . GetLogical ( ) . TryAllocateMemory ( {
vk : : DeviceMemory memory = device . GetLogical ( ) . AllocateMemory ( {
. sType = VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO ,
. pNext = nullptr ,
. allocationSize = size ,
. memoryTypeIndex = type ,
} ) ;
if ( ! memory ) {
LOG_CRITICAL ( Render_Vulkan , " Device allocation failed! " ) ;
return false ;
}
allocations . push_back ( std : : make_unique < VKMemoryAllocation > ( device , std : : move ( memory ) ,
wanted_properties , size , type ) ) ;
return true ;
allocations . push_back ( std : : make_unique < MemoryAllocation > ( device , std : : move ( memory ) ,
wanted_properties , size , type ) ) ;
}
VKMemoryCommit VKMemoryManager : : TryAllocCommit ( const VkMemoryRequirements & requirements ,
VkMemoryPropertyFlags wanted_properties ) {
std : : optional < MemoryCommit > VKMemoryManager : : TryAllocCommit (
const VkMemoryRequirements & requirements , VkMemoryPropertyFlags wanted_properties ) {
for ( auto & allocation : allocations ) {
if ( ! allocation - > IsCompatible ( wanted_properties , requirements . memoryTypeBits ) ) {
continue ;
@ -204,27 +221,7 @@ VKMemoryCommit VKMemoryManager::TryAllocCommit(const VkMemoryRequirements& requi
return commit ;
}
}
return { } ;
}
VKMemoryCommitImpl : : VKMemoryCommitImpl ( const Device & device_ , VKMemoryAllocation * allocation_ ,
const vk : : DeviceMemory & memory_ , u64 begin_ , u64 end_ )
: device { device_ } , memory { memory_ } , interval { begin_ , end_ } , allocation { allocation_ } { }
VKMemoryCommitImpl : : ~ VKMemoryCommitImpl ( ) {
allocation - > Free ( this ) ;
}
MemoryMap VKMemoryCommitImpl : : Map ( u64 size , u64 offset_ ) const {
return MemoryMap ( this , std : : span < u8 > ( memory . Map ( interval . first + offset_ , size ) , size ) ) ;
}
void VKMemoryCommitImpl : : Unmap ( ) const {
memory . Unmap ( ) ;
}
MemoryMap VKMemoryCommitImpl : : Map ( ) const {
return Map ( interval . second - interval . first ) ;
return std : : nullopt ;
}
} // namespace Vulkan