@@ -6,6 +6,7 @@
 #include <optional>
 #include <tuple>
 #include <vector>
 #include "common/alignment.h"
 #include "common/assert.h"
 #include "common/common_types.h"
@@ -16,34 +17,32 @@
 namespace Vulkan {
 
-// TODO(Rodrigo): Fine tune this number
-constexpr u64 ALLOC_CHUNK_SIZE = 64 * 1024 * 1024;
+namespace {
+
+u64 GetAllocationChunkSize(u64 required_size) {
+    static constexpr u64 sizes[] = {16ULL << 20, 32ULL << 20, 64ULL << 20, 128ULL << 20};
+    auto it = std::lower_bound(std::begin(sizes), std::end(sizes), required_size);
+    return it != std::end(sizes) ? *it : Common::AlignUp(required_size, 256ULL << 20);
+}
+
+} // Anonymous namespace
 
 class VKMemoryAllocation final {
 public:
     explicit VKMemoryAllocation(const VKDevice& device, vk::DeviceMemory memory,
-                                vk::MemoryPropertyFlags properties, u64 alloc_size, u32 type)
-        : device{device}, memory{memory}, properties{properties}, alloc_size{alloc_size},
-          shifted_type{ShiftType(type)}, is_mappable{properties &
-                                                     vk::MemoryPropertyFlagBits::eHostVisible} {
-        if (is_mappable) {
-            const auto dev = device.GetLogical();
-            const auto& dld = device.GetDispatchLoader();
-            base_address = static_cast<u8*>(dev.mapMemory(memory, 0, alloc_size, {}, dld));
-        }
-    }
+                                vk::MemoryPropertyFlags properties, u64 allocation_size, u32 type)
+        : device{device}, memory{memory}, properties{properties}, allocation_size{allocation_size},
+          shifted_type{ShiftType(type)} {}
 
     ~VKMemoryAllocation() {
         const auto dev = device.GetLogical();
         const auto& dld = device.GetDispatchLoader();
-        if (is_mappable)
-            dev.unmapMemory(memory, dld);
         dev.free(memory, nullptr, dld);
     }
 
     VKMemoryCommit Commit(vk::DeviceSize commit_size, vk::DeviceSize alignment) {
-        auto found = TryFindFreeSection(free_iterator, alloc_size, static_cast<u64>(commit_size),
-                                        static_cast<u64>(alignment));
+        auto found = TryFindFreeSection(free_iterator, allocation_size,
+                                        static_cast<u64>(commit_size), static_cast<u64>(alignment));
         if (!found) {
             found = TryFindFreeSection(0, free_iterator, static_cast<u64>(commit_size),
                                        static_cast<u64>(alignment));
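An aside on the chunk-size policy introduced in the hunk above, which replaces the fixed 64 MiB ALLOC_CHUNK_SIZE: a minimal standalone sketch for illustration only (ChunkSizeFor, the hand-expanded AlignUp, and the asserted values are hypothetical, not part of the patch). Requests up to 128 MiB snap to the next bucket; anything larger rounds up to the next 256 MiB multiple.

    #include <algorithm>
    #include <cassert>
    #include <cstdint>

    // Free-standing re-statement of the bucket logic, with AlignUp expanded by hand.
    std::uint64_t ChunkSizeFor(std::uint64_t required_size) {
        static constexpr std::uint64_t sizes[] = {16ULL << 20, 32ULL << 20, 64ULL << 20,
                                                  128ULL << 20};
        const auto it = std::lower_bound(std::begin(sizes), std::end(sizes), required_size);
        constexpr std::uint64_t step = 256ULL << 20;
        // Past the last bucket, round up to a multiple of 256 MiB instead.
        return it != std::end(sizes) ? *it : ((required_size + step - 1) / step) * step;
    }

    int main() {
        assert(ChunkSizeFor(10ULL << 20) == (16ULL << 20));   // 10 MiB -> 16 MiB bucket
        assert(ChunkSizeFor(48ULL << 20) == (64ULL << 20));   // 48 MiB -> 64 MiB bucket
        assert(ChunkSizeFor(300ULL << 20) == (512ULL << 20)); // 300 MiB -> next 256 MiB multiple
    }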
@@ -52,8 +51,7 @@ public:
                 return nullptr;
             }
         }
-        u8* address = is_mappable ? base_address + *found : nullptr;
-        auto commit = std::make_unique<VKMemoryCommitImpl>(this, memory, address, *found,
-                                                           *found + commit_size);
+        auto commit = std::make_unique<VKMemoryCommitImpl>(device, this, memory, *found,
+                                                           *found + commit_size);
         commits.push_back(commit.get());
@@ -65,12 +63,10 @@ public:
     void Free(const VKMemoryCommitImpl* commit) {
         ASSERT(commit);
-        const auto it =
-            std::find_if(commits.begin(), commits.end(),
-                         [&](const auto& stored_commit) { return stored_commit == commit; });
+        const auto it = std::find(std::begin(commits), std::end(commits), commit);
         if (it == commits.end()) {
-            LOG_CRITICAL(Render_Vulkan, "Freeing unallocated commit!");
-            UNREACHABLE();
+            UNREACHABLE_MSG("Freeing unallocated commit!");
             return;
         }
         commits.erase(it);
@@ -88,11 +84,11 @@ private:
     }
 
     /// A memory allocator, it may return a free region between "start" and "end" with the solicited
-    /// requeriments.
+    /// requirements.
     std::optional<u64> TryFindFreeSection(u64 start, u64 end, u64 size, u64 alignment) const {
-        u64 iterator = start;
-        while (iterator + size < end) {
-            const u64 try_left = Common::AlignUp(iterator, alignment);
+        u64 iterator = Common::AlignUp(start, alignment);
+        while (iterator + size <= end) {
+            const u64 try_left = iterator;
             const u64 try_right = try_left + size;
 
             bool overlap = false;
@@ -100,7 +96,7 @@ private:
                 const auto [commit_left, commit_right] = commit->interval;
                 if (try_left < commit_right && commit_left < try_right) {
                     // There's an overlap, continue the search where the overlapping commit ends.
-                    iterator = commit_right;
+                    iterator = Common::AlignUp(commit_right, alignment);
                     overlap = true;
                     break;
                 }
@@ -110,6 +106,7 @@ private:
                 return try_left;
             }
         }
+
         // No free regions where found, return an empty optional.
         return std::nullopt;
     }
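Note on the TryFindFreeSection changes in the hunks above: the search cursor is now aligned before the loop and re-aligned after every overlapping commit, and the loop bound changed from < to <=. As a worked example with illustrative values: with alignment = 0x100 and a commit ending at offset 0x1234, the old code resumed at 0x1234 and only aligned the candidate on the next iteration, while the new code resumes directly at AlignUp(0x1234, 0x100) = 0x1300. Aligning before the bound check also keeps the aligned candidate from extending past end, and with end = 0x10000 and size = 0x1000 a candidate at 0xF000 now passes the check (0xF000 + 0x1000 <= 0x10000) instead of being rejected by the old strict <.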
@@ -117,12 +114,8 @@ private:
     const VKDevice& device;                   ///< Vulkan device.
     const vk::DeviceMemory memory;            ///< Vulkan memory allocation handler.
     const vk::MemoryPropertyFlags properties; ///< Vulkan properties.
-    const u64 alloc_size;                     ///< Size of this allocation.
+    const u64 allocation_size;                ///< Size of this allocation.
     const u32 shifted_type;                   ///< Stored Vulkan type of this allocation, shifted.
-    const bool is_mappable;                   ///< Whether the allocation is mappable.
-
-    /// Base address of the mapped pointer.
-    u8* base_address{};
 
     /// Hints where the next free region is likely going to be.
     u64 free_iterator{};
@@ -132,13 +125,15 @@ private:
 };
 
 VKMemoryManager::VKMemoryManager(const VKDevice& device)
-    : device{device}, props{device.GetPhysical().getMemoryProperties(device.GetDispatchLoader())},
-      is_memory_unified{GetMemoryUnified(props)} {}
+    : device{device}, properties{device.GetPhysical().getMemoryProperties(
+                          device.GetDispatchLoader())},
+      is_memory_unified{GetMemoryUnified(properties)} {}
 
 VKMemoryManager::~VKMemoryManager() = default;
 
-VKMemoryCommit VKMemoryManager::Commit(const vk::MemoryRequirements& reqs, bool host_visible) {
-    ASSERT(reqs.size < ALLOC_CHUNK_SIZE);
+VKMemoryCommit VKMemoryManager::Commit(const vk::MemoryRequirements& requirements,
+                                       bool host_visible) {
+    const u64 chunk_size = GetAllocationChunkSize(requirements.size);
 
     // When a host visible commit is asked, search for host visible and coherent, otherwise search
     // for a fast device local type.
@@ -147,32 +142,21 @@ VKMemoryCommit VKMemoryManager::Commit(const vk::MemoryRequirements& reqs, bool
             ? vk::MemoryPropertyFlagBits::eHostVisible | vk::MemoryPropertyFlagBits::eHostCoherent
             : vk::MemoryPropertyFlagBits::eDeviceLocal;
 
-    const auto TryCommit = [&]() -> VKMemoryCommit {
-        for (auto& alloc : allocs) {
-            if (!alloc->IsCompatible(wanted_properties, reqs.memoryTypeBits))
-                continue;
-            if (auto commit = alloc->Commit(reqs.size, reqs.alignment); commit) {
-                return commit;
-            }
-        }
-        return {};
-    };
-
-    if (auto commit = TryCommit(); commit) {
+    if (auto commit = TryAllocCommit(requirements, wanted_properties)) {
         return commit;
     }
 
     // Commit has failed, allocate more memory.
-    if (!AllocMemory(wanted_properties, reqs.memoryTypeBits, ALLOC_CHUNK_SIZE)) {
-        // TODO(Rodrigo): Try to use host memory.
-        LOG_CRITICAL(Render_Vulkan, "Ran out of memory!");
-        UNREACHABLE();
+    if (!AllocMemory(wanted_properties, requirements.memoryTypeBits, chunk_size)) {
+        // TODO(Rodrigo): Handle these situations in some way like flushing to guest memory.
+        // Allocation has failed, panic.
+        UNREACHABLE_MSG("Ran out of VRAM!");
+        return {};
     }
 
     // Commit again, this time it won't fail since there's a fresh allocation above. If it does,
     // there's a bug.
-    auto commit = TryCommit();
+    auto commit = TryAllocCommit(requirements, wanted_properties);
     ASSERT(commit);
     return commit;
 }
@@ -180,8 +164,7 @@ VKMemoryCommit VKMemoryManager::Commit(const vk::MemoryRequirements& reqs, bool
 VKMemoryCommit VKMemoryManager::Commit(vk::Buffer buffer, bool host_visible) {
     const auto dev = device.GetLogical();
     const auto& dld = device.GetDispatchLoader();
-    const auto requeriments = dev.getBufferMemoryRequirements(buffer, dld);
-    auto commit = Commit(requeriments, host_visible);
+    auto commit = Commit(dev.getBufferMemoryRequirements(buffer, dld), host_visible);
     dev.bindBufferMemory(buffer, commit->GetMemory(), commit->GetOffset(), dld);
     return commit;
 }
@@ -189,25 +172,23 @@ VKMemoryCommit VKMemoryManager::Commit(vk::Buffer buffer, bool host_visible) {
 VKMemoryCommit VKMemoryManager::Commit(vk::Image image, bool host_visible) {
     const auto dev = device.GetLogical();
     const auto& dld = device.GetDispatchLoader();
-    const auto requeriments = dev.getImageMemoryRequirements(image, dld);
-    auto commit = Commit(requeriments, host_visible);
+    auto commit = Commit(dev.getImageMemoryRequirements(image, dld), host_visible);
     dev.bindImageMemory(image, commit->GetMemory(), commit->GetOffset(), dld);
     return commit;
 }
 
 bool VKMemoryManager::AllocMemory(vk::MemoryPropertyFlags wanted_properties, u32 type_mask,
                                   u64 size) {
-    const u32 type = [&]() {
-        for (u32 type_index = 0; type_index < props.memoryTypeCount; ++type_index) {
-            const auto flags = props.memoryTypes[type_index].propertyFlags;
+    const u32 type = [&] {
+        for (u32 type_index = 0; type_index < properties.memoryTypeCount; ++type_index) {
+            const auto flags = properties.memoryTypes[type_index].propertyFlags;
             if ((type_mask & (1U << type_index)) && (flags & wanted_properties)) {
                 // The type matches in type and in the wanted properties.
                 return type_index;
             }
         }
-        LOG_CRITICAL(Render_Vulkan, "Couldn't find a compatible memory type!");
-        UNREACHABLE();
-        return 0u;
+        UNREACHABLE_MSG("Couldn't find a compatible memory type!");
+        return 0U;
     }();
 
     const auto dev = device.GetLogical();
@@ -216,19 +197,33 @@ bool VKMemoryManager::AllocMemory(vk::MemoryPropertyFlags wanted_properties, u32
     // Try to allocate found type.
     const vk::MemoryAllocateInfo memory_ai(size, type);
     vk::DeviceMemory memory;
-    if (const vk::Result res = dev.allocateMemory(&memory_ai, nullptr, &memory, dld);
+    if (const auto res = dev.allocateMemory(&memory_ai, nullptr, &memory, dld);
         res != vk::Result::eSuccess) {
         LOG_CRITICAL(Render_Vulkan, "Device allocation failed with code {}!", vk::to_string(res));
         return false;
     }
 
-    allocs.push_back(
+    allocations.push_back(
         std::make_unique<VKMemoryAllocation>(device, memory, wanted_properties, size, type));
     return true;
 }
 
-/*static*/ bool VKMemoryManager::GetMemoryUnified(const vk::PhysicalDeviceMemoryProperties& props) {
-    for (u32 heap_index = 0; heap_index < props.memoryHeapCount; ++heap_index) {
-        if (!(props.memoryHeaps[heap_index].flags & vk::MemoryHeapFlagBits::eDeviceLocal)) {
+VKMemoryCommit VKMemoryManager::TryAllocCommit(const vk::MemoryRequirements& requirements,
+                                               vk::MemoryPropertyFlags wanted_properties) {
+    for (auto& allocation : allocations) {
+        if (!allocation->IsCompatible(wanted_properties, requirements.memoryTypeBits)) {
+            continue;
+        }
+        if (auto commit = allocation->Commit(requirements.size, requirements.alignment)) {
+            return commit;
+        }
+    }
+    return {};
+}
+
+/*static*/ bool VKMemoryManager::GetMemoryUnified(
+    const vk::PhysicalDeviceMemoryProperties& properties) {
+    for (u32 heap_index = 0; heap_index < properties.memoryHeapCount; ++heap_index) {
+        if (!(properties.memoryHeaps[heap_index].flags & vk::MemoryHeapFlagBits::eDeviceLocal)) {
             // Memory is considered unified when heaps are device local only.
             return false;
         }
@@ -236,17 +231,28 @@ bool VKMemoryManager::AllocMemory(vk::MemoryPropertyFlags wanted_properties, u32
     return true;
 }
 
-VKMemoryCommitImpl::VKMemoryCommitImpl(VKMemoryAllocation* allocation, vk::DeviceMemory memory,
-                                       u8* data, u64 begin, u64 end)
-    : interval(std::make_pair(begin, end)), memory{memory}, allocation{allocation}, data{data} {}
+VKMemoryCommitImpl::VKMemoryCommitImpl(const VKDevice& device, VKMemoryAllocation* allocation,
+                                       vk::DeviceMemory memory, u64 begin, u64 end)
+    : device{device}, interval{begin, end}, memory{memory}, allocation{allocation} {}
 
 VKMemoryCommitImpl::~VKMemoryCommitImpl() {
     allocation->Free(this);
 }
 
-u8* VKMemoryCommitImpl::GetData() const {
-    ASSERT_MSG(data != nullptr, "Trying to access an unmapped commit.");
-    return data;
+MemoryMap VKMemoryCommitImpl::Map(u64 size, u64 offset_) const {
+    const auto dev = device.GetLogical();
+    const auto address = reinterpret_cast<u8*>(
+        dev.mapMemory(memory, interval.first + offset_, size, {}, device.GetDispatchLoader()));
+    return MemoryMap{this, address};
+}
+
+void VKMemoryCommitImpl::Unmap() const {
+    const auto dev = device.GetLogical();
+    dev.unmapMemory(memory, device.GetDispatchLoader());
+}
+
+MemoryMap VKMemoryCommitImpl::Map() const {
+    return Map(interval.second - interval.first);
 }
 
 } // namespace Vulkan
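For context, a hypothetical caller of the new mapping API introduced in the last hunk, assuming MemoryMap (declared in vk_memory_manager.h, not shown in this diff) is an RAII wrapper that exposes the mapped pointer and calls VKMemoryCommitImpl::Unmap() when it goes out of scope; GetAddress(), the commit and src variables, and upload_size are illustrative assumptions rather than part of the patch. Compared with the removed base_address scheme, the allocation is no longer kept persistently mapped: each commit maps only the range it needs, relative to its own interval.

    // Stage upload_size bytes through a host-visible commit (sketch, not from the patch).
    const VKMemoryCommit commit = memory_manager.Commit(buffer, /*host_visible=*/true);
    {
        // Offsets passed to Map() are relative to the commit's interval inside the allocation.
        const MemoryMap map = commit->Map(upload_size, 0);
        std::memcpy(map.GetAddress(), src, upload_size); // GetAddress() is an assumed accessor.
    } // Assumed: the MemoryMap destructor calls Unmap() here.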