23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1689 #ifndef VMA_RECORDING_ENABLED 1690 #define VMA_RECORDING_ENABLED 0 1694 #define NOMINMAX // For windows.h 1698 #include <vulkan/vulkan.h> 1701 #if VMA_RECORDING_ENABLED 1702 #include <windows.h> 1705 #if !defined(VMA_DEDICATED_ALLOCATION) 1706 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1707 #define VMA_DEDICATED_ALLOCATION 1 1709 #define VMA_DEDICATED_ALLOCATION 0 1713 #if !defined(VMA_BIND_MEMORY2) 1714 #if VK_KHR_bind_memory2 1715 #define VMA_BIND_MEMORY2 1 1717 #define VMA_BIND_MEMORY2 0 1735 uint32_t memoryType,
1736 VkDeviceMemory memory,
1741 uint32_t memoryType,
1742 VkDeviceMemory memory,
1827 #if VMA_DEDICATED_ALLOCATION 1828 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1829 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1831 #if VMA_BIND_MEMORY2 1832 PFN_vkBindBufferMemory2KHR vkBindBufferMemory2KHR;
1833 PFN_vkBindImageMemory2KHR vkBindImageMemory2KHR;
1960 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1968 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1978 uint32_t memoryTypeIndex,
1979 VkMemoryPropertyFlags* pFlags);
1991 uint32_t frameIndex);
2024 #ifndef VMA_STATS_STRING_ENABLED 2025 #define VMA_STATS_STRING_ENABLED 1 2028 #if VMA_STATS_STRING_ENABLED 2035 char** ppStatsString,
2036 VkBool32 detailedMap);
2040 char* pStatsString);
2042 #endif // #if VMA_STATS_STRING_ENABLED 2275 uint32_t memoryTypeBits,
2277 uint32_t* pMemoryTypeIndex);
2293 const VkBufferCreateInfo* pBufferCreateInfo,
2295 uint32_t* pMemoryTypeIndex);
2311 const VkImageCreateInfo* pImageCreateInfo,
2313 uint32_t* pMemoryTypeIndex);
2485 size_t* pLostAllocationCount);
2584 const VkMemoryRequirements* pVkMemoryRequirements,
2610 const VkMemoryRequirements* pVkMemoryRequirements,
2612 size_t allocationCount,
2657 size_t allocationCount,
2669 VkDeviceSize newSize);
3049 size_t allocationCount,
3050 VkBool32* pAllocationsChanged,
3084 VkDeviceSize allocationLocalOffset,
3118 VkDeviceSize allocationLocalOffset,
3150 const VkBufferCreateInfo* pBufferCreateInfo,
3175 const VkImageCreateInfo* pImageCreateInfo,
3201 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 3204 #if defined(__cplusplus) && defined(__INTELLISENSE__) 3205 #define VMA_IMPLEMENTATION 3208 #ifdef VMA_IMPLEMENTATION 3209 #undef VMA_IMPLEMENTATION 3231 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 3232 #define VMA_STATIC_VULKAN_FUNCTIONS 1 3244 #if VMA_USE_STL_CONTAINERS 3245 #define VMA_USE_STL_VECTOR 1 3246 #define VMA_USE_STL_UNORDERED_MAP 1 3247 #define VMA_USE_STL_LIST 1 3250 #ifndef VMA_USE_STL_SHARED_MUTEX 3252 #if __cplusplus >= 201703L 3253 #define VMA_USE_STL_SHARED_MUTEX 1 3257 #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L 3258 #define VMA_USE_STL_SHARED_MUTEX 1 3260 #define VMA_USE_STL_SHARED_MUTEX 0 3268 #if VMA_USE_STL_VECTOR 3272 #if VMA_USE_STL_UNORDERED_MAP 3273 #include <unordered_map> 3276 #if VMA_USE_STL_LIST 3285 #include <algorithm> 3290 #define VMA_NULL nullptr 3293 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16) 3295 void *aligned_alloc(
size_t alignment,
size_t size)
3298 if(alignment <
sizeof(
void*))
3300 alignment =
sizeof(
void*);
3303 return memalign(alignment, size);
3305 #elif defined(__APPLE__) || defined(__ANDROID__) 3307 void *aligned_alloc(
size_t alignment,
size_t size)
3310 if(alignment <
sizeof(
void*))
3312 alignment =
sizeof(
void*);
3316 if(posix_memalign(&pointer, alignment, size) == 0)
3330 #define VMA_ASSERT(expr) assert(expr) 3332 #define VMA_ASSERT(expr) 3338 #ifndef VMA_HEAVY_ASSERT 3340 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 3342 #define VMA_HEAVY_ASSERT(expr) 3346 #ifndef VMA_ALIGN_OF 3347 #define VMA_ALIGN_OF(type) (__alignof(type)) 3350 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 3352 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 3354 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 3358 #ifndef VMA_SYSTEM_FREE 3360 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 3362 #define VMA_SYSTEM_FREE(ptr) free(ptr) 3367 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 3371 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 3375 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 3379 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 3382 #ifndef VMA_DEBUG_LOG 3383 #define VMA_DEBUG_LOG(format, ...) 3393 #if VMA_STATS_STRING_ENABLED 3394 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
3396 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Writes the decimal representation of `num` into outStr (buffer of strLen bytes,
// truncating if necessary, always NUL-terminated by snprintf).
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}
// Writes the textual form of pointer `ptr` into outStr (buffer of strLen bytes).
// The exact "%p" format is implementation-defined.
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
3412 void Lock() { m_Mutex.lock(); }
3413 void Unlock() { m_Mutex.unlock(); }
3417 #define VMA_MUTEX VmaMutex 3421 #ifndef VMA_RW_MUTEX 3422 #if VMA_USE_STL_SHARED_MUTEX 3424 #include <shared_mutex> 3428 void LockRead() { m_Mutex.lock_shared(); }
3429 void UnlockRead() { m_Mutex.unlock_shared(); }
3430 void LockWrite() { m_Mutex.lock(); }
3431 void UnlockWrite() { m_Mutex.unlock(); }
3433 std::shared_mutex m_Mutex;
3435 #define VMA_RW_MUTEX VmaRWMutex 3436 #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600 3442 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
3443 void LockRead() { AcquireSRWLockShared(&m_Lock); }
3444 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
3445 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
3446 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
3450 #define VMA_RW_MUTEX VmaRWMutex 3456 void LockRead() { m_Mutex.Lock(); }
3457 void UnlockRead() { m_Mutex.Unlock(); }
3458 void LockWrite() { m_Mutex.Lock(); }
3459 void UnlockWrite() { m_Mutex.Unlock(); }
3463 #define VMA_RW_MUTEX VmaRWMutex 3464 #endif // #if VMA_USE_STL_SHARED_MUTEX 3465 #endif // #ifndef VMA_RW_MUTEX 3475 #ifndef VMA_ATOMIC_UINT32 3477 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 3480 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 3485 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 3488 #ifndef VMA_DEBUG_ALIGNMENT 3493 #define VMA_DEBUG_ALIGNMENT (1) 3496 #ifndef VMA_DEBUG_MARGIN 3501 #define VMA_DEBUG_MARGIN (0) 3504 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 3509 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 3512 #ifndef VMA_DEBUG_DETECT_CORRUPTION 3518 #define VMA_DEBUG_DETECT_CORRUPTION (0) 3521 #ifndef VMA_DEBUG_GLOBAL_MUTEX 3526 #define VMA_DEBUG_GLOBAL_MUTEX (0) 3529 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 3534 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 3537 #ifndef VMA_SMALL_HEAP_MAX_SIZE 3538 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 3542 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 3543 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 3547 #ifndef VMA_CLASS_NO_COPY 3548 #define VMA_CLASS_NO_COPY(className) \ 3550 className(const className&) = delete; \ 3551 className& operator=(const className&) = delete; 3554 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
3557 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
3559 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
3560 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
3566 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
3568 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
3569 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in `v` (classic SWAR popcount).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
// Aligns given value up to the nearest multiple of align.
// Uses the division form, so align need not be a power of two (must be > 0).
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}
// Aligns given value down to the nearest multiple of align (align must be > 0).
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}
// Division with mathematical rounding to nearest number (half rounds up for
// unsigned operands). Intended for unsigned integer types; y must be > 0.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
// Returns true if given number is a power of two.
// T must be an unsigned integer number or a signed integer but always nonnegative.
// NOTE: for x == 0 this formula returns true — callers are expected to pass
// nonzero values.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}
// Returns smallest power of 2 greater or equal to v (bit-smearing trick).
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v++;
    return v;
}
// Returns smallest power of 2 greater or equal to v (64-bit overload).
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v++;
    return v;
}

// Returns largest power of 2 less or equal to v.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    // All bits below the highest set bit are now 1; clear all but the highest.
    v = v ^ (v >> 1);
    return v;
}
// Returns largest power of 2 less or equal to v (64-bit overload).
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}
3663 static inline bool VmaStrIsEmpty(
const char* pStr)
3665 return pStr == VMA_NULL || *pStr ==
'\0';
3668 #if VMA_STATS_STRING_ENABLED 3670 static const char* VmaAlgorithmToStr(uint32_t algorithm)
3686 #endif // #if VMA_STATS_STRING_ENABLED 3690 template<
typename Iterator,
typename Compare>
3691 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
3693 Iterator centerValue = end; --centerValue;
3694 Iterator insertIndex = beg;
3695 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
3697 if(cmp(*memTypeIndex, *centerValue))
3699 if(insertIndex != memTypeIndex)
3701 VMA_SWAP(*memTypeIndex, *insertIndex);
3706 if(insertIndex != centerValue)
3708 VMA_SWAP(*insertIndex, *centerValue);
3713 template<
typename Iterator,
typename Compare>
3714 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
3718 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
3719 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
3720 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
3724 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 3726 #endif // #ifndef VMA_SORT 3735 static inline bool VmaBlocksOnSamePage(
3736 VkDeviceSize resourceAOffset,
3737 VkDeviceSize resourceASize,
3738 VkDeviceSize resourceBOffset,
3739 VkDeviceSize pageSize)
3741 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3742 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3743 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3744 VkDeviceSize resourceBStart = resourceBOffset;
3745 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3746 return resourceAEndPage == resourceBStartPage;
// Type of the resource occupying a suballocation inside a memory block, used to
// decide whether two neighboring suballocations conflict with respect to
// bufferImageGranularity.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    // Forces the enum's underlying type to be at least 32-bit.
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
3766 static inline bool VmaIsBufferImageGranularityConflict(
3767 VmaSuballocationType suballocType1,
3768 VmaSuballocationType suballocType2)
3770 if(suballocType1 > suballocType2)
3772 VMA_SWAP(suballocType1, suballocType2);
3775 switch(suballocType1)
3777 case VMA_SUBALLOCATION_TYPE_FREE:
3779 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3781 case VMA_SUBALLOCATION_TYPE_BUFFER:
3783 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3784 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3785 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3787 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3788 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3789 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3790 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3792 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3793 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3801 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3803 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION 3804 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3805 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3806 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3808 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3815 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3817 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION 3818 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3819 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3820 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3822 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3835 static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
3837 memset(&outBufCreateInfo, 0,
sizeof(outBufCreateInfo));
3838 outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
3839 outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3840 outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
3846 VMA_CLASS_NO_COPY(VmaMutexLock)
3848 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex =
true) :
3849 m_pMutex(useMutex ? &mutex : VMA_NULL)
3850 {
if(m_pMutex) { m_pMutex->Lock(); } }
3852 {
if(m_pMutex) { m_pMutex->Unlock(); } }
3854 VMA_MUTEX* m_pMutex;
3858 struct VmaMutexLockRead
3860 VMA_CLASS_NO_COPY(VmaMutexLockRead)
3862 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
3863 m_pMutex(useMutex ? &mutex : VMA_NULL)
3864 {
if(m_pMutex) { m_pMutex->LockRead(); } }
3865 ~VmaMutexLockRead() {
if(m_pMutex) { m_pMutex->UnlockRead(); } }
3867 VMA_RW_MUTEX* m_pMutex;
3871 struct VmaMutexLockWrite
3873 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
3875 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex) :
3876 m_pMutex(useMutex ? &mutex : VMA_NULL)
3877 {
if(m_pMutex) { m_pMutex->LockWrite(); } }
3878 ~VmaMutexLockWrite() {
if(m_pMutex) { m_pMutex->UnlockWrite(); } }
3880 VMA_RW_MUTEX* m_pMutex;
3883 #if VMA_DEBUG_GLOBAL_MUTEX 3884 static VMA_MUTEX gDebugGlobalMutex;
3885 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3887 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3891 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
3902 template <
typename CmpLess,
typename IterT,
typename KeyT>
3903 static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end,
const KeyT &key,
const CmpLess& cmp)
3905 size_t down = 0, up = (end - beg);
3908 const size_t mid = (down + up) / 2;
3909 if(cmp(*(beg+mid), key))
3921 template<
typename CmpLess,
typename IterT,
typename KeyT>
3922 IterT VmaBinaryFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value,
const CmpLess& cmp)
3924 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
3925 beg, end, value, cmp);
3927 (!cmp(*it, value) && !cmp(value, *it)))
3939 template<
typename T>
3940 static bool VmaValidatePointerArray(uint32_t count,
const T* arr)
3942 for(uint32_t i = 0; i < count; ++i)
3944 const T iPtr = arr[i];
3945 if(iPtr == VMA_NULL)
3949 for(uint32_t j = i + 1; j < count; ++j)
3963 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3965 if((pAllocationCallbacks != VMA_NULL) &&
3966 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3968 return (*pAllocationCallbacks->pfnAllocation)(
3969 pAllocationCallbacks->pUserData,
3972 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3976 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3980 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3982 if((pAllocationCallbacks != VMA_NULL) &&
3983 (pAllocationCallbacks->pfnFree != VMA_NULL))
3985 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3989 VMA_SYSTEM_FREE(ptr);
3993 template<
typename T>
3994 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3996 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3999 template<
typename T>
4000 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
4002 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
4005 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 4007 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 4009 template<
typename T>
4010 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
4013 VmaFree(pAllocationCallbacks, ptr);
4016 template<
typename T>
4017 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
4021 for(
size_t i = count; i--; )
4025 VmaFree(pAllocationCallbacks, ptr);
4030 template<
typename T>
4031 class VmaStlAllocator
4034 const VkAllocationCallbacks*
const m_pCallbacks;
4035 typedef T value_type;
4037 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
4038 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
4040 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
4041 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
4043 template<
typename U>
4044 bool operator==(
const VmaStlAllocator<U>& rhs)
const 4046 return m_pCallbacks == rhs.m_pCallbacks;
4048 template<
typename U>
4049 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 4051 return m_pCallbacks != rhs.m_pCallbacks;
4054 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
4057 #if VMA_USE_STL_VECTOR 4059 #define VmaVector std::vector 4061 template<
typename T,
typename allocatorT>
4062 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
4064 vec.insert(vec.begin() + index, item);
4067 template<
typename T,
typename allocatorT>
4068 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
4070 vec.erase(vec.begin() + index);
4073 #else // #if VMA_USE_STL_VECTOR 4078 template<
typename T,
typename AllocatorT>
4082 typedef T value_type;
4084 VmaVector(
const AllocatorT& allocator) :
4085 m_Allocator(allocator),
4092 VmaVector(
size_t count,
const AllocatorT& allocator) :
4093 m_Allocator(allocator),
4094 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
4100 VmaVector(
const VmaVector<T, AllocatorT>& src) :
4101 m_Allocator(src.m_Allocator),
4102 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
4103 m_Count(src.m_Count),
4104 m_Capacity(src.m_Count)
4108 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
4114 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4117 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
4121 resize(rhs.m_Count);
4124 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
4130 bool empty()
const {
return m_Count == 0; }
4131 size_t size()
const {
return m_Count; }
4132 T* data() {
return m_pArray; }
4133 const T* data()
const {
return m_pArray; }
4135 T& operator[](
size_t index)
4137 VMA_HEAVY_ASSERT(index < m_Count);
4138 return m_pArray[index];
4140 const T& operator[](
size_t index)
const 4142 VMA_HEAVY_ASSERT(index < m_Count);
4143 return m_pArray[index];
4148 VMA_HEAVY_ASSERT(m_Count > 0);
4151 const T& front()
const 4153 VMA_HEAVY_ASSERT(m_Count > 0);
4158 VMA_HEAVY_ASSERT(m_Count > 0);
4159 return m_pArray[m_Count - 1];
4161 const T& back()
const 4163 VMA_HEAVY_ASSERT(m_Count > 0);
4164 return m_pArray[m_Count - 1];
4167 void reserve(
size_t newCapacity,
bool freeMemory =
false)
4169 newCapacity = VMA_MAX(newCapacity, m_Count);
4171 if((newCapacity < m_Capacity) && !freeMemory)
4173 newCapacity = m_Capacity;
4176 if(newCapacity != m_Capacity)
4178 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
4181 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
4183 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4184 m_Capacity = newCapacity;
4185 m_pArray = newArray;
4189 void resize(
size_t newCount,
bool freeMemory =
false)
4191 size_t newCapacity = m_Capacity;
4192 if(newCount > m_Capacity)
4194 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
4198 newCapacity = newCount;
4201 if(newCapacity != m_Capacity)
4203 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
4204 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
4205 if(elementsToCopy != 0)
4207 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
4209 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4210 m_Capacity = newCapacity;
4211 m_pArray = newArray;
4217 void clear(
bool freeMemory =
false)
4219 resize(0, freeMemory);
4222 void insert(
size_t index,
const T& src)
4224 VMA_HEAVY_ASSERT(index <= m_Count);
4225 const size_t oldCount = size();
4226 resize(oldCount + 1);
4227 if(index < oldCount)
4229 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
4231 m_pArray[index] = src;
4234 void remove(
size_t index)
4236 VMA_HEAVY_ASSERT(index < m_Count);
4237 const size_t oldCount = size();
4238 if(index < oldCount - 1)
4240 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
4242 resize(oldCount - 1);
4245 void push_back(
const T& src)
4247 const size_t newIndex = size();
4248 resize(newIndex + 1);
4249 m_pArray[newIndex] = src;
4254 VMA_HEAVY_ASSERT(m_Count > 0);
4258 void push_front(
const T& src)
4265 VMA_HEAVY_ASSERT(m_Count > 0);
4269 typedef T* iterator;
4271 iterator begin() {
return m_pArray; }
4272 iterator end() {
return m_pArray + m_Count; }
4275 AllocatorT m_Allocator;
4281 template<
typename T,
typename allocatorT>
4282 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
4284 vec.insert(index, item);
4287 template<
typename T,
typename allocatorT>
4288 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
4293 #endif // #if VMA_USE_STL_VECTOR 4295 template<
typename CmpLess,
typename VectorT>
4296 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
4298 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4300 vector.data() + vector.size(),
4302 CmpLess()) - vector.data();
4303 VmaVectorInsert(vector, indexToInsert, value);
4304 return indexToInsert;
// Removes one element equivalent to `value` from a sorted vector.
// Returns true if an element was found and removed, false otherwise.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
4333 template<
typename T>
4334 class VmaPoolAllocator
4336 VMA_CLASS_NO_COPY(VmaPoolAllocator)
4338 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
4339 ~VmaPoolAllocator();
4347 uint32_t NextFreeIndex;
4355 uint32_t FirstFreeIndex;
4358 const VkAllocationCallbacks* m_pAllocationCallbacks;
4359 const uint32_t m_FirstBlockCapacity;
4360 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
4362 ItemBlock& CreateNewBlock();
4365 template<
typename T>
4366 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
4367 m_pAllocationCallbacks(pAllocationCallbacks),
4368 m_FirstBlockCapacity(firstBlockCapacity),
4369 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
4371 VMA_ASSERT(m_FirstBlockCapacity > 1);
4374 template<
typename T>
4375 VmaPoolAllocator<T>::~VmaPoolAllocator()
4380 template<
typename T>
4381 void VmaPoolAllocator<T>::Clear()
4383 for(
size_t i = m_ItemBlocks.size(); i--; )
4384 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
4385 m_ItemBlocks.clear();
4388 template<
typename T>
4389 T* VmaPoolAllocator<T>::Alloc()
4391 for(
size_t i = m_ItemBlocks.size(); i--; )
4393 ItemBlock& block = m_ItemBlocks[i];
4395 if(block.FirstFreeIndex != UINT32_MAX)
4397 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
4398 block.FirstFreeIndex = pItem->NextFreeIndex;
4399 return &pItem->Value;
4404 ItemBlock& newBlock = CreateNewBlock();
4405 Item*
const pItem = &newBlock.pItems[0];
4406 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
4407 return &pItem->Value;
4410 template<
typename T>
4411 void VmaPoolAllocator<T>::Free(T* ptr)
4414 for(
size_t i = m_ItemBlocks.size(); i--; )
4416 ItemBlock& block = m_ItemBlocks[i];
4420 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
4423 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
4425 const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
4426 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
4427 block.FirstFreeIndex = index;
4431 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
4434 template<
typename T>
4435 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
4437 const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
4438 m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
4440 const ItemBlock newBlock = {
4441 vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
4445 m_ItemBlocks.push_back(newBlock);
4448 for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
4449 newBlock.pItems[i].NextFreeIndex = i + 1;
4450 newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
4451 return m_ItemBlocks.back();
4457 #if VMA_USE_STL_LIST 4459 #define VmaList std::list 4461 #else // #if VMA_USE_STL_LIST 4463 template<
typename T>
4472 template<
typename T>
4475 VMA_CLASS_NO_COPY(VmaRawList)
4477 typedef VmaListItem<T> ItemType;
4479 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
4483 size_t GetCount()
const {
return m_Count; }
4484 bool IsEmpty()
const {
return m_Count == 0; }
4486 ItemType* Front() {
return m_pFront; }
4487 const ItemType* Front()
const {
return m_pFront; }
4488 ItemType* Back() {
return m_pBack; }
4489 const ItemType* Back()
const {
return m_pBack; }
4491 ItemType* PushBack();
4492 ItemType* PushFront();
4493 ItemType* PushBack(
const T& value);
4494 ItemType* PushFront(
const T& value);
4499 ItemType* InsertBefore(ItemType* pItem);
4501 ItemType* InsertAfter(ItemType* pItem);
4503 ItemType* InsertBefore(ItemType* pItem,
const T& value);
4504 ItemType* InsertAfter(ItemType* pItem,
const T& value);
4506 void Remove(ItemType* pItem);
4509 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
4510 VmaPoolAllocator<ItemType> m_ItemAllocator;
4516 template<
typename T>
4517 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
4518 m_pAllocationCallbacks(pAllocationCallbacks),
4519 m_ItemAllocator(pAllocationCallbacks, 128),
4526 template<
typename T>
4527 VmaRawList<T>::~VmaRawList()
4533 template<
typename T>
4534 void VmaRawList<T>::Clear()
4536 if(IsEmpty() ==
false)
4538 ItemType* pItem = m_pBack;
4539 while(pItem != VMA_NULL)
4541 ItemType*
const pPrevItem = pItem->pPrev;
4542 m_ItemAllocator.Free(pItem);
4545 m_pFront = VMA_NULL;
4551 template<
typename T>
4552 VmaListItem<T>* VmaRawList<T>::PushBack()
4554 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4555 pNewItem->pNext = VMA_NULL;
4558 pNewItem->pPrev = VMA_NULL;
4559 m_pFront = pNewItem;
4565 pNewItem->pPrev = m_pBack;
4566 m_pBack->pNext = pNewItem;
4573 template<
typename T>
4574 VmaListItem<T>* VmaRawList<T>::PushFront()
4576 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4577 pNewItem->pPrev = VMA_NULL;
4580 pNewItem->pNext = VMA_NULL;
4581 m_pFront = pNewItem;
4587 pNewItem->pNext = m_pFront;
4588 m_pFront->pPrev = pNewItem;
4589 m_pFront = pNewItem;
4595 template<
typename T>
4596 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
4598 ItemType*
const pNewItem = PushBack();
4599 pNewItem->Value = value;
4603 template<
typename T>
4604 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
4606 ItemType*
const pNewItem = PushFront();
4607 pNewItem->Value = value;
4611 template<
typename T>
4612 void VmaRawList<T>::PopBack()
4614 VMA_HEAVY_ASSERT(m_Count > 0);
4615 ItemType*
const pBackItem = m_pBack;
4616 ItemType*
const pPrevItem = pBackItem->pPrev;
4617 if(pPrevItem != VMA_NULL)
4619 pPrevItem->pNext = VMA_NULL;
4621 m_pBack = pPrevItem;
4622 m_ItemAllocator.Free(pBackItem);
4626 template<
typename T>
4627 void VmaRawList<T>::PopFront()
4629 VMA_HEAVY_ASSERT(m_Count > 0);
4630 ItemType*
const pFrontItem = m_pFront;
4631 ItemType*
const pNextItem = pFrontItem->pNext;
4632 if(pNextItem != VMA_NULL)
4634 pNextItem->pPrev = VMA_NULL;
4636 m_pFront = pNextItem;
4637 m_ItemAllocator.Free(pFrontItem);
4641 template<
typename T>
4642 void VmaRawList<T>::Remove(ItemType* pItem)
4644 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4645 VMA_HEAVY_ASSERT(m_Count > 0);
4647 if(pItem->pPrev != VMA_NULL)
4649 pItem->pPrev->pNext = pItem->pNext;
4653 VMA_HEAVY_ASSERT(m_pFront == pItem);
4654 m_pFront = pItem->pNext;
4657 if(pItem->pNext != VMA_NULL)
4659 pItem->pNext->pPrev = pItem->pPrev;
4663 VMA_HEAVY_ASSERT(m_pBack == pItem);
4664 m_pBack = pItem->pPrev;
4667 m_ItemAllocator.Free(pItem);
4671 template<
typename T>
4672 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4674 if(pItem != VMA_NULL)
4676 ItemType*
const prevItem = pItem->pPrev;
4677 ItemType*
const newItem = m_ItemAllocator.Alloc();
4678 newItem->pPrev = prevItem;
4679 newItem->pNext = pItem;
4680 pItem->pPrev = newItem;
4681 if(prevItem != VMA_NULL)
4683 prevItem->pNext = newItem;
4687 VMA_HEAVY_ASSERT(m_pFront == pItem);
4697 template<
typename T>
4698 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4700 if(pItem != VMA_NULL)
4702 ItemType*
const nextItem = pItem->pNext;
4703 ItemType*
const newItem = m_ItemAllocator.Alloc();
4704 newItem->pNext = nextItem;
4705 newItem->pPrev = pItem;
4706 pItem->pNext = newItem;
4707 if(nextItem != VMA_NULL)
4709 nextItem->pPrev = newItem;
4713 VMA_HEAVY_ASSERT(m_pBack == pItem);
4723 template<
typename T>
4724 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4726 ItemType*
const newItem = InsertBefore(pItem);
4727 newItem->Value = value;
4731 template<
typename T>
4732 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4734 ItemType*
const newItem = InsertAfter(pItem);
4735 newItem->Value = value;
4739 template<
typename T,
typename AllocatorT>
4742 VMA_CLASS_NO_COPY(VmaList)
4753 T& operator*()
const 4755 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4756 return m_pItem->Value;
4758 T* operator->()
const 4760 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4761 return &m_pItem->Value;
4764 iterator& operator++()
4766 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4767 m_pItem = m_pItem->pNext;
4770 iterator& operator--()
4772 if(m_pItem != VMA_NULL)
4774 m_pItem = m_pItem->pPrev;
4778 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4779 m_pItem = m_pList->Back();
4784 iterator operator++(
int)
4786 iterator result = *
this;
4790 iterator operator--(
int)
4792 iterator result = *
this;
4797 bool operator==(
const iterator& rhs)
const 4799 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4800 return m_pItem == rhs.m_pItem;
4802 bool operator!=(
const iterator& rhs)
const 4804 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4805 return m_pItem != rhs.m_pItem;
4809 VmaRawList<T>* m_pList;
4810 VmaListItem<T>* m_pItem;
4812 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4818 friend class VmaList<T, AllocatorT>;
4821 class const_iterator
4830 const_iterator(
const iterator& src) :
4831 m_pList(src.m_pList),
4832 m_pItem(src.m_pItem)
4836 const T& operator*()
const 4838 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4839 return m_pItem->Value;
4841 const T* operator->()
const 4843 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4844 return &m_pItem->Value;
4847 const_iterator& operator++()
4849 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4850 m_pItem = m_pItem->pNext;
4853 const_iterator& operator--()
4855 if(m_pItem != VMA_NULL)
4857 m_pItem = m_pItem->pPrev;
4861 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4862 m_pItem = m_pList->Back();
4867 const_iterator operator++(
int)
4869 const_iterator result = *
this;
4873 const_iterator operator--(
int)
4875 const_iterator result = *
this;
4880 bool operator==(
const const_iterator& rhs)
const 4882 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4883 return m_pItem == rhs.m_pItem;
4885 bool operator!=(
const const_iterator& rhs)
const 4887 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4888 return m_pItem != rhs.m_pItem;
4892 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4898 const VmaRawList<T>* m_pList;
4899 const VmaListItem<T>* m_pItem;
4901 friend class VmaList<T, AllocatorT>;
4904 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4906 bool empty()
const {
return m_RawList.IsEmpty(); }
4907 size_t size()
const {
return m_RawList.GetCount(); }
4909 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4910 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4912 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4913 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4915 void clear() { m_RawList.Clear(); }
4916 void push_back(
const T& value) { m_RawList.PushBack(value); }
4917 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4918 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4921 VmaRawList<T> m_RawList;
4924 #endif // #if VMA_USE_STL_LIST 4932 #if VMA_USE_STL_UNORDERED_MAP 4934 #define VmaPair std::pair 4936 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4937 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4939 #else // #if VMA_USE_STL_UNORDERED_MAP 4941 template<
typename T1,
typename T2>
4947 VmaPair() : first(), second() { }
4948 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
4954 template<
typename KeyT,
typename ValueT>
4958 typedef VmaPair<KeyT, ValueT> PairType;
4959 typedef PairType* iterator;
4961 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4963 iterator begin() {
return m_Vector.begin(); }
4964 iterator end() {
return m_Vector.end(); }
4966 void insert(
const PairType& pair);
4967 iterator find(
const KeyT& key);
4968 void erase(iterator it);
4971 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4974 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4976 template<
typename FirstT,
typename SecondT>
4977 struct VmaPairFirstLess
4979 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4981 return lhs.first < rhs.first;
4983 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4985 return lhs.first < rhsFirst;
4989 template<
typename KeyT,
typename ValueT>
4990 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4992 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4994 m_Vector.data() + m_Vector.size(),
4996 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4997 VmaVectorInsert(m_Vector, indexToInsert, pair);
5000 template<
typename KeyT,
typename ValueT>
5001 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
5003 PairType* it = VmaBinaryFindFirstNotLess(
5005 m_Vector.data() + m_Vector.size(),
5007 VmaPairFirstLess<KeyT, ValueT>());
5008 if((it != m_Vector.end()) && (it->first == key))
5014 return m_Vector.end();
5018 template<
typename KeyT,
typename ValueT>
5019 void VmaMap<KeyT, ValueT>::erase(iterator it)
5021 VmaVectorRemove(m_Vector, it - m_Vector.begin());
5024 #endif // #if VMA_USE_STL_UNORDERED_MAP 5030 class VmaDeviceMemoryBlock;
// Direction of a host-cache maintenance operation on mapped memory:
// flush (host writes -> device) or invalidate (device writes -> host).
enum VMA_CACHE_OPERATION
{
    VMA_CACHE_FLUSH,
    VMA_CACHE_INVALIDATE
};
5034 struct VmaAllocation_T
5037 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
5041 FLAG_USER_DATA_STRING = 0x01,
5045 enum ALLOCATION_TYPE
5047 ALLOCATION_TYPE_NONE,
5048 ALLOCATION_TYPE_BLOCK,
5049 ALLOCATION_TYPE_DEDICATED,
5057 void Ctor(uint32_t currentFrameIndex,
bool userDataString)
5061 m_pUserData = VMA_NULL;
5062 m_LastUseFrameIndex = currentFrameIndex;
5063 m_Type = (uint8_t)ALLOCATION_TYPE_NONE;
5064 m_SuballocationType = (uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN;
5066 m_Flags = userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0;
5068 #if VMA_STATS_STRING_ENABLED 5069 m_CreationFrameIndex = currentFrameIndex;
5070 m_BufferImageUsage = 0;
5076 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
5079 VMA_ASSERT(m_pUserData == VMA_NULL);
5082 void InitBlockAllocation(
5083 VmaDeviceMemoryBlock* block,
5084 VkDeviceSize offset,
5085 VkDeviceSize alignment,
5087 VmaSuballocationType suballocationType,
5091 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5092 VMA_ASSERT(block != VMA_NULL);
5093 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
5094 m_Alignment = alignment;
5096 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
5097 m_SuballocationType = (uint8_t)suballocationType;
5098 m_BlockAllocation.m_Block = block;
5099 m_BlockAllocation.m_Offset = offset;
5100 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
5105 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5106 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
5107 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
5108 m_BlockAllocation.m_Block = VMA_NULL;
5109 m_BlockAllocation.m_Offset = 0;
5110 m_BlockAllocation.m_CanBecomeLost =
true;
5113 void ChangeBlockAllocation(
5115 VmaDeviceMemoryBlock* block,
5116 VkDeviceSize offset);
5118 void ChangeOffset(VkDeviceSize newOffset);
5121 void InitDedicatedAllocation(
5122 uint32_t memoryTypeIndex,
5123 VkDeviceMemory hMemory,
5124 VmaSuballocationType suballocationType,
5128 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5129 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
5130 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
5133 m_SuballocationType = (uint8_t)suballocationType;
5134 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
5135 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
5136 m_DedicatedAllocation.m_hMemory = hMemory;
5137 m_DedicatedAllocation.m_pMappedData = pMappedData;
5140 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
5141 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
5142 VkDeviceSize GetSize()
const {
return m_Size; }
5143 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
5144 void* GetUserData()
const {
return m_pUserData; }
5145 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
5146 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
5148 VmaDeviceMemoryBlock* GetBlock()
const 5150 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5151 return m_BlockAllocation.m_Block;
5153 VkDeviceSize GetOffset()
const;
5154 VkDeviceMemory GetMemory()
const;
5155 uint32_t GetMemoryTypeIndex()
const;
5156 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
5157 void* GetMappedData()
const;
5158 bool CanBecomeLost()
const;
5160 uint32_t GetLastUseFrameIndex()
const 5162 return m_LastUseFrameIndex.load();
5164 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
5166 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
5176 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5178 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
5180 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
5191 void BlockAllocMap();
5192 void BlockAllocUnmap();
5193 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
5196 #if VMA_STATS_STRING_ENABLED 5197 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
5198 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
5200 void InitBufferImageUsage(uint32_t bufferImageUsage)
5202 VMA_ASSERT(m_BufferImageUsage == 0);
5203 m_BufferImageUsage = bufferImageUsage;
5206 void PrintParameters(
class VmaJsonWriter& json)
const;
5210 VkDeviceSize m_Alignment;
5211 VkDeviceSize m_Size;
5213 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
5215 uint8_t m_SuballocationType;
5222 struct BlockAllocation
5224 VmaDeviceMemoryBlock* m_Block;
5225 VkDeviceSize m_Offset;
5226 bool m_CanBecomeLost;
5230 struct DedicatedAllocation
5232 uint32_t m_MemoryTypeIndex;
5233 VkDeviceMemory m_hMemory;
5234 void* m_pMappedData;
5240 BlockAllocation m_BlockAllocation;
5242 DedicatedAllocation m_DedicatedAllocation;
5245 #if VMA_STATS_STRING_ENABLED 5246 uint32_t m_CreationFrameIndex;
5247 uint32_t m_BufferImageUsage;
5257 struct VmaSuballocation
5259 VkDeviceSize offset;
5262 VmaSuballocationType type;
5266 struct VmaSuballocationOffsetLess
5268 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5270 return lhs.offset < rhs.offset;
5273 struct VmaSuballocationOffsetGreater
5275 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5277 return lhs.offset > rhs.offset;
5281 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
5284 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
5286 enum class VmaAllocationRequestType
5308 struct VmaAllocationRequest
5310 VkDeviceSize offset;
5311 VkDeviceSize sumFreeSize;
5312 VkDeviceSize sumItemSize;
5313 VmaSuballocationList::iterator item;
5314 size_t itemsToMakeLostCount;
5316 VmaAllocationRequestType type;
5318 VkDeviceSize CalcCost()
const 5320 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
5328 class VmaBlockMetadata
5332 virtual ~VmaBlockMetadata() { }
5333 virtual void Init(VkDeviceSize size) { m_Size = size; }
5336 virtual bool Validate()
const = 0;
5337 VkDeviceSize GetSize()
const {
return m_Size; }
5338 virtual size_t GetAllocationCount()
const = 0;
5339 virtual VkDeviceSize GetSumFreeSize()
const = 0;
5340 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
5342 virtual bool IsEmpty()
const = 0;
5344 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
5346 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
5348 #if VMA_STATS_STRING_ENABLED 5349 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
5355 virtual bool CreateAllocationRequest(
5356 uint32_t currentFrameIndex,
5357 uint32_t frameInUseCount,
5358 VkDeviceSize bufferImageGranularity,
5359 VkDeviceSize allocSize,
5360 VkDeviceSize allocAlignment,
5362 VmaSuballocationType allocType,
5363 bool canMakeOtherLost,
5366 VmaAllocationRequest* pAllocationRequest) = 0;
5368 virtual bool MakeRequestedAllocationsLost(
5369 uint32_t currentFrameIndex,
5370 uint32_t frameInUseCount,
5371 VmaAllocationRequest* pAllocationRequest) = 0;
5373 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
5375 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
5379 const VmaAllocationRequest& request,
5380 VmaSuballocationType type,
5381 VkDeviceSize allocSize,
5386 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
5389 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
5391 #if VMA_STATS_STRING_ENABLED 5392 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
5393 VkDeviceSize unusedBytes,
5394 size_t allocationCount,
5395 size_t unusedRangeCount)
const;
5396 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
5397 VkDeviceSize offset,
5399 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
5400 VkDeviceSize offset,
5401 VkDeviceSize size)
const;
5402 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
5406 VkDeviceSize m_Size;
5407 const VkAllocationCallbacks* m_pAllocationCallbacks;
5410 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 5411 VMA_ASSERT(0 && "Validation failed: " #cond); \ 5415 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
5417 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
5420 virtual ~VmaBlockMetadata_Generic();
5421 virtual void Init(VkDeviceSize size);
5423 virtual bool Validate()
const;
5424 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
5425 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5426 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5427 virtual bool IsEmpty()
const;
5429 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5430 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5432 #if VMA_STATS_STRING_ENABLED 5433 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5436 virtual bool CreateAllocationRequest(
5437 uint32_t currentFrameIndex,
5438 uint32_t frameInUseCount,
5439 VkDeviceSize bufferImageGranularity,
5440 VkDeviceSize allocSize,
5441 VkDeviceSize allocAlignment,
5443 VmaSuballocationType allocType,
5444 bool canMakeOtherLost,
5446 VmaAllocationRequest* pAllocationRequest);
5448 virtual bool MakeRequestedAllocationsLost(
5449 uint32_t currentFrameIndex,
5450 uint32_t frameInUseCount,
5451 VmaAllocationRequest* pAllocationRequest);
5453 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5455 virtual VkResult CheckCorruption(
const void* pBlockData);
5458 const VmaAllocationRequest& request,
5459 VmaSuballocationType type,
5460 VkDeviceSize allocSize,
5464 virtual void FreeAtOffset(VkDeviceSize offset);
5469 bool IsBufferImageGranularityConflictPossible(
5470 VkDeviceSize bufferImageGranularity,
5471 VmaSuballocationType& inOutPrevSuballocType)
const;
5474 friend class VmaDefragmentationAlgorithm_Generic;
5475 friend class VmaDefragmentationAlgorithm_Fast;
5477 uint32_t m_FreeCount;
5478 VkDeviceSize m_SumFreeSize;
5479 VmaSuballocationList m_Suballocations;
5482 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
5484 bool ValidateFreeSuballocationList()
const;
5488 bool CheckAllocation(
5489 uint32_t currentFrameIndex,
5490 uint32_t frameInUseCount,
5491 VkDeviceSize bufferImageGranularity,
5492 VkDeviceSize allocSize,
5493 VkDeviceSize allocAlignment,
5494 VmaSuballocationType allocType,
5495 VmaSuballocationList::const_iterator suballocItem,
5496 bool canMakeOtherLost,
5497 VkDeviceSize* pOffset,
5498 size_t* itemsToMakeLostCount,
5499 VkDeviceSize* pSumFreeSize,
5500 VkDeviceSize* pSumItemSize)
const;
5502 void MergeFreeWithNext(VmaSuballocationList::iterator item);
5506 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
5509 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
5512 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
5593 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
5595 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
5598 virtual ~VmaBlockMetadata_Linear();
5599 virtual void Init(VkDeviceSize size);
5601 virtual bool Validate()
const;
5602 virtual size_t GetAllocationCount()
const;
5603 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5604 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5605 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
5607 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5608 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5610 #if VMA_STATS_STRING_ENABLED 5611 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5614 virtual bool CreateAllocationRequest(
5615 uint32_t currentFrameIndex,
5616 uint32_t frameInUseCount,
5617 VkDeviceSize bufferImageGranularity,
5618 VkDeviceSize allocSize,
5619 VkDeviceSize allocAlignment,
5621 VmaSuballocationType allocType,
5622 bool canMakeOtherLost,
5624 VmaAllocationRequest* pAllocationRequest);
5626 virtual bool MakeRequestedAllocationsLost(
5627 uint32_t currentFrameIndex,
5628 uint32_t frameInUseCount,
5629 VmaAllocationRequest* pAllocationRequest);
5631 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5633 virtual VkResult CheckCorruption(
const void* pBlockData);
5636 const VmaAllocationRequest& request,
5637 VmaSuballocationType type,
5638 VkDeviceSize allocSize,
5642 virtual void FreeAtOffset(VkDeviceSize offset);
5652 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
5654 enum SECOND_VECTOR_MODE
5656 SECOND_VECTOR_EMPTY,
5661 SECOND_VECTOR_RING_BUFFER,
5667 SECOND_VECTOR_DOUBLE_STACK,
5670 VkDeviceSize m_SumFreeSize;
5671 SuballocationVectorType m_Suballocations0, m_Suballocations1;
5672 uint32_t m_1stVectorIndex;
5673 SECOND_VECTOR_MODE m_2ndVectorMode;
5675 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5676 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5677 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5678 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5681 size_t m_1stNullItemsBeginCount;
5683 size_t m_1stNullItemsMiddleCount;
5685 size_t m_2ndNullItemsCount;
5687 bool ShouldCompact1st()
const;
5688 void CleanupAfterFree();
5690 bool CreateAllocationRequest_LowerAddress(
5691 uint32_t currentFrameIndex,
5692 uint32_t frameInUseCount,
5693 VkDeviceSize bufferImageGranularity,
5694 VkDeviceSize allocSize,
5695 VkDeviceSize allocAlignment,
5696 VmaSuballocationType allocType,
5697 bool canMakeOtherLost,
5699 VmaAllocationRequest* pAllocationRequest);
5700 bool CreateAllocationRequest_UpperAddress(
5701 uint32_t currentFrameIndex,
5702 uint32_t frameInUseCount,
5703 VkDeviceSize bufferImageGranularity,
5704 VkDeviceSize allocSize,
5705 VkDeviceSize allocAlignment,
5706 VmaSuballocationType allocType,
5707 bool canMakeOtherLost,
5709 VmaAllocationRequest* pAllocationRequest);
5723 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5725 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5728 virtual ~VmaBlockMetadata_Buddy();
5729 virtual void Init(VkDeviceSize size);
5731 virtual bool Validate()
const;
5732 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
5733 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5734 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5735 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5737 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5738 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5740 #if VMA_STATS_STRING_ENABLED 5741 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5744 virtual bool CreateAllocationRequest(
5745 uint32_t currentFrameIndex,
5746 uint32_t frameInUseCount,
5747 VkDeviceSize bufferImageGranularity,
5748 VkDeviceSize allocSize,
5749 VkDeviceSize allocAlignment,
5751 VmaSuballocationType allocType,
5752 bool canMakeOtherLost,
5754 VmaAllocationRequest* pAllocationRequest);
5756 virtual bool MakeRequestedAllocationsLost(
5757 uint32_t currentFrameIndex,
5758 uint32_t frameInUseCount,
5759 VmaAllocationRequest* pAllocationRequest);
5761 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5763 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
5766 const VmaAllocationRequest& request,
5767 VmaSuballocationType type,
5768 VkDeviceSize allocSize,
5771 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5772 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
5775 static const VkDeviceSize MIN_NODE_SIZE = 32;
5776 static const size_t MAX_LEVELS = 30;
5778 struct ValidationContext
5780 size_t calculatedAllocationCount;
5781 size_t calculatedFreeCount;
5782 VkDeviceSize calculatedSumFreeSize;
5784 ValidationContext() :
5785 calculatedAllocationCount(0),
5786 calculatedFreeCount(0),
5787 calculatedSumFreeSize(0) { }
5792 VkDeviceSize offset;
5822 VkDeviceSize m_UsableSize;
5823 uint32_t m_LevelCount;
5829 } m_FreeList[MAX_LEVELS];
5831 size_t m_AllocationCount;
5835 VkDeviceSize m_SumFreeSize;
5837 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5838 void DeleteNode(Node* node);
5839 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
5840 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
5841 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
5843 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5844 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
5848 void AddToFreeListFront(uint32_t level, Node* node);
5852 void RemoveFromFreeList(uint32_t level, Node* node);
5854 #if VMA_STATS_STRING_ENABLED 5855 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
5865 class VmaDeviceMemoryBlock
5867 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5869 VmaBlockMetadata* m_pMetadata;
5873 ~VmaDeviceMemoryBlock()
5875 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5876 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5883 uint32_t newMemoryTypeIndex,
5884 VkDeviceMemory newMemory,
5885 VkDeviceSize newSize,
5887 uint32_t algorithm);
5891 VmaPool GetParentPool()
const {
return m_hParentPool; }
5892 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5893 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5894 uint32_t GetId()
const {
return m_Id; }
5895 void* GetMappedData()
const {
return m_pMappedData; }
5898 bool Validate()
const;
5903 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
5906 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5907 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5909 VkResult BindBufferMemory(
5912 VkDeviceSize allocationLocalOffset,
5915 VkResult BindImageMemory(
5918 VkDeviceSize allocationLocalOffset,
5924 uint32_t m_MemoryTypeIndex;
5926 VkDeviceMemory m_hMemory;
5934 uint32_t m_MapCount;
5935 void* m_pMappedData;
5938 struct VmaPointerLess
5940 bool operator()(
const void* lhs,
const void* rhs)
const 5946 struct VmaDefragmentationMove
5948 size_t srcBlockIndex;
5949 size_t dstBlockIndex;
5950 VkDeviceSize srcOffset;
5951 VkDeviceSize dstOffset;
5955 class VmaDefragmentationAlgorithm;
5963 struct VmaBlockVector
5965 VMA_CLASS_NO_COPY(VmaBlockVector)
5970 uint32_t memoryTypeIndex,
5971 VkDeviceSize preferredBlockSize,
5972 size_t minBlockCount,
5973 size_t maxBlockCount,
5974 VkDeviceSize bufferImageGranularity,
5975 uint32_t frameInUseCount,
5977 bool explicitBlockSize,
5978 uint32_t algorithm);
5981 VkResult CreateMinBlocks();
5983 VmaPool GetParentPool()
const {
return m_hParentPool; }
5984 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5985 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5986 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5987 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5988 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5992 bool IsEmpty()
const {
return m_Blocks.empty(); }
5993 bool IsCorruptionDetectionEnabled()
const;
5996 uint32_t currentFrameIndex,
5998 VkDeviceSize alignment,
6000 VmaSuballocationType suballocType,
6001 size_t allocationCount,
6010 #if VMA_STATS_STRING_ENABLED 6011 void PrintDetailedMap(
class VmaJsonWriter& json);
6014 void MakePoolAllocationsLost(
6015 uint32_t currentFrameIndex,
6016 size_t* pLostAllocationCount);
6017 VkResult CheckCorruption();
6021 class VmaBlockVectorDefragmentationContext* pCtx,
6023 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
6024 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
6025 VkCommandBuffer commandBuffer);
6026 void DefragmentationEnd(
6027 class VmaBlockVectorDefragmentationContext* pCtx,
6033 size_t GetBlockCount()
const {
return m_Blocks.size(); }
6034 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
6035 size_t CalcAllocationCount()
const;
6036 bool IsBufferImageGranularityConflictPossible()
const;
6039 friend class VmaDefragmentationAlgorithm_Generic;
6043 const uint32_t m_MemoryTypeIndex;
6044 const VkDeviceSize m_PreferredBlockSize;
6045 const size_t m_MinBlockCount;
6046 const size_t m_MaxBlockCount;
6047 const VkDeviceSize m_BufferImageGranularity;
6048 const uint32_t m_FrameInUseCount;
6049 const bool m_IsCustomPool;
6050 const bool m_ExplicitBlockSize;
6051 const uint32_t m_Algorithm;
6055 bool m_HasEmptyBlock;
6056 VMA_RW_MUTEX m_Mutex;
6058 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
6059 uint32_t m_NextBlockId;
6061 VkDeviceSize CalcMaxBlockSize()
const;
6064 void Remove(VmaDeviceMemoryBlock* pBlock);
6068 void IncrementallySortBlocks();
6070 VkResult AllocatePage(
6071 uint32_t currentFrameIndex,
6073 VkDeviceSize alignment,
6075 VmaSuballocationType suballocType,
6079 VkResult AllocateFromBlock(
6080 VmaDeviceMemoryBlock* pBlock,
6081 uint32_t currentFrameIndex,
6083 VkDeviceSize alignment,
6086 VmaSuballocationType suballocType,
6090 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
6093 void ApplyDefragmentationMovesCpu(
6094 class VmaBlockVectorDefragmentationContext* pDefragCtx,
6095 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
6097 void ApplyDefragmentationMovesGpu(
6098 class VmaBlockVectorDefragmentationContext* pDefragCtx,
6099 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6100 VkCommandBuffer commandBuffer);
6111 VMA_CLASS_NO_COPY(VmaPool_T)
6113 VmaBlockVector m_BlockVector;
6118 VkDeviceSize preferredBlockSize);
6121 uint32_t GetId()
const {
return m_Id; }
6122 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
6124 #if VMA_STATS_STRING_ENABLED 6139 class VmaDefragmentationAlgorithm
6141 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
6143 VmaDefragmentationAlgorithm(
6145 VmaBlockVector* pBlockVector,
6146 uint32_t currentFrameIndex) :
6147 m_hAllocator(hAllocator),
6148 m_pBlockVector(pBlockVector),
6149 m_CurrentFrameIndex(currentFrameIndex)
6152 virtual ~VmaDefragmentationAlgorithm()
6156 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) = 0;
6157 virtual void AddAll() = 0;
6159 virtual VkResult Defragment(
6160 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6161 VkDeviceSize maxBytesToMove,
6162 uint32_t maxAllocationsToMove) = 0;
6164 virtual VkDeviceSize GetBytesMoved()
const = 0;
6165 virtual uint32_t GetAllocationsMoved()
const = 0;
6169 VmaBlockVector*
const m_pBlockVector;
6170 const uint32_t m_CurrentFrameIndex;
6172 struct AllocationInfo
6175 VkBool32* m_pChanged;
6178 m_hAllocation(VK_NULL_HANDLE),
6179 m_pChanged(VMA_NULL)
6183 m_hAllocation(hAlloc),
6184 m_pChanged(pChanged)
6190 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
6192 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
6194 VmaDefragmentationAlgorithm_Generic(
6196 VmaBlockVector* pBlockVector,
6197 uint32_t currentFrameIndex,
6198 bool overlappingMoveSupported);
6199 virtual ~VmaDefragmentationAlgorithm_Generic();
6201 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6202 virtual void AddAll() { m_AllAllocations =
true; }
6204 virtual VkResult Defragment(
6205 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6206 VkDeviceSize maxBytesToMove,
6207 uint32_t maxAllocationsToMove);
6209 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6210 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6213 uint32_t m_AllocationCount;
6214 bool m_AllAllocations;
6216 VkDeviceSize m_BytesMoved;
6217 uint32_t m_AllocationsMoved;
6219 struct AllocationInfoSizeGreater
6221 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 6223 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
6227 struct AllocationInfoOffsetGreater
6229 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 6231 return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
6237 size_t m_OriginalBlockIndex;
6238 VmaDeviceMemoryBlock* m_pBlock;
6239 bool m_HasNonMovableAllocations;
6240 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
6242 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
6243 m_OriginalBlockIndex(SIZE_MAX),
6245 m_HasNonMovableAllocations(true),
6246 m_Allocations(pAllocationCallbacks)
6250 void CalcHasNonMovableAllocations()
6252 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
6253 const size_t defragmentAllocCount = m_Allocations.size();
6254 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
6257 void SortAllocationsBySizeDescending()
6259 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
6262 void SortAllocationsByOffsetDescending()
6264 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
6268 struct BlockPointerLess
6270 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 6272 return pLhsBlockInfo->m_pBlock < pRhsBlock;
6274 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6276 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
6282 struct BlockInfoCompareMoveDestination
6284 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6286 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
6290 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
6294 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
6302 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
6303 BlockInfoVector m_Blocks;
6305 VkResult DefragmentRound(
6306 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6307 VkDeviceSize maxBytesToMove,
6308 uint32_t maxAllocationsToMove);
6310 size_t CalcBlocksWithNonMovableCount()
const;
6312 static bool MoveMakesSense(
6313 size_t dstBlockIndex, VkDeviceSize dstOffset,
6314 size_t srcBlockIndex, VkDeviceSize srcOffset);
6317 class VmaDefragmentationAlgorithm_Fast :
public VmaDefragmentationAlgorithm
6319 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
6321 VmaDefragmentationAlgorithm_Fast(
6323 VmaBlockVector* pBlockVector,
6324 uint32_t currentFrameIndex,
6325 bool overlappingMoveSupported);
6326 virtual ~VmaDefragmentationAlgorithm_Fast();
6328 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
6329 virtual void AddAll() { m_AllAllocations =
true; }
6331 virtual VkResult Defragment(
6332 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6333 VkDeviceSize maxBytesToMove,
6334 uint32_t maxAllocationsToMove);
6336 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6337 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6342 size_t origBlockIndex;
6345 class FreeSpaceDatabase
6351 s.blockInfoIndex = SIZE_MAX;
6352 for(
size_t i = 0; i < MAX_COUNT; ++i)
6354 m_FreeSpaces[i] = s;
6358 void Register(
size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
6360 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6366 size_t bestIndex = SIZE_MAX;
6367 for(
size_t i = 0; i < MAX_COUNT; ++i)
6370 if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
6375 if(m_FreeSpaces[i].size < size &&
6376 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
6382 if(bestIndex != SIZE_MAX)
6384 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
6385 m_FreeSpaces[bestIndex].offset = offset;
6386 m_FreeSpaces[bestIndex].size = size;
6390 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
6391 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
6393 size_t bestIndex = SIZE_MAX;
6394 VkDeviceSize bestFreeSpaceAfter = 0;
6395 for(
size_t i = 0; i < MAX_COUNT; ++i)
6398 if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
6400 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
6402 if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
6404 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
6406 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
6409 bestFreeSpaceAfter = freeSpaceAfter;
6415 if(bestIndex != SIZE_MAX)
6417 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
6418 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
6420 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6423 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
6424 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
6425 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
6430 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
6440 static const size_t MAX_COUNT = 4;
6444 size_t blockInfoIndex;
6445 VkDeviceSize offset;
6447 } m_FreeSpaces[MAX_COUNT];
6450 const bool m_OverlappingMoveSupported;
6452 uint32_t m_AllocationCount;
6453 bool m_AllAllocations;
6455 VkDeviceSize m_BytesMoved;
6456 uint32_t m_AllocationsMoved;
6458 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
6460 void PreprocessMetadata();
6461 void PostprocessMetadata();
6462 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc);
6465 struct VmaBlockDefragmentationContext
6469 BLOCK_FLAG_USED = 0x00000001,
6475 class VmaBlockVectorDefragmentationContext
6477 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
6481 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
6483 VmaBlockVectorDefragmentationContext(
6486 VmaBlockVector* pBlockVector,
6487 uint32_t currFrameIndex);
6488 ~VmaBlockVectorDefragmentationContext();
6490 VmaPool GetCustomPool()
const {
return m_hCustomPool; }
6491 VmaBlockVector* GetBlockVector()
const {
return m_pBlockVector; }
6492 VmaDefragmentationAlgorithm* GetAlgorithm()
const {
return m_pAlgorithm; }
6494 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6495 void AddAll() { m_AllAllocations =
true; }
6497 void Begin(
bool overlappingMoveSupported);
6504 VmaBlockVector*
const m_pBlockVector;
6505 const uint32_t m_CurrFrameIndex;
6507 VmaDefragmentationAlgorithm* m_pAlgorithm;
6515 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
6516 bool m_AllAllocations;
6519 struct VmaDefragmentationContext_T
6522 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
6524 VmaDefragmentationContext_T(
6526 uint32_t currFrameIndex,
6529 ~VmaDefragmentationContext_T();
6531 void AddPools(uint32_t poolCount,
VmaPool* pPools);
6532 void AddAllocations(
6533 uint32_t allocationCount,
6535 VkBool32* pAllocationsChanged);
6543 VkResult Defragment(
6544 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
6545 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
6550 const uint32_t m_CurrFrameIndex;
6551 const uint32_t m_Flags;
6554 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
6556 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
6559 #if VMA_RECORDING_ENABLED 6566 void WriteConfiguration(
6567 const VkPhysicalDeviceProperties& devProps,
6568 const VkPhysicalDeviceMemoryProperties& memProps,
6569 bool dedicatedAllocationExtensionEnabled,
6570 bool bindMemory2ExtensionEnabled);
6573 void RecordCreateAllocator(uint32_t frameIndex);
6574 void RecordDestroyAllocator(uint32_t frameIndex);
6575 void RecordCreatePool(uint32_t frameIndex,
6578 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
6579 void RecordAllocateMemory(uint32_t frameIndex,
6580 const VkMemoryRequirements& vkMemReq,
6583 void RecordAllocateMemoryPages(uint32_t frameIndex,
6584 const VkMemoryRequirements& vkMemReq,
6586 uint64_t allocationCount,
6588 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
6589 const VkMemoryRequirements& vkMemReq,
6590 bool requiresDedicatedAllocation,
6591 bool prefersDedicatedAllocation,
6594 void RecordAllocateMemoryForImage(uint32_t frameIndex,
6595 const VkMemoryRequirements& vkMemReq,
6596 bool requiresDedicatedAllocation,
6597 bool prefersDedicatedAllocation,
6600 void RecordFreeMemory(uint32_t frameIndex,
6602 void RecordFreeMemoryPages(uint32_t frameIndex,
6603 uint64_t allocationCount,
6605 void RecordSetAllocationUserData(uint32_t frameIndex,
6607 const void* pUserData);
6608 void RecordCreateLostAllocation(uint32_t frameIndex,
6610 void RecordMapMemory(uint32_t frameIndex,
6612 void RecordUnmapMemory(uint32_t frameIndex,
6614 void RecordFlushAllocation(uint32_t frameIndex,
6615 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6616 void RecordInvalidateAllocation(uint32_t frameIndex,
6617 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6618 void RecordCreateBuffer(uint32_t frameIndex,
6619 const VkBufferCreateInfo& bufCreateInfo,
6622 void RecordCreateImage(uint32_t frameIndex,
6623 const VkImageCreateInfo& imageCreateInfo,
6626 void RecordDestroyBuffer(uint32_t frameIndex,
6628 void RecordDestroyImage(uint32_t frameIndex,
6630 void RecordTouchAllocation(uint32_t frameIndex,
6632 void RecordGetAllocationInfo(uint32_t frameIndex,
6634 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
6636 void RecordDefragmentationBegin(uint32_t frameIndex,
6639 void RecordDefragmentationEnd(uint32_t frameIndex,
6649 class UserDataString
6653 const char* GetString()
const {
return m_Str; }
6663 VMA_MUTEX m_FileMutex;
6665 int64_t m_StartCounter;
6667 void GetBasicParams(CallParams& outParams);
6670 template<
typename T>
6671 void PrintPointerList(uint64_t count,
const T* pItems)
6675 fprintf(m_File,
"%p", pItems[0]);
6676 for(uint64_t i = 1; i < count; ++i)
6678 fprintf(m_File,
" %p", pItems[i]);
6683 void PrintPointerList(uint64_t count,
const VmaAllocation* pItems);
6687 #endif // #if VMA_RECORDING_ENABLED 6692 class VmaAllocationObjectAllocator
6694 VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
6696 VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks);
6703 VmaPoolAllocator<VmaAllocation_T> m_Allocator;
6707 struct VmaAllocator_T
6709 VMA_CLASS_NO_COPY(VmaAllocator_T)
6712 bool m_UseKhrDedicatedAllocation;
6713 bool m_UseKhrBindMemory2;
6715 bool m_AllocationCallbacksSpecified;
6716 VkAllocationCallbacks m_AllocationCallbacks;
6718 VmaAllocationObjectAllocator m_AllocationObjectAllocator;
6721 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
6722 VMA_MUTEX m_HeapSizeLimitMutex;
6724 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
6725 VkPhysicalDeviceMemoryProperties m_MemProps;
6728 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
6731 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
6732 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
6733 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
6739 const VkAllocationCallbacks* GetAllocationCallbacks()
const 6741 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
6745 return m_VulkanFunctions;
6748 VkDeviceSize GetBufferImageGranularity()
const 6751 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
6752 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
6755 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
6756 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
6758 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 6760 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
6761 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
6764 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 6766 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
6767 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
6770 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 6772 return IsMemoryTypeNonCoherent(memTypeIndex) ?
6773 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
6774 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
6777 bool IsIntegratedGpu()
const 6779 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
6782 #if VMA_RECORDING_ENABLED 6783 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
6786 void GetBufferMemoryRequirements(
6788 VkMemoryRequirements& memReq,
6789 bool& requiresDedicatedAllocation,
6790 bool& prefersDedicatedAllocation)
const;
6791 void GetImageMemoryRequirements(
6793 VkMemoryRequirements& memReq,
6794 bool& requiresDedicatedAllocation,
6795 bool& prefersDedicatedAllocation)
const;
6798 VkResult AllocateMemory(
6799 const VkMemoryRequirements& vkMemReq,
6800 bool requiresDedicatedAllocation,
6801 bool prefersDedicatedAllocation,
6802 VkBuffer dedicatedBuffer,
6803 VkImage dedicatedImage,
6805 VmaSuballocationType suballocType,
6806 size_t allocationCount,
6811 size_t allocationCount,
6814 VkResult ResizeAllocation(
6816 VkDeviceSize newSize);
6818 void CalculateStats(
VmaStats* pStats);
6820 #if VMA_STATS_STRING_ENABLED 6821 void PrintDetailedMap(
class VmaJsonWriter& json);
6824 VkResult DefragmentationBegin(
6828 VkResult DefragmentationEnd(
6835 void DestroyPool(
VmaPool pool);
6838 void SetCurrentFrameIndex(uint32_t frameIndex);
6839 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
6841 void MakePoolAllocationsLost(
6843 size_t* pLostAllocationCount);
6844 VkResult CheckPoolCorruption(
VmaPool hPool);
6845 VkResult CheckCorruption(uint32_t memoryTypeBits);
6850 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
6852 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
6854 VkResult BindVulkanBuffer(
6855 VkDeviceMemory memory,
6856 VkDeviceSize memoryOffset,
6860 VkResult BindVulkanImage(
6861 VkDeviceMemory memory,
6862 VkDeviceSize memoryOffset,
6869 VkResult BindBufferMemory(
6871 VkDeviceSize allocationLocalOffset,
6874 VkResult BindImageMemory(
6876 VkDeviceSize allocationLocalOffset,
6880 void FlushOrInvalidateAllocation(
6882 VkDeviceSize offset, VkDeviceSize size,
6883 VMA_CACHE_OPERATION op);
6885 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
6891 uint32_t GetGpuDefragmentationMemoryTypeBits();
6894 VkDeviceSize m_PreferredLargeHeapBlockSize;
6896 VkPhysicalDevice m_PhysicalDevice;
6897 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
6898 VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits;
6900 VMA_RW_MUTEX m_PoolsMutex;
6902 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
6903 uint32_t m_NextPoolId;
6907 #if VMA_RECORDING_ENABLED 6908 VmaRecorder* m_pRecorder;
6913 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
6915 VkResult AllocateMemoryOfType(
6917 VkDeviceSize alignment,
6918 bool dedicatedAllocation,
6919 VkBuffer dedicatedBuffer,
6920 VkImage dedicatedImage,
6922 uint32_t memTypeIndex,
6923 VmaSuballocationType suballocType,
6924 size_t allocationCount,
6928 VkResult AllocateDedicatedMemoryPage(
6930 VmaSuballocationType suballocType,
6931 uint32_t memTypeIndex,
6932 const VkMemoryAllocateInfo& allocInfo,
6934 bool isUserDataString,
6939 VkResult AllocateDedicatedMemory(
6941 VmaSuballocationType suballocType,
6942 uint32_t memTypeIndex,
6944 bool isUserDataString,
6946 VkBuffer dedicatedBuffer,
6947 VkImage dedicatedImage,
6948 size_t allocationCount,
6957 uint32_t CalculateGpuDefragmentationMemoryTypeBits()
const;
6963 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
6965 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
6968 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
6970 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
6973 template<
typename T>
6976 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
6979 template<
typename T>
6980 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
6982 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
6985 template<
typename T>
6986 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
6991 VmaFree(hAllocator, ptr);
6995 template<
typename T>
6996 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
7000 for(
size_t i = count; i--; )
7002 VmaFree(hAllocator, ptr);
7009 #if VMA_STATS_STRING_ENABLED 7011 class VmaStringBuilder
7014 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
7015 size_t GetLength()
const {
return m_Data.size(); }
7016 const char* GetData()
const {
return m_Data.data(); }
7018 void Add(
char ch) { m_Data.push_back(ch); }
7019 void Add(
const char* pStr);
7020 void AddNewLine() { Add(
'\n'); }
7021 void AddNumber(uint32_t num);
7022 void AddNumber(uint64_t num);
7023 void AddPointer(
const void* ptr);
7026 VmaVector< char, VmaStlAllocator<char> > m_Data;
7029 void VmaStringBuilder::Add(
const char* pStr)
7031 const size_t strLen = strlen(pStr);
7034 const size_t oldCount = m_Data.size();
7035 m_Data.resize(oldCount + strLen);
7036 memcpy(m_Data.data() + oldCount, pStr, strLen);
7040 void VmaStringBuilder::AddNumber(uint32_t num)
7043 VmaUint32ToStr(buf,
sizeof(buf), num);
7047 void VmaStringBuilder::AddNumber(uint64_t num)
7050 VmaUint64ToStr(buf,
sizeof(buf), num);
7054 void VmaStringBuilder::AddPointer(
const void* ptr)
7057 VmaPtrToStr(buf,
sizeof(buf), ptr);
7061 #endif // #if VMA_STATS_STRING_ENABLED 7066 #if VMA_STATS_STRING_ENABLED 7070 VMA_CLASS_NO_COPY(VmaJsonWriter)
7072 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
7075 void BeginObject(
bool singleLine =
false);
7078 void BeginArray(
bool singleLine =
false);
7081 void WriteString(
const char* pStr);
7082 void BeginString(
const char* pStr = VMA_NULL);
7083 void ContinueString(
const char* pStr);
7084 void ContinueString(uint32_t n);
7085 void ContinueString(uint64_t n);
7086 void ContinueString_Pointer(
const void* ptr);
7087 void EndString(
const char* pStr = VMA_NULL);
7089 void WriteNumber(uint32_t n);
7090 void WriteNumber(uint64_t n);
7091 void WriteBool(
bool b);
7095 static const char*
const INDENT;
7097 enum COLLECTION_TYPE
7099 COLLECTION_TYPE_OBJECT,
7100 COLLECTION_TYPE_ARRAY,
7104 COLLECTION_TYPE type;
7105 uint32_t valueCount;
7106 bool singleLineMode;
7109 VmaStringBuilder& m_SB;
7110 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
7111 bool m_InsideString;
7113 void BeginValue(
bool isString);
7114 void WriteIndent(
bool oneLess =
false);
7117 const char*
const VmaJsonWriter::INDENT =
" ";
7119 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
7121 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
7122 m_InsideString(false)
7126 VmaJsonWriter::~VmaJsonWriter()
7128 VMA_ASSERT(!m_InsideString);
7129 VMA_ASSERT(m_Stack.empty());
7132 void VmaJsonWriter::BeginObject(
bool singleLine)
7134 VMA_ASSERT(!m_InsideString);
7140 item.type = COLLECTION_TYPE_OBJECT;
7141 item.valueCount = 0;
7142 item.singleLineMode = singleLine;
7143 m_Stack.push_back(item);
7146 void VmaJsonWriter::EndObject()
7148 VMA_ASSERT(!m_InsideString);
7153 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
7157 void VmaJsonWriter::BeginArray(
bool singleLine)
7159 VMA_ASSERT(!m_InsideString);
7165 item.type = COLLECTION_TYPE_ARRAY;
7166 item.valueCount = 0;
7167 item.singleLineMode = singleLine;
7168 m_Stack.push_back(item);
7171 void VmaJsonWriter::EndArray()
7173 VMA_ASSERT(!m_InsideString);
7178 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
7182 void VmaJsonWriter::WriteString(
const char* pStr)
7188 void VmaJsonWriter::BeginString(
const char* pStr)
7190 VMA_ASSERT(!m_InsideString);
7194 m_InsideString =
true;
7195 if(pStr != VMA_NULL && pStr[0] !=
'\0')
7197 ContinueString(pStr);
7201 void VmaJsonWriter::ContinueString(
const char* pStr)
7203 VMA_ASSERT(m_InsideString);
7205 const size_t strLen = strlen(pStr);
7206 for(
size_t i = 0; i < strLen; ++i)
7239 VMA_ASSERT(0 &&
"Character not currently supported.");
7245 void VmaJsonWriter::ContinueString(uint32_t n)
7247 VMA_ASSERT(m_InsideString);
7251 void VmaJsonWriter::ContinueString(uint64_t n)
7253 VMA_ASSERT(m_InsideString);
7257 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
7259 VMA_ASSERT(m_InsideString);
7260 m_SB.AddPointer(ptr);
7263 void VmaJsonWriter::EndString(
const char* pStr)
7265 VMA_ASSERT(m_InsideString);
7266 if(pStr != VMA_NULL && pStr[0] !=
'\0')
7268 ContinueString(pStr);
7271 m_InsideString =
false;
7274 void VmaJsonWriter::WriteNumber(uint32_t n)
7276 VMA_ASSERT(!m_InsideString);
7281 void VmaJsonWriter::WriteNumber(uint64_t n)
7283 VMA_ASSERT(!m_InsideString);
7288 void VmaJsonWriter::WriteBool(
bool b)
7290 VMA_ASSERT(!m_InsideString);
7292 m_SB.Add(b ?
"true" :
"false");
7295 void VmaJsonWriter::WriteNull()
7297 VMA_ASSERT(!m_InsideString);
7302 void VmaJsonWriter::BeginValue(
bool isString)
7304 if(!m_Stack.empty())
7306 StackItem& currItem = m_Stack.back();
7307 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7308 currItem.valueCount % 2 == 0)
7310 VMA_ASSERT(isString);
7313 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7314 currItem.valueCount % 2 != 0)
7318 else if(currItem.valueCount > 0)
7327 ++currItem.valueCount;
7331 void VmaJsonWriter::WriteIndent(
bool oneLess)
7333 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
7337 size_t count = m_Stack.size();
7338 if(count > 0 && oneLess)
7342 for(
size_t i = 0; i < count; ++i)
7349 #endif // #if VMA_STATS_STRING_ENABLED 7353 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
7355 if(IsUserDataString())
7357 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
7359 FreeUserDataString(hAllocator);
7361 if(pUserData != VMA_NULL)
7363 const char*
const newStrSrc = (
char*)pUserData;
7364 const size_t newStrLen = strlen(newStrSrc);
7365 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
7366 memcpy(newStrDst, newStrSrc, newStrLen + 1);
7367 m_pUserData = newStrDst;
7372 m_pUserData = pUserData;
7376 void VmaAllocation_T::ChangeBlockAllocation(
7378 VmaDeviceMemoryBlock* block,
7379 VkDeviceSize offset)
7381 VMA_ASSERT(block != VMA_NULL);
7382 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7385 if(block != m_BlockAllocation.m_Block)
7387 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
7388 if(IsPersistentMap())
7390 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
7391 block->Map(hAllocator, mapRefCount, VMA_NULL);
7394 m_BlockAllocation.m_Block = block;
7395 m_BlockAllocation.m_Offset = offset;
7398 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
7400 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7401 m_BlockAllocation.m_Offset = newOffset;
7404 VkDeviceSize VmaAllocation_T::GetOffset()
const 7408 case ALLOCATION_TYPE_BLOCK:
7409 return m_BlockAllocation.m_Offset;
7410 case ALLOCATION_TYPE_DEDICATED:
7418 VkDeviceMemory VmaAllocation_T::GetMemory()
const 7422 case ALLOCATION_TYPE_BLOCK:
7423 return m_BlockAllocation.m_Block->GetDeviceMemory();
7424 case ALLOCATION_TYPE_DEDICATED:
7425 return m_DedicatedAllocation.m_hMemory;
7428 return VK_NULL_HANDLE;
7432 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 7436 case ALLOCATION_TYPE_BLOCK:
7437 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
7438 case ALLOCATION_TYPE_DEDICATED:
7439 return m_DedicatedAllocation.m_MemoryTypeIndex;
7446 void* VmaAllocation_T::GetMappedData()
const 7450 case ALLOCATION_TYPE_BLOCK:
7453 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
7454 VMA_ASSERT(pBlockData != VMA_NULL);
7455 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
7462 case ALLOCATION_TYPE_DEDICATED:
7463 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
7464 return m_DedicatedAllocation.m_pMappedData;
7471 bool VmaAllocation_T::CanBecomeLost()
const 7475 case ALLOCATION_TYPE_BLOCK:
7476 return m_BlockAllocation.m_CanBecomeLost;
7477 case ALLOCATION_TYPE_DEDICATED:
7485 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7487 VMA_ASSERT(CanBecomeLost());
7493 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
7496 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7501 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
7507 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
7517 #if VMA_STATS_STRING_ENABLED 7520 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
7529 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 7531 json.WriteString(
"Type");
7532 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
7534 json.WriteString(
"Size");
7535 json.WriteNumber(m_Size);
7537 if(m_pUserData != VMA_NULL)
7539 json.WriteString(
"UserData");
7540 if(IsUserDataString())
7542 json.WriteString((
const char*)m_pUserData);
7547 json.ContinueString_Pointer(m_pUserData);
7552 json.WriteString(
"CreationFrameIndex");
7553 json.WriteNumber(m_CreationFrameIndex);
7555 json.WriteString(
"LastUseFrameIndex");
7556 json.WriteNumber(GetLastUseFrameIndex());
7558 if(m_BufferImageUsage != 0)
7560 json.WriteString(
"Usage");
7561 json.WriteNumber(m_BufferImageUsage);
7567 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
7569 VMA_ASSERT(IsUserDataString());
7570 if(m_pUserData != VMA_NULL)
7572 char*
const oldStr = (
char*)m_pUserData;
7573 const size_t oldStrLen = strlen(oldStr);
7574 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
7575 m_pUserData = VMA_NULL;
7579 void VmaAllocation_T::BlockAllocMap()
7581 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7583 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7589 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
7593 void VmaAllocation_T::BlockAllocUnmap()
7595 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7597 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7603 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
7607 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
7609 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7613 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7615 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
7616 *ppData = m_DedicatedAllocation.m_pMappedData;
7622 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
7623 return VK_ERROR_MEMORY_MAP_FAILED;
7628 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
7629 hAllocator->m_hDevice,
7630 m_DedicatedAllocation.m_hMemory,
7635 if(result == VK_SUCCESS)
7637 m_DedicatedAllocation.m_pMappedData = *ppData;
7644 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
7646 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7648 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7653 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
7654 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
7655 hAllocator->m_hDevice,
7656 m_DedicatedAllocation.m_hMemory);
7661 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
7665 #if VMA_STATS_STRING_ENABLED 7667 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
7671 json.WriteString(
"Blocks");
7674 json.WriteString(
"Allocations");
7677 json.WriteString(
"UnusedRanges");
7680 json.WriteString(
"UsedBytes");
7683 json.WriteString(
"UnusedBytes");
7688 json.WriteString(
"AllocationSize");
7689 json.BeginObject(
true);
7690 json.WriteString(
"Min");
7692 json.WriteString(
"Avg");
7694 json.WriteString(
"Max");
7701 json.WriteString(
"UnusedRangeSize");
7702 json.BeginObject(
true);
7703 json.WriteString(
"Min");
7705 json.WriteString(
"Avg");
7707 json.WriteString(
"Max");
7715 #endif // #if VMA_STATS_STRING_ENABLED 7717 struct VmaSuballocationItemSizeLess
7720 const VmaSuballocationList::iterator lhs,
7721 const VmaSuballocationList::iterator rhs)
const 7723 return lhs->size < rhs->size;
7726 const VmaSuballocationList::iterator lhs,
7727 VkDeviceSize rhsSize)
const 7729 return lhs->size < rhsSize;
7737 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
7739 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
7743 #if VMA_STATS_STRING_ENABLED 7745 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
7746 VkDeviceSize unusedBytes,
7747 size_t allocationCount,
7748 size_t unusedRangeCount)
const 7752 json.WriteString(
"TotalBytes");
7753 json.WriteNumber(GetSize());
7755 json.WriteString(
"UnusedBytes");
7756 json.WriteNumber(unusedBytes);
7758 json.WriteString(
"Allocations");
7759 json.WriteNumber((uint64_t)allocationCount);
7761 json.WriteString(
"UnusedRanges");
7762 json.WriteNumber((uint64_t)unusedRangeCount);
7764 json.WriteString(
"Suballocations");
7768 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
7769 VkDeviceSize offset,
7772 json.BeginObject(
true);
7774 json.WriteString(
"Offset");
7775 json.WriteNumber(offset);
7777 hAllocation->PrintParameters(json);
7782 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
7783 VkDeviceSize offset,
7784 VkDeviceSize size)
const 7786 json.BeginObject(
true);
7788 json.WriteString(
"Offset");
7789 json.WriteNumber(offset);
7791 json.WriteString(
"Type");
7792 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
7794 json.WriteString(
"Size");
7795 json.WriteNumber(size);
7800 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const 7806 #endif // #if VMA_STATS_STRING_ENABLED 7811 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
7812 VmaBlockMetadata(hAllocator),
7815 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7816 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
7820 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
7824 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
7826 VmaBlockMetadata::Init(size);
7829 m_SumFreeSize = size;
7831 VmaSuballocation suballoc = {};
7832 suballoc.offset = 0;
7833 suballoc.size = size;
7834 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7835 suballoc.hAllocation = VK_NULL_HANDLE;
7837 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
7838 m_Suballocations.push_back(suballoc);
7839 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
7841 m_FreeSuballocationsBySize.push_back(suballocItem);
7844 bool VmaBlockMetadata_Generic::Validate()
const 7846 VMA_VALIDATE(!m_Suballocations.empty());
7849 VkDeviceSize calculatedOffset = 0;
7851 uint32_t calculatedFreeCount = 0;
7853 VkDeviceSize calculatedSumFreeSize = 0;
7856 size_t freeSuballocationsToRegister = 0;
7858 bool prevFree =
false;
7860 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7861 suballocItem != m_Suballocations.cend();
7864 const VmaSuballocation& subAlloc = *suballocItem;
7867 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
7869 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
7871 VMA_VALIDATE(!prevFree || !currFree);
7873 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
7877 calculatedSumFreeSize += subAlloc.size;
7878 ++calculatedFreeCount;
7879 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7881 ++freeSuballocationsToRegister;
7885 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
7889 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
7890 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
7893 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
7896 calculatedOffset += subAlloc.size;
7897 prevFree = currFree;
7902 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
7904 VkDeviceSize lastSize = 0;
7905 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
7907 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
7910 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7912 VMA_VALIDATE(suballocItem->size >= lastSize);
7914 lastSize = suballocItem->size;
7918 VMA_VALIDATE(ValidateFreeSuballocationList());
7919 VMA_VALIDATE(calculatedOffset == GetSize());
7920 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
7921 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
7926 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 7928 if(!m_FreeSuballocationsBySize.empty())
7930 return m_FreeSuballocationsBySize.back()->size;
7938 bool VmaBlockMetadata_Generic::IsEmpty()
const 7940 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
7943 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 7947 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7959 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7960 suballocItem != m_Suballocations.cend();
7963 const VmaSuballocation& suballoc = *suballocItem;
7964 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
7977 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 7979 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7981 inoutStats.
size += GetSize();
7988 #if VMA_STATS_STRING_ENABLED 7990 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 7992 PrintDetailedMap_Begin(json,
7994 m_Suballocations.size() - (size_t)m_FreeCount,
7998 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7999 suballocItem != m_Suballocations.cend();
8000 ++suballocItem, ++i)
8002 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8004 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
8008 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
8012 PrintDetailedMap_End(json);
// Tries to find space for a new suballocation of allocSize/allocAlignment in
// this block, writing the result (offset, item iterator, cost bookkeeping)
// into *pAllocationRequest. Returns whether a spot was found (the returns
// themselves are among the lines elided from this garbled chunk).
// NOTE(review): `upperAddress` and `strategy` are asserted/tested below but
// their parameter lines are missing here — presumably elided, not removed;
// confirm against the full source.
8015 #endif // #if VMA_STATS_STRING_ENABLED 8017 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
8018 uint32_t currentFrameIndex,
8019 uint32_t frameInUseCount,
8020 VkDeviceSize bufferImageGranularity,
8021 VkDeviceSize allocSize,
8022 VkDeviceSize allocAlignment,
8024 VmaSuballocationType allocType,
8025 bool canMakeOtherLost,
8027 VmaAllocationRequest* pAllocationRequest)
8029 VMA_ASSERT(allocSize > 0);
8030 VMA_ASSERT(!upperAddress);
8031 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8032 VMA_ASSERT(pAllocationRequest != VMA_NULL);
8033 VMA_HEAVY_ASSERT(Validate());
8035 pAllocationRequest->type = VmaAllocationRequestType::Normal;
// Early out: without permission to make other allocations lost, there must be
// at least allocSize + two debug margins of total free space.
8038 if(canMakeOtherLost ==
false &&
8039 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
// Best-fit path: binary-search the size-sorted free list for the first free
// suballocation large enough, then try candidates in increasing size order.
8045 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
8046 if(freeSuballocCount > 0)
8051 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
8052 m_FreeSuballocationsBySize.data(),
8053 m_FreeSuballocationsBySize.data() + freeSuballocCount,
8054 allocSize + 2 * VMA_DEBUG_MARGIN,
8055 VmaSuballocationItemSizeLess());
8056 size_t index = it - m_FreeSuballocationsBySize.data();
8057 for(; index < freeSuballocCount; ++index)
8062 bufferImageGranularity,
8066 m_FreeSuballocationsBySize[index],
8068 &pAllocationRequest->offset,
8069 &pAllocationRequest->itemsToMakeLostCount,
8070 &pAllocationRequest->sumFreeSize,
8071 &pAllocationRequest->sumItemSize))
8073 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Min-offset strategy: scan the suballocation list in address order and take
// the first FREE entry that passes CheckAllocation.
8078 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
8080 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8081 it != m_Suballocations.end();
8084 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
8087 bufferImageGranularity,
8093 &pAllocationRequest->offset,
8094 &pAllocationRequest->itemsToMakeLostCount,
8095 &pAllocationRequest->sumFreeSize,
8096 &pAllocationRequest->sumItemSize))
8098 pAllocationRequest->item = it;
// Remaining strategy: iterate the size-sorted free list from the largest
// entry downward.
8106 for(
size_t index = freeSuballocCount; index--; )
8111 bufferImageGranularity,
8115 m_FreeSuballocationsBySize[index],
8117 &pAllocationRequest->offset,
8118 &pAllocationRequest->itemsToMakeLostCount,
8119 &pAllocationRequest->sumFreeSize,
8120 &pAllocationRequest->sumItemSize))
8122 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Fallback: brute-force scan allowing existing allocations to be made lost;
// among candidates, keep the request with the lowest CalcCost().
8129 if(canMakeOtherLost)
8134 VmaAllocationRequest tmpAllocRequest = {};
8135 tmpAllocRequest.type = VmaAllocationRequestType::Normal;
8136 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
8137 suballocIt != m_Suballocations.end();
8140 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
8141 suballocIt->hAllocation->CanBecomeLost())
8146 bufferImageGranularity,
8152 &tmpAllocRequest.offset,
8153 &tmpAllocRequest.itemsToMakeLostCount,
8154 &tmpAllocRequest.sumFreeSize,
8155 &tmpAllocRequest.sumItemSize))
8159 *pAllocationRequest = tmpAllocRequest;
8160 pAllocationRequest->item = suballocIt;
8163 if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
8165 *pAllocationRequest = tmpAllocRequest;
8166 pAllocationRequest->item = suballocIt;
// Executes the "make lost" part of a previously-built allocation request:
// walks forward from pAllocationRequest->item, skipping FREE entries, and
// calls MakeLost() on each losable allocation until itemsToMakeLostCount
// reaches zero. Freed items are merged via FreeSuballocation(), which
// returns the surviving iterator. Return value lines are elided in this
// garbled chunk; the trailing asserts show the item must end up FREE.
8179 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
8180 uint32_t currentFrameIndex,
8181 uint32_t frameInUseCount,
8182 VmaAllocationRequest* pAllocationRequest)
8184 VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);
8186 while(pAllocationRequest->itemsToMakeLostCount > 0)
8188 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
8190 ++pAllocationRequest->item;
8192 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
8193 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
8194 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
8195 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; continue from the iterator it returns.
8197 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
8198 --pAllocationRequest->itemsToMakeLostCount;
8206 VMA_HEAVY_ASSERT(Validate());
8207 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
8208 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Makes lost every allocation in this block that can become lost for the
// given frame window, freeing its suballocation. Returns how many
// allocations were lost.
8213 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
8215 uint32_t lostAllocationCount = 0;
8216 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8217 it != m_Suballocations.end();
8220 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
8221 it->hAllocation->CanBecomeLost() &&
8222 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge with neighbors; resume from returned iterator.
8224 it = FreeSuballocation(it);
8225 ++lostAllocationCount;
8228 return lostAllocationCount;
// Validates the magic-value guard bytes written around each live
// suballocation (at offset - VMA_DEBUG_MARGIN and at offset + size).
// Returns VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted margin;
// the success-return line is elided from this garbled chunk.
8231 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
8233 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8234 it != m_Suballocations.end();
8237 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
8239 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
8241 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
8242 return VK_ERROR_VALIDATION_FAILED_EXT;
8244 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
8246 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8247 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously validated allocation request: converts the target FREE
// suballocation into a used one at request.offset/allocSize, and re-inserts
// any leftover space before (paddingBegin) and after (paddingEnd) as new FREE
// suballocations registered in the size-sorted free list.
8255 void VmaBlockMetadata_Generic::Alloc(
8256 const VmaAllocationRequest& request,
8257 VmaSuballocationType type,
8258 VkDeviceSize allocSize,
8261 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
8262 VMA_ASSERT(request.item != m_Suballocations.end());
8263 VmaSuballocation& suballoc = *request.item;
8265 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8267 VMA_ASSERT(request.offset >= suballoc.offset);
8268 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
8269 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
8270 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// Remove from free list before mutating: its sort key (size) changes below.
8274 UnregisterFreeSuballocation(request.item);
8276 suballoc.offset = request.offset;
8277 suballoc.size = allocSize;
8278 suballoc.type = type;
8279 suballoc.hAllocation = hAllocation;
// Trailing padding becomes a new FREE suballocation inserted after the item.
8284 VmaSuballocation paddingSuballoc = {};
8285 paddingSuballoc.offset = request.offset + allocSize;
8286 paddingSuballoc.size = paddingEnd;
8287 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8288 VmaSuballocationList::iterator next = request.item;
8290 const VmaSuballocationList::iterator paddingEndItem =
8291 m_Suballocations.insert(next, paddingSuballoc);
8292 RegisterFreeSuballocation(paddingEndItem);
// Leading padding becomes a new FREE suballocation inserted before the item.
8298 VmaSuballocation paddingSuballoc = {};
8299 paddingSuballoc.offset = request.offset - paddingBegin;
8300 paddingSuballoc.size = paddingBegin;
8301 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8302 const VmaSuballocationList::iterator paddingBeginItem =
8303 m_Suballocations.insert(request.item, paddingSuballoc);
8304 RegisterFreeSuballocation(paddingBeginItem);
// Update counters: one free range consumed, padding ranges re-added (the
// increments for paddingBegin/paddingEnd are among the elided lines).
8308 m_FreeCount = m_FreeCount - 1;
8309 if(paddingBegin > 0)
8317 m_SumFreeSize -= allocSize;
// Frees the suballocation holding the given allocation handle: linear search
// by hAllocation, then FreeSuballocation() (which merges with FREE
// neighbors). Asserts if the allocation is not found in this block.
8320 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
8322 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8323 suballocItem != m_Suballocations.end();
8326 VmaSuballocation& suballoc = *suballocItem;
8327 if(suballoc.hAllocation == allocation)
8329 FreeSuballocation(suballocItem);
8330 VMA_HEAVY_ASSERT(Validate());
8334 VMA_ASSERT(0 &&
"Not found!");
// Frees the suballocation that starts exactly at the given offset.
// Same linear-search pattern as Free(); asserts when no match exists.
8337 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
8339 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8340 suballocItem != m_Suballocations.end();
8343 VmaSuballocation& suballoc = *suballocItem;
8344 if(suballoc.offset == offset)
8346 FreeSuballocation(suballocItem);
8350 VMA_ASSERT(0 &&
"Not found!");
// Sanity-checks m_FreeSuballocationsBySize: every entry must be FREE, at
// least VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER bytes, and the vector
// must be sorted ascending by size (lastSize tracks the previous entry).
8353 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 8355 VkDeviceSize lastSize = 0;
8356 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
8358 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
8360 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
8361 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
8362 VMA_VALIDATE(it->size >= lastSize);
8363 lastSize = it->size;
// Core placement test: given a candidate suballocation (suballocItem),
// decides whether allocSize bytes at allocAlignment can start there,
// optionally by making later allocations lost (canMakeOtherLost). On
// success writes the final *pOffset plus cost outputs (*itemsToMakeLostCount,
// *pSumFreeSize, *pSumItemSize). The function has two largely parallel
// branches: canMakeOtherLost (may span several suballocations) and the
// plain free-suballocation case starting at embedded line 8549.
// NOTE(review): many closing braces / return statements are elided in this
// garbled chunk; structure below follows the embedded original numbering.
8368 bool VmaBlockMetadata_Generic::CheckAllocation(
8369 uint32_t currentFrameIndex,
8370 uint32_t frameInUseCount,
8371 VkDeviceSize bufferImageGranularity,
8372 VkDeviceSize allocSize,
8373 VkDeviceSize allocAlignment,
8374 VmaSuballocationType allocType,
8375 VmaSuballocationList::const_iterator suballocItem,
8376 bool canMakeOtherLost,
8377 VkDeviceSize* pOffset,
8378 size_t* itemsToMakeLostCount,
8379 VkDeviceSize* pSumFreeSize,
8380 VkDeviceSize* pSumItemSize)
const 8382 VMA_ASSERT(allocSize > 0);
8383 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8384 VMA_ASSERT(suballocItem != m_Suballocations.cend());
8385 VMA_ASSERT(pOffset != VMA_NULL);
8387 *itemsToMakeLostCount = 0;
// Branch 1: allocation may consume non-free items by making them lost.
8391 if(canMakeOtherLost)
8393 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8395 *pSumFreeSize = suballocItem->size;
8399 if(suballocItem->hAllocation->CanBecomeLost() &&
8400 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8402 ++*itemsToMakeLostCount;
8403 *pSumItemSize = suballocItem->size;
8412 if(GetSize() - suballocItem->offset < allocSize)
// Start at the item's offset, add the debug margin, then align up.
8418 *pOffset = suballocItem->offset;
8421 if(VMA_DEBUG_MARGIN > 0)
8423 *pOffset += VMA_DEBUG_MARGIN;
8427 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Respect bufferImageGranularity vs. preceding suballocations on the same
// "page": if a conflicting type is adjacent, bump alignment to granularity.
8431 if(bufferImageGranularity > 1)
8433 bool bufferImageGranularityConflict =
false;
8434 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8435 while(prevSuballocItem != m_Suballocations.cbegin())
8438 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8439 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8441 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8443 bufferImageGranularityConflict =
true;
8451 if(bufferImageGranularityConflict)
8453 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
8459 if(*pOffset >= suballocItem->offset + suballocItem->size)
8465 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
8468 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
8470 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
8472 if(suballocItem->offset + totalSize > GetSize())
// If the candidate item is too small, walk forward accumulating following
// items (free, or losable within the frame window) until totalSize fits.
8479 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
8480 if(totalSize > suballocItem->size)
8482 VkDeviceSize remainingSize = totalSize - suballocItem->size;
8483 while(remainingSize > 0)
8486 if(lastSuballocItem == m_Suballocations.cend())
8490 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8492 *pSumFreeSize += lastSuballocItem->size;
8496 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
8497 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
8498 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8500 ++*itemsToMakeLostCount;
8501 *pSumItemSize += lastSuballocItem->size;
8508 remainingSize = (lastSuballocItem->size < remainingSize) ?
8509 remainingSize - lastSuballocItem->size : 0;
// Check granularity conflicts with following suballocations; those that
// conflict must also be losable, otherwise this placement fails.
8515 if(bufferImageGranularity > 1)
8517 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
8519 while(nextSuballocItem != m_Suballocations.cend())
8521 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8522 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8524 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8526 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
8527 if(nextSuballoc.hAllocation->CanBecomeLost() &&
8528 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8530 ++*itemsToMakeLostCount;
// Branch 2: plain case — the candidate item itself must be FREE and big
// enough; same margin/alignment/granularity logic as above, single item.
8549 const VmaSuballocation& suballoc = *suballocItem;
8550 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8552 *pSumFreeSize = suballoc.size;
8555 if(suballoc.size < allocSize)
8561 *pOffset = suballoc.offset;
8564 if(VMA_DEBUG_MARGIN > 0)
8566 *pOffset += VMA_DEBUG_MARGIN;
8570 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
8574 if(bufferImageGranularity > 1)
8576 bool bufferImageGranularityConflict =
false;
8577 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8578 while(prevSuballocItem != m_Suballocations.cbegin())
8581 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8582 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8584 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8586 bufferImageGranularityConflict =
true;
8594 if(bufferImageGranularityConflict)
8596 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
8601 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
8604 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
// Fail if padding + allocation + end margin exceed this free item.
8607 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
8614 if(bufferImageGranularity > 1)
8616 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
8618 while(nextSuballocItem != m_Suballocations.cend())
8620 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8621 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8623 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges a FREE suballocation with its (also FREE) successor: the item
// absorbs the successor's size and the successor node is erased.
// Caller is responsible for free-list bookkeeping around this call.
8642 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
8644 VMA_ASSERT(item != m_Suballocations.end());
8645 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8647 VmaSuballocationList::iterator nextItem = item;
8649 VMA_ASSERT(nextItem != m_Suballocations.end());
8650 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
8652 item->size += nextItem->size;
8654 m_Suballocations.erase(nextItem);
// Turns a used suballocation into FREE, updates m_SumFreeSize, merges with
// FREE neighbors on either side (unregistering them from the size-sorted
// free list first, since merging changes their size), registers the
// surviving FREE item, and returns an iterator to it.
8657 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
8660 VmaSuballocation& suballoc = *suballocItem;
8661 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8662 suballoc.hAllocation = VK_NULL_HANDLE;
8666 m_SumFreeSize += suballoc.size;
// Determine whether the immediate neighbors are FREE and thus mergeable.
8669 bool mergeWithNext =
false;
8670 bool mergeWithPrev =
false;
8672 VmaSuballocationList::iterator nextItem = suballocItem;
8674 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
8676 mergeWithNext =
true;
8679 VmaSuballocationList::iterator prevItem = suballocItem;
8680 if(suballocItem != m_Suballocations.begin())
8683 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8685 mergeWithPrev =
true;
// Merge order: absorb the next item into this one, then absorb this one
// into the previous; re-register whichever item survives.
8691 UnregisterFreeSuballocation(nextItem);
8692 MergeFreeWithNext(suballocItem);
8697 UnregisterFreeSuballocation(prevItem);
8698 MergeFreeWithNext(prevItem);
8699 RegisterFreeSuballocation(prevItem);
8704 RegisterFreeSuballocation(suballocItem);
8705 return suballocItem;
// Inserts a FREE suballocation into m_FreeSuballocationsBySize, keeping the
// vector sorted by size. Items smaller than
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are deliberately not tracked.
8709 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
8711 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8712 VMA_ASSERT(item->size > 0);
8716 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8718 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8720 if(m_FreeSuballocationsBySize.empty())
8722 m_FreeSuballocationsBySize.push_back(item);
8726 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a FREE suballocation from the size-sorted free list. Binary-search
// for the first entry of equal-or-larger size, then scan the run of
// equally-sized entries for the exact iterator. Asserts if the item should
// be registered (size >= threshold) but is not found.
8734 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
8736 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8737 VMA_ASSERT(item->size > 0);
8741 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8743 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8745 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
8746 m_FreeSuballocationsBySize.data(),
8747 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
8749 VmaSuballocationItemSizeLess());
8750 for(
size_t index = it - m_FreeSuballocationsBySize.data();
8751 index < m_FreeSuballocationsBySize.size();
8754 if(m_FreeSuballocationsBySize[index] == item)
8756 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Past the run of same-size entries without a match => inconsistent state.
8759 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
8761 VMA_ASSERT(0 &&
"Not found.");
// Conservative test used to decide whether bufferImageGranularity handling
// can be skipped: scans all used suballocations, tracking the minimum
// allocation alignment and whether any adjacent type pair conflicts.
// Returns true when a conflict was seen or the minimum alignment is at
// least the granularity (i.e. a conflict is possible / relevant).
// inOutPrevSuballocType carries the last seen type across calls.
8767 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
8768 VkDeviceSize bufferImageGranularity,
8769 VmaSuballocationType& inOutPrevSuballocType)
const 8771 if(bufferImageGranularity == 1 || IsEmpty())
8776 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
8777 bool typeConflictFound =
false;
8778 for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
8779 it != m_Suballocations.cend();
8782 const VmaSuballocationType suballocType = it->type;
8783 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
8785 minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
8786 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
8788 typeConflictFound =
true;
8790 inOutPrevSuballocType = suballocType;
8794 return typeConflictFound || minAlignment >= bufferImageGranularity;
// Linear (ring/stack) block metadata constructor: two suballocation vectors
// (double-buffered via m_1stVectorIndex), second vector initially empty,
// and all null-item counters zeroed. Vectors use the allocator's callbacks.
8800 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
8801 VmaBlockMetadata(hAllocator),
8803 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8804 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8805 m_1stVectorIndex(0),
8806 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
8807 m_1stNullItemsBeginCount(0),
8808 m_1stNullItemsMiddleCount(0),
8809 m_2ndNullItemsCount(0)
// Destructor — members clean themselves up; no explicit work visible here.
8813 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
// Initializes metadata for a block of the given size; the whole block
// starts out free (m_SumFreeSize == size).
8817 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
8819 VmaBlockMetadata::Init(size);
8820 m_SumFreeSize = size;
// Full consistency check of the linear metadata: vector-mode invariants,
// null-item counters, per-suballocation offset/size agreement with the
// owning VmaAllocation, monotonically increasing offsets (tracked in
// `offset`), and finally that m_SumFreeSize matches size - sumUsedSize.
// NOTE(review): many closing braces and some VMA_VALIDATE lines are elided
// in this garbled chunk; structure follows the embedded line numbers.
8823 bool VmaBlockMetadata_Linear::Validate()
const 8825 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8826 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8828 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
8829 VMA_VALIDATE(!suballocations1st.empty() ||
8830 suballocations2nd.empty() ||
8831 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
8833 if(!suballocations1st.empty())
8836 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
8838 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
8840 if(!suballocations2nd.empty())
8843 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
8846 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
8847 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
8849 VkDeviceSize sumUsedSize = 0;
8850 const size_t suballoc1stCount = suballocations1st.size();
8851 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Ring-buffer mode: 2nd vector precedes the 1st in address order.
8853 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8855 const size_t suballoc2ndCount = suballocations2nd.size();
8856 size_t nullItem2ndCount = 0;
8857 for(
size_t i = 0; i < suballoc2ndCount; ++i)
8859 const VmaSuballocation& suballoc = suballocations2nd[i];
8860 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8862 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8863 VMA_VALIDATE(suballoc.offset >= offset);
8867 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8868 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8869 sumUsedSize += suballoc.size;
8876 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8879 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Leading null items of the 1st vector must be FREE with null handles.
8882 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
8884 const VmaSuballocation& suballoc = suballocations1st[i];
8885 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
8886 suballoc.hAllocation == VK_NULL_HANDLE);
8889 size_t nullItem1stCount = m_1stNullItemsBeginCount;
8891 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
8893 const VmaSuballocation& suballoc = suballocations1st[i];
8894 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8896 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8897 VMA_VALIDATE(suballoc.offset >= offset);
8898 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
8902 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8903 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8904 sumUsedSize += suballoc.size;
8911 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8913 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// Double-stack mode: 2nd vector grows downward, iterate it in reverse.
8915 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8917 const size_t suballoc2ndCount = suballocations2nd.size();
8918 size_t nullItem2ndCount = 0;
8919 for(
size_t i = suballoc2ndCount; i--; )
8921 const VmaSuballocation& suballoc = suballocations2nd[i];
8922 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8924 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8925 VMA_VALIDATE(suballoc.offset >= offset);
8929 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8930 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8931 sumUsedSize += suballoc.size;
8938 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8941 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
8944 VMA_VALIDATE(offset <= GetSize());
8945 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
// Live allocation count = entries in both vectors minus their null items.
8950 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 8952 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
8953 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Largest contiguous unused range, computed per vector mode:
// - EMPTY: max of the gap before the first and after the last 1st-vector item
//   (the VMA_MAX wrapper around the two expressions is elided in this chunk);
// - RING_BUFFER: gap between the end of the 2nd vector and start of the 1st;
// - DOUBLE_STACK: gap between the end of the 1st and the top of the 2nd.
8956 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 8958 const VkDeviceSize size = GetSize();
8970 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8972 switch(m_2ndVectorMode)
8974 case SECOND_VECTOR_EMPTY:
8980 const size_t suballocations1stCount = suballocations1st.size();
8981 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
8982 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
8983 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
8985 firstSuballoc.offset,
8986 size - (lastSuballoc.offset + lastSuballoc.size));
8990 case SECOND_VECTOR_RING_BUFFER:
8995 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8996 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
8997 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
8998 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
9002 case SECOND_VECTOR_DOUBLE_STACK:
9007 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9008 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
9009 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
9010 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Builds detailed statistics for this linear block by sweeping all three
// regions in address order — ring-buffer 2nd vector (before the 1st),
// the 1st vector, then the double-stack 2nd vector (after the 1st) —
// tracking `lastOffset` and measuring each gap as an unused range.
// The actual outInfo accumulation lines are elided in this garbled chunk;
// only the sweep skeleton is visible.
9020 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 9022 const VkDeviceSize size = GetSize();
9023 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9024 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9025 const size_t suballoc1stCount = suballocations1st.size();
9026 const size_t suballoc2ndCount = suballocations2nd.size();
9037 VkDeviceSize lastOffset = 0;
// Region 1: ring-buffer 2nd vector occupies [0, start of 1st vector).
9039 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9041 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9042 size_t nextAlloc2ndIndex = 0;
9043 while(lastOffset < freeSpace2ndTo1stEnd)
9046 while(nextAlloc2ndIndex < suballoc2ndCount &&
9047 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9049 ++nextAlloc2ndIndex;
9053 if(nextAlloc2ndIndex < suballoc2ndCount)
9055 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9058 if(lastOffset < suballoc.offset)
9061 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9075 lastOffset = suballoc.offset + suballoc.size;
9076 ++nextAlloc2ndIndex;
9082 if(lastOffset < freeSpace2ndTo1stEnd)
9084 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9092 lastOffset = freeSpace2ndTo1stEnd;
// Region 2: the 1st vector, up to the 2nd stack (double-stack) or block end.
9097 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9098 const VkDeviceSize freeSpace1stTo2ndEnd =
9099 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9100 while(lastOffset < freeSpace1stTo2ndEnd)
9103 while(nextAlloc1stIndex < suballoc1stCount &&
9104 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9106 ++nextAlloc1stIndex;
9110 if(nextAlloc1stIndex < suballoc1stCount)
9112 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9115 if(lastOffset < suballoc.offset)
9118 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9132 lastOffset = suballoc.offset + suballoc.size;
9133 ++nextAlloc1stIndex;
9139 if(lastOffset < freeSpace1stTo2ndEnd)
9141 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9149 lastOffset = freeSpace1stTo2ndEnd;
// Region 3: double-stack 2nd vector, scanned top-down (reverse index).
9153 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9155 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9156 while(lastOffset < size)
9159 while(nextAlloc2ndIndex != SIZE_MAX &&
9160 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9162 --nextAlloc2ndIndex;
9166 if(nextAlloc2ndIndex != SIZE_MAX)
9168 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9171 if(lastOffset < suballoc.offset)
9174 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9188 lastOffset = suballoc.offset + suballoc.size;
9189 --nextAlloc2ndIndex;
9195 if(lastOffset < size)
9197 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Accumulates this linear block's contribution into aggregate VmaPoolStats.
// Same three-region sweep as CalcAllocationStatInfo (ring-buffer 2nd vector,
// 1st vector, double-stack 2nd vector), tracking `lastOffset` to size gaps;
// the lines that add counts/sizes into inoutStats are elided in this chunk.
// NOTE(review): nextAlloc2ndIndex starts at m_1stNullItemsBeginCount here
// (embedded line 9228) but at 0 in the sibling sweeps — looks suspicious;
// verify against the full source before relying on it.
9213 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 9215 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9216 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9217 const VkDeviceSize size = GetSize();
9218 const size_t suballoc1stCount = suballocations1st.size();
9219 const size_t suballoc2ndCount = suballocations2nd.size();
9221 inoutStats.
size += size;
9223 VkDeviceSize lastOffset = 0;
// Region 1: ring-buffer 2nd vector (precedes the 1st vector in addresses).
9225 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9227 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9228 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
9229 while(lastOffset < freeSpace2ndTo1stEnd)
9232 while(nextAlloc2ndIndex < suballoc2ndCount &&
9233 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9235 ++nextAlloc2ndIndex;
9239 if(nextAlloc2ndIndex < suballoc2ndCount)
9241 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9244 if(lastOffset < suballoc.offset)
9247 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9258 lastOffset = suballoc.offset + suballoc.size;
9259 ++nextAlloc2ndIndex;
9264 if(lastOffset < freeSpace2ndTo1stEnd)
9267 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9274 lastOffset = freeSpace2ndTo1stEnd;
// Region 2: the 1st vector, up to the 2nd stack's bottom or the block end.
9279 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9280 const VkDeviceSize freeSpace1stTo2ndEnd =
9281 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9282 while(lastOffset < freeSpace1stTo2ndEnd)
9285 while(nextAlloc1stIndex < suballoc1stCount &&
9286 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9288 ++nextAlloc1stIndex;
9292 if(nextAlloc1stIndex < suballoc1stCount)
9294 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9297 if(lastOffset < suballoc.offset)
9300 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9311 lastOffset = suballoc.offset + suballoc.size;
9312 ++nextAlloc1stIndex;
9317 if(lastOffset < freeSpace1stTo2ndEnd)
9320 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9327 lastOffset = freeSpace1stTo2ndEnd;
// Region 3: double-stack 2nd vector, scanned from the top (reverse index).
9331 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9333 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9334 while(lastOffset < size)
9337 while(nextAlloc2ndIndex != SIZE_MAX &&
9338 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9340 --nextAlloc2ndIndex;
9344 if(nextAlloc2ndIndex != SIZE_MAX)
9346 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9349 if(lastOffset < suballoc.offset)
9352 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9363 lastOffset = suballoc.offset + suballoc.size;
9364 --nextAlloc2ndIndex;
9369 if(lastOffset < size)
9372 const VkDeviceSize unusedRangeSize = size - lastOffset;
9385 #if VMA_STATS_STRING_ENABLED 9386 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 9388 const VkDeviceSize size = GetSize();
9389 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9390 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9391 const size_t suballoc1stCount = suballocations1st.size();
9392 const size_t suballoc2ndCount = suballocations2nd.size();
9396 size_t unusedRangeCount = 0;
9397 VkDeviceSize usedBytes = 0;
9399 VkDeviceSize lastOffset = 0;
9401 size_t alloc2ndCount = 0;
9402 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9404 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9405 size_t nextAlloc2ndIndex = 0;
9406 while(lastOffset < freeSpace2ndTo1stEnd)
9409 while(nextAlloc2ndIndex < suballoc2ndCount &&
9410 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9412 ++nextAlloc2ndIndex;
9416 if(nextAlloc2ndIndex < suballoc2ndCount)
9418 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9421 if(lastOffset < suballoc.offset)
9430 usedBytes += suballoc.size;
9433 lastOffset = suballoc.offset + suballoc.size;
9434 ++nextAlloc2ndIndex;
9439 if(lastOffset < freeSpace2ndTo1stEnd)
9446 lastOffset = freeSpace2ndTo1stEnd;
9451 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9452 size_t alloc1stCount = 0;
9453 const VkDeviceSize freeSpace1stTo2ndEnd =
9454 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9455 while(lastOffset < freeSpace1stTo2ndEnd)
9458 while(nextAlloc1stIndex < suballoc1stCount &&
9459 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9461 ++nextAlloc1stIndex;
9465 if(nextAlloc1stIndex < suballoc1stCount)
9467 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9470 if(lastOffset < suballoc.offset)
9479 usedBytes += suballoc.size;
9482 lastOffset = suballoc.offset + suballoc.size;
9483 ++nextAlloc1stIndex;
9488 if(lastOffset < size)
9495 lastOffset = freeSpace1stTo2ndEnd;
9499 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9501 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9502 while(lastOffset < size)
9505 while(nextAlloc2ndIndex != SIZE_MAX &&
9506 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9508 --nextAlloc2ndIndex;
9512 if(nextAlloc2ndIndex != SIZE_MAX)
9514 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9517 if(lastOffset < suballoc.offset)
9526 usedBytes += suballoc.size;
9529 lastOffset = suballoc.offset + suballoc.size;
9530 --nextAlloc2ndIndex;
9535 if(lastOffset < size)
9547 const VkDeviceSize unusedBytes = size - usedBytes;
9548 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
9553 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9555 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9556 size_t nextAlloc2ndIndex = 0;
9557 while(lastOffset < freeSpace2ndTo1stEnd)
9560 while(nextAlloc2ndIndex < suballoc2ndCount &&
9561 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9563 ++nextAlloc2ndIndex;
9567 if(nextAlloc2ndIndex < suballoc2ndCount)
9569 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9572 if(lastOffset < suballoc.offset)
9575 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9576 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9581 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9584 lastOffset = suballoc.offset + suballoc.size;
9585 ++nextAlloc2ndIndex;
9590 if(lastOffset < freeSpace2ndTo1stEnd)
9593 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9594 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9598 lastOffset = freeSpace2ndTo1stEnd;
9603 nextAlloc1stIndex = m_1stNullItemsBeginCount;
9604 while(lastOffset < freeSpace1stTo2ndEnd)
9607 while(nextAlloc1stIndex < suballoc1stCount &&
9608 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9610 ++nextAlloc1stIndex;
9614 if(nextAlloc1stIndex < suballoc1stCount)
9616 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9619 if(lastOffset < suballoc.offset)
9622 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9623 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9628 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9631 lastOffset = suballoc.offset + suballoc.size;
9632 ++nextAlloc1stIndex;
9637 if(lastOffset < freeSpace1stTo2ndEnd)
9640 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9641 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9645 lastOffset = freeSpace1stTo2ndEnd;
9649 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9651 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9652 while(lastOffset < size)
9655 while(nextAlloc2ndIndex != SIZE_MAX &&
9656 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9658 --nextAlloc2ndIndex;
9662 if(nextAlloc2ndIndex != SIZE_MAX)
9664 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9667 if(lastOffset < suballoc.offset)
9670 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9671 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9676 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9679 lastOffset = suballoc.offset + suballoc.size;
9680 --nextAlloc2ndIndex;
9685 if(lastOffset < size)
9688 const VkDeviceSize unusedRangeSize = size - lastOffset;
9689 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9698 PrintDetailedMap_End(json);
// Entry point for finding space in a linear block. Validates arguments,
// then forwards to the upper-address (double-stack top) or lower-address
// variant. Returns true and fills *pAllocationRequest on success.
// NOTE(review): lossy extraction — the `upperAddress` and `strategy`
// parameters (orig. lines 9708/9711) are missing from this text but are
// referenced below; code left untouched.
9700 #endif // #if VMA_STATS_STRING_ENABLED 9702 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
9703 uint32_t currentFrameIndex,
9704 uint32_t frameInUseCount,
9705 VkDeviceSize bufferImageGranularity,
9706 VkDeviceSize allocSize,
9707 VkDeviceSize allocAlignment,
9709 VmaSuballocationType allocType,
9710 bool canMakeOtherLost,
9712 VmaAllocationRequest* pAllocationRequest)
9714 VMA_ASSERT(allocSize > 0);
9715 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9716 VMA_ASSERT(pAllocationRequest != VMA_NULL);
9717 VMA_HEAVY_ASSERT(Validate());
// Dispatch on the requested address end; both variants take identical args.
9718 return upperAddress ?
9719 CreateAllocationRequest_UpperAddress(
9720 currentFrameIndex, frameInUseCount, bufferImageGranularity,
9721 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
9722 CreateAllocationRequest_LowerAddress(
9723 currentFrameIndex, frameInUseCount, bufferImageGranularity,
9724 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
// Tries to place an allocation at the top of the block (double-stack use):
// the new suballocation goes just below the lowest existing 2nd-vector item,
// aligned downward. Fails if the block is used as a ring buffer or if the
// candidate range would collide with the end of the 1st vector.
// NOTE(review): lossy extraction — brace/return lines missing; code untouched.
9727 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
9728 uint32_t currentFrameIndex,
9729 uint32_t frameInUseCount,
9730 VkDeviceSize bufferImageGranularity,
9731 VkDeviceSize allocSize,
9732 VkDeviceSize allocAlignment,
9733 VmaSuballocationType allocType,
9734 bool canMakeOtherLost,
9736 VmaAllocationRequest* pAllocationRequest)
9738 const VkDeviceSize size = GetSize();
9739 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9740 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// The two 2nd-vector usages (ring buffer vs. double stack) are exclusive.
9742 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9744 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
9749 if(allocSize > size)
// Candidate offset: just below the current top-of-stack (or block end).
9753 VkDeviceSize resultBaseOffset = size - allocSize;
9754 if(!suballocations2nd.empty())
9756 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9757 resultBaseOffset = lastSuballoc.offset - allocSize;
9758 if(allocSize > lastSuballoc.offset)
9765 VkDeviceSize resultOffset = resultBaseOffset;
// Reserve the debug margin below the allocation, then align DOWN
// (upper-address allocations grow toward lower offsets).
9768 if(VMA_DEBUG_MARGIN > 0)
9770 if(resultOffset < VMA_DEBUG_MARGIN)
9774 resultOffset -= VMA_DEBUG_MARGIN;
9778 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Respect bufferImageGranularity against neighboring 2nd-vector items.
9782 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9784 bool bufferImageGranularityConflict =
false;
9785 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9787 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9788 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9790 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
9792 bufferImageGranularityConflict =
true;
9800 if(bufferImageGranularityConflict)
9802 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity)
9807 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
9808 suballocations1st.back().offset + suballocations1st.back().size :
// Accept only if the candidate stays above the end of the 1st vector
// (plus margin); also re-check granularity against 1st-vector neighbors.
9810 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
9814 if(bufferImageGranularity > 1)
9816 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9818 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9819 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9821 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success: no items need to be made lost for an upper-address alloc.
9835 pAllocationRequest->offset = resultOffset;
9836 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
9837 pAllocationRequest->sumItemSize = 0;
9839 pAllocationRequest->itemsToMakeLostCount = 0;
9840 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
// Tries to place an allocation at a lower address. Two strategies:
// (1) append after the end of the 1st vector (possible in EMPTY or
//     DOUBLE_STACK mode), or
// (2) append after the end of the 2nd vector, wrapping around as a ring
//     buffer (EMPTY or RING_BUFFER mode), optionally making existing
//     lost-capable allocations lost to create room.
// NOTE(review): lossy extraction — brace/return lines missing; code untouched.
9847 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
9848 uint32_t currentFrameIndex,
9849 uint32_t frameInUseCount,
9850 VkDeviceSize bufferImageGranularity,
9851 VkDeviceSize allocSize,
9852 VkDeviceSize allocAlignment,
9853 VmaSuballocationType allocType,
9854 bool canMakeOtherLost,
9856 VmaAllocationRequest* pAllocationRequest)
9858 const VkDeviceSize size = GetSize();
9859 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9860 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// --- Strategy 1: allocate at the end of the 1st vector. ---
9862 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9866 VkDeviceSize resultBaseOffset = 0;
9867 if(!suballocations1st.empty())
9869 const VmaSuballocation& lastSuballoc = suballocations1st.back();
9870 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9874 VkDeviceSize resultOffset = resultBaseOffset;
// Leading debug margin, then align UP to the requested alignment.
9877 if(VMA_DEBUG_MARGIN > 0)
9879 resultOffset += VMA_DEBUG_MARGIN;
9883 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Check bufferImageGranularity against preceding 1st-vector items.
9887 if(bufferImageGranularity > 1 && !suballocations1st.empty())
9889 bool bufferImageGranularityConflict =
false;
9890 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9892 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9893 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9895 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9897 bufferImageGranularityConflict =
true;
9905 if(bufferImageGranularityConflict)
9907 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the bottom of the 2nd stack, or at block end.
9911 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
9912 suballocations2nd.back().offset : size;
9915 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
// Also check granularity against 2nd-vector items above us.
9919 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9921 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9923 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9924 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9926 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
9940 pAllocationRequest->offset = resultOffset;
9941 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
9942 pAllocationRequest->sumItemSize = 0;
9944 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
9945 pAllocationRequest->itemsToMakeLostCount = 0;
// --- Strategy 2: wrap around — allocate at the end of the 2nd vector. ---
9952 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9954 VMA_ASSERT(!suballocations1st.empty());
9956 VkDeviceSize resultBaseOffset = 0;
9957 if(!suballocations2nd.empty())
9959 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9960 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9964 VkDeviceSize resultOffset = resultBaseOffset;
9967 if(VMA_DEBUG_MARGIN > 0)
9969 resultOffset += VMA_DEBUG_MARGIN;
9973 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
9977 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9979 bool bufferImageGranularityConflict =
false;
9980 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
9982 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
9983 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9985 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9987 bufferImageGranularityConflict =
true;
9995 if(bufferImageGranularityConflict)
9997 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
10001 pAllocationRequest->itemsToMakeLostCount = 0;
10002 pAllocationRequest->sumItemSize = 0;
10003 size_t index1st = m_1stNullItemsBeginCount;
// Optionally make overlapping, lost-capable 1st-vector items lost to
// free up the range [resultOffset, resultOffset + allocSize).
10005 if(canMakeOtherLost)
10007 while(index1st < suballocations1st.size() &&
10008 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
10011 const VmaSuballocation& suballoc = suballocations1st[index1st];
10012 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
10018 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
// Only allocations past their frame-in-use window can be lost.
10019 if(suballoc.hAllocation->CanBecomeLost() &&
10020 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10022 ++pAllocationRequest->itemsToMakeLostCount;
10023 pAllocationRequest->sumItemSize += suballoc.size;
// Granularity may force losing additional items on the same page.
10035 if(bufferImageGranularity > 1)
10037 while(index1st < suballocations1st.size())
10039 const VmaSuballocation& suballoc = suballocations1st[index1st];
10040 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
10042 if(suballoc.hAllocation != VK_NULL_HANDLE)
10045 if(suballoc.hAllocation->CanBecomeLost() &&
10046 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10048 ++pAllocationRequest->itemsToMakeLostCount;
10049 pAllocationRequest->sumItemSize += suballoc.size;
// Special unsupported case: request would extend past block end after
// consuming the whole 1st vector.
10067 if(index1st == suballocations1st.size() &&
10068 resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
10071 VMA_DEBUG_LOG(
"Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
// Accept if the range fits before the block end or the next surviving item.
10076 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
10077 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
10081 if(bufferImageGranularity > 1)
10083 for(
size_t nextSuballocIndex = index1st;
10084 nextSuballocIndex < suballocations1st.size();
10085 nextSuballocIndex++)
10087 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
10088 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10090 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
10104 pAllocationRequest->offset = resultOffset;
10105 pAllocationRequest->sumFreeSize =
10106 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
10108 - pAllocationRequest->sumItemSize;
10109 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
// Makes lost the allocations counted in pAllocationRequest->itemsToMakeLostCount,
// walking the 1st vector from its first live item and continuing into the 2nd
// vector in ring-buffer mode. Frees their space and runs CleanupAfterFree().
// NOTE(review): lossy extraction — brace/return lines missing; code untouched.
10118 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
10119 uint32_t currentFrameIndex,
10120 uint32_t frameInUseCount,
10121 VmaAllocationRequest* pAllocationRequest)
// Nothing to lose — trivially succeed.
10123 if(pAllocationRequest->itemsToMakeLostCount == 0)
// Losing items is only meaningful for ring-buffer-style usage.
10128 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
10131 SuballocationVectorType* suballocations = &AccessSuballocations1st();
10132 size_t index = m_1stNullItemsBeginCount;
10133 size_t madeLostCount = 0;
10134 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
// When the 1st vector is exhausted, continue in the 2nd (ring wrap).
10136 if(index == suballocations->size())
10140 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10142 suballocations = &AccessSuballocations2nd();
10146 VMA_ASSERT(!suballocations->empty());
10148 VmaSuballocation& suballoc = (*suballocations)[index];
10149 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10151 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
10152 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
10153 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Convert the item to a free (null) slot and update bookkeeping.
10155 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10156 suballoc.hAllocation = VK_NULL_HANDLE;
10157 m_SumFreeSize += suballoc.size;
10158 if(suballocations == &AccessSuballocations1st())
10160 ++m_1stNullItemsMiddleCount;
10164 ++m_2ndNullItemsCount;
10176 CleanupAfterFree();
// Makes lost every allocation (in both vectors) that is lost-capable and
// whose last-use frame is stale per frameInUseCount. Returns how many were
// lost; runs CleanupAfterFree() if any were.
// NOTE(review): lossy extraction — brace lines missing; code untouched.
10182 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
10184 uint32_t lostAllocationCount = 0;
// Sweep the 1st vector starting at its first live item.
10186 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10187 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10189 VmaSuballocation& suballoc = suballocations1st[i];
10190 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10191 suballoc.hAllocation->CanBecomeLost() &&
10192 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10194 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10195 suballoc.hAllocation = VK_NULL_HANDLE;
10196 ++m_1stNullItemsMiddleCount;
10197 m_SumFreeSize += suballoc.size;
10198 ++lostAllocationCount;
// Same sweep over the 2nd vector.
10202 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10203 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10205 VmaSuballocation& suballoc = suballocations2nd[i];
10206 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10207 suballoc.hAllocation->CanBecomeLost() &&
10208 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10210 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10211 suballoc.hAllocation = VK_NULL_HANDLE;
10212 ++m_2ndNullItemsCount;
10213 m_SumFreeSize += suballoc.size;
10214 ++lostAllocationCount;
10218 if(lostAllocationCount)
10220 CleanupAfterFree();
10223 return lostAllocationCount;
// Validates the debug magic values written VMA_DEBUG_MARGIN bytes before and
// immediately after every live allocation in both vectors. Returns
// VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted guard found.
// NOTE(review): lossy extraction — brace/return lines missing; code untouched.
10226 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
10228 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10229 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10231 const VmaSuballocation& suballoc = suballocations1st[i];
10232 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Guard before the allocation.
10234 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10236 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10237 return VK_ERROR_VALIDATION_FAILED_EXT;
// Guard after the allocation.
10239 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10241 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10242 return VK_ERROR_VALIDATION_FAILED_EXT;
// Repeat for the 2nd vector.
10247 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10248 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10250 const VmaSuballocation& suballoc = suballocations2nd[i];
10251 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10253 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10255 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10256 return VK_ERROR_VALIDATION_FAILED_EXT;
10258 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10260 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10261 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request: appends the new
// suballocation to the appropriate vector depending on request.type and
// switches m_2ndVectorMode when first using the 2nd vector.
// NOTE(review): lossy extraction — the hAllocation parameter line (orig.
// 10273) and brace/break lines are missing; code left untouched.
10269 void VmaBlockMetadata_Linear::Alloc(
10270 const VmaAllocationRequest& request,
10271 VmaSuballocationType type,
10272 VkDeviceSize allocSize,
10275 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
10277 switch(request.type)
// Top-of-block allocation: push onto 2nd vector as a double stack.
10279 case VmaAllocationRequestType::UpperAddress:
10281 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
10282 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
10283 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10284 suballocations2nd.push_back(newSuballoc);
10285 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
// Append past the last 1st-vector item.
10288 case VmaAllocationRequestType::EndOf1st:
10290 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10292 VMA_ASSERT(suballocations1st.empty() ||
10293 request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
10295 VMA_ASSERT(request.offset + allocSize <= GetSize());
10297 suballocations1st.push_back(newSuballoc);
// Ring-buffer wrap-around: append to 2nd vector, below the 1st's start.
10300 case VmaAllocationRequestType::EndOf2nd:
10302 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10304 VMA_ASSERT(!suballocations1st.empty() &&
10305 request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
10306 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10308 switch(m_2ndVectorMode)
10310 case SECOND_VECTOR_EMPTY:
10312 VMA_ASSERT(suballocations2nd.empty());
10313 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
10315 case SECOND_VECTOR_RING_BUFFER:
10317 VMA_ASSERT(!suballocations2nd.empty());
10319 case SECOND_VECTOR_DOUBLE_STACK:
10320 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
10326 suballocations2nd.push_back(newSuballoc);
10330 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
10333 m_SumFreeSize -= newSuballoc.size;
// Frees an allocation by delegating to FreeAtOffset with its offset.
10336 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
10338 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation at the given offset. Fast paths handle the first
// live item of the 1st vector and the last item of either vector; otherwise
// a binary search by offset locates the item in the middle. Asserts if the
// offset matches no live allocation.
// NOTE(review): lossy extraction — brace/return lines missing; code untouched.
10341 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
10343 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10344 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Fast path: freeing the oldest live item of the 1st vector.
10346 if(!suballocations1st.empty())
10349 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10350 if(firstSuballoc.offset == offset)
10352 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10353 firstSuballoc.hAllocation = VK_NULL_HANDLE;
10354 m_SumFreeSize += firstSuballoc.size;
10355 ++m_1stNullItemsBeginCount;
10356 CleanupAfterFree();
// Fast path: freeing the newest item of the 2nd vector.
10362 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
10363 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10365 VmaSuballocation& lastSuballoc = suballocations2nd.back();
10366 if(lastSuballoc.offset == offset)
10368 m_SumFreeSize += lastSuballoc.size;
10369 suballocations2nd.pop_back();
10370 CleanupAfterFree();
// Fast path: freeing the newest item of the 1st vector.
10375 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
10377 VmaSuballocation& lastSuballoc = suballocations1st.back();
10378 if(lastSuballoc.offset == offset)
10380 m_SumFreeSize += lastSuballoc.size;
10381 suballocations1st.pop_back();
10382 CleanupAfterFree();
// Slow path: binary search by offset among live 1st-vector items.
10389 VmaSuballocation refSuballoc;
10390 refSuballoc.offset = offset;
10392 SuballocationVectorType::iterator it = VmaBinaryFindSorted(
10393 suballocations1st.begin() + m_1stNullItemsBeginCount,
10394 suballocations1st.end(),
10396 VmaSuballocationOffsetLess());
10397 if(it != suballocations1st.end())
10399 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10400 it->hAllocation = VK_NULL_HANDLE;
10401 ++m_1stNullItemsMiddleCount;
10402 m_SumFreeSize += it->size;
10403 CleanupAfterFree();
// Slow path in the 2nd vector: order depends on mode (ring buffer stores
// ascending offsets, double stack stores descending).
10408 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
10411 VmaSuballocation refSuballoc;
10412 refSuballoc.offset = offset;
10414 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
10415 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
10416 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
10417 if(it != suballocations2nd.end())
10419 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10420 it->hAllocation = VK_NULL_HANDLE;
10421 ++m_2ndNullItemsCount;
10422 m_SumFreeSize += it->size;
10423 CleanupAfterFree();
10428 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
// Heuristic: compact the 1st vector when it has more than 32 items and null
// (freed) items are at least ~60% of it (nullCount*2 >= liveCount*3... i.e.
// null/live ratio >= 3/2 per the exact expression below).
10431 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 10433 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10434 const size_t suballocCount = AccessSuballocations1st().size();
10435 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Housekeeping after any free: trims null items from the edges of both
// vectors, optionally compacts the 1st vector, and when the 1st vector
// becomes empty in ring-buffer mode, swaps the 2nd vector in as the new 1st
// (m_1stVectorIndex ^= 1).
// NOTE(review): lossy extraction — brace lines and the IsEmpty() guard around
// the initial clear (orig. ~10442) appear missing; code left untouched.
10438 void VmaBlockMetadata_Linear::CleanupAfterFree()
10440 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10441 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Block fully free: reset everything to the empty state.
10445 suballocations1st.clear();
10446 suballocations2nd.clear();
10447 m_1stNullItemsBeginCount = 0;
10448 m_1stNullItemsMiddleCount = 0;
10449 m_2ndNullItemsCount = 0;
10450 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10454 const size_t suballoc1stCount = suballocations1st.size();
10455 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10456 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Promote leading middle-nulls of the 1st vector into the begin-null run.
10459 while(m_1stNullItemsBeginCount < suballoc1stCount &&
10460 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10462 ++m_1stNullItemsBeginCount;
10463 --m_1stNullItemsMiddleCount;
// Pop trailing nulls off the back of the 1st vector.
10467 while(m_1stNullItemsMiddleCount > 0 &&
10468 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
10470 --m_1stNullItemsMiddleCount;
10471 suballocations1st.pop_back();
// Pop trailing nulls off the back of the 2nd vector.
10475 while(m_2ndNullItemsCount > 0 &&
10476 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
10478 --m_2ndNullItemsCount;
10479 suballocations2nd.pop_back();
// Remove leading nulls from the front of the 2nd vector.
10483 while(m_2ndNullItemsCount > 0 &&
10484 suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
10486 --m_2ndNullItemsCount;
10487 VmaVectorRemove(suballocations2nd, 0);
// Compact 1st vector in place when the null ratio is too high.
10490 if(ShouldCompact1st())
10492 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
10493 size_t srcIndex = m_1stNullItemsBeginCount;
10494 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
10496 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
10500 if(dstIndex != srcIndex)
10502 suballocations1st[dstIndex] = suballocations1st[srcIndex];
10506 suballocations1st.resize(nonNullItemCount);
10507 m_1stNullItemsBeginCount = 0;
10508 m_1stNullItemsMiddleCount = 0;
10512 if(suballocations2nd.empty())
10514 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector fully consumed: clear it, and in ring-buffer mode make the
// 2nd vector the new 1st by flipping the vector index.
10518 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
10520 suballocations1st.clear();
10521 m_1stNullItemsBeginCount = 0;
10523 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10526 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10527 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
10528 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
10529 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10531 ++m_1stNullItemsBeginCount;
10532 --m_1stNullItemsMiddleCount;
10534 m_2ndNullItemsCount = 0;
10535 m_1stVectorIndex ^= 1;
10540 VMA_HEAVY_ASSERT(Validate());
// Constructor: zero-initializes counters and the per-level free lists.
// NOTE(review): lossy extraction — some member initializers (orig. lines
// 10549-10553) are missing from this text; code left untouched.
10547 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
10548 VmaBlockMetadata(hAllocator),
10550 m_AllocationCount(0),
// Clear all MAX_LEVELS free-list heads/tails in one go.
10554 memset(m_FreeList, 0,
sizeof(m_FreeList));
// Destructor: recursively deletes the whole buddy tree from the root.
10557 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
10559 DeleteNode(m_Root);
// Initializes the buddy allocator for a block of `size` bytes. Usable size
// is rounded down to a power of two; the level count is derived from
// MIN_NODE_SIZE; a single free root node spans the whole usable range.
// NOTE(review): lossy extraction — brace lines and some statements (e.g. the
// root-node assignment to m_Root, orig. ~10583) are missing; code untouched.
10562 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
10564 VmaBlockMetadata::Init(size);
// Buddy allocation requires a power-of-two arena.
10566 m_UsableSize = VmaPrevPow2(size);
10567 m_SumFreeSize = m_UsableSize;
10571 while(m_LevelCount < MAX_LEVELS &&
10572 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
// Root node: level 0, offset 0, no parent/buddy, initially free.
10577 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
10578 rootNode->offset = 0;
10579 rootNode->type = Node::TYPE_FREE;
10580 rootNode->parent = VMA_NULL;
10581 rootNode->buddy = VMA_NULL;
10584 AddToFreeListFront(0, rootNode);
// Debug validation: recursively checks the node tree, then verifies the
// free-list invariants per level (doubly-linked consistency, all nodes
// free, back pointer correct) and that levels beyond m_LevelCount are empty.
// NOTE(review): lossy extraction — brace/return lines missing; code untouched.
10587 bool VmaBlockMetadata_Buddy::Validate()
const 10590 ValidationContext ctx;
10591 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
10593 VMA_VALIDATE(
false &&
"ValidateNode failed.");
// Tree-derived totals must match the cached counters.
10595 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
10596 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
10599 for(uint32_t level = 0; level < m_LevelCount; ++level)
10601 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
10602 m_FreeList[level].front->free.prev == VMA_NULL);
10604 for(Node* node = m_FreeList[level].front;
10606 node = node->free.next)
10608 VMA_VALIDATE(node->type == Node::TYPE_FREE);
10610 if(node->free.next == VMA_NULL)
10612 VMA_VALIDATE(m_FreeList[level].back == node);
10616 VMA_VALIDATE(node->free.next->free.prev == node);
// Levels past the configured depth must hold no free nodes.
10622 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
10624 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
// Largest free contiguous range = node size of the shallowest level whose
// free list is non-empty (levels are ordered largest-to-smallest).
10630 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const 10632 for(uint32_t level = 0; level < m_LevelCount; ++level)
10634 if(m_FreeList[level].front != VMA_NULL)
10636 return LevelToNodeSize(level);
// Fills outInfo by walking the node tree; the tail-of-block bytes beyond
// m_UsableSize (GetUnusableSize()) are accounted separately.
// NOTE(review): lossy extraction — the outInfo field initialization and the
// unusable-size accounting statements are missing here; code left untouched.
10642 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 10644 const VkDeviceSize unusableSize = GetUnusableSize();
10655 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
10657 if(unusableSize > 0)
// Accumulates this block's totals into inoutStats. The unusable tail (bytes
// past the power-of-two usable size) is counted as unused.
// NOTE(review): lossy extraction — allocation/unused-range count updates and
// the unusable-size branch body are missing here; code left untouched.
10666 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const 10668 const VkDeviceSize unusableSize = GetUnusableSize();
10670 inoutStats.
size += GetSize();
10671 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
10676 if(unusableSize > 0)
// JSON dump for a buddy block: computes stats, emits the header, recursively
// prints each node, then reports the unusable tail as an unused range.
// NOTE(review): lossy extraction — the VmaStatInfo declaration and the
// argument lists of _Begin/_UnusedRange are partially missing; code untouched.
10683 #if VMA_STATS_STRING_ENABLED 10685 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const 10689 CalcAllocationStatInfo(stat);
10691 PrintDetailedMap_Begin(
10697 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
10699 const VkDeviceSize unusableSize = GetUnusableSize();
10700 if(unusableSize > 0)
10702 PrintDetailedMap_UnusedRange(json,
10707 PrintDetailedMap_End(json);
// Finds a free buddy node for the request. Unknown/optimal-image types are
// padded to bufferImageGranularity so neighbors can never conflict. Searches
// from the target level upward (larger nodes) for a suitably aligned free
// node; the chosen level is stashed in customData for Alloc() to split.
// NOTE(review): lossy extraction — the `upperAddress`/`strategy` parameters
// and brace/return lines are missing from this text; code left untouched.
10710 #endif // #if VMA_STATS_STRING_ENABLED 10712 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
10713 uint32_t currentFrameIndex,
10714 uint32_t frameInUseCount,
10715 VkDeviceSize bufferImageGranularity,
10716 VkDeviceSize allocSize,
10717 VkDeviceSize allocAlignment,
10719 VmaSuballocationType allocType,
10720 bool canMakeOtherLost,
10722 VmaAllocationRequest* pAllocationRequest)
10724 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Pad unknown/optimal types so granularity conflicts are impossible.
10728 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
10729 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
10730 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
10732 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
10733 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
10736 if(allocSize > m_UsableSize)
// Scan from target level up toward the root for an aligned free node.
10741 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10742 for(uint32_t level = targetLevel + 1; level--; )
10744 for(Node* freeNode = m_FreeList[level].front;
10745 freeNode != VMA_NULL;
10746 freeNode = freeNode->free.next)
10748 if(freeNode->offset % allocAlignment == 0)
10750 pAllocationRequest->type = VmaAllocationRequestType::Normal;
10751 pAllocationRequest->offset = freeNode->offset;
10752 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
10753 pAllocationRequest->sumItemSize = 0;
10754 pAllocationRequest->itemsToMakeLostCount = 0;
// Remember which level the node was found at; Alloc() splits it down.
10755 pAllocationRequest->customData = (
void*)(uintptr_t)level;
// The buddy algorithm does not support lost allocations: succeed only when
// the request needs nothing made lost.
10764 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
10765 uint32_t currentFrameIndex,
10766 uint32_t frameInUseCount,
10767 VmaAllocationRequest* pAllocationRequest)
10773 return pAllocationRequest->itemsToMakeLostCount == 0;
// No-op for the buddy algorithm (lost allocations unsupported).
// NOTE(review): lossy extraction — the `return 0;` body is missing here.
10776 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// Commits a buddy allocation: locates the free node chosen by
// CreateAllocationRequest (level from request.customData, matched by offset),
// splits it repeatedly until it reaches the target level, then marks the
// final node as an allocation and updates counters.
// NOTE(review): lossy extraction — the hAllocation parameter and brace lines
// are missing from this text; code left untouched.
10785 void VmaBlockMetadata_Buddy::Alloc(
10786 const VmaAllocationRequest& request,
10787 VmaSuballocationType type,
10788 VkDeviceSize allocSize,
10791 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
10793 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10794 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Find the free node at currLevel with the requested offset.
10796 Node* currNode = m_FreeList[currLevel].front;
10797 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
10798 while(currNode->offset != request.offset)
10800 currNode = currNode->free.next;
10801 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split the node level by level until it is the right size.
10805 while(currLevel < targetLevel)
10809 RemoveFromFreeList(currLevel, currNode);
10811 const uint32_t childrenLevel = currLevel + 1;
// Create the two buddy children covering the halves of currNode.
10814 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
10815 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
10817 leftChild->offset = currNode->offset;
10818 leftChild->type = Node::TYPE_FREE;
10819 leftChild->parent = currNode;
10820 leftChild->buddy = rightChild;
10822 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
10823 rightChild->type = Node::TYPE_FREE;
10824 rightChild->parent = currNode;
10825 rightChild->buddy = leftChild;
// Parent becomes a split node; children go to the next level's free list
// (left pushed last so it is taken first — keeps allocation at request.offset).
10828 currNode->type = Node::TYPE_SPLIT;
10829 currNode->split.leftChild = leftChild;
10832 AddToFreeListFront(childrenLevel, rightChild);
10833 AddToFreeListFront(childrenLevel, leftChild);
10838 currNode = m_FreeList[currLevel].front;
10847 VMA_ASSERT(currLevel == targetLevel &&
10848 currNode != VMA_NULL &&
10849 currNode->type == Node::TYPE_FREE);
10850 RemoveFromFreeList(currLevel, currNode);
// Convert the node into an allocation and update cached totals.
10853 currNode->type = Node::TYPE_ALLOCATION;
10854 currNode->allocation.alloc = hAllocation;
10856 ++m_AllocationCount;
10858 m_SumFreeSize -= allocSize;
// Recursively deletes a node and, for split nodes, both children
// (the right child is reached via leftChild->buddy).
10861 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
10863 if(node->type == Node::TYPE_SPLIT)
10865 DeleteNode(node->split.leftChild->buddy);
10866 DeleteNode(node->split.leftChild);
10869 vma_delete(GetAllocationCallbacks(), node);
// Recursive node validation: checks parent/buddy links, then per node type
// accumulates free/allocation counts into ctx and, for split nodes, recurses
// into both children with halved node size.
// NOTE(review): lossy extraction — the switch(curr->type) line and brace/
// return lines are missing from this text; code left untouched.
10872 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const 10874 VMA_VALIDATE(level < m_LevelCount);
10875 VMA_VALIDATE(curr->parent == parent);
// Only the root (no parent) has no buddy; buddies must be mutual.
10876 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
10877 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
10880 case Node::TYPE_FREE:
10882 ctx.calculatedSumFreeSize += levelNodeSize;
10883 ++ctx.calculatedFreeCount;
10885 case Node::TYPE_ALLOCATION:
10886 ++ctx.calculatedAllocationCount;
// Slack inside an over-sized node still counts as free.
10887 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
10888 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
10890 case Node::TYPE_SPLIT:
10892 const uint32_t childrenLevel = level + 1;
10893 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
10894 const Node*
const leftChild = curr->split.leftChild;
10895 VMA_VALIDATE(leftChild != VMA_NULL);
10896 VMA_VALIDATE(leftChild->offset == curr->offset);
10897 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
10899 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
10901 const Node*
const rightChild = leftChild->buddy;
10902 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
10903 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
10905 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
// Maps an allocation size to the deepest buddy level whose node size still
// fits it: descends (halving node size) while the next smaller level would
// still hold allocSize and a deeper level exists.
// NOTE(review): the loop body's level increment and the final `return level;`
// were lost in extraction; tokens kept verbatim.
10916 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const 10919 uint32_t level = 0;
10920 VkDeviceSize currLevelNodeSize = m_UsableSize;
10921 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
10922 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
10925 currLevelNodeSize = nextLevelNodeSize;
10926 nextLevelNodeSize = currLevelNodeSize >> 1;
// Frees the allocation at the given offset: walks the buddy tree from the
// root choosing left/right half by offset, marks the leaf FREE, then merges
// the node with its buddy upward while both halves are free, finally
// re-inserting the surviving node into the free list of its level.
10931 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
10934 Node* node = m_Root;
10935 VkDeviceSize nodeOffset = 0;
10936 uint32_t level = 0;
10937 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
// Descend through SPLIT nodes toward the leaf containing `offset`.
10938 while(node->type == Node::TYPE_SPLIT)
10940 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
10941 if(offset < nodeOffset + nextLevelSize)
10943 node = node->split.leftChild;
10947 node = node->split.leftChild->buddy;
10948 nodeOffset += nextLevelSize;
10951 levelNodeSize = nextLevelSize;
// NOTE(review): the `++level;` updates in the descent/merge loops were lost
// in extraction; tokens kept verbatim.
10954 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
10955 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
10958 --m_AllocationCount;
10959 m_SumFreeSize += alloc->GetSize();
10961 node->type = Node::TYPE_FREE;
// Merge with buddy while the sibling is also free: delete both children and
// turn the parent back into a FREE node.
10964 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
10966 RemoveFromFreeList(level, node->buddy);
10967 Node*
const parent = node->parent;
10969 vma_delete(GetAllocationCallbacks(), node->buddy);
10970 vma_delete(GetAllocationCallbacks(), node);
10971 parent->type = Node::TYPE_FREE;
10979 AddToFreeListFront(level, node);
// Accumulates statistics (allocation sizes, unused ranges) for one buddy-tree
// node into outInfo, recursing into both children of a SPLIT node.
// NOTE(review): the switch statement, braces, and the statements that update
// outInfo for the FREE/ALLOCATION cases were partly lost in extraction;
// tokens kept verbatim.
10982 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const 10986 case Node::TYPE_FREE:
10992 case Node::TYPE_ALLOCATION:
10994 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Internal fragmentation of this leaf counts as an unused range.
11000 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
11001 if(unusedRangeSize > 0)
11010 case Node::TYPE_SPLIT:
11012 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11013 const Node*
const leftChild = node->split.leftChild;
11014 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
11015 const Node*
const rightChild = leftChild->buddy;
11016 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
// Pushes a FREE node onto the front of the doubly-linked free list for the
// given level, handling the empty-list case (front == back == node).
11024 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
11026 VMA_ASSERT(node->type == Node::TYPE_FREE);
11029 Node*
const frontNode = m_FreeList[level].front;
11030 if(frontNode == VMA_NULL)
// Empty list: node becomes both front and back.
11032 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
11033 node->free.prev = node->free.next = VMA_NULL;
11034 m_FreeList[level].front = m_FreeList[level].back = node;
// Non-empty list: splice node in before the current front.
11038 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
11039 node->free.prev = VMA_NULL;
11040 node->free.next = frontNode;
11041 frontNode->free.prev = node;
11042 m_FreeList[level].front = node;
// Unlinks a node from the doubly-linked free list of the given level,
// fixing up front/back sentinels and neighbor links as needed.
11046 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
11048 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
// Detach on the prev side: node is either the list front or has a
// predecessor whose next pointer must be rewired.
11051 if(node->free.prev == VMA_NULL)
11053 VMA_ASSERT(m_FreeList[level].front == node);
11054 m_FreeList[level].front = node->free.next;
11058 Node*
const prevFreeNode = node->free.prev;
11059 VMA_ASSERT(prevFreeNode->free.next == node);
11060 prevFreeNode->free.next = node->free.next;
// Detach on the next side: node is either the list back or has a successor
// whose prev pointer must be rewired.
11064 if(node->free.next == VMA_NULL)
11066 VMA_ASSERT(m_FreeList[level].back == node);
11067 m_FreeList[level].back = node->free.prev;
11071 Node*
const nextFreeNode = node->free.next;
11072 VMA_ASSERT(nextFreeNode->free.prev == node);
11073 nextFreeNode->free.prev = node->free.prev;
// Emits JSON entries for one buddy-tree node: an unused range for FREE nodes,
// an allocation (plus any trailing internal-fragmentation range) for
// ALLOCATION nodes, and recursion into both halves for SPLIT nodes.
// Only compiled when VMA_STATS_STRING_ENABLED.
// NOTE(review): the switch statement and braces were lost in extraction.
11077 #if VMA_STATS_STRING_ENABLED 11078 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const 11082 case Node::TYPE_FREE:
11083 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
11085 case Node::TYPE_ALLOCATION:
11087 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
11088 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Report the padding after the allocation within its buddy node, if any.
11089 if(allocSize < levelNodeSize)
11091 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
11095 case Node::TYPE_SPLIT:
11097 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11098 const Node*
const leftChild = node->split.leftChild;
11099 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
11100 const Node*
const rightChild = leftChild->buddy;
11101 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
// Constructor: leaves the block in an uninitialized state (no metadata, no
// VkDeviceMemory, not mapped). Real setup happens in Init().
11108 #endif // #if VMA_STATS_STRING_ENABLED 11114 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
11115 m_pMetadata(VMA_NULL),
11116 m_MemoryTypeIndex(UINT32_MAX),
11118 m_hMemory(VK_NULL_HANDLE),
11120 m_pMappedData(VMA_NULL)
// Initializes the block with an already-allocated VkDeviceMemory and creates
// the metadata object matching the requested algorithm (linear, buddy, or the
// default generic metadata). Must be called exactly once on a fresh block.
// NOTE(review): the switch/if dispatching on `algorithm` was lost in
// extraction; only the three metadata-construction lines remain.
11124 void VmaDeviceMemoryBlock::Init(
11127 uint32_t newMemoryTypeIndex,
11128 VkDeviceMemory newMemory,
11129 VkDeviceSize newSize,
11131 uint32_t algorithm)
11133 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
11135 m_hParentPool = hParentPool;
11136 m_MemoryTypeIndex = newMemoryTypeIndex;
11138 m_hMemory = newMemory;
11143 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
11146 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
11152 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
11154 m_pMetadata->Init(newSize);
// Releases the block's VkDeviceMemory back to Vulkan and deletes the metadata
// object. Asserts the block is empty — destroying a block with live
// allocations is a caller bug.
11157 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
11161 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
11163 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
11164 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
11165 m_hMemory = VK_NULL_HANDLE;
11167 vma_delete(allocator, m_pMetadata);
11168 m_pMetadata = VMA_NULL;
// Sanity-checks the block (memory handle present, non-zero size) and then
// delegates to the metadata's own Validate().
11171 bool VmaDeviceMemoryBlock::Validate()
const 11173 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
11174 (m_pMetadata->GetSize() != 0));
11176 return m_pMetadata->Validate();
// Temporarily maps the block, asks the metadata to verify the magic-value
// margins around all allocations, then unmaps. Returns the Map() error if
// mapping fails, otherwise the metadata's corruption-check result.
11179 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
11181 void* pData =
nullptr;
11182 VkResult res = Map(hAllocator, 1, &pData);
11183 if(res != VK_SUCCESS)
11188 res = m_pMetadata->CheckCorruption(pData);
11190 Unmap(hAllocator, 1);
// Reference-counted mapping of the block's memory. If already mapped, just
// bumps m_MapCount and returns the cached pointer; otherwise calls
// vkMapMemory and caches the result. ppData may be null when the caller only
// wants the ref-count effect. Guarded by the block's mutex (when enabled).
// NOTE(review): the vkMapMemory argument list (lines 11217-11221 of the
// original) was lost in extraction; tokens kept verbatim.
11195 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
11202 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11203 if(m_MapCount != 0)
11205 m_MapCount += count;
11206 VMA_ASSERT(m_pMappedData != VMA_NULL);
11207 if(ppData != VMA_NULL)
11209 *ppData = m_pMappedData;
11215 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
11216 hAllocator->m_hDevice,
11222 if(result == VK_SUCCESS)
11224 if(ppData != VMA_NULL)
11226 *ppData = m_pMappedData;
11228 m_MapCount = count;
// Reference-counted unmap: decrements m_MapCount by `count` and calls
// vkUnmapMemory only when the count reaches zero. Asserts (debug builds) on
// an unbalanced unmap. Guarded by the block's mutex (when enabled).
11234 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
11241 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11242 if(m_MapCount >= count)
11244 m_MapCount -= count;
11245 if(m_MapCount == 0)
11247 m_pMappedData = VMA_NULL;
11248 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
11253 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Writes the corruption-detection magic value into the debug margins directly
// before and after an allocation. Requires VMA_DEBUG_MARGIN > 0 (multiple of
// 4) and corruption detection enabled. Maps/unmaps the block around the write.
11257 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11259 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11260 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
// NOTE(review): the `void* pData;` declaration line was lost in extraction.
11263 VkResult res = Map(hAllocator, 1, &pData);
11264 if(res != VK_SUCCESS)
11269 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
11270 VmaWriteMagicValue(pData, allocOffset + allocSize);
11272 Unmap(hAllocator, 1);
// Checks the magic values in the margins before and after a freed allocation
// and asserts with a corruption diagnostic if either was overwritten.
// Maps/unmaps the block around the check.
11277 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11279 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11280 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
// NOTE(review): the `void* pData;` declaration line was lost in extraction.
11283 VkResult res = Map(hAllocator, 1, &pData);
11284 if(res != VK_SUCCESS)
11289 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
11291 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
11293 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
11295 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
11298 Unmap(hAllocator, 1);
// Binds a VkBuffer to this block's memory at the allocation's offset plus a
// caller-supplied offset local to the allocation. The bind is serialized with
// Map/Unmap via the block mutex because vkMapMemory and vkBindBufferMemory
// on the same VkDeviceMemory must not race.
11303 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
11306 VkDeviceSize allocationLocalOffset,
11310 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11311 hAllocation->GetBlock() ==
this);
11312 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
11313 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
11314 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
11316 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11317 return hAllocator->BindVulkanBuffer(m_hMemory, memoryOffset, hBuffer, pNext);
// Image counterpart of BindBufferMemory: binds a VkImage at the allocation's
// offset plus allocationLocalOffset, under the block mutex.
11320 VkResult VmaDeviceMemoryBlock::BindImageMemory(
11323 VkDeviceSize allocationLocalOffset,
11327 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11328 hAllocation->GetBlock() ==
this);
11329 VMA_ASSERT(allocationLocalOffset < hAllocation->GetSize() &&
11330 "Invalid allocationLocalOffset. Did you forget that this offset is relative to the beginning of the allocation, not the whole memory block?");
11331 const VkDeviceSize memoryOffset = hAllocation->GetOffset() + allocationLocalOffset;
11333 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11334 return hAllocator->BindVulkanImage(m_hMemory, memoryOffset, hImage, pNext);
// NOTE(review): fragments of two stat-info helpers. The memset presumably
// belongs to an init function that zeroes a VmaStatInfo (its signature was
// lost in extraction), and VmaPostprocessCalcStatInfo's body (averaging
// computations, per the original library) is likewise missing — confirm
// against the upstream source.
11339 memset(&outInfo, 0,
sizeof(outInfo));
11358 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool_T constructor: forwards pool creation parameters to the embedded
// VmaBlockVector. A zero createInfo.blockSize selects the allocator's
// preferred block size, and `blockSize != 0` doubles as the
// explicit-block-size flag passed to the block vector.
11366 VmaPool_T::VmaPool_T(
11369 VkDeviceSize preferredBlockSize) :
11373 createInfo.memoryTypeIndex,
11374 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
11375 createInfo.minBlockCount,
11376 createInfo.maxBlockCount,
11378 createInfo.frameInUseCount,
11380 createInfo.blockSize != 0,
// Destructor body was lost in extraction (empty in the original per brace
// elision pattern) — confirm against upstream.
11386 VmaPool_T::~VmaPool_T()
// VmaBlockVector constructor: stores configuration (memory type, block size
// policy, min/max block counts, granularity, frame-in-use count, algorithm)
// and initializes the block list with the allocator's allocation callbacks.
// No VkDeviceMemory is allocated here — see CreateMinBlocks()/CreateBlock().
11390 #if VMA_STATS_STRING_ENABLED 11392 #endif // #if VMA_STATS_STRING_ENABLED 11394 VmaBlockVector::VmaBlockVector(
11397 uint32_t memoryTypeIndex,
11398 VkDeviceSize preferredBlockSize,
11399 size_t minBlockCount,
11400 size_t maxBlockCount,
11401 VkDeviceSize bufferImageGranularity,
11402 uint32_t frameInUseCount,
11404 bool explicitBlockSize,
11405 uint32_t algorithm) :
11406 m_hAllocator(hAllocator),
11407 m_hParentPool(hParentPool),
11408 m_MemoryTypeIndex(memoryTypeIndex),
11409 m_PreferredBlockSize(preferredBlockSize),
11410 m_MinBlockCount(minBlockCount),
11411 m_MaxBlockCount(maxBlockCount),
11412 m_BufferImageGranularity(bufferImageGranularity),
11413 m_FrameInUseCount(frameInUseCount),
11414 m_IsCustomPool(isCustomPool),
11415 m_ExplicitBlockSize(explicitBlockSize),
11416 m_Algorithm(algorithm),
11417 m_HasEmptyBlock(false),
11418 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
// Destructor: destroys every remaining block (releasing its VkDeviceMemory)
// and deletes it, iterating backwards to avoid shifting elements.
11423 VmaBlockVector::~VmaBlockVector()
11425 for(
size_t i = m_Blocks.size(); i--; )
11427 m_Blocks[i]->Destroy(m_hAllocator);
11428 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount blocks of the preferred size, stopping at the
// first failure. NOTE(review): the `return res;` / final `return VK_SUCCESS;`
// lines were lost in extraction.
11432 VkResult VmaBlockVector::CreateMinBlocks()
11434 for(
size_t i = 0; i < m_MinBlockCount; ++i)
11436 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
11437 if(res != VK_SUCCESS)
// Aggregates statistics from every block's metadata into *pStats, under a
// shared (read) lock. NOTE(review): the lines zeroing/initializing *pStats
// before the loop were lost in extraction.
11445 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
11447 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
11449 const size_t blockCount = m_Blocks.size();
11458 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11460 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11461 VMA_ASSERT(pBlock);
11462 VMA_HEAVY_ASSERT(pBlock->Validate());
11463 pBlock->m_pMetadata->AddPoolStats(*pStats);
// Corruption detection requires the debug macros to be enabled AND the memory
// type to be both HOST_VISIBLE and HOST_COHERENT, since the magic margins are
// written/read through a CPU mapping without explicit flush/invalidate.
11467 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 11469 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
11470 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
11471 (VMA_DEBUG_MARGIN > 0) &&
11473 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
// Upper bound on retry attempts when allocating by making other allocations
// lost (see AllocatePage's canMakeOtherLost path).
11476 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates `allocationCount` pages under one write lock, delegating each to
// AllocatePage. On any failure, frees the pages allocated so far and zeroes
// the output array so the caller never sees partial results.
11478 VkResult VmaBlockVector::Allocate(
11479 uint32_t currentFrameIndex,
11481 VkDeviceSize alignment,
11483 VmaSuballocationType suballocType,
11484 size_t allocationCount,
11488 VkResult res = VK_SUCCESS;
// With corruption detection active, round size/alignment up to the magic
// value granularity so margins stay aligned.
11490 if(IsCorruptionDetectionEnabled())
11492 size = VmaAlignUp<VkDeviceSize>(size,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
11493 alignment = VmaAlignUp<VkDeviceSize>(alignment,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
11497 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11498 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
11500 res = AllocatePage(
11506 pAllocations + allocIndex);
11507 if(res != VK_SUCCESS)
// Rollback: free the pages already allocated, then clear the output array.
11514 if(res != VK_SUCCESS)
11517 while(allocIndex--)
11519 Free(pAllocations[allocIndex]);
11521 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates a single page. Strategy, in order:
//   1. Try existing blocks (last block first, then forward or backward scan
//      depending on strategy — the strategy dispatch lines were lost in
//      extraction).
//   2. If allowed, create a new block, shrinking preferred size by up to
//      NEW_BLOCK_SIZE_SHIFT_MAX halvings when heuristics or vkAllocateMemory
//      failures suggest it.
//   3. If canMakeOtherLost, retry up to VMA_ALLOCATION_TRY_COUNT times,
//      picking the cheapest request that evicts (makes lost) other
//      allocations.
// NOTE(review): many argument lists, braces, and early-exit lines were lost
// in extraction; code tokens kept verbatim.
11527 VkResult VmaBlockVector::AllocatePage(
11528 uint32_t currentFrameIndex,
11530 VkDeviceSize alignment,
11532 VmaSuballocationType suballocType,
11539 const bool canCreateNewBlock =
11541 (m_Blocks.size() < m_MaxBlockCount);
11548 canMakeOtherLost =
false;
// Upper-address allocation is only supported by the linear algorithm
// (feature check lines lost in extraction).
11552 if(isUpperAddress &&
11555 return VK_ERROR_FEATURE_NOT_PRESENT;
11569 return VK_ERROR_FEATURE_NOT_PRESENT;
// A request that cannot fit into even a full preferred-size block (after
// debug margins) can never succeed.
11573 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
11575 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11583 if(!canMakeOtherLost || canCreateNewBlock)
// 1a. Fast path: try the last (most recently used / largest-free) block.
11592 if(!m_Blocks.empty())
11594 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
11595 VMA_ASSERT(pCurrBlock);
11596 VkResult res = AllocateFromBlock(
11606 if(res == VK_SUCCESS)
11608 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
// 1b. Forward scan over all blocks.
11618 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11620 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11621 VMA_ASSERT(pCurrBlock);
11622 VkResult res = AllocateFromBlock(
11632 if(res == VK_SUCCESS)
11634 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 1c. Backward scan (alternative strategy).
11642 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11644 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11645 VMA_ASSERT(pCurrBlock);
11646 VkResult res = AllocateFromBlock(
11656 if(res == VK_SUCCESS)
11658 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 2. Create a new block.
11666 if(canCreateNewBlock)
11669 VkDeviceSize newBlockSize = m_PreferredBlockSize;
11670 uint32_t newBlockSizeShift = 0;
11671 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Heuristic pre-shrink: start smaller than preferred while no existing block
// is that large and the request still fits with 2x headroom.
11673 if(!m_ExplicitBlockSize)
11676 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
11677 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
11679 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11680 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
11682 newBlockSize = smallerNewBlockSize;
11683 ++newBlockSizeShift;
11692 size_t newBlockIndex = 0;
11693 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On vkAllocateMemory failure, retry with progressively halved sizes while
// the request still fits.
11695 if(!m_ExplicitBlockSize)
11697 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
11699 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11700 if(smallerNewBlockSize >= size)
11702 newBlockSize = smallerNewBlockSize;
11703 ++newBlockSizeShift;
11704 res = CreateBlock(newBlockSize, &newBlockIndex);
11713 if(res == VK_SUCCESS)
11715 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
11716 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
11718 res = AllocateFromBlock(
11728 if(res == VK_SUCCESS)
11730 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
11736 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// 3. Eviction path: repeatedly find the cheapest request that makes other
// allocations lost, try to commit it, and retry if the block state changed.
11743 if(canMakeOtherLost)
11745 uint32_t tryIndex = 0;
11746 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
11748 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
11749 VmaAllocationRequest bestRequest = {};
11750 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Forward scan for the lowest-cost request (cost 0 = no evictions needed).
11756 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11758 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11759 VMA_ASSERT(pCurrBlock);
11760 VmaAllocationRequest currRequest = {};
11761 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11764 m_BufferImageGranularity,
11773 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11774 if(pBestRequestBlock == VMA_NULL ||
11775 currRequestCost < bestRequestCost)
11777 pBestRequestBlock = pCurrBlock;
11778 bestRequest = currRequest;
11779 bestRequestCost = currRequestCost;
11781 if(bestRequestCost == 0)
// Backward scan (alternative strategy).
11792 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11794 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11795 VMA_ASSERT(pCurrBlock);
11796 VmaAllocationRequest currRequest = {};
11797 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11800 m_BufferImageGranularity,
11809 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11810 if(pBestRequestBlock == VMA_NULL ||
11811 currRequestCost < bestRequestCost ||
11814 pBestRequestBlock = pCurrBlock;
11815 bestRequest = currRequest;
11816 bestRequestCost = currRequestCost;
11818 if(bestRequestCost == 0 ||
// Commit the winning request: map if persistently-mapped allocations are
// requested, make the victim allocations lost, then construct the allocation.
11828 if(pBestRequestBlock != VMA_NULL)
11832 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
11833 if(res != VK_SUCCESS)
11839 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
11845 if(pBestRequestBlock->m_pMetadata->IsEmpty())
11847 m_HasEmptyBlock =
false;
11850 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
11851 (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
11852 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
11853 (*pAllocation)->InitBlockAllocation(
11855 bestRequest.offset,
11861 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
11862 VMA_DEBUG_LOG(
" Returned from existing block");
11863 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
11864 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11866 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
11868 if(IsCorruptionDetectionEnabled())
11870 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
11871 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Exhausted retries: block state kept changing under us.
11886 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
11888 return VK_ERROR_TOO_MANY_OBJECTS;
11892 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees one block-suballocated allocation: validates corruption margins,
// balances any persistent mapping, returns the range to the block's metadata,
// and manages the "keep at most one empty block" policy. The block itself is
// destroyed outside the lock (after the scope holding m_Mutex ends).
11895 void VmaBlockVector::Free(
11898 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scoped write lock; pBlockToDelete outlives it so VkDeviceMemory release
// happens without holding the mutex.
11902 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11904 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
11906 if(IsCorruptionDetectionEnabled())
11908 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
11909 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
// Persistently mapped allocations hold one map reference on the block.
11912 if(hAllocation->IsPersistentMap())
11914 pBlock->Unmap(m_hAllocator, 1);
11917 pBlock->m_pMetadata->Free(hAllocation);
11918 VMA_HEAVY_ASSERT(pBlock->Validate());
11920 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
// Empty-block policy: keep at most one empty block alive (above
// m_MinBlockCount); schedule the second empty block for deletion.
11923 if(pBlock->m_pMetadata->IsEmpty())
11926 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
11928 pBlockToDelete = pBlock;
11934 m_HasEmptyBlock =
true;
// If an empty block exists and is sorted to the back, it may now be
// deletable.
11939 else if(m_HasEmptyBlock)
11941 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
11942 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
11944 pBlockToDelete = pLastBlock;
11945 m_Blocks.pop_back();
11946 m_HasEmptyBlock =
false;
11950 IncrementallySortBlocks();
// Destroy outside the lock: FreeVulkanMemory may be slow / reentrant.
11955 if(pBlockToDelete != VMA_NULL)
11957 VMA_DEBUG_LOG(
" Deleted empty allocation");
11958 pBlockToDelete->Destroy(m_hAllocator);
11959 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the size of the largest existing block, scanning backwards and
// stopping early once the preferred block size is reached (nothing larger is
// relevant to the caller's heuristic). NOTE(review): the `break;` and
// `return result;` lines were lost in extraction.
11963 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 11965 VkDeviceSize result = 0;
11966 for(
size_t i = m_Blocks.size(); i--; )
11968 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
11969 if(result >= m_PreferredBlockSize)
// Removes the given block pointer from m_Blocks (linear search; does not
// destroy the block itself).
11977 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
11979 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11981 if(m_Blocks[blockIndex] == pBlock)
11983 VmaVectorRemove(m_Blocks, blockIndex);
// One bubble-sort pass moving blocks with more free space toward the back of
// m_Blocks. Called after each free; repeated calls converge to sorted order
// without paying for a full sort on every operation.
11990 void VmaBlockVector::IncrementallySortBlocks()
11995 for(
size_t i = 1; i < m_Blocks.size(); ++i)
11997 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
11999 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Attempts a non-evicting allocation from one specific block: creates an
// allocation request in the block's metadata and, on success, maps (if
// persistently mapped requested), constructs the VmaAllocation, fills debug
// patterns, and writes corruption margins. Returns
// VK_ERROR_OUT_OF_DEVICE_MEMORY when the block cannot satisfy the request.
// NOTE(review): several argument lists and braces were lost in extraction.
12006 VkResult VmaBlockVector::AllocateFromBlock(
12007 VmaDeviceMemoryBlock* pBlock,
12008 uint32_t currentFrameIndex,
12010 VkDeviceSize alignment,
12013 VmaSuballocationType suballocType,
12022 VmaAllocationRequest currRequest = {};
12023 if(pBlock->m_pMetadata->CreateAllocationRequest(
12026 m_BufferImageGranularity,
// This path never evicts: the request must not require making others lost.
12036 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
12040 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
12041 if(res != VK_SUCCESS)
// The block is about to gain an allocation, so it is no longer the empty one.
12048 if(pBlock->m_pMetadata->IsEmpty())
12050 m_HasEmptyBlock =
false;
12053 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
12054 (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
12055 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
12056 (*pAllocation)->InitBlockAllocation(
12058 currRequest.offset,
12064 VMA_HEAVY_ASSERT(pBlock->Validate());
12065 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
12066 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12068 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
12070 if(IsCorruptionDetectionEnabled())
12072 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
12073 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
12077 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates a new VkDeviceMemory of `blockSize` for this vector's memory
// type, wraps it in a VmaDeviceMemoryBlock (Init call's argument list partly
// lost in extraction), appends it to m_Blocks, and optionally reports its
// index. NOTE(review): the early return on AllocateVulkanMemory failure and
// the final `return VK_SUCCESS;` were lost in extraction.
12080 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
12082 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
12083 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
12084 allocInfo.allocationSize = blockSize;
12085 VkDeviceMemory mem = VK_NULL_HANDLE;
12086 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
12095 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
12101 allocInfo.allocationSize,
12105 m_Blocks.push_back(pBlock);
12106 if(pNewBlockIndex != VMA_NULL)
12108 *pNewBlockIndex = m_Blocks.size() - 1;
// Executes defragmentation moves on the CPU via memcpy through mapped
// pointers. Phases: (1) mark blocks touched by any move, (2) map marked
// blocks that aren't already mapped, (3) for each move invalidate the source
// range (if the memory type is non-coherent), memcpy src→dst, rewrite
// corruption margins, flush the destination range, (4) unmap blocks this
// function mapped, in reverse order.
12114 void VmaBlockVector::ApplyDefragmentationMovesCpu(
12115 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12116 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
12118 const size_t blockCount = m_Blocks.size();
12119 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
// Per-block bookkeeping: which blocks participate and whether we mapped them
// here (so we know to unmap them at the end).
12123 BLOCK_FLAG_USED = 0x00000001,
12124 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
12132 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
12133 blockInfo(blockCount, VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
12134 memset(blockInfo.data(), 0, blockCount *
sizeof(BlockInfo));
// Phase 1: flag every block referenced as a move source or destination.
12137 const size_t moveCount = moves.size();
12138 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12140 const VmaDefragmentationMove& move = moves[moveIndex];
12141 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
12142 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
12145 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Phase 2: ensure every used block has a CPU mapping; remember which
// mappings we created so they can be released in phase 4.
12148 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12150 BlockInfo& currBlockInfo = blockInfo[blockIndex];
12151 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12152 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
12154 currBlockInfo.pMappedData = pBlock->GetMappedData();
12156 if(currBlockInfo.pMappedData == VMA_NULL)
12158 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
12159 if(pDefragCtx->res == VK_SUCCESS)
12161 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
// Phase 3: perform the copies, with invalidate/flush aligned to
// nonCoherentAtomSize and clamped to the block size for non-coherent memory.
12168 if(pDefragCtx->res == VK_SUCCESS)
12170 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
12171 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
12173 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12175 const VmaDefragmentationMove& move = moves[moveIndex];
12177 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
12178 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
12180 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
// Invalidate the source range before reading (non-coherent only; the
// isNonCoherent guard line was lost in extraction).
12185 VmaDeviceMemoryBlock*
const pSrcBlock = m_Blocks[move.srcBlockIndex];
12186 memRange.memory = pSrcBlock->GetDeviceMemory();
12187 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
12188 memRange.size = VMA_MIN(
12189 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
12190 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
12191 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// The memcpy itself (its `memcpy(` line was lost in extraction).
12196 reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
12197 reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
12198 static_cast<size_t>(move.size));
12200 if(IsCorruptionDetectionEnabled())
12202 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
12203 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
// Flush the destination range after writing (non-coherent only).
12209 VmaDeviceMemoryBlock*
const pDstBlock = m_Blocks[move.dstBlockIndex];
12210 memRange.memory = pDstBlock->GetDeviceMemory();
12211 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
12212 memRange.size = VMA_MIN(
12213 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
12214 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
12215 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// Phase 4: release only the mappings created by this function, reverse order.
12222 for(
size_t blockIndex = blockCount; blockIndex--; )
12224 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
12225 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
12227 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12228 pBlock->Unmap(m_hAllocator, 1);
// Records defragmentation moves as GPU buffer copies: flags participating
// blocks, creates a temporary whole-block VkBuffer bound to each flagged
// block's memory, then records vkCmdCopyBuffer for every move into the given
// command buffer. Leaves pDefragCtx->res = VK_NOT_READY when copies were
// recorded, signalling the caller must submit and wait before cleanup.
12233 void VmaBlockVector::ApplyDefragmentationMovesGpu(
12234 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12235 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12236 VkCommandBuffer commandBuffer)
12238 const size_t blockCount = m_Blocks.size();
12240 pDefragCtx->blockContexts.resize(blockCount);
12241 memset(pDefragCtx->blockContexts.data(), 0, blockCount *
sizeof(VmaBlockDefragmentationContext));
// Flag every block referenced as a move source or destination.
12244 const size_t moveCount = moves.size();
12245 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12247 const VmaDefragmentationMove& move = moves[moveIndex];
12248 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12249 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12252 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Create one transfer buffer per flagged block, spanning the whole block,
// bound at offset 0, so copies can address any offset within the block.
12256 VkBufferCreateInfo bufCreateInfo;
12257 VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);
12259 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12261 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
12262 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12263 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
12265 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
12266 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
12267 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
12268 if(pDefragCtx->res == VK_SUCCESS)
12270 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
12271 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
// Record one vkCmdCopyBuffer region per move.
12278 if(pDefragCtx->res == VK_SUCCESS)
12280 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12282 const VmaDefragmentationMove& move = moves[moveIndex];
12284 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
12285 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
12287 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
// NOTE(review): the VkBufferCopy initializer lines (srcOffset/dstOffset/size)
// were lost in extraction, and "®ion" below is mojibake for `&region` —
// confirm against upstream.
12289 VkBufferCopy region = {
12293 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
12294 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, ®ion);
// Copies recorded but not yet executed — caller must submit and wait.
12299 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
12301 pDefragCtx->res = VK_NOT_READY;
// NOTE(review): the enclosing function's signature line was lost in
// extraction — presumably VmaBlockVector::FreeEmptyBlocks(
// VmaDefragmentationStats*); confirm against upstream. Destroys all empty
// blocks above m_MinBlockCount (reporting freed bytes to the stats struct)
// and recomputes m_HasEmptyBlock for any empty block that must be kept.
12307 m_HasEmptyBlock =
false;
12308 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
12310 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12311 if(pBlock->m_pMetadata->IsEmpty())
12313 if(m_Blocks.size() > m_MinBlockCount)
12315 if(pDefragmentationStats != VMA_NULL)
12318 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
12321 VmaVectorRemove(m_Blocks, blockIndex);
12322 pBlock->Destroy(m_hAllocator);
12323 vma_delete(m_hAllocator, pBlock);
// Cannot delete below the minimum block count: remember the empty block.
12327 m_HasEmptyBlock =
true;
// Serializes this block vector as JSON under a read lock: pool configuration
// (memory type, block size policy, min/max/current block counts,
// frame-in-use count, algorithm — the custom-pool vs default-pool branch
// lines were lost in extraction) followed by a per-block detailed map keyed
// by block id. Only compiled when VMA_STATS_STRING_ENABLED.
12333 #if VMA_STATS_STRING_ENABLED 12335 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
12337 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12339 json.BeginObject();
12343 json.WriteString(
"MemoryTypeIndex");
12344 json.WriteNumber(m_MemoryTypeIndex);
12346 json.WriteString(
"BlockSize");
12347 json.WriteNumber(m_PreferredBlockSize);
12349 json.WriteString(
"BlockCount");
12350 json.BeginObject(
true);
12351 if(m_MinBlockCount > 0)
12353 json.WriteString(
"Min");
12354 json.WriteNumber((uint64_t)m_MinBlockCount);
12356 if(m_MaxBlockCount < SIZE_MAX)
12358 json.WriteString(
"Max");
12359 json.WriteNumber((uint64_t)m_MaxBlockCount);
12361 json.WriteString(
"Cur");
12362 json.WriteNumber((uint64_t)m_Blocks.size());
12365 if(m_FrameInUseCount > 0)
12367 json.WriteString(
"FrameInUseCount");
12368 json.WriteNumber(m_FrameInUseCount);
12371 if(m_Algorithm != 0)
12373 json.WriteString(
"Algorithm");
12374 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
12379 json.WriteString(
"PreferredBlockSize");
12380 json.WriteNumber(m_PreferredBlockSize);
// Per-block detailed maps, keyed by the block's numeric id.
12383 json.WriteString(
"Blocks");
12384 json.BeginObject();
12385 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12387 json.BeginString();
12388 json.ContinueString(m_Blocks[i]->GetId());
12391 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// -- VmaBlockVector::Defragment -----------------------------------------------
// Runs one defragmentation pass over this block vector. Chooses CPU vs GPU
// defragmentation from the remaining byte/allocation budgets and the memory
// type's properties, locks the mutex (recorded in pCtx->mutexLocked so
// DefragmentationEnd can unlock), asks the algorithm for a list of moves,
// subtracts the consumed budget from the by-reference limits, and applies the
// moves via memcpy (CPU) or vkCmdCopyBuffer recorded into commandBuffer (GPU).
// NOTE(review): extraction-garbled (braces/else-branches and some lines
// dropped); kept byte-identical, comments only.
12398 #endif // #if VMA_STATS_STRING_ENABLED 12400 void VmaBlockVector::Defragment(
12401 class VmaBlockVectorDefragmentationContext* pCtx,
12403 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
12404 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
12405 VkCommandBuffer commandBuffer)
12407 pCtx->res = VK_SUCCESS;
12409 const VkMemoryPropertyFlags memPropFlags =
12410 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
12411 const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
// CPU path needs host-visible memory (the rest of the condition was lost in
// extraction — presumably `isHostVisible`; GPU path is gated on the
// per-memory-type opt-in bits and corruption detection being off.
12413 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
12415 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
12416 !IsCorruptionDetectionEnabled() &&
12417 ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;
12420 if(canDefragmentOnCpu || canDefragmentOnGpu)
12422 bool defragmentOnGpu;
// Exactly one option available -> take it; otherwise prefer GPU for
// device-local memory or on integrated GPUs.
12424 if(canDefragmentOnGpu != canDefragmentOnCpu)
12426 defragmentOnGpu = canDefragmentOnGpu;
12431 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
12432 m_hAllocator->IsIntegratedGpu();
// Overlapping src/dst ranges are only safe with the CPU (memmove-style) path.
12435 bool overlappingMoveSupported = !defragmentOnGpu;
12437 if(m_hAllocator->m_UseMutex)
12439 m_Mutex.LockWrite();
12440 pCtx->mutexLocked =
true;
12443 pCtx->Begin(overlappingMoveSupported);
12447 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
12448 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
12449 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves =
12450 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()))(
12451 pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);
// Deduct what the algorithm actually consumed from the caller's budgets.
12454 if(pStats != VMA_NULL)
12456 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
12457 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
12460 VMA_ASSERT(bytesMoved <= maxBytesToMove);
12461 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
12462 if(defragmentOnGpu)
12464 maxGpuBytesToMove -= bytesMoved;
12465 maxGpuAllocationsToMove -= allocationsMoved;
12469 maxCpuBytesToMove -= bytesMoved;
12470 maxCpuAllocationsToMove -= allocationsMoved;
12474 if(pCtx->res >= VK_SUCCESS)
12476 if(defragmentOnGpu)
12478 ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
12482 ApplyDefragmentationMovesCpu(pCtx, moves);
// -- VmaBlockVector::DefragmentationEnd ---------------------------------------
// Tears down after Defragment(): destroys the temporary VkBuffers created for
// the GPU copy path, frees blocks emptied by the moves (on success), and
// releases the write lock if Defragment() took it.
// NOTE(review): extraction-garbled (braces and a parameter line dropped —
// `pStats` is used but its declaration is not visible); kept byte-identical.
12488 void VmaBlockVector::DefragmentationEnd(
12489 class VmaBlockVectorDefragmentationContext* pCtx,
// Destroy temporary per-block buffers in reverse creation order.
12493 for(
size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
12495 VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
12496 if(blockCtx.hBuffer)
12498 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
12499 m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
12503 if(pCtx->res >= VK_SUCCESS)
12505 FreeEmptyBlocks(pStats);
// Unlock only if Defragment() locked (mutexLocked mirrors m_UseMutex).
12508 if(pCtx->mutexLocked)
12510 VMA_ASSERT(m_hAllocator->m_UseMutex);
12511 m_Mutex.UnlockWrite();
12515 size_t VmaBlockVector::CalcAllocationCount()
const 12518 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12520 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
12525 bool VmaBlockVector::IsBufferImageGranularityConflictPossible()
const 12527 if(m_BufferImageGranularity == 1)
12531 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
12532 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
12534 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[i];
12535 VMA_ASSERT(m_Algorithm == 0);
12536 VmaBlockMetadata_Generic*
const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
12537 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
12545 void VmaBlockVector::MakePoolAllocationsLost(
12546 uint32_t currentFrameIndex,
12547 size_t* pLostAllocationCount)
12549 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12550 size_t lostAllocationCount = 0;
12551 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12553 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12554 VMA_ASSERT(pBlock);
12555 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
12557 if(pLostAllocationCount != VMA_NULL)
12559 *pLostAllocationCount = lostAllocationCount;
12563 VkResult VmaBlockVector::CheckCorruption()
12565 if(!IsCorruptionDetectionEnabled())
12567 return VK_ERROR_FEATURE_NOT_PRESENT;
12570 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12571 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12573 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12574 VMA_ASSERT(pBlock);
12575 VkResult res = pBlock->CheckCorruption(m_hAllocator);
12576 if(res != VK_SUCCESS)
12584 void VmaBlockVector::AddStats(
VmaStats* pStats)
12586 const uint32_t memTypeIndex = m_MemoryTypeIndex;
12587 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
12589 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12591 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12593 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12594 VMA_ASSERT(pBlock);
12595 VMA_HEAVY_ASSERT(pBlock->Validate());
12597 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
12598 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12599 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12600 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// -- VmaDefragmentationAlgorithm_Generic ctor ---------------------------------
// Snapshots the block vector: one BlockInfo per block, remembering each
// block's original index, then sorts by block pointer so AddAllocation can
// binary-search. NOTE(review): extraction-garbled — the first parameter line
// (presumably `VmaAllocator hAllocator`) and initializer 12615 (presumably
// `m_BytesMoved(0)`) were dropped; kept byte-identical, comments only.
12607 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
12609 VmaBlockVector* pBlockVector,
12610 uint32_t currentFrameIndex,
12611 bool overlappingMoveSupported) :
12612 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12613 m_AllocationCount(0),
12614 m_AllAllocations(false),
12616 m_AllocationsMoved(0),
12617 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
12620 const size_t blockCount = m_pBlockVector->m_Blocks.size();
12621 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12623 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
12624 pBlockInfo->m_OriginalBlockIndex = blockIndex;
12625 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
12626 m_Blocks.push_back(pBlockInfo);
// Sorted by raw block pointer to enable VmaBinaryFindFirstNotLess lookups.
12630 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
12633 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
12635 for(
size_t i = m_Blocks.size(); i--; )
12637 vma_delete(m_hAllocator, m_Blocks[i]);
// -- VmaDefragmentationAlgorithm_Generic::AddAllocation -----------------------
// Registers one allocation as a defragmentation candidate: skips lost
// allocations, locates the allocation's block via binary search over the
// pointer-sorted m_Blocks, and appends an AllocationInfo to that block.
// m_AllocationCount is bumped for every non-lost allocation.
// NOTE(review): extraction-garbled — lines 12652-12657 (apparently the
// else-branch for a block not found) and the braces were dropped; kept
// byte-identical, comments only.
12641 void VmaDefragmentationAlgorithm_Generic::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
12644 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
12646 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
12647 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
12648 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
12650 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
12651 (*it)->m_Allocations.push_back(allocInfo);
12658 ++m_AllocationCount;
// -- VmaDefragmentationAlgorithm_Generic::DefragmentRound ---------------------
// One greedy pass: repeatedly takes the last allocation of the last block
// (allocations are pre-sorted by descending offset) and tries to re-place it
// into an earlier block / earlier offset via CreateAllocationRequest +
// MoveMakesSense, recording each successful move into `moves` and updating
// the source/destination metadata in place. Stops when the byte or
// allocation-count budget would be exceeded.
// NOTE(review): extraction-garbled — many lines are missing (loop headers,
// early returns, several CreateAllocationRequest arguments 12738-12743, and
// all braces); behavior-preserving rewrite is unsafe, so the text is kept
// byte-identical with comments only.
12662 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
12663 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12664 VkDeviceSize maxBytesToMove,
12665 uint32_t maxAllocationsToMove)
12667 if(m_Blocks.empty())
12680 size_t srcBlockMinIndex = 0;
// Scan starts from the very last allocation of the very last block.
12693 size_t srcBlockIndex = m_Blocks.size() - 1;
12694 size_t srcAllocIndex = SIZE_MAX;
// SIZE_MAX sentinel forces this loop to step down to the next non-empty block.
12700 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
12702 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
12705 if(srcBlockIndex == srcBlockMinIndex)
12712 srcAllocIndex = SIZE_MAX;
12717 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
12721 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
12722 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
12724 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
12725 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
12726 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
12727 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try every block up to and including the source block as a destination.
12730 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
12732 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
12733 VmaAllocationRequest dstAllocRequest;
12734 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
12735 m_CurrentFrameIndex,
12736 m_pBlockVector->GetFrameInUseCount(),
12737 m_pBlockVector->GetBufferImageGranularity(),
12744 &dstAllocRequest) &&
12746 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
12748 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check: bail out of the round before exceeding either limit.
12751 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
12752 (m_BytesMoved + size > maxBytesToMove))
12757 VmaDefragmentationMove move;
12758 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
12759 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
12760 move.srcOffset = srcOffset;
12761 move.dstOffset = dstAllocRequest.offset;
12763 moves.push_back(move);
// Commit the move in the metadata: alloc at destination, free at source,
// repoint the allocation handle.
12765 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
12769 allocInfo.m_hAllocation);
12770 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
12772 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
12774 if(allocInfo.m_pChanged != VMA_NULL)
12776 *allocInfo.m_pChanged = VK_TRUE;
12779 ++m_AllocationsMoved;
12780 m_BytesMoved += size;
12782 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the previous allocation / previous block.
12790 if(srcAllocIndex > 0)
12796 if(srcBlockIndex > 0)
12799 srcAllocIndex = SIZE_MAX;
12809 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount()
const 12812 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12814 if(m_Blocks[i]->m_HasNonMovableAllocations)
// -- VmaDefragmentationAlgorithm_Generic::Defragment --------------------------
// Driver for the generic algorithm: if m_AllAllocations is set, harvests every
// non-free suballocation from each block's metadata; then per block computes
// the non-movable flag and sorts allocations by descending offset, sorts the
// blocks into move-destination order, and runs DefragmentRound up to
// roundCount (2) times while it keeps returning VK_SUCCESS.
// NOTE(review): extraction-garbled — braces, the early return for "nothing to
// do", the iterator increment, and the final return were dropped; kept
// byte-identical, comments only.
12822 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
12823 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12824 VkDeviceSize maxBytesToMove,
12825 uint32_t maxAllocationsToMove)
// No registered allocations and not in "defragment everything" mode -> no-op.
12827 if(!m_AllAllocations && m_AllocationCount == 0)
12832 const size_t blockCount = m_Blocks.size();
12833 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12835 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
12837 if(m_AllAllocations)
// Downcast is valid: the generic algorithm only runs on default-pool
// (VmaBlockMetadata_Generic) blocks.
12839 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
12840 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
12841 it != pMetadata->m_Suballocations.end();
12844 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
12846 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
12847 pBlockInfo->m_Allocations.push_back(allocInfo);
12852 pBlockInfo->CalcHasNonMovableAllocations();
// Descending offset order lets DefragmentRound pop from the back.
12856 pBlockInfo->SortAllocationsByOffsetDescending();
12862 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
12865 const uint32_t roundCount = 2;
12868 VkResult result = VK_SUCCESS;
12869 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
12871 result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
12877 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
12878 size_t dstBlockIndex, VkDeviceSize dstOffset,
12879 size_t srcBlockIndex, VkDeviceSize srcOffset)
12881 if(dstBlockIndex < srcBlockIndex)
12885 if(dstBlockIndex > srcBlockIndex)
12889 if(dstOffset < srcOffset)
// -- VmaDefragmentationAlgorithm_Fast ctor ------------------------------------
// Initializes counters and the per-block info vector for the fast (compacting)
// algorithm. The fast path assumes no debug margins around allocations, hence
// the assert. NOTE(review): extraction-garbled — the first parameter line
// (presumably `VmaAllocator hAllocator`) and initializer 12908 (presumably
// `m_BytesMoved(0)`) were dropped; kept byte-identical, comments only.
12899 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
12901 VmaBlockVector* pBlockVector,
12902 uint32_t currentFrameIndex,
12903 bool overlappingMoveSupported) :
12904 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12905 m_OverlappingMoveSupported(overlappingMoveSupported),
12906 m_AllocationCount(0),
12907 m_AllAllocations(false),
12909 m_AllocationsMoved(0),
12910 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
// Fast algorithm is incompatible with VMA_DEBUG_MARGIN paddings.
12912 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
12916 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
// -- VmaDefragmentationAlgorithm_Fast::Defragment -----------------------------
// Compacting pass: sorts blocks by ascending free space, then sweeps every
// source suballocation front-to-back, packing each either into a previously
// registered free gap (freeSpaceDb) or at the current write cursor
// (dstBlockInfoIndex/dstOffset). Three placement cases: same-block shift,
// cross-block move into a free-space gap, and cross-block append at the
// cursor. Each committed move updates m_BytesMoved/m_AllocationsMoved and is
// appended to `moves`. Metadata is rebuilt afterwards by PostprocessMetadata.
// NOTE(review): extraction-garbled — braces, several `else` lines, `end`'s
// declaration, and the final `return VK_SUCCESS` appear to be dropped;
// behavior-preserving rewrite is unsafe, so the text is kept byte-identical
// with comments only.
12920 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
12921 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12922 VkDeviceSize maxBytesToMove,
12923 uint32_t maxAllocationsToMove)
12925 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
12927 const size_t blockCount = m_pBlockVector->GetBlockCount();
12928 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
// Strips free suballocations from each block's metadata so only real
// allocations remain during the sweep.
12933 PreprocessMetadata();
12937 m_BlockInfos.resize(blockCount);
12938 for(
size_t i = 0; i < blockCount; ++i)
12940 m_BlockInfos[i].origBlockIndex = i;
// Blocks with the least free space come first: they are the best packing
// destinations.
12943 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
12944 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
12945 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
// Registry of gaps skipped over earlier, reusable for later allocations.
12950 FreeSpaceDatabase freeSpaceDb;
// Write cursor: destination block (in sorted order) and offset within it.
12952 size_t dstBlockInfoIndex = 0;
12953 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
12954 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
12955 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
12956 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
12957 VkDeviceSize dstOffset = 0;
12960 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
12962 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
12963 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
12964 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
12965 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
12966 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
12968 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
12969 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
12970 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
// Budget exhausted -> stop the whole sweep (`end` flag).
12971 if(m_AllocationsMoved == maxAllocationsToMove ||
12972 m_BytesMoved + srcAllocSize > maxBytesToMove)
12977 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
// Case A: the allocation fits into a previously registered free gap.
12980 size_t freeSpaceInfoIndex;
12981 VkDeviceSize dstAllocOffset;
12982 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
12983 freeSpaceInfoIndex, dstAllocOffset))
12985 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
12986 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
12987 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
// A.1: gap is in the same block -> just change the offset in place.
12990 if(freeSpaceInfoIndex == srcBlockInfoIndex)
12992 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
12996 VmaSuballocation suballoc = *srcSuballocIt;
12997 suballoc.offset = dstAllocOffset;
12998 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
12999 m_BytesMoved += srcAllocSize;
13000 ++m_AllocationsMoved;
13002 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13004 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13005 srcSuballocIt = nextSuballocIt;
13007 InsertSuballoc(pFreeSpaceMetadata, suballoc);
13009 VmaDefragmentationMove move = {
13010 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
13011 srcAllocOffset, dstAllocOffset,
13013 moves.push_back(move);
// A.2: gap is in an earlier block -> re-home the allocation there.
13020 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
13022 VmaSuballocation suballoc = *srcSuballocIt;
13023 suballoc.offset = dstAllocOffset;
13024 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
13025 m_BytesMoved += srcAllocSize;
13026 ++m_AllocationsMoved;
13028 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13030 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13031 srcSuballocIt = nextSuballocIt;
13033 InsertSuballoc(pFreeSpaceMetadata, suballoc);
13035 VmaDefragmentationMove move = {
13036 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
13037 srcAllocOffset, dstAllocOffset,
13039 moves.push_back(move);
// Case B: no gap found -> place at the write cursor, aligned up.
13044 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
// Cursor block full: register its tail as a free gap and advance the
// cursor to the next destination block.
13047 while(dstBlockInfoIndex < srcBlockInfoIndex &&
13048 dstAllocOffset + srcAllocSize > dstBlockSize)
13051 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
13053 ++dstBlockInfoIndex;
13054 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
13055 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
13056 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
13057 dstBlockSize = pDstMetadata->GetSize();
13059 dstAllocOffset = 0;
// B.1: cursor is in the source block itself.
13063 if(dstBlockInfoIndex == srcBlockInfoIndex)
13065 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
13067 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
13069 bool skipOver = overlap;
13070 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
// Heuristic: only worth an overlapping memmove if the shift distance
// is large enough relative to the allocation size.
13074 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
// Skipped: leave in place, remember the hole before it for later reuse.
13079 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
13081 dstOffset = srcAllocOffset + srcAllocSize;
// Moved within the block: update offset and advance the cursor.
13087 srcSuballocIt->offset = dstAllocOffset;
13088 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
13089 dstOffset = dstAllocOffset + srcAllocSize;
13090 m_BytesMoved += srcAllocSize;
13091 ++m_AllocationsMoved;
13093 VmaDefragmentationMove move = {
13094 srcOrigBlockIndex, dstOrigBlockIndex,
13095 srcAllocOffset, dstAllocOffset,
13097 moves.push_back(move);
// B.2: cursor is in an earlier block -> append the suballocation there.
13105 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
13106 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
13108 VmaSuballocation suballoc = *srcSuballocIt;
13109 suballoc.offset = dstAllocOffset;
13110 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
13111 dstOffset = dstAllocOffset + srcAllocSize;
13112 m_BytesMoved += srcAllocSize;
13113 ++m_AllocationsMoved;
13115 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13117 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13118 srcSuballocIt = nextSuballocIt;
13120 pDstMetadata->m_Suballocations.push_back(suballoc);
13122 VmaDefragmentationMove move = {
13123 srcOrigBlockIndex, dstOrigBlockIndex,
13124 srcAllocOffset, dstAllocOffset,
13126 moves.push_back(move);
13132 m_BlockInfos.clear();
// Rebuilds free suballocations, free counts and the by-size index that
// PreprocessMetadata stripped.
13134 PostprocessMetadata();
// -- VmaDefragmentationAlgorithm_Fast::PreprocessMetadata ---------------------
// Prepares every block's metadata for the compacting sweep: resets free count
// to 0, resets the free-size sum to the whole block, clears the by-size free
// list, and erases all FREE suballocations from the list so only real
// allocations remain (PostprocessMetadata later rebuilds the free structures).
// NOTE(review): extraction-garbled — the loop tail after the erase (lines
// 13157+, presumably the iterator advance for the non-free case) and braces
// were dropped; kept byte-identical, comments only.
13139 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
13141 const size_t blockCount = m_pBlockVector->GetBlockCount();
13142 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13144 VmaBlockMetadata_Generic*
const pMetadata =
13145 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
13146 pMetadata->m_FreeCount = 0;
13147 pMetadata->m_SumFreeSize = pMetadata->GetSize();
13148 pMetadata->m_FreeSuballocationsBySize.clear();
13149 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13150 it != pMetadata->m_Suballocations.end(); )
13152 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
// Save the successor before erasing so iteration can continue.
13154 VmaSuballocationList::iterator nextIt = it;
13156 pMetadata->m_Suballocations.erase(it);
// -- VmaDefragmentationAlgorithm_Fast::PostprocessMetadata --------------------
// Rebuilds each block's free-space bookkeeping after the compacting sweep:
// inserts a FREE suballocation into every hole between the (now sorted)
// allocations and after the last one, updates m_FreeCount / m_SumFreeSize,
// registers sufficiently large holes in m_FreeSuballocationsBySize, re-sorts
// that index, and validates. An entirely empty block becomes one big FREE
// suballocation.
// NOTE(review): extraction-garbled — braces and the aggregate-initializer
// lines naming the inserted suballocations' offset/size fields were dropped;
// kept byte-identical, comments only.
13167 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
13169 const size_t blockCount = m_pBlockVector->GetBlockCount();
13170 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13172 VmaBlockMetadata_Generic*
const pMetadata =
13173 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
13174 const VkDeviceSize blockSize = pMetadata->GetSize();
// Case 1: block emptied completely -> a single FREE suballocation.
13177 if(pMetadata->m_Suballocations.empty())
13179 pMetadata->m_FreeCount = 1;
13181 VmaSuballocation suballoc = {
13185 VMA_SUBALLOCATION_TYPE_FREE };
13186 pMetadata->m_Suballocations.push_back(suballoc);
13187 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
// Case 2: walk allocations in offset order, filling every gap.
13192 VkDeviceSize offset = 0;
13193 VmaSuballocationList::iterator it;
13194 for(it = pMetadata->m_Suballocations.begin();
13195 it != pMetadata->m_Suballocations.end();
// After PreprocessMetadata only non-FREE entries remain.
13198 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
13199 VMA_ASSERT(it->offset >= offset);
// Hole before this allocation -> insert a FREE suballocation for it.
13202 if(it->offset > offset)
13204 ++pMetadata->m_FreeCount;
13205 const VkDeviceSize freeSize = it->offset - offset;
13206 VmaSuballocation suballoc = {
13210 VMA_SUBALLOCATION_TYPE_FREE };
13211 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
13212 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13214 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
13218 pMetadata->m_SumFreeSize -= it->size;
13219 offset = it->offset + it->size;
// Trailing hole after the last allocation.
13223 if(offset < blockSize)
13225 ++pMetadata->m_FreeCount;
13226 const VkDeviceSize freeSize = blockSize - offset;
13227 VmaSuballocation suballoc = {
13231 VMA_SUBALLOCATION_TYPE_FREE };
13232 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
13233 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
// NOTE(review): `>` here vs `>=` for the preceding-hole case above —
// looks inconsistent in the original; preserved as-is.
13234 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13236 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
13241 pMetadata->m_FreeSuballocationsBySize.begin(),
13242 pMetadata->m_FreeSuballocationsBySize.end(),
13243 VmaSuballocationItemSizeLess());
13246 VMA_HEAVY_ASSERT(pMetadata->Validate());
13250 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc)
13253 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13254 while(it != pMetadata->m_Suballocations.end())
13256 if(it->offset < suballoc.offset)
13261 pMetadata->m_Suballocations.insert(it, suballoc);
// -- VmaBlockVectorDefragmentationContext ctor --------------------------------
// Per-block-vector defragmentation state: the (optional) custom pool it
// belongs to, the target block vector, the frame index, the algorithm (created
// lazily in Begin()), registered allocations, and the temporary per-block
// buffer contexts for the GPU path. NOTE(review): extraction-garbled — the
// first parameter lines (presumably `VmaAllocator hAllocator, VmaPool
// hCustomPool`) and an initializer (presumably `res(VK_SUCCESS)`) were
// dropped; kept byte-identical, comments only.
13267 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
13270 VmaBlockVector* pBlockVector,
13271 uint32_t currFrameIndex) :
13273 mutexLocked(false),
13274 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
13275 m_hAllocator(hAllocator),
13276 m_hCustomPool(hCustomPool),
13277 m_pBlockVector(pBlockVector),
13278 m_CurrFrameIndex(currFrameIndex),
// Algorithm is chosen and created in Begin(), not here.
13279 m_pAlgorithm(VMA_NULL),
13280 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
13281 m_AllAllocations(false)
13285 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
13287 vma_delete(m_hAllocator, m_pAlgorithm);
13290 void VmaBlockVectorDefragmentationContext::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
13292 AllocInfo info = { hAlloc, pChanged };
13293 m_Allocations.push_back(info);
// -- VmaBlockVectorDefragmentationContext::Begin ------------------------------
// Creates the defragmentation algorithm and feeds it the work set. The fast
// algorithm is chosen when debug margins are off and the block vector cannot
// have buffer/image granularity conflicts (plus further conditions lost in
// extraction — line 13312 is missing, presumably requiring allAllocations);
// otherwise the generic algorithm is used. Then either everything is added via
// AddAll() or each queued allocation individually.
// NOTE(review): extraction-garbled — braces and the `if(allAllocations)` /
// `else` lines between 13321 and 13330 were dropped; kept byte-identical.
13296 void VmaBlockVectorDefragmentationContext::Begin(
bool overlappingMoveSupported)
// "All allocations" also holds when the caller queued every single one.
13298 const bool allAllocations = m_AllAllocations ||
13299 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
13311 if(VMA_DEBUG_MARGIN == 0 &&
13313 !m_pBlockVector->IsBufferImageGranularityConflictPossible())
13315 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
13316 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13320 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
13321 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13326 m_pAlgorithm->AddAll();
13330 for(
size_t i = 0, count = m_Allocations.size(); i < count; ++i)
13332 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
// -- VmaDefragmentationContext_T ctor -----------------------------------------
// Top-level defragmentation context spanning default pools and custom pools.
// Zeroes the fixed array of per-memory-type default-pool contexts; custom-pool
// contexts are created on demand in AddPools/AddAllocations.
// NOTE(review): extraction-garbled — parameter lines (presumably `VmaAllocator
// hAllocator`, flags, `VmaDefragmentationStats* pStats`) and matching
// initializers (presumably m_pStats) were dropped; kept byte-identical.
13340 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
13342 uint32_t currFrameIndex,
13345 m_hAllocator(hAllocator),
13346 m_CurrFrameIndex(currFrameIndex),
13349 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
13351 memset(m_DefaultPoolContexts, 0,
sizeof(m_DefaultPoolContexts));
13354 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
13356 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13358 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
13359 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13360 vma_delete(m_hAllocator, pBlockVectorCtx);
13362 for(
size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
13364 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
13365 if(pBlockVectorCtx)
13367 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13368 vma_delete(m_hAllocator, pBlockVectorCtx);
// -- VmaDefragmentationContext_T::AddPools ------------------------------------
// Registers whole custom pools for defragmentation. Only pools with the
// default algorithm (GetAlgorithm() == 0) participate; for each, an existing
// per-pool context is reused (linear search) or a new one is created, and the
// context is switched to "defragment everything" via AddAll().
// NOTE(review): extraction-garbled — braces and two arguments of the context
// constructor call (13396-13399, presumably m_hAllocator/pool and
// m_CurrFrameIndex) were dropped; kept byte-identical, comments only.
13373 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount,
VmaPool* pPools)
13375 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13377 VmaPool pool = pPools[poolIndex];
// Pools with a non-default algorithm (linear/buddy) are skipped.
13380 if(pool->m_BlockVector.GetAlgorithm() == 0)
13382 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
// Reuse a context previously created for the same pool, if any.
13384 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13386 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
13388 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13393 if(!pBlockVectorDefragCtx)
13395 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13398 &pool->m_BlockVector,
13400 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
13403 pBlockVectorDefragCtx->AddAll();
// -- VmaDefragmentationContext_T::AddAllocations ------------------------------
// Registers individual allocations for defragmentation. Dedicated and lost
// allocations are skipped. For each block allocation the owning context is
// found or created — per custom pool (linear search over m_CustomPoolContexts)
// or per memory type (m_DefaultPoolContexts array) — and the allocation is
// queued on it together with its optional pAllocationsChanged[allocIndex]
// output flag.
// NOTE(review): extraction-garbled — braces, the `hAlloc` parameter/lookup
// lines, and some constructor-argument lines were dropped; kept
// byte-identical, comments only.
13408 void VmaDefragmentationContext_T::AddAllocations(
13409 uint32_t allocationCount,
13411 VkBool32* pAllocationsChanged)
13414 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
13417 VMA_ASSERT(hAlloc);
// Only non-lost block allocations can be defragmented (plus a dropped
// middle condition at 13420 — presumably excluding DEDICATED/CAN_BECOME_LOST).
13419 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
13421 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
13423 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
13425 const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
// Custom-pool allocation: find or create that pool's context.
13427 if(hAllocPool != VK_NULL_HANDLE)
13430 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
13432 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13434 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
13436 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13440 if(!pBlockVectorDefragCtx)
13442 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13445 &hAllocPool->m_BlockVector,
13447 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
// Default-pool allocation: context is indexed by memory type.
13454 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
13455 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
13456 if(!pBlockVectorDefragCtx)
13458 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13461 m_hAllocator->m_pBlockVectors[memTypeIndex],
13463 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
13467 if(pBlockVectorDefragCtx)
13469 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
13470 &pAllocationsChanged[allocIndex] : VMA_NULL;
13471 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
// Runs defragmentation over all registered block-vector contexts: first the default
// per-memory-type contexts, then the custom-pool contexts. GPU move budgets are zeroed
// when no command buffer is available. Stops early once a context reports an error.
// NOTE(review): some parameters (stats pointer, command buffer) and arguments are elided
// by the extraction — confirm against the full source.
13477 VkResult VmaDefragmentationContext_T::Defragment(
13478 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
13479 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
// Without a command buffer, GPU-side moves are impossible — disable them.
13487 if(commandBuffer == VK_NULL_HANDLE)
13489 maxGpuBytesToMove = 0;
13490 maxGpuAllocationsToMove = 0;
13493 VkResult res = VK_SUCCESS;
// Pass 1: default block vectors, one per memory type.
13496 for(uint32_t memTypeIndex = 0;
13497 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
13500 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
13501 if(pBlockVectorCtx)
13503 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
13504 pBlockVectorCtx->GetBlockVector()->Defragment(
13507 maxCpuBytesToMove, maxCpuAllocationsToMove,
13508 maxGpuBytesToMove, maxGpuAllocationsToMove,
13510 if(pBlockVectorCtx->res != VK_SUCCESS)
13512 res = pBlockVectorCtx->res;
// Pass 2: custom-pool block vectors.
13518 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
13519 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
13522 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
13523 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
13524 pBlockVectorCtx->GetBlockVector()->Defragment(
13527 maxCpuBytesToMove, maxCpuAllocationsToMove,
13528 maxGpuBytesToMove, maxGpuAllocationsToMove,
13530 if(pBlockVectorCtx->res != VK_SUCCESS)
13532 res = pBlockVectorCtx->res;
// VmaRecorder: writes a CSV trace of all VMA API calls (Windows-only; compiled when
// VMA_RECORDING_ENABLED). Constructor just zero-inits; the Init fragment below opens the
// output file and queries the QPC timer for timestamps.
// NOTE(review): the Init(...) signature line is elided by the extraction.
13542 #if VMA_RECORDING_ENABLED 13544 VmaRecorder::VmaRecorder() :
13549 m_StartCounter(INT64_MAX)
13555 m_UseMutex = useMutex;
13556 m_Flags = settings.
flags;
// High-resolution timer baseline for relative call timestamps.
13558 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
13559 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
// Open trace file for binary write; failure aborts recorder initialization.
13562 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
13565 return VK_ERROR_INITIALIZATION_FAILED;
// Trace header: file magic and format version ("1,6").
13569 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
13570 fprintf(m_File,
"%s\n",
"1,6");
// Destructor: closes the trace file if it was opened (fclose line elided by extraction).
13575 VmaRecorder::~VmaRecorder()
13577 if(m_File != VMA_NULL)
// Appends a vmaCreateAllocator row (thread id, timestamp, frame index) to the trace.
13583 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
13585 CallParams callParams;
13586 GetBasicParams(callParams);
// Serialize file writes when the recorder is shared across threads.
13588 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13589 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Appends a vmaDestroyAllocator row to the trace.
13593 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
13595 CallParams callParams;
13596 GetBasicParams(callParams);
13598 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13599 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Body of RecordCreatePool: appends a vmaCreatePool row with the pool create-info fields.
// NOTE(review): the function signature and the fprintf argument lines (pool create-info
// fields and pool handle) are elided by the extraction.
13605 CallParams callParams;
13606 GetBasicParams(callParams);
13608 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13609 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaDestroyPool row (pool handle argument line elided by extraction).
13620 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
13622 CallParams callParams;
13623 GetBasicParams(callParams);
13625 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13626 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaAllocateMemory row: memory requirements, allocation create-info fields,
// resulting allocation handle, and the (possibly stringified) user data.
// NOTE(review): several fprintf argument lines are elided by the extraction.
13631 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
13632 const VkMemoryRequirements& vkMemReq,
13636 CallParams callParams;
13637 GetBasicParams(callParams);
13639 VmaMutexLock lock(m_FileMutex, m_UseMutex);
// UserDataString formats pUserData either as a string or as a pointer, per flags.
13640 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13641 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13643 vkMemReq.alignment,
13644 vkMemReq.memoryTypeBits,
13652 userDataStr.GetString());
// Appends a vmaAllocateMemoryPages row; the variable-length allocation handle list is
// written via PrintPointerList, followed by the user-data string.
13656 void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
13657 const VkMemoryRequirements& vkMemReq,
13659 uint64_t allocationCount,
13662 CallParams callParams;
13663 GetBasicParams(callParams);
13665 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13666 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13667 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
13669 vkMemReq.alignment,
13670 vkMemReq.memoryTypeBits,
13677 PrintPointerList(allocationCount, pAllocations);
13678 fprintf(m_File,
",%s\n", userDataStr.GetString());
// Appends a vmaAllocateMemoryForBuffer row, including the dedicated-allocation
// requirement/preference flags encoded as 0/1.
13682 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
13683 const VkMemoryRequirements& vkMemReq,
13684 bool requiresDedicatedAllocation,
13685 bool prefersDedicatedAllocation,
13689 CallParams callParams;
13690 GetBasicParams(callParams);
13692 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13693 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13694 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13696 vkMemReq.alignment,
13697 vkMemReq.memoryTypeBits,
13698 requiresDedicatedAllocation ? 1 : 0,
13699 prefersDedicatedAllocation ? 1 : 0,
13707 userDataStr.GetString());
// Appends a vmaAllocateMemoryForImage row — same shape as the buffer variant above.
13711 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
13712 const VkMemoryRequirements& vkMemReq,
13713 bool requiresDedicatedAllocation,
13714 bool prefersDedicatedAllocation,
13718 CallParams callParams;
13719 GetBasicParams(callParams);
13721 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13722 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13723 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13725 vkMemReq.alignment,
13726 vkMemReq.memoryTypeBits,
13727 requiresDedicatedAllocation ? 1 : 0,
13728 prefersDedicatedAllocation ? 1 : 0,
13736 userDataStr.GetString());
// Appends a vmaFreeMemory row (allocation handle argument line elided by extraction).
13740 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
13743 CallParams callParams;
13744 GetBasicParams(callParams);
13746 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13747 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaFreeMemoryPages row followed by the freed allocation handle list.
13752 void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
13753 uint64_t allocationCount,
13756 CallParams callParams;
13757 GetBasicParams(callParams);
13759 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13760 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
13761 PrintPointerList(allocationCount, pAllocations);
13762 fprintf(m_File,
"\n");
// Appends a vmaSetAllocationUserData row: allocation handle plus new user data
// (UserDataString argument lines elided by extraction).
13766 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
13768 const void* pUserData)
13770 CallParams callParams;
13771 GetBasicParams(callParams);
13773 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13774 UserDataString userDataStr(
13777 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13779 userDataStr.GetString());
// Appends a vmaCreateLostAllocation row.
13783 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
13786 CallParams callParams;
13787 GetBasicParams(callParams);
13789 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13790 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaMapMemory row.
13795 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
13798 CallParams callParams;
13799 GetBasicParams(callParams);
13801 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13802 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaUnmapMemory row.
13807 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
13810 CallParams callParams;
13811 GetBasicParams(callParams);
13813 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13814 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaFlushAllocation row with the flushed offset/size range.
13819 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
13820 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13822 CallParams callParams;
13823 GetBasicParams(callParams);
13825 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13826 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaInvalidateAllocation row with the invalidated offset/size range.
13833 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
13834 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13836 CallParams callParams;
13837 GetBasicParams(callParams);
13839 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13840 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaCreateBuffer row: buffer create-info, allocation create-info,
// resulting allocation handle and user data.
13847 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
13848 const VkBufferCreateInfo& bufCreateInfo,
13852 CallParams callParams;
13853 GetBasicParams(callParams);
13855 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13856 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13857 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13858 bufCreateInfo.flags,
13859 bufCreateInfo.size,
13860 bufCreateInfo.usage,
13861 bufCreateInfo.sharingMode,
13862 allocCreateInfo.
flags,
13863 allocCreateInfo.
usage,
13867 allocCreateInfo.
pool,
13869 userDataStr.GetString());
// Appends a vmaCreateImage row: full image create-info (type, format, extent, mips,
// layers, samples, tiling, usage, layout), allocation create-info, and user data.
13873 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
13874 const VkImageCreateInfo& imageCreateInfo,
13878 CallParams callParams;
13879 GetBasicParams(callParams);
13881 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13882 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13883 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13884 imageCreateInfo.flags,
13885 imageCreateInfo.imageType,
13886 imageCreateInfo.format,
13887 imageCreateInfo.extent.width,
13888 imageCreateInfo.extent.height,
13889 imageCreateInfo.extent.depth,
13890 imageCreateInfo.mipLevels,
13891 imageCreateInfo.arrayLayers,
13892 imageCreateInfo.samples,
13893 imageCreateInfo.tiling,
13894 imageCreateInfo.usage,
13895 imageCreateInfo.sharingMode,
13896 imageCreateInfo.initialLayout,
13897 allocCreateInfo.
flags,
13898 allocCreateInfo.
usage,
13902 allocCreateInfo.
pool,
13904 userDataStr.GetString());
// Appends a vmaDestroyBuffer row.
13908 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
13911 CallParams callParams;
13912 GetBasicParams(callParams);
13914 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13915 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaDestroyImage row.
13920 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
13923 CallParams callParams;
13924 GetBasicParams(callParams);
13926 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13927 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaTouchAllocation row.
13932 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
13935 CallParams callParams;
13936 GetBasicParams(callParams);
13938 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13939 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaGetAllocationInfo row.
13944 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
13947 CallParams callParams;
13948 GetBasicParams(callParams);
13950 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13951 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaMakePoolAllocationsLost row.
13956 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
13959 CallParams callParams;
13960 GetBasicParams(callParams);
13962 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13963 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaDefragmentationBegin row: flags, then two pointer lists (allocations and
// pools — PrintPointerList calls elided by extraction), then move budgets and handles.
13968 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
13972 CallParams callParams;
13973 GetBasicParams(callParams);
13975 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13976 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
13979 fprintf(m_File,
",");
13981 fprintf(m_File,
",%llu,%u,%llu,%u,%p,%p\n",
// Appends a vmaDefragmentationEnd row with the defragmentation context handle.
13991 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
13994 CallParams callParams;
13995 GetBasicParams(callParams);
13997 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13998 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
// Fragment of UserDataString's constructor (signature elided by extraction): when the
// allocation flags mark pUserData as a string, use it directly; otherwise format the
// raw pointer value into the small fixed buffer m_PtrStr.
14005 if(pUserData != VMA_NULL)
14009 m_Str = (
const char*)pUserData;
// Non-string user data: print the pointer value itself.
14013 sprintf_s(m_PtrStr,
"%p", pUserData);
// Writes the "Config,Begin"… "Config,End" section of the trace: physical-device
// properties and limits, the full memory heap/type table, enabled extensions, and the
// compile-time VMA_DEBUG_* macro values — everything a replayer needs to reproduce
// allocator behavior.
14023 void VmaRecorder::WriteConfiguration(
14024 const VkPhysicalDeviceProperties& devProps,
14025 const VkPhysicalDeviceMemoryProperties& memProps,
14026 bool dedicatedAllocationExtensionEnabled,
14027 bool bindMemory2ExtensionEnabled)
14029 fprintf(m_File,
"Config,Begin\n");
// Physical device identity.
14031 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
14032 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
14033 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
14034 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
14035 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
14036 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
// Limits that influence allocation decisions.
14038 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
14039 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
14040 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// Memory heap table.
14042 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
14043 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
14045 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
14046 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
// Memory type table.
14048 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
14049 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
14051 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
14052 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
// Extensions actually enabled for this allocator instance.
14055 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
14056 fprintf(m_File,
"Extension,VK_KHR_bind_memory2,%u\n", bindMemory2ExtensionEnabled ? 1 : 0);
// Compile-time configuration macros baked into this build.
14058 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
14059 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
14060 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
14061 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
14062 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
14063 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
14064 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
14065 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
14066 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
14068 fprintf(m_File,
"Config,End\n");
// Fills the per-call trace parameters: calling thread id and seconds elapsed since the
// recorder was initialized (QueryPerformanceCounter relative to m_StartCounter / m_Freq).
14071 void VmaRecorder::GetBasicParams(CallParams& outParams)
14073 outParams.threadId = GetCurrentThreadId();
14075 LARGE_INTEGER counter;
14076 QueryPerformanceCounter(&counter);
14077 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
// Writes a space-separated list of allocation handles ("%p %p …") to the trace file.
// NOTE(review): the guard around the first element (for count == 0) appears to be elided
// by the extraction — confirm against the full source before assuming pItems[0] is
// accessed unconditionally.
14080 void VmaRecorder::PrintPointerList(uint64_t count,
const VmaAllocation* pItems)
14084 fprintf(m_File,
"%p", pItems[0]);
14085 for(uint64_t i = 1; i < count; ++i)
14087 fprintf(m_File,
" %p", pItems[i]);
// Flush(): flushes the trace file when configured to do so (body elided by extraction).
14092 void VmaRecorder::Flush()
// End of the recording section; below: VmaAllocationObjectAllocator — a mutex-protected
// pool allocator for VmaAllocation_T objects (block capacity 1024).
14100 #endif // #if VMA_RECORDING_ENABLED 14105 VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks) :
14106 m_Allocator(pAllocationCallbacks, 1024)
// Allocate(): thread-safe acquisition of one VmaAllocation_T from the pool.
14112 VmaMutexLock mutexLock(m_Mutex);
14113 return m_Allocator.Alloc();
// Free(): thread-safe return of an allocation object to the pool.
14116 void VmaAllocationObjectAllocator::Free(
VmaAllocation hAlloc)
14118 VmaMutexLock mutexLock(m_Mutex);
14119 m_Allocator.Free(hAlloc);
// Fragment of the VmaAllocator_T constructor (the signature and the first initializer
// lines are elided by extraction). Initializes members from VmaAllocatorCreateInfo,
// validates macro/extension configuration, queries device properties, applies per-heap
// size limits, creates one default VmaBlockVector and dedicated-allocation vector per
// memory type, and optionally starts the call recorder.
14129 m_hDevice(pCreateInfo->device),
14130 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
14131 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
14132 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
14133 m_AllocationObjectAllocator(&m_AllocationCallbacks),
14134 m_PreferredLargeHeapBlockSize(0),
14135 m_PhysicalDevice(pCreateInfo->physicalDevice),
14136 m_CurrentFrameIndex(0),
14137 m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
14138 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
14141 ,m_pRecorder(VMA_NULL)
// Corruption detection writes uint32 magic values into the debug margin, so the margin
// must be a multiple of 4 bytes.
14144 if(VMA_DEBUG_DETECT_CORRUPTION)
14147 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
// Fail loudly if the caller requested extension-backed features that were compiled out.
14152 #if !(VMA_DEDICATED_ALLOCATION) 14155 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
14158 #if !(VMA_BIND_MEMORY2) 14161 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_BIND_MEMORY2_BIT set but required extension is disabled by preprocessor macros.");
// Zero all cached device data and per-type containers before filling them in.
14165 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
14166 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
14167 memset(&m_MemProps, 0,
sizeof(m_MemProps));
14169 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
14170 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// Default: no artificial heap size limit.
14172 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
14174 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
14185 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
14186 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Alignment-related values must be powers of two for the suballocation math to hold.
14188 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
14189 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
14190 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
14191 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
// Apply optional caller-provided per-heap size limits, clamping the reported heap size.
14198 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
14200 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
14201 if(limit != VK_WHOLE_SIZE)
14203 m_HeapSizeLimit[heapIndex] = limit;
14204 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
14206 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default block vector + one dedicated-allocation list per memory type.
14212 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14214 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
14216 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
14220 preferredBlockSize,
14223 GetBufferImageGranularity(),
14230 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
14237 VkResult res = VK_SUCCESS;
// Optional call recording: only available when VMA_RECORDING_ENABLED is compiled in.
14242 #if VMA_RECORDING_ENABLED 14243 m_pRecorder = vma_new(
this, VmaRecorder)();
14245 if(res != VK_SUCCESS)
14249 m_pRecorder->WriteConfiguration(
14250 m_PhysicalDeviceProperties,
14252 m_UseKhrDedicatedAllocation,
14253 m_UseKhrBindMemory2);
14254 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
14256 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
14257 return VK_ERROR_FEATURE_NOT_PRESENT;
// Destructor: stops the recorder (if any), asserts no custom pools or dedicated
// allocations were leaked by the application, then destroys the per-memory-type
// dedicated-allocation lists and block vectors in reverse order.
14264 VmaAllocator_T::~VmaAllocator_T()
14266 #if VMA_RECORDING_ENABLED 14267 if(m_pRecorder != VMA_NULL)
14269 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
14270 vma_delete(
this, m_pRecorder);
// All custom pools must have been destroyed by the application by now.
14274 VMA_ASSERT(m_Pools.empty());
14276 for(
size_t i = GetMemoryTypeCount(); i--; )
14278 if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
14280 VMA_ASSERT(0 &&
"Unfreed dedicated allocations found.");
14283 vma_delete(
this, m_pDedicatedAllocations[i]);
14284 vma_delete(
this, m_pBlockVectors[i]);
// Populates m_VulkanFunctions in three stages: (1) statically-linked entry points when
// VMA_STATIC_VULKAN_FUNCTIONS == 1, with extension functions fetched via
// vkGetDeviceProcAddr; (2) caller-provided overrides from pVulkanFunctions (non-null
// entries only); (3) asserts that every required pointer ended up non-null.
14288 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
// Stage 1: take addresses of the statically linked Vulkan functions.
14290 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14291 m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
14292 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
14293 m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
14294 m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
14295 m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
14296 m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
14297 m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
14298 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
14299 m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
14300 m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
14301 m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
14302 m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
14303 m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
14304 m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
14305 m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
14306 m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
14307 m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
// Extension functions cannot be statically linked — fetch them from the device.
14308 #if VMA_DEDICATED_ALLOCATION 14309 if(m_UseKhrDedicatedAllocation)
14311 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
14312 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
14313 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
14314 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
14316 #endif // #if VMA_DEDICATED_ALLOCATION 14317 #if VMA_BIND_MEMORY2 14318 if(m_UseKhrBindMemory2)
14320 m_VulkanFunctions.vkBindBufferMemory2KHR =
14321 (PFN_vkBindBufferMemory2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkBindBufferMemory2KHR");
14322 m_VulkanFunctions.vkBindImageMemory2KHR =
14323 (PFN_vkBindImageMemory2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkBindImageMemory2KHR");
// Stage 2: caller-supplied overrides win over anything set above (null entries skipped).
14325 #endif // #if VMA_BIND_MEMORY2 14326 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14328 #define VMA_COPY_IF_NOT_NULL(funcName) \ 14329 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 14331 if(pVulkanFunctions != VMA_NULL)
14333 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
14334 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
14335 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
14336 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
14337 VMA_COPY_IF_NOT_NULL(vkMapMemory);
14338 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
14339 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
14340 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
14341 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
14342 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
14343 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
14344 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
14345 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
14346 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
14347 VMA_COPY_IF_NOT_NULL(vkCreateImage);
14348 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
14349 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
14350 #if VMA_DEDICATED_ALLOCATION 14351 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
14352 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
14354 #if VMA_BIND_MEMORY2 14355 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory2KHR);
14356 VMA_COPY_IF_NOT_NULL(vkBindImageMemory2KHR);
// Stage 3: every required function pointer must be set by now, from whichever source.
14360 #undef VMA_COPY_IF_NOT_NULL 14364 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
14365 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
14366 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
14367 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
14368 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
14369 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
14370 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
14371 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
14372 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
14373 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
14374 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
14375 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
14376 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
14377 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
14378 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
14379 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
14380 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
14381 #if VMA_DEDICATED_ALLOCATION 14382 if(m_UseKhrDedicatedAllocation)
14384 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
14385 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
14388 #if VMA_BIND_MEMORY2 14389 if(m_UseKhrBindMemory2)
14391 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL);
14392 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL);
// Chooses the preferred VkDeviceMemory block size for a memory type: for small heaps
// (<= VMA_SMALL_HEAP_MAX_SIZE) use 1/8 of the heap so one block never dominates it;
// otherwise use the configured large-heap block size.
14397 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
14399 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14400 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
14401 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
14402 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates allocationCount allocations from one specific memory type: prefers a
// dedicated VkDeviceMemory when forced by debug macro / caller request / large size,
// otherwise suballocates from the type's default block vector, falling back to dedicated
// memory if block allocation fails.
// NOTE(review): the size/createInfo parameters and several branch bodies are elided by
// the extraction — this is not the complete function.
14405 VkResult VmaAllocator_T::AllocateMemoryOfType(
14407 VkDeviceSize alignment,
14408 bool dedicatedAllocation,
14409 VkBuffer dedicatedBuffer,
14410 VkImage dedicatedImage,
14412 uint32_t memTypeIndex,
14413 VmaSuballocationType suballocType,
14414 size_t allocationCount,
14417 VMA_ASSERT(pAllocations != VMA_NULL);
14418 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
// Mapping-related flags are meaningless on non-HOST_VISIBLE memory (flag stripping
// around this condition elided by extraction).
14424 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
14429 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
14430 VMA_ASSERT(blockVector);
14432 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
// Dedicated memory is preferred when forced by debug macro, requested by the caller,
// or when the request is large relative to the block size.
14433 bool preferDedicatedMemory =
14434 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
14435 dedicatedAllocation ||
14437 size > preferredBlockSize / 2;
14439 if(preferDedicatedMemory &&
14441 finalCreateInfo.
pool == VK_NULL_HANDLE)
// NEVER_ALLOCATE forbids creating new VkDeviceMemory — cannot satisfy dedicated.
14450 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14454 return AllocateDedicatedMemory(
// Normal path: suballocate from the block vector.
14469 VkResult res = blockVector->Allocate(
14470 m_CurrentFrameIndex.load(),
14477 if(res == VK_SUCCESS)
14485 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Block allocation failed — fall back to dedicated memory.
14489 res = AllocateDedicatedMemory(
14495 finalCreateInfo.pUserData,
14500 if(res == VK_SUCCESS)
14503 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
14509 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates allocationCount separate VkDeviceMemory blocks (one per allocation),
// optionally chaining VkMemoryDedicatedAllocateInfoKHR for a specific buffer/image.
// On success registers all allocations in the per-type dedicated list; on failure rolls
// back every page already allocated, in reverse order.
14516 VkResult VmaAllocator_T::AllocateDedicatedMemory(
14518 VmaSuballocationType suballocType,
14519 uint32_t memTypeIndex,
14521 bool isUserDataString,
14523 VkBuffer dedicatedBuffer,
14524 VkImage dedicatedImage,
14525 size_t allocationCount,
14528 VMA_ASSERT(allocationCount > 0 && pAllocations);
14530 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
14531 allocInfo.memoryTypeIndex = memTypeIndex;
14532 allocInfo.allocationSize = size;
// Chain dedicated-allocation info when the extension is in use; buffer and image are
// mutually exclusive.
14534 #if VMA_DEDICATED_ALLOCATION 14535 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
14536 if(m_UseKhrDedicatedAllocation)
14538 if(dedicatedBuffer != VK_NULL_HANDLE)
14540 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
14541 dedicatedAllocInfo.buffer = dedicatedBuffer;
14542 allocInfo.pNext = &dedicatedAllocInfo;
14544 else if(dedicatedImage != VK_NULL_HANDLE)
14546 dedicatedAllocInfo.image = dedicatedImage;
14547 allocInfo.pNext = &dedicatedAllocInfo;
// Allocate page by page; stop on first failure so rollback below knows how far we got.
14550 #endif // #if VMA_DEDICATED_ALLOCATION 14553 VkResult res = VK_SUCCESS;
14554 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14556 res = AllocateDedicatedMemoryPage(
14564 pAllocations + allocIndex);
14565 if(res != VK_SUCCESS)
// Success: register all new allocations in the sorted per-type dedicated list.
14571 if(res == VK_SUCCESS)
14575 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14576 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
14577 VMA_ASSERT(pDedicatedAllocations);
14578 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14580 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
14584 VMA_DEBUG_LOG(
" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
// Failure: free every page that succeeded, newest first, and zero the output array.
14589 while(allocIndex--)
14592 VkDeviceMemory hMemory = currAlloc->GetMemory();
14604 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
14606 currAlloc->SetUserData(
this, VMA_NULL);
14608 m_AllocationObjectAllocator.Free(currAlloc);
14611 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates a single dedicated VkDeviceMemory page: vkAllocateMemory, optional
// persistent map, then constructs and initializes the VmaAllocation_T object.
// Frees the memory again if mapping fails.
14617 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
14619 VmaSuballocationType suballocType,
14620 uint32_t memTypeIndex,
14621 const VkMemoryAllocateInfo& allocInfo,
14623 bool isUserDataString,
14627 VkDeviceMemory hMemory = VK_NULL_HANDLE;
14628 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
14631 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Persistently map the whole range when the caller requested a mapped allocation
// (the map condition line is elided by extraction).
14635 void* pMappedData = VMA_NULL;
14638 res = (*m_VulkanFunctions.vkMapMemory)(
14647 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
// Mapping failed — release the just-allocated memory before returning the error.
14648 FreeVulkanMemory(memTypeIndex, size, hMemory);
// Construct the allocation object from the pooled object allocator.
14653 *pAllocation = m_AllocationObjectAllocator.Allocate();
14654 (*pAllocation)->Ctor(m_CurrentFrameIndex.load(), isUserDataString);
14655 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
14656 (*pAllocation)->SetUserData(
this, pUserData);
// Optionally fill new memory with a debug pattern to catch use-before-write bugs.
14657 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14659 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Queries memory requirements for a buffer. With VK_KHR_dedicated_allocation enabled,
// uses vkGetBufferMemoryRequirements2KHR and also reports whether the driver
// requires/prefers a dedicated allocation; otherwise falls back to the core query with
// both flags false.
14665 void VmaAllocator_T::GetBufferMemoryRequirements(
14667 VkMemoryRequirements& memReq,
14668 bool& requiresDedicatedAllocation,
14669 bool& prefersDedicatedAllocation)
const 14671 #if VMA_DEDICATED_ALLOCATION 14672 if(m_UseKhrDedicatedAllocation)
14674 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
14675 memReqInfo.buffer = hBuffer;
// Chain VkMemoryDedicatedRequirementsKHR to receive the dedicated-allocation hints.
14677 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14679 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14680 memReq2.pNext = &memDedicatedReq;
14682 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14684 memReq = memReq2.memoryRequirements;
14685 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14686 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core Vulkan 1.0 query; no dedicated-allocation information available.
14689 #endif // #if VMA_DEDICATED_ALLOCATION 14691 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
14692 requiresDedicatedAllocation =
false;
14693 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR with VkMemoryDedicatedRequirementsKHR when the
// extension is enabled, otherwise the core query with both dedicated flags false.
14697 void VmaAllocator_T::GetImageMemoryRequirements(
14699 VkMemoryRequirements& memReq,
14700 bool& requiresDedicatedAllocation,
14701 bool& prefersDedicatedAllocation)
const 14703 #if VMA_DEDICATED_ALLOCATION 14704 if(m_UseKhrDedicatedAllocation)
14706 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
14707 memReqInfo.image = hImage;
14709 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14711 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14712 memReq2.pNext = &memDedicatedReq;
14714 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14716 memReq = memReq2.memoryRequirements;
14717 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14718 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core Vulkan 1.0 query.
14721 #endif // #if VMA_DEDICATED_ALLOCATION 14723 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
14724 requiresDedicatedAllocation =
false;
14725 prefersDedicatedAllocation =
false;
// VmaAllocator_T::AllocateMemory — central entry point that validates the
// request, dispatches to a custom pool's block vector when createInfo.pool is
// set, and otherwise walks acceptable memory types calling
// AllocateMemoryOfType, masking out each failed type and retrying.
// NOTE(review): this region is garbled — the embedded original line numbers
// jump (e.g. 14737 -> 14740), so several flag checks, argument lists, and the
// memory-type loop header are missing here. Comments describe only what the
// visible code shows; reconcile against the upstream file before editing.
14729 VkResult VmaAllocator_T::AllocateMemory(
14730 const VkMemoryRequirements& vkMemReq,
14731 bool requiresDedicatedAllocation,
14732 bool prefersDedicatedAllocation,
14733 VkBuffer dedicatedBuffer,
14734 VkImage dedicatedImage,
14736 VmaSuballocationType suballocType,
14737 size_t allocationCount,
// Pre-clear the output array so callers see null handles on failure.
14740 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Vulkan guarantees alignments are powers of two; a zero-size request is a
// caller bug, reported via the validation-failed error code.
14742 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
14744 if(vkMemReq.size == 0)
14746 return VK_ERROR_VALIDATION_FAILED_EXT;
// The following asserts reject mutually-exclusive createInfo flag
// combinations (the conditions themselves were dropped by extraction).
14751 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
14752 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14757 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
14758 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// A driver-required dedicated allocation cannot be combined with
// NEVER_ALLOCATE or with a custom pool.
14760 if(requiresDedicatedAllocation)
14764 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
14765 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14767 if(createInfo.
pool != VK_NULL_HANDLE)
14769 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
14770 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14773 if((createInfo.
pool != VK_NULL_HANDLE) &&
14776 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
14777 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom-pool path: alignment is raised to the pool memory type's minimum,
// then the request is forwarded to the pool's block vector.
14780 if(createInfo.
pool != VK_NULL_HANDLE)
14782 const VkDeviceSize alignmentForPool = VMA_MAX(
14783 vkMemReq.alignment,
14784 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
14789 (m_MemProps.memoryTypes[createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
14794 return createInfo.
pool->m_BlockVector.Allocate(
14795 m_CurrentFrameIndex.load(),
// Default-pool path: iterate candidate memory types from
// vkMemReq.memoryTypeBits (the find-index call selecting memTypeIndex was
// dropped by extraction), retrying with the failed type masked out.
14806 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
14807 uint32_t memTypeIndex = UINT32_MAX;
14809 if(res == VK_SUCCESS)
14811 VkDeviceSize alignmentForMemType = VMA_MAX(
14812 vkMemReq.alignment,
14813 GetMemoryTypeMinAlignment(memTypeIndex));
14815 res = AllocateMemoryOfType(
14817 alignmentForMemType,
14818 requiresDedicatedAllocation || prefersDedicatedAllocation,
14827 if(res == VK_SUCCESS)
// Allocation from this memory type failed: remove it from the candidate
// set and look for the next best type.
14837 memoryTypeBits &= ~(1u << memTypeIndex);
14840 if(res == VK_SUCCESS)
14842 alignmentForMemType = VMA_MAX(
14843 vkMemReq.alignment,
14844 GetMemoryTypeMinAlignment(memTypeIndex));
14846 res = AllocateMemoryOfType(
14848 alignmentForMemType,
14849 requiresDedicatedAllocation || prefersDedicatedAllocation,
14858 if(res == VK_SUCCESS)
// No memory type could satisfy the request.
14868 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// VmaAllocator_T::FreeMemory — releases an array of allocations in reverse
// order. Each allocation is touched (to update its last-use frame), optionally
// filled with the "destroyed" debug pattern, returned to its owning block
// vector (custom pool or default per-memory-type vector) or freed as a
// dedicated allocation, and finally its metadata object is destroyed.
// NOTE(review): braces and `break` statements were dropped by extraction
// (embedded line numbers jump); comments reflect only the visible code.
14879 void VmaAllocator_T::FreeMemory(
14880 size_t allocationCount,
14883 VMA_ASSERT(pAllocations);
// Reverse iteration over the input array.
14885 for(
size_t allocIndex = allocationCount; allocIndex--; )
14889 if(allocation != VK_NULL_HANDLE)
// TouchAllocation returns whether the allocation is still valid (not lost);
// the underlying memory is freed only in that case.
14891 if(TouchAllocation(allocation))
14893 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14895 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
14898 switch(allocation->GetType())
14900 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14902 VmaBlockVector* pBlockVector = VMA_NULL;
14903 VmaPool hPool = allocation->GetBlock()->GetParentPool();
// Block allocations belong either to a custom pool's vector or to the
// default vector for their memory type.
14904 if(hPool != VK_NULL_HANDLE)
14906 pBlockVector = &hPool->m_BlockVector;
14910 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
14911 pBlockVector = m_pBlockVectors[memTypeIndex];
14913 pBlockVector->Free(allocation);
14916 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14917 FreeDedicatedMemory(allocation);
// Metadata teardown happens even for lost allocations: clear user data,
// run the destructor, and return the object to the allocation-object pool.
14924 allocation->SetUserData(
this, VMA_NULL);
14925 allocation->Dtor();
14926 m_AllocationObjectAllocator.Free(allocation);
14931 VkResult VmaAllocator_T::ResizeAllocation(
14933 VkDeviceSize newSize)
14936 if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
14938 return VK_ERROR_VALIDATION_FAILED_EXT;
14940 if(newSize == alloc->GetSize())
14944 return VK_ERROR_OUT_OF_POOL_MEMORY;
// VmaAllocator_T::CalculateStats — aggregates statistics across default block
// vectors, custom pools, and dedicated allocations into pStats, then
// post-processes the totals, per-memory-type, and per-heap entries.
// NOTE(review): initialization loop bodies and some braces were dropped by
// extraction (embedded line numbers jump); comments reflect the visible code.
14947 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Zero out the accumulators: total, then every memory type and heap slot.
14950 InitStatInfo(pStats->
total);
14951 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
14953 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Accumulate stats from the default per-memory-type block vectors.
14957 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14959 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
14960 VMA_ASSERT(pBlockVector);
14961 pBlockVector->AddStats(pStats);
// Accumulate stats from custom pools, under a shared (read) pools lock.
14966 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
14967 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
14969 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
// Accumulate stats from dedicated allocations, per memory type, each under
// that type's dedicated-allocations read lock.
14974 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14976 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14977 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14978 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
14979 VMA_ASSERT(pDedicatedAllocVector);
14980 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
14983 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
14984 VmaAddStatInfo(pStats->
total, allocationStatInfo);
14985 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
14986 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages/final fields for every filled entry.
14991 VmaPostprocessCalcStatInfo(pStats->
total);
14992 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
14993 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
14994 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
14995 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
14998 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// VmaAllocator_T::DefragmentationBegin — creates a defragmentation context,
// registers the requested allocations/pools with it, and runs the first
// Defragment step. If the result is anything other than VK_NOT_READY the
// context is destroyed immediately and *pContext is reset to null, so callers
// only hold a live context while an incremental defragmentation is pending.
// NOTE(review): the parameter list and several argument lines were dropped by
// extraction (embedded line numbers jump 15000 -> 15010, 15014 -> 15017).
15000 VkResult VmaAllocator_T::DefragmentationBegin(
15010 *pContext = vma_new(
this, VmaDefragmentationContext_T)(
15011 this, m_CurrentFrameIndex.load(), info.
flags, pStats);
15014 (*pContext)->AddAllocations(
15017 VkResult res = (*pContext)->Defragment(
// VK_NOT_READY means more steps remain: keep the context alive for the
// caller. Any other result (success or failure) ends the operation here.
15022 if(res != VK_NOT_READY)
15024 vma_delete(
this, *pContext);
15025 *pContext = VMA_NULL;
15031 VkResult VmaAllocator_T::DefragmentationEnd(
15034 vma_delete(
this, context);
// Body of what is presumably VmaAllocator_T::GetAllocationInfo — the
// signature lines were dropped by extraction (embedded numbers jump from
// 15034 to 15040); confirm against the upstream file. Fills *pAllocationInfo
// from hAllocation, with special handling for allocations that can become
// lost: those are touched via a compare-exchange on the last-use frame index,
// and a lost allocation is reported with null memory/zero offset.
15040 if(hAllocation->CanBecomeLost())
// Lost-able path: loop (framing dropped) until the frame index is observed
// as lost, already current, or successfully bumped to the current frame.
15046 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15047 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15050 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Allocation is lost: report size and user data but no backing memory.
15054 pAllocationInfo->
offset = 0;
15055 pAllocationInfo->
size = hAllocation->GetSize();
15057 pAllocationInfo->
pUserData = hAllocation->GetUserData();
15060 else if(localLastUseFrameIndex == localCurrFrameIndex)
// Already touched this frame: report full, live information.
15062 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
15063 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
15064 pAllocationInfo->
offset = hAllocation->GetOffset();
15065 pAllocationInfo->
size = hAllocation->GetSize();
15067 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise try to advance the last-use frame index; on CAS failure the
// loop retries with the freshly observed value.
15072 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15074 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-able path. The frame-index bookkeeping below is compiled only
// with stats-string support; it asserts the allocation is not lost and
// keeps the last-use frame index current.
15081 #if VMA_STATS_STRING_ENABLED 15082 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15083 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15086 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
15087 if(localLastUseFrameIndex == localCurrFrameIndex)
15093 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15095 localLastUseFrameIndex = localCurrFrameIndex;
// Report full information, including the mapped pointer.
15101 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
15102 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
15103 pAllocationInfo->
offset = hAllocation->GetOffset();
15104 pAllocationInfo->
size = hAllocation->GetSize();
15105 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
15106 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// VmaAllocator_T::TouchAllocation — marks hAllocation as used in the current
// frame. For lost-able allocations it CAS-updates the last-use frame index
// and reports whether the allocation is still valid (the return statements
// were dropped by extraction; per the visible structure, "lost" and
// "current" are terminal cases of the retry loop). For ordinary allocations
// the same bookkeeping runs only when stats-string support is compiled in.
15110 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
15113 if(hAllocation->CanBecomeLost())
15115 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15116 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Terminal: allocation already lost.
15119 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Terminal: already touched during this frame.
15123 else if(localLastUseFrameIndex == localCurrFrameIndex)
// Try to bump the frame index; retry with the new observed value on failure.
15129 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15131 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-able path: frame-index bookkeeping for stats only.
15138 #if VMA_STATS_STRING_ENABLED 15139 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15140 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15143 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
15144 if(localLastUseFrameIndex == localCurrFrameIndex)
15150 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15152 localLastUseFrameIndex = localCurrFrameIndex;
// Body of what is presumably VmaAllocator_T::CreatePool — the signature and
// the newCreateInfo normalization lines were dropped by extraction (embedded
// numbers jump 15164 -> 15174 -> 15177); confirm against the upstream file.
// Creates a VmaPool_T, pre-creates its minimum block count, and registers it
// (sorted, with a fresh id) in m_Pools under the pools write lock.
15164 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
// Validation of pCreateInfo happens in the dropped lines above this return.
15174 return VK_ERROR_INITIALIZATION_FAILED;
15177 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
15179 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
// Eagerly create the pool's minimum number of blocks; on failure the pool
// object is destroyed and the error propagated.
15181 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
15182 if(res != VK_SUCCESS)
15184 vma_delete(
this, *pPool);
// Register the new pool: assign an id and keep m_Pools sorted.
15191 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15192 (*pPool)->SetId(m_NextPoolId++);
15193 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
15199 void VmaAllocator_T::DestroyPool(
VmaPool pool)
15203 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15204 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
15205 VMA_ASSERT(success &&
"Pool not found in Allocator.");
15208 vma_delete(
this, pool);
15213 pool->m_BlockVector.GetPoolStats(pPoolStats);
15216 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
15218 m_CurrentFrameIndex.store(frameIndex);
15221 void VmaAllocator_T::MakePoolAllocationsLost(
15223 size_t* pLostAllocationCount)
15225 hPool->m_BlockVector.MakePoolAllocationsLost(
15226 m_CurrentFrameIndex.load(),
15227 pLostAllocationCount);
15230 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
15232 return hPool->m_BlockVector.CheckCorruption();
// VmaAllocator_T::CheckCorruption — runs corruption checks over the default
// block vectors of the memory types selected by memoryTypeBits and over all
// custom pools whose memory type is selected. Starts from
// VK_ERROR_FEATURE_NOT_PRESENT and upgrades to VK_SUCCESS once any vector
// actually performed a check.
// NOTE(review): switch framing and the non-listed case arms were dropped by
// extraction (embedded numbers jump, e.g. 15246 -> 15249).
15235 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
15237 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default per-memory-type block vectors.
15240 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15242 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
15244 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
15245 VMA_ASSERT(pBlockVector);
15246 VkResult localRes = pBlockVector->CheckCorruption();
// FEATURE_NOT_PRESENT from one vector does not downgrade an earlier success.
15249 case VK_ERROR_FEATURE_NOT_PRESENT:
15252 finalRes = VK_SUCCESS;
// Custom pools, under the pools read lock.
15262 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15263 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
15265 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
15267 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
15270 case VK_ERROR_FEATURE_NOT_PRESENT:
15273 finalRes = VK_SUCCESS;
15285 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
15287 *pAllocation = m_AllocationObjectAllocator.Allocate();
15288 (*pAllocation)->Ctor(VMA_FRAME_INDEX_LOST,
false);
15289 (*pAllocation)->InitLost();
15292 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
15294 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
15297 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15299 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15300 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
15302 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15303 if(res == VK_SUCCESS)
15305 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
15310 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
15315 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15318 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
15320 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
15326 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
15328 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
15330 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
15333 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
15335 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
15336 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15338 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15339 m_HeapSizeLimit[heapIndex] += size;
15343 VkResult VmaAllocator_T::BindVulkanBuffer(
15344 VkDeviceMemory memory,
15345 VkDeviceSize memoryOffset,
15349 if(pNext != VMA_NULL)
15351 #if VMA_BIND_MEMORY2 15352 if(m_UseKhrBindMemory2 && m_VulkanFunctions.vkBindBufferMemory2KHR != VMA_NULL)
15354 VkBindBufferMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_BUFFER_MEMORY_INFO_KHR };
15355 bindBufferMemoryInfo.pNext = pNext;
15356 bindBufferMemoryInfo.buffer = buffer;
15357 bindBufferMemoryInfo.memory = memory;
15358 bindBufferMemoryInfo.memoryOffset = memoryOffset;
15359 return (*m_VulkanFunctions.vkBindBufferMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
15362 #endif // #if VMA_BIND_MEMORY2 15364 return VK_ERROR_EXTENSION_NOT_PRESENT;
15369 return (*m_VulkanFunctions.vkBindBufferMemory)(m_hDevice, buffer, memory, memoryOffset);
15373 VkResult VmaAllocator_T::BindVulkanImage(
15374 VkDeviceMemory memory,
15375 VkDeviceSize memoryOffset,
15379 if(pNext != VMA_NULL)
15381 #if VMA_BIND_MEMORY2 15382 if(m_UseKhrBindMemory2 && m_VulkanFunctions.vkBindImageMemory2KHR != VMA_NULL)
15384 VkBindImageMemoryInfoKHR bindBufferMemoryInfo = { VK_STRUCTURE_TYPE_BIND_IMAGE_MEMORY_INFO_KHR };
15385 bindBufferMemoryInfo.pNext = pNext;
15386 bindBufferMemoryInfo.image = image;
15387 bindBufferMemoryInfo.memory = memory;
15388 bindBufferMemoryInfo.memoryOffset = memoryOffset;
15389 return (*m_VulkanFunctions.vkBindImageMemory2KHR)(m_hDevice, 1, &bindBufferMemoryInfo);
15392 #endif // #if VMA_BIND_MEMORY2 15394 return VK_ERROR_EXTENSION_NOT_PRESENT;
15399 return (*m_VulkanFunctions.vkBindImageMemory)(m_hDevice, image, memory, memoryOffset);
/*
Maps the memory backing hAllocation and returns a pointer to the
allocation's own bytes in *ppData. Lost-able allocations cannot be mapped.
Block (sub-)allocations map their whole owning block (reference-counted via
pBlock->Map) and offset the returned pointer; dedicated allocations map
their own VkDeviceMemory.
*/
VkResult VmaAllocator_T::Map(VmaAllocation hAllocation, void** ppData)
{
    if(hAllocation->CanBecomeLost())
    {
        return VK_ERROR_MEMORY_MAP_FAILED;
    }

    switch(hAllocation->GetType())
    {
    case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
        {
            VmaDeviceMemoryBlock* const pBlock = hAllocation->GetBlock();
            char* pBytes = VMA_NULL;
            VkResult res = pBlock->Map(this, 1, (void**)&pBytes);
            if(res == VK_SUCCESS)
            {
                // Adjust the block-base pointer to this suballocation.
                *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
                hAllocation->BlockAllocMap();
            }
            return res;
        }
    case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
        return hAllocation->DedicatedAllocMap(this, ppData);
    default:
        VMA_ASSERT(0);
        return VK_ERROR_MEMORY_MAP_FAILED;
    }
}
15434 switch(hAllocation->GetType())
15436 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15438 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15439 hAllocation->BlockAllocUnmap();
15440 pBlock->Unmap(
this, 1);
15443 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15444 hAllocation->DedicatedAllocUnmap(
this);
15451 VkResult VmaAllocator_T::BindBufferMemory(
15453 VkDeviceSize allocationLocalOffset,
15457 VkResult res = VK_SUCCESS;
15458 switch(hAllocation->GetType())
15460 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15461 res = BindVulkanBuffer(hAllocation->GetMemory(), allocationLocalOffset, hBuffer, pNext);
15463 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15465 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15466 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
15467 res = pBlock->BindBufferMemory(
this, hAllocation, allocationLocalOffset, hBuffer, pNext);
15476 VkResult VmaAllocator_T::BindImageMemory(
15478 VkDeviceSize allocationLocalOffset,
15482 VkResult res = VK_SUCCESS;
15483 switch(hAllocation->GetType())
15485 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15486 res = BindVulkanImage(hAllocation->GetMemory(), allocationLocalOffset, hImage, pNext);
15488 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15490 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15491 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
15492 res = pBlock->BindImageMemory(
this, hAllocation, allocationLocalOffset, hImage, pNext);
// VmaAllocator_T::FlushOrInvalidateAllocation — flushes or invalidates the
// mapped range of hAllocation when its memory type is non-coherent. The range
// is expanded to nonCoherentAtomSize boundaries as required by Vulkan; for
// block suballocations it is additionally translated by the allocation's
// offset within the block and clamped to the block size.
// NOTE(review): several framing lines (braces, `break`s, the op-switch
// header) were dropped by extraction; comments reflect the visible code.
15501 void VmaAllocator_T::FlushOrInvalidateAllocation(
15503 VkDeviceSize offset, VkDeviceSize size,
15504 VMA_CACHE_OPERATION op)
// Coherent memory types need no explicit flush/invalidate; skip entirely.
15506 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
15507 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
15509 const VkDeviceSize allocationSize = hAllocation->GetSize();
15510 VMA_ASSERT(offset <= allocationSize);
15512 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
15514 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
15515 memRange.memory = hAllocation->GetMemory();
15517 switch(hAllocation->GetType())
// Dedicated allocation: range is relative to the whole VkDeviceMemory.
15519 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15520 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15521 if(size == VK_WHOLE_SIZE)
15523 memRange.size = allocationSize - memRange.offset;
15527 VMA_ASSERT(offset + size <= allocationSize);
// Align the end up to the atom size, but never past the allocation end.
15528 memRange.size = VMA_MIN(
15529 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
15530 allocationSize - memRange.offset);
// Block suballocation: compute the range relative to the allocation first…
15534 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15537 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15538 if(size == VK_WHOLE_SIZE)
15540 size = allocationSize - offset;
15544 VMA_ASSERT(offset + size <= allocationSize);
15546 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
// …then translate into the block's memory and clamp to the block size.
// The allocation offset is already atom-aligned by construction (asserted).
15549 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
15550 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
15551 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
15552 memRange.offset += allocationOffset;
15553 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
// Dispatch on the requested cache operation.
15564 case VMA_CACHE_FLUSH:
15565 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
15567 case VMA_CACHE_INVALIDATE:
15568 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
15577 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
15579 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
15581 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
15583 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15584 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
15585 VMA_ASSERT(pDedicatedAllocations);
15586 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
15587 VMA_ASSERT(success);
15590 VkDeviceMemory hMemory = allocation->GetMemory();
15602 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
15604 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
15607 uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits()
const 15609 VkBufferCreateInfo dummyBufCreateInfo;
15610 VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);
15612 uint32_t memoryTypeBits = 0;
15615 VkBuffer buf = VK_NULL_HANDLE;
15616 VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
15617 m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
15618 if(res == VK_SUCCESS)
15621 VkMemoryRequirements memReq;
15622 (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
15623 memoryTypeBits = memReq.memoryTypeBits;
15626 (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
15629 return memoryTypeBits;
/*
Debug helper: fills the allocation's memory with the given byte pattern.
Only acts when VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, the allocation
cannot become lost, and its memory type is host-visible. The memory is
mapped, filled, flushed (for non-coherent types), and unmapped again.
*/
void VmaAllocator_T::FillAllocation(const VmaAllocation hAllocation, uint8_t pattern)
{
    if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
        !hAllocation->CanBecomeLost() &&
        (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
    {
        void* pData = VMA_NULL;
        VkResult res = Map(hAllocation, &pData);
        if(res == VK_SUCCESS)
        {
            memset(pData, (int)pattern, (size_t)hAllocation->GetSize());
            // Make the pattern visible to the device on non-coherent memory.
            FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
            Unmap(hAllocation);
        }
        else
        {
            VMA_ASSERT(0 && "VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
        }
    }
}
/*
Returns the cached set of memory types usable for GPU defragmentation,
computing it lazily on first use. UINT32_MAX is the "not yet computed"
sentinel; a concurrent first call may compute the value twice, which is
harmless because the result is deterministic.
*/
uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
{
    uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
    if(memoryTypeBits == UINT32_MAX)
    {
        memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
        m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
    }
    return memoryTypeBits;
}
// VmaAllocator_T::PrintDetailedMap — writes the detailed JSON map consumed by
// VmaDumpVis: a "DedicatedAllocations" object keyed by "Type <index>", a
// "DefaultPools" object with each non-empty default block vector, and a
// "Pools" object keyed by pool id. Compiled only with stats-string support.
// NOTE(review): many json.Begin/End framing calls were dropped by extraction
// (embedded line numbers jump); comments reflect the visible code.
15664 #if VMA_STATS_STRING_ENABLED 15666 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
// Section 1: dedicated allocations per memory type. The section header is
// emitted lazily on the first non-empty vector.
15668 bool dedicatedAllocationsStarted =
false;
15669 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15671 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15672 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
15673 VMA_ASSERT(pDedicatedAllocVector);
15674 if(pDedicatedAllocVector->empty() ==
false)
15676 if(dedicatedAllocationsStarted ==
false)
15678 dedicatedAllocationsStarted =
true;
15679 json.WriteString(
"DedicatedAllocations");
15680 json.BeginObject();
15683 json.BeginString(
"Type ");
15684 json.ContinueString(memTypeIndex);
15689 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
15691 json.BeginObject(
true);
15693 hAlloc->PrintParameters(json);
15700 if(dedicatedAllocationsStarted)
// Section 2: default per-memory-type block vectors (non-empty only),
// header emitted lazily like above.
15706 bool allocationsStarted =
false;
15707 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15709 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
15711 if(allocationsStarted ==
false)
15713 allocationsStarted =
true;
15714 json.WriteString(
"DefaultPools");
15715 json.BeginObject();
15718 json.BeginString(
"Type ");
15719 json.ContinueString(memTypeIndex);
15722 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
15725 if(allocationsStarted)
// Section 3: custom pools, keyed by pool id, under the pools read lock.
15733 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15734 const size_t poolCount = m_Pools.size();
15737 json.WriteString(
"Pools");
15738 json.BeginObject();
15739 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
15741 json.BeginString();
15742 json.ContinueString(m_Pools[poolIndex]->GetId());
15745 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
// End of the VMA_STATS_STRING_ENABLED section, followed by the body of what
// is presumably the public vmaCreateAllocator — its signature lines were
// dropped by extraction (embedded numbers jump 15752 -> 15761); confirm
// against the upstream file. Visible behavior: validate arguments, then
// construct-and-Init the allocator (the vma_new construction line is among
// those dropped).
15752 #endif // #if VMA_STATS_STRING_ENABLED 15761 VMA_ASSERT(pCreateInfo && pAllocator);
15762 VMA_DEBUG_LOG(
"vmaCreateAllocator");
// Init performs the fallible part of construction and its result is
// returned to the caller.
15764 return (*pAllocator)->Init(pCreateInfo);
15770 if(allocator != VK_NULL_HANDLE)
15772 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
15773 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
15774 vma_delete(&allocationCallbacks, allocator);
15780 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
15782 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
15783 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
15788 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
15790 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
15791 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
15796 uint32_t memoryTypeIndex,
15797 VkMemoryPropertyFlags* pFlags)
15799 VMA_ASSERT(allocator && pFlags);
15800 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
15801 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
15806 uint32_t frameIndex)
15808 VMA_ASSERT(allocator);
15809 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
15811 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15813 allocator->SetCurrentFrameIndex(frameIndex);
15820 VMA_ASSERT(allocator && pStats);
15821 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15822 allocator->CalculateStats(pStats);
// vmaBuildStatsString — builds a JSON statistics string: overall totals, then
// per-heap sections (size, flags, stats) containing per-memory-type
// subsections (flags, stats), optionally followed by the detailed map. The
// string is allocated with the allocator's callbacks and must be released
// with vmaFreeStatsString. Compiled only with stats-string support.
// NOTE(review): several json.BeginObject/EndObject/EndString framing calls
// and the VmaStats declaration were dropped by extraction (embedded line
// numbers jump); comments reflect the visible code.
15825 #if VMA_STATS_STRING_ENABLED 15829 char** ppStatsString,
15830 VkBool32 detailedMap)
15832 VMA_ASSERT(allocator && ppStatsString);
15833 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15835 VmaStringBuilder sb(allocator);
15837 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
15838 json.BeginObject();
// Overall totals first.
15841 allocator->CalculateStats(&stats);
15843 json.WriteString(
"Total");
15844 VmaPrintStatInfo(json, stats.
total);
// One section per memory heap.
15846 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
15848 json.BeginString(
"Heap ");
15849 json.ContinueString(heapIndex);
15851 json.BeginObject();
15853 json.WriteString(
"Size");
15854 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
15856 json.WriteString(
"Flags");
15857 json.BeginArray(
true);
15858 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
15860 json.WriteString(
"DEVICE_LOCAL");
15866 json.WriteString(
"Stats");
15867 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
// Nested: every memory type that belongs to this heap.
15870 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
15872 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
15874 json.BeginString(
"Type ");
15875 json.ContinueString(typeIndex);
15878 json.BeginObject();
// Spell out the property flags of this memory type as string entries.
15880 json.WriteString(
"Flags");
15881 json.BeginArray(
true);
15882 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
15883 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
15885 json.WriteString(
"DEVICE_LOCAL");
15887 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15889 json.WriteString(
"HOST_VISIBLE");
15891 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
15893 json.WriteString(
"HOST_COHERENT");
15895 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
15897 json.WriteString(
"HOST_CACHED");
15899 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
15901 json.WriteString(
"LAZILY_ALLOCATED");
15907 json.WriteString(
"Stats");
15908 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
// Optional full dump of every block and allocation.
15917 if(detailedMap == VK_TRUE)
15919 allocator->PrintDetailedMap(json);
// Copy the built JSON into a NUL-terminated buffer owned by the caller
// (freed via vmaFreeStatsString, which relies on len + 1 == strlen + 1).
15925 const size_t len = sb.GetLength();
15926 char*
const pChars = vma_new_array(allocator,
char, len + 1);
15929 memcpy(pChars, sb.GetData(), len);
15931 pChars[len] =
'\0';
15932 *ppStatsString = pChars;
15937 char* pStatsString)
15939 if(pStatsString != VMA_NULL)
15941 VMA_ASSERT(allocator);
15942 size_t len = strlen(pStatsString);
15943 vma_delete_array(allocator, pStatsString, len + 1);
15947 #endif // #if VMA_STATS_STRING_ENABLED 15954 uint32_t memoryTypeBits,
15956 uint32_t* pMemoryTypeIndex)
15958 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15959 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15960 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15967 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
15968 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
15971 switch(pAllocationCreateInfo->
usage)
15976 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15978 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15982 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
15985 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15986 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15988 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15992 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15993 preferredFlags |= VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
15999 *pMemoryTypeIndex = UINT32_MAX;
16000 uint32_t minCost = UINT32_MAX;
16001 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
16002 memTypeIndex < allocator->GetMemoryTypeCount();
16003 ++memTypeIndex, memTypeBit <<= 1)
16006 if((memTypeBit & memoryTypeBits) != 0)
16008 const VkMemoryPropertyFlags currFlags =
16009 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
16011 if((requiredFlags & ~currFlags) == 0)
16014 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
16016 if(currCost < minCost)
16018 *pMemoryTypeIndex = memTypeIndex;
16023 minCost = currCost;
16028 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
16033 const VkBufferCreateInfo* pBufferCreateInfo,
16035 uint32_t* pMemoryTypeIndex)
16037 VMA_ASSERT(allocator != VK_NULL_HANDLE);
16038 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
16039 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
16040 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
16042 const VkDevice hDev = allocator->m_hDevice;
16043 VkBuffer hBuffer = VK_NULL_HANDLE;
16044 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
16045 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
16046 if(res == VK_SUCCESS)
16048 VkMemoryRequirements memReq = {};
16049 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
16050 hDev, hBuffer, &memReq);
16054 memReq.memoryTypeBits,
16055 pAllocationCreateInfo,
16058 allocator->GetVulkanFunctions().vkDestroyBuffer(
16059 hDev, hBuffer, allocator->GetAllocationCallbacks());
16066 const VkImageCreateInfo* pImageCreateInfo,
16068 uint32_t* pMemoryTypeIndex)
16070 VMA_ASSERT(allocator != VK_NULL_HANDLE);
16071 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
16072 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
16073 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
16075 const VkDevice hDev = allocator->m_hDevice;
16076 VkImage hImage = VK_NULL_HANDLE;
16077 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
16078 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
16079 if(res == VK_SUCCESS)
16081 VkMemoryRequirements memReq = {};
16082 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
16083 hDev, hImage, &memReq);
16087 memReq.memoryTypeBits,
16088 pAllocationCreateInfo,
16091 allocator->GetVulkanFunctions().vkDestroyImage(
16092 hDev, hImage, allocator->GetAllocationCallbacks());
16102 VMA_ASSERT(allocator && pCreateInfo && pPool);
16104 VMA_DEBUG_LOG(
"vmaCreatePool");
16106 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16108 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
16110 #if VMA_RECORDING_ENABLED 16111 if(allocator->GetRecorder() != VMA_NULL)
16113 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
16124 VMA_ASSERT(allocator);
16126 if(pool == VK_NULL_HANDLE)
16131 VMA_DEBUG_LOG(
"vmaDestroyPool");
16133 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16135 #if VMA_RECORDING_ENABLED 16136 if(allocator->GetRecorder() != VMA_NULL)
16138 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
16142 allocator->DestroyPool(pool);
16150 VMA_ASSERT(allocator && pool && pPoolStats);
16152 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16154 allocator->GetPoolStats(pool, pPoolStats);
16160 size_t* pLostAllocationCount)
16162 VMA_ASSERT(allocator && pool);
16164 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16166 #if VMA_RECORDING_ENABLED 16167 if(allocator->GetRecorder() != VMA_NULL)
16169 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
16173 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
16178 VMA_ASSERT(allocator && pool);
16180 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16182 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
16184 return allocator->CheckPoolCorruption(pool);
16189 const VkMemoryRequirements* pVkMemoryRequirements,
16194 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
16196 VMA_DEBUG_LOG(
"vmaAllocateMemory");
16198 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16200 VkResult result = allocator->AllocateMemory(
16201 *pVkMemoryRequirements,
16207 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16211 #if VMA_RECORDING_ENABLED 16212 if(allocator->GetRecorder() != VMA_NULL)
16214 allocator->GetRecorder()->RecordAllocateMemory(
16215 allocator->GetCurrentFrameIndex(),
16216 *pVkMemoryRequirements,
16222 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
16224 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16232 const VkMemoryRequirements* pVkMemoryRequirements,
16234 size_t allocationCount,
16238 if(allocationCount == 0)
16243 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
16245 VMA_DEBUG_LOG(
"vmaAllocateMemoryPages");
16247 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16249 VkResult result = allocator->AllocateMemory(
16250 *pVkMemoryRequirements,
16256 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16260 #if VMA_RECORDING_ENABLED 16261 if(allocator->GetRecorder() != VMA_NULL)
16263 allocator->GetRecorder()->RecordAllocateMemoryPages(
16264 allocator->GetCurrentFrameIndex(),
16265 *pVkMemoryRequirements,
16267 (uint64_t)allocationCount,
16272 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
16274 for(
size_t i = 0; i < allocationCount; ++i)
16276 allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
16290 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16292 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
16294 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16296 VkMemoryRequirements vkMemReq = {};
16297 bool requiresDedicatedAllocation =
false;
16298 bool prefersDedicatedAllocation =
false;
16299 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
16300 requiresDedicatedAllocation,
16301 prefersDedicatedAllocation);
16303 VkResult result = allocator->AllocateMemory(
16305 requiresDedicatedAllocation,
16306 prefersDedicatedAllocation,
16310 VMA_SUBALLOCATION_TYPE_BUFFER,
16314 #if VMA_RECORDING_ENABLED 16315 if(allocator->GetRecorder() != VMA_NULL)
16317 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
16318 allocator->GetCurrentFrameIndex(),
16320 requiresDedicatedAllocation,
16321 prefersDedicatedAllocation,
16327 if(pAllocationInfo && result == VK_SUCCESS)
16329 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16342 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16344 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
16346 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16348 VkMemoryRequirements vkMemReq = {};
16349 bool requiresDedicatedAllocation =
false;
16350 bool prefersDedicatedAllocation =
false;
16351 allocator->GetImageMemoryRequirements(image, vkMemReq,
16352 requiresDedicatedAllocation, prefersDedicatedAllocation);
16354 VkResult result = allocator->AllocateMemory(
16356 requiresDedicatedAllocation,
16357 prefersDedicatedAllocation,
16361 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
16365 #if VMA_RECORDING_ENABLED 16366 if(allocator->GetRecorder() != VMA_NULL)
16368 allocator->GetRecorder()->RecordAllocateMemoryForImage(
16369 allocator->GetCurrentFrameIndex(),
16371 requiresDedicatedAllocation,
16372 prefersDedicatedAllocation,
16378 if(pAllocationInfo && result == VK_SUCCESS)
16380 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16390 VMA_ASSERT(allocator);
16392 if(allocation == VK_NULL_HANDLE)
16397 VMA_DEBUG_LOG(
"vmaFreeMemory");
16399 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16401 #if VMA_RECORDING_ENABLED 16402 if(allocator->GetRecorder() != VMA_NULL)
16404 allocator->GetRecorder()->RecordFreeMemory(
16405 allocator->GetCurrentFrameIndex(),
16410 allocator->FreeMemory(
16417 size_t allocationCount,
16420 if(allocationCount == 0)
16425 VMA_ASSERT(allocator);
16427 VMA_DEBUG_LOG(
"vmaFreeMemoryPages");
16429 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16431 #if VMA_RECORDING_ENABLED 16432 if(allocator->GetRecorder() != VMA_NULL)
16434 allocator->GetRecorder()->RecordFreeMemoryPages(
16435 allocator->GetCurrentFrameIndex(),
16436 (uint64_t)allocationCount,
16441 allocator->FreeMemory(allocationCount, pAllocations);
16447 VkDeviceSize newSize)
16449 VMA_ASSERT(allocator && allocation);
16451 VMA_DEBUG_LOG(
"vmaResizeAllocation");
16453 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16455 return allocator->ResizeAllocation(allocation, newSize);
16463 VMA_ASSERT(allocator && allocation && pAllocationInfo);
16465 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16467 #if VMA_RECORDING_ENABLED 16468 if(allocator->GetRecorder() != VMA_NULL)
16470 allocator->GetRecorder()->RecordGetAllocationInfo(
16471 allocator->GetCurrentFrameIndex(),
16476 allocator->GetAllocationInfo(allocation, pAllocationInfo);
16483 VMA_ASSERT(allocator && allocation);
16485 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16487 #if VMA_RECORDING_ENABLED 16488 if(allocator->GetRecorder() != VMA_NULL)
16490 allocator->GetRecorder()->RecordTouchAllocation(
16491 allocator->GetCurrentFrameIndex(),
16496 return allocator->TouchAllocation(allocation);
16504 VMA_ASSERT(allocator && allocation);
16506 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16508 allocation->SetUserData(allocator, pUserData);
16510 #if VMA_RECORDING_ENABLED 16511 if(allocator->GetRecorder() != VMA_NULL)
16513 allocator->GetRecorder()->RecordSetAllocationUserData(
16514 allocator->GetCurrentFrameIndex(),
16525 VMA_ASSERT(allocator && pAllocation);
16527 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
16529 allocator->CreateLostAllocation(pAllocation);
16531 #if VMA_RECORDING_ENABLED 16532 if(allocator->GetRecorder() != VMA_NULL)
16534 allocator->GetRecorder()->RecordCreateLostAllocation(
16535 allocator->GetCurrentFrameIndex(),
16546 VMA_ASSERT(allocator && allocation && ppData);
16548 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16550 VkResult res = allocator->Map(allocation, ppData);
16552 #if VMA_RECORDING_ENABLED 16553 if(allocator->GetRecorder() != VMA_NULL)
16555 allocator->GetRecorder()->RecordMapMemory(
16556 allocator->GetCurrentFrameIndex(),
16568 VMA_ASSERT(allocator && allocation);
16570 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16572 #if VMA_RECORDING_ENABLED 16573 if(allocator->GetRecorder() != VMA_NULL)
16575 allocator->GetRecorder()->RecordUnmapMemory(
16576 allocator->GetCurrentFrameIndex(),
16581 allocator->Unmap(allocation);
16586 VMA_ASSERT(allocator && allocation);
16588 VMA_DEBUG_LOG(
"vmaFlushAllocation");
16590 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16592 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
16594 #if VMA_RECORDING_ENABLED 16595 if(allocator->GetRecorder() != VMA_NULL)
16597 allocator->GetRecorder()->RecordFlushAllocation(
16598 allocator->GetCurrentFrameIndex(),
16599 allocation, offset, size);
16606 VMA_ASSERT(allocator && allocation);
16608 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
16610 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16612 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
16614 #if VMA_RECORDING_ENABLED 16615 if(allocator->GetRecorder() != VMA_NULL)
16617 allocator->GetRecorder()->RecordInvalidateAllocation(
16618 allocator->GetCurrentFrameIndex(),
16619 allocation, offset, size);
16626 VMA_ASSERT(allocator);
16628 VMA_DEBUG_LOG(
"vmaCheckCorruption");
16630 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16632 return allocator->CheckCorruption(memoryTypeBits);
16638 size_t allocationCount,
16639 VkBool32* pAllocationsChanged,
16649 if(pDefragmentationInfo != VMA_NULL)
16663 if(res == VK_NOT_READY)
16676 VMA_ASSERT(allocator && pInfo && pContext);
16687 VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->
poolCount, pInfo->
pPools));
16689 VMA_DEBUG_LOG(
"vmaDefragmentationBegin");
16691 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16693 VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);
16695 #if VMA_RECORDING_ENABLED 16696 if(allocator->GetRecorder() != VMA_NULL)
16698 allocator->GetRecorder()->RecordDefragmentationBegin(
16699 allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
16710 VMA_ASSERT(allocator);
16712 VMA_DEBUG_LOG(
"vmaDefragmentationEnd");
16714 if(context != VK_NULL_HANDLE)
16716 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16718 #if VMA_RECORDING_ENABLED 16719 if(allocator->GetRecorder() != VMA_NULL)
16721 allocator->GetRecorder()->RecordDefragmentationEnd(
16722 allocator->GetCurrentFrameIndex(), context);
16726 return allocator->DefragmentationEnd(context);
16739 VMA_ASSERT(allocator && allocation && buffer);
16741 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
16743 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16745 return allocator->BindBufferMemory(allocation, 0, buffer, VMA_NULL);
16751 VkDeviceSize allocationLocalOffset,
16755 VMA_ASSERT(allocator && allocation && buffer);
16757 VMA_DEBUG_LOG(
"vmaBindBufferMemory2");
16759 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16761 return allocator->BindBufferMemory(allocation, allocationLocalOffset, buffer, pNext);
16769 VMA_ASSERT(allocator && allocation && image);
16771 VMA_DEBUG_LOG(
"vmaBindImageMemory");
16773 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16775 return allocator->BindImageMemory(allocation, 0, image, VMA_NULL);
16781 VkDeviceSize allocationLocalOffset,
16785 VMA_ASSERT(allocator && allocation && image);
16787 VMA_DEBUG_LOG(
"vmaBindImageMemory2");
16789 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16791 return allocator->BindImageMemory(allocation, allocationLocalOffset, image, pNext);
16796 const VkBufferCreateInfo* pBufferCreateInfo,
16802 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
16804 if(pBufferCreateInfo->size == 0)
16806 return VK_ERROR_VALIDATION_FAILED_EXT;
16809 VMA_DEBUG_LOG(
"vmaCreateBuffer");
16811 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16813 *pBuffer = VK_NULL_HANDLE;
16814 *pAllocation = VK_NULL_HANDLE;
16817 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
16818 allocator->m_hDevice,
16820 allocator->GetAllocationCallbacks(),
16825 VkMemoryRequirements vkMemReq = {};
16826 bool requiresDedicatedAllocation =
false;
16827 bool prefersDedicatedAllocation =
false;
16828 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
16829 requiresDedicatedAllocation, prefersDedicatedAllocation);
16833 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
16835 VMA_ASSERT(vkMemReq.alignment %
16836 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
16838 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
16840 VMA_ASSERT(vkMemReq.alignment %
16841 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
16843 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
16845 VMA_ASSERT(vkMemReq.alignment %
16846 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
16850 res = allocator->AllocateMemory(
16852 requiresDedicatedAllocation,
16853 prefersDedicatedAllocation,
16856 *pAllocationCreateInfo,
16857 VMA_SUBALLOCATION_TYPE_BUFFER,
16861 #if VMA_RECORDING_ENABLED 16862 if(allocator->GetRecorder() != VMA_NULL)
16864 allocator->GetRecorder()->RecordCreateBuffer(
16865 allocator->GetCurrentFrameIndex(),
16866 *pBufferCreateInfo,
16867 *pAllocationCreateInfo,
16877 res = allocator->BindBufferMemory(*pAllocation, 0, *pBuffer, VMA_NULL);
16882 #if VMA_STATS_STRING_ENABLED 16883 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
16885 if(pAllocationInfo != VMA_NULL)
16887 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16892 allocator->FreeMemory(
16895 *pAllocation = VK_NULL_HANDLE;
16896 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16897 *pBuffer = VK_NULL_HANDLE;
16900 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16901 *pBuffer = VK_NULL_HANDLE;
16912 VMA_ASSERT(allocator);
16914 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
16919 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
16921 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16923 #if VMA_RECORDING_ENABLED 16924 if(allocator->GetRecorder() != VMA_NULL)
16926 allocator->GetRecorder()->RecordDestroyBuffer(
16927 allocator->GetCurrentFrameIndex(),
16932 if(buffer != VK_NULL_HANDLE)
16934 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
16937 if(allocation != VK_NULL_HANDLE)
16939 allocator->FreeMemory(
16947 const VkImageCreateInfo* pImageCreateInfo,
16953 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
16955 if(pImageCreateInfo->extent.width == 0 ||
16956 pImageCreateInfo->extent.height == 0 ||
16957 pImageCreateInfo->extent.depth == 0 ||
16958 pImageCreateInfo->mipLevels == 0 ||
16959 pImageCreateInfo->arrayLayers == 0)
16961 return VK_ERROR_VALIDATION_FAILED_EXT;
16964 VMA_DEBUG_LOG(
"vmaCreateImage");
16966 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16968 *pImage = VK_NULL_HANDLE;
16969 *pAllocation = VK_NULL_HANDLE;
16972 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
16973 allocator->m_hDevice,
16975 allocator->GetAllocationCallbacks(),
16979 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
16980 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
16981 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
16984 VkMemoryRequirements vkMemReq = {};
16985 bool requiresDedicatedAllocation =
false;
16986 bool prefersDedicatedAllocation =
false;
16987 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
16988 requiresDedicatedAllocation, prefersDedicatedAllocation);
16990 res = allocator->AllocateMemory(
16992 requiresDedicatedAllocation,
16993 prefersDedicatedAllocation,
16996 *pAllocationCreateInfo,
17001 #if VMA_RECORDING_ENABLED 17002 if(allocator->GetRecorder() != VMA_NULL)
17004 allocator->GetRecorder()->RecordCreateImage(
17005 allocator->GetCurrentFrameIndex(),
17007 *pAllocationCreateInfo,
17017 res = allocator->BindImageMemory(*pAllocation, 0, *pImage, VMA_NULL);
17022 #if VMA_STATS_STRING_ENABLED 17023 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
17025 if(pAllocationInfo != VMA_NULL)
17027 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
17032 allocator->FreeMemory(
17035 *pAllocation = VK_NULL_HANDLE;
17036 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
17037 *pImage = VK_NULL_HANDLE;
17040 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
17041 *pImage = VK_NULL_HANDLE;
17052 VMA_ASSERT(allocator);
17054 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
17059 VMA_DEBUG_LOG(
"vmaDestroyImage");
17061 VMA_DEBUG_GLOBAL_MUTEX_LOCK
17063 #if VMA_RECORDING_ENABLED 17064 if(allocator->GetRecorder() != VMA_NULL)
17066 allocator->GetRecorder()->RecordDestroyImage(
17067 allocator->GetCurrentFrameIndex(),
17072 if(image != VK_NULL_HANDLE)
17074 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
17076 if(allocation != VK_NULL_HANDLE)
17078 allocator->FreeMemory(
17084 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1810
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:2114
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1872
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side,...
Definition: vk_mem_alloc.h:2911
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Deprecated. Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
Definition: vk_mem_alloc.h:1846
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2445
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1822
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:2076
Definition: vk_mem_alloc.h:2180
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:2864
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1814
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2545
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1869
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2947
VkResult vmaBindImageMemory2(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize allocationLocalOffset, VkImage image, const void *pNext)
Binds image to allocation with additional parameters.
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2334
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1690
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2426
Definition: vk_mem_alloc.h:2151
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:2867
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1803
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2233
Definition: vk_mem_alloc.h:2103
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1881
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2362
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1.
Definition: vk_mem_alloc.h:1935
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1866
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2107
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:2007
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1819
VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:2901
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:2006
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2951
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(),...
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1898
VmaStatInfo total
Definition: vk_mem_alloc.h:2016
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2959
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2217
Definition: vk_mem_alloc.h:2175
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2942
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1820
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1733
Represents main object of this library initialized.
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1875
void vmaFreeMemoryPages(VmaAllocator allocator, size_t allocationCount, VmaAllocation *pAllocations)
Frees memory and destroys multiple allocations.
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2376
Definition: vk_mem_alloc.h:2370
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:1826
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1942
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2555
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1815
VkResult vmaDefragmentationBegin(VmaAllocator allocator, const VmaDefragmentationInfo2 *pInfo, VmaDefragmentationStats *pStats, VmaDefragmentationContext *pContext)
Begins defragmentation process.
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1844
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2254
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2396
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost.
Definition: vk_mem_alloc.h:2432
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1801
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2379
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2916
VmaMemoryUsage
Definition: vk_mem_alloc.h:2054
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:2876
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2937
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects.
Definition: vk_mem_alloc.h:2955
Definition: vk_mem_alloc.h:2093
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2241
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1818
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
VkResult vmaDefragmentationEnd(VmaAllocator allocator, VmaDefragmentationContext context)
Ends defragmentation process.
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:2012
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1739
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:2855
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
Definition: vk_mem_alloc.h:2853
Definition: vk_mem_alloc.h:2201
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:2882
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1760
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1848
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1765
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2957
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2228
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2442
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1811
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1995
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes....
Definition: vk_mem_alloc.h:2391
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1752
Definition: vk_mem_alloc.h:2366
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:2158
Opaque object that represents a started defragmentation process.
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:2008
Definition: vk_mem_alloc.h:1799
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1756
Definition: vk_mem_alloc.h:2191
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2382
Definition: vk_mem_alloc.h:2102
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1817
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2223
Definition: vk_mem_alloc.h:2214
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1998
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1813
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2404
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1884
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2435
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2212
VkDeviceSize maxCpuBytesToMove
Maximum total number of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2906
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2247
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1923
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:2014
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
Definition: vk_mem_alloc.h:2138
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:2007
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1824
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1854
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use.
Definition: vk_mem_alloc.h:2852
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:2930
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1754
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1823
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2418
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1816
Definition: vk_mem_alloc.h:2169
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of an Allocator to be created.
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1862
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2569
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB....
Definition: vk_mem_alloc.h:1878
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:2007
VkResult vmaAllocateMemoryPages(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, size_t allocationCount, VmaAllocation *pAllocations, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation for multiple allocation objects at once.
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:2004
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2423
Parameters for defragmentation.
Definition: vk_mem_alloc.h:2861
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions.
Definition: vk_mem_alloc.h:2184
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory,...
Definition: vk_mem_alloc.h:2550
Definition: vk_mem_alloc.h:2198
Definition: vk_mem_alloc.h:2210
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2953
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1809
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
VkResult vmaBindBufferMemory2(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize allocationLocalOffset, VkBuffer buffer, const void *pNext)
Binds buffer to allocation with additional parameters.
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:2002
Definition: vk_mem_alloc.h:2059
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2372
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1851
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:2000
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1821
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1825
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2125
Definition: vk_mem_alloc.h:2205
Definition: vk_mem_alloc.h:2086
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2564
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1787
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1812
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2351
VkResult vmaResizeAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize newSize)
Deprecated.
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2531
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:2195
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2316
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:2008
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame.
Definition: vk_mem_alloc.h:2164
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1838
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:2015
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2429
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:2008
struct VmaDefragmentationInfo2 VmaDefragmentationInfo2
Parameters for defragmentation.
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side,...
Definition: vk_mem_alloc.h:2921
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2536
uint32_t poolCount
Number of pools in pPools array.
Definition: vk_mem_alloc.h:2885