23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1622 #ifndef VMA_RECORDING_ENABLED 1624 #define VMA_RECORDING_ENABLED 1 1626 #define VMA_RECORDING_ENABLED 0 1631 #define NOMINMAX // For windows.h 1635 #include <vulkan/vulkan.h> 1638 #if VMA_RECORDING_ENABLED 1639 #include <windows.h> 1642 #if !defined(VMA_DEDICATED_ALLOCATION) 1643 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1644 #define VMA_DEDICATED_ALLOCATION 1 1646 #define VMA_DEDICATED_ALLOCATION 0 1664 uint32_t memoryType,
1665 VkDeviceMemory memory,
1670 uint32_t memoryType,
1671 VkDeviceMemory memory,
1744 #if VMA_DEDICATED_ALLOCATION 1745 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1746 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1873 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1881 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1891 uint32_t memoryTypeIndex,
1892 VkMemoryPropertyFlags* pFlags);
1904 uint32_t frameIndex);
1937 #define VMA_STATS_STRING_ENABLED 1 1939 #if VMA_STATS_STRING_ENABLED 1946 char** ppStatsString,
1947 VkBool32 detailedMap);
1951 char* pStatsString);
1953 #endif // #if VMA_STATS_STRING_ENABLED 2185 uint32_t memoryTypeBits,
2187 uint32_t* pMemoryTypeIndex);
2203 const VkBufferCreateInfo* pBufferCreateInfo,
2205 uint32_t* pMemoryTypeIndex);
2221 const VkImageCreateInfo* pImageCreateInfo,
2223 uint32_t* pMemoryTypeIndex);
2395 size_t* pLostAllocationCount);
2494 const VkMemoryRequirements* pVkMemoryRequirements,
2548 VkDeviceSize newSize);
2917 size_t allocationCount,
2918 VkBool32* pAllocationsChanged,
2984 const VkBufferCreateInfo* pBufferCreateInfo,
3009 const VkImageCreateInfo* pImageCreateInfo,
3035 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 3038 #if defined(__cplusplus) && defined(__INTELLISENSE__) 3039 #define VMA_IMPLEMENTATION 3042 #ifdef VMA_IMPLEMENTATION 3043 #undef VMA_IMPLEMENTATION 3065 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 3066 #define VMA_STATIC_VULKAN_FUNCTIONS 1 3078 #if VMA_USE_STL_CONTAINERS 3079 #define VMA_USE_STL_VECTOR 1 3080 #define VMA_USE_STL_UNORDERED_MAP 1 3081 #define VMA_USE_STL_LIST 1 3084 #ifndef VMA_USE_STL_SHARED_MUTEX 3086 #if defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 3087 #define VMA_USE_STL_SHARED_MUTEX 1 3091 #if VMA_USE_STL_VECTOR 3095 #if VMA_USE_STL_UNORDERED_MAP 3096 #include <unordered_map> 3099 #if VMA_USE_STL_LIST 3108 #include <algorithm> 3114 #define VMA_NULL nullptr 3117 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16) 3119 void *aligned_alloc(
size_t alignment,
size_t size)
3122 if(alignment <
sizeof(
void*))
3124 alignment =
sizeof(
void*);
3127 return memalign(alignment, size);
3129 #elif defined(__APPLE__) || defined(__ANDROID__) 3131 void *aligned_alloc(
size_t alignment,
size_t size)
3134 if(alignment <
sizeof(
void*))
3136 alignment =
sizeof(
void*);
3140 if(posix_memalign(&pointer, alignment, size) == 0)
3154 #define VMA_ASSERT(expr) assert(expr) 3156 #define VMA_ASSERT(expr) 3162 #ifndef VMA_HEAVY_ASSERT 3164 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 3166 #define VMA_HEAVY_ASSERT(expr) 3170 #ifndef VMA_ALIGN_OF 3171 #define VMA_ALIGN_OF(type) (__alignof(type)) 3174 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 3176 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 3178 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 3182 #ifndef VMA_SYSTEM_FREE 3184 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 3186 #define VMA_SYSTEM_FREE(ptr) free(ptr) 3191 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 3195 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 3199 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 3203 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 3206 #ifndef VMA_DEBUG_LOG 3207 #define VMA_DEBUG_LOG(format, ...) 3217 #if VMA_STATS_STRING_ENABLED 3218 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
3220 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
3222 static inline void VmaUint64ToStr(
char* outStr,
size_t strLen, uint64_t num)
3224 snprintf(outStr, strLen,
"%llu", static_cast<unsigned long long>(num));
3226 static inline void VmaPtrToStr(
char* outStr,
size_t strLen,
const void* ptr)
3228 snprintf(outStr, strLen,
"%p", ptr);
3236 void Lock() { m_Mutex.lock(); }
3237 void Unlock() { m_Mutex.unlock(); }
3241 #define VMA_MUTEX VmaMutex 3245 #ifndef VMA_RW_MUTEX 3246 #if VMA_USE_STL_SHARED_MUTEX 3248 #include <shared_mutex> 3252 void LockRead() { m_Mutex.lock_shared(); }
3253 void UnlockRead() { m_Mutex.unlock_shared(); }
3254 void LockWrite() { m_Mutex.lock(); }
3255 void UnlockWrite() { m_Mutex.unlock(); }
3257 std::shared_mutex m_Mutex;
3259 #define VMA_RW_MUTEX VmaRWMutex 3260 #elif defined(_WIN32) 3265 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
3266 void LockRead() { AcquireSRWLockShared(&m_Lock); }
3267 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
3268 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
3269 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
3273 #define VMA_RW_MUTEX VmaRWMutex 3279 void LockRead() { m_Mutex.Lock(); }
3280 void UnlockRead() { m_Mutex.Unlock(); }
3281 void LockWrite() { m_Mutex.Lock(); }
3282 void UnlockWrite() { m_Mutex.Unlock(); }
3286 #define VMA_RW_MUTEX VmaRWMutex 3287 #endif // #if VMA_USE_STL_SHARED_MUTEX 3288 #endif // #ifndef VMA_RW_MUTEX 3298 #ifndef VMA_ATOMIC_UINT32 3299 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 3302 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 3307 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 3310 #ifndef VMA_DEBUG_ALIGNMENT 3315 #define VMA_DEBUG_ALIGNMENT (1) 3318 #ifndef VMA_DEBUG_MARGIN 3323 #define VMA_DEBUG_MARGIN (0) 3326 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 3331 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 3334 #ifndef VMA_DEBUG_DETECT_CORRUPTION 3340 #define VMA_DEBUG_DETECT_CORRUPTION (0) 3343 #ifndef VMA_DEBUG_GLOBAL_MUTEX 3348 #define VMA_DEBUG_GLOBAL_MUTEX (0) 3351 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 3356 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 3359 #ifndef VMA_SMALL_HEAP_MAX_SIZE 3360 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 3364 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 3365 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 3369 #ifndef VMA_CLASS_NO_COPY 3370 #define VMA_CLASS_NO_COPY(className) \ 3372 className(const className&) = delete; \ 3373 className& operator=(const className&) = delete; 3376 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
3379 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
3381 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
3382 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
3388 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
3390 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
3391 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (32-bit population count).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    // Classic parallel bit count: sum pairs, then nibbles, bytes, halves.
    uint32_t count = v - ((v >> 1) & 0x55555555);
    count = ((count >> 2) & 0x33333333) + (count & 0x33333333);
    count = ((count >> 4) + count) & 0x0F0F0F0F;
    count = ((count >> 8) + count) & 0x00FF00FF;
    count = ((count >> 16) + count) & 0x0000FFFF;
    return count;
}
// Rounds val up to the nearest multiple of align.
// Works for any nonzero align, not only powers of two.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T bumped = val + align - 1;
    return bumped - bumped % align;
}
// Rounds val down to the nearest multiple of align.
// Works for any nonzero align, not only powers of two.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val - val % align;
}
// Division with rounding to the nearest integer (ties round up).
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
// Returns true when x is a power of two.
// NOTE: x == 0 also reports true; callers rely on that for "unset" values.
template <typename T>
inline bool VmaIsPow2(T x)
{
    const T masked = x & (x - 1);
    return masked == 0;
}
// Returns the smallest power of 2 greater than or equal to v.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    // Smear the highest set bit down, then increment.
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v++;
    return v;
}
// Returns the smallest power of 2 greater than or equal to v (64-bit).
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v++;
    return v;
}
// Returns the largest power of 2 less than or equal to v.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    // Smear the highest set bit down, then isolate it.
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
// Returns the largest power of 2 less than or equal to v (64-bit).
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}
// Returns true if pStr is null or points to an empty ("") string.
static inline bool VmaStrIsEmpty(const char* pStr)
{
    if(pStr == nullptr)
    {
        return true;
    }
    return *pStr == '\0';
}
3490 static const char* VmaAlgorithmToStr(uint32_t algorithm)
// Lomuto-style partition for VmaQuickSort: partitions [beg, end) around the
// last element (the pivot) and returns the pivot's final position.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator pivot = end;
    --pivot;
    Iterator insertPos = beg;
    for(Iterator it = beg; it < pivot; ++it)
    {
        if(cmp(*it, *pivot))
        {
            if(insertPos != it)
            {
                VMA_SWAP(*it, *insertPos);
            }
            ++insertPos;
        }
    }
    if(insertPos != pivot)
    {
        VMA_SWAP(*insertPos, *pivot);
    }
    return insertPos;
}
3531 template<
typename Iterator,
typename Compare>
3532 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
3536 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
3537 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
3538 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
3542 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 3544 #endif // #ifndef VMA_SORT 3553 static inline bool VmaBlocksOnSamePage(
3554 VkDeviceSize resourceAOffset,
3555 VkDeviceSize resourceASize,
3556 VkDeviceSize resourceBOffset,
3557 VkDeviceSize pageSize)
3559 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3560 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3561 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3562 VkDeviceSize resourceBStart = resourceBOffset;
3563 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3564 return resourceAEndPage == resourceBStartPage;
// Type of a suballocation inside a memory block. Used to decide whether two
// neighboring allocations must be separated by bufferImageGranularity.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,  // image tiling unknown
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
3584 static inline bool VmaIsBufferImageGranularityConflict(
3585 VmaSuballocationType suballocType1,
3586 VmaSuballocationType suballocType2)
3588 if(suballocType1 > suballocType2)
3590 VMA_SWAP(suballocType1, suballocType2);
3593 switch(suballocType1)
3595 case VMA_SUBALLOCATION_TYPE_FREE:
3597 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3599 case VMA_SUBALLOCATION_TYPE_BUFFER:
3601 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3602 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3603 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3605 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3606 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3607 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3608 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3610 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3611 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3619 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3621 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3622 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3623 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3625 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3629 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3631 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3632 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3633 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3635 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3646 VMA_CLASS_NO_COPY(VmaMutexLock)
3648 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
3649 m_pMutex(useMutex ? &mutex : VMA_NULL)
3650 {
if(m_pMutex) { m_pMutex->Lock(); } }
3652 {
if(m_pMutex) { m_pMutex->Unlock(); } }
3654 VMA_MUTEX* m_pMutex;
3658 struct VmaMutexLockRead
3660 VMA_CLASS_NO_COPY(VmaMutexLockRead)
3662 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
3663 m_pMutex(useMutex ? &mutex : VMA_NULL)
3664 {
if(m_pMutex) { m_pMutex->LockRead(); } }
3665 ~VmaMutexLockRead() {
if(m_pMutex) { m_pMutex->UnlockRead(); } }
3667 VMA_RW_MUTEX* m_pMutex;
3671 struct VmaMutexLockWrite
3673 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
3675 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex) :
3676 m_pMutex(useMutex ? &mutex : VMA_NULL)
3677 {
if(m_pMutex) { m_pMutex->LockWrite(); } }
3678 ~VmaMutexLockWrite() {
if(m_pMutex) { m_pMutex->UnlockWrite(); } }
3680 VMA_RW_MUTEX* m_pMutex;
3683 #if VMA_DEBUG_GLOBAL_MUTEX 3684 static VMA_MUTEX gDebugGlobalMutex;
3685 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3687 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3691 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Binary search over the sorted range [beg, end): returns an iterator to the
first element that is not less than key according to cmp (same contract as
std::lower_bound). Returns end when every element is less than key.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpLess cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
3724 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3726 if((pAllocationCallbacks != VMA_NULL) &&
3727 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3729 return (*pAllocationCallbacks->pfnAllocation)(
3730 pAllocationCallbacks->pUserData,
3733 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3737 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3741 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3743 if((pAllocationCallbacks != VMA_NULL) &&
3744 (pAllocationCallbacks->pfnFree != VMA_NULL))
3746 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3750 VMA_SYSTEM_FREE(ptr);
3754 template<
typename T>
3755 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3757 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3760 template<
typename T>
3761 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3763 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
3766 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3768 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3770 template<
typename T>
3771 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
3774 VmaFree(pAllocationCallbacks, ptr);
3777 template<
typename T>
3778 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
3782 for(
size_t i = count; i--; )
3786 VmaFree(pAllocationCallbacks, ptr);
3791 template<
typename T>
3792 class VmaStlAllocator
3795 const VkAllocationCallbacks*
const m_pCallbacks;
3796 typedef T value_type;
3798 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
3799 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
3801 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3802 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
3804 template<
typename U>
3805 bool operator==(
const VmaStlAllocator<U>& rhs)
const 3807 return m_pCallbacks == rhs.m_pCallbacks;
3809 template<
typename U>
3810 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 3812 return m_pCallbacks != rhs.m_pCallbacks;
3815 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
3818 #if VMA_USE_STL_VECTOR 3820 #define VmaVector std::vector 3822 template<
typename T,
typename allocatorT>
3823 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
3825 vec.insert(vec.begin() + index, item);
// Removes the element at position index from vec.
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
3834 #else // #if VMA_USE_STL_VECTOR 3839 template<
typename T,
typename AllocatorT>
3843 typedef T value_type;
3845 VmaVector(
const AllocatorT& allocator) :
3846 m_Allocator(allocator),
3853 VmaVector(
size_t count,
const AllocatorT& allocator) :
3854 m_Allocator(allocator),
3855 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
3861 VmaVector(
const VmaVector<T, AllocatorT>& src) :
3862 m_Allocator(src.m_Allocator),
3863 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
3864 m_Count(src.m_Count),
3865 m_Capacity(src.m_Count)
3869 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
3875 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3878 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
3882 resize(rhs.m_Count);
3885 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
3891 bool empty()
const {
return m_Count == 0; }
3892 size_t size()
const {
return m_Count; }
3893 T* data() {
return m_pArray; }
3894 const T* data()
const {
return m_pArray; }
3896 T& operator[](
size_t index)
3898 VMA_HEAVY_ASSERT(index < m_Count);
3899 return m_pArray[index];
3901 const T& operator[](
size_t index)
const 3903 VMA_HEAVY_ASSERT(index < m_Count);
3904 return m_pArray[index];
3909 VMA_HEAVY_ASSERT(m_Count > 0);
3912 const T& front()
const 3914 VMA_HEAVY_ASSERT(m_Count > 0);
3919 VMA_HEAVY_ASSERT(m_Count > 0);
3920 return m_pArray[m_Count - 1];
3922 const T& back()
const 3924 VMA_HEAVY_ASSERT(m_Count > 0);
3925 return m_pArray[m_Count - 1];
3928 void reserve(
size_t newCapacity,
bool freeMemory =
false)
3930 newCapacity = VMA_MAX(newCapacity, m_Count);
3932 if((newCapacity < m_Capacity) && !freeMemory)
3934 newCapacity = m_Capacity;
3937 if(newCapacity != m_Capacity)
3939 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
3942 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
3944 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3945 m_Capacity = newCapacity;
3946 m_pArray = newArray;
3950 void resize(
size_t newCount,
bool freeMemory =
false)
3952 size_t newCapacity = m_Capacity;
3953 if(newCount > m_Capacity)
3955 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
3959 newCapacity = newCount;
3962 if(newCapacity != m_Capacity)
3964 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
3965 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
3966 if(elementsToCopy != 0)
3968 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
3970 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3971 m_Capacity = newCapacity;
3972 m_pArray = newArray;
3978 void clear(
bool freeMemory =
false)
3980 resize(0, freeMemory);
3983 void insert(
size_t index,
const T& src)
3985 VMA_HEAVY_ASSERT(index <= m_Count);
3986 const size_t oldCount = size();
3987 resize(oldCount + 1);
3988 if(index < oldCount)
3990 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
3992 m_pArray[index] = src;
3995 void remove(
size_t index)
3997 VMA_HEAVY_ASSERT(index < m_Count);
3998 const size_t oldCount = size();
3999 if(index < oldCount - 1)
4001 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
4003 resize(oldCount - 1);
4006 void push_back(
const T& src)
4008 const size_t newIndex = size();
4009 resize(newIndex + 1);
4010 m_pArray[newIndex] = src;
4015 VMA_HEAVY_ASSERT(m_Count > 0);
4019 void push_front(
const T& src)
4026 VMA_HEAVY_ASSERT(m_Count > 0);
4030 typedef T* iterator;
4032 iterator begin() {
return m_pArray; }
4033 iterator end() {
return m_pArray + m_Count; }
4036 AllocatorT m_Allocator;
4042 template<
typename T,
typename allocatorT>
4043 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
4045 vec.insert(index, item);
4048 template<
typename T,
typename allocatorT>
4049 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
4054 #endif // #if VMA_USE_STL_VECTOR 4056 template<
typename CmpLess,
typename VectorT>
4057 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
4059 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4061 vector.data() + vector.size(),
4063 CmpLess()) - vector.data();
4064 VmaVectorInsert(vector, indexToInsert, value);
4065 return indexToInsert;
/* Removes the first element equal (per CmpLess) to value from a sorted
vector. Returns true when an element was found and removed. */
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    // Equality test expressed through the comparator: !(a<b) && !(b<a).
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
4086 template<
typename CmpLess,
typename IterT,
typename KeyT>
4087 IterT VmaVectorFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value)
4090 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
4091 beg, end, value, comparator);
4093 (!comparator(*it, value) && !comparator(value, *it)))
4108 template<
typename T>
4109 class VmaPoolAllocator
4111 VMA_CLASS_NO_COPY(VmaPoolAllocator)
4113 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
4114 ~VmaPoolAllocator();
4122 uint32_t NextFreeIndex;
4129 uint32_t FirstFreeIndex;
4132 const VkAllocationCallbacks* m_pAllocationCallbacks;
4133 size_t m_ItemsPerBlock;
4134 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
4136 ItemBlock& CreateNewBlock();
4139 template<
typename T>
4140 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
4141 m_pAllocationCallbacks(pAllocationCallbacks),
4142 m_ItemsPerBlock(itemsPerBlock),
4143 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
4145 VMA_ASSERT(itemsPerBlock > 0);
4148 template<
typename T>
4149 VmaPoolAllocator<T>::~VmaPoolAllocator()
4154 template<
typename T>
4155 void VmaPoolAllocator<T>::Clear()
4157 for(
size_t i = m_ItemBlocks.size(); i--; )
4158 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
4159 m_ItemBlocks.clear();
4162 template<
typename T>
4163 T* VmaPoolAllocator<T>::Alloc()
4165 for(
size_t i = m_ItemBlocks.size(); i--; )
4167 ItemBlock& block = m_ItemBlocks[i];
4169 if(block.FirstFreeIndex != UINT32_MAX)
4171 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
4172 block.FirstFreeIndex = pItem->NextFreeIndex;
4173 return &pItem->Value;
4178 ItemBlock& newBlock = CreateNewBlock();
4179 Item*
const pItem = &newBlock.pItems[0];
4180 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
4181 return &pItem->Value;
4184 template<
typename T>
4185 void VmaPoolAllocator<T>::Free(T* ptr)
4188 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
4190 ItemBlock& block = m_ItemBlocks[i];
4194 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
4197 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
4199 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
4200 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
4201 block.FirstFreeIndex = index;
4205 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
4208 template<
typename T>
4209 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
4211 ItemBlock newBlock = {
4212 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
4214 m_ItemBlocks.push_back(newBlock);
4217 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
4218 newBlock.pItems[i].NextFreeIndex = i + 1;
4219 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
4220 return m_ItemBlocks.back();
4226 #if VMA_USE_STL_LIST 4228 #define VmaList std::list 4230 #else // #if VMA_USE_STL_LIST 4232 template<
typename T>
4241 template<
typename T>
4244 VMA_CLASS_NO_COPY(VmaRawList)
4246 typedef VmaListItem<T> ItemType;
4248 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
4252 size_t GetCount()
const {
return m_Count; }
4253 bool IsEmpty()
const {
return m_Count == 0; }
4255 ItemType* Front() {
return m_pFront; }
4256 const ItemType* Front()
const {
return m_pFront; }
4257 ItemType* Back() {
return m_pBack; }
4258 const ItemType* Back()
const {
return m_pBack; }
4260 ItemType* PushBack();
4261 ItemType* PushFront();
4262 ItemType* PushBack(
const T& value);
4263 ItemType* PushFront(
const T& value);
4268 ItemType* InsertBefore(ItemType* pItem);
4270 ItemType* InsertAfter(ItemType* pItem);
4272 ItemType* InsertBefore(ItemType* pItem,
const T& value);
4273 ItemType* InsertAfter(ItemType* pItem,
const T& value);
4275 void Remove(ItemType* pItem);
4278 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
4279 VmaPoolAllocator<ItemType> m_ItemAllocator;
4285 template<
typename T>
4286 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
4287 m_pAllocationCallbacks(pAllocationCallbacks),
4288 m_ItemAllocator(pAllocationCallbacks, 128),
4295 template<
typename T>
4296 VmaRawList<T>::~VmaRawList()
4302 template<
typename T>
4303 void VmaRawList<T>::Clear()
4305 if(IsEmpty() ==
false)
4307 ItemType* pItem = m_pBack;
4308 while(pItem != VMA_NULL)
4310 ItemType*
const pPrevItem = pItem->pPrev;
4311 m_ItemAllocator.Free(pItem);
4314 m_pFront = VMA_NULL;
4320 template<
typename T>
4321 VmaListItem<T>* VmaRawList<T>::PushBack()
4323 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4324 pNewItem->pNext = VMA_NULL;
4327 pNewItem->pPrev = VMA_NULL;
4328 m_pFront = pNewItem;
4334 pNewItem->pPrev = m_pBack;
4335 m_pBack->pNext = pNewItem;
4342 template<
typename T>
4343 VmaListItem<T>* VmaRawList<T>::PushFront()
4345 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4346 pNewItem->pPrev = VMA_NULL;
4349 pNewItem->pNext = VMA_NULL;
4350 m_pFront = pNewItem;
4356 pNewItem->pNext = m_pFront;
4357 m_pFront->pPrev = pNewItem;
4358 m_pFront = pNewItem;
4364 template<
typename T>
4365 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
4367 ItemType*
const pNewItem = PushBack();
4368 pNewItem->Value = value;
4372 template<
typename T>
4373 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
4375 ItemType*
const pNewItem = PushFront();
4376 pNewItem->Value = value;
4380 template<
typename T>
4381 void VmaRawList<T>::PopBack()
4383 VMA_HEAVY_ASSERT(m_Count > 0);
4384 ItemType*
const pBackItem = m_pBack;
4385 ItemType*
const pPrevItem = pBackItem->pPrev;
4386 if(pPrevItem != VMA_NULL)
4388 pPrevItem->pNext = VMA_NULL;
4390 m_pBack = pPrevItem;
4391 m_ItemAllocator.Free(pBackItem);
4395 template<
typename T>
4396 void VmaRawList<T>::PopFront()
4398 VMA_HEAVY_ASSERT(m_Count > 0);
4399 ItemType*
const pFrontItem = m_pFront;
4400 ItemType*
const pNextItem = pFrontItem->pNext;
4401 if(pNextItem != VMA_NULL)
4403 pNextItem->pPrev = VMA_NULL;
4405 m_pFront = pNextItem;
4406 m_ItemAllocator.Free(pFrontItem);
4410 template<
typename T>
4411 void VmaRawList<T>::Remove(ItemType* pItem)
4413 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4414 VMA_HEAVY_ASSERT(m_Count > 0);
4416 if(pItem->pPrev != VMA_NULL)
4418 pItem->pPrev->pNext = pItem->pNext;
4422 VMA_HEAVY_ASSERT(m_pFront == pItem);
4423 m_pFront = pItem->pNext;
4426 if(pItem->pNext != VMA_NULL)
4428 pItem->pNext->pPrev = pItem->pPrev;
4432 VMA_HEAVY_ASSERT(m_pBack == pItem);
4433 m_pBack = pItem->pPrev;
4436 m_ItemAllocator.Free(pItem);
4440 template<
typename T>
4441 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4443 if(pItem != VMA_NULL)
4445 ItemType*
const prevItem = pItem->pPrev;
4446 ItemType*
const newItem = m_ItemAllocator.Alloc();
4447 newItem->pPrev = prevItem;
4448 newItem->pNext = pItem;
4449 pItem->pPrev = newItem;
4450 if(prevItem != VMA_NULL)
4452 prevItem->pNext = newItem;
4456 VMA_HEAVY_ASSERT(m_pFront == pItem);
4466 template<
typename T>
4467 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4469 if(pItem != VMA_NULL)
4471 ItemType*
const nextItem = pItem->pNext;
4472 ItemType*
const newItem = m_ItemAllocator.Alloc();
4473 newItem->pNext = nextItem;
4474 newItem->pPrev = pItem;
4475 pItem->pNext = newItem;
4476 if(nextItem != VMA_NULL)
4478 nextItem->pPrev = newItem;
4482 VMA_HEAVY_ASSERT(m_pBack == pItem);
4492 template<
typename T>
4493 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4495 ItemType*
const newItem = InsertBefore(pItem);
4496 newItem->Value = value;
4500 template<
typename T>
4501 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4503 ItemType*
const newItem = InsertAfter(pItem);
4504 newItem->Value = value;
4508 template<
typename T,
typename AllocatorT>
4511 VMA_CLASS_NO_COPY(VmaList)
4522 T& operator*()
const 4524 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4525 return m_pItem->Value;
4527 T* operator->()
const 4529 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4530 return &m_pItem->Value;
4533 iterator& operator++()
4535 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4536 m_pItem = m_pItem->pNext;
4539 iterator& operator--()
4541 if(m_pItem != VMA_NULL)
4543 m_pItem = m_pItem->pPrev;
4547 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4548 m_pItem = m_pList->Back();
4553 iterator operator++(
int)
4555 iterator result = *
this;
4559 iterator operator--(
int)
4561 iterator result = *
this;
4566 bool operator==(
const iterator& rhs)
const 4568 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4569 return m_pItem == rhs.m_pItem;
4571 bool operator!=(
const iterator& rhs)
const 4573 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4574 return m_pItem != rhs.m_pItem;
4578 VmaRawList<T>* m_pList;
4579 VmaListItem<T>* m_pItem;
4581 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4587 friend class VmaList<T, AllocatorT>;
4590 class const_iterator
4599 const_iterator(
const iterator& src) :
4600 m_pList(src.m_pList),
4601 m_pItem(src.m_pItem)
4605 const T& operator*()
const 4607 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4608 return m_pItem->Value;
4610 const T* operator->()
const 4612 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4613 return &m_pItem->Value;
4616 const_iterator& operator++()
4618 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4619 m_pItem = m_pItem->pNext;
4622 const_iterator& operator--()
4624 if(m_pItem != VMA_NULL)
4626 m_pItem = m_pItem->pPrev;
4630 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4631 m_pItem = m_pList->Back();
4636 const_iterator operator++(
int)
4638 const_iterator result = *
this;
4642 const_iterator operator--(
int)
4644 const_iterator result = *
this;
4649 bool operator==(
const const_iterator& rhs)
const 4651 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4652 return m_pItem == rhs.m_pItem;
4654 bool operator!=(
const const_iterator& rhs)
const 4656 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4657 return m_pItem != rhs.m_pItem;
4661 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4667 const VmaRawList<T>* m_pList;
4668 const VmaListItem<T>* m_pItem;
4670 friend class VmaList<T, AllocatorT>;
4673 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4675 bool empty()
const {
return m_RawList.IsEmpty(); }
4676 size_t size()
const {
return m_RawList.GetCount(); }
4678 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4679 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4681 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4682 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4684 void clear() { m_RawList.Clear(); }
4685 void push_back(
const T& value) { m_RawList.PushBack(value); }
4686 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4687 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4690 VmaRawList<T> m_RawList;
4693 #endif // #if VMA_USE_STL_LIST 4701 #if VMA_USE_STL_UNORDERED_MAP 4703 #define VmaPair std::pair 4705 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4706 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4708 #else // #if VMA_USE_STL_UNORDERED_MAP 4710 template<
// VmaPair<T1, T2>: minimal std::pair replacement used when
// VMA_USE_STL_UNORDERED_MAP == 0 (otherwise VmaPair is #defined to std::pair).
// The member declarations (`first`, `second`) sit on lines elided from this
// chunk; their existence is established by the constructor initializer lists.
typename T1,
typename T2>
// Default-constructs both members (value-initialization).
4716 VmaPair() : first(), second() { }
// Copy-initializes `first`/`second` from the given values.
4717 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
// VmaMap<KeyT, ValueT>: associative container used when
// VMA_USE_STL_UNORDERED_MAP == 0. Implemented as a VmaVector of
// VmaPair<KeyT, ValueT> kept sorted by key (see VmaMap::insert below),
// so iterators are plain pointers into the vector.
4723 template<
typename KeyT,
typename ValueT>
4727 typedef VmaPair<KeyT, ValueT> PairType;
// iterator is a raw pointer into the underlying contiguous vector.
4728 typedef PairType* iterator;
// Uses the caller-provided VmaStlAllocator so all storage goes through
// the library's allocation callbacks.
4730 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4732 iterator begin() {
return m_Vector.begin(); }
4733 iterator end() {
return m_Vector.end(); }
// insert/find/erase are defined out-of-line later in this file.
4735 void insert(
const PairType& pair);
4736 iterator find(
const KeyT& key);
4737 void erase(iterator it);
// Backing storage: contiguous, key-sorted vector of pairs.
4740 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
// VmaPairFirstLess: strict-weak-ordering comparator that orders
// VmaPair values by their `first` member only. The second overload
// allows comparing a pair directly against a bare key, which lets the
// binary-search helpers look up by key without constructing a pair.
4743 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4745 template<
typename FirstT,
typename SecondT>
4746 struct VmaPairFirstLess
// pair-vs-pair: compare keys.
4748 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4750 return lhs.first < rhs.first;
// pair-vs-key: heterogeneous comparison used by find().
4752 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4754 return lhs.first < rhsFirst;
// VmaMap::insert: keeps m_Vector sorted by key. Binary-searches for the
// first element not less than the new pair (VmaBinaryFindFirstNotLess with
// VmaPairFirstLess), converts the resulting pointer to an index, then
// inserts at that index via VmaVectorInsert. O(log n) search + O(n) shift.
// NOTE(review): no duplicate-key check is visible here — presumably
// duplicates are allowed or never occur; confirm against callers.
4758 template<
typename KeyT,
typename ValueT>
4759 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4761 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4763 m_Vector.data() + m_Vector.size(),
4765 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4766 VmaVectorInsert(m_Vector, indexToInsert, pair);
// VmaMap::find: binary search over the key-sorted vector using the
// heterogeneous pair-vs-key overload of VmaPairFirstLess. If the located
// position holds an element whose key equals `key`, that iterator is
// returned (the success-return statement lies on lines elided from this
// chunk); otherwise returns end().
4769 template<
typename KeyT,
typename ValueT>
4770 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4772 PairType* it = VmaBinaryFindFirstNotLess(
4774 m_Vector.data() + m_Vector.size(),
4776 VmaPairFirstLess<KeyT, ValueT>());
// Lower-bound result must both be in range and actually match the key.
4777 if((it != m_Vector.end()) && (it->first == key))
4783 return m_Vector.end();
// VmaMap::erase: removes the element the iterator points at by converting
// the pointer to an index (it - begin()) and delegating to VmaVectorRemove,
// which shifts the tail elements down. Sort order is preserved.
4787 template<
typename KeyT,
typename ValueT>
4788 void VmaMap<KeyT, ValueT>::erase(iterator it)
4790 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4793 #endif // #if VMA_USE_STL_UNORDERED_MAP 4799 class VmaDeviceMemoryBlock;
// Direction selector for vkFlushMappedMemoryRanges /
// vkInvalidateMappedMemoryRanges style cache operations.
4801 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
// VmaAllocation_T: internal representation behind the public VmaAllocation
// handle. An allocation is either a sub-range of a shared VkDeviceMemory
// block (ALLOCATION_TYPE_BLOCK) or owns a whole dedicated VkDeviceMemory
// (ALLOCATION_TYPE_DEDICATED); the two payloads live in the anonymous-union
// members m_BlockAllocation / m_DedicatedAllocation near the bottom.
4803 struct VmaAllocation_T
4805 VMA_CLASS_NO_COPY(VmaAllocation_T)
// High bit of m_MapCount marks a persistently-mapped allocation; the low
// 7 bits count explicit Map() calls.
4807 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
// Set when pUserData is treated as an owned string copy rather than an
// opaque pointer.
4811 FLAG_USER_DATA_STRING = 0x01,
4815 enum ALLOCATION_TYPE
4817 ALLOCATION_TYPE_NONE,
4818 ALLOCATION_TYPE_BLOCK,
4819 ALLOCATION_TYPE_DEDICATED,
// Constructor only records bookkeeping; one of the Init*() methods below
// must be called afterwards to give the allocation a concrete type.
4822 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
4825 m_pUserData(VMA_NULL),
4826 m_LastUseFrameIndex(currentFrameIndex),
4827 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
4828 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
4830 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
4832 #if VMA_STATS_STRING_ENABLED 4833 m_CreationFrameIndex = currentFrameIndex;
4834 m_BufferImageUsage = 0;
// Destructor-time invariants: all user Map() calls undone, user data freed.
4840 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4843 VMA_ASSERT(m_pUserData == VMA_NULL);
// Turns a fresh (NONE) allocation into a block suballocation.
4846 void InitBlockAllocation(
4848 VmaDeviceMemoryBlock* block,
4849 VkDeviceSize offset,
4850 VkDeviceSize alignment,
4852 VmaSuballocationType suballocationType,
4856 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4857 VMA_ASSERT(block != VMA_NULL);
4858 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4859 m_Alignment = alignment;
4861 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4862 m_SuballocationType = (uint8_t)suballocationType;
4863 m_BlockAllocation.m_hPool = hPool;
4864 m_BlockAllocation.m_Block = block;
4865 m_BlockAllocation.m_Offset = offset;
4866 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
// Init path for the special "lost" placeholder allocation: block type but
// with null pool/block and frame index already VMA_FRAME_INDEX_LOST.
4871 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4872 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
4873 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4874 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
4875 m_BlockAllocation.m_Block = VMA_NULL;
4876 m_BlockAllocation.m_Offset = 0;
4877 m_BlockAllocation.m_CanBecomeLost =
true;
// Defragmentation support: retarget this allocation to another block/offset.
4880 void ChangeBlockAllocation(
4882 VmaDeviceMemoryBlock* block,
4883 VkDeviceSize offset);
4885 void ChangeSize(VkDeviceSize newSize);
4886 void ChangeOffset(VkDeviceSize newOffset);
// Turns a fresh (NONE) allocation into a dedicated-memory allocation.
4889 void InitDedicatedAllocation(
4890 uint32_t memoryTypeIndex,
4891 VkDeviceMemory hMemory,
4892 VmaSuballocationType suballocationType,
4896 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4897 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
4898 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
4901 m_SuballocationType = (uint8_t)suballocationType;
// A non-null mapped pointer implies the memory stays persistently mapped.
4902 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4903 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
4904 m_DedicatedAllocation.m_hMemory = hMemory;
4905 m_DedicatedAllocation.m_pMappedData = pMappedData;
// --- simple accessors ---
4908 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
4909 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
4910 VkDeviceSize GetSize()
const {
return m_Size; }
4911 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
4912 void* GetUserData()
const {
return m_pUserData; }
4913 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
4914 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
// Only valid for block allocations (asserted).
4916 VmaDeviceMemoryBlock* GetBlock()
const 4918 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4919 return m_BlockAllocation.m_Block;
4921 VkDeviceSize GetOffset()
const;
4922 VkDeviceMemory GetMemory()
const;
4923 uint32_t GetMemoryTypeIndex()
const;
4924 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
4925 void* GetMappedData()
const;
4926 bool CanBecomeLost()
const;
// Lost-allocation machinery: the last-use frame index is atomic so it can
// be touched from multiple threads without the allocator mutex.
4929 uint32_t GetLastUseFrameIndex()
const 4931 return m_LastUseFrameIndex.load();
// compare_exchange_weak may fail spuriously; callers retry in a loop.
4933 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
4935 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
4945 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Statistics for dedicated allocations only (asserted).
4947 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
4949 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
// Map/unmap helpers specialized per allocation type.
4960 void BlockAllocMap();
4961 void BlockAllocUnmap();
4962 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
4965 #if VMA_STATS_STRING_ENABLED 4966 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
4967 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
// Usage flags may be recorded at most once (asserted zero beforehand).
4969 void InitBufferImageUsage(uint32_t bufferImageUsage)
4971 VMA_ASSERT(m_BufferImageUsage == 0);
4972 m_BufferImageUsage = bufferImageUsage;
4975 void PrintParameters(
class VmaJsonWriter& json)
const;
// --- data members ---
4979 VkDeviceSize m_Alignment;
4980 VkDeviceSize m_Size;
4982 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
4984 uint8_t m_SuballocationType;
// Payload when this allocation lives inside a shared memory block.
4991 struct BlockAllocation
4994 VmaDeviceMemoryBlock* m_Block;
4995 VkDeviceSize m_Offset;
4996 bool m_CanBecomeLost;
// Payload when this allocation owns its own VkDeviceMemory.
5000 struct DedicatedAllocation
5002 uint32_t m_MemoryTypeIndex;
5003 VkDeviceMemory m_hMemory;
5004 void* m_pMappedData;
5010 BlockAllocation m_BlockAllocation;
5012 DedicatedAllocation m_DedicatedAllocation;
5015 #if VMA_STATS_STRING_ENABLED 5016 uint32_t m_CreationFrameIndex;
5017 uint32_t m_BufferImageUsage;
// VmaSuballocation: one entry in a block's metadata describing either a used
// range (with its VmaSuballocationType) or a free range, identified by
// offset. Comparators below order suballocations by offset, ascending or
// descending, for the different metadata algorithms.
5027 struct VmaSuballocation
5029 VkDeviceSize offset;
5032 VmaSuballocationType type;
// Ascending-by-offset ordering.
5036 struct VmaSuballocationOffsetLess
5038 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5040 return lhs.offset < rhs.offset;
// Descending-by-offset ordering (used for the upper stack of the linear
// algorithm's double-stack mode — TODO confirm against usage sites).
5043 struct VmaSuballocationOffsetGreater
5045 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5047 return lhs.offset > rhs.offset;
// Doubly-linked list of suballocations, allocated via the library callbacks.
5051 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost weight assigned to making one allocation lost, expressed in bytes so
// it can be summed with real byte counts in CalcCost().
5054 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// VmaAllocationRequest: result of a metadata search for space — where the
// new allocation would go and what it would cost (existing allocations that
// must be made lost plus bookkeeping sizes).
5069 struct VmaAllocationRequest
5071 VkDeviceSize offset;
5072 VkDeviceSize sumFreeSize;
5073 VkDeviceSize sumItemSize;
5074 VmaSuballocationList::iterator item;
5075 size_t itemsToMakeLostCount;
// Total cost: bytes of allocations sacrificed + a fixed penalty per
// allocation made lost. Lower is better when comparing candidate requests.
5078 VkDeviceSize CalcCost()
const 5080 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// VmaBlockMetadata: abstract interface for the bookkeeping of a single
// VkDeviceMemory block — which byte ranges are allocated and which are free.
// Concrete strategies follow: _Generic (free list), _Linear (ring/stack),
// _Buddy (power-of-two tree).
5088 class VmaBlockMetadata
5092 virtual ~VmaBlockMetadata() { }
// Must be called once with the block size before any other use.
5093 virtual void Init(VkDeviceSize size) { m_Size = size; }
// Consistency check of internal structures; heavy, used in debug paths.
5096 virtual bool Validate()
const = 0;
5097 VkDeviceSize GetSize()
const {
return m_Size; }
5098 virtual size_t GetAllocationCount()
const = 0;
5099 virtual VkDeviceSize GetSumFreeSize()
const = 0;
5100 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
5102 virtual bool IsEmpty()
const = 0;
5104 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
5106 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
5108 #if VMA_STATS_STRING_ENABLED 5109 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
// Searches for space for a new allocation. Returns success via the bool
// and the placement (and allocations to sacrifice, if canMakeOtherLost)
// via *pAllocationRequest; does not modify the metadata yet.
5115 virtual bool CreateAllocationRequest(
5116 uint32_t currentFrameIndex,
5117 uint32_t frameInUseCount,
5118 VkDeviceSize bufferImageGranularity,
5119 VkDeviceSize allocSize,
5120 VkDeviceSize allocAlignment,
5122 VmaSuballocationType allocType,
5123 bool canMakeOtherLost,
5126 VmaAllocationRequest* pAllocationRequest) = 0;
// Second phase of the lost-allocation protocol: actually invalidate the
// allocations a prior CreateAllocationRequest decided to sacrifice.
5128 virtual bool MakeRequestedAllocationsLost(
5129 uint32_t currentFrameIndex,
5130 uint32_t frameInUseCount,
5131 VmaAllocationRequest* pAllocationRequest) = 0;
5133 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
// Scans mapped block data for corruption-detection markers.
5135 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
// Commits a previously created request into the metadata (Alloc).
5139 const VmaAllocationRequest& request,
5140 VmaSuballocationType type,
5141 VkDeviceSize allocSize,
5147 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
// In-place grow/shrink; default says "unsupported" — only some concrete
// algorithms override this with a real implementation.
5150 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize) {
return false; }
5153 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
// Shared helpers for the JSON statistics dump used by subclasses'
// PrintDetailedMap implementations.
5155 #if VMA_STATS_STRING_ENABLED 5156 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
5157 VkDeviceSize unusedBytes,
5158 size_t allocationCount,
5159 size_t unusedRangeCount)
const;
5160 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
5161 VkDeviceSize offset,
5163 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
5164 VkDeviceSize offset,
5165 VkDeviceSize size)
const;
5166 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
5170 VkDeviceSize m_Size;
5171 const VkAllocationCallbacks* m_pAllocationCallbacks;
// VMA_VALIDATE: assert-and-report helper used by the Validate() methods.
// VmaBlockMetadata_Generic: the default metadata algorithm — a linked list
// of suballocations (used and free interleaved) plus a size-sorted vector
// of iterators to free ranges for best-fit lookup.
5174 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 5175 VMA_ASSERT(0 && "Validation failed: " #cond); \ 5179 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
5181 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
5184 virtual ~VmaBlockMetadata_Generic();
5185 virtual void Init(VkDeviceSize size);
5187 virtual bool Validate()
const;
// List holds both used and free items, hence subtracting m_FreeCount.
5188 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
5189 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5190 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5191 virtual bool IsEmpty()
const;
5193 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5194 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5196 #if VMA_STATS_STRING_ENABLED 5197 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// VmaBlockMetadata interface — see base class for parameter semantics.
5200 virtual bool CreateAllocationRequest(
5201 uint32_t currentFrameIndex,
5202 uint32_t frameInUseCount,
5203 VkDeviceSize bufferImageGranularity,
5204 VkDeviceSize allocSize,
5205 VkDeviceSize allocAlignment,
5207 VmaSuballocationType allocType,
5208 bool canMakeOtherLost,
5210 VmaAllocationRequest* pAllocationRequest);
5212 virtual bool MakeRequestedAllocationsLost(
5213 uint32_t currentFrameIndex,
5214 uint32_t frameInUseCount,
5215 VmaAllocationRequest* pAllocationRequest);
5217 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5219 virtual VkResult CheckCorruption(
const void* pBlockData);
5222 const VmaAllocationRequest& request,
5223 VmaSuballocationType type,
5224 VkDeviceSize allocSize,
5229 virtual void FreeAtOffset(VkDeviceSize offset);
// This algorithm supports in-place resize (overrides the base's `false`).
5231 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize);
5236 bool IsBufferImageGranularityConflictPossible(
5237 VkDeviceSize bufferImageGranularity,
5238 VmaSuballocationType& inOutPrevSuballocType)
const;
// Defragmentation algorithms manipulate this metadata directly.
5241 friend class VmaDefragmentationAlgorithm_Generic;
5242 friend class VmaDefragmentationAlgorithm_Fast;
5244 uint32_t m_FreeCount;
5245 VkDeviceSize m_SumFreeSize;
5246 VmaSuballocationList m_Suballocations;
// Iterators into m_Suballocations for FREE items only, sorted by size —
// enables O(log n) best-fit via binary search.
5249 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
5251 bool ValidateFreeSuballocationList()
const;
// Tests whether an allocation can be placed at/after suballocItem,
// returning offset and (optionally) the cost in allocations to make lost.
5255 bool CheckAllocation(
5256 uint32_t currentFrameIndex,
5257 uint32_t frameInUseCount,
5258 VkDeviceSize bufferImageGranularity,
5259 VkDeviceSize allocSize,
5260 VkDeviceSize allocAlignment,
5261 VmaSuballocationType allocType,
5262 VmaSuballocationList::const_iterator suballocItem,
5263 bool canMakeOtherLost,
5264 VkDeviceSize* pOffset,
5265 size_t* itemsToMakeLostCount,
5266 VkDeviceSize* pSumFreeSize,
5267 VkDeviceSize* pSumItemSize)
const;
// Coalesces a free item with the following free item.
5269 void MergeFreeWithNext(VmaSuballocationList::iterator item);
5273 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
// Maintain m_FreeSuballocationsBySize when free items appear/disappear.
5276 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
5279 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// VmaBlockMetadata_Linear: metadata for VMA_POOL_CREATE_LINEAR_ALGORITHM —
// allocations are kept in two offset-ordered vectors that together model a
// stack, double stack, or ring buffer depending on m_2ndVectorMode.
5360 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
5362 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
5365 virtual ~VmaBlockMetadata_Linear();
5366 virtual void Init(VkDeviceSize size);
5368 virtual bool Validate()
const;
5369 virtual size_t GetAllocationCount()
const;
5370 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5371 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5372 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
5374 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5375 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5377 #if VMA_STATS_STRING_ENABLED 5378 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// VmaBlockMetadata interface — see base class for parameter semantics.
5381 virtual bool CreateAllocationRequest(
5382 uint32_t currentFrameIndex,
5383 uint32_t frameInUseCount,
5384 VkDeviceSize bufferImageGranularity,
5385 VkDeviceSize allocSize,
5386 VkDeviceSize allocAlignment,
5388 VmaSuballocationType allocType,
5389 bool canMakeOtherLost,
5391 VmaAllocationRequest* pAllocationRequest);
5393 virtual bool MakeRequestedAllocationsLost(
5394 uint32_t currentFrameIndex,
5395 uint32_t frameInUseCount,
5396 VmaAllocationRequest* pAllocationRequest);
5398 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5400 virtual VkResult CheckCorruption(
const void* pBlockData);
5403 const VmaAllocationRequest& request,
5404 VmaSuballocationType type,
5405 VkDeviceSize allocSize,
5410 virtual void FreeAtOffset(VkDeviceSize offset);
5420 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
// How the 2nd vector is being used relative to the 1st.
5422 enum SECOND_VECTOR_MODE
5424 SECOND_VECTOR_EMPTY,
5429 SECOND_VECTOR_RING_BUFFER,
5435 SECOND_VECTOR_DOUBLE_STACK,
5438 VkDeviceSize m_SumFreeSize;
// Double-buffered storage: which physical vector is "1st" is selected by
// m_1stVectorIndex so the roles can be swapped without copying elements.
5439 SuballocationVectorType m_Suballocations0, m_Suballocations1;
5440 uint32_t m_1stVectorIndex;
5441 SECOND_VECTOR_MODE m_2ndVectorMode;
5443 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5444 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5445 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5446 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
// Freed entries are first marked null and only compacted away when
// ShouldCompact1st() decides it is worthwhile (see CleanupAfterFree).
5449 size_t m_1stNullItemsBeginCount;
5451 size_t m_1stNullItemsMiddleCount;
5453 size_t m_2ndNullItemsCount;
5455 bool ShouldCompact1st()
const;
5456 void CleanupAfterFree();
// VmaBlockMetadata_Buddy: buddy-allocator metadata (power-of-two splitting
// tree of Nodes with per-level free lists). Does not support lost
// allocations or corruption detection (see the stubbed overrides below).
5470 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5472 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5475 virtual ~VmaBlockMetadata_Buddy();
5476 virtual void Init(VkDeviceSize size);
5478 virtual bool Validate()
const;
5479 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
// Unusable tail (size not covered by the power-of-two usable size) is
// reported as "free" so totals add up to the full block size.
5480 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5481 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
// Empty iff the root node of the buddy tree is one whole free range.
5482 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5484 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5485 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5487 #if VMA_STATS_STRING_ENABLED 5488 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5491 virtual bool CreateAllocationRequest(
5492 uint32_t currentFrameIndex,
5493 uint32_t frameInUseCount,
5494 VkDeviceSize bufferImageGranularity,
5495 VkDeviceSize allocSize,
5496 VkDeviceSize allocAlignment,
5498 VmaSuballocationType allocType,
5499 bool canMakeOtherLost,
5501 VmaAllocationRequest* pAllocationRequest);
5503 virtual bool MakeRequestedAllocationsLost(
5504 uint32_t currentFrameIndex,
5505 uint32_t frameInUseCount,
5506 VmaAllocationRequest* pAllocationRequest);
5508 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Corruption detection not implemented for the buddy algorithm.
5510 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
5513 const VmaAllocationRequest& request,
5514 VmaSuballocationType type,
5515 VkDeviceSize allocSize,
// Both Free overloads funnel into the private FreeAtOffset(alloc, offset).
5519 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5520 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
// Nodes are never split below MIN_NODE_SIZE; tree depth capped at MAX_LEVELS.
5523 static const VkDeviceSize MIN_NODE_SIZE = 32;
5524 static const size_t MAX_LEVELS = 30;
// Accumulators filled while recursively validating the tree, then compared
// against the cached counters.
5526 struct ValidationContext
5528 size_t calculatedAllocationCount;
5529 size_t calculatedFreeCount;
5530 VkDeviceSize calculatedSumFreeSize;
5532 ValidationContext() :
5533 calculatedAllocationCount(0),
5534 calculatedFreeCount(0),
5535 calculatedSumFreeSize(0) { }
5540 VkDeviceSize offset;
// Usable size is the largest power-of-two portion of the block.
5570 VkDeviceSize m_UsableSize;
5571 uint32_t m_LevelCount;
// Intrusive doubly-linked free list per level.
5577 } m_FreeList[MAX_LEVELS];
5579 size_t m_AllocationCount;
5583 VkDeviceSize m_SumFreeSize;
5585 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5586 void DeleteNode(Node* node);
5587 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
5588 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
// Each level halves the node size: level 0 == whole usable size.
5589 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
5591 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5592 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
5596 void AddToFreeListFront(uint32_t level, Node* node);
5600 void RemoveFromFreeList(uint32_t level, Node* node);
5602 #if VMA_STATS_STRING_ENABLED 5603 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
// VmaDeviceMemoryBlock: wraps one VkDeviceMemory object plus the metadata
// strategy (m_pMetadata) that tracks suballocations inside it. Handles
// ref-counted mapping and binding of buffers/images at offsets.
5613 class VmaDeviceMemoryBlock
5615 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5617 VmaBlockMetadata* m_pMetadata;
// Destructor-time invariants: fully unmapped and memory already released
// (Destroy must have been called; ~ does not free the VkDeviceMemory).
5621 ~VmaDeviceMemoryBlock()
5623 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5624 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// Init: adopts an already-allocated VkDeviceMemory and creates the
// metadata object chosen by `algorithm`.
5630 uint32_t newMemoryTypeIndex,
5631 VkDeviceMemory newMemory,
5632 VkDeviceSize newSize,
5634 uint32_t algorithm);
5638 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5639 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5640 uint32_t GetId()
const {
return m_Id; }
5641 void* GetMappedData()
const {
return m_pMappedData; }
5644 bool Validate()
const;
// Map with reference counting: `count` map requests at once; the memory
// is actually vkMapMemory'd only on the 0 -> nonzero transition.
5649 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
// Corruption-detection markers written/checked around an allocation.
5652 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5653 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5655 VkResult BindBufferMemory(
5659 VkResult BindImageMemory(
5665 uint32_t m_MemoryTypeIndex;
5667 VkDeviceMemory m_hMemory;
// Map reference count and cached mapped pointer (valid while count > 0).
5675 uint32_t m_MapCount;
5676 void* m_pMappedData;
// VmaPointerLess: orders raw pointers by address (for sorted containers of
// pointers). Body of operator() lies on lines elided from this chunk.
5679 struct VmaPointerLess
5681 bool operator()(
const void* lhs,
const void* rhs)
// VmaDefragmentationMove: one planned copy during defragmentation —
// source block/offset to destination block/offset (indices into the
// owning block vector).
const 5687 struct VmaDefragmentationMove
5689 size_t srcBlockIndex;
5690 size_t dstBlockIndex;
5691 VkDeviceSize srcOffset;
5692 VkDeviceSize dstOffset;
5696 class VmaDefragmentationAlgorithm;
// VmaBlockVector: a growable sequence of VmaDeviceMemoryBlock for one memory
// type — either a default per-memory-type vector or the storage behind a
// custom VmaPool. Guards m_Blocks with a read-write mutex and implements
// allocation, lost-allocation handling, and defragmentation entry points.
5704 struct VmaBlockVector
5706 VMA_CLASS_NO_COPY(VmaBlockVector)
5710 uint32_t memoryTypeIndex,
5711 VkDeviceSize preferredBlockSize,
5712 size_t minBlockCount,
5713 size_t maxBlockCount,
5714 VkDeviceSize bufferImageGranularity,
5715 uint32_t frameInUseCount,
5717 bool explicitBlockSize,
5718 uint32_t algorithm);
// Pre-creates m_MinBlockCount empty blocks.
5721 VkResult CreateMinBlocks();
5723 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5724 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5725 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5726 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5727 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5731 bool IsEmpty()
const {
return m_Blocks.empty(); }
5732 bool IsCorruptionDetectionEnabled()
const;
// Allocate: tries existing blocks first, then creates a new block.
5736 uint32_t currentFrameIndex,
5738 VkDeviceSize alignment,
5740 VmaSuballocationType suballocType,
5749 #if VMA_STATS_STRING_ENABLED 5750 void PrintDetailedMap(
class VmaJsonWriter& json);
5753 void MakePoolAllocationsLost(
5754 uint32_t currentFrameIndex,
5755 size_t* pLostAllocationCount);
5756 VkResult CheckCorruption();
// Defragmentation: plans/performs moves subject to CPU and GPU
// byte/allocation budgets; GPU path records into `commandBuffer`.
5760 class VmaBlockVectorDefragmentationContext* pCtx,
5762 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
5763 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
5764 VkCommandBuffer commandBuffer);
5765 void DefragmentationEnd(
5766 class VmaBlockVectorDefragmentationContext* pCtx,
5772 size_t GetBlockCount()
const {
return m_Blocks.size(); }
5773 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
5774 size_t CalcAllocationCount()
const;
5775 bool IsBufferImageGranularityConflictPossible()
const;
5778 friend class VmaDefragmentationAlgorithm_Generic;
// Immutable configuration captured at construction.
5781 const uint32_t m_MemoryTypeIndex;
5782 const VkDeviceSize m_PreferredBlockSize;
5783 const size_t m_MinBlockCount;
5784 const size_t m_MaxBlockCount;
5785 const VkDeviceSize m_BufferImageGranularity;
5786 const uint32_t m_FrameInUseCount;
5787 const bool m_IsCustomPool;
5788 const bool m_ExplicitBlockSize;
5789 const uint32_t m_Algorithm;
// Tracks whether one empty block is being kept alive for reuse.
5793 bool m_HasEmptyBlock;
5794 VMA_RW_MUTEX m_Mutex;
5796 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
5797 uint32_t m_NextBlockId;
5799 VkDeviceSize CalcMaxBlockSize()
const;
5802 void Remove(VmaDeviceMemoryBlock* pBlock);
// Keeps m_Blocks approximately sorted with cheap per-call work instead of
// a full sort.
5806 void IncrementallySortBlocks();
5809 VkResult AllocateFromBlock(
5810 VmaDeviceMemoryBlock* pBlock,
5812 uint32_t currentFrameIndex,
5814 VkDeviceSize alignment,
5817 VmaSuballocationType suballocType,
5821 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// CPU path: memcpy between mapped blocks per the planned moves.
5824 void ApplyDefragmentationMovesCpu(
5825 class VmaBlockVectorDefragmentationContext* pDefragCtx,
5826 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
// GPU path: records copy commands into the provided command buffer.
5828 void ApplyDefragmentationMovesGpu(
5829 class VmaBlockVectorDefragmentationContext* pDefragCtx,
5830 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
5831 VkCommandBuffer commandBuffer);
// VmaPool_T: internal object behind the public VmaPool handle — owns a
// VmaBlockVector configured from the pool create info plus a numeric id
// assigned once (SetId asserts it is only set from 0).
5842 VMA_CLASS_NO_COPY(VmaPool_T)
5844 VmaBlockVector m_BlockVector;
5849 VkDeviceSize preferredBlockSize);
5852 uint32_t GetId()
const {
return m_Id; }
// Id may be assigned exactly once after construction.
5853 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
// VmaDefragmentationAlgorithm: abstract strategy for defragmenting one
// VmaBlockVector. Callers register allocations (or AddAll), then call
// Defragment() which fills `moves` subject to byte/allocation budgets.
5855 #if VMA_STATS_STRING_ENABLED 5870 class VmaDefragmentationAlgorithm
5872 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
5874 VmaDefragmentationAlgorithm(
5876 VmaBlockVector* pBlockVector,
5877 uint32_t currentFrameIndex) :
5878 m_hAllocator(hAllocator),
5879 m_pBlockVector(pBlockVector),
5880 m_CurrentFrameIndex(currentFrameIndex)
5883 virtual ~VmaDefragmentationAlgorithm()
// Register a single allocation; *pChanged is set if it gets moved.
5887 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) = 0;
5888 virtual void AddAll() = 0;
// Produces the planned moves; implementations respect both budgets.
5890 virtual VkResult Defragment(
5891 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
5892 VkDeviceSize maxBytesToMove,
5893 uint32_t maxAllocationsToMove) = 0;
5895 virtual VkDeviceSize GetBytesMoved()
const = 0;
5896 virtual uint32_t GetAllocationsMoved()
const = 0;
// Const pointer members: the targets are fixed for the algorithm's lifetime.
5900 VmaBlockVector*
const m_pBlockVector;
5901 const uint32_t m_CurrentFrameIndex;
// AllocationInfo: (allocation, changed-flag destination) pair shared by
// the concrete algorithms.
5903 struct AllocationInfo
5906 VkBool32* m_pChanged;
5909 m_hAllocation(VK_NULL_HANDLE),
5910 m_pChanged(VMA_NULL)
5914 m_hAllocation(hAlloc),
5915 m_pChanged(pChanged)
// VmaDefragmentationAlgorithm_Generic: the thorough defragmentation
// strategy — builds per-block info, sorts blocks/allocations, and moves
// allocations toward preferred destination blocks round by round.
5921 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
5923 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
5925 VmaDefragmentationAlgorithm_Generic(
5927 VmaBlockVector* pBlockVector,
5928 uint32_t currentFrameIndex,
5929 bool overlappingMoveSupported);
5930 virtual ~VmaDefragmentationAlgorithm_Generic();
5932 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
5933 virtual void AddAll() { m_AllAllocations =
true; }
5935 virtual VkResult Defragment(
5936 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
5937 VkDeviceSize maxBytesToMove,
5938 uint32_t maxAllocationsToMove);
5940 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
5941 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
5944 uint32_t m_AllocationCount;
5945 bool m_AllAllocations;
// Running totals reported via the getters above.
5947 VkDeviceSize m_BytesMoved;
5948 uint32_t m_AllocationsMoved;
// Sort helpers: largest-first by size / by offset.
5950 struct AllocationInfoSizeGreater
5952 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 5954 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
5958 struct AllocationInfoOffsetGreater
5960 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 5962 return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
// Per-block working state: which allocations in the block are candidates
// and whether the block also holds allocations that cannot be moved.
5968 size_t m_OriginalBlockIndex;
5969 VmaDeviceMemoryBlock* m_pBlock;
5970 bool m_HasNonMovableAllocations;
5971 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5973 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
5974 m_OriginalBlockIndex(SIZE_MAX),
5976 m_HasNonMovableAllocations(true),
5977 m_Allocations(pAllocationCallbacks)
// Non-movable iff the block holds allocations not registered for
// defragmentation (counts differ).
5981 void CalcHasNonMovableAllocations()
5983 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
5984 const size_t defragmentAllocCount = m_Allocations.size();
5985 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
5988 void SortAllocationsBySizeDescending()
5990 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
5993 void SortAllocationsByOffsetDescending()
5995 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
// Orders BlockInfo entries by underlying block address; the heterogeneous
// overload enables binary search against a raw block pointer.
5999 struct BlockPointerLess
6001 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 6003 return pLhsBlockInfo->m_pBlock < pRhsBlock;
6005 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6007 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Preference order for destination blocks: blocks with non-movable
// allocations first, then by smaller free size (pack tighter first).
6013 struct BlockInfoCompareMoveDestination
6015 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6017 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
6021 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
6025 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
6033 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
6034 BlockInfoVector m_Blocks;
// One pass of move planning within the given budgets.
6036 VkResult DefragmentRound(
6037 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6038 VkDeviceSize maxBytesToMove,
6039 uint32_t maxAllocationsToMove);
6041 size_t CalcBlocksWithNonMovableCount()
const;
// A move only helps if it goes to an earlier block, or backward within
// the same block — presumably to compact toward low offsets; confirm in
// the out-of-line definition.
6043 static bool MoveMakesSense(
6044 size_t dstBlockIndex, VkDeviceSize dstOffset,
6045 size_t srcBlockIndex, VkDeviceSize srcOffset);
// VmaDefragmentationAlgorithm_Fast: cheaper strategy that compacts
// allocations using a small fixed-size database of known free spaces
// instead of full per-allocation bookkeeping. Note AddAllocation only
// counts — individual tracking is not supported; use AddAll.
6048 class VmaDefragmentationAlgorithm_Fast :
public VmaDefragmentationAlgorithm
6050 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
6052 VmaDefragmentationAlgorithm_Fast(
6054 VmaBlockVector* pBlockVector,
6055 uint32_t currentFrameIndex,
6056 bool overlappingMoveSupported);
6057 virtual ~VmaDefragmentationAlgorithm_Fast();
// Only increments the counter; the fast algorithm moves all allocations.
6059 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
6060 virtual void AddAll() { m_AllAllocations =
true; }
6062 virtual VkResult Defragment(
6063 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6064 VkDeviceSize maxBytesToMove,
6065 uint32_t maxAllocationsToMove);
6067 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6068 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6073 size_t origBlockIndex;
// FreeSpaceDatabase: fixed-capacity (MAX_COUNT) set of the most useful
// known free ranges; blockInfoIndex == SIZE_MAX marks an empty slot.
6076 class FreeSpaceDatabase
6082 s.blockInfoIndex = SIZE_MAX;
6083 for(
size_t i = 0; i < MAX_COUNT; ++i)
6085 m_FreeSpaces[i] = s;
// Record a free range; tiny ranges are ignored, and when full the
// smallest stored range is evicted in favor of a larger one.
6089 void Register(
size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
6091 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6097 size_t bestIndex = SIZE_MAX;
6098 for(
size_t i = 0; i < MAX_COUNT; ++i)
// Empty slot found — use it.
6101 if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
// Otherwise prefer replacing the smallest smaller-than-new entry.
6106 if(m_FreeSpaces[i].size < size &&
6107 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
6113 if(bestIndex != SIZE_MAX)
6115 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
6116 m_FreeSpaces[bestIndex].offset = offset;
6117 m_FreeSpaces[bestIndex].size = size;
// Find a stored free range that fits `size` at `alignment`; on success
// returns its block index and aligned destination offset, and either
// shrinks the remaining range or drops the slot if the leftover is tiny.
6121 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
6122 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
6124 size_t bestIndex = SIZE_MAX;
6125 VkDeviceSize bestFreeSpaceAfter = 0;
6126 for(
size_t i = 0; i < MAX_COUNT; ++i)
6129 if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
6131 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
// Must fit after alignment padding.
6133 if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
6135 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
// Prefer the candidate leaving the most space after the allocation.
6137 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
6140 bestFreeSpaceAfter = freeSpaceAfter;
6146 if(bestIndex != SIZE_MAX)
6148 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
6149 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
// Keep the slot only if the remainder is still worth registering.
6151 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6154 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
6155 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
6156 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
6161 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
6171 static const size_t MAX_COUNT = 4;
6175 size_t blockInfoIndex;
6176 VkDeviceSize offset;
6178 } m_FreeSpaces[MAX_COUNT];
6181 const bool m_OverlappingMoveSupported;
6183 uint32_t m_AllocationCount;
6184 bool m_AllAllocations;
6186 VkDeviceSize m_BytesMoved;
6187 uint32_t m_AllocationsMoved;
6189 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
// Pre/post passes that adapt block metadata around the fast-move pass.
6191 void PreprocessMetadata();
6192 void PostprocessMetadata();
6193 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc);
// VmaBlockDefragmentationContext: per-block state used during GPU
// defragmentation — flags plus a temporary VkBuffer bound to the block so
// copy commands can reference it. Buffer starts null.
6196 struct VmaBlockDefragmentationContext
6199 VMA_CLASS_NO_COPY(VmaBlockDefragmentationContext)
6203 BLOCK_FLAG_USED = 0x00000001,
6208 VmaBlockDefragmentationContext() :
6210 hBuffer(VK_NULL_HANDLE)
// VmaBlockVectorDefragmentationContext: ties one VmaBlockVector (default or
// custom-pool) to its chosen VmaDefragmentationAlgorithm for the duration
// of a defragmentation pass, plus per-block contexts for the GPU path.
6215 class VmaBlockVectorDefragmentationContext
6217 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
6221 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
6223 VmaBlockVectorDefragmentationContext(
6226 VmaBlockVector* pBlockVector,
6227 uint32_t currFrameIndex,
6229 ~VmaBlockVectorDefragmentationContext();
// Null for a default memory-type vector, non-null for a custom pool.
6231 VmaPool GetCustomPool()
const {
return m_hCustomPool; }
6232 VmaBlockVector* GetBlockVector()
const {
return m_pBlockVector; }
6233 VmaDefragmentationAlgorithm* GetAlgorithm()
const {
return m_pAlgorithm; }
6235 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6236 void AddAll() { m_AllAllocations =
true; }
// Instantiates the algorithm (created in Begin, owned by this context).
6238 void Begin(
bool overlappingMoveSupported);
6245 VmaBlockVector*
const m_pBlockVector;
6246 const uint32_t m_CurrFrameIndex;
6247 const uint32_t m_AlgorithmFlags;
6249 VmaDefragmentationAlgorithm* m_pAlgorithm;
// Allocations registered before Begin(), forwarded to the algorithm.
6257 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
6258 bool m_AllAllocations;
6261 struct VmaDefragmentationContext_T
6264 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
6266 VmaDefragmentationContext_T(
6268 uint32_t currFrameIndex,
6271 ~VmaDefragmentationContext_T();
6273 void AddPools(uint32_t poolCount,
VmaPool* pPools);
6274 void AddAllocations(
6275 uint32_t allocationCount,
6277 VkBool32* pAllocationsChanged);
6285 VkResult Defragment(
6286 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
6287 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
6292 const uint32_t m_CurrFrameIndex;
6293 const uint32_t m_Flags;
6296 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
6298 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
6301 #if VMA_RECORDING_ENABLED 6308 void WriteConfiguration(
6309 const VkPhysicalDeviceProperties& devProps,
6310 const VkPhysicalDeviceMemoryProperties& memProps,
6311 bool dedicatedAllocationExtensionEnabled);
6314 void RecordCreateAllocator(uint32_t frameIndex);
6315 void RecordDestroyAllocator(uint32_t frameIndex);
6316 void RecordCreatePool(uint32_t frameIndex,
6319 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
6320 void RecordAllocateMemory(uint32_t frameIndex,
6321 const VkMemoryRequirements& vkMemReq,
6324 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
6325 const VkMemoryRequirements& vkMemReq,
6326 bool requiresDedicatedAllocation,
6327 bool prefersDedicatedAllocation,
6330 void RecordAllocateMemoryForImage(uint32_t frameIndex,
6331 const VkMemoryRequirements& vkMemReq,
6332 bool requiresDedicatedAllocation,
6333 bool prefersDedicatedAllocation,
6336 void RecordFreeMemory(uint32_t frameIndex,
6338 void RecordResizeAllocation(
6339 uint32_t frameIndex,
6341 VkDeviceSize newSize);
6342 void RecordSetAllocationUserData(uint32_t frameIndex,
6344 const void* pUserData);
6345 void RecordCreateLostAllocation(uint32_t frameIndex,
6347 void RecordMapMemory(uint32_t frameIndex,
6349 void RecordUnmapMemory(uint32_t frameIndex,
6351 void RecordFlushAllocation(uint32_t frameIndex,
6352 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6353 void RecordInvalidateAllocation(uint32_t frameIndex,
6354 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6355 void RecordCreateBuffer(uint32_t frameIndex,
6356 const VkBufferCreateInfo& bufCreateInfo,
6359 void RecordCreateImage(uint32_t frameIndex,
6360 const VkImageCreateInfo& imageCreateInfo,
6363 void RecordDestroyBuffer(uint32_t frameIndex,
6365 void RecordDestroyImage(uint32_t frameIndex,
6367 void RecordTouchAllocation(uint32_t frameIndex,
6369 void RecordGetAllocationInfo(uint32_t frameIndex,
6371 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
6381 class UserDataString
6385 const char* GetString()
const {
return m_Str; }
6395 VMA_MUTEX m_FileMutex;
6397 int64_t m_StartCounter;
6399 void GetBasicParams(CallParams& outParams);
6403 #endif // #if VMA_RECORDING_ENABLED 6406 struct VmaAllocator_T
6408 VMA_CLASS_NO_COPY(VmaAllocator_T)
6411 bool m_UseKhrDedicatedAllocation;
6413 bool m_AllocationCallbacksSpecified;
6414 VkAllocationCallbacks m_AllocationCallbacks;
6418 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
6419 VMA_MUTEX m_HeapSizeLimitMutex;
6421 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
6422 VkPhysicalDeviceMemoryProperties m_MemProps;
6425 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
6428 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
6429 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
6430 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
6436 const VkAllocationCallbacks* GetAllocationCallbacks()
const 6438 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
6442 return m_VulkanFunctions;
6445 VkDeviceSize GetBufferImageGranularity()
const 6448 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
6449 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
6452 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
6453 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
6455 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 6457 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
6458 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
6461 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 6463 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
6464 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
6467 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 6469 return IsMemoryTypeNonCoherent(memTypeIndex) ?
6470 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
6471 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
6474 bool IsIntegratedGpu()
const 6476 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
6479 #if VMA_RECORDING_ENABLED 6480 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
6483 void GetBufferMemoryRequirements(
6485 VkMemoryRequirements& memReq,
6486 bool& requiresDedicatedAllocation,
6487 bool& prefersDedicatedAllocation)
const;
6488 void GetImageMemoryRequirements(
6490 VkMemoryRequirements& memReq,
6491 bool& requiresDedicatedAllocation,
6492 bool& prefersDedicatedAllocation)
const;
6495 VkResult AllocateMemory(
6496 const VkMemoryRequirements& vkMemReq,
6497 bool requiresDedicatedAllocation,
6498 bool prefersDedicatedAllocation,
6499 VkBuffer dedicatedBuffer,
6500 VkImage dedicatedImage,
6502 VmaSuballocationType suballocType,
6508 VkResult ResizeAllocation(
6510 VkDeviceSize newSize);
6512 void CalculateStats(
VmaStats* pStats);
6514 #if VMA_STATS_STRING_ENABLED 6515 void PrintDetailedMap(
class VmaJsonWriter& json);
6518 VkResult DefragmentationBegin(
6522 VkResult DefragmentationEnd(
6529 void DestroyPool(
VmaPool pool);
6532 void SetCurrentFrameIndex(uint32_t frameIndex);
6533 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
6535 void MakePoolAllocationsLost(
6537 size_t* pLostAllocationCount);
6538 VkResult CheckPoolCorruption(
VmaPool hPool);
6539 VkResult CheckCorruption(uint32_t memoryTypeBits);
6543 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
6544 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
6549 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
6550 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
6552 void FlushOrInvalidateAllocation(
6554 VkDeviceSize offset, VkDeviceSize size,
6555 VMA_CACHE_OPERATION op);
6557 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
6560 VkDeviceSize m_PreferredLargeHeapBlockSize;
6562 VkPhysicalDevice m_PhysicalDevice;
6563 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
6565 VMA_RW_MUTEX m_PoolsMutex;
6567 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
6568 uint32_t m_NextPoolId;
6572 #if VMA_RECORDING_ENABLED 6573 VmaRecorder* m_pRecorder;
6578 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
6580 VkResult AllocateMemoryOfType(
6582 VkDeviceSize alignment,
6583 bool dedicatedAllocation,
6584 VkBuffer dedicatedBuffer,
6585 VkImage dedicatedImage,
6587 uint32_t memTypeIndex,
6588 VmaSuballocationType suballocType,
6592 VkResult AllocateDedicatedMemory(
6594 VmaSuballocationType suballocType,
6595 uint32_t memTypeIndex,
6597 bool isUserDataString,
6599 VkBuffer dedicatedBuffer,
6600 VkImage dedicatedImage,
// -------------------------------------------------------------------------
// CPU-side heap helpers. All host allocations made on behalf of a
// VmaAllocator route through its VkAllocationCallbacks (m_AllocationCallbacks),
// so the application-supplied allocator is honored for internal bookkeeping.
// NOTE(review): the extraction dropped brace-only and some statement lines in
// this chunk (embedded original line numbers jump), so bodies look unbraced.
// -------------------------------------------------------------------------

// Allocate `size` bytes aligned to `alignment`, delegating to the
// callbacks-based VmaMalloc overload.
6610 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
6612 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);

// Release a pointer previously obtained from VmaMalloc above.
6615 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
6617 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);

// Typed single-object raw allocation (the function header on original lines
// ~6621-6622 was dropped by extraction; presumably the vma_new-style helper).
6620 template<
typename T>
6623 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));

// Typed raw allocation for an array of `count` elements of T.
6626 template<
typename T>
6627 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
6629 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));

// Destroy one object and release its storage (the explicit destructor call on
// original lines ~6635-6637 was dropped by extraction).
6632 template<
typename T>
6633 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
6638 VmaFree(hAllocator, ptr);

// Destroy `count` elements in reverse order, then release the array storage.
6642 template<
typename T>
6643 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
6647 for(
size_t i = count; i--; )
6649 VmaFree(hAllocator, ptr);
// Minimal append-only string buffer used to build the JSON stats string.
// Backed by VmaVector<char> so growth uses the allocator's host callbacks.
// Only compiled when VMA_STATS_STRING_ENABLED.
6656 #if VMA_STATS_STRING_ENABLED 6658 class VmaStringBuilder
// Construct with the allocator's host allocation callbacks.
6661 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
// Current length in bytes (data is NOT null-terminated).
6662 size_t GetLength()
const {
return m_Data.size(); }
// Raw character data; valid for GetLength() bytes.
6663 const char* GetData()
const {
return m_Data.data(); }
// Append a single character.
6665 void Add(
char ch) { m_Data.push_back(ch); }
// Append a null-terminated C string (defined below).
6666 void Add(
const char* pStr);
6667 void AddNewLine() { Add(
'\n'); }
// Append decimal representations of integers / hex of a pointer.
6668 void AddNumber(uint32_t num);
6669 void AddNumber(uint64_t num);
6670 void AddPointer(
const void* ptr);
6673 VmaVector< char, VmaStlAllocator<char> > m_Data;

// Append a C string by resizing the buffer and memcpy-ing the bytes in.
// NOTE(review): a guard line (orig ~6679-6680, presumably `if(strLen > 0)`)
// was dropped by extraction.
6676 void VmaStringBuilder::Add(
const char* pStr)
6678 const size_t strLen = strlen(pStr);
6681 const size_t oldCount = m_Data.size();
6682 m_Data.resize(oldCount + strLen);
6683 memcpy(m_Data.data() + oldCount, pStr, strLen);

// Format a 32-bit number into a stack buffer, then append it. The buffer
// declaration and trailing Add(buf) lines were dropped by extraction.
6687 void VmaStringBuilder::AddNumber(uint32_t num)
6690 VmaUint32ToStr(buf,
sizeof(buf), num);

// Same for 64-bit numbers.
6694 void VmaStringBuilder::AddNumber(uint64_t num)
6697 VmaUint64ToStr(buf,
sizeof(buf), num);

// Append a pointer formatted as text (VmaPtrToStr).
6701 void VmaStringBuilder::AddPointer(
const void* ptr)
6704 VmaPtrToStr(buf,
sizeof(buf), ptr);
6708 #endif // #if VMA_STATS_STRING_ENABLED 6713 #if VMA_STATS_STRING_ENABLED 6717 VMA_CLASS_NO_COPY(VmaJsonWriter)
6719 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
6722 void BeginObject(
bool singleLine =
false);
6725 void BeginArray(
bool singleLine =
false);
6728 void WriteString(
const char* pStr);
6729 void BeginString(
const char* pStr = VMA_NULL);
6730 void ContinueString(
const char* pStr);
6731 void ContinueString(uint32_t n);
6732 void ContinueString(uint64_t n);
6733 void ContinueString_Pointer(
const void* ptr);
6734 void EndString(
const char* pStr = VMA_NULL);
6736 void WriteNumber(uint32_t n);
6737 void WriteNumber(uint64_t n);
6738 void WriteBool(
bool b);
6742 static const char*
const INDENT;
6744 enum COLLECTION_TYPE
6746 COLLECTION_TYPE_OBJECT,
6747 COLLECTION_TYPE_ARRAY,
6751 COLLECTION_TYPE type;
6752 uint32_t valueCount;
6753 bool singleLineMode;
6756 VmaStringBuilder& m_SB;
6757 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
6758 bool m_InsideString;
6760 void BeginValue(
bool isString);
6761 void WriteIndent(
bool oneLess =
false);
// One indentation step used by VmaJsonWriter::WriteIndent (two spaces).
6764 const char*
const VmaJsonWriter::INDENT =
"  ";
// VmaJsonWriter method definitions: a small streaming JSON emitter writing
// into a VmaStringBuilder. Objects/arrays are tracked on m_Stack so commas,
// colons and indentation can be emitted automatically.
// NOTE(review): extraction dropped brace-only and several statement lines
// throughout (embedded original numbering jumps); code below is kept verbatim.

// Construct over an external string builder; the stack uses the allocator's
// host callbacks.
6766 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
6768 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
6769 m_InsideString(false)

// On destruction, every BeginString/BeginObject/BeginArray must have been
// closed — the writer must be back at top level.
6773 VmaJsonWriter::~VmaJsonWriter()
6775 VMA_ASSERT(!m_InsideString);
6776 VMA_ASSERT(m_Stack.empty());

// Open a JSON object; singleLine suppresses newlines/indent inside it.
// (Extraction dropped the lines emitting '{' and declaring `item`.)
6779 void VmaJsonWriter::BeginObject(
bool singleLine)
6781 VMA_ASSERT(!m_InsideString);
6787 item.type = COLLECTION_TYPE_OBJECT;
6788 item.valueCount = 0;
6789 item.singleLineMode = singleLine;
6790 m_Stack.push_back(item);

// Close the innermost object; asserts the nesting actually is an object.
6793 void VmaJsonWriter::EndObject()
6795 VMA_ASSERT(!m_InsideString);
6800 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);

// Open a JSON array; mirrors BeginObject.
6804 void VmaJsonWriter::BeginArray(
bool singleLine)
6806 VMA_ASSERT(!m_InsideString);
6812 item.type = COLLECTION_TYPE_ARRAY;
6813 item.valueCount = 0;
6814 item.singleLineMode = singleLine;
6815 m_Stack.push_back(item);

// Close the innermost array; asserts the nesting actually is an array.
6818 void VmaJsonWriter::EndArray()
6820 VMA_ASSERT(!m_InsideString);
6825 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);

// Convenience: Begin + Continue + End a complete string value.
6829 void VmaJsonWriter::WriteString(
const char* pStr)

// Start a quoted string value; optional pStr is appended immediately.
6835 void VmaJsonWriter::BeginString(
const char* pStr)
6837 VMA_ASSERT(!m_InsideString);
6841 m_InsideString =
true;
6842 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6844 ContinueString(pStr);

// Append raw characters to the current string, escaping as needed. The
// per-character escape switch (orig ~6854-6885) was dropped by extraction;
// unsupported control characters hit the assert below.
6848 void VmaJsonWriter::ContinueString(
const char* pStr)
6850 VMA_ASSERT(m_InsideString);
6852 const size_t strLen = strlen(pStr);
6853 for(
size_t i = 0; i < strLen; ++i)
6886 VMA_ASSERT(0 &&
"Character not currently supported.");

// Append a number as text inside an open string.
6892 void VmaJsonWriter::ContinueString(uint32_t n)
6894 VMA_ASSERT(m_InsideString);
6898 void VmaJsonWriter::ContinueString(uint64_t n)
6900 VMA_ASSERT(m_InsideString);

// Append a pointer (formatted by VmaStringBuilder::AddPointer) inside an
// open string.
6904 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
6906 VMA_ASSERT(m_InsideString);
6907 m_SB.AddPointer(ptr);

// Close the current string value, optionally appending pStr first.
6910 void VmaJsonWriter::EndString(
const char* pStr)
6912 VMA_ASSERT(m_InsideString);
6913 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6915 ContinueString(pStr);
6918 m_InsideString =
false;

// Emit a bare numeric value (not inside a string).
6921 void VmaJsonWriter::WriteNumber(uint32_t n)
6923 VMA_ASSERT(!m_InsideString);
6928 void VmaJsonWriter::WriteNumber(uint64_t n)
6930 VMA_ASSERT(!m_InsideString);

// Emit the literal tokens "true"/"false".
6935 void VmaJsonWriter::WriteBool(
bool b)
6937 VMA_ASSERT(!m_InsideString);
6939 m_SB.Add(b ?
"true" :
"false");

// Emit the literal token "null".
6942 void VmaJsonWriter::WriteNull()
6944 VMA_ASSERT(!m_InsideString);

// Shared prologue for every value: inside an object, even-indexed entries
// must be key strings; emits ':' after a key and ',' between siblings
// (emission lines dropped by extraction), then counts the value.
6949 void VmaJsonWriter::BeginValue(
bool isString)
6951 if(!m_Stack.empty())
6953 StackItem& currItem = m_Stack.back();
6954 if(currItem.type == COLLECTION_TYPE_OBJECT &&
6955 currItem.valueCount % 2 == 0)
6957 VMA_ASSERT(isString);
6960 if(currItem.type == COLLECTION_TYPE_OBJECT &&
6961 currItem.valueCount % 2 != 0)
6965 else if(currItem.valueCount > 0)
6974 ++currItem.valueCount;

// Emit a newline plus one INDENT per nesting level (skipped in single-line
// mode); oneLess is used when closing a collection.
6978 void VmaJsonWriter::WriteIndent(
bool oneLess)
6980 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
6984 size_t count = m_Stack.size();
6985 if(count > 0 && oneLess)
6989 for(
size_t i = 0; i < count; ++i)
// Attach user data to the allocation. If the allocation was created with the
// "user data as string" flag, pUserData is treated as a null-terminated
// string: the old copy is freed and the new one is duplicated into memory
// owned by the allocator. Otherwise the raw pointer is stored as-is.
6996 #endif // #if VMA_STATS_STRING_ENABLED 7000 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
7002 if(IsUserDataString())
// Caller must not pass the string we are about to free.
7004 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
7006 FreeUserDataString(hAllocator);
7008 if(pUserData != VMA_NULL)
7010 const char*
const newStrSrc = (
char*)pUserData;
7011 const size_t newStrLen = strlen(newStrSrc);
// +1 copies the terminating null along with the characters.
7012 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
7013 memcpy(newStrDst, newStrSrc, newStrLen + 1);
7014 m_pUserData = newStrDst;
// Non-string mode: store the opaque pointer directly (no ownership taken).
7019 m_pUserData = pUserData;
// Move this block-type allocation to a different memory block/offset (used
// during defragmentation). If the allocation is persistently mapped, the
// mapping is migrated: unmap the old block, map the new one with the same
// reference count.
7023 void VmaAllocation_T::ChangeBlockAllocation(
7025 VmaDeviceMemoryBlock* block,
7026 VkDeviceSize offset)
7028 VMA_ASSERT(block != VMA_NULL);
7029 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7032 if(block != m_BlockAllocation.m_Block)
// Strip the persistent-map flag bit to get the plain map count.
7034 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
7035 if(IsPersistentMap())
7037 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
7038 block->Map(hAllocator, mapRefCount, VMA_NULL);
7041 m_BlockAllocation.m_Block = block;
7042 m_BlockAllocation.m_Offset = offset;

// Record a new size for this allocation (assignment line dropped by
// extraction; orig ~7048 presumably `m_Size = newSize;`).
7045 void VmaAllocation_T::ChangeSize(VkDeviceSize newSize)
7047 VMA_ASSERT(newSize > 0);

// Record a new offset within the current block (block-type only).
7051 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
7053 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7054 m_BlockAllocation.m_Offset = newOffset;
// Accessors dispatching on m_Type (block suballocation vs. dedicated
// VkDeviceMemory). NOTE(review): extraction dropped the `switch(m_Type)`
// header lines and braces in each of these; only the case bodies survive.

// Offset within the owning block; dedicated allocations start at 0.
7057 VkDeviceSize VmaAllocation_T::GetOffset()
const 7061 case ALLOCATION_TYPE_BLOCK:
7062 return m_BlockAllocation.m_Offset;
7063 case ALLOCATION_TYPE_DEDICATED:

// Underlying VkDeviceMemory handle for either allocation type.
7071 VkDeviceMemory VmaAllocation_T::GetMemory()
const 7075 case ALLOCATION_TYPE_BLOCK:
7076 return m_BlockAllocation.m_Block->GetDeviceMemory();
7077 case ALLOCATION_TYPE_DEDICATED:
7078 return m_DedicatedAllocation.m_hMemory;
7081 return VK_NULL_HANDLE;

// Vulkan memory type index backing this allocation.
7085 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 7089 case ALLOCATION_TYPE_BLOCK:
7090 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
7091 case ALLOCATION_TYPE_DEDICATED:
7092 return m_DedicatedAllocation.m_MemoryTypeIndex;

// Host pointer to this allocation's mapped bytes, or (in dropped branches)
// null when not mapped. For block allocations the block's mapping is offset
// by this allocation's position.
7099 void* VmaAllocation_T::GetMappedData()
const 7103 case ALLOCATION_TYPE_BLOCK:
7106 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
7107 VMA_ASSERT(pBlockData != VMA_NULL);
7108 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
7115 case ALLOCATION_TYPE_DEDICATED:
// Mapped pointer and nonzero map count must agree.
7116 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
7117 return m_DedicatedAllocation.m_pMappedData;

// Whether this allocation participates in the lost-allocation mechanism
// (dedicated allocations never can — that branch was dropped by extraction).
7124 bool VmaAllocation_T::CanBecomeLost()
const 7128 case ALLOCATION_TYPE_BLOCK:
7129 return m_BlockAllocation.m_CanBecomeLost;
7130 case ALLOCATION_TYPE_DEDICATED:

// Owning custom pool; only meaningful for block allocations.
7138 VmaPool VmaAllocation_T::GetPool()
const 7140 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7141 return m_BlockAllocation.m_hPool;

// Try to mark this allocation lost via an atomic compare-exchange on the
// last-use frame index. Fails if already lost or still in use within
// frameInUseCount frames of currentFrameIndex. The surrounding retry loop
// and return statements were dropped by extraction.
7144 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7146 VMA_ASSERT(CanBecomeLost());
7152 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
7155 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7160 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
7166 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Human-readable names indexed by VmaSuballocationType (array initializers
// on orig ~7180-7187 dropped by extraction). Stats-string builds only.
7176 #if VMA_STATS_STRING_ENABLED 7179 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {

// Serialize this allocation's properties as key/value pairs into an already
// open JSON object: type name, size, optional user data, creation and
// last-use frame indices, and buffer/image usage flags when present.
7188 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 7190 json.WriteString(
"Type");
7191 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
7193 json.WriteString(
"Size");
7194 json.WriteNumber(m_Size);
7196 if(m_pUserData != VMA_NULL)
7198 json.WriteString(
"UserData");
// String-mode user data is emitted verbatim; otherwise the raw pointer
// value is written (inside a string, via the dropped Begin/EndString calls).
7199 if(IsUserDataString())
7201 json.WriteString((
const char*)m_pUserData);
7206 json.ContinueString_Pointer(m_pUserData);
7211 json.WriteString(
"CreationFrameIndex");
7212 json.WriteNumber(m_CreationFrameIndex);
7214 json.WriteString(
"LastUseFrameIndex");
7215 json.WriteNumber(GetLastUseFrameIndex());
7217 if(m_BufferImageUsage != 0)
7219 json.WriteString(
"Usage");
7220 json.WriteNumber(m_BufferImageUsage);
// Free the allocator-owned copy of the user-data string made by SetUserData.
// Must only be called in string mode; safe to call when no string is set.
7226 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
7228 VMA_ASSERT(IsUserDataString());
7229 if(m_pUserData != VMA_NULL)
7231 char*
const oldStr = (
char*)m_pUserData;
7232 const size_t oldStrLen = strlen(oldStr);
// +1: the terminating null was allocated along with the characters.
7233 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
7234 m_pUserData = VMA_NULL;

// Increment the map refcount of a block-type allocation. The low 7 bits of
// m_MapCount hold the count; MAP_COUNT_FLAG_PERSISTENT_MAP is masked out.
// Saturates (with assert) at 0x7F. The increment line itself was dropped
// by extraction.
7238 void VmaAllocation_T::BlockAllocMap()
7240 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7242 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7248 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");

// Decrement the map refcount; asserts on unbalanced unmap.
7252 void VmaAllocation_T::BlockAllocUnmap()
7254 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7256 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7262 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");

// Map a dedicated allocation. If already mapped, reuse the cached pointer
// and bump the refcount (up to 0x7F); on first map, call vkMapMemory through
// the allocator's dispatch table and cache the result in m_pMappedData.
7266 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
7268 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7272 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7274 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
7275 *ppData = m_DedicatedAllocation.m_pMappedData;
7281 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
7282 return VK_ERROR_MEMORY_MAP_FAILED;
// First map: whole range (offset/size/flags args on orig ~7290-7293 were
// dropped by extraction).
7287 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
7288 hAllocator->m_hDevice,
7289 m_DedicatedAllocation.m_hMemory,
7294 if(result == VK_SUCCESS)
7296 m_DedicatedAllocation.m_pMappedData = *ppData;

// Unmap a dedicated allocation; vkUnmapMemory is only issued when the
// refcount reaches zero (decrement/compare lines dropped by extraction).
7303 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
7305 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7307 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7312 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
7313 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
7314 hAllocator->m_hDevice,
7315 m_DedicatedAllocation.m_hMemory);
7320 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// Serialize one VmaStatInfo as JSON: scalar counters first, then nested
// single-line "AllocationSize" and "UnusedRangeSize" objects with Min/Avg/Max.
// NOTE(review): the WriteNumber(...) value lines were dropped by extraction;
// only the key-writing lines survive. Stats-string builds only.
7324 #if VMA_STATS_STRING_ENABLED 7326 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
7330 json.WriteString(
"Blocks");
7333 json.WriteString(
"Allocations");
7336 json.WriteString(
"UnusedRanges");
7339 json.WriteString(
"UsedBytes");
7342 json.WriteString(
"UnusedBytes");
// Per-allocation size distribution, emitted as a compact one-line object.
7347 json.WriteString(
"AllocationSize");
7348 json.BeginObject(
true);
7349 json.WriteString(
"Min");
7351 json.WriteString(
"Avg");
7353 json.WriteString(
"Max");
// Free-range size distribution, same compact layout.
7360 json.WriteString(
"UnusedRangeSize");
7361 json.BeginObject(
true);
7362 json.WriteString(
"Min");
7364 json.WriteString(
"Avg");
7366 json.WriteString(
"Max");
// Comparator ordering suballocation-list iterators by suballocation size;
// the second overload enables binary search against a bare VkDeviceSize key
// (used with VmaBinaryFindFirstNotLess over m_FreeSuballocationsBySize).
7374 #endif // #if VMA_STATS_STRING_ENABLED 7376 struct VmaSuballocationItemSizeLess
7379 const VmaSuballocationList::iterator lhs,
7380 const VmaSuballocationList::iterator rhs)
const 7382 return lhs->size < rhs->size;
7385 const VmaSuballocationList::iterator lhs,
7386 VkDeviceSize rhsSize)
const 7388 return lhs->size < rhsSize;
// Base metadata constructor: caches the allocator's host allocation
// callbacks for use by derived metadata containers.
7396 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
7398 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
// JSON serialization helpers shared by all metadata implementations.
// Stats-string builds only. NOTE(review): extraction dropped brace and some
// Begin/End lines throughout this span.

// Open the per-block JSON object: totals, counters, then start the
// "Suballocations" array that _Allocation/_UnusedRange entries go into.
7402 #if VMA_STATS_STRING_ENABLED 7404 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
7405 VkDeviceSize unusedBytes,
7406 size_t allocationCount,
7407 size_t unusedRangeCount)
const 7411 json.WriteString(
"TotalBytes");
7412 json.WriteNumber(GetSize());
7414 json.WriteString(
"UnusedBytes");
7415 json.WriteNumber(unusedBytes);
7417 json.WriteString(
"Allocations");
7418 json.WriteNumber((uint64_t)allocationCount);
7420 json.WriteString(
"UnusedRanges");
7421 json.WriteNumber((uint64_t)unusedRangeCount);
7423 json.WriteString(
"Suballocations");

// Emit one used suballocation: compact object with its offset plus the
// allocation's own parameters (type, size, user data, ...).
7427 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
7428 VkDeviceSize offset,
7431 json.BeginObject(
true);
7433 json.WriteString(
"Offset");
7434 json.WriteNumber(offset);
7436 hAllocation->PrintParameters(json);

// Emit one free range: compact object with offset, FREE type tag, and size.
7441 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
7442 VkDeviceSize offset,
7443 VkDeviceSize size)
const 7445 json.BeginObject(
true);
7447 json.WriteString(
"Offset");
7448 json.WriteNumber(offset);
7450 json.WriteString(
"Type");
7451 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
7453 json.WriteString(
"Size");
7454 json.WriteNumber(size);

// Close the "Suballocations" array and the per-block object (the EndArray/
// EndObject calls were dropped by extraction).
7459 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const 7465 #endif // #if VMA_STATS_STRING_ENABLED 7470 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
7471 VmaBlockMetadata(hAllocator),
7474 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7475 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
7479 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
7483 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
7485 VmaBlockMetadata::Init(size);
7488 m_SumFreeSize = size;
7490 VmaSuballocation suballoc = {};
7491 suballoc.offset = 0;
7492 suballoc.size = size;
7493 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7494 suballoc.hAllocation = VK_NULL_HANDLE;
7496 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
7497 m_Suballocations.push_back(suballoc);
7498 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
7500 m_FreeSuballocationsBySize.push_back(suballocItem);
// Full consistency check of the metadata invariants (debug aid; VMA_VALIDATE
// returns false on failure). Walks the suballocation list verifying
// contiguity, no two adjacent free ranges, allocation back-pointers, and
// margins; then verifies the by-size free list is complete and sorted, and
// that cached aggregates (m_SumFreeSize, m_FreeCount) match recomputation.
7503 bool VmaBlockMetadata_Generic::Validate()
const 7505 VMA_VALIDATE(!m_Suballocations.empty());
// Running expected offset of the next suballocation.
7508 VkDeviceSize calculatedOffset = 0;
// Recomputed number of free suballocations.
7510 uint32_t calculatedFreeCount = 0;
// Recomputed total free bytes.
7512 VkDeviceSize calculatedSumFreeSize = 0;
// Free ranges large enough to appear in m_FreeSuballocationsBySize.
7515 size_t freeSuballocationsToRegister = 0;
// Tracks whether the previous suballocation was free (adjacent-free check).
7517 bool prevFree =
false;
7519 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7520 suballocItem != m_Suballocations.cend();
7523 const VmaSuballocation& subAlloc = *suballocItem;
// Suballocations must tile the block with no gaps or overlaps.
7526 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
7528 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Two free ranges must never be adjacent (they would have been merged).
7530 VMA_VALIDATE(!prevFree || !currFree);
// Free <=> no allocation handle.
7532 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
7536 calculatedSumFreeSize += subAlloc.size;
7537 ++calculatedFreeCount;
7538 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7540 ++freeSuballocationsToRegister;
// Every free range must cover at least the debug margin.
7544 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
// Used range: the allocation object must agree on offset and size.
7548 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
7549 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
// With a debug margin, every used range must be preceded by a free one.
7552 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
7555 calculatedOffset += subAlloc.size;
7556 prevFree = currFree;
// The by-size list must contain exactly the registerable free ranges...
7561 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
7563 VkDeviceSize lastSize = 0;
7564 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
7566 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
// ...each entry must point at a FREE range...
7569 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
// ...and the list must be sorted ascending by size.
7571 VMA_VALIDATE(suballocItem->size >= lastSize);
7573 lastSize = suballocItem->size;
// Cross-check cached aggregates against the recomputed values.
7577 VMA_VALIDATE(ValidateFreeSuballocationList());
7578 VMA_VALIDATE(calculatedOffset == GetSize());
7579 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
7580 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
// Largest free range: the by-size list is sorted ascending, so it is the
// last registered entry (the `return 0;` fallback was dropped by extraction).
7585 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 7587 if(!m_FreeSuballocationsBySize.empty())
7589 return m_FreeSuballocationsBySize.back()->size;

// Empty block == exactly one suballocation and it is free.
7597 bool VmaBlockMetadata_Generic::IsEmpty()
const 7599 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Fill a VmaStatInfo for this block by walking all suballocations and
// classifying each as used or unused (the accumulation statements inside the
// loop were dropped by extraction).
7602 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 7606 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7618 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7619 suballocItem != m_Suballocations.cend();
7622 const VmaSuballocation& suballoc = *suballocItem;
7623 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)

// Accumulate this block's totals into pool-level statistics (the remaining
// inoutStats updates on orig ~7641-7645 were dropped by extraction).
7636 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 7638 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7640 inoutStats.
size += GetSize();
// JSON-dump this block: header via PrintDetailedMap_Begin, then one entry
// per suballocation (free ranges vs. real allocations), then the footer.
// Stats-string builds only.
7647 #if VMA_STATS_STRING_ENABLED 7649 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 7651 PrintDetailedMap_Begin(json,
// Used-allocation count = total suballocations minus free ones.
7653 m_Suballocations.size() - (size_t)m_FreeCount,
7657 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7658 suballocItem != m_Suballocations.cend();
7659 ++suballocItem, ++i)
7661 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7663 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
7667 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
7671 PrintDetailedMap_End(json);
// Try to find space for a new suballocation in this block. Strategy:
//  - best-fit: binary-search the size-sorted free list for the first range
//    that can hold the request, then scan upward;
//  - MIN_OFFSET: linear scan of all suballocations in address order;
//  - (worst-fit branch, largest first): scan the size-sorted list backwards;
//  - if canMakeOtherLost, additionally consider evicting lost-able
//    allocations, picking the candidate with the lowest CalcCost().
// Returns true and fills *pAllocationRequest on success.
// NOTE(review): extraction dropped braces and several argument lines inside
// the CheckAllocation(...) calls; code is kept verbatim.
7674 #endif // #if VMA_STATS_STRING_ENABLED 7676 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
7677 uint32_t currentFrameIndex,
7678 uint32_t frameInUseCount,
7679 VkDeviceSize bufferImageGranularity,
7680 VkDeviceSize allocSize,
7681 VkDeviceSize allocAlignment,
7683 VmaSuballocationType allocType,
7684 bool canMakeOtherLost,
7686 VmaAllocationRequest* pAllocationRequest)
7688 VMA_ASSERT(allocSize > 0);
7689 VMA_ASSERT(!upperAddress);
7690 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7691 VMA_ASSERT(pAllocationRequest != VMA_NULL);
7692 VMA_HEAVY_ASSERT(Validate());
// Early out: without eviction, total free space (minus debug margins) must
// at least fit the request.
7695 if(canMakeOtherLost ==
false &&
7696 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
7702 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
7703 if(freeSuballocCount > 0)
// Best-fit path: first free range >= allocSize + margins, found by binary
// search in the ascending-by-size list.
7708 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7709 m_FreeSuballocationsBySize.data(),
7710 m_FreeSuballocationsBySize.data() + freeSuballocCount,
7711 allocSize + 2 * VMA_DEBUG_MARGIN,
7712 VmaSuballocationItemSizeLess());
7713 size_t index = it - m_FreeSuballocationsBySize.data();
7714 for(; index < freeSuballocCount; ++index)
7719 bufferImageGranularity,
7723 m_FreeSuballocationsBySize[index],
7725 &pAllocationRequest->offset,
7726 &pAllocationRequest->itemsToMakeLostCount,
7727 &pAllocationRequest->sumFreeSize,
7728 &pAllocationRequest->sumItemSize))
7730 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Lowest-offset path: walk the address-ordered suballocation list and take
// the first free range that fits.
7735 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
7737 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7738 it != m_Suballocations.end();
7741 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
7744 bufferImageGranularity,
7750 &pAllocationRequest->offset,
7751 &pAllocationRequest->itemsToMakeLostCount,
7752 &pAllocationRequest->sumFreeSize,
7753 &pAllocationRequest->sumItemSize))
7755 pAllocationRequest->item = it;
// Remaining strategy: iterate the size-sorted free list from largest to
// smallest.
7763 for(
size_t index = freeSuballocCount; index--; )
7768 bufferImageGranularity,
7772 m_FreeSuballocationsBySize[index],
7774 &pAllocationRequest->offset,
7775 &pAllocationRequest->itemsToMakeLostCount,
7776 &pAllocationRequest->sumFreeSize,
7777 &pAllocationRequest->sumItemSize))
7779 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Eviction path: consider every position whose occupants can be made lost,
// tracking the cheapest candidate by CalcCost().
7786 if(canMakeOtherLost)
// Sentinel "worse than anything" costs so the first viable candidate wins.
7790 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
7791 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
7793 VmaAllocationRequest tmpAllocRequest = {};
7794 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
7795 suballocIt != m_Suballocations.end();
7798 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
7799 suballocIt->hAllocation->CanBecomeLost())
7804 bufferImageGranularity,
7810 &tmpAllocRequest.offset,
7811 &tmpAllocRequest.itemsToMakeLostCount,
7812 &tmpAllocRequest.sumFreeSize,
7813 &tmpAllocRequest.sumItemSize))
7815 tmpAllocRequest.item = suballocIt;
7817 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost() ||
7820 *pAllocationRequest = tmpAllocRequest;
// A candidate was found iff the sentinel cost was beaten.
7826 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Carry out the evictions promised by CreateAllocationRequest: starting at
// pAllocationRequest->item, skip free ranges and make each lost-able
// allocation lost (which frees and merges its range) until
// itemsToMakeLostCount reaches zero. The failure `return false` branch was
// dropped by extraction; on success the request item points at a free range.
7835 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
7836 uint32_t currentFrameIndex,
7837 uint32_t frameInUseCount,
7838 VmaAllocationRequest* pAllocationRequest)
7840 while(pAllocationRequest->itemsToMakeLostCount > 0)
7842 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
// Advance past already-free ranges to reach the next occupied one.
7844 ++pAllocationRequest->item;
7846 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7847 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
7848 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
7849 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation frees + merges and returns the resulting free item.
7851 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
7852 --pAllocationRequest->itemsToMakeLostCount;
// Postcondition: the request now points at a free suballocation.
7860 VMA_HEAVY_ASSERT(Validate());
7861 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7862 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Sweep the whole block, making lost every allocation that can be and is
// eligible at the given frame; freed ranges are merged via FreeSuballocation.
// Returns how many allocations were made lost.
7867 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7869 uint32_t lostAllocationCount = 0;
7870 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7871 it != m_Suballocations.end();
7874 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
7875 it->hAllocation->CanBecomeLost() &&
7876 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Re-seat the iterator on the merged free range returned by the helper.
7878 it = FreeSuballocation(it);
7879 ++lostAllocationCount;
7882 return lostAllocationCount;
// Corruption detection (requires VMA_DEBUG_MARGIN + magic-value filling):
// for every used suballocation, verify the magic guard bytes immediately
// before and after it in the mapped block data are intact. Returns
// VK_ERROR_VALIDATION_FAILED_EXT on the first damaged guard (the success
// `return VK_SUCCESS;` tail was dropped by extraction).
7885 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
7887 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7888 it != m_Suballocations.end();
7891 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
// Guard region preceding the allocation.
7893 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
7895 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
7896 return VK_ERROR_VALIDATION_FAILED_EXT;
// Guard region following the allocation.
7898 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
7900 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
7901 return VK_ERROR_VALIDATION_FAILED_EXT;
7909 void VmaBlockMetadata_Generic::Alloc(
7910 const VmaAllocationRequest& request,
7911 VmaSuballocationType type,
7912 VkDeviceSize allocSize,
7916 VMA_ASSERT(!upperAddress);
7917 VMA_ASSERT(request.item != m_Suballocations.end());
7918 VmaSuballocation& suballoc = *request.item;
7920 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7922 VMA_ASSERT(request.offset >= suballoc.offset);
7923 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
7924 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
7925 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
7929 UnregisterFreeSuballocation(request.item);
7931 suballoc.offset = request.offset;
7932 suballoc.size = allocSize;
7933 suballoc.type = type;
7934 suballoc.hAllocation = hAllocation;
7939 VmaSuballocation paddingSuballoc = {};
7940 paddingSuballoc.offset = request.offset + allocSize;
7941 paddingSuballoc.size = paddingEnd;
7942 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7943 VmaSuballocationList::iterator next = request.item;
7945 const VmaSuballocationList::iterator paddingEndItem =
7946 m_Suballocations.insert(next, paddingSuballoc);
7947 RegisterFreeSuballocation(paddingEndItem);
7953 VmaSuballocation paddingSuballoc = {};
7954 paddingSuballoc.offset = request.offset - paddingBegin;
7955 paddingSuballoc.size = paddingBegin;
7956 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7957 const VmaSuballocationList::iterator paddingBeginItem =
7958 m_Suballocations.insert(request.item, paddingSuballoc);
7959 RegisterFreeSuballocation(paddingBeginItem);
7963 m_FreeCount = m_FreeCount - 1;
7964 if(paddingBegin > 0)
7972 m_SumFreeSize -= allocSize;
7975 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
7977 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7978 suballocItem != m_Suballocations.end();
7981 VmaSuballocation& suballoc = *suballocItem;
7982 if(suballoc.hAllocation == allocation)
7984 FreeSuballocation(suballocItem);
7985 VMA_HEAVY_ASSERT(Validate());
7989 VMA_ASSERT(0 &&
"Not found!");
7992 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
7994 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7995 suballocItem != m_Suballocations.end();
7998 VmaSuballocation& suballoc = *suballocItem;
7999 if(suballoc.offset == offset)
8001 FreeSuballocation(suballocItem);
8005 VMA_ASSERT(0 &&
"Not found!");
// Tries to change the size of the suballocation owned by `alloc` in place.
// Shrinking always succeeds by returning the freed bytes to the following
// free item (or creating a new one). Growing succeeds only if the immediately
// following item is free and large enough (a VMA_DEBUG_MARGIN must remain).
// Returns false when growing is impossible; asserts if `alloc` is not found.
// NOTE(review): closing braces / else lines of the original are not visible
// in this extraction; comments annotate the visible statements only.
8008 bool VmaBlockMetadata_Generic::ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize)
8010 typedef VmaSuballocationList::iterator iter_type;
// Linear search for the suballocation belonging to `alloc`.
8011 for(iter_type suballocItem = m_Suballocations.begin();
8012 suballocItem != m_Suballocations.end();
8015 VmaSuballocation& suballoc = *suballocItem;
8016 if(suballoc.hAllocation == alloc)
// Iterator one past the found item - the space donor/receiver.
8018 iter_type nextItem = suballocItem;
// Resizing to the same size or to zero is a caller error.
8022 VMA_ASSERT(newSize != alloc->GetSize() && newSize > 0);
// --- Shrinking ---
8025 if(newSize < alloc->GetSize())
8027 const VkDeviceSize sizeDiff = suballoc.size - newSize;
8030 if(nextItem != m_Suballocations.end())
// Case 1: next item is free - shift its start left to absorb the space.
8033 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8036 UnregisterFreeSuballocation(nextItem);
8037 nextItem->offset -= sizeDiff;
8038 nextItem->size += sizeDiff;
8039 RegisterFreeSuballocation(nextItem);
// Case 2: next item is used - insert a brand new free item after this one.
8045 VmaSuballocation newFreeSuballoc;
8046 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
8047 newFreeSuballoc.offset = suballoc.offset + newSize;
8048 newFreeSuballoc.size = sizeDiff;
8049 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8050 iter_type newFreeSuballocIt = m_Suballocations.insert(nextItem, newFreeSuballoc);
8051 RegisterFreeSuballocation(newFreeSuballocIt);
// Case 3: this was the last item - append the new free item at the end.
8060 VmaSuballocation newFreeSuballoc;
8061 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
8062 newFreeSuballoc.offset = suballoc.offset + newSize;
8063 newFreeSuballoc.size = sizeDiff;
8064 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8065 m_Suballocations.push_back(newFreeSuballoc);
8067 iter_type newFreeSuballocIt = m_Suballocations.end();
8068 RegisterFreeSuballocation(--newFreeSuballocIt);
// Commit the shrink: record new size and returned free bytes.
8073 suballoc.size = newSize;
8074 m_SumFreeSize += sizeDiff;
// --- Growing ---
8079 const VkDeviceSize sizeDiff = newSize - suballoc.size;
8082 if(nextItem != m_Suballocations.end())
// Growth can only come from a directly adjacent free item.
8085 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
// Not enough room once the debug margin is accounted for - fail.
8088 if(nextItem->size < sizeDiff + VMA_DEBUG_MARGIN)
// Free item larger than needed: shift its start right.
8094 if(nextItem->size > sizeDiff)
8097 UnregisterFreeSuballocation(nextItem);
8098 nextItem->offset += sizeDiff;
8099 nextItem->size -= sizeDiff;
8100 RegisterFreeSuballocation(nextItem);
// Free item exactly consumed: remove it entirely.
8106 UnregisterFreeSuballocation(nextItem);
8107 m_Suballocations.erase(nextItem);
// Commit the grow.
8123 suballoc.size = newSize;
8124 m_SumFreeSize -= sizeDiff;
// No suballocation owned by `alloc` exists in this block.
8131 VMA_ASSERT(0 &&
"Not found!");
8135 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 8137 VkDeviceSize lastSize = 0;
8138 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
8140 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
8142 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
8143 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
8144 VMA_VALIDATE(it->size >= lastSize);
8145 lastSize = it->size;
// Tests whether an allocation of allocSize/allocAlignment/allocType can be
// placed starting at suballocItem. On success writes the chosen *pOffset and,
// when canMakeOtherLost, how many following allocations would have to be made
// lost (*itemsToMakeLostCount) plus the free/used byte sums used for cost
// comparison. Respects VMA_DEBUG_MARGIN and Vulkan bufferImageGranularity.
// NOTE(review): early-return lines and closing braces are elided in this
// extraction; comments annotate the visible statements only.
8150 bool VmaBlockMetadata_Generic::CheckAllocation(
8151 uint32_t currentFrameIndex,
8152 uint32_t frameInUseCount,
8153 VkDeviceSize bufferImageGranularity,
8154 VkDeviceSize allocSize,
8155 VkDeviceSize allocAlignment,
8156 VmaSuballocationType allocType,
8157 VmaSuballocationList::const_iterator suballocItem,
8158 bool canMakeOtherLost,
8159 VkDeviceSize* pOffset,
8160 size_t* itemsToMakeLostCount,
8161 VkDeviceSize* pSumFreeSize,
8162 VkDeviceSize* pSumItemSize)
const 8164 VMA_ASSERT(allocSize > 0);
8165 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8166 VMA_ASSERT(suballocItem != m_Suballocations.cend());
8167 VMA_ASSERT(pOffset != VMA_NULL);
8169 *itemsToMakeLostCount = 0;
// --- Branch 1: placement may consume used items by making them lost. ---
8173 if(canMakeOtherLost)
8175 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8177 *pSumFreeSize = suballocItem->size;
// Starting item is used: it must itself be lose-able and stale enough.
8181 if(suballocItem->hAllocation->CanBecomeLost() &&
8182 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8184 ++*itemsToMakeLostCount;
8185 *pSumItemSize = suballocItem->size;
// Reject if the remainder of the block cannot hold the allocation at all.
8194 if(GetSize() - suballocItem->offset < allocSize)
// Tentative start; push past the debug margin, then align.
8200 *pOffset = suballocItem->offset;
8203 if(VMA_DEBUG_MARGIN > 0)
8205 *pOffset += VMA_DEBUG_MARGIN;
8209 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Check previous suballocations on the same memory page for a
// linear-vs-optimal granularity conflict; if found, bump alignment.
8213 if(bufferImageGranularity > 1)
8215 bool bufferImageGranularityConflict =
false;
8216 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8217 while(prevSuballocItem != m_Suballocations.cbegin())
8220 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8221 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8223 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8225 bufferImageGranularityConflict =
true;
8233 if(bufferImageGranularityConflict)
8235 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
8241 if(*pOffset >= suballocItem->offset + suballocItem->size)
// Total bytes needed from suballocItem onward: front padding + data + end margin.
8247 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
8250 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
8252 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
8254 if(suballocItem->offset + totalSize > GetSize())
// Walk forward over as many items as needed, summing free bytes and
// counting used items that would have to be made lost.
8261 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
8262 if(totalSize > suballocItem->size)
8264 VkDeviceSize remainingSize = totalSize - suballocItem->size;
8265 while(remainingSize > 0)
8268 if(lastSuballocItem == m_Suballocations.cend())
8272 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8274 *pSumFreeSize += lastSuballocItem->size;
8278 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
8279 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
8280 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8282 ++*itemsToMakeLostCount;
8283 *pSumItemSize += lastSuballocItem->size;
8290 remainingSize = (lastSuballocItem->size < remainingSize) ?
8291 remainingSize - lastSuballocItem->size : 0;
// Items after the region that share its last page must not conflict with
// allocType; if such an item cannot be made lost, the placement fails.
8297 if(bufferImageGranularity > 1)
8299 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
8301 while(nextSuballocItem != m_Suballocations.cend())
8303 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8304 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8306 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8308 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
8309 if(nextSuballoc.hAllocation->CanBecomeLost() &&
8310 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8312 ++*itemsToMakeLostCount;
// --- Branch 2: plain placement inside a single free suballocation. ---
8331 const VmaSuballocation& suballoc = *suballocItem;
8332 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8334 *pSumFreeSize = suballoc.size;
// Must fit entirely inside this free item.
8337 if(suballoc.size < allocSize)
8343 *pOffset = suballoc.offset;
8346 if(VMA_DEBUG_MARGIN > 0)
8348 *pOffset += VMA_DEBUG_MARGIN;
8352 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Same backward granularity-conflict scan as in branch 1.
8356 if(bufferImageGranularity > 1)
8358 bool bufferImageGranularityConflict =
false;
8359 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8360 while(prevSuballocItem != m_Suballocations.cbegin())
8363 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8364 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8366 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8368 bufferImageGranularityConflict =
true;
8376 if(bufferImageGranularityConflict)
8378 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Fail if padding + data + end margin exceed this free item.
8383 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
8386 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
8389 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward granularity-conflict scan over following items on the same page.
8396 if(bufferImageGranularity > 1)
8398 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
8400 while(nextSuballocItem != m_Suballocations.cend())
8402 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8403 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8405 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8424 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
8426 VMA_ASSERT(item != m_Suballocations.end());
8427 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8429 VmaSuballocationList::iterator nextItem = item;
8431 VMA_ASSERT(nextItem != m_Suballocations.end());
8432 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
8434 item->size += nextItem->size;
8436 m_Suballocations.erase(nextItem);
8439 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
8442 VmaSuballocation& suballoc = *suballocItem;
8443 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8444 suballoc.hAllocation = VK_NULL_HANDLE;
8448 m_SumFreeSize += suballoc.size;
8451 bool mergeWithNext =
false;
8452 bool mergeWithPrev =
false;
8454 VmaSuballocationList::iterator nextItem = suballocItem;
8456 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
8458 mergeWithNext =
true;
8461 VmaSuballocationList::iterator prevItem = suballocItem;
8462 if(suballocItem != m_Suballocations.begin())
8465 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8467 mergeWithPrev =
true;
8473 UnregisterFreeSuballocation(nextItem);
8474 MergeFreeWithNext(suballocItem);
8479 UnregisterFreeSuballocation(prevItem);
8480 MergeFreeWithNext(prevItem);
8481 RegisterFreeSuballocation(prevItem);
8486 RegisterFreeSuballocation(suballocItem);
8487 return suballocItem;
8491 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
8493 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8494 VMA_ASSERT(item->size > 0);
8498 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8500 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8502 if(m_FreeSuballocationsBySize.empty())
8504 m_FreeSuballocationsBySize.push_back(item);
8508 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
8516 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
8518 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8519 VMA_ASSERT(item->size > 0);
8523 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8525 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8527 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
8528 m_FreeSuballocationsBySize.data(),
8529 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
8531 VmaSuballocationItemSizeLess());
8532 for(
size_t index = it - m_FreeSuballocationsBySize.data();
8533 index < m_FreeSuballocationsBySize.size();
8536 if(m_FreeSuballocationsBySize[index] == item)
8538 VmaVectorRemove(m_FreeSuballocationsBySize, index);
8541 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
8543 VMA_ASSERT(0 &&
"Not found.");
8549 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
8550 VkDeviceSize bufferImageGranularity,
8551 VmaSuballocationType& inOutPrevSuballocType)
const 8553 if(bufferImageGranularity == 1 || IsEmpty())
8558 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
8559 bool typeConflictFound =
false;
8560 for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
8561 it != m_Suballocations.cend();
8564 const VmaSuballocationType suballocType = it->type;
8565 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
8567 minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
8568 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
8570 typeConflictFound =
true;
8572 inOutPrevSuballocType = suballocType;
8576 return typeConflictFound || minAlignment >= bufferImageGranularity;
8582 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
8583 VmaBlockMetadata(hAllocator),
8585 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8586 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8587 m_1stVectorIndex(0),
8588 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
8589 m_1stNullItemsBeginCount(0),
8590 m_1stNullItemsMiddleCount(0),
8591 m_2ndNullItemsCount(0)
8595 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
8599 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
8601 VmaBlockMetadata::Init(size);
8602 m_SumFreeSize = size;
// Debug consistency check of the linear metadata: vector/mode agreement,
// null-item counters, address ordering with VMA_DEBUG_MARGIN gaps, and that
// m_SumFreeSize matches block size minus the sum of used bytes.
// NOTE(review): braces and some VMA_VALIDATE/else lines are elided in this
// extraction; comments annotate the visible statements only.
8605 bool VmaBlockMetadata_Linear::Validate()
const 8607 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8608 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// 2nd vector is non-empty exactly when a 2nd-vector mode is active, and a
// ring buffer requires a non-empty 1st vector.
8610 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
8611 VMA_VALIDATE(!suballocations1st.empty() ||
8612 suballocations2nd.empty() ||
8613 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
// First real (non-null) item and the last item of each vector must be used.
8615 if(!suballocations1st.empty())
8618 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
8620 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
8622 if(!suballocations2nd.empty())
8625 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
// Null-item counters can never exceed the vectors they describe.
8628 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
8629 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
8631 VkDeviceSize sumUsedSize = 0;
8632 const size_t suballoc1stCount = suballocations1st.size();
// Running minimum offset; every item must start at or after it.
8633 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Ring buffer: 2nd vector occupies the beginning of the block - walk it first.
8635 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8637 const size_t suballoc2ndCount = suballocations2nd.size();
8638 size_t nullItem2ndCount = 0;
8639 for(
size_t i = 0; i < suballoc2ndCount; ++i)
8641 const VmaSuballocation& suballoc = suballocations2nd[i];
8642 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Free state and null handle must agree; offsets are nondecreasing.
8644 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8645 VMA_VALIDATE(suballoc.offset >= offset);
// For used items, the VmaAllocation must mirror the suballocation.
8649 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8650 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8651 sumUsedSize += suballoc.size;
8658 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8661 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Leading null items of the 1st vector must be free with null handles.
8664 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
8666 const VmaSuballocation& suballoc = suballocations1st[i];
8667 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
8668 suballoc.hAllocation == VK_NULL_HANDLE);
8671 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// Walk the remainder of the 1st vector, same per-item checks as above.
8673 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
8675 const VmaSuballocation& suballoc = suballocations1st[i];
8676 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8678 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8679 VMA_VALIDATE(suballoc.offset >= offset);
8680 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
8684 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8685 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8686 sumUsedSize += suballoc.size;
8693 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8695 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// Double stack: 2nd vector sits at the end of the block, iterated backwards
// by index (which is forward in address order).
8697 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8699 const size_t suballoc2ndCount = suballocations2nd.size();
8700 size_t nullItem2ndCount = 0;
8701 for(
size_t i = suballoc2ndCount; i--; )
8703 const VmaSuballocation& suballoc = suballocations2nd[i];
8704 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8706 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8707 VMA_VALIDATE(suballoc.offset >= offset);
8711 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8712 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8713 sumUsedSize += suballoc.size;
8720 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8723 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Global byte accounting must close.
8726 VMA_VALIDATE(offset <= GetSize());
8727 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
8732 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 8734 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
8735 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
8738 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 8740 const VkDeviceSize size = GetSize();
8752 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8754 switch(m_2ndVectorMode)
8756 case SECOND_VECTOR_EMPTY:
8762 const size_t suballocations1stCount = suballocations1st.size();
8763 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
8764 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
8765 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
8767 firstSuballoc.offset,
8768 size - (lastSuballoc.offset + lastSuballoc.size));
8772 case SECOND_VECTOR_RING_BUFFER:
8777 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8778 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
8779 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
8780 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
8784 case SECOND_VECTOR_DOUBLE_STACK:
8789 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8790 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
8791 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
8792 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Fills outInfo with this block's statistics by walking used allocations in
// address order: ring-buffer part of the 2nd vector first, then the 1st
// vector, then the upper-stack part of the 2nd vector. lastOffset tracks the
// end of the previous allocation so gaps can be counted as unused ranges.
// NOTE(review): the lines that actually update outInfo fields are elided in
// this extraction; comments annotate the visible control flow only.
8802 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 8804 const VkDeviceSize size = GetSize();
8805 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8806 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8807 const size_t suballoc1stCount = suballocations1st.size();
8808 const size_t suballoc2ndCount = suballocations2nd.size();
8819 VkDeviceSize lastOffset = 0;
// Pass 1: ring-buffer 2nd vector occupies [0, start of 1st vector).
8821 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8823 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8824 size_t nextAlloc2ndIndex = 0;
8825 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed) entries to the next live allocation.
8828 while(nextAlloc2ndIndex < suballoc2ndCount &&
8829 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8831 ++nextAlloc2ndIndex;
8835 if(nextAlloc2ndIndex < suballoc2ndCount)
8837 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
8840 if(lastOffset < suballoc.offset)
8843 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8857 lastOffset = suballoc.offset + suballoc.size;
8858 ++nextAlloc2ndIndex;
// No more live items: trailing space up to the 1st vector is unused.
8864 if(lastOffset < freeSpace2ndTo1stEnd)
8866 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8874 lastOffset = freeSpace2ndTo1stEnd;
// Pass 2: 1st vector, up to block end or the top of a double-stack 2nd vector.
8879 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8880 const VkDeviceSize freeSpace1stTo2ndEnd =
8881 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8882 while(lastOffset < freeSpace1stTo2ndEnd)
8885 while(nextAlloc1stIndex < suballoc1stCount &&
8886 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8888 ++nextAlloc1stIndex;
8892 if(nextAlloc1stIndex < suballoc1stCount)
8894 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8897 if(lastOffset < suballoc.offset)
8900 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8914 lastOffset = suballoc.offset + suballoc.size;
8915 ++nextAlloc1stIndex;
8921 if(lastOffset < freeSpace1stTo2ndEnd)
8923 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8931 lastOffset = freeSpace1stTo2ndEnd;
// Pass 3: double-stack 2nd vector - backwards by index is forward in address.
8935 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8937 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8938 while(lastOffset < size)
// Index underflows to SIZE_MAX when the vector is exhausted.
8941 while(nextAlloc2ndIndex != SIZE_MAX &&
8942 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8944 --nextAlloc2ndIndex;
8948 if(nextAlloc2ndIndex != SIZE_MAX)
8950 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8953 if(lastOffset < suballoc.offset)
8956 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8970 lastOffset = suballoc.offset + suballoc.size;
8971 --nextAlloc2ndIndex;
// Final tail of the block is unused.
8977 if(lastOffset < size)
8979 const VkDeviceSize unusedRangeSize = size - lastOffset;
8995 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 8997 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8998 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8999 const VkDeviceSize size = GetSize();
9000 const size_t suballoc1stCount = suballocations1st.size();
9001 const size_t suballoc2ndCount = suballocations2nd.size();
9003 inoutStats.
size += size;
9005 VkDeviceSize lastOffset = 0;
9007 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9009 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9010 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
9011 while(lastOffset < freeSpace2ndTo1stEnd)
9014 while(nextAlloc2ndIndex < suballoc2ndCount &&
9015 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9017 ++nextAlloc2ndIndex;
9021 if(nextAlloc2ndIndex < suballoc2ndCount)
9023 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9026 if(lastOffset < suballoc.offset)
9029 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9040 lastOffset = suballoc.offset + suballoc.size;
9041 ++nextAlloc2ndIndex;
9046 if(lastOffset < freeSpace2ndTo1stEnd)
9049 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9056 lastOffset = freeSpace2ndTo1stEnd;
9061 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9062 const VkDeviceSize freeSpace1stTo2ndEnd =
9063 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9064 while(lastOffset < freeSpace1stTo2ndEnd)
9067 while(nextAlloc1stIndex < suballoc1stCount &&
9068 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9070 ++nextAlloc1stIndex;
9074 if(nextAlloc1stIndex < suballoc1stCount)
9076 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9079 if(lastOffset < suballoc.offset)
9082 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9093 lastOffset = suballoc.offset + suballoc.size;
9094 ++nextAlloc1stIndex;
9099 if(lastOffset < freeSpace1stTo2ndEnd)
9102 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9109 lastOffset = freeSpace1stTo2ndEnd;
9113 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9115 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9116 while(lastOffset < size)
9119 while(nextAlloc2ndIndex != SIZE_MAX &&
9120 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9122 --nextAlloc2ndIndex;
9126 if(nextAlloc2ndIndex != SIZE_MAX)
9128 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9131 if(lastOffset < suballoc.offset)
9134 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9145 lastOffset = suballoc.offset + suballoc.size;
9146 --nextAlloc2ndIndex;
9151 if(lastOffset < size)
9154 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Emits a detailed JSON map of this linear block: a summary (used/unused bytes,
// allocation count, unused-range count) followed by every allocation and gap.
// NOTE(review): lossy extraction — original line numbers are fused into the
// text and braces/some statements are missing; code left byte-identical.
9167 #if VMA_STATS_STRING_ENABLED 9168 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 9170 const VkDeviceSize size = GetSize();
9171 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9172 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9173 const size_t suballoc1stCount = suballocations1st.size();
9174 const size_t suballoc2ndCount = suballocations2nd.size();
// FIRST PASS: only count allocations and unused ranges; nothing is printed yet.
9178 size_t unusedRangeCount = 0;
9179 VkDeviceSize usedBytes = 0;
9181 VkDeviceSize lastOffset = 0;
9183 size_t alloc2ndCount = 0;
// Ring-buffer mode: the 2nd vector occupies [0, first live 1st-vector offset).
9184 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9186 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9187 size_t nextAlloc2ndIndex = 0;
9188 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed/lost) items — they are gaps, not allocations.
9191 while(nextAlloc2ndIndex < suballoc2ndCount &&
9192 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9194 ++nextAlloc2ndIndex;
9198 if(nextAlloc2ndIndex < suballoc2ndCount)
9200 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9203 if(lastOffset < suballoc.offset)
9212 usedBytes += suballoc.size;
9215 lastOffset = suballoc.offset + suballoc.size;
9216 ++nextAlloc2ndIndex;
9221 if(lastOffset < freeSpace2ndTo1stEnd)
9228 lastOffset = freeSpace2ndTo1stEnd;
// Walk the 1st vector from the first non-null item up to either the bottom of
// the upper (double-stack) vector or the end of the block.
9233 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9234 size_t alloc1stCount = 0;
9235 const VkDeviceSize freeSpace1stTo2ndEnd =
9236 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9237 while(lastOffset < freeSpace1stTo2ndEnd)
9240 while(nextAlloc1stIndex < suballoc1stCount &&
9241 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9243 ++nextAlloc1stIndex;
9247 if(nextAlloc1stIndex < suballoc1stCount)
9249 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9252 if(lastOffset < suballoc.offset)
9261 usedBytes += suballoc.size;
9264 lastOffset = suballoc.offset + suballoc.size;
9265 ++nextAlloc1stIndex;
9270 if(lastOffset < size)
9277 lastOffset = freeSpace1stTo2ndEnd;
// Double-stack mode: the 2nd vector grows downward from the end of the block,
// so it is traversed back-to-front (index decreasing, SIZE_MAX as sentinel).
9281 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9283 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9284 while(lastOffset < size)
9287 while(nextAlloc2ndIndex != SIZE_MAX &&
9288 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9290 --nextAlloc2ndIndex;
9294 if(nextAlloc2ndIndex != SIZE_MAX)
9296 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9299 if(lastOffset < suballoc.offset)
9308 usedBytes += suballoc.size;
9311 lastOffset = suballoc.offset + suballoc.size;
9312 --nextAlloc2ndIndex;
9317 if(lastOffset < size)
9329 const VkDeviceSize unusedBytes = size - usedBytes;
9330 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
// SECOND PASS: same traversal order, now emitting JSON entries for each
// allocation and each unused range via the PrintDetailedMap_* helpers.
9335 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9337 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9338 size_t nextAlloc2ndIndex = 0;
9339 while(lastOffset < freeSpace2ndTo1stEnd)
9342 while(nextAlloc2ndIndex < suballoc2ndCount &&
9343 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9345 ++nextAlloc2ndIndex;
9349 if(nextAlloc2ndIndex < suballoc2ndCount)
9351 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9354 if(lastOffset < suballoc.offset)
9357 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9358 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9363 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9366 lastOffset = suballoc.offset + suballoc.size;
9367 ++nextAlloc2ndIndex;
9372 if(lastOffset < freeSpace2ndTo1stEnd)
9375 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9376 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9380 lastOffset = freeSpace2ndTo1stEnd;
9385 nextAlloc1stIndex = m_1stNullItemsBeginCount;
9386 while(lastOffset < freeSpace1stTo2ndEnd)
9389 while(nextAlloc1stIndex < suballoc1stCount &&
9390 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9392 ++nextAlloc1stIndex;
9396 if(nextAlloc1stIndex < suballoc1stCount)
9398 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9401 if(lastOffset < suballoc.offset)
9404 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9405 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9410 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9413 lastOffset = suballoc.offset + suballoc.size;
9414 ++nextAlloc1stIndex;
9419 if(lastOffset < freeSpace1stTo2ndEnd)
9422 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9423 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9427 lastOffset = freeSpace1stTo2ndEnd;
9431 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9433 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9434 while(lastOffset < size)
9437 while(nextAlloc2ndIndex != SIZE_MAX &&
9438 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9440 --nextAlloc2ndIndex;
9444 if(nextAlloc2ndIndex != SIZE_MAX)
9446 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9449 if(lastOffset < suballoc.offset)
9452 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9453 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9458 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9461 lastOffset = suballoc.offset + suballoc.size;
9462 --nextAlloc2ndIndex;
9467 if(lastOffset < size)
9470 const VkDeviceSize unusedRangeSize = size - lastOffset;
9471 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9480 PrintDetailedMap_End(json);
// Tries to find space for a new allocation of allocSize/allocAlignment in this
// linear block and fills *pAllocationRequest (offset, sumFreeSize, sumItemSize,
// itemsToMakeLostCount) on success. Three strategies are visible below:
// upper-address (double stack, growing down), plain append to the 1st vector,
// and ring-buffer wrap-around (optionally making stale allocations "lost").
// NOTE(review): lossy extraction — stray line numbers fused into text, many
// lines (braces, returns, the upperAddress parameter/branch) are missing;
// code left byte-identical.
9482 #endif // #if VMA_STATS_STRING_ENABLED 9484 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
9485 uint32_t currentFrameIndex,
9486 uint32_t frameInUseCount,
9487 VkDeviceSize bufferImageGranularity,
9488 VkDeviceSize allocSize,
9489 VkDeviceSize allocAlignment,
9491 VmaSuballocationType allocType,
9492 bool canMakeOtherLost,
9494 VmaAllocationRequest* pAllocationRequest)
9496 VMA_ASSERT(allocSize > 0);
9497 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9498 VMA_ASSERT(pAllocationRequest != VMA_NULL);
9499 VMA_HEAVY_ASSERT(Validate());
9501 const VkDeviceSize size = GetSize();
9502 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9503 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Upper-address branch (presumably guarded by an elided upperAddress check):
// double-stack usage is incompatible with ring-buffer usage of the same pool.
9507 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9509 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
9514 if(allocSize > size)
// Start from the end of the block (or just below the top of the 2nd stack)
// and align the offset DOWN, since this stack grows toward lower addresses.
9518 VkDeviceSize resultBaseOffset = size - allocSize;
9519 if(!suballocations2nd.empty())
9521 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9522 resultBaseOffset = lastSuballoc.offset - allocSize;
9523 if(allocSize > lastSuballoc.offset)
9530 VkDeviceSize resultOffset = resultBaseOffset;
// Reserve VMA_DEBUG_MARGIN bytes below the neighbor for corruption detection.
9533 if(VMA_DEBUG_MARGIN > 0)
9535 if(resultOffset < VMA_DEBUG_MARGIN)
9539 resultOffset -= VMA_DEBUG_MARGIN;
9543 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Check bufferImageGranularity conflicts against 2nd-vector neighbors that
// would share a "page" with this allocation; realign down if conflicting.
9547 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9549 bool bufferImageGranularityConflict =
false;
9550 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9552 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9553 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9555 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
9557 bufferImageGranularityConflict =
true;
9565 if(bufferImageGranularityConflict)
9567 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// Succeed only if the down-aligned offset still clears the top of the 1st
// vector (plus the debug margin).
9572 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
9573 suballocations1st.back().offset + suballocations1st.back().size :
9575 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
9579 if(bufferImageGranularity > 1)
9581 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9583 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9584 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9586 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
9600 pAllocationRequest->offset = resultOffset;
9601 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
9602 pAllocationRequest->sumItemSize = 0;
9604 pAllocationRequest->itemsToMakeLostCount = 0;
// Lower-address branch: append after the last item of the 1st vector,
// aligning UP, as long as there is room before the 2nd stack / block end.
9610 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9614 VkDeviceSize resultBaseOffset = 0;
9615 if(!suballocations1st.empty())
9617 const VmaSuballocation& lastSuballoc = suballocations1st.back();
9618 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9622 VkDeviceSize resultOffset = resultBaseOffset;
9625 if(VMA_DEBUG_MARGIN > 0)
9627 resultOffset += VMA_DEBUG_MARGIN;
9631 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
9635 if(bufferImageGranularity > 1 && !suballocations1st.empty())
9637 bool bufferImageGranularityConflict =
false;
9638 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9640 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9641 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9643 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9645 bufferImageGranularityConflict =
true;
9653 if(bufferImageGranularityConflict)
9655 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
9659 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
9660 suballocations2nd.back().offset : size;
9663 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
9667 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9669 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9671 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9672 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9674 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
9688 pAllocationRequest->offset = resultOffset;
9689 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
9690 pAllocationRequest->sumItemSize = 0;
9692 pAllocationRequest->itemsToMakeLostCount = 0;
// Wrap-around branch: place the allocation after the end of the 2nd vector
// (ring buffer), possibly marking overlapping 1st-vector items as lost.
9699 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9701 VMA_ASSERT(!suballocations1st.empty());
9703 VkDeviceSize resultBaseOffset = 0;
9704 if(!suballocations2nd.empty())
9706 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9707 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9711 VkDeviceSize resultOffset = resultBaseOffset;
9714 if(VMA_DEBUG_MARGIN > 0)
9716 resultOffset += VMA_DEBUG_MARGIN;
9720 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
9724 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9726 bool bufferImageGranularityConflict =
false;
9727 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
9729 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
9730 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9732 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9734 bufferImageGranularityConflict =
true;
9742 if(bufferImageGranularityConflict)
9744 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
9748 pAllocationRequest->itemsToMakeLostCount = 0;
9749 pAllocationRequest->sumItemSize = 0;
9750 size_t index1st = m_1stNullItemsBeginCount;
// Count 1st-vector items overlapping [resultOffset, resultOffset+allocSize)
// that can be sacrificed (lost) if they are old enough (frameInUseCount).
9752 if(canMakeOtherLost)
9754 while(index1st < suballocations1st.size() &&
9755 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
9758 const VmaSuballocation& suballoc = suballocations1st[index1st];
9759 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
9765 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
9766 if(suballoc.hAllocation->CanBecomeLost() &&
9767 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9769 ++pAllocationRequest->itemsToMakeLostCount;
9770 pAllocationRequest->sumItemSize += suballoc.size;
// Also account for items on the same granularity page just past the end.
9782 if(bufferImageGranularity > 1)
9784 while(index1st < suballocations1st.size())
9786 const VmaSuballocation& suballoc = suballocations1st[index1st];
9787 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
9789 if(suballoc.hAllocation != VK_NULL_HANDLE)
9792 if(suballoc.hAllocation->CanBecomeLost() &&
9793 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9795 ++pAllocationRequest->itemsToMakeLostCount;
9796 pAllocationRequest->sumItemSize += suballoc.size;
// Success if the allocation fits before the next surviving 1st-vector item
// (strictly inside the block when nothing remains ahead of it).
9815 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN < size) ||
9816 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
9820 if(bufferImageGranularity > 1)
9822 for(
size_t nextSuballocIndex = index1st;
9823 nextSuballocIndex < suballocations1st.size();
9824 nextSuballocIndex++)
9826 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
9827 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9829 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
9843 pAllocationRequest->offset = resultOffset;
9844 pAllocationRequest->sumFreeSize =
9845 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
9847 - pAllocationRequest->sumItemSize;
// Marks as "lost" the allocations counted in pAllocationRequest->
// itemsToMakeLostCount, walking the 1st vector from its first live item:
// each lost item becomes a FREE null item and its size returns to m_SumFreeSize.
// Trivially succeeds when no items need to be lost.
// NOTE(review): lossy extraction — braces and some lines (e.g. the loop's
// index advance and final return) are missing; code left byte-identical.
9857 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
9858 uint32_t currentFrameIndex,
9859 uint32_t frameInUseCount,
9860 VmaAllocationRequest* pAllocationRequest)
9862 if(pAllocationRequest->itemsToMakeLostCount == 0)
// This path is only valid while the pool behaves as empty or ring buffer.
9867 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
9869 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9870 size_t index1st = m_1stNullItemsBeginCount;
9871 size_t madeLostCount = 0;
9872 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
9874 VMA_ASSERT(index1st < suballocations1st.size());
9875 VmaSuballocation& suballoc = suballocations1st[index1st];
9876 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9878 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
9879 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
// MakeLost may fail (e.g. allocation used too recently); only on success is
// the slot converted to a null item.
9880 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9882 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9883 suballoc.hAllocation = VK_NULL_HANDLE;
9884 m_SumFreeSize += suballoc.size;
9885 ++m_1stNullItemsMiddleCount;
// Makes lost every allocation in this block that is allowed to become lost
// and is older than frameInUseCount frames. Scans both suballocation vectors,
// converting successes to FREE null items, and returns how many were lost.
// NOTE(review): lossy extraction — braces and a trailing cleanup call are
// missing; code left byte-identical.
9902 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
9904 uint32_t lostAllocationCount = 0;
// 1st vector: only items from the first live index onward can hold allocations.
9906 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9907 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
9909 VmaSuballocation& suballoc = suballocations1st[i];
9910 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
9911 suballoc.hAllocation->CanBecomeLost() &&
9912 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9914 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9915 suballoc.hAllocation = VK_NULL_HANDLE;
9916 ++m_1stNullItemsMiddleCount;
9917 m_SumFreeSize += suballoc.size;
9918 ++lostAllocationCount;
// 2nd vector: same treatment; null items are tracked in m_2ndNullItemsCount.
9922 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9923 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
9925 VmaSuballocation& suballoc = suballocations2nd[i];
9926 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
9927 suballoc.hAllocation->CanBecomeLost() &&
9928 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9930 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9931 suballoc.hAllocation = VK_NULL_HANDLE;
9932 ++m_2ndNullItemsCount;
9933 ++lostAllocationCount;
// If anything was lost, some compaction presumably follows (line elided).
9937 if(lostAllocationCount)
9942 return lostAllocationCount;
// Validates the debug magic values written VMA_DEBUG_MARGIN bytes before and
// immediately after every live allocation in both vectors. Returns
// VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted margin found.
// NOTE(review): lossy extraction — braces and the final success return are
// missing; code left byte-identical.
9945 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
9947 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9948 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
9950 const VmaSuballocation& suballoc = suballocations1st[i];
9951 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9953 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
9955 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9956 return VK_ERROR_VALIDATION_FAILED_EXT;
9958 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
9960 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9961 return VK_ERROR_VALIDATION_FAILED_EXT;
// Same checks for the 2nd (ring-buffer / upper-stack) vector.
9966 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9967 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
9969 const VmaSuballocation& suballoc = suballocations2nd[i];
9970 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9972 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
9974 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9975 return VK_ERROR_VALIDATION_FAILED_EXT;
9977 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
9979 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9980 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request: records the new
// suballocation in the proper vector and deducts its size from m_SumFreeSize.
// The three placement cases mirror CreateAllocationRequest: upper-address
// (double stack, pushed to 2nd vector), append at the end of the 1st vector,
// or wrap-around into the 2nd vector as a ring buffer.
// NOTE(review): lossy extraction — the upperAddress guard, hAllocation
// parameter line, braces, and break statements are missing; code left
// byte-identical.
9988 void VmaBlockMetadata_Linear::Alloc(
9989 const VmaAllocationRequest& request,
9990 VmaSuballocationType type,
9991 VkDeviceSize allocSize,
9995 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
// Upper-address case: first such push switches the pool into double-stack mode.
9999 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
10000 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
10001 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10002 suballocations2nd.push_back(newSuballoc);
10003 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
10007 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10010 if(suballocations1st.empty())
10012 suballocations1st.push_back(newSuballoc);
// Offset at/after the end of the 1st vector: plain append.
10017 if(request.offset >= suballocations1st.back().offset + suballocations1st.back().size)
10020 VMA_ASSERT(request.offset + allocSize <= GetSize());
10021 suballocations1st.push_back(newSuballoc);
// Offset before the first live 1st-vector item: wrap-around — push to the 2nd
// vector and (if needed) switch it into ring-buffer mode.
10024 else if(request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset)
10026 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10028 switch(m_2ndVectorMode)
10030 case SECOND_VECTOR_EMPTY:
10032 VMA_ASSERT(suballocations2nd.empty());
10033 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
10035 case SECOND_VECTOR_RING_BUFFER:
10037 VMA_ASSERT(!suballocations2nd.empty());
10039 case SECOND_VECTOR_DOUBLE_STACK:
10040 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
10046 suballocations2nd.push_back(newSuballoc);
10050 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
10055 m_SumFreeSize -= newSuballoc.size;
// Frees an allocation by delegating to FreeAtOffset with its recorded offset.
10058 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
10060 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation at the given offset. Fast paths handle the first
// live item of the 1st vector and the last item of whichever vector owns the
// end of the used region; otherwise a binary search locates the item in the
// appropriate vector (sorted ascending for 1st/ring, descending for the
// upper stack). Found items become null items; CleanupAfterFree compacts.
// NOTE(review): lossy extraction — braces, early returns, and the refSuballoc
// argument of the first FindSorted call are missing; code left byte-identical.
10063 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
10065 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10066 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10068 if(!suballocations1st.empty())
// Fast path 1: the item being freed is the first live item of the 1st vector.
10071 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10072 if(firstSuballoc.offset == offset)
10074 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10075 firstSuballoc.hAllocation = VK_NULL_HANDLE;
10076 m_SumFreeSize += firstSuballoc.size;
10077 ++m_1stNullItemsBeginCount;
10078 CleanupAfterFree();
// Fast path 2: last item of the 2nd vector (ring buffer or upper stack).
10084 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
10085 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10087 VmaSuballocation& lastSuballoc = suballocations2nd.back();
10088 if(lastSuballoc.offset == offset)
10090 m_SumFreeSize += lastSuballoc.size;
10091 suballocations2nd.pop_back();
10092 CleanupAfterFree();
// Fast path 3: last item of the 1st vector when no 2nd vector is in use.
10097 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
10099 VmaSuballocation& lastSuballoc = suballocations1st.back();
10100 if(lastSuballoc.offset == offset)
10102 m_SumFreeSize += lastSuballoc.size;
10103 suballocations1st.pop_back();
10104 CleanupAfterFree();
// Slow path: binary search in the 1st vector by offset.
10111 VmaSuballocation refSuballoc;
10112 refSuballoc.offset = offset;
10114 SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
10115 suballocations1st.begin() + m_1stNullItemsBeginCount,
10116 suballocations1st.end(),
10118 if(it != suballocations1st.end())
10120 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10121 it->hAllocation = VK_NULL_HANDLE;
10122 ++m_1stNullItemsMiddleCount;
10123 m_SumFreeSize += it->size;
10124 CleanupAfterFree();
// Slow path: binary search in the 2nd vector, whose sort order depends on mode.
10129 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
10132 VmaSuballocation refSuballoc;
10133 refSuballoc.offset = offset;
10135 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
10136 VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
10137 VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
10138 if(it != suballocations2nd.end())
10140 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10141 it->hAllocation = VK_NULL_HANDLE;
10142 ++m_2ndNullItemsCount;
10143 m_SumFreeSize += it->size;
10144 CleanupAfterFree();
// Reaching here means the offset matched nothing — a caller bug.
10149 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
// Heuristic: compact the 1st vector when it has more than 32 items and null
// (freed) items outnumber live ones by at least 3:2 — i.e. when the vector is
// mostly dead weight.
10152 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 10154 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10155 const size_t suballocCount = AccessSuballocations1st().size();
10156 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Housekeeping run after every free: trims null items from the edges of both
// vectors, optionally compacts the 1st vector (ShouldCompact1st), and when the
// 1st vector empties out, promotes the 2nd vector to become the new 1st
// (m_1stVectorIndex ^= 1) so the ring buffer keeps rolling.
// NOTE(review): lossy extraction — the IsEmpty() fast-path guard, braces, and
// a ++srcIndex inside the compaction loop are elided; code left byte-identical.
10159 void VmaBlockMetadata_Linear::CleanupAfterFree()
10161 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10162 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Fully-empty case (guard elided): reset all bookkeeping to the initial state.
10166 suballocations1st.clear();
10167 suballocations2nd.clear();
10168 m_1stNullItemsBeginCount = 0;
10169 m_1stNullItemsMiddleCount = 0;
10170 m_2ndNullItemsCount = 0;
10171 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10175 const size_t suballoc1stCount = suballocations1st.size();
10176 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10177 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Grow the leading-null prefix over any middle nulls that now touch it.
10180 while(m_1stNullItemsBeginCount < suballoc1stCount &&
10181 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10183 ++m_1stNullItemsBeginCount;
10184 --m_1stNullItemsMiddleCount;
// Pop trailing nulls from the back of the 1st vector.
10188 while(m_1stNullItemsMiddleCount > 0 &&
10189 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
10191 --m_1stNullItemsMiddleCount;
10192 suballocations1st.pop_back();
// Pop trailing nulls from the back of the 2nd vector.
10196 while(m_2ndNullItemsCount > 0 &&
10197 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
10199 --m_2ndNullItemsCount;
10200 suballocations2nd.pop_back();
// Compaction: slide the live items to the front and drop all null slots.
10203 if(ShouldCompact1st())
10205 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
10206 size_t srcIndex = m_1stNullItemsBeginCount;
10207 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
10209 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
10213 if(dstIndex != srcIndex)
10215 suballocations1st[dstIndex] = suballocations1st[srcIndex];
10219 suballocations1st.resize(nonNullItemCount);
10220 m_1stNullItemsBeginCount = 0;
10221 m_1stNullItemsMiddleCount = 0;
10225 if(suballocations2nd.empty())
10227 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector fully consumed: swap roles so the 2nd vector becomes the 1st.
10231 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
10233 suballocations1st.clear();
10234 m_1stNullItemsBeginCount = 0;
10236 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10239 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10240 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
10241 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
10242 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10244 ++m_1stNullItemsBeginCount;
10245 --m_1stNullItemsMiddleCount;
10247 m_2ndNullItemsCount = 0;
10248 m_1stVectorIndex ^= 1;
10253 VMA_HEAVY_ASSERT(Validate());
// Buddy-allocator metadata constructor: zero-initializes counters and the
// per-level free lists. Real setup happens in Init().
// NOTE(review): lossy extraction — some member initializers are elided;
// code left byte-identical.
10260 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
10261 VmaBlockMetadata(hAllocator),
10263 m_AllocationCount(0),
10267 memset(m_FreeList, 0,
sizeof(m_FreeList));
// Destructor: recursively frees the whole buddy tree starting at the root.
10270 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
10272 DeleteNode(m_Root);
// Initializes the buddy tree for a block of the given size. Usable size is
// rounded down to a power of two (the remainder is unusable by this
// algorithm); the level count is derived from MAX_LEVELS/MIN_NODE_SIZE, and a
// single free root node covering the whole usable size is created.
// NOTE(review): lossy extraction — m_Root assignment and the level-count
// increment are elided; code left byte-identical.
10275 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
10277 VmaBlockMetadata::Init(size);
10279 m_UsableSize = VmaPrevPow2(size);
10280 m_SumFreeSize = m_UsableSize;
10284 while(m_LevelCount < MAX_LEVELS &&
10285 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
10290 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
10291 rootNode->offset = 0;
10292 rootNode->type = Node::TYPE_FREE;
10293 rootNode->parent = VMA_NULL;
10294 rootNode->buddy = VMA_NULL;
10297 AddToFreeListFront(0, rootNode);
// Consistency check: validates the whole node tree, cross-checks the cached
// allocation count and free-size totals against recomputed ones, verifies
// every free-list is a well-formed doubly linked list of FREE nodes, and that
// levels beyond m_LevelCount are empty.
// NOTE(review): lossy extraction — braces and the final return are elided;
// code left byte-identical.
10300 bool VmaBlockMetadata_Buddy::Validate()
const 10303 ValidationContext ctx;
10304 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
10306 VMA_VALIDATE(
false &&
"ValidateNode failed.");
10308 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
10309 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
// Per-level free-list invariants: head has no prev, links are symmetric,
// and the recorded back pointer matches the actual last node.
10312 for(uint32_t level = 0; level < m_LevelCount; ++level)
10314 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
10315 m_FreeList[level].front->free.prev == VMA_NULL);
10317 for(Node* node = m_FreeList[level].front;
10319 node = node->free.next)
10321 VMA_VALIDATE(node->type == Node::TYPE_FREE);
10323 if(node->free.next == VMA_NULL)
10325 VMA_VALIDATE(m_FreeList[level].back == node);
10329 VMA_VALIDATE(node->free.next->free.prev == node);
// Levels past the configured depth must hold nothing.
10335 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
10337 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
// Largest free range = node size of the shallowest level with a non-empty
// free list (levels are ordered largest-node first).
// NOTE(review): the fallback return for "no free nodes" is elided by the
// extraction; code left byte-identical.
10343 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const 10345 for(uint32_t level = 0; level < m_LevelCount; ++level)
10347 if(m_FreeList[level].front != VMA_NULL)
10349 return LevelToNodeSize(level);
// Fills outInfo by recursing over the buddy tree; the tail of the rounded-off
// block (unusableSize) is presumably accounted separately (body elided).
// NOTE(review): lossy extraction — outInfo initialization and the
// unusable-size accounting statements are missing; code left byte-identical.
10355 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 10357 const VkDeviceSize unusableSize = GetUnusableSize();
10368 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
10370 if(unusableSize > 0)
// Accumulates this block's contribution into pool-wide statistics. The
// unusable tail (size not covered by the power-of-two usable size) is counted
// as unused space.
// NOTE(review): lossy extraction — allocation-count updates and the
// unusableSize branch body are missing; code left byte-identical.
10379 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const 10381 const VkDeviceSize unusableSize = GetUnusableSize();
10383 inoutStats.
size += GetSize();
10384 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
10389 if(unusableSize > 0)
// JSON dump for the buddy block: computes stats, writes the header, recurses
// over the tree node-by-node, then reports the unusable tail as an unused
// range before closing.
// NOTE(review): lossy extraction — the stat variable declaration and the
// arguments of PrintDetailedMap_Begin/_UnusedRange are partially missing;
// code left byte-identical.
10396 #if VMA_STATS_STRING_ENABLED 10398 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const 10402 CalcAllocationStatInfo(stat);
10404 PrintDetailedMap_Begin(
10410 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
10412 const VkDeviceSize unusableSize = GetUnusableSize();
10413 if(unusableSize > 0)
10415 PrintDetailedMap_UnusedRange(json,
10420 PrintDetailedMap_End(json);
// Buddy-algorithm allocation search: pads size/alignment up to
// bufferImageGranularity for types whose content is unknown (conservative),
// then scans free lists from the smallest level that still fits (targetLevel)
// toward the root, taking the first free node whose offset satisfies the
// alignment. The chosen level is smuggled to Alloc() via customData.
// NOTE(review): lossy extraction — the upperAddress parameter, braces,
// return statements, and the "canMakeOtherLost not supported" tail are
// missing; code left byte-identical.
10423 #endif // #if VMA_STATS_STRING_ENABLED 10425 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
10426 uint32_t currentFrameIndex,
10427 uint32_t frameInUseCount,
10428 VkDeviceSize bufferImageGranularity,
10429 VkDeviceSize allocSize,
10430 VkDeviceSize allocAlignment,
10432 VmaSuballocationType allocType,
10433 bool canMakeOtherLost,
10435 VmaAllocationRequest* pAllocationRequest)
10437 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Unknown-content allocations must respect bufferImageGranularity on both
// alignment and size, since a conflicting neighbor type cannot be ruled out.
10441 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
10442 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
10443 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
10445 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
10446 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
10449 if(allocSize > m_UsableSize)
10454 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10455 for(uint32_t level = targetLevel + 1; level--; )
10457 for(Node* freeNode = m_FreeList[level].front;
10458 freeNode != VMA_NULL;
10459 freeNode = freeNode->free.next)
10461 if(freeNode->offset % allocAlignment == 0)
10463 pAllocationRequest->offset = freeNode->offset;
10464 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
10465 pAllocationRequest->sumItemSize = 0;
10466 pAllocationRequest->itemsToMakeLostCount = 0;
10467 pAllocationRequest->customData = (
void*)(uintptr_t)level;
// The buddy algorithm does not support lost allocations: this succeeds only
// when nothing was requested to be made lost.
10476 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
10477 uint32_t currentFrameIndex,
10478 uint32_t frameInUseCount,
10479 VmaAllocationRequest* pAllocationRequest)
10485 return pAllocationRequest->itemsToMakeLostCount == 0;
// Lost-allocation sweep for the buddy algorithm. Body elided by the
// extraction; presumably a no-op returning 0, since this algorithm does not
// support lost allocations (see MakeRequestedAllocationsLost above) — TODO
// confirm against the full source.
10488 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// Commits a buddy allocation: locates the free node chosen by
// CreateAllocationRequest (level carried in request.customData, matched by
// offset), splits it repeatedly into buddy pairs until reaching the target
// level, then converts the final node to TYPE_ALLOCATION and updates counters.
// NOTE(review): lossy extraction — the hAllocation parameter line, braces,
// currLevel increment, and some free-list bookkeeping are missing; code left
// byte-identical.
10497 void VmaBlockMetadata_Buddy::Alloc(
10498 const VmaAllocationRequest& request,
10499 VmaSuballocationType type,
10500 VkDeviceSize allocSize,
10504 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10505 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Find the reserved node in its level's free list by offset.
10507 Node* currNode = m_FreeList[currLevel].front;
10508 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
10509 while(currNode->offset != request.offset)
10511 currNode = currNode->free.next;
10512 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split down: each iteration replaces the current node with two half-size
// buddies linked to each other and to their parent.
10516 while(currLevel < targetLevel)
10520 RemoveFromFreeList(currLevel, currNode);
10522 const uint32_t childrenLevel = currLevel + 1;
10525 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
10526 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
10528 leftChild->offset = currNode->offset;
10529 leftChild->type = Node::TYPE_FREE;
10530 leftChild->parent = currNode;
10531 leftChild->buddy = rightChild;
10533 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
10534 rightChild->type = Node::TYPE_FREE;
10535 rightChild->parent = currNode;
10536 rightChild->buddy = leftChild;
10539 currNode->type = Node::TYPE_SPLIT;
10540 currNode->split.leftChild = leftChild;
// Left child pushed last so it sits at the front and gets picked next.
10543 AddToFreeListFront(childrenLevel, rightChild);
10544 AddToFreeListFront(childrenLevel, leftChild);
10549 currNode = m_FreeList[currLevel].front;
10558 VMA_ASSERT(currLevel == targetLevel &&
10559 currNode != VMA_NULL &&
10560 currNode->type == Node::TYPE_FREE);
10561 RemoveFromFreeList(currLevel, currNode);
10564 currNode->type = Node::TYPE_ALLOCATION;
10565 currNode->allocation.alloc = hAllocation;
10567 ++m_AllocationCount;
10569 m_SumFreeSize -= allocSize;
// Recursively deletes a buddy-tree node: for a SPLIT node, both children
// (the right child is reached via leftChild->buddy) are deleted first.
10572 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
10574 if(node->type == Node::TYPE_SPLIT)
10576 DeleteNode(node->split.leftChild->buddy);
10577 DeleteNode(node->split.leftChild);
10580 vma_delete(GetAllocationCallbacks(), node);
10583 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const 10585 VMA_VALIDATE(level < m_LevelCount);
10586 VMA_VALIDATE(curr->parent == parent);
10587 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
10588 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
10591 case Node::TYPE_FREE:
10593 ctx.calculatedSumFreeSize += levelNodeSize;
10594 ++ctx.calculatedFreeCount;
10596 case Node::TYPE_ALLOCATION:
10597 ++ctx.calculatedAllocationCount;
10598 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
10599 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
10601 case Node::TYPE_SPLIT:
10603 const uint32_t childrenLevel = level + 1;
10604 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
10605 const Node*
const leftChild = curr->split.leftChild;
10606 VMA_VALIDATE(leftChild != VMA_NULL);
10607 VMA_VALIDATE(leftChild->offset == curr->offset);
10608 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
10610 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
10612 const Node*
const rightChild = leftChild->buddy;
10613 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
10614 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
10616 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
// Maps an allocation size to the deepest buddy-tree level whose node size
// still fits allocSize. Level 0 is the whole usable block; each level halves.
// NOTE(review): the loop body's "++level" and the trailing "return level;"
// were lost in this paste — restore from the full source.
10627 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const 10630 uint32_t level = 0;
10631 VkDeviceSize currLevelNodeSize = m_UsableSize;
10632 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
// Descend while the next (smaller) level node would still fit the request.
10633 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
10636 currLevelNodeSize = nextLevelNodeSize;
10637 nextLevelNodeSize = currLevelNodeSize >> 1;
// Frees the allocation node that covers the given byte offset: walks the tree
// from the root choosing the child whose range contains offset, marks the leaf
// free, then merges it with its buddy up the tree while both halves are free.
// NOTE(review): descent-loop "++level" and the merge-loop's "node = parent;
// --level;" bookkeeping were lost in this paste.
10642 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
10645 Node* node = m_Root;
10646 VkDeviceSize nodeOffset = 0;
10647 uint32_t level = 0;
10648 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
// Descend through split nodes to the leaf containing offset.
10649 while(node->type == Node::TYPE_SPLIT)
10651 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
10652 if(offset < nodeOffset + nextLevelSize)
10654 node = node->split.leftChild;
10658 node = node->split.leftChild->buddy;
10659 nodeOffset += nextLevelSize;
10662 levelNodeSize = nextLevelSize;
10665 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
// NOTE(review): this assert permits alloc == VK_NULL_HANDLE, yet
// alloc->GetSize() below dereferences it — confirm callers never pass null.
10666 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
10669 --m_AllocationCount;
10670 m_SumFreeSize += alloc->GetSize();
10672 node->type = Node::TYPE_FREE;
// Merge with the buddy while possible: remove buddy from its free list,
// delete both children, and turn the parent into a free node.
10675 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
10677 RemoveFromFreeList(level, node->buddy);
10678 Node*
const parent = node->parent;
10680 vma_delete(GetAllocationCallbacks(), node->buddy);
10681 vma_delete(GetAllocationCallbacks(), node);
10682 parent->type = Node::TYPE_FREE;
// Finally publish the (possibly merged) free node on its level's free list.
10690 AddToFreeListFront(level, node);
// Recursively accumulates statistics (allocation sizes, unused ranges) for one
// buddy-tree node into outInfo.
// NOTE(review): the "switch(node->type)" header, the TYPE_FREE case body, the
// stat-update statements, and the "break"s were lost in this paste.
10693 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const 10697 case Node::TYPE_FREE:
10703 case Node::TYPE_ALLOCATION:
10705 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// The tail beyond the actual allocation within the level-sized node is unused.
10711 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
10712 if(unusedRangeSize > 0)
// Split node: recurse into both halves.
10721 case Node::TYPE_SPLIT:
10723 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
10724 const Node*
const leftChild = node->split.leftChild;
10725 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
10726 const Node*
const rightChild = leftChild->buddy;
10727 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
10735 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
10737 VMA_ASSERT(node->type == Node::TYPE_FREE);
10740 Node*
const frontNode = m_FreeList[level].front;
10741 if(frontNode == VMA_NULL)
10743 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
10744 node->free.prev = node->free.next = VMA_NULL;
10745 m_FreeList[level].front = m_FreeList[level].back = node;
10749 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
10750 node->free.prev = VMA_NULL;
10751 node->free.next = frontNode;
10752 frontNode->free.prev = node;
10753 m_FreeList[level].front = node;
10757 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
10759 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
10762 if(node->free.prev == VMA_NULL)
10764 VMA_ASSERT(m_FreeList[level].front == node);
10765 m_FreeList[level].front = node->free.next;
10769 Node*
const prevFreeNode = node->free.prev;
10770 VMA_ASSERT(prevFreeNode->free.next == node);
10771 prevFreeNode->free.next = node->free.next;
10775 if(node->free.next == VMA_NULL)
10777 VMA_ASSERT(m_FreeList[level].back == node);
10778 m_FreeList[level].back = node->free.prev;
10782 Node*
const nextFreeNode = node->free.next;
10783 VMA_ASSERT(nextFreeNode->free.prev == node);
10784 nextFreeNode->free.prev = node->free.prev;
// Emits the JSON description of one buddy-tree node: an unused range for a
// free node, an allocation (plus any unused tail) for an allocation node,
// and a recursion into both halves for a split node.
// NOTE(review): the "switch(node->type)" header and the "break"s were lost
// in this paste — only the case bodies remain.
#if VMA_STATS_STRING_ENABLED 10789 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const 10793 case Node::TYPE_FREE:
10794 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
10796 case Node::TYPE_ALLOCATION:
10798 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
10799 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Report the unused tail of the level-sized node beyond the allocation.
10800 if(allocSize < levelNodeSize)
10802 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
10806 case Node::TYPE_SPLIT:
10808 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
10809 const Node*
const leftChild = node->split.leftChild;
10810 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
10811 const Node*
const rightChild = leftChild->buddy;
10812 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
// Constructor: leaves the block uninitialized; the real setup happens in Init().
// NOTE(review): some initializers (original lines 10828 and 10830, likely the
// block id and map counter) were lost in this paste.
#endif // #if VMA_STATS_STRING_ENABLED 10825 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
10826 m_pMetadata(VMA_NULL),
10827 m_MemoryTypeIndex(UINT32_MAX),
10829 m_hMemory(VK_NULL_HANDLE),
10831 m_pMappedData(VMA_NULL)
// Binds this block object to freshly allocated VkDeviceMemory and creates the
// metadata object that manages suballocations within it.
// NOTE(review): the switch/if on "algorithm" that selects between the Linear,
// Buddy, and Generic metadata implementations was lost in this paste — only
// the three assignments remain; also some parameter lines are missing.
10835 void VmaDeviceMemoryBlock::Init(
10837 uint32_t newMemoryTypeIndex,
10838 VkDeviceMemory newMemory,
10839 VkDeviceSize newSize,
10841 uint32_t algorithm)
// A block may be initialized only once.
10843 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
10845 m_MemoryTypeIndex = newMemoryTypeIndex;
10847 m_hMemory = newMemory;
10852 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
10855 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
10861 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
10863 m_pMetadata->Init(newSize);
10866 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
10870 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
10872 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
10873 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
10874 m_hMemory = VK_NULL_HANDLE;
10876 vma_delete(allocator, m_pMetadata);
10877 m_pMetadata = VMA_NULL;
10880 bool VmaDeviceMemoryBlock::Validate()
const 10882 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
10883 (m_pMetadata->GetSize() != 0));
10885 return m_pMetadata->Validate();
// Temporarily maps the block and asks the metadata to verify the magic-value
// margins of every allocation; unmaps before returning.
// NOTE(review): the early "return res;" inside the failure branch and the
// final "return res;" were lost in this paste.
10888 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
10890 void* pData =
nullptr;
10891 VkResult res = Map(hAllocator, 1, &pData);
10892 if(res != VK_SUCCESS)
10897 res = m_pMetadata->CheckCorruption(pData);
// Balance the Map() above regardless of the corruption-check result.
10899 Unmap(hAllocator, 1);
// Reference-counted map of the block's memory. If already mapped, just bumps
// the counter and returns the cached pointer; otherwise calls vkMapMemory.
// NOTE(review): the count==0 early-out, the "return VK_SUCCESS;" statements,
// and the vkMapMemory argument lines (offset/size/flags/&m_pMappedData) were
// lost in this paste.
10904 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
// Mutex guards m_MapCount/m_pMappedData against concurrent Map/Unmap.
10911 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10912 if(m_MapCount != 0)
10914 m_MapCount += count;
10915 VMA_ASSERT(m_pMappedData != VMA_NULL);
10916 if(ppData != VMA_NULL)
10918 *ppData = m_pMappedData;
// First mapping: go through the Vulkan dispatch table.
10924 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
10925 hAllocator->m_hDevice,
10931 if(result == VK_SUCCESS)
10933 if(ppData != VMA_NULL)
10935 *ppData = m_pMappedData;
10937 m_MapCount = count;
// Reference-counted unmap: decrements the map counter and calls vkUnmapMemory
// only when it reaches zero. Asserts on unbalanced Unmap.
// NOTE(review): the count==0 early-out and the else keyword/braces around the
// failing branch were lost in this paste.
10943 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
10950 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10951 if(m_MapCount >= count)
10953 m_MapCount -= count;
10954 if(m_MapCount == 0)
10956 m_pMappedData = VMA_NULL;
10957 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
10962 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Writes the corruption-detection magic values into the debug margins directly
// before and after an allocation. Only meaningful when VMA_DEBUG_MARGIN and
// VMA_DEBUG_DETECT_CORRUPTION are enabled.
// NOTE(review): the "void* pData;" declaration, the early "return res;", and
// the final "return VK_SUCCESS;" were lost in this paste.
10966 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
10968 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
10969 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
10972 VkResult res = Map(hAllocator, 1, &pData);
10973 if(res != VK_SUCCESS)
// Magic before the allocation and right after its end.
10978 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
10979 VmaWriteMagicValue(pData, allocOffset + allocSize);
10981 Unmap(hAllocator, 1);
// Verifies the magic values written by WriteMagicValueAroundAllocation; an
// assert fires if either margin was overwritten (buffer under/overrun).
// NOTE(review): the "void* pData;" declaration, the early "return res;", and
// the final "return VK_SUCCESS;" were lost in this paste.
10986 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
10988 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
10989 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
10992 VkResult res = Map(hAllocator, 1, &pData);
10993 if(res != VK_SUCCESS)
10998 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
11000 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
11002 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
11004 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
11007 Unmap(hAllocator, 1);
// Binds a VkBuffer to this block's memory at the allocation's offset, under
// the block mutex (vkBindBufferMemory on one VkDeviceMemory must be
// externally synchronized).
// NOTE(review): the parameter lines (hAllocator, hAllocation, hBuffer) and
// the buffer/memory argument lines of the call were lost in this paste.
11012 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
11017 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11018 hAllocation->GetBlock() ==
this);
11020 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11021 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
11022 hAllocator->m_hDevice,
11025 hAllocation->GetOffset());
// Binds a VkImage to this block's memory at the allocation's offset, under
// the block mutex — mirrors BindBufferMemory.
// NOTE(review): the parameter lines and the image/memory argument lines of
// the call were lost in this paste.
11028 VkResult VmaDeviceMemoryBlock::BindImageMemory(
11033 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11034 hAllocation->GetBlock() ==
this);
11036 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11037 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
11038 hAllocator->m_hDevice,
11041 hAllocation->GetOffset());
// Fragments of two static stat helpers. The memset zero-initializes a
// VmaStatInfo inside a function whose signature (presumably VmaInitStatInfo)
// was lost in this paste; VmaPostprocessCalcStatInfo's body is missing too.
11046 memset(&outInfo, 0,
sizeof(outInfo));
11065 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool_T constructor: forwards the pool create-info into the member
// VmaBlockVector. A zero blockSize means "use the allocator's preferred size",
// and a non-zero blockSize marks the size as explicit (no auto-halving).
// NOTE(review): some parameter/initializer lines (allocator handle,
// isCustomPool/algorithm arguments, pool id) were lost in this paste, as was
// the destructor's body.
11073 VmaPool_T::VmaPool_T(
11076 VkDeviceSize preferredBlockSize) :
11079 createInfo.memoryTypeIndex,
11080 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
11081 createInfo.minBlockCount,
11082 createInfo.maxBlockCount,
11084 createInfo.frameInUseCount,
11086 createInfo.blockSize != 0,
11092 VmaPool_T::~VmaPool_T()
// VmaBlockVector constructor: stores all configuration for one memory type's
// sequence of VkDeviceMemory blocks; the blocks vector starts empty and uses
// the allocator's allocation callbacks.
// NOTE(review): the hAllocator and isCustomPool parameter lines and the last
// initializer (original line 11123, likely the next-block-id counter) were
// lost in this paste.
#if VMA_STATS_STRING_ENABLED 11098 #endif // #if VMA_STATS_STRING_ENABLED 11100 VmaBlockVector::VmaBlockVector(
11102 uint32_t memoryTypeIndex,
11103 VkDeviceSize preferredBlockSize,
11104 size_t minBlockCount,
11105 size_t maxBlockCount,
11106 VkDeviceSize bufferImageGranularity,
11107 uint32_t frameInUseCount,
11109 bool explicitBlockSize,
11110 uint32_t algorithm) :
11111 m_hAllocator(hAllocator),
11112 m_MemoryTypeIndex(memoryTypeIndex),
11113 m_PreferredBlockSize(preferredBlockSize),
11114 m_MinBlockCount(minBlockCount),
11115 m_MaxBlockCount(maxBlockCount),
11116 m_BufferImageGranularity(bufferImageGranularity),
11117 m_FrameInUseCount(frameInUseCount),
11118 m_IsCustomPool(isCustomPool),
11119 m_ExplicitBlockSize(explicitBlockSize),
11120 m_Algorithm(algorithm),
11121 m_HasEmptyBlock(false),
11122 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
11127 VmaBlockVector::~VmaBlockVector()
11129 for(
size_t i = m_Blocks.size(); i--; )
11131 m_Blocks[i]->Destroy(m_hAllocator);
11132 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount blocks of the preferred size.
// NOTE(review): the "return res;" on failure and the final
// "return VK_SUCCESS;" were lost in this paste.
11136 VkResult VmaBlockVector::CreateMinBlocks()
11138 for(
size_t i = 0; i < m_MinBlockCount; ++i)
11140 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
11141 if(res != VK_SUCCESS)
// Fills pStats by summing per-block metadata statistics under a read lock.
// NOTE(review): the lines zeroing/pre-initializing *pStats (original lines
// 11154-11161) were lost in this paste — AddPoolStats only accumulates.
11149 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
11151 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
11153 const size_t blockCount = m_Blocks.size();
11162 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11164 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11165 VMA_ASSERT(pBlock);
11166 VMA_HEAVY_ASSERT(pBlock->Validate());
11167 pBlock->m_pMetadata->AddPoolStats(*pStats);
11171 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 11173 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
11174 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
11175 (VMA_DEBUG_MARGIN > 0) &&
11176 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
// Upper bound on the retry loop of the "can make other allocations lost" path.
11179 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Main allocation routine for one block vector. Strategy, in order:
// 1) try the last block, then all existing blocks (forward, then backward),
// 2) create a new block (halving its size up to 3 times on failure when the
//    block size is not explicit),
// 3) optionally evict ("make lost") other allocations and retry.
// NOTE(review): many lines were lost in this paste — several parameters,
// AllocateFromBlock/CreateAllocationRequest argument lists, brace structure,
// and most "return VK_SUCCESS;" statements. Treat this as a skeleton.
11181 VkResult VmaBlockVector::Allocate(
11183 uint32_t currentFrameIndex,
11185 VkDeviceSize alignment,
11187 VmaSuballocationType suballocType,
11194 const bool canCreateNewBlock =
11196 (m_Blocks.size() < m_MaxBlockCount);
11203 canMakeOtherLost =
false;
// Upper-address allocation is only meaningful for some algorithms.
11207 if(isUpperAddress &&
11210 return VK_ERROR_FEATURE_NOT_PRESENT;
11224 return VK_ERROR_FEATURE_NOT_PRESENT;
// A request (plus both debug margins) larger than a whole block can never fit.
11228 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
11230 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11233 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11240 if(!canMakeOtherLost || canCreateNewBlock)
// 1a) Fast path: try the most recently used (last) block first.
11249 if(!m_Blocks.empty())
11251 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
11252 VMA_ASSERT(pCurrBlock);
11253 VkResult res = AllocateFromBlock(
11264 if(res == VK_SUCCESS)
11266 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
// 1b) Scan existing blocks front-to-back.
11276 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11278 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11279 VMA_ASSERT(pCurrBlock);
11280 VkResult res = AllocateFromBlock(
11291 if(res == VK_SUCCESS)
11293 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 1c) Alternative strategy: scan back-to-front.
11301 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11303 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11304 VMA_ASSERT(pCurrBlock);
11305 VkResult res = AllocateFromBlock(
11316 if(res == VK_SUCCESS)
11318 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 2) Create a new block, pre-shrinking toward the request size when allowed.
11326 if(canCreateNewBlock)
11329 VkDeviceSize newBlockSize = m_PreferredBlockSize;
11330 uint32_t newBlockSizeShift = 0;
11331 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
11333 if(!m_ExplicitBlockSize)
11336 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
11337 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
11339 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11340 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
11342 newBlockSize = smallerNewBlockSize;
11343 ++newBlockSizeShift;
11352 size_t newBlockIndex = 0;
11353 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On VK_ERROR_OUT_OF_*_MEMORY, keep halving and retrying (non-explicit size).
11355 if(!m_ExplicitBlockSize)
11357 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
11359 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11360 if(smallerNewBlockSize >= size)
11362 newBlockSize = smallerNewBlockSize;
11363 ++newBlockSizeShift;
11364 res = CreateBlock(newBlockSize, &newBlockIndex);
11373 if(res == VK_SUCCESS)
11375 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
11376 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
11378 res = AllocateFromBlock(
11389 if(res == VK_SUCCESS)
11391 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
11397 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// 3) Eviction path: find the cheapest block to steal space from, make the
//    conflicting allocations lost, and retry up to VMA_ALLOCATION_TRY_COUNT.
11404 if(canMakeOtherLost)
11406 uint32_t tryIndex = 0;
11407 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
11409 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
11410 VmaAllocationRequest bestRequest = {};
11411 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Forward scan, keeping the lowest-cost request.
11417 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11419 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11420 VMA_ASSERT(pCurrBlock);
11421 VmaAllocationRequest currRequest = {};
11422 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11425 m_BufferImageGranularity,
11434 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11435 if(pBestRequestBlock == VMA_NULL ||
11436 currRequestCost < bestRequestCost)
11438 pBestRequestBlock = pCurrBlock;
11439 bestRequest = currRequest;
11440 bestRequestCost = currRequestCost;
// A zero-cost request cannot be beaten — stop searching.
11442 if(bestRequestCost == 0)
// Backward scan variant (alternative allocation strategy).
11453 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11455 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11456 VMA_ASSERT(pCurrBlock);
11457 VmaAllocationRequest currRequest = {};
11458 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11461 m_BufferImageGranularity,
11470 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11471 if(pBestRequestBlock == VMA_NULL ||
11472 currRequestCost < bestRequestCost ||
11475 pBestRequestBlock = pCurrBlock;
11476 bestRequest = currRequest;
11477 bestRequestCost = currRequestCost;
11479 if(bestRequestCost == 0 ||
11489 if(pBestRequestBlock != VMA_NULL)
11493 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
11494 if(res != VK_SUCCESS)
// Only commit if the required allocations could actually be made lost.
11500 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
11506 if(pBestRequestBlock->m_pMetadata->IsEmpty())
11508 m_HasEmptyBlock =
false;
11511 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
11512 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, isUpperAddress, *pAllocation);
11513 (*pAllocation)->InitBlockAllocation(
11516 bestRequest.offset,
11522 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
11523 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
11524 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
11525 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11527 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
11529 if(IsCorruptionDetectionEnabled())
11531 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
11532 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Retry budget exhausted: too much churn from repeated lost allocations.
11547 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
11549 return VK_ERROR_TOO_MANY_OBJECTS;
11553 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Returns an allocation to its block: validates debug margins, unmaps a
// persistent mapping, frees the suballocation, and manages the "at most one
// empty block retained" policy (the actual VkDeviceMemory destruction happens
// outside the mutex, via pBlockToDelete).
// NOTE(review): the hAllocation parameter line, scope braces, and the
// memTypeIndex local referenced by the log line were lost in this paste.
11556 void VmaBlockVector::Free(
11559 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope of lock. Destroying the block's device memory is deferred past it.
11563 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11565 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
11567 if(IsCorruptionDetectionEnabled())
11569 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
11570 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
// Persistent mappings hold one map reference that must be released here.
11573 if(hAllocation->IsPersistentMap())
11575 pBlock->Unmap(m_hAllocator, 1);
11578 pBlock->m_pMetadata->Free(hAllocation);
11579 VMA_HEAVY_ASSERT(pBlock->Validate());
11581 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// Keep at most one empty block alive; delete the second one to appear.
11584 if(pBlock->m_pMetadata->IsEmpty())
11587 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
11589 pBlockToDelete = pBlock;
11595 m_HasEmptyBlock =
true;
// The block is no longer empty: if another empty block exists at the back,
// it can be reclaimed now.
11600 else if(m_HasEmptyBlock)
11602 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
11603 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
11605 pBlockToDelete = pLastBlock;
11606 m_Blocks.pop_back();
11607 m_HasEmptyBlock =
false;
11611 IncrementallySortBlocks();
// Destruction of VkDeviceMemory is deliberately performed outside the lock.
11616 if(pBlockToDelete != VMA_NULL)
11618 VMA_DEBUG_LOG(
" Deleted empty allocation");
11619 pBlockToDelete->Destroy(m_hAllocator);
11620 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the size of the largest existing block, scanning from the back and
// stopping early once the preferred size is reached.
// NOTE(review): the early "break;" and the final "return result;" were lost
// in this paste.
11624 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 11626 VkDeviceSize result = 0;
11627 for(
size_t i = m_Blocks.size(); i--; )
11629 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
11630 if(result >= m_PreferredBlockSize)
// Removes the given block pointer from m_Blocks (does not destroy it).
// NOTE(review): the early "return;" after removal and the trailing
// "block not found" assert were lost in this paste.
11638 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
11640 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11642 if(m_Blocks[blockIndex] == pBlock)
11644 VmaVectorRemove(m_Blocks, blockIndex);
// One pass of a bubble-sort step: swaps the first out-of-order adjacent pair
// so blocks drift toward ascending sum-of-free-size over repeated calls.
// NOTE(review): a guard (likely skipping linear-algorithm pools) and the
// "return;" after the swap were lost in this paste.
11651 void VmaBlockVector::IncrementallySortBlocks()
11656 for(
size_t i = 1; i < m_Blocks.size(); ++i)
11658 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
11660 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Attempts a non-evicting allocation from one specific block: builds an
// allocation request, maps the block when the mapping is requested, commits
// the suballocation, initializes the VmaAllocation_T, and applies the debug
// fill/magic-margin features.
// NOTE(review): several parameter lines, CreateAllocationRequest arguments,
// brace structure, and the success "return VK_SUCCESS;" were lost in this
// paste.
11667 VkResult VmaBlockVector::AllocateFromBlock(
11668 VmaDeviceMemoryBlock* pBlock,
11670 uint32_t currentFrameIndex,
11672 VkDeviceSize alignment,
11675 VmaSuballocationType suballocType,
11684 VmaAllocationRequest currRequest = {};
11685 if(pBlock->m_pMetadata->CreateAllocationRequest(
11688 m_BufferImageGranularity,
// This path never evicts: the request must not require losing allocations.
11698 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
11702 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
11703 if(res != VK_SUCCESS)
// The block is about to receive an allocation, so it is no longer empty.
11710 if(pBlock->m_pMetadata->IsEmpty())
11712 m_HasEmptyBlock =
false;
11715 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
11716 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, isUpperAddress, *pAllocation);
11717 (*pAllocation)->InitBlockAllocation(
11720 currRequest.offset,
11726 VMA_HEAVY_ASSERT(pBlock->Validate());
11727 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
11728 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11730 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
11732 if(IsCorruptionDetectionEnabled())
11734 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
11735 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
11739 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates new VkDeviceMemory of blockSize, wraps it in a
// VmaDeviceMemoryBlock, appends it to m_Blocks, and reports its index.
// NOTE(review): the failure branch ("if(res < 0) return res;"), the
// remaining pBlock->Init arguments, and the final "return VK_SUCCESS;"
// were lost in this paste.
11742 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
11744 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
11745 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
11746 allocInfo.allocationSize = blockSize;
11747 VkDeviceMemory mem = VK_NULL_HANDLE;
11748 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
11757 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
11762 allocInfo.allocationSize,
11766 m_Blocks.push_back(pBlock);
11767 if(pNewBlockIndex != VMA_NULL)
11769 *pNewBlockIndex = m_Blocks.size() - 1;
// Executes defragmentation moves on the CPU via memcpy between mapped blocks:
// 1) marks blocks touched by any move, 2) maps those not already mapped,
// 3) for each move: invalidate src range (non-coherent), memcpy, rewrite
//    magic margins, flush dst range (non-coherent), 4) unmaps blocks that
//    were mapped only for defragmentation.
// NOTE(review): the BlockInfo struct/enum wrappers, the isNonCoherent guards
// around invalidate/flush, the memcpy( call header, and brace structure were
// lost in this paste.
11775 void VmaBlockVector::ApplyDefragmentationMovesCpu(
11776 class VmaBlockVectorDefragmentationContext* pDefragCtx,
11777 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
11779 const size_t blockCount = m_Blocks.size();
11780 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
11784 BLOCK_FLAG_USED = 0x00000001,
11785 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
11793 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
11794 blockInfo(blockCount, VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
11795 memset(blockInfo.data(), 0, blockCount *
sizeof(BlockInfo));
// Pass 1: flag every block that participates in at least one move.
11798 const size_t moveCount = moves.size();
11799 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
11801 const VmaDefragmentationMove& move = moves[moveIndex];
11802 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
11803 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
11806 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Pass 2: ensure every used block has a CPU mapping.
11809 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
11811 BlockInfo& currBlockInfo = blockInfo[blockIndex];
11812 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
11813 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
11815 currBlockInfo.pMappedData = pBlock->GetMappedData();
// Not mapped by the user: map it now and remember to unmap afterwards.
11817 if(currBlockInfo.pMappedData == VMA_NULL)
11819 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
11820 if(pDefragCtx->res == VK_SUCCESS)
11822 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
// Pass 3: perform the moves.
11829 if(pDefragCtx->res == VK_SUCCESS)
11831 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
11832 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
11834 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
11836 const VmaDefragmentationMove& move = moves[moveIndex];
11838 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
11839 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
11841 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
// Invalidate the source range so the CPU reads up-to-date data
// (needed only for non-coherent memory; ranges are atom-aligned).
11846 VmaDeviceMemoryBlock*
const pSrcBlock = m_Blocks[move.srcBlockIndex];
11847 memRange.memory = pSrcBlock->GetDeviceMemory();
11848 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
11849 memRange.size = VMA_MIN(
11850 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
11851 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
11852 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// The memcpy itself (its "memcpy(" header line was lost in this paste).
11857 reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
11858 reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
11859 static_cast<size_t>(move.size));
11861 if(IsCorruptionDetectionEnabled())
11863 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
11864 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
// Flush the destination range so the device sees the written data.
11870 VmaDeviceMemoryBlock*
const pDstBlock = m_Blocks[move.dstBlockIndex];
11871 memRange.memory = pDstBlock->GetDeviceMemory();
11872 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
11873 memRange.size = VMA_MIN(
11874 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
11875 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
11876 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// Pass 4: unmap blocks that were mapped solely for this operation.
11883 for(
size_t blockIndex = blockCount; blockIndex--; )
11885 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
11886 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
11888 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
11889 pBlock->Unmap(m_hAllocator, 1);
// Executes defragmentation moves on the GPU: creates a transfer-usage
// VkBuffer over each participating block's whole memory, then records
// vkCmdCopyBuffer commands for every move into the given command buffer.
// The result stays VK_NOT_READY until the command buffer completes.
// NOTE(review): the VkBufferCopy field initializers (srcOffset/dstOffset/
// size) and brace structure were lost in this paste.
11894 void VmaBlockVector::ApplyDefragmentationMovesGpu(
11895 class VmaBlockVectorDefragmentationContext* pDefragCtx,
11896 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
11897 VkCommandBuffer commandBuffer)
11899 const size_t blockCount = m_Blocks.size();
11901 pDefragCtx->blockContexts.resize(blockCount);
11902 memset(pDefragCtx->blockContexts.data(), 0, blockCount *
sizeof(VmaBlockDefragmentationContext));
// Pass 1: flag blocks that take part in at least one move.
11905 const size_t moveCount = moves.size();
11906 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
11908 const VmaDefragmentationMove& move = moves[moveIndex];
11909 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
11910 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
11913 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Pass 2: create and bind a whole-block transfer buffer per used block.
11917 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
11918 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
11919 VK_BUFFER_USAGE_TRANSFER_DST_BIT;
11921 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
11923 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
11924 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
11925 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
11927 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
11928 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
11929 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
11930 if(pDefragCtx->res == VK_SUCCESS)
11932 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
11933 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
// Pass 3: record one copy command per move.
11940 if(pDefragCtx->res == VK_SUCCESS)
11942 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
11943 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
11945 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
11947 const VmaDefragmentationMove& move = moves[moveIndex];
11949 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
11950 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
11952 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
11954 VkBufferCopy region = {
// NOTE(review): "®ion" below is a mojibake of "&region" — restore the
// address-of operator when re-assembling this file.
11958 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
11959 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, ®ion);
// Recorded but not yet executed: signal "wait for the command buffer".
11964 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
11966 pDefragCtx->res = VK_NOT_READY;
// Fragment: interior of a block-reclaiming routine (its signature, presumably
// FreeEmptyBlocks(VmaDefragmentationStats* pDefragmentationStats), was lost
// in this paste). Destroys empty blocks above m_MinBlockCount, credits their
// size to the defragmentation stats, and recomputes m_HasEmptyBlock.
11972 m_HasEmptyBlock =
false;
11973 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11975 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
11976 if(pBlock->m_pMetadata->IsEmpty())
// Only shrink while staying at or above the configured minimum block count.
11978 if(m_Blocks.size() > m_MinBlockCount)
11980 if(pDefragmentationStats != VMA_NULL)
11983 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
11986 VmaVectorRemove(m_Blocks, blockIndex);
11987 pBlock->Destroy(m_hAllocator);
11988 vma_delete(m_hAllocator, pBlock);
// An empty block that must be kept (at/below the minimum) is remembered here.
11992 m_HasEmptyBlock =
true;
// Writes this block vector's configuration and each block's detailed map as
// JSON. The first half emits pool-style fields (explicit BlockSize, block
// count bounds, frame-in-use, algorithm); the second half emits the default
// vector's PreferredBlockSize — the branch selecting between them (likely
// if(m_IsCustomPool)) was lost in this paste, as were EndObject/EndString
// calls and brace structure.
#if VMA_STATS_STRING_ENABLED 12000 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
12002 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12004 json.BeginObject();
12008 json.WriteString(
"MemoryTypeIndex");
12009 json.WriteNumber(m_MemoryTypeIndex);
12011 json.WriteString(
"BlockSize");
12012 json.WriteNumber(m_PreferredBlockSize);
12014 json.WriteString(
"BlockCount");
12015 json.BeginObject(
true);
12016 if(m_MinBlockCount > 0)
12018 json.WriteString(
"Min");
12019 json.WriteNumber((uint64_t)m_MinBlockCount);
12021 if(m_MaxBlockCount < SIZE_MAX)
12023 json.WriteString(
"Max");
12024 json.WriteNumber((uint64_t)m_MaxBlockCount);
12026 json.WriteString(
"Cur");
12027 json.WriteNumber((uint64_t)m_Blocks.size());
12030 if(m_FrameInUseCount > 0)
12032 json.WriteString(
"FrameInUseCount");
12033 json.WriteNumber(m_FrameInUseCount);
12036 if(m_Algorithm != 0)
12038 json.WriteString(
"Algorithm");
12039 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
12044 json.WriteString(
"PreferredBlockSize");
12045 json.WriteNumber(m_PreferredBlockSize);
// Per-block dump keyed by block id.
12048 json.WriteString(
"Blocks");
12049 json.BeginObject();
12050 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12052 json.BeginString();
12053 json.ContinueString(m_Blocks[i]->GetId());
12056 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// Runs one defragmentation round for this block vector. Chooses CPU or GPU
// execution based on the remaining byte/allocation budgets and the memory
// type's properties, runs the defragmentation algorithm to compute moves,
// charges the budgets, and applies the moves via the matching Apply* routine.
// The mutex is taken here and released later in DefragmentationEnd.
// NOTE(review): the pStats parameter line and brace structure were lost in
// this paste.
#endif // #if VMA_STATS_STRING_ENABLED 12065 void VmaBlockVector::Defragment(
12066 class VmaBlockVectorDefragmentationContext* pCtx,
12068 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
12069 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
12070 VkCommandBuffer commandBuffer)
12072 pCtx->res = VK_SUCCESS;
12074 const VkMemoryPropertyFlags memPropFlags =
12075 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
// CPU path requires a host-visible memory type; GPU path only needs budget.
12076 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
12077 (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
12078 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0;
12081 if(canDefragmentOnCpu || canDefragmentOnGpu)
12083 bool defragmentOnGpu;
// Exactly one option available: take it. Otherwise prefer GPU for
// device-local memory or integrated GPUs.
12085 if(canDefragmentOnGpu != canDefragmentOnCpu)
12087 defragmentOnGpu = canDefragmentOnGpu;
12092 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
12093 m_hAllocator->IsIntegratedGpu();
// CPU memcpy tolerates overlapping src/dst; buffer copies do not.
12096 bool overlappingMoveSupported = !defragmentOnGpu;
12098 if(m_hAllocator->m_UseMutex)
12100 m_Mutex.LockWrite();
12101 pCtx->mutexLocked =
true;
12104 pCtx->Begin(overlappingMoveSupported);
12108 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
12109 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
12110 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves =
12111 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()));
12112 pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);
// Charge the consumed budget back to the caller's in/out limits.
12115 if(pStats != VMA_NULL)
12117 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
12118 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
12121 VMA_ASSERT(bytesMoved <= maxBytesToMove);
12122 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
12123 if(defragmentOnGpu)
12125 maxGpuBytesToMove -= bytesMoved;
12126 maxGpuAllocationsToMove -= allocationsMoved;
12130 maxCpuBytesToMove -= bytesMoved;
12131 maxCpuAllocationsToMove -= allocationsMoved;
12135 if(pCtx->res >= VK_SUCCESS)
12137 if(defragmentOnGpu)
12139 ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
12143 ApplyDefragmentationMovesCpu(pCtx, moves);
// Finishes a defragmentation round: destroys the temporary per-block transfer
// buffers created by the GPU path, frees now-empty blocks on success, and
// releases the mutex taken in Defragment().
// NOTE(review): the pStats parameter line (used by FreeEmptyBlocks) was lost
// in this paste.
12149 void VmaBlockVector::DefragmentationEnd(
12150 class VmaBlockVectorDefragmentationContext* pCtx,
12154 for(
size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
12156 VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
12157 if(blockCtx.hBuffer)
12159 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
12160 m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
12164 if(pCtx->res >= VK_SUCCESS)
12166 FreeEmptyBlocks(pStats);
// Unlock only if Defragment() actually took the lock.
12169 if(pCtx->mutexLocked)
12171 VMA_ASSERT(m_hAllocator->m_UseMutex);
12172 m_Mutex.UnlockWrite();
// Sums GetAllocationCount() over all blocks.
// NOTE(review): the "size_t result = 0;" declaration and the final
// "return result;" were lost in this paste.
12176 size_t VmaBlockVector::CalcAllocationCount()
const 12179 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12181 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
// Conservatively answers whether mixing linear and optimal resources inside
// these blocks could violate bufferImageGranularity. A granularity of 1 can
// never conflict. Otherwise each block's generic metadata is queried in
// order, carrying the last suballocation type across blocks.
12186 bool VmaBlockVector::IsBufferImageGranularityConflictPossible()
const 12188 if(m_BufferImageGranularity == 1)
12192 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
12193 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
12195 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[i];
// Only the generic (default) block algorithm is expected here.
12196 VMA_ASSERT(m_Algorithm == 0);
12197 VmaBlockMetadata_Generic*
const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
12198 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
// Marks eligible allocations in every block as "lost" for the given frame
// (delegated to each block's metadata, honoring m_FrameInUseCount).
// Optionally reports the total number of allocations that were lost.
// Takes the vector's write lock for the duration.
12206 void VmaBlockVector::MakePoolAllocationsLost(
12207 uint32_t currentFrameIndex,
12208 size_t* pLostAllocationCount)
12210 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12211 size_t lostAllocationCount = 0;
12212 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12214 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12215 VMA_ASSERT(pBlock);
12216 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
// pLostAllocationCount is optional — write the total only if provided.
12218 if(pLostAllocationCount != VMA_NULL)
12220 *pLostAllocationCount = lostAllocationCount;
// Validates the corruption-detection margins of every block.
// Returns VK_ERROR_FEATURE_NOT_PRESENT when corruption detection is not
// enabled for this vector; otherwise forwards the first non-success result
// from a block's CheckCorruption. Holds only a read lock — this is a
// non-mutating scan.
12224 VkResult VmaBlockVector::CheckCorruption()
12226 if(!IsCorruptionDetectionEnabled())
12228 return VK_ERROR_FEATURE_NOT_PRESENT;
12231 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12232 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12234 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12235 VMA_ASSERT(pBlock);
12236 VkResult res = pBlock->CheckCorruption(m_hAllocator);
12237 if(res != VK_SUCCESS)
// Accumulates this vector's allocation statistics into pStats: each block's
// stat info is added to the global total, to this vector's memory type
// bucket, and to the owning heap's bucket. Read lock held while iterating.
12245 void VmaBlockVector::AddStats(
VmaStats* pStats)
12247 const uint32_t memTypeIndex = m_MemoryTypeIndex;
12248 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
12250 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12252 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12254 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12255 VMA_ASSERT(pBlock);
12256 VMA_HEAVY_ASSERT(pBlock->Validate());
12258 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
// Same per-block info is folded into all three aggregation levels.
12259 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12260 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12261 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Generic (quality-oriented) defragmentation algorithm.
// The constructor snapshots every block of the source vector into a BlockInfo
// (remembering each block's original index), then sorts the snapshot by block
// pointer so AddAllocation() can binary-search it.
12268 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
12270 VmaBlockVector* pBlockVector,
12271 uint32_t currentFrameIndex,
12272 bool overlappingMoveSupported) :
12273 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12274 m_AllAllocations(false),
12275 m_AllocationCount(0),
12277 m_AllocationsMoved(0),
12278 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
12281 const size_t blockCount = m_pBlockVector->m_Blocks.size();
12282 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12284 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
12285 pBlockInfo->m_OriginalBlockIndex = blockIndex;
12286 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
12287 m_Blocks.push_back(pBlockInfo);
// Sort by raw block pointer — precondition for VmaBinaryFindFirstNotLess
// with BlockPointerLess in AddAllocation().
12291 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Frees the BlockInfo objects allocated in the constructor, in reverse order.
12294 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
12296 for(
size_t i = m_Blocks.size(); i--; )
12298 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers a single allocation as a defragmentation candidate.
// Lost allocations are skipped. The allocation is filed under its owning
// block, located by binary search over the pointer-sorted m_Blocks.
// pChanged (optional) will be set to VK_TRUE later if the allocation moves.
12302 void VmaDefragmentationAlgorithm_Generic::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
12305 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
12307 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
12308 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
12309 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
12311 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
12312 (*it)->m_Allocations.push_back(allocInfo);
12319 ++m_AllocationCount;
// One round of the generic algorithm: repeatedly takes the last allocation
// of the last (least-preferred) block and tries to re-place it into an
// earlier block, recording each successful relocation in `moves`.
// Stops when the byte/allocation budgets would be exceeded.
// NOTE(review): several lines/braces are missing from this extract; the
// comments describe only the visible statements.
12323 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
12324 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12325 VkDeviceSize maxBytesToMove,
12326 uint32_t maxAllocationsToMove)
12328 if(m_Blocks.empty())
12341 size_t srcBlockMinIndex = 0;
// Start from the very last allocation of the very last block.
12354 size_t srcBlockIndex = m_Blocks.size() - 1;
12355 size_t srcAllocIndex = SIZE_MAX;
// Skip backwards over empty source blocks; SIZE_MAX forces re-seeding of
// srcAllocIndex from the newly selected block.
12361 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
12363 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
12366 if(srcBlockIndex == srcBlockMinIndex)
12373 srcAllocIndex = SIZE_MAX;
12378 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
12382 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
12383 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
12385 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
12386 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
12387 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
12388 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try every destination block up to and including the source block itself.
12391 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
12393 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
12394 VmaAllocationRequest dstAllocRequest;
12395 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
12396 m_CurrentFrameIndex,
12397 m_pBlockVector->GetFrameInUseCount(),
12398 m_pBlockVector->GetBufferImageGranularity(),
12405 &dstAllocRequest) &&
// MoveMakesSense() rejects moves that would not improve packing.
12407 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
12409 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Respect caller-imposed budgets before committing the move.
12412 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
12413 (m_BytesMoved + size > maxBytesToMove))
12418 VmaDefragmentationMove move;
12419 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
12420 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
12421 move.srcOffset = srcOffset;
12422 move.dstOffset = dstAllocRequest.offset;
12424 moves.push_back(move);
// Commit: allocate at destination, free at source, repoint the handle.
12426 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
12431 allocInfo.m_hAllocation);
12432 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
12434 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
12436 if(allocInfo.m_pChanged != VMA_NULL)
12438 *allocInfo.m_pChanged = VK_TRUE;
12441 ++m_AllocationsMoved;
12442 m_BytesMoved += size;
12444 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the next candidate (previous allocation / previous block).
12452 if(srcAllocIndex > 0)
12458 if(srcBlockIndex > 0)
12461 srcAllocIndex = SIZE_MAX;
// Counts how many tracked blocks contain at least one non-movable allocation
// (per the m_HasNonMovableAllocations flag computed during Defragment()).
12471 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount()
const 12474 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12476 if(m_Blocks[i]->m_HasNonMovableAllocations)
// Entry point of the generic algorithm. Prepares per-block allocation lists
// (either everything, when m_AllAllocations is set, or only what was added
// via AddAllocation()), sorts blocks by "move destination" preference, then
// runs up to two DefragmentRound() passes within the given budgets.
12484 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
12485 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12486 VkDeviceSize maxBytesToMove,
12487 uint32_t maxAllocationsToMove)
// Nothing to do if no allocations were registered and "all" mode is off.
12489 if(!m_AllAllocations && m_AllocationCount == 0)
12494 const size_t blockCount = m_Blocks.size();
12495 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12497 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
// "All allocations" mode: harvest every non-free suballocation directly
// from the block's generic metadata (no per-allocation pChanged tracking).
12499 if(m_AllAllocations)
12501 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
12502 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
12503 it != pMetadata->m_Suballocations.end();
12506 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
12508 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
12509 pBlockInfo->m_Allocations.push_back(allocInfo);
12514 pBlockInfo->CalcHasNonMovableAllocations();
// Moving from high offsets first improves packing within a block.
12518 pBlockInfo->SortAllocationsByOffsetDescending();
12524 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Two rounds are enough in practice; stop early on any non-success.
12527 const uint32_t roundCount = 2;
12530 VkResult result = VK_SUCCESS;
12531 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
12533 result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
// Heuristic: a move is worthwhile when it goes to an earlier block, or —
// within the same block — to a lower offset. Moves to a later block (or to
// a higher offset in the same block) are rejected.
12539 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
12540 size_t dstBlockIndex, VkDeviceSize dstOffset,
12541 size_t srcBlockIndex, VkDeviceSize srcOffset)
12543 if(dstBlockIndex < srcBlockIndex)
12547 if(dstBlockIndex > srcBlockIndex)
12551 if(dstOffset < srcOffset)
// Fast (speed-oriented) defragmentation algorithm: compacts whole blocks by
// streaming suballocations towards the front. Requires VMA_DEBUG_MARGIN == 0
// because it rewrites metadata wholesale and cannot preserve debug margins.
12561 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
12563 VmaBlockVector* pBlockVector,
12564 uint32_t currentFrameIndex,
12565 bool overlappingMoveSupported) :
12566 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12567 m_OverlappingMoveSupported(overlappingMoveSupported),
12568 m_AllocationCount(0),
12569 m_AllAllocations(false),
12571 m_AllocationsMoved(0),
12572 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
12574 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
// Destructor — no explicit cleanup visible in this extract.
12578 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
// Single-pass compaction. Blocks are processed in ascending free-size order;
// every suballocation is either (a) re-placed into a previously registered
// free gap, (b) slid down within its own block, or (c) appended to the
// current destination block. Each relocation is appended to `moves` so the
// caller can copy the actual bytes afterwards.
// Only works on this vector's full contents — the assert below requires
// "all allocations" mode (or an equivalent complete registration).
// NOTE(review): several lines/braces are missing from this extract; the
// comments describe only the visible statements.
12582 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
12583 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12584 VkDeviceSize maxBytesToMove,
12585 uint32_t maxAllocationsToMove)
12587 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
12589 const size_t blockCount = m_pBlockVector->GetBlockCount();
12590 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
// Strips FREE entries out of every block's metadata so only real
// allocations remain (restored by PostprocessMetadata at the end).
12595 PreprocessMetadata();
// Sort block indices so the fullest blocks (least free space) come first.
12599 m_BlockInfos.resize(blockCount);
12600 for(
size_t i = 0; i < blockCount; ++i)
12602 m_BlockInfos[i].origBlockIndex = i;
12605 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
12606 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
12607 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
// Tracks gaps left behind in already-processed blocks for reuse.
12612 FreeSpaceDatabase freeSpaceDb;
// Destination cursor: block index + running offset of the packed front.
12614 size_t dstBlockInfoIndex = 0;
12615 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
12616 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
12617 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
12618 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
12619 VkDeviceSize dstOffset = 0;
12622 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
12624 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
12625 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
12626 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
12627 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
12628 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
12630 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
12631 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
12632 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
// Stop the whole pass when a budget would be exceeded.
12633 if(m_AllocationsMoved == maxAllocationsToMove ||
12634 m_BytesMoved + srcAllocSize > maxBytesToMove)
12639 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
// Case (a): a previously registered free gap can host this allocation.
12642 size_t freeSpaceInfoIndex;
12643 VkDeviceSize dstAllocOffset;
12644 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
12645 freeSpaceInfoIndex, dstAllocOffset))
12647 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
12648 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
12649 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
12650 VkDeviceSize freeSpaceBlockSize = pFreeSpaceMetadata->GetSize();
// Same-block move: only update the offset in place.
12653 if(freeSpaceInfoIndex == srcBlockInfoIndex)
12655 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
12659 VmaSuballocation suballoc = *srcSuballocIt;
12660 suballoc.offset = dstAllocOffset;
12661 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
12662 m_BytesMoved += srcAllocSize;
12663 ++m_AllocationsMoved;
12665 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
12667 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
12668 srcSuballocIt = nextSuballocIt;
12670 InsertSuballoc(pFreeSpaceMetadata, suballoc);
12672 VmaDefragmentationMove move = {
12673 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
12674 srcAllocOffset, dstAllocOffset,
12676 moves.push_back(move);
// Cross-block move into an earlier block's gap.
12683 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
12685 VmaSuballocation suballoc = *srcSuballocIt;
12686 suballoc.offset = dstAllocOffset;
12687 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
12688 m_BytesMoved += srcAllocSize;
12689 ++m_AllocationsMoved;
12691 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
12693 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
12694 srcSuballocIt = nextSuballocIt;
12696 InsertSuballoc(pFreeSpaceMetadata, suballoc);
12698 VmaDefragmentationMove move = {
12699 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
12700 srcAllocOffset, dstAllocOffset,
12702 moves.push_back(move);
// Cases (b)/(c): place at the current packed-front cursor.
12707 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
// If it does not fit in the current destination block, register the
// tail as free space and advance to the next destination block.
12710 while(dstBlockInfoIndex < srcBlockInfoIndex &&
12711 dstAllocOffset + srcAllocSize > dstBlockSize)
12714 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
12716 ++dstBlockInfoIndex;
12717 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
12718 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
12719 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
12720 dstBlockSize = pDstMetadata->GetSize();
12722 dstAllocOffset = 0;
// Same-block slide: source and destination are the same block.
12726 if(dstBlockInfoIndex == srcBlockInfoIndex)
12728 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
12730 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
12732 bool skipOver = overlap;
12733 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
// Heuristic: skip when the gain (<1/64 of the size) isn't worth
// an overlapping copy.
12737 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
// Skipped: leave in place, record the preceding gap as free space.
12742 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
12744 dstOffset = srcAllocOffset + srcAllocSize;
// Moved within the block: just rewrite the offset.
12750 srcSuballocIt->offset = dstAllocOffset;
12751 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
12752 dstOffset = dstAllocOffset + srcAllocSize;
12753 m_BytesMoved += srcAllocSize;
12754 ++m_AllocationsMoved;
12756 VmaDefragmentationMove move = {
12757 srcOrigBlockIndex, dstOrigBlockIndex,
12758 srcAllocOffset, dstAllocOffset,
12760 moves.push_back(move);
// Cross-block append to the destination block's packed front.
12768 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
12769 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
12771 VmaSuballocation suballoc = *srcSuballocIt;
12772 suballoc.offset = dstAllocOffset;
12773 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
12774 dstOffset = dstAllocOffset + srcAllocSize;
12775 m_BytesMoved += srcAllocSize;
12776 ++m_AllocationsMoved;
12778 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
12780 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
12781 srcSuballocIt = nextSuballocIt;
12783 pDstMetadata->m_Suballocations.push_back(suballoc);
12785 VmaDefragmentationMove move = {
12786 srcOrigBlockIndex, dstOrigBlockIndex,
12787 srcAllocOffset, dstAllocOffset,
12789 moves.push_back(move);
12795 m_BlockInfos.clear();
// Rebuild FREE entries and the by-size index invalidated by preprocessing.
12797 PostprocessMetadata();
// Temporarily strips all FREE suballocation entries (and the by-size index)
// from every block's generic metadata, so Defragment() can treat each block's
// suballocation list as a plain sequence of real allocations.
// PostprocessMetadata() restores a consistent state afterwards.
12802 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
12804 const size_t blockCount = m_pBlockVector->GetBlockCount();
12805 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12807 VmaBlockMetadata_Generic*
const pMetadata =
12808 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
// Reset free-space bookkeeping; it is recomputed in postprocessing.
12809 pMetadata->m_FreeCount = 0;
12810 pMetadata->m_SumFreeSize = pMetadata->GetSize();
12811 pMetadata->m_FreeSuballocationsBySize.clear();
12812 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
12813 it != pMetadata->m_Suballocations.end(); )
12815 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
12817 VmaSuballocationList::iterator nextIt = it;
12819 pMetadata->m_Suballocations.erase(it);
// Rebuilds consistent metadata after the fast pass: walks each block's
// (now allocation-only, offset-ordered) suballocation list, re-inserting a
// FREE entry for every gap and for the tail, recomputing m_FreeCount /
// m_SumFreeSize, and re-populating + re-sorting the by-size free list.
12830 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
12832 const size_t blockCount = m_pBlockVector->GetBlockCount();
12833 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12835 VmaBlockMetadata_Generic*
const pMetadata =
12836 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
12837 const VkDeviceSize blockSize = pMetadata->GetSize();
// Fully empty block: becomes one whole-block FREE suballocation.
12840 if(pMetadata->m_Suballocations.empty())
12842 pMetadata->m_FreeCount = 1;
12844 VmaSuballocation suballoc = {
12848 VMA_SUBALLOCATION_TYPE_FREE };
12849 pMetadata->m_Suballocations.push_back(suballoc);
12850 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
// Non-empty block: scan allocations in offset order, inserting FREE
// entries into the gaps between them.
12855 VkDeviceSize offset = 0;
12856 VmaSuballocationList::iterator it;
12857 for(it = pMetadata->m_Suballocations.begin();
12858 it != pMetadata->m_Suballocations.end();
12861 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
12862 VMA_ASSERT(it->offset >= offset);
// Gap before this allocation → preceding FREE suballocation.
12865 if(it->offset > offset)
12867 ++pMetadata->m_FreeCount;
12868 const VkDeviceSize freeSize = it->offset - offset;
12869 VmaSuballocation suballoc = {
12873 VMA_SUBALLOCATION_TYPE_FREE };
12874 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
12875 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
12877 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
12881 pMetadata->m_SumFreeSize -= it->size;
12882 offset = it->offset + it->size;
// Trailing gap at the end of the block → trailing FREE suballocation.
12886 if(offset < blockSize)
12888 ++pMetadata->m_FreeCount;
12889 const VkDeviceSize freeSize = blockSize - offset;
12890 VmaSuballocation suballoc = {
12894 VMA_SUBALLOCATION_TYPE_FREE };
12895 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
12896 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
12897 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
12899 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
// Restore the sorted-by-size invariant of the free list.
12904 pMetadata->m_FreeSuballocationsBySize.begin(),
12905 pMetadata->m_FreeSuballocationsBySize.end(),
12906 VmaSuballocationItemSizeLess());
12909 VMA_HEAVY_ASSERT(pMetadata->Validate());
// Inserts `suballoc` into the block's suballocation list keeping it sorted
// by offset (linear scan to the first entry not below suballoc.offset).
12913 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc)
12916 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
12917 while(it != pMetadata->m_Suballocations.end())
12919 if(it->offset < suballoc.offset)
12924 pMetadata->m_Suballocations.insert(it, suballoc);
// Per-block-vector defragmentation context. Holds the (custom or default)
// pool association, the allocations explicitly registered for this vector,
// and — once Begin() runs — the chosen algorithm instance (m_pAlgorithm).
12930 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
12933 VmaBlockVector* pBlockVector,
12934 uint32_t currFrameIndex,
12935 uint32_t algorithmFlags) :
12937 mutexLocked(false),
12938 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
12939 m_hAllocator(hAllocator),
12940 m_hCustomPool(hCustomPool),
12941 m_pBlockVector(pBlockVector),
12942 m_CurrFrameIndex(currFrameIndex),
12943 m_AlgorithmFlags(algorithmFlags),
12944 m_pAlgorithm(VMA_NULL),
12945 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
12946 m_AllAllocations(false)
// Destructor: releases the algorithm instance created in Begin().
12950 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
12952 vma_delete(m_hAllocator, m_pAlgorithm);
// Queues one allocation (plus its optional "was moved" output flag) for the
// algorithm created later in Begin().
12955 void VmaBlockVectorDefragmentationContext::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
12957 AllocInfo info = { hAlloc, pChanged };
12958 m_Allocations.push_back(info);
// Instantiates the defragmentation algorithm for this vector and feeds it
// the registered allocations. The fast algorithm is selected only when every
// allocation of the vector participates, debug margins are off, and no
// buffer/image-granularity conflict is possible; otherwise the generic
// algorithm is used.
12961 void VmaBlockVectorDefragmentationContext::Begin(
bool overlappingMoveSupported)
12963 const bool allAllocations = m_AllAllocations ||
12964 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
12976 if(VMA_DEBUG_MARGIN == 0 &&
12978 !m_pBlockVector->IsBufferImageGranularityConflictPossible())
12980 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
12981 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
12985 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
12986 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
// Either hand everything over wholesale, or the explicit allocation list.
12991 m_pAlgorithm->AddAll();
12995 for(
size_t i = 0, count = m_Allocations.size(); i < count; ++i)
12997 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
// Top-level defragmentation context spanning all default pools (one slot per
// memory type, zero-initialized below) plus any number of custom pools.
13005 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
13007 uint32_t currFrameIndex,
13010 m_hAllocator(hAllocator),
13011 m_CurrFrameIndex(currFrameIndex),
13014 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
// Default-pool slots start empty; they are created lazily in AddAllocations.
13016 memset(m_DefaultPoolContexts, 0,
sizeof(m_DefaultPoolContexts));
// Tears down all per-vector contexts: calls DefragmentationEnd() on each
// (custom-pool contexts first, then the lazily created default-pool slots,
// which may be null) and frees the context objects, in reverse order.
13019 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
13021 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13023 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
13024 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13025 vma_delete(m_hAllocator, pBlockVectorCtx);
13027 for(
size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
13029 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
13030 if(pBlockVectorCtx)
13032 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13033 vma_delete(m_hAllocator, pBlockVectorCtx);
// Registers whole custom pools for defragmentation. Pools using a non-default
// block algorithm (GetAlgorithm() != 0) are skipped. An existing context for
// the same pool is reused; otherwise a new one is created and every
// allocation of the pool is marked for processing via AddAll().
13038 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount,
VmaPool* pPools)
13040 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13042 VmaPool pool = pPools[poolIndex];
13045 if(pool->m_BlockVector.GetAlgorithm() == 0)
13047 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
// Reuse a context previously created for this pool, if any.
13049 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13051 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
13053 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13058 if(!pBlockVectorDefragCtx)
13060 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13063 &pool->m_BlockVector,
13066 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
13069 pBlockVectorDefragCtx->AddAll();
// Registers individual allocations for defragmentation. Only block-type,
// not-lost allocations qualify. Each allocation is routed to the context of
// its custom pool (created/reused like in AddPools) or to the lazily created
// context of its memory type's default pool. pAllocationsChanged, when
// given, is a parallel array that will receive VK_TRUE per moved allocation.
13074 void VmaDefragmentationContext_T::AddAllocations(
13075 uint32_t allocationCount,
13077 VkBool32* pAllocationsChanged)
13080 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
13083 VMA_ASSERT(hAlloc);
// Dedicated allocations and lost allocations are not defragmentable.
13085 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
13087 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
13089 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
13091 const VmaPool hAllocPool = hAlloc->GetPool();
// Allocation belongs to a custom pool.
13093 if(hAllocPool != VK_NULL_HANDLE)
// Pools with a non-default algorithm are not defragmented.
13096 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
13098 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13100 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
13102 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13106 if(!pBlockVectorDefragCtx)
13108 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13111 &hAllocPool->m_BlockVector,
13114 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
// Allocation belongs to a default pool: one context slot per memory type.
13121 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
13122 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
13123 if(!pBlockVectorDefragCtx)
13125 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13128 m_hAllocator->m_pBlockVectors[memTypeIndex],
13131 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
13135 if(pBlockVectorDefragCtx)
13137 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
13138 &pAllocationsChanged[allocIndex] : VMA_NULL;
13139 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
// Runs defragmentation on every registered per-vector context — the default
// pools first, then the custom pools — sharing the CPU/GPU move budgets
// across all of them. Without a command buffer the GPU path is disabled by
// zeroing its budgets. The first non-success per-vector result is latched
// into `res` and stops further processing.
13145 VkResult VmaDefragmentationContext_T::Defragment(
13146 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
13147 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
// No command buffer → GPU-side moves are impossible; zero their budget.
13155 if(commandBuffer == VK_NULL_HANDLE)
13157 maxGpuBytesToMove = 0;
13158 maxGpuAllocationsToMove = 0;
13161 VkResult res = VK_SUCCESS;
// Pass 1: default pools (one per memory type), created lazily earlier.
13164 for(uint32_t memTypeIndex = 0;
13165 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
13168 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
13169 if(pBlockVectorCtx)
13171 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
13172 pBlockVectorCtx->GetBlockVector()->Defragment(
13175 maxCpuBytesToMove, maxCpuAllocationsToMove,
13176 maxGpuBytesToMove, maxGpuAllocationsToMove,
13178 if(pBlockVectorCtx->res != VK_SUCCESS)
13180 res = pBlockVectorCtx->res;
// Pass 2: custom-pool contexts.
13186 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
13187 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
13190 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
13191 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
13192 pBlockVectorCtx->GetBlockVector()->Defragment(
13195 maxCpuBytesToMove, maxCpuAllocationsToMove,
13196 maxGpuBytesToMove, maxGpuAllocationsToMove,
13198 if(pBlockVectorCtx->res != VK_SUCCESS)
13200 res = pBlockVectorCtx->res;
// VmaRecorder: writes a CSV log of allocator calls (Windows-only section —
// it uses QueryPerformanceCounter and fopen_s).
// NOTE(review): the extract drops the Init(...) signature line — the code
// from "m_UseMutex = useMutex;" on belongs to an initialization routine that
// takes the recording settings and a useMutex flag; confirm against the
// original header.
13210 #if VMA_RECORDING_ENABLED 13212 VmaRecorder::VmaRecorder() :
13217 m_StartCounter(INT64_MAX)
13223 m_UseMutex = useMutex;
13224 m_Flags = settings.
flags;
// High-resolution timer baseline used by GetBasicParams for timestamps.
13226 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
13227 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
13230 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
13233 return VK_ERROR_INITIALIZATION_FAILED;
// CSV header: file signature line plus format version "1,4".
13237 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
13238 fprintf(m_File,
"%s\n",
"1,4");
// Destructor: closes the log file if one was opened.
13243 VmaRecorder::~VmaRecorder()
13245 if(m_File != VMA_NULL)
// Each Record* method follows the same pattern: capture thread id + timestamp
// (GetBasicParams), take the file mutex, and append one CSV line.
13251 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
13253 CallParams callParams;
13254 GetBasicParams(callParams);
13256 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13257 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
13261 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
13263 CallParams callParams;
13264 GetBasicParams(callParams);
13266 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13267 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// RecordCreatePool — NOTE(review): its signature line is missing from this
// extract; the body below logs the pool create-info fields and handle.
13273 CallParams callParams;
13274 GetBasicParams(callParams);
13276 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13277 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
13288 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
13290 CallParams callParams;
13291 GetBasicParams(callParams);
13293 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13294 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaAllocateMemory call: memory requirements, the allocation
// create-info (flags/usage decoded via UserDataString), and the result.
13299 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
13300 const VkMemoryRequirements& vkMemReq,
13304 CallParams callParams;
13305 GetBasicParams(callParams);
13307 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13308 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13309 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13311 vkMemReq.alignment,
13312 vkMemReq.memoryTypeBits,
13320 userDataStr.GetString());
// Same as above, plus the dedicated-allocation hints vkGetBufferMemory-
// Requirements2 reported for the buffer.
13324 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
13325 const VkMemoryRequirements& vkMemReq,
13326 bool requiresDedicatedAllocation,
13327 bool prefersDedicatedAllocation,
13331 CallParams callParams;
13332 GetBasicParams(callParams);
13334 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13335 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13336 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13338 vkMemReq.alignment,
13339 vkMemReq.memoryTypeBits,
13340 requiresDedicatedAllocation ? 1 : 0,
13341 prefersDedicatedAllocation ? 1 : 0,
13349 userDataStr.GetString());
// Image variant of the previous entry.
13353 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
13354 const VkMemoryRequirements& vkMemReq,
13355 bool requiresDedicatedAllocation,
13356 bool prefersDedicatedAllocation,
13360 CallParams callParams;
13361 GetBasicParams(callParams);
13363 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13364 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13365 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13367 vkMemReq.alignment,
13368 vkMemReq.memoryTypeBits,
13369 requiresDedicatedAllocation ? 1 : 0,
13370 prefersDedicatedAllocation ? 1 : 0,
13378 userDataStr.GetString());
// Simple one-line log entries: each records the call name plus the
// allocation handle (and extra arguments where applicable), using the
// standard GetBasicParams + file-mutex pattern.
13382 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
13385 CallParams callParams;
13386 GetBasicParams(callParams);
13388 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13389 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs allocation handle and its requested new size.
13394 void VmaRecorder::RecordResizeAllocation(
13395 uint32_t frameIndex,
13397 VkDeviceSize newSize)
13399 CallParams callParams;
13400 GetBasicParams(callParams);
13402 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13403 fprintf(m_File,
"%u,%.3f,%u,vmaResizeAllocation,%p,%llu\n", callParams.threadId, callParams.time, frameIndex,
13404 allocation, newSize);
// Logs the new user data (stringified via UserDataString).
13408 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
13410 const void* pUserData)
13412 CallParams callParams;
13413 GetBasicParams(callParams);
13415 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13416 UserDataString userDataStr(
13419 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13421 userDataStr.GetString());
13425 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
13428 CallParams callParams;
13429 GetBasicParams(callParams);
13431 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13432 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
13437 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
13440 CallParams callParams;
13441 GetBasicParams(callParams);
13443 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13444 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
13449 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
13452 CallParams callParams;
13453 GetBasicParams(callParams);
13455 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13456 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Flush/Invalidate also log the affected byte range (offset, size).
13461 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
13462 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13464 CallParams callParams;
13465 GetBasicParams(callParams);
13467 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13468 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
13475 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
13476 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13478 CallParams callParams;
13479 GetBasicParams(callParams);
13481 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13482 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaCreateBuffer call: the VkBufferCreateInfo fields followed by the
// VmaAllocationCreateInfo fields (flags/usage/pool/pUserData).
13489 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
13490 const VkBufferCreateInfo& bufCreateInfo,
13494 CallParams callParams;
13495 GetBasicParams(callParams);
13497 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13498 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13499 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13500 bufCreateInfo.flags,
13501 bufCreateInfo.size,
13502 bufCreateInfo.usage,
13503 bufCreateInfo.sharingMode,
13504 allocCreateInfo.
flags,
13505 allocCreateInfo.
usage,
13509 allocCreateInfo.
pool,
13511 userDataStr.GetString());
// Image analogue: logs the full VkImageCreateInfo (type, format, extent,
// mips, layers, samples, tiling, usage, sharing, layout) plus alloc info.
13515 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
13516 const VkImageCreateInfo& imageCreateInfo,
13520 CallParams callParams;
13521 GetBasicParams(callParams);
13523 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13524 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13525 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13526 imageCreateInfo.flags,
13527 imageCreateInfo.imageType,
13528 imageCreateInfo.format,
13529 imageCreateInfo.extent.width,
13530 imageCreateInfo.extent.height,
13531 imageCreateInfo.extent.depth,
13532 imageCreateInfo.mipLevels,
13533 imageCreateInfo.arrayLayers,
13534 imageCreateInfo.samples,
13535 imageCreateInfo.tiling,
13536 imageCreateInfo.usage,
13537 imageCreateInfo.sharingMode,
13538 imageCreateInfo.initialLayout,
13539 allocCreateInfo.
flags,
13540 allocCreateInfo.
usage,
13544 allocCreateInfo.
pool,
13546 userDataStr.GetString());
// Handle-only log entries for destroy/touch/query calls.
13550 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
13553 CallParams callParams;
13554 GetBasicParams(callParams);
13556 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13557 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
13562 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
13565 CallParams callParams;
13566 GetBasicParams(callParams);
13568 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13569 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
13574 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
13577 CallParams callParams;
13578 GetBasicParams(callParams);
13580 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13581 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
13586 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
13589 CallParams callParams;
13590 GetBasicParams(callParams);
13592 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13593 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
13598 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
13601 CallParams callParams;
13602 GetBasicParams(callParams);
13604 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13605 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// UserDataString fragment — NOTE(review): the constructor signature is
// missing from this extract. Visible logic: when user data is present it is
// used as a C string (presumably when the STRING flag is set — confirm),
// otherwise the raw pointer value is formatted into m_PtrStr.
13612 if(pUserData != VMA_NULL)
13616 m_Str = (
const char*)pUserData;
13620 sprintf_s(m_PtrStr,
"%p", pUserData);
// Writes the "Config,Begin" .. "Config,End" section of the recording file:
// physical-device identity and relevant limits, the memory heap/type layout,
// whether VK_KHR_dedicated_allocation was enabled, and the compile-time values
// of the VMA_DEBUG_* / block-size macros. A replayer uses this to reproduce
// the allocator's environment.
13630 void VmaRecorder::WriteConfiguration(
13631 const VkPhysicalDeviceProperties& devProps,
13632 const VkPhysicalDeviceMemoryProperties& memProps,
13633 bool dedicatedAllocationExtensionEnabled)
13635 fprintf(m_File,
"Config,Begin\n");
13637 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
13638 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
13639 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
13640 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
13641 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
13642 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
13644 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
13645 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
13646 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// Dump every memory heap (size, flags) ...
13648 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
13649 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
13651 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
13652 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
// ... and every memory type (heap index, property flags).
13654 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
13655 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
13657 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
13658 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
13661 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
// Compile-time configuration macros, cast to fixed-width types to match the
// %u / %llu format specifiers.
13663 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
13664 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
13665 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
13666 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
13667 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
13668 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
13669 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
13670 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
13671 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
13673 fprintf(m_File,
"Config,End\n");
13676 void VmaRecorder::GetBasicParams(CallParams& outParams)
13678 outParams.threadId = GetCurrentThreadId();
13680 LARGE_INTEGER counter;
13681 QueryPerformanceCounter(&counter);
13682 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
// Flushes the buffered recording file to disk.
// NOTE(review): the function body is elided in this view.
13685 void VmaRecorder::Flush()
// --- VmaAllocator_T constructor (starts mid-initializer-list below) ---
// Initializes allocator state from VmaAllocatorCreateInfo: device handles,
// allocation callbacks, per-memory-type block vectors and dedicated-allocation
// lists, optional per-heap size limits, and (optionally) the call recorder.
// NOTE(review): several original lines are elided here (embedded numbers jump).
13693 #endif // #if VMA_RECORDING_ENABLED 13701 m_hDevice(pCreateInfo->device),
13702 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
13703 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
13704 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
13705 m_PreferredLargeHeapBlockSize(0),
13706 m_PhysicalDevice(pCreateInfo->physicalDevice),
13707 m_CurrentFrameIndex(0),
13708 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
13711 ,m_pRecorder(VMA_NULL)
13714 if(VMA_DEBUG_DETECT_CORRUPTION)
// Corruption detection writes uint32_t markers into the debug margin, so the
// margin must be a multiple of 4 bytes.
13717 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
13722 #if !(VMA_DEDICATED_ALLOCATION) 13725 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
13729 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
13730 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
13731 memset(&m_MemProps, 0,
sizeof(m_MemProps));
13733 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
13734 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// VK_WHOLE_SIZE marks a heap as "no user-imposed limit".
13736 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
13738 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
13749 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
13750 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
13752 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
13753 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
13754 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
13755 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
// Apply user-provided per-heap size limits, also clamping the reported heap
// size so later budgeting sees the limited value.
13762 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
13764 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
13765 if(limit != VK_WHOLE_SIZE)
13767 m_HeapSizeLimit[heapIndex] = limit;
13768 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
13770 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default block vector and one dedicated-allocation list per memory type.
13776 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13778 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
13780 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
13783 preferredBlockSize,
13786 GetBufferImageGranularity(),
13793 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
13800 VkResult res = VK_SUCCESS;
13805 #if VMA_RECORDING_ENABLED 13806 m_pRecorder = vma_new(
this, VmaRecorder)();
13808 if(res != VK_SUCCESS)
13812 m_pRecorder->WriteConfiguration(
13813 m_PhysicalDeviceProperties,
13815 m_UseKhrDedicatedAllocation);
13816 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
// Recording requested but compiled out: fail creation explicitly.
13818 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
13819 return VK_ERROR_FEATURE_NOT_PRESENT;
// Destructor: records allocator destruction (if recording), asserts the user
// destroyed all custom pools, and frees the per-memory-type dedicated-allocation
// lists and block vectors in reverse order.
13826 VmaAllocator_T::~VmaAllocator_T()
13828 #if VMA_RECORDING_ENABLED 13829 if(m_pRecorder != VMA_NULL)
13831 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
13832 vma_delete(
this, m_pRecorder);
// All VmaPool objects must have been destroyed by the user before the allocator.
13836 VMA_ASSERT(m_Pools.empty());
13838 for(
size_t i = GetMemoryTypeCount(); i--; )
13840 vma_delete(
this, m_pDedicatedAllocations[i]);
13841 vma_delete(
this, m_pBlockVectors[i]);
// Fills m_VulkanFunctions in three stages: (1) statically-linked Vulkan entry
// points when VMA_STATIC_VULKAN_FUNCTIONS == 1 (including the *2KHR functions
// fetched via vkGetDeviceProcAddr when dedicated allocation is in use);
// (2) user-supplied overrides from pVulkanFunctions, copied only when non-null;
// (3) asserts that every required pointer ended up non-null.
13845 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
13847 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 13848 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
13849 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
13850 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
13851 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
13852 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
13853 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
13854 m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
13855 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
13856 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
13857 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
13858 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
13859 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
13860 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
13861 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
13862 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
13863 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
13864 m_VulkanFunctions.vkCmdCopyBuffer = &vkCmdCopyBuffer;
// Extension entry points are not statically exported; resolve them per-device.
13865 #if VMA_DEDICATED_ALLOCATION 13866 if(m_UseKhrDedicatedAllocation)
13868 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
13869 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
13870 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
13871 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
13873 #endif // #if VMA_DEDICATED_ALLOCATION 13874 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 13876 #define VMA_COPY_IF_NOT_NULL(funcName) \ 13877 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 13879 if(pVulkanFunctions != VMA_NULL)
13881 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
13882 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
13883 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
13884 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
13885 VMA_COPY_IF_NOT_NULL(vkMapMemory);
13886 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
13887 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
13888 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
13889 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
13890 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
13891 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
13892 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
13893 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
13894 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
13895 VMA_COPY_IF_NOT_NULL(vkCreateImage);
13896 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
13897 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
13898 #if VMA_DEDICATED_ALLOCATION 13899 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
13900 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Final validation: every required function pointer must now be set.
13904 #undef VMA_COPY_IF_NOT_NULL 13908 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
13909 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
13910 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
13911 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
13912 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
13913 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
13914 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
13915 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
13916 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
13917 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
13918 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
13919 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
13920 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
13921 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
13922 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
13923 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
13924 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
13925 #if VMA_DEDICATED_ALLOCATION 13926 if(m_UseKhrDedicatedAllocation)
13928 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
13929 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
13934 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
13936 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
13937 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
13938 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
13939 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates memory of one specific memory type: first decides whether a
// dedicated VkDeviceMemory is preferred (debug macro, caller request, or large
// size vs. the block vector's preferred block size), then tries the block
// vector, falling back to a dedicated allocation on failure.
// NOTE(review): many interior lines (condition bodies, argument lists) are
// elided in this view — embedded line numbers jump.
13942 VkResult VmaAllocator_T::AllocateMemoryOfType(
13944 VkDeviceSize alignment,
13945 bool dedicatedAllocation,
13946 VkBuffer dedicatedBuffer,
13947 VkImage dedicatedImage,
13949 uint32_t memTypeIndex,
13950 VmaSuballocationType suballocType,
13953 VMA_ASSERT(pAllocation != VMA_NULL);
13954 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
13960 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
13965 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
13966 VMA_ASSERT(blockVector);
13968 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
// Heuristic: anything larger than half a block goes dedicated.
13969 bool preferDedicatedMemory =
13970 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
13971 dedicatedAllocation ||
13973 size > preferredBlockSize / 2;
13975 if(preferDedicatedMemory &&
13977 finalCreateInfo.
pool == VK_NULL_HANDLE)
13986 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
13990 return AllocateDedicatedMemory(
// Normal path: sub-allocate from the memory type's block vector.
14004 VkResult res = blockVector->Allocate(
14006 m_CurrentFrameIndex.load(),
14012 if(res == VK_SUCCESS)
14020 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Block allocation failed: retry as a dedicated allocation.
14024 res = AllocateDedicatedMemory(
14030 finalCreateInfo.pUserData,
14034 if(res == VK_SUCCESS)
14037 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
14043 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Creates one dedicated VkDeviceMemory for a single allocation: builds
// VkMemoryAllocateInfo (chaining VkMemoryDedicatedAllocateInfoKHR when the
// extension is in use), allocates, optionally maps persistently, creates the
// VmaAllocation_T object, and registers it in the per-type dedicated list.
// NOTE(review): some interior lines are elided in this view.
14050 VkResult VmaAllocator_T::AllocateDedicatedMemory(
14052 VmaSuballocationType suballocType,
14053 uint32_t memTypeIndex,
14055 bool isUserDataString,
14057 VkBuffer dedicatedBuffer,
14058 VkImage dedicatedImage,
14061 VMA_ASSERT(pAllocation);
14063 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
14064 allocInfo.memoryTypeIndex = memTypeIndex;
14065 allocInfo.allocationSize = size;
// Chain the dedicated-allocation info for exactly one of buffer/image.
14067 #if VMA_DEDICATED_ALLOCATION 14068 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
14069 if(m_UseKhrDedicatedAllocation)
14071 if(dedicatedBuffer != VK_NULL_HANDLE)
14073 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
14074 dedicatedAllocInfo.buffer = dedicatedBuffer;
14075 allocInfo.pNext = &dedicatedAllocInfo;
14077 else if(dedicatedImage != VK_NULL_HANDLE)
14079 dedicatedAllocInfo.image = dedicatedImage;
14080 allocInfo.pNext = &dedicatedAllocInfo;
14083 #endif // #if VMA_DEDICATED_ALLOCATION 14086 VkDeviceMemory hMemory = VK_NULL_HANDLE;
14087 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
14090 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
14094 void* pMappedData = VMA_NULL;
14097 res = (*m_VulkanFunctions.vkMapMemory)(
14106 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
// Mapping failed: release the just-allocated memory before returning.
14107 FreeVulkanMemory(memTypeIndex, size, hMemory);
14112 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
14113 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
14114 (*pAllocation)->SetUserData(
this, pUserData);
14115 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14117 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Register in the sorted per-memory-type dedicated-allocation list.
14122 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14123 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
14124 VMA_ASSERT(pDedicatedAllocations);
14125 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
14128 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries memory requirements for a buffer. With VK_KHR_dedicated_allocation
// enabled, uses vkGetBufferMemoryRequirements2KHR and also returns whether the
// driver requires/prefers a dedicated allocation; otherwise falls back to the
// core function and reports both flags as false.
14133 void VmaAllocator_T::GetBufferMemoryRequirements(
14135 VkMemoryRequirements& memReq,
14136 bool& requiresDedicatedAllocation,
14137 bool& prefersDedicatedAllocation)
const 14139 #if VMA_DEDICATED_ALLOCATION 14140 if(m_UseKhrDedicatedAllocation)
14142 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
14143 memReqInfo.buffer = hBuffer;
14145 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14147 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14148 memReq2.pNext = &memDedicatedReq;
14150 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14152 memReq = memReq2.memoryRequirements;
14153 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14154 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core Vulkan 1.0 query, no dedicated-allocation information.
14157 #endif // #if VMA_DEDICATED_ALLOCATION 14159 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
14160 requiresDedicatedAllocation =
false;
14161 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR (with VkMemoryDedicatedRequirementsKHR)
// when the extension is enabled, otherwise the core query with both
// dedicated-allocation flags reported as false.
14165 void VmaAllocator_T::GetImageMemoryRequirements(
14167 VkMemoryRequirements& memReq,
14168 bool& requiresDedicatedAllocation,
14169 bool& prefersDedicatedAllocation)
const 14171 #if VMA_DEDICATED_ALLOCATION 14172 if(m_UseKhrDedicatedAllocation)
14174 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
14175 memReqInfo.image = hImage;
14177 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14179 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14180 memReq2.pNext = &memDedicatedReq;
14182 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14184 memReq = memReq2.memoryRequirements;
14185 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14186 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core Vulkan 1.0 query, no dedicated-allocation information.
14189 #endif // #if VMA_DEDICATED_ALLOCATION 14191 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
14192 requiresDedicatedAllocation =
false;
14193 prefersDedicatedAllocation =
false;
// Top-level allocation entry: validates mutually-exclusive create flags,
// routes pool allocations to the pool's block vector, otherwise finds a
// suitable memory type and calls AllocateMemoryOfType, retrying with the
// failed type masked out of memoryTypeBits.
// NOTE(review): several interior lines (flag checks, argument lists) are
// elided in this view — embedded line numbers jump.
14197 VkResult VmaAllocator_T::AllocateMemory(
14198 const VkMemoryRequirements& vkMemReq,
14199 bool requiresDedicatedAllocation,
14200 bool prefersDedicatedAllocation,
14201 VkBuffer dedicatedBuffer,
14202 VkImage dedicatedImage,
14204 VmaSuballocationType suballocType,
14207 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
14209 if(vkMemReq.size == 0)
14211 return VK_ERROR_VALIDATION_FAILED_EXT;
// Reject contradictory flag combinations up front.
14216 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
14217 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14222 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
14223 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14225 if(requiresDedicatedAllocation)
14229 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
14230 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14232 if(createInfo.
pool != VK_NULL_HANDLE)
14234 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
14235 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14238 if((createInfo.
pool != VK_NULL_HANDLE) &&
14241 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
14242 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Pool path: allocate from the pool's own block vector, honoring the
// memory type's minimum alignment.
14245 if(createInfo.
pool != VK_NULL_HANDLE)
14247 const VkDeviceSize alignmentForPool = VMA_MAX(
14248 vkMemReq.alignment,
14249 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
14250 return createInfo.
pool->m_BlockVector.Allocate(
14252 m_CurrentFrameIndex.load(),
// Default path: pick a memory type, allocate, and on failure mask the type
// out and try the next candidate.
14262 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
14263 uint32_t memTypeIndex = UINT32_MAX;
14265 if(res == VK_SUCCESS)
14267 VkDeviceSize alignmentForMemType = VMA_MAX(
14268 vkMemReq.alignment,
14269 GetMemoryTypeMinAlignment(memTypeIndex));
14271 res = AllocateMemoryOfType(
14273 alignmentForMemType,
14274 requiresDedicatedAllocation || prefersDedicatedAllocation,
14282 if(res == VK_SUCCESS)
// Remove the failed memory type from the candidate set before retrying.
14292 memoryTypeBits &= ~(1u << memTypeIndex);
14295 if(res == VK_SUCCESS)
14297 alignmentForMemType = VMA_MAX(
14298 vkMemReq.alignment,
14299 GetMemoryTypeMinAlignment(memTypeIndex));
14301 res = AllocateMemoryOfType(
14303 alignmentForMemType,
14304 requiresDedicatedAllocation || prefersDedicatedAllocation,
14312 if(res == VK_SUCCESS)
14322 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation: optionally fills it with the "destroyed" debug pattern
// (only if still valid, per TouchAllocation), returns block allocations to
// their owning block vector (custom pool or default), frees dedicated
// allocations, then destroys the VmaAllocation_T object itself.
14333 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
14335 VMA_ASSERT(allocation);
// TouchAllocation also verifies the allocation is not lost before we write
// the debug fill pattern into it.
14337 if(TouchAllocation(allocation))
14339 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14341 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
14344 switch(allocation->GetType())
14346 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14348 VmaBlockVector* pBlockVector = VMA_NULL;
14349 VmaPool hPool = allocation->GetPool();
14350 if(hPool != VK_NULL_HANDLE)
14352 pBlockVector = &hPool->m_BlockVector;
14356 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
14357 pBlockVector = m_pBlockVectors[memTypeIndex];
14359 pBlockVector->Free(allocation);
14362 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14363 FreeDedicatedMemory(allocation);
// Clear user data (may free a copied string) before deleting the object.
14370 allocation->SetUserData(
this, VMA_NULL);
14371 vma_delete(
this, allocation);
// Attempts in-place resize of an allocation. Fails validation for size 0 or a
// lost allocation; no-op success when the size is unchanged; dedicated
// allocations cannot be resized; block allocations delegate to the block's
// metadata and report VK_ERROR_OUT_OF_POOL_MEMORY when the block cannot grow
// the suballocation.
14374 VkResult VmaAllocator_T::ResizeAllocation(
14376 VkDeviceSize newSize)
14378 if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
14380 return VK_ERROR_VALIDATION_FAILED_EXT;
14382 if(newSize == alloc->GetSize())
14387 switch(alloc->GetType())
14389 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14390 return VK_ERROR_FEATURE_NOT_PRESENT;
14391 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14392 if(alloc->GetBlock()->m_pMetadata->ResizeAllocation(alloc, newSize))
14394 alloc->ChangeSize(newSize);
14395 VMA_HEAVY_ASSERT(alloc->GetBlock()->m_pMetadata->Validate());
14400 return VK_ERROR_OUT_OF_POOL_MEMORY;
14404 return VK_ERROR_VALIDATION_FAILED_EXT;
// Aggregates allocator-wide statistics into pStats: initializes all StatInfo
// entries, accumulates from default block vectors, custom pools, and dedicated
// allocations, then post-processes (averages etc.) every entry.
// NOTE(review): some interior lines (loop bodies / init calls) are elided in
// this view — embedded line numbers jump.
14408 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
14411 InitStatInfo(pStats->
total);
14412 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
14414 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Stats from the default per-memory-type block vectors.
14418 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14420 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
14421 VMA_ASSERT(pBlockVector);
14422 pBlockVector->AddStats(pStats);
// Stats from custom pools (read lock protects the pool list).
14427 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
14428 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
14430 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
// Stats from dedicated allocations, folded into total/type/heap entries.
14435 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14437 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14438 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14439 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
14440 VMA_ASSERT(pDedicatedAllocVector);
14441 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
14444 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
14445 VmaAddStatInfo(pStats->
total, allocationStatInfo);
14446 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
14447 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Finalize derived fields (averages) on every StatInfo.
14452 VmaPostprocessCalcStatInfo(pStats->
total);
14453 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
14454 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
14455 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
14456 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// PCI vendor id for AMD (0x1002).
14459 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Starts a defragmentation session: creates the context object, registers the
// requested allocations, and runs Defragment. The context is kept alive only
// when Defragment returns VK_NOT_READY (incremental work pending).
// NOTE(review): parameter list and several argument lines are elided in this view.
14461 VkResult VmaAllocator_T::DefragmentationBegin(
14471 *pContext = vma_new(
this, VmaDefragmentationContext_T)(
14472 this, m_CurrentFrameIndex.load(), info.
flags, pStats);
14475 (*pContext)->AddAllocations(
14478 VkResult res = (*pContext)->Defragment(
// Finished (success or error): destroy the context immediately.
14483 if(res != VK_NOT_READY)
14485 vma_delete(
this, *pContext);
14486 *pContext = VMA_NULL;
// Ends a defragmentation session by destroying the context object.
// NOTE(review): the parameter line and return are elided in this view.
14492 VkResult VmaAllocator_T::DefragmentationEnd(
14495 vma_delete(
this, context);
// Body of VmaAllocator_T::GetAllocationInfo (its signature is elided above this
// view). For allocations that can become lost, it loops on a compare-exchange
// of the last-use frame index: if lost, reports null memory/offset; once the
// frame index is current, reports the real memory/offset/size/userData.
// For ordinary allocations it fills the info directly (touching the frame
// index first when VMA_STATS_STRING_ENABLED).
14501 if(hAllocation->CanBecomeLost())
14507 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
14508 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost allocation: report size and userData but no backing memory.
14511 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
14515 pAllocationInfo->
offset = 0;
14516 pAllocationInfo->
size = hAllocation->GetSize();
14518 pAllocationInfo->
pUserData = hAllocation->GetUserData();
14521 else if(localLastUseFrameIndex == localCurrFrameIndex)
14523 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
14524 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
14525 pAllocationInfo->
offset = hAllocation->GetOffset();
14526 pAllocationInfo->
size = hAllocation->GetSize();
14528 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// CAS loop: stamp the current frame index as "last use", retrying on races.
14533 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
14535 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost path; with stats enabled, still refresh last-use frame index.
14542 #if VMA_STATS_STRING_ENABLED 14543 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
14544 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
14547 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
14548 if(localLastUseFrameIndex == localCurrFrameIndex)
14554 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
14556 localLastUseFrameIndex = localCurrFrameIndex;
14562 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
14563 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
14564 pAllocationInfo->
offset = hAllocation->GetOffset();
14565 pAllocationInfo->
size = hAllocation->GetSize();
14566 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
14567 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks an allocation as used in the current frame. Returns false when the
// allocation has become lost; otherwise updates the last-use frame index via
// a compare-exchange loop and returns true. Mirrors the lost-allocation logic
// in GetAllocationInfo without filling any info struct.
14571 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
14574 if(hAllocation->CanBecomeLost())
14576 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
14577 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
14580 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
14584 else if(localLastUseFrameIndex == localCurrFrameIndex)
// CAS loop: stamp the current frame index, retrying on races.
14590 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
14592 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost allocations: with stats enabled, still refresh the frame index.
14599 #if VMA_STATS_STRING_ENABLED 14600 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
14601 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
14604 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
14605 if(localLastUseFrameIndex == localCurrFrameIndex)
14611 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
14613 localLastUseFrameIndex = localCurrFrameIndex;
// Body of VmaAllocator_T::CreatePool (signature elided above this view):
// validates the create info, computes the preferred block size for the pool's
// memory type, creates the VmaPool_T with its minimum blocks, assigns an id,
// and inserts it into the sorted pool list under the pools write lock.
14625 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
14635 return VK_ERROR_INITIALIZATION_FAILED;
14638 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
14640 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
14642 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
// Creation of the minimum block count failed: roll back the pool object.
14643 if(res != VK_SUCCESS)
14645 vma_delete(
this, *pPool);
14652 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
14653 (*pPool)->SetId(m_NextPoolId++);
14654 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Removes the pool from the sorted pool list (under the write lock) and
// destroys it.
14660 void VmaAllocator_T::DestroyPool(
VmaPool pool)
14664 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
14665 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
14666 VMA_ASSERT(success &&
"Pool not found in Allocator.");
14669 vma_delete(
this, pool);
// Body of GetPoolStats (signature elided in this view): delegates to the
// pool's block vector.
14674 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Atomically publishes the application's current frame index, used by the
// lost-allocation machinery (Touch/GetAllocationInfo).
14677 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
14679 m_CurrentFrameIndex.store(frameIndex);
// Marks eligible allocations in the pool as lost as of the current frame,
// delegating to the pool's block vector. NOTE(review): the hPool parameter
// line is elided in this view.
14682 void VmaAllocator_T::MakePoolAllocationsLost(
14684 size_t* pLostAllocationCount)
14686 hPool->m_BlockVector.MakePoolAllocationsLost(
14687 m_CurrentFrameIndex.load(),
14688 pLostAllocationCount);
// Runs margin-corruption detection on all blocks of one custom pool.
14691 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
14693 return hPool->m_BlockVector.CheckCorruption();
// Runs corruption detection over the default block vectors and all custom
// pools whose memory type is selected by memoryTypeBits. Starts from
// VK_ERROR_FEATURE_NOT_PRESENT (nothing checkable) and upgrades to VK_SUCCESS
// once any vector performs the check. NOTE(review): switch cases other than
// the ones shown are elided in this view.
14696 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
14698 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
14701 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14703 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
14705 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
14706 VMA_ASSERT(pBlockVector);
14707 VkResult localRes = pBlockVector->CheckCorruption();
14710 case VK_ERROR_FEATURE_NOT_PRESENT:
14713 finalRes = VK_SUCCESS;
// Custom pools (read lock protects the pool list).
14723 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
14724 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
14726 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
14728 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
14731 case VK_ERROR_FEATURE_NOT_PRESENT:
14734 finalRes = VK_SUCCESS;
// Creates a placeholder allocation that is permanently in the "lost" state
// (frame index VMA_FRAME_INDEX_LOST, no user-data string).
14746 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
14748 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
14749 (*pAllocation)->InitLost();
// Calls vkAllocateMemory, enforcing the optional per-heap size limit under
// m_HeapSizeLimitMutex (budget is decremented on success, and the allocation
// is refused with VK_ERROR_OUT_OF_DEVICE_MEMORY when it would exceed the
// limit). On success, invokes the user's pfnAllocate device-memory callback.
// NOTE(review): the declaration of `res` and the final return are elided in
// this view.
14752 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
14754 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
14757 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
14759 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
14760 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
14762 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
14763 if(res == VK_SUCCESS)
14765 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
14770 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// No user limit on this heap: allocate directly.
14775 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
14778 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
14780 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
// Frees a VkDeviceMemory: notifies the user's pfnFree callback first (while
// the memory is still valid), calls vkFreeMemory, then returns the size to the
// heap's remaining budget if a per-heap limit is active.
14786 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
14788 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
14790 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
14793 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
14795 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
14796 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
14798 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
14799 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation into host memory. Lost-capable allocations cannot be
// mapped. Block allocations map the whole block (ref-counted) and offset the
// returned pointer; dedicated allocations map their own VkDeviceMemory.
// NOTE(review): some interior lines (return statements/braces) are elided in
// this view.
14803 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
14805 if(hAllocation->CanBecomeLost())
14807 return VK_ERROR_MEMORY_MAP_FAILED;
14810 switch(hAllocation->GetType())
14812 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14814 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
14815 char *pBytes = VMA_NULL;
14816 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
14817 if(res == VK_SUCCESS)
// Return a pointer offset to this allocation within the mapped block,
// and bump the allocation's own map reference count.
14819 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
14820 hAllocation->BlockAllocMap();
14824 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14825 return hAllocation->DedicatedAllocMap(
this, ppData);
14828 return VK_ERROR_MEMORY_MAP_FAILED;
// Body of VmaAllocator_T::Unmap (signature elided above this view): reverses
// Map — decrements the allocation's map count and unmaps the owning block,
// or unmaps the dedicated allocation's memory.
14834 switch(hAllocation->GetType())
14836 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14838 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
14839 hAllocation->BlockAllocUnmap();
14840 pBlock->Unmap(
this, 1);
14843 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14844 hAllocation->DedicatedAllocUnmap(
this);
// Binds a buffer to the allocation's memory. Dedicated allocations bind
// directly with vkBindBufferMemory (offset arguments elided by extraction);
// block allocations delegate to the owning block so the bind is serialized
// against other binds on the same VkDeviceMemory.
14851 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
14853 VkResult res = VK_SUCCESS;
14854 switch(hAllocation->GetType())
14856 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14857 res = GetVulkanFunctions().vkBindBufferMemory(
14860 hAllocation->GetMemory(),
14863 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14865 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
14866 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
14867 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
// Binds an image to the allocation's memory — same structure as
// BindBufferMemory above, with vkBindImageMemory / BindImageMemory.
14876 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
14878 VkResult res = VK_SUCCESS;
14879 switch(hAllocation->GetType())
14881 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14882 res = GetVulkanFunctions().vkBindImageMemory(
14885 hAllocation->GetMemory(),
14888 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14890 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
14891 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
14892 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
// Flushes or invalidates a sub-range of the allocation's mapped memory.
// Only acts when the memory type is non-coherent (coherent memory needs no
// explicit flush/invalidate). The requested [offset, offset+size) range is
// expanded to nonCoherentAtomSize alignment as required by the Vulkan spec
// for vkFlush/InvalidateMappedMemoryRanges, then clamped to the allocation
// (dedicated) or owning block (block allocation) bounds.
14901 void VmaAllocator_T::FlushOrInvalidateAllocation(
14903 VkDeviceSize offset, VkDeviceSize size,
14904 VMA_CACHE_OPERATION op)
14906 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
14907 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
14909 const VkDeviceSize allocationSize = hAllocation->GetSize();
14910 VMA_ASSERT(offset <= allocationSize);
14912 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
14914 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
14915 memRange.memory = hAllocation->GetMemory();
14917 switch(hAllocation->GetType())
// Dedicated allocation: range is relative to the VkDeviceMemory itself.
14919 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14920 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
14921 if(size == VK_WHOLE_SIZE)
14923 memRange.size = allocationSize - memRange.offset;
14927 VMA_ASSERT(offset + size <= allocationSize);
// Align the end up, but never past the end of the allocation.
14928 memRange.size = VMA_MIN(
14929 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
14930 allocationSize - memRange.offset);
// Block allocation: compute the range relative to the allocation first,
// then translate by the allocation's offset inside the block and clamp
// to the block size.
14934 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14937 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
14938 if(size == VK_WHOLE_SIZE)
14940 size = allocationSize - offset;
14944 VMA_ASSERT(offset + size <= allocationSize);
14946 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
14949 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
// Allocation offsets inside a block are always atom-aligned, so the
// translated range stays aligned.
14950 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
14951 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
14952 memRange.offset += allocationOffset;
14953 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
// Dispatch to the requested cache operation (enclosing switch elided by
// extraction).
14964 case VMA_CACHE_FLUSH:
14965 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
14967 case VMA_CACHE_INVALIDATE:
14968 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
// Frees a dedicated allocation: removes it from the per-memory-type registry
// of dedicated allocations (sorted vector, write-locked) and releases its
// VkDeviceMemory through FreeVulkanMemory.
14977 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
14979 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
14981 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
14983 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14984 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
14985 VMA_ASSERT(pDedicatedAllocations);
14986 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
14987 VMA_ASSERT(success);
14990 VkDeviceMemory hMemory = allocation->GetMemory();
15002 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
15004 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Debug helper: fills the allocation's memory with a byte pattern (used to
// detect use of uninitialized/freed memory). Only possible for host-visible,
// non-lost allocations; the memory is mapped, memset, flushed, and unmapped.
15007 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
15009 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
15010 !hAllocation->CanBecomeLost() &&
15011 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15013 void* pData = VMA_NULL;
15014 VkResult res = Map(hAllocation, &pData);
15015 if(res == VK_SUCCESS)
15017 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
// Flush so the pattern is visible even on non-coherent memory.
15018 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
15019 Unmap(hAllocation);
15023 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
// Writes a detailed JSON map of all memory owned by the allocator, in three
// sections: "DedicatedAllocations" (per memory type), "DefaultPools" (the
// default block vectors per memory type), and "Pools" (user-created custom
// pools). Sections and per-type objects are emitted lazily — only when there
// is content — hence the *Started flags.
15028 #if VMA_STATS_STRING_ENABLED 15030 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
15032 bool dedicatedAllocationsStarted =
false;
15033 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
// Read lock is enough: we only enumerate, never modify.
15035 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15036 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
15037 VMA_ASSERT(pDedicatedAllocVector);
15038 if(pDedicatedAllocVector->empty() ==
false)
// Open the "DedicatedAllocations" object on first non-empty type.
15040 if(dedicatedAllocationsStarted ==
false)
15042 dedicatedAllocationsStarted =
true;
15043 json.WriteString(
"DedicatedAllocations");
15044 json.BeginObject();
15047 json.BeginString(
"Type ");
15048 json.ContinueString(memTypeIndex);
15053 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
15055 json.BeginObject(
true);
15057 hAlloc->PrintParameters(json);
15064 if(dedicatedAllocationsStarted)
// Second section: default per-memory-type block vectors.
15070 bool allocationsStarted =
false;
15071 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15073 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
15075 if(allocationsStarted ==
false)
15077 allocationsStarted =
true;
15078 json.WriteString(
"DefaultPools");
15079 json.BeginObject();
15082 json.BeginString(
"Type ");
15083 json.ContinueString(memTypeIndex);
15086 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
15089 if(allocationsStarted)
// Third section: user-created custom pools, keyed by pool id.
15097 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15098 const size_t poolCount = m_Pools.size();
15101 json.WriteString(
"Pools");
15102 json.BeginObject();
15103 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
15105 json.BeginString();
15106 json.ContinueString(m_Pools[poolIndex]->GetId());
15109 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
// ---- Public C API: allocator lifetime and simple getters -------------------
// Function signatures were largely elided by extraction; bodies below are
// fragments of vmaCreateAllocator, vmaDestroyAllocator,
// vmaGetPhysicalDeviceProperties, vmaGetMemoryProperties,
// vmaGetMemoryTypeProperties, vmaSetCurrentFrameIndex and vmaCalculateStats.

// vmaCreateAllocator: validates arguments and delegates to VmaAllocator_T::Init.
15116 #endif // #if VMA_STATS_STRING_ENABLED 15125 VMA_ASSERT(pCreateInfo && pAllocator);
15126 VMA_DEBUG_LOG(
"vmaCreateAllocator");
15128 return (*pAllocator)->Init(pCreateInfo);
// vmaDestroyAllocator: copies the allocation callbacks out first because
// vma_delete destroys the object that owns them.
15134 if(allocator != VK_NULL_HANDLE)
15136 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
15137 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
15138 vma_delete(&allocationCallbacks, allocator);
// vmaGetPhysicalDeviceProperties: returns a pointer to the cached properties.
15144 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
15146 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
15147 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
// vmaGetMemoryProperties: returns a pointer to the cached memory properties.
15152 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
15154 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
15155 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
// vmaGetMemoryTypeProperties: property flags of one memory type by index.
15160 uint32_t memoryTypeIndex,
15161 VkMemoryPropertyFlags* pFlags)
15163 VMA_ASSERT(allocator && pFlags);
15164 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
15165 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
// vmaSetCurrentFrameIndex: VMA_FRAME_INDEX_LOST is a reserved sentinel and
// must never be passed by the user.
15170 uint32_t frameIndex)
15172 VMA_ASSERT(allocator);
15173 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
15175 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15177 allocator->SetCurrentFrameIndex(frameIndex);
// vmaCalculateStats: thin wrapper over VmaAllocator_T::CalculateStats.
15184 VMA_ASSERT(allocator && pStats);
15185 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15186 allocator->CalculateStats(pStats);
// vmaBuildStatsString: serializes allocator statistics into a JSON string.
// Emits a "Total" summary, then per-heap objects ("Heap N": size, flags,
// stats, and the memory types that map to that heap with their property
// flags), and optionally the detailed per-allocation map when detailedMap is
// VK_TRUE. The result is heap-allocated with the allocator's CPU callbacks
// and must be released with vmaFreeStatsString (defined at the end).
15189 #if VMA_STATS_STRING_ENABLED 15193 char** ppStatsString,
15194 VkBool32 detailedMap)
15196 VMA_ASSERT(allocator && ppStatsString);
15197 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15199 VmaStringBuilder sb(allocator);
15201 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
15202 json.BeginObject();
15205 allocator->CalculateStats(&stats);
15207 json.WriteString(
"Total");
15208 VmaPrintStatInfo(json, stats.
total);
// Per-heap section.
15210 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
15212 json.BeginString(
"Heap ");
15213 json.ContinueString(heapIndex);
15215 json.BeginObject();
15217 json.WriteString(
"Size");
15218 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
15220 json.WriteString(
"Flags");
15221 json.BeginArray(
true);
15222 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
15224 json.WriteString(
"DEVICE_LOCAL");
15230 json.WriteString(
"Stats");
15231 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
// Memory types belonging to this heap, with their property flags spelled out.
15234 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
15236 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
15238 json.BeginString(
"Type ");
15239 json.ContinueString(typeIndex);
15242 json.BeginObject();
15244 json.WriteString(
"Flags");
15245 json.BeginArray(
true);
15246 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
15247 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
15249 json.WriteString(
"DEVICE_LOCAL");
15251 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15253 json.WriteString(
"HOST_VISIBLE");
15255 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
15257 json.WriteString(
"HOST_COHERENT");
15259 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
15261 json.WriteString(
"HOST_CACHED");
15263 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
15265 json.WriteString(
"LAZILY_ALLOCATED");
15271 json.WriteString(
"Stats");
15272 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
// Optional detailed per-allocation dump.
15281 if(detailedMap == VK_TRUE)
15283 allocator->PrintDetailedMap(json);
// Copy the builder's buffer into a NUL-terminated string owned by the caller.
15289 const size_t len = sb.GetLength();
15290 char*
const pChars = vma_new_array(allocator,
char, len + 1);
15293 memcpy(pChars, sb.GetData(), len);
15295 pChars[len] =
'\0';
15296 *ppStatsString = pChars;
// vmaFreeStatsString: releases a string produced by vmaBuildStatsString.
// The +1 accounts for the NUL terminator allocated above.
15301 char* pStatsString)
15303 if(pStatsString != VMA_NULL)
15305 VMA_ASSERT(allocator);
15306 size_t len = strlen(pStatsString);
15307 vma_delete_array(allocator, pStatsString, len + 1);
// vmaFindMemoryTypeIndex: selects the best memory type from memoryTypeBits.
// First translates VmaMemoryUsage into required/preferred VkMemoryPropertyFlags
// (switch cases partially elided by extraction), then scans all candidate
// types: a type must contain every required flag; among those, the one
// missing the fewest preferred flags (lowest "cost") wins.
15311 #endif // #if VMA_STATS_STRING_ENABLED 15318 uint32_t memoryTypeBits,
15320 uint32_t* pMemoryTypeIndex)
15322 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15323 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15324 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15331 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
15332 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
15337 preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
// Map usage to flags. NOTE(review): the case labels (VMA_MEMORY_USAGE_*)
// were elided by extraction — the bodies below presumably correspond to
// GPU_ONLY, CPU_ONLY, CPU_TO_GPU and GPU_TO_CPU; confirm against upstream.
15341 switch(pAllocationCreateInfo->
usage)
// On integrated GPUs all memory is host-visible; don't force DEVICE_LOCAL
// preference when host visibility was asked for.
15346 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15348 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15352 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
15355 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15356 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15358 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15362 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15363 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
// Scan candidates; cost = number of preferred flags the type lacks.
15369 *pMemoryTypeIndex = UINT32_MAX;
15370 uint32_t minCost = UINT32_MAX;
15371 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
15372 memTypeIndex < allocator->GetMemoryTypeCount();
15373 ++memTypeIndex, memTypeBit <<= 1)
15376 if((memTypeBit & memoryTypeBits) != 0)
15378 const VkMemoryPropertyFlags currFlags =
15379 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
// All required flags must be present in this type.
15381 if((requiredFlags & ~currFlags) == 0)
15384 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
15386 if(currCost < minCost)
15388 *pMemoryTypeIndex = memTypeIndex;
15393 minCost = currCost;
// No matching type at all => the requested feature set is unsupported.
15398 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
// vmaFindMemoryTypeIndexForBufferInfo: determines the memory type for a
// prospective buffer by creating a temporary VkBuffer, querying its memory
// requirements, delegating to vmaFindMemoryTypeIndex (call line elided by
// extraction), and destroying the temporary buffer again.
15403 const VkBufferCreateInfo* pBufferCreateInfo,
15405 uint32_t* pMemoryTypeIndex)
15407 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15408 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
15409 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15410 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15412 const VkDevice hDev = allocator->m_hDevice;
15413 VkBuffer hBuffer = VK_NULL_HANDLE;
15414 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
15415 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
15416 if(res == VK_SUCCESS)
15418 VkMemoryRequirements memReq = {};
15419 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
15420 hDev, hBuffer, &memReq);
15424 memReq.memoryTypeBits,
15425 pAllocationCreateInfo,
// The temporary buffer is always destroyed, regardless of the lookup result.
15428 allocator->GetVulkanFunctions().vkDestroyBuffer(
15429 hDev, hBuffer, allocator->GetAllocationCallbacks());
// vmaFindMemoryTypeIndexForImageInfo: same pattern with a temporary VkImage.
15436 const VkImageCreateInfo* pImageCreateInfo,
15438 uint32_t* pMemoryTypeIndex)
15440 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15441 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
15442 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15443 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15445 const VkDevice hDev = allocator->m_hDevice;
15446 VkImage hImage = VK_NULL_HANDLE;
15447 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
15448 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
15449 if(res == VK_SUCCESS)
15451 VkMemoryRequirements memReq = {};
15452 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
15453 hDev, hImage, &memReq);
15457 memReq.memoryTypeBits,
15458 pAllocationCreateInfo,
15461 allocator->GetVulkanFunctions().vkDestroyImage(
15462 hDev, hImage, allocator->GetAllocationCallbacks());
// ---- Public C API: custom pools --------------------------------------------
// Fragments of vmaCreatePool, vmaDestroyPool, vmaGetPoolStats,
// vmaMakePoolAllocationsLost and vmaCheckPoolCorruption (signatures elided).
// Each wrapper records the call with the VmaRecorder when recording is on.

// vmaCreatePool: delegates to allocator->CreatePool, then records the call.
15472 VMA_ASSERT(allocator && pCreateInfo && pPool);
15474 VMA_DEBUG_LOG(
"vmaCreatePool");
15476 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15478 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
15480 #if VMA_RECORDING_ENABLED 15481 if(allocator->GetRecorder() != VMA_NULL)
15483 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
// vmaDestroyPool: destroying a null pool is a valid no-op.
15494 VMA_ASSERT(allocator);
15496 if(pool == VK_NULL_HANDLE)
15501 VMA_DEBUG_LOG(
"vmaDestroyPool");
15503 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15505 #if VMA_RECORDING_ENABLED 15506 if(allocator->GetRecorder() != VMA_NULL)
15508 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
15512 allocator->DestroyPool(pool);
// vmaGetPoolStats: thin wrapper over allocator->GetPoolStats.
15520 VMA_ASSERT(allocator && pool && pPoolStats);
15522 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15524 allocator->GetPoolStats(pool, pPoolStats);
// vmaMakePoolAllocationsLost: pLostAllocationCount may be null (optional out).
15530 size_t* pLostAllocationCount)
15532 VMA_ASSERT(allocator && pool);
15534 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15536 #if VMA_RECORDING_ENABLED 15537 if(allocator->GetRecorder() != VMA_NULL)
15539 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
15543 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
// vmaCheckPoolCorruption: validates margin bytes in one pool.
15548 VMA_ASSERT(allocator && pool);
15550 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15552 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
15554 return allocator->CheckPoolCorruption(pool);
// ---- Public C API: memory allocation ---------------------------------------
// Fragments of vmaAllocateMemory, vmaAllocateMemoryForBuffer and
// vmaAllocateMemoryForImage. Common shape: gather VkMemoryRequirements
// (caller-supplied, or queried from the buffer/image together with the
// dedicated-allocation preference), call allocator->AllocateMemory with the
// matching suballocation type, record the call, and optionally fill
// *pAllocationInfo on success.

// vmaAllocateMemory: caller provides the requirements directly.
15559 const VkMemoryRequirements* pVkMemoryRequirements,
15564 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
15566 VMA_DEBUG_LOG(
"vmaAllocateMemory");
15568 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15570 VkResult result = allocator->AllocateMemory(
15571 *pVkMemoryRequirements,
15577 VMA_SUBALLOCATION_TYPE_UNKNOWN,
15580 #if VMA_RECORDING_ENABLED 15581 if(allocator->GetRecorder() != VMA_NULL)
15583 allocator->GetRecorder()->RecordAllocateMemory(
15584 allocator->GetCurrentFrameIndex(),
15585 *pVkMemoryRequirements,
15591 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
15593 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// vmaAllocateMemoryForBuffer: requirements queried from the buffer, including
// whether a dedicated allocation is required/preferred (KHR_dedicated_allocation).
15606 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
15608 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
15610 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15612 VkMemoryRequirements vkMemReq = {};
15613 bool requiresDedicatedAllocation =
false;
15614 bool prefersDedicatedAllocation =
false;
15615 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
15616 requiresDedicatedAllocation,
15617 prefersDedicatedAllocation);
15619 VkResult result = allocator->AllocateMemory(
15621 requiresDedicatedAllocation,
15622 prefersDedicatedAllocation,
15626 VMA_SUBALLOCATION_TYPE_BUFFER,
15629 #if VMA_RECORDING_ENABLED 15630 if(allocator->GetRecorder() != VMA_NULL)
15632 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
15633 allocator->GetCurrentFrameIndex(),
15635 requiresDedicatedAllocation,
15636 prefersDedicatedAllocation,
15642 if(pAllocationInfo && result == VK_SUCCESS)
15644 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// vmaAllocateMemoryForImage: same pattern for images; the image's tiling is
// unknown here, hence the IMAGE_UNKNOWN suballocation type.
15657 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
15659 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
15661 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15663 VkMemoryRequirements vkMemReq = {};
15664 bool requiresDedicatedAllocation =
false;
15665 bool prefersDedicatedAllocation =
false;
15666 allocator->GetImageMemoryRequirements(image, vkMemReq,
15667 requiresDedicatedAllocation, prefersDedicatedAllocation);
15669 VkResult result = allocator->AllocateMemory(
15671 requiresDedicatedAllocation,
15672 prefersDedicatedAllocation,
15676 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
15679 #if VMA_RECORDING_ENABLED 15680 if(allocator->GetRecorder() != VMA_NULL)
15682 allocator->GetRecorder()->RecordAllocateMemoryForImage(
15683 allocator->GetCurrentFrameIndex(),
15685 requiresDedicatedAllocation,
15686 prefersDedicatedAllocation,
15692 if(pAllocationInfo && result == VK_SUCCESS)
15694 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// ---- Public C API: per-allocation operations -------------------------------
// Fragments of vmaFreeMemory, vmaResizeAllocation, vmaGetAllocationInfo,
// vmaTouchAllocation, vmaSetAllocationUserData, vmaCreateLostAllocation,
// vmaMapMemory and vmaUnmapMemory. All take the global debug mutex and
// record the call with the VmaRecorder when recording is enabled.

// vmaFreeMemory: freeing a null allocation is a valid no-op.
15704 VMA_ASSERT(allocator);
15706 if(allocation == VK_NULL_HANDLE)
15711 VMA_DEBUG_LOG(
"vmaFreeMemory");
15713 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15715 #if VMA_RECORDING_ENABLED 15716 if(allocator->GetRecorder() != VMA_NULL)
15718 allocator->GetRecorder()->RecordFreeMemory(
15719 allocator->GetCurrentFrameIndex(),
15724 allocator->FreeMemory(allocation);
// vmaResizeAllocation: attempts in-place resize via allocator->ResizeAllocation.
15730 VkDeviceSize newSize)
15732 VMA_ASSERT(allocator && allocation);
15734 VMA_DEBUG_LOG(
"vmaResizeAllocation");
15736 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15738 #if VMA_RECORDING_ENABLED 15739 if(allocator->GetRecorder() != VMA_NULL)
15741 allocator->GetRecorder()->RecordResizeAllocation(
15742 allocator->GetCurrentFrameIndex(),
15748 return allocator->ResizeAllocation(allocation, newSize);
// vmaGetAllocationInfo: also marks the allocation as used in the current frame.
15756 VMA_ASSERT(allocator && allocation && pAllocationInfo);
15758 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15760 #if VMA_RECORDING_ENABLED 15761 if(allocator->GetRecorder() != VMA_NULL)
15763 allocator->GetRecorder()->RecordGetAllocationInfo(
15764 allocator->GetCurrentFrameIndex(),
15769 allocator->GetAllocationInfo(allocation, pAllocationInfo);
// vmaTouchAllocation: returns whether the allocation is still alive (not lost).
15776 VMA_ASSERT(allocator && allocation);
15778 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15780 #if VMA_RECORDING_ENABLED 15781 if(allocator->GetRecorder() != VMA_NULL)
15783 allocator->GetRecorder()->RecordTouchAllocation(
15784 allocator->GetCurrentFrameIndex(),
15789 return allocator->TouchAllocation(allocation);
// vmaSetAllocationUserData: stores the user pointer, then records the call.
15797 VMA_ASSERT(allocator && allocation);
15799 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15801 allocation->SetUserData(allocator, pUserData);
15803 #if VMA_RECORDING_ENABLED 15804 if(allocator->GetRecorder() != VMA_NULL)
15806 allocator->GetRecorder()->RecordSetAllocationUserData(
15807 allocator->GetCurrentFrameIndex(),
// vmaCreateLostAllocation: produces a dummy allocation already in lost state.
15818 VMA_ASSERT(allocator && pAllocation);
15820 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
15822 allocator->CreateLostAllocation(pAllocation);
15824 #if VMA_RECORDING_ENABLED 15825 if(allocator->GetRecorder() != VMA_NULL)
15827 allocator->GetRecorder()->RecordCreateLostAllocation(
15828 allocator->GetCurrentFrameIndex(),
// vmaMapMemory: wrapper over VmaAllocator_T::Map (see its comments above).
15839 VMA_ASSERT(allocator && allocation && ppData);
15841 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15843 VkResult res = allocator->Map(allocation, ppData);
15845 #if VMA_RECORDING_ENABLED 15846 if(allocator->GetRecorder() != VMA_NULL)
15848 allocator->GetRecorder()->RecordMapMemory(
15849 allocator->GetCurrentFrameIndex(),
// vmaUnmapMemory: wrapper over VmaAllocator_T::Unmap.
15861 VMA_ASSERT(allocator && allocation);
15863 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15865 #if VMA_RECORDING_ENABLED 15866 if(allocator->GetRecorder() != VMA_NULL)
15868 allocator->GetRecorder()->RecordUnmapMemory(
15869 allocator->GetCurrentFrameIndex(),
15874 allocator->Unmap(allocation);
// ---- Public C API: cache control, corruption check, defragmentation, bind --
// Fragments of vmaFlushAllocation, vmaInvalidateAllocation, vmaCheckCorruption,
// vmaDefragment (deprecated), vmaDefragmentationBegin/End, vmaBindBufferMemory
// and vmaBindImageMemory.

// vmaFlushAllocation: flush a mapped sub-range (see FlushOrInvalidateAllocation).
15879 VMA_ASSERT(allocator && allocation);
15881 VMA_DEBUG_LOG(
"vmaFlushAllocation");
15883 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15885 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
15887 #if VMA_RECORDING_ENABLED 15888 if(allocator->GetRecorder() != VMA_NULL)
15890 allocator->GetRecorder()->RecordFlushAllocation(
15891 allocator->GetCurrentFrameIndex(),
15892 allocation, offset, size);
// vmaInvalidateAllocation: counterpart with VMA_CACHE_INVALIDATE.
15899 VMA_ASSERT(allocator && allocation);
15901 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
15903 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15905 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
15907 #if VMA_RECORDING_ENABLED 15908 if(allocator->GetRecorder() != VMA_NULL)
15910 allocator->GetRecorder()->RecordInvalidateAllocation(
15911 allocator->GetCurrentFrameIndex(),
15912 allocation, offset, size);
// vmaCheckCorruption: validates margin bytes across the given memory types.
15919 VMA_ASSERT(allocator);
15921 VMA_DEBUG_LOG(
"vmaCheckCorruption");
15923 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15925 return allocator->CheckCorruption(memoryTypeBits);
// vmaDefragment (deprecated, largely elided): legacy wrapper implemented on
// top of DefragmentationBegin/End.
15931 size_t allocationCount,
15932 VkBool32* pAllocationsChanged,
15942 if(pDefragmentationInfo != VMA_NULL)
15956 if(res == VK_NOT_READY)
// vmaDefragmentationBegin: starts a defragmentation pass, returns a context.
15969 VMA_ASSERT(allocator && pInfo && pContext);
15973 VMA_DEBUG_LOG(
"vmaDefragmentationBegin");
15975 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15977 return allocator->DefragmentationBegin(*pInfo, pStats, pContext);
// vmaDefragmentationEnd: ends a pass; a null context is a valid no-op.
15984 VMA_ASSERT(allocator);
15986 VMA_DEBUG_LOG(
"vmaDefragmentationEnd");
15988 if(context != VK_NULL_HANDLE)
15990 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15992 return allocator->DefragmentationEnd(context);
// vmaBindBufferMemory / vmaBindImageMemory: thin wrappers over the
// VmaAllocator_T bind methods documented earlier.
16005 VMA_ASSERT(allocator && allocation && buffer);
16007 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
16009 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16011 return allocator->BindBufferMemory(allocation, buffer);
16019 VMA_ASSERT(allocator && allocation && image);
16021 VMA_DEBUG_LOG(
"vmaBindImageMemory");
16023 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16025 return allocator->BindImageMemory(allocation, image);
// vmaCreateBuffer: one-call convenience that (1) creates the VkBuffer,
// (2) queries its memory requirements and dedicated-allocation preference,
// (3) allocates memory, (4) binds buffer and memory, and on any failure
// unwinds in reverse order so no partial resources leak.
16030 const VkBufferCreateInfo* pBufferCreateInfo,
16036 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
// A zero-size buffer is rejected up front — it is invalid usage.
16038 if(pBufferCreateInfo->size == 0)
16040 return VK_ERROR_VALIDATION_FAILED_EXT;
16043 VMA_DEBUG_LOG(
"vmaCreateBuffer");
16045 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16047 *pBuffer = VK_NULL_HANDLE;
16048 *pAllocation = VK_NULL_HANDLE;
16051 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
16052 allocator->m_hDevice,
16054 allocator->GetAllocationCallbacks(),
16059 VkMemoryRequirements vkMemReq = {};
16060 bool requiresDedicatedAllocation =
false;
16061 bool prefersDedicatedAllocation =
false;
16062 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
16063 requiresDedicatedAllocation, prefersDedicatedAllocation);
// Sanity checks: the driver-reported alignment must satisfy the device's
// minimum offset alignments for each buffer usage.
16067 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
16069 VMA_ASSERT(vkMemReq.alignment %
16070 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
16072 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
16074 VMA_ASSERT(vkMemReq.alignment %
16075 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
16077 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
16079 VMA_ASSERT(vkMemReq.alignment %
16080 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
16084 res = allocator->AllocateMemory(
16086 requiresDedicatedAllocation,
16087 prefersDedicatedAllocation,
16090 *pAllocationCreateInfo,
16091 VMA_SUBALLOCATION_TYPE_BUFFER,
16094 #if VMA_RECORDING_ENABLED 16095 if(allocator->GetRecorder() != VMA_NULL)
16097 allocator->GetRecorder()->RecordCreateBuffer(
16098 allocator->GetCurrentFrameIndex(),
16099 *pBufferCreateInfo,
16100 *pAllocationCreateInfo,
16108 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
16112 #if VMA_STATS_STRING_ENABLED 16113 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
16115 if(pAllocationInfo != VMA_NULL)
16117 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// Failure unwinding: free the memory first (if allocated), then the buffer.
16122 allocator->FreeMemory(*pAllocation);
16123 *pAllocation = VK_NULL_HANDLE;
16124 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16125 *pBuffer = VK_NULL_HANDLE;
16128 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16129 *pBuffer = VK_NULL_HANDLE;
// vmaDestroyBuffer: destroys the buffer and frees its allocation. Either
// handle may be null independently; passing both null is a valid no-op.
16140 VMA_ASSERT(allocator);
16142 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
16147 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
16149 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16151 #if VMA_RECORDING_ENABLED 16152 if(allocator->GetRecorder() != VMA_NULL)
16154 allocator->GetRecorder()->RecordDestroyBuffer(
16155 allocator->GetCurrentFrameIndex(),
// Destroy the buffer before freeing the memory it was bound to.
16160 if(buffer != VK_NULL_HANDLE)
16162 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
16165 if(allocation != VK_NULL_HANDLE)
16167 allocator->FreeMemory(allocation);
// vmaCreateImage: image counterpart of vmaCreateBuffer — create VkImage,
// allocate memory (suballocation type chosen from tiling so linear and
// optimal images are kept apart per bufferImageGranularity), bind, and
// unwind in reverse order on failure.
16173 const VkImageCreateInfo* pImageCreateInfo,
16179 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
// Degenerate extents / counts are rejected up front as invalid usage.
16181 if(pImageCreateInfo->extent.width == 0 ||
16182 pImageCreateInfo->extent.height == 0 ||
16183 pImageCreateInfo->extent.depth == 0 ||
16184 pImageCreateInfo->mipLevels == 0 ||
16185 pImageCreateInfo->arrayLayers == 0)
16187 return VK_ERROR_VALIDATION_FAILED_EXT;
16190 VMA_DEBUG_LOG(
"vmaCreateImage");
16192 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16194 *pImage = VK_NULL_HANDLE;
16195 *pAllocation = VK_NULL_HANDLE;
16198 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
16199 allocator->m_hDevice,
16201 allocator->GetAllocationCallbacks(),
// Tiling decides the suballocation type (linear vs optimal image).
16205 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
16206 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
16207 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
16210 VkMemoryRequirements vkMemReq = {};
16211 bool requiresDedicatedAllocation =
false;
16212 bool prefersDedicatedAllocation =
false;
16213 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
16214 requiresDedicatedAllocation, prefersDedicatedAllocation);
16216 res = allocator->AllocateMemory(
16218 requiresDedicatedAllocation,
16219 prefersDedicatedAllocation,
16222 *pAllocationCreateInfo,
16226 #if VMA_RECORDING_ENABLED 16227 if(allocator->GetRecorder() != VMA_NULL)
16229 allocator->GetRecorder()->RecordCreateImage(
16230 allocator->GetCurrentFrameIndex(),
16232 *pAllocationCreateInfo,
16240 res = allocator->BindImageMemory(*pAllocation, *pImage);
16244 #if VMA_STATS_STRING_ENABLED 16245 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
16247 if(pAllocationInfo != VMA_NULL)
16249 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// Failure unwinding: free the memory first (if allocated), then the image.
16254 allocator->FreeMemory(*pAllocation);
16255 *pAllocation = VK_NULL_HANDLE;
16256 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
16257 *pImage = VK_NULL_HANDLE;
16260 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
16261 *pImage = VK_NULL_HANDLE;
// vmaDestroyImage: destroys the image and frees its allocation. Either
// handle may be null independently; passing both null is a valid no-op.
16272 VMA_ASSERT(allocator);
16274 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
16279 VMA_DEBUG_LOG(
"vmaDestroyImage");
16281 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16283 #if VMA_RECORDING_ENABLED 16284 if(allocator->GetRecorder() != VMA_NULL)
16286 allocator->GetRecorder()->RecordDestroyImage(
16287 allocator->GetCurrentFrameIndex(),
// Destroy the image before freeing the memory it was bound to.
16292 if(image != VK_NULL_HANDLE)
16294 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
16296 if(allocation != VK_NULL_HANDLE)
16298 allocator->FreeMemory(allocation);
16302 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1727
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:2030
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1785
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side...
Definition: vk_mem_alloc.h:2782
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Deprecated. Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
Definition: vk_mem_alloc.h:1759
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2355
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1739
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1987
Definition: vk_mem_alloc.h:2090
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:2735
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1731
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2455
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1782
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2818
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2244
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1626
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2336
Definition: vk_mem_alloc.h:2067
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:2738
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1720
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2143
Definition: vk_mem_alloc.h:2014
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1794
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2272
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1848
Description of an Allocator to be created.
Definition: vk_mem_alloc.h:1779
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2018
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1920
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1736
VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:2772
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1919
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2822
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1811
VmaStatInfo total
Definition: vk_mem_alloc.h:1929
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2830
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2127
VkDeviceSize maxBytesToMove
Maximum total number of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2813
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1737
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1662
Represents the main object of this library, once initialized.
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1788
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2286
Definition: vk_mem_alloc.h:2280
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:1743
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1855
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2465
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1732
VkResult vmaDefragmentationBegin(VmaAllocator allocator, const VmaDefragmentationInfo2 *pInfo, VmaDefragmentationStats *pStats, VmaDefragmentationContext *pContext)
Begins defragmentation process.
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1757
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2164
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2306
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:2342
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1718
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2289
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VkDeviceSize maxGpuBytesToMove
Maximum total number of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2787
VmaMemoryUsage
Definition: vk_mem_alloc.h:1965
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:2747
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2808
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2826
Definition: vk_mem_alloc.h:2004
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2151
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1735
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
VkResult vmaDefragmentationEnd(VmaAllocator allocator, VmaDefragmentationContext context)
Ends defragmentation process.
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1925
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1668
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:2726
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
Definition: vk_mem_alloc.h:2724
Definition: vk_mem_alloc.h:2111
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:2753
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1689
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1761
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1694
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2828
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2138
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2352
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1728
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1908
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes. Optional.
Definition: vk_mem_alloc.h:2301
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1681
Definition: vk_mem_alloc.h:2276
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:2074
Opaque object that represents a started defragmentation process.
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1921
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1685
Definition: vk_mem_alloc.h:2101
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2292
Definition: vk_mem_alloc.h:2013
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1734
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2133
Definition: vk_mem_alloc.h:2124
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1911
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1730
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2314
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1797
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2345
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2122
VkDeviceSize maxCpuBytesToMove
Maximum total number of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2777
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2157
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1836
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1927
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:2054
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1920
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1741
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1767
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use...
Definition: vk_mem_alloc.h:2723
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:2801
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1683
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1740
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2328
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1733
Definition: vk_mem_alloc.h:2085
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of an Allocator to be created.
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1775
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2479
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1791
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1920
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1917
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2333
Parameters for defragmentation.
Definition: vk_mem_alloc.h:2732
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions...
Definition: vk_mem_alloc.h:2094
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:2460
Definition: vk_mem_alloc.h:2108
Definition: vk_mem_alloc.h:2120
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2824
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1726
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1915
Definition: vk_mem_alloc.h:1970
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2282
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1764
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1913
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1738
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1742
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2041
Definition: vk_mem_alloc.h:2115
Definition: vk_mem_alloc.h:1997
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2474
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1716
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1729
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2261
VkResult vmaResizeAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize newSize)
Tries to resize an allocation in place, if there is enough free memory after it.
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2441
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:2105
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2226
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1921
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
Definition: vk_mem_alloc.h:2080
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1751
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1928
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2339
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1921
struct VmaDefragmentationInfo2 VmaDefragmentationInfo2
Parameters for defragmentation.
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side...
Definition: vk_mem_alloc.h:2792
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2446
uint32_t poolCount
Number of pools in pPools array.
Definition: vk_mem_alloc.h:2756