23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1602 #ifndef VMA_RECORDING_ENABLED 1604 #define VMA_RECORDING_ENABLED 1 1606 #define VMA_RECORDING_ENABLED 0 1611 #define NOMINMAX // For windows.h 1615 #include <vulkan/vulkan.h> 1618 #if VMA_RECORDING_ENABLED 1619 #include <windows.h> 1622 #if !defined(VMA_DEDICATED_ALLOCATION) 1623 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1624 #define VMA_DEDICATED_ALLOCATION 1 1626 #define VMA_DEDICATED_ALLOCATION 0 1644 uint32_t memoryType,
1645 VkDeviceMemory memory,
1650 uint32_t memoryType,
1651 VkDeviceMemory memory,
1724 #if VMA_DEDICATED_ALLOCATION 1725 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1726 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1853 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1861 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1871 uint32_t memoryTypeIndex,
1872 VkMemoryPropertyFlags* pFlags);
1884 uint32_t frameIndex);
1917 #define VMA_STATS_STRING_ENABLED 1 1919 #if VMA_STATS_STRING_ENABLED 1926 char** ppStatsString,
1927 VkBool32 detailedMap);
1931 char* pStatsString);
1933 #endif // #if VMA_STATS_STRING_ENABLED 2165 uint32_t memoryTypeBits,
2167 uint32_t* pMemoryTypeIndex);
2183 const VkBufferCreateInfo* pBufferCreateInfo,
2185 uint32_t* pMemoryTypeIndex);
2201 const VkImageCreateInfo* pImageCreateInfo,
2203 uint32_t* pMemoryTypeIndex);
2375 size_t* pLostAllocationCount);
2474 const VkMemoryRequirements* pVkMemoryRequirements,
2528 VkDeviceSize newSize);
2897 size_t allocationCount,
2898 VkBool32* pAllocationsChanged,
2964 const VkBufferCreateInfo* pBufferCreateInfo,
2989 const VkImageCreateInfo* pImageCreateInfo,
3015 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 3018 #if defined(__cplusplus) && defined(__INTELLISENSE__) 3019 #define VMA_IMPLEMENTATION 3022 #ifdef VMA_IMPLEMENTATION 3023 #undef VMA_IMPLEMENTATION 3045 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 3046 #define VMA_STATIC_VULKAN_FUNCTIONS 1 3058 #if VMA_USE_STL_CONTAINERS 3059 #define VMA_USE_STL_VECTOR 1 3060 #define VMA_USE_STL_UNORDERED_MAP 1 3061 #define VMA_USE_STL_LIST 1 3064 #ifndef VMA_USE_STL_SHARED_MUTEX 3066 #if defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 3067 #define VMA_USE_STL_SHARED_MUTEX 1 3071 #if VMA_USE_STL_VECTOR 3075 #if VMA_USE_STL_UNORDERED_MAP 3076 #include <unordered_map> 3079 #if VMA_USE_STL_LIST 3088 #include <algorithm> 3094 #define VMA_NULL nullptr 3097 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16) 3099 void *aligned_alloc(
size_t alignment,
size_t size)
3102 if(alignment <
sizeof(
void*))
3104 alignment =
sizeof(
void*);
3107 return memalign(alignment, size);
3109 #elif defined(__APPLE__) || defined(__ANDROID__) 3111 void *aligned_alloc(
size_t alignment,
size_t size)
3114 if(alignment <
sizeof(
void*))
3116 alignment =
sizeof(
void*);
3120 if(posix_memalign(&pointer, alignment, size) == 0)
3134 #define VMA_ASSERT(expr) assert(expr) 3136 #define VMA_ASSERT(expr) 3142 #ifndef VMA_HEAVY_ASSERT 3144 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 3146 #define VMA_HEAVY_ASSERT(expr) 3150 #ifndef VMA_ALIGN_OF 3151 #define VMA_ALIGN_OF(type) (__alignof(type)) 3154 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 3156 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 3158 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 3162 #ifndef VMA_SYSTEM_FREE 3164 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 3166 #define VMA_SYSTEM_FREE(ptr) free(ptr) 3171 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 3175 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 3179 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 3183 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 3186 #ifndef VMA_DEBUG_LOG 3187 #define VMA_DEBUG_LOG(format, ...) 3197 #if VMA_STATS_STRING_ENABLED 3198 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
3200 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
3202 static inline void VmaUint64ToStr(
char* outStr,
size_t strLen, uint64_t num)
3204 snprintf(outStr, strLen,
"%llu", static_cast<unsigned long long>(num));
3206 static inline void VmaPtrToStr(
char* outStr,
size_t strLen,
const void* ptr)
3208 snprintf(outStr, strLen,
"%p", ptr);
3216 void Lock() { m_Mutex.lock(); }
3217 void Unlock() { m_Mutex.unlock(); }
3221 #define VMA_MUTEX VmaMutex 3225 #ifndef VMA_RW_MUTEX 3226 #if VMA_USE_STL_SHARED_MUTEX 3228 #include <shared_mutex> 3232 void LockRead() { m_Mutex.lock_shared(); }
3233 void UnlockRead() { m_Mutex.unlock_shared(); }
3234 void LockWrite() { m_Mutex.lock(); }
3235 void UnlockWrite() { m_Mutex.unlock(); }
3237 std::shared_mutex m_Mutex;
3239 #define VMA_RW_MUTEX VmaRWMutex 3240 #elif defined(_WIN32) 3245 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
3246 void LockRead() { AcquireSRWLockShared(&m_Lock); }
3247 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
3248 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
3249 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
3253 #define VMA_RW_MUTEX VmaRWMutex 3259 void LockRead() { m_Mutex.Lock(); }
3260 void UnlockRead() { m_Mutex.Unlock(); }
3261 void LockWrite() { m_Mutex.Lock(); }
3262 void UnlockWrite() { m_Mutex.Unlock(); }
3266 #define VMA_RW_MUTEX VmaRWMutex 3267 #endif // #if VMA_USE_STL_SHARED_MUTEX 3268 #endif // #ifndef VMA_RW_MUTEX 3278 #ifndef VMA_ATOMIC_UINT32 3279 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 3282 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 3287 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 3290 #ifndef VMA_DEBUG_ALIGNMENT 3295 #define VMA_DEBUG_ALIGNMENT (1) 3298 #ifndef VMA_DEBUG_MARGIN 3303 #define VMA_DEBUG_MARGIN (0) 3306 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 3311 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 3314 #ifndef VMA_DEBUG_DETECT_CORRUPTION 3320 #define VMA_DEBUG_DETECT_CORRUPTION (0) 3323 #ifndef VMA_DEBUG_GLOBAL_MUTEX 3328 #define VMA_DEBUG_GLOBAL_MUTEX (0) 3331 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 3336 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 3339 #ifndef VMA_SMALL_HEAP_MAX_SIZE 3340 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 3344 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 3345 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 3349 #ifndef VMA_CLASS_NO_COPY 3350 #define VMA_CLASS_NO_COPY(className) \ 3352 className(const className&) = delete; \ 3353 className& operator=(const className&) = delete; 3356 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
3359 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
3361 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
3362 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
3368 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
3370 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
3371 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in the given 32-bit value.
// Uses Kernighan's method: each iteration clears the lowest set bit,
// so the loop runs once per set bit.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t count = 0;
    while(v != 0)
    {
        v &= v - 1;
        ++count;
    }
    return count;
}
// Rounds `val` up to the nearest multiple of `align`.
// Integer division first, then multiplication, so it is correct for any
// positive align, not only powers of two.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T numBlocks = (val + align - 1) / align;
    return numBlocks * align;
}
// Rounds `val` down to the nearest multiple of `align`.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    // val - val % align == (val / align) * align for integral T.
    return val - (val % align);
}
// Division with mathematical rounding to nearest integer
// (a halfway value rounds up).
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    const T halfDivisor = y / static_cast<T>(2);
    return (x + halfDivisor) / y;
}
// Returns true if `x` is a power of two. Note the classic bit trick also
// reports true for x == 0, matching the original behavior.
template <typename T>
inline bool VmaIsPow2(T x)
{
    const T lowerBitsMask = x - 1;
    return (x & lowerBitsMask) == 0;
}
3418 static inline uint32_t VmaNextPow2(uint32_t v)
3429 static inline uint64_t VmaNextPow2(uint64_t v)
3443 static inline uint32_t VmaPrevPow2(uint32_t v)
3453 static inline uint64_t VmaPrevPow2(uint64_t v)
3465 static inline bool VmaStrIsEmpty(
const char* pStr)
3467 return pStr == VMA_NULL || *pStr ==
'\0';
3470 static const char* VmaAlgorithmToStr(uint32_t algorithm)
3488 template<
typename Iterator,
typename Compare>
3489 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
3491 Iterator centerValue = end; --centerValue;
3492 Iterator insertIndex = beg;
3493 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
3495 if(cmp(*memTypeIndex, *centerValue))
3497 if(insertIndex != memTypeIndex)
3499 VMA_SWAP(*memTypeIndex, *insertIndex);
3504 if(insertIndex != centerValue)
3506 VMA_SWAP(*insertIndex, *centerValue);
3511 template<
typename Iterator,
typename Compare>
3512 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
3516 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
3517 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
3518 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
3522 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 3524 #endif // #ifndef VMA_SORT 3533 static inline bool VmaBlocksOnSamePage(
3534 VkDeviceSize resourceAOffset,
3535 VkDeviceSize resourceASize,
3536 VkDeviceSize resourceBOffset,
3537 VkDeviceSize pageSize)
3539 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3540 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3541 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3542 VkDeviceSize resourceBStart = resourceBOffset;
3543 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3544 return resourceAEndPage == resourceBStartPage;
// Kind of content stored in a suballocation of a device memory block.
// The numeric ordering matters: granularity-conflict checks normalize a pair
// of types so the smaller value comes first before dispatching on it.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,          // Unused region.
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,       // Allocated, content unspecified.
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // Image with tiling not known.
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
3564 static inline bool VmaIsBufferImageGranularityConflict(
3565 VmaSuballocationType suballocType1,
3566 VmaSuballocationType suballocType2)
3568 if(suballocType1 > suballocType2)
3570 VMA_SWAP(suballocType1, suballocType2);
3573 switch(suballocType1)
3575 case VMA_SUBALLOCATION_TYPE_FREE:
3577 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3579 case VMA_SUBALLOCATION_TYPE_BUFFER:
3581 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3582 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3583 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3585 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3586 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3587 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3588 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3590 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3591 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3599 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3601 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3602 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3603 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3605 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3609 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3611 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3612 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3613 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3615 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3626 VMA_CLASS_NO_COPY(VmaMutexLock)
3628 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
3629 m_pMutex(useMutex ? &mutex : VMA_NULL)
3630 {
if(m_pMutex) { m_pMutex->Lock(); } }
3632 {
if(m_pMutex) { m_pMutex->Unlock(); } }
3634 VMA_MUTEX* m_pMutex;
3638 struct VmaMutexLockRead
3640 VMA_CLASS_NO_COPY(VmaMutexLockRead)
3642 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
3643 m_pMutex(useMutex ? &mutex : VMA_NULL)
3644 {
if(m_pMutex) { m_pMutex->LockRead(); } }
3645 ~VmaMutexLockRead() {
if(m_pMutex) { m_pMutex->UnlockRead(); } }
3647 VMA_RW_MUTEX* m_pMutex;
3651 struct VmaMutexLockWrite
3653 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
3655 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex) :
3656 m_pMutex(useMutex ? &mutex : VMA_NULL)
3657 {
if(m_pMutex) { m_pMutex->LockWrite(); } }
3658 ~VmaMutexLockWrite() {
if(m_pMutex) { m_pMutex->UnlockWrite(); } }
3660 VMA_RW_MUTEX* m_pMutex;
3663 #if VMA_DEBUG_GLOBAL_MUTEX 3664 static VMA_MUTEX gDebugGlobalMutex;
3665 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3667 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3671 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
3682 template <
typename CmpLess,
typename IterT,
typename KeyT>
3683 static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end,
const KeyT &key, CmpLess cmp)
3685 size_t down = 0, up = (end - beg);
3688 const size_t mid = (down + up) / 2;
3689 if(cmp(*(beg+mid), key))
3704 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3706 if((pAllocationCallbacks != VMA_NULL) &&
3707 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3709 return (*pAllocationCallbacks->pfnAllocation)(
3710 pAllocationCallbacks->pUserData,
3713 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3717 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3721 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3723 if((pAllocationCallbacks != VMA_NULL) &&
3724 (pAllocationCallbacks->pfnFree != VMA_NULL))
3726 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3730 VMA_SYSTEM_FREE(ptr);
3734 template<
typename T>
3735 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3737 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3740 template<
typename T>
3741 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3743 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
3746 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3748 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3750 template<
typename T>
3751 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
3754 VmaFree(pAllocationCallbacks, ptr);
3757 template<
typename T>
3758 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
3762 for(
size_t i = count; i--; )
3766 VmaFree(pAllocationCallbacks, ptr);
3771 template<
typename T>
3772 class VmaStlAllocator
3775 const VkAllocationCallbacks*
const m_pCallbacks;
3776 typedef T value_type;
3778 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
3779 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
3781 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3782 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
3784 template<
typename U>
3785 bool operator==(
const VmaStlAllocator<U>& rhs)
const 3787 return m_pCallbacks == rhs.m_pCallbacks;
3789 template<
typename U>
3790 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 3792 return m_pCallbacks != rhs.m_pCallbacks;
3795 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
3798 #if VMA_USE_STL_VECTOR 3800 #define VmaVector std::vector 3802 template<
typename T,
typename allocatorT>
3803 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
3805 vec.insert(vec.begin() + index, item);
3808 template<
typename T,
typename allocatorT>
3809 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
3811 vec.erase(vec.begin() + index);
3814 #else // #if VMA_USE_STL_VECTOR 3819 template<
typename T,
typename AllocatorT>
3823 typedef T value_type;
3825 VmaVector(
const AllocatorT& allocator) :
3826 m_Allocator(allocator),
3833 VmaVector(
size_t count,
const AllocatorT& allocator) :
3834 m_Allocator(allocator),
3835 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
3841 VmaVector(
const VmaVector<T, AllocatorT>& src) :
3842 m_Allocator(src.m_Allocator),
3843 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
3844 m_Count(src.m_Count),
3845 m_Capacity(src.m_Count)
3849 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
3855 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3858 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
3862 resize(rhs.m_Count);
3865 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
3871 bool empty()
const {
return m_Count == 0; }
3872 size_t size()
const {
return m_Count; }
3873 T* data() {
return m_pArray; }
3874 const T* data()
const {
return m_pArray; }
3876 T& operator[](
size_t index)
3878 VMA_HEAVY_ASSERT(index < m_Count);
3879 return m_pArray[index];
3881 const T& operator[](
size_t index)
const 3883 VMA_HEAVY_ASSERT(index < m_Count);
3884 return m_pArray[index];
3889 VMA_HEAVY_ASSERT(m_Count > 0);
3892 const T& front()
const 3894 VMA_HEAVY_ASSERT(m_Count > 0);
3899 VMA_HEAVY_ASSERT(m_Count > 0);
3900 return m_pArray[m_Count - 1];
3902 const T& back()
const 3904 VMA_HEAVY_ASSERT(m_Count > 0);
3905 return m_pArray[m_Count - 1];
3908 void reserve(
size_t newCapacity,
bool freeMemory =
false)
3910 newCapacity = VMA_MAX(newCapacity, m_Count);
3912 if((newCapacity < m_Capacity) && !freeMemory)
3914 newCapacity = m_Capacity;
3917 if(newCapacity != m_Capacity)
3919 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
3922 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
3924 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3925 m_Capacity = newCapacity;
3926 m_pArray = newArray;
3930 void resize(
size_t newCount,
bool freeMemory =
false)
3932 size_t newCapacity = m_Capacity;
3933 if(newCount > m_Capacity)
3935 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
3939 newCapacity = newCount;
3942 if(newCapacity != m_Capacity)
3944 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
3945 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
3946 if(elementsToCopy != 0)
3948 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
3950 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3951 m_Capacity = newCapacity;
3952 m_pArray = newArray;
3958 void clear(
bool freeMemory =
false)
3960 resize(0, freeMemory);
3963 void insert(
size_t index,
const T& src)
3965 VMA_HEAVY_ASSERT(index <= m_Count);
3966 const size_t oldCount = size();
3967 resize(oldCount + 1);
3968 if(index < oldCount)
3970 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
3972 m_pArray[index] = src;
3975 void remove(
size_t index)
3977 VMA_HEAVY_ASSERT(index < m_Count);
3978 const size_t oldCount = size();
3979 if(index < oldCount - 1)
3981 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
3983 resize(oldCount - 1);
3986 void push_back(
const T& src)
3988 const size_t newIndex = size();
3989 resize(newIndex + 1);
3990 m_pArray[newIndex] = src;
3995 VMA_HEAVY_ASSERT(m_Count > 0);
3999 void push_front(
const T& src)
4006 VMA_HEAVY_ASSERT(m_Count > 0);
4010 typedef T* iterator;
4012 iterator begin() {
return m_pArray; }
4013 iterator end() {
return m_pArray + m_Count; }
4016 AllocatorT m_Allocator;
4022 template<
typename T,
typename allocatorT>
4023 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
4025 vec.insert(index, item);
4028 template<
typename T,
typename allocatorT>
4029 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
4034 #endif // #if VMA_USE_STL_VECTOR 4036 template<
typename CmpLess,
typename VectorT>
4037 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
4039 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4041 vector.data() + vector.size(),
4043 CmpLess()) - vector.data();
4044 VmaVectorInsert(vector, indexToInsert, value);
4045 return indexToInsert;
4048 template<
typename CmpLess,
typename VectorT>
4049 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
4052 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
4057 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
4059 size_t indexToRemove = it - vector.begin();
4060 VmaVectorRemove(vector, indexToRemove);
4066 template<
typename CmpLess,
typename IterT,
typename KeyT>
4067 IterT VmaVectorFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value)
4070 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
4071 beg, end, value, comparator);
4073 (!comparator(*it, value) && !comparator(value, *it)))
4088 template<
typename T>
4089 class VmaPoolAllocator
4091 VMA_CLASS_NO_COPY(VmaPoolAllocator)
4093 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
4094 ~VmaPoolAllocator();
4102 uint32_t NextFreeIndex;
4109 uint32_t FirstFreeIndex;
4112 const VkAllocationCallbacks* m_pAllocationCallbacks;
4113 size_t m_ItemsPerBlock;
4114 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
4116 ItemBlock& CreateNewBlock();
4119 template<
typename T>
4120 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
4121 m_pAllocationCallbacks(pAllocationCallbacks),
4122 m_ItemsPerBlock(itemsPerBlock),
4123 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
4125 VMA_ASSERT(itemsPerBlock > 0);
4128 template<
typename T>
4129 VmaPoolAllocator<T>::~VmaPoolAllocator()
4134 template<
typename T>
4135 void VmaPoolAllocator<T>::Clear()
4137 for(
size_t i = m_ItemBlocks.size(); i--; )
4138 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
4139 m_ItemBlocks.clear();
4142 template<
typename T>
4143 T* VmaPoolAllocator<T>::Alloc()
4145 for(
size_t i = m_ItemBlocks.size(); i--; )
4147 ItemBlock& block = m_ItemBlocks[i];
4149 if(block.FirstFreeIndex != UINT32_MAX)
4151 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
4152 block.FirstFreeIndex = pItem->NextFreeIndex;
4153 return &pItem->Value;
4158 ItemBlock& newBlock = CreateNewBlock();
4159 Item*
const pItem = &newBlock.pItems[0];
4160 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
4161 return &pItem->Value;
4164 template<
typename T>
4165 void VmaPoolAllocator<T>::Free(T* ptr)
4168 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
4170 ItemBlock& block = m_ItemBlocks[i];
4174 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
4177 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
4179 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
4180 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
4181 block.FirstFreeIndex = index;
4185 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
4188 template<
typename T>
4189 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
4191 ItemBlock newBlock = {
4192 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
4194 m_ItemBlocks.push_back(newBlock);
4197 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
4198 newBlock.pItems[i].NextFreeIndex = i + 1;
4199 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
4200 return m_ItemBlocks.back();
4206 #if VMA_USE_STL_LIST 4208 #define VmaList std::list 4210 #else // #if VMA_USE_STL_LIST 4212 template<
typename T>
4221 template<
typename T>
4224 VMA_CLASS_NO_COPY(VmaRawList)
4226 typedef VmaListItem<T> ItemType;
4228 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
4232 size_t GetCount()
const {
return m_Count; }
4233 bool IsEmpty()
const {
return m_Count == 0; }
4235 ItemType* Front() {
return m_pFront; }
4236 const ItemType* Front()
const {
return m_pFront; }
4237 ItemType* Back() {
return m_pBack; }
4238 const ItemType* Back()
const {
return m_pBack; }
4240 ItemType* PushBack();
4241 ItemType* PushFront();
4242 ItemType* PushBack(
const T& value);
4243 ItemType* PushFront(
const T& value);
4248 ItemType* InsertBefore(ItemType* pItem);
4250 ItemType* InsertAfter(ItemType* pItem);
4252 ItemType* InsertBefore(ItemType* pItem,
const T& value);
4253 ItemType* InsertAfter(ItemType* pItem,
const T& value);
4255 void Remove(ItemType* pItem);
4258 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
4259 VmaPoolAllocator<ItemType> m_ItemAllocator;
4265 template<
typename T>
4266 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
4267 m_pAllocationCallbacks(pAllocationCallbacks),
4268 m_ItemAllocator(pAllocationCallbacks, 128),
4275 template<
typename T>
4276 VmaRawList<T>::~VmaRawList()
4282 template<
typename T>
4283 void VmaRawList<T>::Clear()
4285 if(IsEmpty() ==
false)
4287 ItemType* pItem = m_pBack;
4288 while(pItem != VMA_NULL)
4290 ItemType*
const pPrevItem = pItem->pPrev;
4291 m_ItemAllocator.Free(pItem);
4294 m_pFront = VMA_NULL;
4300 template<
typename T>
4301 VmaListItem<T>* VmaRawList<T>::PushBack()
4303 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4304 pNewItem->pNext = VMA_NULL;
4307 pNewItem->pPrev = VMA_NULL;
4308 m_pFront = pNewItem;
4314 pNewItem->pPrev = m_pBack;
4315 m_pBack->pNext = pNewItem;
4322 template<
typename T>
4323 VmaListItem<T>* VmaRawList<T>::PushFront()
4325 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4326 pNewItem->pPrev = VMA_NULL;
4329 pNewItem->pNext = VMA_NULL;
4330 m_pFront = pNewItem;
4336 pNewItem->pNext = m_pFront;
4337 m_pFront->pPrev = pNewItem;
4338 m_pFront = pNewItem;
4344 template<
typename T>
4345 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
4347 ItemType*
const pNewItem = PushBack();
4348 pNewItem->Value = value;
4352 template<
typename T>
4353 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
4355 ItemType*
const pNewItem = PushFront();
4356 pNewItem->Value = value;
4360 template<
typename T>
4361 void VmaRawList<T>::PopBack()
4363 VMA_HEAVY_ASSERT(m_Count > 0);
4364 ItemType*
const pBackItem = m_pBack;
4365 ItemType*
const pPrevItem = pBackItem->pPrev;
4366 if(pPrevItem != VMA_NULL)
4368 pPrevItem->pNext = VMA_NULL;
4370 m_pBack = pPrevItem;
4371 m_ItemAllocator.Free(pBackItem);
4375 template<
typename T>
4376 void VmaRawList<T>::PopFront()
4378 VMA_HEAVY_ASSERT(m_Count > 0);
4379 ItemType*
const pFrontItem = m_pFront;
4380 ItemType*
const pNextItem = pFrontItem->pNext;
4381 if(pNextItem != VMA_NULL)
4383 pNextItem->pPrev = VMA_NULL;
4385 m_pFront = pNextItem;
4386 m_ItemAllocator.Free(pFrontItem);
4390 template<
typename T>
4391 void VmaRawList<T>::Remove(ItemType* pItem)
4393 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4394 VMA_HEAVY_ASSERT(m_Count > 0);
4396 if(pItem->pPrev != VMA_NULL)
4398 pItem->pPrev->pNext = pItem->pNext;
4402 VMA_HEAVY_ASSERT(m_pFront == pItem);
4403 m_pFront = pItem->pNext;
4406 if(pItem->pNext != VMA_NULL)
4408 pItem->pNext->pPrev = pItem->pPrev;
4412 VMA_HEAVY_ASSERT(m_pBack == pItem);
4413 m_pBack = pItem->pPrev;
4416 m_ItemAllocator.Free(pItem);
4420 template<
typename T>
4421 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4423 if(pItem != VMA_NULL)
4425 ItemType*
const prevItem = pItem->pPrev;
4426 ItemType*
const newItem = m_ItemAllocator.Alloc();
4427 newItem->pPrev = prevItem;
4428 newItem->pNext = pItem;
4429 pItem->pPrev = newItem;
4430 if(prevItem != VMA_NULL)
4432 prevItem->pNext = newItem;
4436 VMA_HEAVY_ASSERT(m_pFront == pItem);
4446 template<
typename T>
4447 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4449 if(pItem != VMA_NULL)
4451 ItemType*
const nextItem = pItem->pNext;
4452 ItemType*
const newItem = m_ItemAllocator.Alloc();
4453 newItem->pNext = nextItem;
4454 newItem->pPrev = pItem;
4455 pItem->pNext = newItem;
4456 if(nextItem != VMA_NULL)
4458 nextItem->pPrev = newItem;
4462 VMA_HEAVY_ASSERT(m_pBack == pItem);
4472 template<
typename T>
4473 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4475 ItemType*
const newItem = InsertBefore(pItem);
4476 newItem->Value = value;
4480 template<
typename T>
4481 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4483 ItemType*
const newItem = InsertAfter(pItem);
4484 newItem->Value = value;
4488 template<
typename T,
typename AllocatorT>
4491 VMA_CLASS_NO_COPY(VmaList)
4502 T& operator*()
const 4504 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4505 return m_pItem->Value;
4507 T* operator->()
const 4509 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4510 return &m_pItem->Value;
4513 iterator& operator++()
4515 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4516 m_pItem = m_pItem->pNext;
4519 iterator& operator--()
4521 if(m_pItem != VMA_NULL)
4523 m_pItem = m_pItem->pPrev;
4527 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4528 m_pItem = m_pList->Back();
4533 iterator operator++(
int)
4535 iterator result = *
this;
4539 iterator operator--(
int)
4541 iterator result = *
this;
4546 bool operator==(
const iterator& rhs)
const 4548 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4549 return m_pItem == rhs.m_pItem;
4551 bool operator!=(
const iterator& rhs)
const 4553 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4554 return m_pItem != rhs.m_pItem;
4558 VmaRawList<T>* m_pList;
4559 VmaListItem<T>* m_pItem;
4561 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4567 friend class VmaList<T, AllocatorT>;
4570 class const_iterator
4579 const_iterator(
const iterator& src) :
4580 m_pList(src.m_pList),
4581 m_pItem(src.m_pItem)
4585 const T& operator*()
const 4587 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4588 return m_pItem->Value;
4590 const T* operator->()
const 4592 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4593 return &m_pItem->Value;
4596 const_iterator& operator++()
4598 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4599 m_pItem = m_pItem->pNext;
4602 const_iterator& operator--()
4604 if(m_pItem != VMA_NULL)
4606 m_pItem = m_pItem->pPrev;
4610 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4611 m_pItem = m_pList->Back();
4616 const_iterator operator++(
int)
4618 const_iterator result = *
this;
4622 const_iterator operator--(
int)
4624 const_iterator result = *
this;
4629 bool operator==(
const const_iterator& rhs)
const 4631 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4632 return m_pItem == rhs.m_pItem;
4634 bool operator!=(
const const_iterator& rhs)
const 4636 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4637 return m_pItem != rhs.m_pItem;
4641 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4647 const VmaRawList<T>* m_pList;
4648 const VmaListItem<T>* m_pItem;
4650 friend class VmaList<T, AllocatorT>;
4653 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4655 bool empty()
const {
return m_RawList.IsEmpty(); }
4656 size_t size()
const {
return m_RawList.GetCount(); }
4658 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4659 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4661 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4662 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4664 void clear() { m_RawList.Clear(); }
4665 void push_back(
const T& value) { m_RawList.PushBack(value); }
4666 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4667 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4670 VmaRawList<T> m_RawList;
4673 #endif // #if VMA_USE_STL_LIST 4681 #if VMA_USE_STL_UNORDERED_MAP 4683 #define VmaPair std::pair 4685 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4686 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4688 #else // #if VMA_USE_STL_UNORDERED_MAP 4690 template<
// Minimal std::pair replacement, used when VMA_USE_STL_UNORDERED_MAP is off
// (see the #if/#else around line 4688 in the header fragment above).
// NOTE(review): extraction dropped the `struct VmaPair` header and the
// `T1 first; T2 second;` member declarations between these fragments —
// restore them from the upstream header before compiling.
typename T1,
typename T2>
// Default ctor value-initializes both members (shown by the init list).
4696 VmaPair() : first(), second() { }
// Copy-constructs both members from the supplied values.
4697 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
// Minimal associative container replacing std::unordered_map when
// VMA_USE_STL_UNORDERED_MAP is disabled: a VmaVector of key/value pairs
// kept sorted by key (see VmaPairFirstLess and the binary searches in the
// out-of-line insert()/find() definitions below).
// NOTE(review): the `class VmaMap` header line was dropped by extraction
// (original line numbers jump 4703 -> 4707).
4703 template<
typename KeyT,
typename ValueT>
4707 typedef VmaPair<KeyT, ValueT> PairType;
// Iterators are raw pointers into the sorted vector; insert()/erase()
// invalidate them.
4708 typedef PairType* iterator;
// Allocator is forwarded to the backing vector.
4710 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4712 iterator begin() {
return m_Vector.begin(); }
4713 iterator end() {
return m_Vector.end(); }
// Inserts `pair` at its sorted position (defined out of line below).
4715 void insert(
const PairType& pair);
// Binary-search lookup by key; returns end() when absent (defined below).
4716 iterator find(
const KeyT& key);
4717 void erase(iterator it);
// Sorted storage for all elements.
4720 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4723 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4725 template<
typename FirstT,
typename SecondT>
4726 struct VmaPairFirstLess
4728 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4730 return lhs.first < rhs.first;
4732 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4734 return lhs.first < rhsFirst;
4738 template<
typename KeyT,
typename ValueT>
4739 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4741 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4743 m_Vector.data() + m_Vector.size(),
4745 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4746 VmaVectorInsert(m_Vector, indexToInsert, pair);
4749 template<
typename KeyT,
typename ValueT>
4750 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4752 PairType* it = VmaBinaryFindFirstNotLess(
4754 m_Vector.data() + m_Vector.size(),
4756 VmaPairFirstLess<KeyT, ValueT>());
4757 if((it != m_Vector.end()) && (it->first == key))
4763 return m_Vector.end();
4767 template<
typename KeyT,
typename ValueT>
4768 void VmaMap<KeyT, ValueT>::erase(iterator it)
4770 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4773 #endif // #if VMA_USE_STL_UNORDERED_MAP 4779 class VmaDeviceMemoryBlock;
4781 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
4783 struct VmaAllocation_T
4785 VMA_CLASS_NO_COPY(VmaAllocation_T)
4787 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4791 FLAG_USER_DATA_STRING = 0x01,
4795 enum ALLOCATION_TYPE
4797 ALLOCATION_TYPE_NONE,
4798 ALLOCATION_TYPE_BLOCK,
4799 ALLOCATION_TYPE_DEDICATED,
4802 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
4805 m_pUserData(VMA_NULL),
4806 m_LastUseFrameIndex(currentFrameIndex),
4807 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
4808 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
4810 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
4812 #if VMA_STATS_STRING_ENABLED 4813 m_CreationFrameIndex = currentFrameIndex;
4814 m_BufferImageUsage = 0;
4820 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4823 VMA_ASSERT(m_pUserData == VMA_NULL);
4826 void InitBlockAllocation(
4828 VmaDeviceMemoryBlock* block,
4829 VkDeviceSize offset,
4830 VkDeviceSize alignment,
4832 VmaSuballocationType suballocationType,
4836 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4837 VMA_ASSERT(block != VMA_NULL);
4838 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4839 m_Alignment = alignment;
4841 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4842 m_SuballocationType = (uint8_t)suballocationType;
4843 m_BlockAllocation.m_hPool = hPool;
4844 m_BlockAllocation.m_Block = block;
4845 m_BlockAllocation.m_Offset = offset;
4846 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
4851 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4852 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
4853 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4854 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
4855 m_BlockAllocation.m_Block = VMA_NULL;
4856 m_BlockAllocation.m_Offset = 0;
4857 m_BlockAllocation.m_CanBecomeLost =
true;
4860 void ChangeBlockAllocation(
4862 VmaDeviceMemoryBlock* block,
4863 VkDeviceSize offset);
4865 void ChangeSize(VkDeviceSize newSize);
4866 void ChangeOffset(VkDeviceSize newOffset);
4869 void InitDedicatedAllocation(
4870 uint32_t memoryTypeIndex,
4871 VkDeviceMemory hMemory,
4872 VmaSuballocationType suballocationType,
4876 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4877 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
4878 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
4881 m_SuballocationType = (uint8_t)suballocationType;
4882 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4883 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
4884 m_DedicatedAllocation.m_hMemory = hMemory;
4885 m_DedicatedAllocation.m_pMappedData = pMappedData;
4888 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
4889 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
4890 VkDeviceSize GetSize()
const {
return m_Size; }
4891 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
4892 void* GetUserData()
const {
return m_pUserData; }
4893 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
4894 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
4896 VmaDeviceMemoryBlock* GetBlock()
const 4898 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4899 return m_BlockAllocation.m_Block;
4901 VkDeviceSize GetOffset()
const;
4902 VkDeviceMemory GetMemory()
const;
4903 uint32_t GetMemoryTypeIndex()
const;
4904 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
4905 void* GetMappedData()
const;
4906 bool CanBecomeLost()
const;
4909 uint32_t GetLastUseFrameIndex()
const 4911 return m_LastUseFrameIndex.load();
4913 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
4915 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
4925 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4927 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
4929 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
4940 void BlockAllocMap();
4941 void BlockAllocUnmap();
4942 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
4945 #if VMA_STATS_STRING_ENABLED 4946 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
4947 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
4949 void InitBufferImageUsage(uint32_t bufferImageUsage)
4951 VMA_ASSERT(m_BufferImageUsage == 0);
4952 m_BufferImageUsage = bufferImageUsage;
4955 void PrintParameters(
class VmaJsonWriter& json)
const;
4959 VkDeviceSize m_Alignment;
4960 VkDeviceSize m_Size;
4962 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
4964 uint8_t m_SuballocationType;
4971 struct BlockAllocation
4974 VmaDeviceMemoryBlock* m_Block;
4975 VkDeviceSize m_Offset;
4976 bool m_CanBecomeLost;
4980 struct DedicatedAllocation
4982 uint32_t m_MemoryTypeIndex;
4983 VkDeviceMemory m_hMemory;
4984 void* m_pMappedData;
4990 BlockAllocation m_BlockAllocation;
4992 DedicatedAllocation m_DedicatedAllocation;
4995 #if VMA_STATS_STRING_ENABLED 4996 uint32_t m_CreationFrameIndex;
4997 uint32_t m_BufferImageUsage;
// One region inside a device-memory block; elements of
// VmaSuballocationList (typedef further below).
// NOTE(review): extraction dropped interior lines of this struct
// (original line numbers jump 5007 -> 5009 -> 5012); upstream also carries
// a size member and an allocation handle — verify against the original
// header before compiling.
5007 struct VmaSuballocation
// Byte offset of this region from the start of the block.
5009 VkDeviceSize offset;
// Classification of the region's contents (see VmaSuballocationType).
5012 VmaSuballocationType type;
5016 struct VmaSuballocationOffsetLess
5018 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5020 return lhs.offset < rhs.offset;
5023 struct VmaSuballocationOffsetGreater
5025 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5027 return lhs.offset > rhs.offset;
5031 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
5034 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// Parameters of a planned allocation inside block metadata: produced by
// VmaBlockMetadata::CreateAllocationRequest() and consumed by
// MakeRequestedAllocationsLost()/Alloc() (see VmaBlockMetadata below).
// NOTE(review): extraction dropped interior lines (original numbering
// jumps 5055 -> 5058) — compare against the upstream header.
5049 struct VmaAllocationRequest
// Proposed start of the allocation, relative to the block.
5051 VkDeviceSize offset;
5052 VkDeviceSize sumFreeSize;
5053 VkDeviceSize sumItemSize;
// Suballocation the request was resolved against.
5054 VmaSuballocationList::iterator item;
// Number of existing allocations that must be made lost to satisfy
// this request (see MakeRequestedAllocationsLost).
5055 size_t itemsToMakeLostCount;
// Cheaper requests are preferred: cost combines the bytes of items to
// sacrifice with a fixed per-item penalty (VMA_LOST_ALLOCATION_COST).
5058 VkDeviceSize CalcCost()
const 5060 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
5068 class VmaBlockMetadata
5072 virtual ~VmaBlockMetadata() { }
5073 virtual void Init(VkDeviceSize size) { m_Size = size; }
5076 virtual bool Validate()
const = 0;
5077 VkDeviceSize GetSize()
const {
return m_Size; }
5078 virtual size_t GetAllocationCount()
const = 0;
5079 virtual VkDeviceSize GetSumFreeSize()
const = 0;
5080 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
5082 virtual bool IsEmpty()
const = 0;
5084 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
5086 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
5088 #if VMA_STATS_STRING_ENABLED 5089 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
5095 virtual bool CreateAllocationRequest(
5096 uint32_t currentFrameIndex,
5097 uint32_t frameInUseCount,
5098 VkDeviceSize bufferImageGranularity,
5099 VkDeviceSize allocSize,
5100 VkDeviceSize allocAlignment,
5102 VmaSuballocationType allocType,
5103 bool canMakeOtherLost,
5106 VmaAllocationRequest* pAllocationRequest) = 0;
5108 virtual bool MakeRequestedAllocationsLost(
5109 uint32_t currentFrameIndex,
5110 uint32_t frameInUseCount,
5111 VmaAllocationRequest* pAllocationRequest) = 0;
5113 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
5115 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
5119 const VmaAllocationRequest& request,
5120 VmaSuballocationType type,
5121 VkDeviceSize allocSize,
5127 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
5130 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize) {
return false; }
5133 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
5135 #if VMA_STATS_STRING_ENABLED 5136 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
5137 VkDeviceSize unusedBytes,
5138 size_t allocationCount,
5139 size_t unusedRangeCount)
const;
5140 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
5141 VkDeviceSize offset,
5143 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
5144 VkDeviceSize offset,
5145 VkDeviceSize size)
const;
5146 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
5150 VkDeviceSize m_Size;
5151 const VkAllocationCallbacks* m_pAllocationCallbacks;
5154 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 5155 VMA_ASSERT(0 && "Validation failed: " #cond); \ 5159 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
5161 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
5164 virtual ~VmaBlockMetadata_Generic();
5165 virtual void Init(VkDeviceSize size);
5167 virtual bool Validate()
const;
5168 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
5169 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5170 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5171 virtual bool IsEmpty()
const;
5173 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5174 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5176 #if VMA_STATS_STRING_ENABLED 5177 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5180 virtual bool CreateAllocationRequest(
5181 uint32_t currentFrameIndex,
5182 uint32_t frameInUseCount,
5183 VkDeviceSize bufferImageGranularity,
5184 VkDeviceSize allocSize,
5185 VkDeviceSize allocAlignment,
5187 VmaSuballocationType allocType,
5188 bool canMakeOtherLost,
5190 VmaAllocationRequest* pAllocationRequest);
5192 virtual bool MakeRequestedAllocationsLost(
5193 uint32_t currentFrameIndex,
5194 uint32_t frameInUseCount,
5195 VmaAllocationRequest* pAllocationRequest);
5197 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5199 virtual VkResult CheckCorruption(
const void* pBlockData);
5202 const VmaAllocationRequest& request,
5203 VmaSuballocationType type,
5204 VkDeviceSize allocSize,
5209 virtual void FreeAtOffset(VkDeviceSize offset);
5211 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize);
5216 bool IsBufferImageGranularityConflictPossible(
5217 VkDeviceSize bufferImageGranularity,
5218 VmaSuballocationType& inOutPrevSuballocType)
const;
5221 friend class VmaDefragmentationAlgorithm_Generic;
5222 friend class VmaDefragmentationAlgorithm_Fast;
5224 uint32_t m_FreeCount;
5225 VkDeviceSize m_SumFreeSize;
5226 VmaSuballocationList m_Suballocations;
5229 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
5231 bool ValidateFreeSuballocationList()
const;
5235 bool CheckAllocation(
5236 uint32_t currentFrameIndex,
5237 uint32_t frameInUseCount,
5238 VkDeviceSize bufferImageGranularity,
5239 VkDeviceSize allocSize,
5240 VkDeviceSize allocAlignment,
5241 VmaSuballocationType allocType,
5242 VmaSuballocationList::const_iterator suballocItem,
5243 bool canMakeOtherLost,
5244 VkDeviceSize* pOffset,
5245 size_t* itemsToMakeLostCount,
5246 VkDeviceSize* pSumFreeSize,
5247 VkDeviceSize* pSumItemSize)
const;
5249 void MergeFreeWithNext(VmaSuballocationList::iterator item);
5253 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
5256 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
5259 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
5340 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
5342 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
5345 virtual ~VmaBlockMetadata_Linear();
5346 virtual void Init(VkDeviceSize size);
5348 virtual bool Validate()
const;
5349 virtual size_t GetAllocationCount()
const;
5350 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5351 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5352 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
5354 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5355 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5357 #if VMA_STATS_STRING_ENABLED 5358 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5361 virtual bool CreateAllocationRequest(
5362 uint32_t currentFrameIndex,
5363 uint32_t frameInUseCount,
5364 VkDeviceSize bufferImageGranularity,
5365 VkDeviceSize allocSize,
5366 VkDeviceSize allocAlignment,
5368 VmaSuballocationType allocType,
5369 bool canMakeOtherLost,
5371 VmaAllocationRequest* pAllocationRequest);
5373 virtual bool MakeRequestedAllocationsLost(
5374 uint32_t currentFrameIndex,
5375 uint32_t frameInUseCount,
5376 VmaAllocationRequest* pAllocationRequest);
5378 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5380 virtual VkResult CheckCorruption(
const void* pBlockData);
5383 const VmaAllocationRequest& request,
5384 VmaSuballocationType type,
5385 VkDeviceSize allocSize,
5390 virtual void FreeAtOffset(VkDeviceSize offset);
5400 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
5402 enum SECOND_VECTOR_MODE
5404 SECOND_VECTOR_EMPTY,
5409 SECOND_VECTOR_RING_BUFFER,
5415 SECOND_VECTOR_DOUBLE_STACK,
5418 VkDeviceSize m_SumFreeSize;
5419 SuballocationVectorType m_Suballocations0, m_Suballocations1;
5420 uint32_t m_1stVectorIndex;
5421 SECOND_VECTOR_MODE m_2ndVectorMode;
5423 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5424 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5425 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5426 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5429 size_t m_1stNullItemsBeginCount;
5431 size_t m_1stNullItemsMiddleCount;
5433 size_t m_2ndNullItemsCount;
5435 bool ShouldCompact1st()
const;
5436 void CleanupAfterFree();
5450 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5452 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5455 virtual ~VmaBlockMetadata_Buddy();
5456 virtual void Init(VkDeviceSize size);
5458 virtual bool Validate()
const;
5459 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
5460 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5461 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5462 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5464 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5465 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5467 #if VMA_STATS_STRING_ENABLED 5468 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5471 virtual bool CreateAllocationRequest(
5472 uint32_t currentFrameIndex,
5473 uint32_t frameInUseCount,
5474 VkDeviceSize bufferImageGranularity,
5475 VkDeviceSize allocSize,
5476 VkDeviceSize allocAlignment,
5478 VmaSuballocationType allocType,
5479 bool canMakeOtherLost,
5481 VmaAllocationRequest* pAllocationRequest);
5483 virtual bool MakeRequestedAllocationsLost(
5484 uint32_t currentFrameIndex,
5485 uint32_t frameInUseCount,
5486 VmaAllocationRequest* pAllocationRequest);
5488 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5490 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
5493 const VmaAllocationRequest& request,
5494 VmaSuballocationType type,
5495 VkDeviceSize allocSize,
5499 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5500 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
5503 static const VkDeviceSize MIN_NODE_SIZE = 32;
5504 static const size_t MAX_LEVELS = 30;
5506 struct ValidationContext
5508 size_t calculatedAllocationCount;
5509 size_t calculatedFreeCount;
5510 VkDeviceSize calculatedSumFreeSize;
5512 ValidationContext() :
5513 calculatedAllocationCount(0),
5514 calculatedFreeCount(0),
5515 calculatedSumFreeSize(0) { }
5520 VkDeviceSize offset;
5550 VkDeviceSize m_UsableSize;
5551 uint32_t m_LevelCount;
5557 } m_FreeList[MAX_LEVELS];
5559 size_t m_AllocationCount;
5563 VkDeviceSize m_SumFreeSize;
5565 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5566 void DeleteNode(Node* node);
5567 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
5568 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
5569 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
5571 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5572 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
5576 void AddToFreeListFront(uint32_t level, Node* node);
5580 void RemoveFromFreeList(uint32_t level, Node* node);
5582 #if VMA_STATS_STRING_ENABLED 5583 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
5593 class VmaDeviceMemoryBlock
5595 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5597 VmaBlockMetadata* m_pMetadata;
5601 ~VmaDeviceMemoryBlock()
5603 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5604 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5610 uint32_t newMemoryTypeIndex,
5611 VkDeviceMemory newMemory,
5612 VkDeviceSize newSize,
5614 uint32_t algorithm);
5618 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5619 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5620 uint32_t GetId()
const {
return m_Id; }
5621 void* GetMappedData()
const {
return m_pMappedData; }
5624 bool Validate()
const;
5629 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
5632 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5633 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5635 VkResult BindBufferMemory(
5639 VkResult BindImageMemory(
5645 uint32_t m_MemoryTypeIndex;
5647 VkDeviceMemory m_hMemory;
5655 uint32_t m_MapCount;
5656 void* m_pMappedData;
// Comparator over raw pointers.
// NOTE(review): the operator's body was dropped by extraction (original
// numbering jumps 5661 -> 5667); upstream compares the addresses
// (`return lhs < rhs;`) — confirm against the original header.
5659 struct VmaPointerLess
5661 bool operator()(
const void* lhs,
const void* rhs)
const 5667 struct VmaDefragmentationMove
5669 size_t srcBlockIndex;
5670 size_t dstBlockIndex;
5671 VkDeviceSize srcOffset;
5672 VkDeviceSize dstOffset;
5676 class VmaDefragmentationAlgorithm;
5684 struct VmaBlockVector
5686 VMA_CLASS_NO_COPY(VmaBlockVector)
5690 uint32_t memoryTypeIndex,
5691 VkDeviceSize preferredBlockSize,
5692 size_t minBlockCount,
5693 size_t maxBlockCount,
5694 VkDeviceSize bufferImageGranularity,
5695 uint32_t frameInUseCount,
5697 bool explicitBlockSize,
5698 uint32_t algorithm);
5701 VkResult CreateMinBlocks();
5703 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5704 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5705 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5706 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5707 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5711 bool IsEmpty()
const {
return m_Blocks.empty(); }
5712 bool IsCorruptionDetectionEnabled()
const;
5716 uint32_t currentFrameIndex,
5718 VkDeviceSize alignment,
5720 VmaSuballocationType suballocType,
5729 #if VMA_STATS_STRING_ENABLED 5730 void PrintDetailedMap(
class VmaJsonWriter& json);
5733 void MakePoolAllocationsLost(
5734 uint32_t currentFrameIndex,
5735 size_t* pLostAllocationCount);
5736 VkResult CheckCorruption();
5740 class VmaBlockVectorDefragmentationContext* pCtx,
5742 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
5743 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
5744 VkCommandBuffer commandBuffer);
5745 void DefragmentationEnd(
5746 class VmaBlockVectorDefragmentationContext* pCtx,
5752 size_t GetBlockCount()
const {
return m_Blocks.size(); }
5753 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
5754 size_t CalcAllocationCount()
const;
5755 bool IsBufferImageGranularityConflictPossible()
const;
5758 friend class VmaDefragmentationAlgorithm_Generic;
5761 const uint32_t m_MemoryTypeIndex;
5762 const VkDeviceSize m_PreferredBlockSize;
5763 const size_t m_MinBlockCount;
5764 const size_t m_MaxBlockCount;
5765 const VkDeviceSize m_BufferImageGranularity;
5766 const uint32_t m_FrameInUseCount;
5767 const bool m_IsCustomPool;
5768 const bool m_ExplicitBlockSize;
5769 const uint32_t m_Algorithm;
5773 bool m_HasEmptyBlock;
5774 VMA_RW_MUTEX m_Mutex;
5776 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
5777 uint32_t m_NextBlockId;
5779 VkDeviceSize CalcMaxBlockSize()
const;
5782 void Remove(VmaDeviceMemoryBlock* pBlock);
5786 void IncrementallySortBlocks();
5789 VkResult AllocateFromBlock(
5790 VmaDeviceMemoryBlock* pBlock,
5792 uint32_t currentFrameIndex,
5794 VkDeviceSize alignment,
5797 VmaSuballocationType suballocType,
5801 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
5804 void ApplyDefragmentationMovesCpu(
5805 class VmaBlockVectorDefragmentationContext* pDefragCtx,
5806 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
5808 void ApplyDefragmentationMovesGpu(
5809 class VmaBlockVectorDefragmentationContext* pDefragCtx,
5810 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
5811 VkCommandBuffer commandBuffer);
5822 VMA_CLASS_NO_COPY(VmaPool_T)
5824 VmaBlockVector m_BlockVector;
5829 VkDeviceSize preferredBlockSize);
5832 uint32_t GetId()
const {
return m_Id; }
5833 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
5835 #if VMA_STATS_STRING_ENABLED 5850 class VmaDefragmentationAlgorithm
5852 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
5854 VmaDefragmentationAlgorithm(
5856 VmaBlockVector* pBlockVector,
5857 uint32_t currentFrameIndex) :
5858 m_hAllocator(hAllocator),
5859 m_pBlockVector(pBlockVector),
5860 m_CurrentFrameIndex(currentFrameIndex)
5863 virtual ~VmaDefragmentationAlgorithm()
5867 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) = 0;
5868 virtual void AddAll() = 0;
5870 virtual VkResult Defragment(
5871 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
5872 VkDeviceSize maxBytesToMove,
5873 uint32_t maxAllocationsToMove) = 0;
5875 virtual VkDeviceSize GetBytesMoved()
const = 0;
5876 virtual uint32_t GetAllocationsMoved()
const = 0;
5880 VmaBlockVector*
const m_pBlockVector;
5881 const uint32_t m_CurrentFrameIndex;
5883 struct AllocationInfo
5886 VkBool32* m_pChanged;
5889 m_hAllocation(VK_NULL_HANDLE),
5890 m_pChanged(VMA_NULL)
5894 m_hAllocation(hAlloc),
5895 m_pChanged(pChanged)
5901 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
5903 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
5905 VmaDefragmentationAlgorithm_Generic(
5907 VmaBlockVector* pBlockVector,
5908 uint32_t currentFrameIndex,
5909 bool overlappingMoveSupported);
5910 virtual ~VmaDefragmentationAlgorithm_Generic();
5912 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
5913 virtual void AddAll() { m_AllAllocations =
true; }
5915 virtual VkResult Defragment(
5916 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
5917 VkDeviceSize maxBytesToMove,
5918 uint32_t maxAllocationsToMove);
5920 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
5921 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
5924 uint32_t m_AllocationCount;
5925 bool m_AllAllocations;
5927 VkDeviceSize m_BytesMoved;
5928 uint32_t m_AllocationsMoved;
5930 struct AllocationInfoSizeGreater
5932 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 5934 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
5938 struct AllocationInfoOffsetGreater
5940 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 5942 return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
// Per-block bookkeeping for the generic defragmentation algorithm.
// NOTE(review): extraction dropped the `struct BlockInfo` header, brace
// lines, and at least one initializer (numbering jumps 5954 -> 5956;
// upstream has `m_pBlock(VMA_NULL)`) — restore from the original header.
5948 size_t m_OriginalBlockIndex;
5949 VmaDeviceMemoryBlock* m_pBlock;
// True when the block holds allocations that were NOT registered for
// defragmentation and therefore must not be moved.
5950 bool m_HasNonMovableAllocations;
// Allocations in this block that the algorithm is allowed to move.
5951 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5953 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
5954 m_OriginalBlockIndex(SIZE_MAX),
5956 m_HasNonMovableAllocations(true),
5957 m_Allocations(pAllocationCallbacks)
// A block is fully movable iff every allocation it holds was added to
// m_Allocations — detected by comparing the two counts.
5961 void CalcHasNonMovableAllocations()
5963 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
5964 const size_t defragmentAllocCount = m_Allocations.size();
5965 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
// Largest-first processing order (see AllocationInfoSizeGreater).
5968 void SortAllocationsBySizeDescending()
5970 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
// Highest-offset-first order (see AllocationInfoOffsetGreater).
5973 void SortAllocationsByOffsetDescending()
5975 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
5979 struct BlockPointerLess
5981 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 5983 return pLhsBlockInfo->m_pBlock < pRhsBlock;
5985 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5987 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Orders candidate destination blocks for defragmentation moves.
// NOTE(review): the `return true;`/`return false;` bodies of all three
// branches were dropped by extraction (numbering jumps 5997 -> 6001 ->
// 6005); upstream returns false, true, true respectively (prefer blocks
// without pinned allocations, then blocks with less free space) —
// restore and confirm against the original header.
5993 struct BlockInfoCompareMoveDestination
5995 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
// lhs has non-movable (pinned) allocations but rhs does not.
const 5997 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
// rhs has pinned allocations but lhs does not.
6001 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
// Tie-break on remaining free space reported by the block metadata.
6005 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
6013 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
6014 BlockInfoVector m_Blocks;
6016 VkResult DefragmentRound(
6017 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6018 VkDeviceSize maxBytesToMove,
6019 uint32_t maxAllocationsToMove);
6021 size_t CalcBlocksWithNonMovableCount()
const;
6023 static bool MoveMakesSense(
6024 size_t dstBlockIndex, VkDeviceSize dstOffset,
6025 size_t srcBlockIndex, VkDeviceSize srcOffset);
6028 class VmaDefragmentationAlgorithm_Fast :
public VmaDefragmentationAlgorithm
6030 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
6032 VmaDefragmentationAlgorithm_Fast(
6034 VmaBlockVector* pBlockVector,
6035 uint32_t currentFrameIndex,
6036 bool overlappingMoveSupported);
6037 virtual ~VmaDefragmentationAlgorithm_Fast();
6039 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
6040 virtual void AddAll() { m_AllAllocations =
true; }
6042 virtual VkResult Defragment(
6043 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6044 VkDeviceSize maxBytesToMove,
6045 uint32_t maxAllocationsToMove);
6047 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6048 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6053 size_t origBlockIndex;
6056 class FreeSpaceDatabase
6062 s.blockInfoIndex = SIZE_MAX;
6063 for(
size_t i = 0; i < MAX_COUNT; ++i)
6065 m_FreeSpaces[i] = s;
6069 void Register(
size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
6071 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6077 size_t bestIndex = SIZE_MAX;
6078 for(
size_t i = 0; i < MAX_COUNT; ++i)
6081 if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
6086 if(m_FreeSpaces[i].size < size &&
6087 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
6093 if(bestIndex != SIZE_MAX)
6095 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
6096 m_FreeSpaces[bestIndex].offset = offset;
6097 m_FreeSpaces[bestIndex].size = size;
6101 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
6102 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
6104 size_t bestIndex = SIZE_MAX;
6105 VkDeviceSize bestFreeSpaceAfter = 0;
6106 for(
size_t i = 0; i < MAX_COUNT; ++i)
6109 if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
6111 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
6113 if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
6115 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
6117 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
6120 bestFreeSpaceAfter = freeSpaceAfter;
6126 if(bestIndex != SIZE_MAX)
6128 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
6129 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
6131 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6134 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
6135 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
6136 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
6141 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
6151 static const size_t MAX_COUNT = 4;
6155 size_t blockInfoIndex;
6156 VkDeviceSize offset;
6158 } m_FreeSpaces[MAX_COUNT];
6161 const bool m_OverlappingMoveSupported;
6163 uint32_t m_AllocationCount;
6164 bool m_AllAllocations;
6166 VkDeviceSize m_BytesMoved;
6167 uint32_t m_AllocationsMoved;
6169 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
6171 void PreprocessMetadata();
6172 void PostprocessMetadata();
6173 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc);
6176 struct VmaBlockDefragmentationContext
6179 VMA_CLASS_NO_COPY(VmaBlockDefragmentationContext)
6183 BLOCK_FLAG_USED = 0x00000001,
6188 VmaBlockDefragmentationContext() :
6190 hBuffer(VK_NULL_HANDLE)
// Defragmentation state for one block vector (one memory type or one custom
// pool). Owns per-block contexts, the list of allocations to move, and the
// algorithm object that plans the moves. NOTE(review): declaration is
// truncated by extraction — member list and some method signatures are
// incomplete here.
6195 class VmaBlockVectorDefragmentationContext
6197 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
// One entry per device memory block in the vector.
6201 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
6203 VmaBlockVectorDefragmentationContext(
6206 VmaBlockVector* pBlockVector,
6207 uint32_t currFrameIndex,
6209 ~VmaBlockVectorDefragmentationContext();
// Returns the custom pool this context belongs to, or presumably null when
// it serves a default per-memory-type vector — confirm against callers.
6211 VmaPool GetCustomPool()
const {
return m_hCustomPool; }
6212 VmaBlockVector* GetBlockVector()
const {
return m_pBlockVector; }
6213 VmaDefragmentationAlgorithm* GetAlgorithm()
const {
return m_pAlgorithm; }
// Registers a single allocation to be considered for moving; pChanged is an
// optional out-flag reported back to the user.
6215 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
// Marks the whole block vector for defragmentation instead of a custom list.
6216 void AddAll() { m_AllAllocations =
true; }
6218 void Begin(
bool overlappingMoveSupported);
6225 VmaBlockVector*
const m_pBlockVector;
6226 const uint32_t m_CurrFrameIndex;
6227 const uint32_t m_AlgorithmFlags;
// Owned algorithm instance created in Begin(); lifetime managed by this
// context (destroyed in the destructor, per usual ownership here — verify).
6229 VmaDefragmentationAlgorithm* m_pAlgorithm;
// Explicit allocation list, used when m_AllAllocations == false.
6237 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
6238 bool m_AllAllocations;
// Top-level defragmentation context returned to the user. Aggregates one
// VmaBlockVectorDefragmentationContext per default memory-type vector plus
// one per touched custom pool. NOTE(review): declaration truncated by
// extraction.
6241 struct VmaDefragmentationContext_T
6244 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
6246 VmaDefragmentationContext_T(
6248 uint32_t currFrameIndex,
6251 ~VmaDefragmentationContext_T();
6253 void AddPools(uint32_t poolCount,
VmaPool* pPools);
6254 void AddAllocations(
6255 uint32_t allocationCount,
6257 VkBool32* pAllocationsChanged);
// Runs the actual defragmentation within the given CPU/GPU byte and
// allocation-count budgets.
6265 VkResult Defragment(
6266 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
6267 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
6272 const uint32_t m_CurrFrameIndex;
6273 const uint32_t m_Flags;
// One slot per memory type; entries are created lazily — confirm.
6276 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
// Contexts for custom pools that had allocations/pools registered.
6278 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
// Recorder: writes a call-trace file of allocator API usage for offline
// replay. Compiled only when VMA_RECORDING_ENABLED. NOTE(review): the class
// header and several members are missing from this extraction — this span
// contains only the method-declaration interior.
6281 #if VMA_RECORDING_ENABLED 6288 void WriteConfiguration(
6289 const VkPhysicalDeviceProperties& devProps,
6290 const VkPhysicalDeviceMemoryProperties& memProps,
6291 bool dedicatedAllocationExtensionEnabled);
// Each Record* method serializes one API call tagged with the frame index.
6294 void RecordCreateAllocator(uint32_t frameIndex);
6295 void RecordDestroyAllocator(uint32_t frameIndex);
6296 void RecordCreatePool(uint32_t frameIndex,
6299 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
6300 void RecordAllocateMemory(uint32_t frameIndex,
6301 const VkMemoryRequirements& vkMemReq,
6304 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
6305 const VkMemoryRequirements& vkMemReq,
6306 bool requiresDedicatedAllocation,
6307 bool prefersDedicatedAllocation,
6310 void RecordAllocateMemoryForImage(uint32_t frameIndex,
6311 const VkMemoryRequirements& vkMemReq,
6312 bool requiresDedicatedAllocation,
6313 bool prefersDedicatedAllocation,
6316 void RecordFreeMemory(uint32_t frameIndex,
6318 void RecordResizeAllocation(
6319 uint32_t frameIndex,
6321 VkDeviceSize newSize);
6322 void RecordSetAllocationUserData(uint32_t frameIndex,
6324 const void* pUserData);
6325 void RecordCreateLostAllocation(uint32_t frameIndex,
6327 void RecordMapMemory(uint32_t frameIndex,
6329 void RecordUnmapMemory(uint32_t frameIndex,
6331 void RecordFlushAllocation(uint32_t frameIndex,
6332 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6333 void RecordInvalidateAllocation(uint32_t frameIndex,
6334 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6335 void RecordCreateBuffer(uint32_t frameIndex,
6336 const VkBufferCreateInfo& bufCreateInfo,
6339 void RecordCreateImage(uint32_t frameIndex,
6340 const VkImageCreateInfo& imageCreateInfo,
6343 void RecordDestroyBuffer(uint32_t frameIndex,
6345 void RecordDestroyImage(uint32_t frameIndex,
6347 void RecordTouchAllocation(uint32_t frameIndex,
6349 void RecordGetAllocationInfo(uint32_t frameIndex,
6351 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
// Helper that renders a user-data pointer/string for the trace file.
6361 class UserDataString
6365 const char* GetString()
const {
return m_Str; }
// Serializes access to the output file from multiple threads.
6375 VMA_MUTEX m_FileMutex;
// Time base for call timestamps — presumably a QPC start value; verify.
6377 int64_t m_StartCounter;
6379 void GetBasicParams(CallParams& outParams);
// Main allocator object behind the public VmaAllocator handle. Holds device
// properties, per-memory-type block vectors, dedicated-allocation lists,
// custom pools, and all synchronization primitives. NOTE(review): this
// declaration is heavily truncated by extraction — braces, access
// specifiers and several members/methods are missing from view.
6383 #endif // #if VMA_RECORDING_ENABLED 6386 struct VmaAllocator_T
6388 VMA_CLASS_NO_COPY(VmaAllocator_T)
6391 bool m_UseKhrDedicatedAllocation;
// True when the user supplied VkAllocationCallbacks at creation time.
6393 bool m_AllocationCallbacksSpecified;
6394 VkAllocationCallbacks m_AllocationCallbacks;
// Optional per-heap budget caps, guarded by the mutex below.
6398 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
6399 VMA_MUTEX m_HeapSizeLimitMutex;
6401 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
6402 VkPhysicalDeviceMemoryProperties m_MemProps;
// One default block vector per Vulkan memory type.
6405 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
// Dedicated (non-suballocated) allocations, tracked per memory type and
// protected by a per-type RW mutex.
6408 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
6409 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
6410 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns user callbacks if specified, otherwise 0 (i.e. Vulkan defaults).
6416 const VkAllocationCallbacks* GetAllocationCallbacks()
const 6418 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
6422 return m_VulkanFunctions;
// Effective granularity between buffer and image suballocations — combines
// a debug minimum with the device limit (the combining expression is
// partially cut here; presumably VMA_MAX — verify).
6425 VkDeviceSize GetBufferImageGranularity()
const 6428 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
6429 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
6432 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
6433 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
6435 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 6437 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
6438 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// True for memory that is HOST_VISIBLE but NOT HOST_COHERENT — such memory
// needs explicit flush/invalidate.
6441 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 6443 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
6444 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
// Minimum alignment for mapped access: non-coherent types must also honor
// nonCoherentAtomSize.
6447 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 6449 return IsMemoryTypeNonCoherent(memTypeIndex) ?
6450 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
6451 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
6454 bool IsIntegratedGpu()
const 6456 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
6459 #if VMA_RECORDING_ENABLED 6460 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
// Queries memory requirements and dedicated-allocation preference for a
// buffer/image, using KHR_dedicated_allocation when available — verify.
6463 void GetBufferMemoryRequirements(
6465 VkMemoryRequirements& memReq,
6466 bool& requiresDedicatedAllocation,
6467 bool& prefersDedicatedAllocation)
const;
6468 void GetImageMemoryRequirements(
6470 VkMemoryRequirements& memReq,
6471 bool& requiresDedicatedAllocation,
6472 bool& prefersDedicatedAllocation)
const;
// Main allocation entry point (parameter list truncated here).
6475 VkResult AllocateMemory(
6476 const VkMemoryRequirements& vkMemReq,
6477 bool requiresDedicatedAllocation,
6478 bool prefersDedicatedAllocation,
6479 VkBuffer dedicatedBuffer,
6480 VkImage dedicatedImage,
6482 VmaSuballocationType suballocType,
6488 VkResult ResizeAllocation(
6490 VkDeviceSize newSize);
6492 void CalculateStats(
VmaStats* pStats);
6494 #if VMA_STATS_STRING_ENABLED 6495 void PrintDetailedMap(
class VmaJsonWriter& json);
6498 VkResult DefragmentationBegin(
6502 VkResult DefragmentationEnd(
6509 void DestroyPool(
VmaPool pool);
6512 void SetCurrentFrameIndex(uint32_t frameIndex);
6513 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
6515 void MakePoolAllocationsLost(
6517 size_t* pLostAllocationCount);
6518 VkResult CheckPoolCorruption(
VmaPool hPool);
6519 VkResult CheckCorruption(uint32_t memoryTypeBits);
// Thin wrappers over vkAllocateMemory/vkFreeMemory that also apply heap
// size limits and statistics — verify against definitions.
6523 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
6524 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
6529 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
6530 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
6532 void FlushOrInvalidateAllocation(
6534 VkDeviceSize offset, VkDeviceSize size,
6535 VMA_CACHE_OPERATION op);
// Debug feature: fills allocation memory with a byte pattern.
6537 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
6540 VkDeviceSize m_PreferredLargeHeapBlockSize;
6542 VkPhysicalDevice m_PhysicalDevice;
6543 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
// Custom pools list plus its RW lock.
6545 VMA_RW_MUTEX m_PoolsMutex;
6547 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
6548 uint32_t m_NextPoolId;
6552 #if VMA_RECORDING_ENABLED 6553 VmaRecorder* m_pRecorder;
6558 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
// Tries allocation within a single already-chosen memory type.
6560 VkResult AllocateMemoryOfType(
6562 VkDeviceSize alignment,
6563 bool dedicatedAllocation,
6564 VkBuffer dedicatedBuffer,
6565 VkImage dedicatedImage,
6567 uint32_t memTypeIndex,
6568 VmaSuballocationType suballocType,
6572 VkResult AllocateDedicatedMemory(
6574 VmaSuballocationType suballocType,
6575 uint32_t memTypeIndex,
6577 bool isUserDataString,
6579 VkBuffer dedicatedBuffer,
6580 VkImage dedicatedImage,
6590 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
6592 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
6595 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
6597 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Allocates raw storage for one object of type T via the allocator's
// callbacks; does not run a constructor. NOTE(review): the function
// signature line was lost in extraction — confirm the name against the
// complete source before changing callers.
6600 template<
typename T>
6603 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
6606 template<
typename T>
6607 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
6609 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// vma_delete / vma_delete_array: destroy object(s) and release their memory
// through the allocator's callbacks. NOTE(review): the explicit destructor
// calls (e.g. ptr->~T()) and null checks present in the complete source are
// missing from this extraction — do not assume VmaFree runs destructors.
6612 template<
typename T>
6613 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
6618 VmaFree(hAllocator, ptr);
// Array variant: iterates elements in reverse order (i = count; i--;),
// presumably destroying each before freeing the block — verify.
6622 template<
typename T>
6623 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
6627 for(
size_t i = count; i--; )
6629 VmaFree(hAllocator, ptr);
// Minimal append-only string builder backed by a VmaVector<char>, used by
// the JSON statistics writer. The buffer is NOT null-terminated; consumers
// use GetData()+GetLength(). Compiled only when VMA_STATS_STRING_ENABLED.
6636 #if VMA_STATS_STRING_ENABLED 6638 class VmaStringBuilder
// Character storage uses the allocator's own allocation callbacks.
6641 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
6642 size_t GetLength()
const {
return m_Data.size(); }
6643 const char* GetData()
const {
return m_Data.data(); }
6645 void Add(
char ch) { m_Data.push_back(ch); }
6646 void Add(
const char* pStr);
6647 void AddNewLine() { Add(
'\n'); }
// Number/pointer variants format into a stack buffer then append — see the
// out-of-line definitions below.
6648 void AddNumber(uint32_t num);
6649 void AddNumber(uint64_t num);
6650 void AddPointer(
const void* ptr);
6653 VmaVector< char, VmaStlAllocator<char> > m_Data;
6656 void VmaStringBuilder::Add(
const char* pStr)
6658 const size_t strLen = strlen(pStr);
6661 const size_t oldCount = m_Data.size();
6662 m_Data.resize(oldCount + strLen);
6663 memcpy(m_Data.data() + oldCount, pStr, strLen);
// AddNumber/AddPointer: format the value into a local character buffer via
// the Vma*ToStr helpers, then append. NOTE(review): the buffer declarations
// and the trailing Add(buf) calls are missing from this extraction —
// confirm buffer sizes against the complete source.
6667 void VmaStringBuilder::AddNumber(uint32_t num)
6670 VmaUint32ToStr(buf,
sizeof(buf), num);
6674 void VmaStringBuilder::AddNumber(uint64_t num)
6677 VmaUint64ToStr(buf,
sizeof(buf), num);
6681 void VmaStringBuilder::AddPointer(
const void* ptr)
6684 VmaPtrToStr(buf,
sizeof(buf), ptr);
// VmaJsonWriter: small streaming JSON emitter used for the statistics
// string. Maintains a stack of open objects/arrays to track nesting,
// indentation, and comma placement. Compiled only when
// VMA_STATS_STRING_ENABLED. NOTE(review): class header and some members
// are missing from this extraction.
6688 #endif // #if VMA_STATS_STRING_ENABLED 6693 #if VMA_STATS_STRING_ENABLED 6697 VMA_CLASS_NO_COPY(VmaJsonWriter)
6699 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
// singleLine suppresses newlines/indentation inside the collection.
6702 void BeginObject(
bool singleLine =
false);
6705 void BeginArray(
bool singleLine =
false);
// WriteString = BeginString + EndString in one call; the Begin/Continue/End
// triple allows composing a string value from multiple pieces.
6708 void WriteString(
const char* pStr);
6709 void BeginString(
const char* pStr = VMA_NULL);
6710 void ContinueString(
const char* pStr);
6711 void ContinueString(uint32_t n);
6712 void ContinueString(uint64_t n);
6713 void ContinueString_Pointer(
const void* ptr);
6714 void EndString(
const char* pStr = VMA_NULL);
6716 void WriteNumber(uint32_t n);
6717 void WriteNumber(uint64_t n);
6718 void WriteBool(
bool b);
// Indentation unit appended once per nesting level.
6722 static const char*
const INDENT;
6724 enum COLLECTION_TYPE
6726 COLLECTION_TYPE_OBJECT,
6727 COLLECTION_TYPE_ARRAY,
// One stack entry per currently-open collection; valueCount drives comma
// and key/value alternation logic in BeginValue().
6731 COLLECTION_TYPE type;
6732 uint32_t valueCount;
6733 bool singleLineMode;
6736 VmaStringBuilder& m_SB;
6737 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
6738 bool m_InsideString;
6740 void BeginValue(
bool isString);
6741 void WriteIndent(
bool oneLess =
false);
// Two-space indent per nesting level.
6744 const char*
const VmaJsonWriter::INDENT =
"  ";
// Constructor: binds the output string builder and starts with an empty
// collection stack, outside of any string. NOTE(review): the m_SB(sb)
// initializer line appears to be missing from this extraction.
6746 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
6748 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
6749 m_InsideString(false)
6753 VmaJsonWriter::~VmaJsonWriter()
6755 VMA_ASSERT(!m_InsideString);
6756 VMA_ASSERT(m_Stack.empty());
// Begin/End Object and Array push/pop a StackItem tracking nesting state.
// NOTE(review): the actual emit lines (m_SB.Add('{') etc.), BeginValue()
// calls, and StackItem declarations are missing from this extraction — only
// the bookkeeping skeleton is visible here.
6759 void VmaJsonWriter::BeginObject(
bool singleLine)
6761 VMA_ASSERT(!m_InsideString);
6767 item.type = COLLECTION_TYPE_OBJECT;
6768 item.valueCount = 0;
6769 item.singleLineMode = singleLine;
6770 m_Stack.push_back(item);
// EndObject: asserts the top of stack is an object before popping/emitting.
6773 void VmaJsonWriter::EndObject()
6775 VMA_ASSERT(!m_InsideString);
6780 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
6784 void VmaJsonWriter::BeginArray(
bool singleLine)
6786 VMA_ASSERT(!m_InsideString);
6792 item.type = COLLECTION_TYPE_ARRAY;
6793 item.valueCount = 0;
6794 item.singleLineMode = singleLine;
6795 m_Stack.push_back(item);
6798 void VmaJsonWriter::EndArray()
6800 VMA_ASSERT(!m_InsideString);
6805 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// WriteString: complete string value in one call (body lines missing here;
// presumably BeginString(pStr); EndString(); — verify).
6809 void VmaJsonWriter::WriteString(
const char* pStr)
// BeginString: opens a quoted string value; optional initial content.
6815 void VmaJsonWriter::BeginString(
const char* pStr)
6817 VMA_ASSERT(!m_InsideString);
6821 m_InsideString =
true;
6822 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6824 ContinueString(pStr);
// ContinueString(const char*): appends content with JSON escaping; the
// escaping switch over each character is missing from this extraction —
// only the unsupported-character assert is visible.
6828 void VmaJsonWriter::ContinueString(
const char* pStr)
6830 VMA_ASSERT(m_InsideString);
6832 const size_t strLen = strlen(pStr);
6833 for(
size_t i = 0; i < strLen; ++i)
6866 VMA_ASSERT(0 &&
"Character not currently supported.");
// Numeric continuations append digits inside the open string (the
// m_SB.AddNumber(n) lines are missing from this extraction).
6872 void VmaJsonWriter::ContinueString(uint32_t n)
6874 VMA_ASSERT(m_InsideString);
6878 void VmaJsonWriter::ContinueString(uint64_t n)
6880 VMA_ASSERT(m_InsideString);
6884 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
6886 VMA_ASSERT(m_InsideString);
6887 m_SB.AddPointer(ptr);
// EndString: optionally appends trailing content, then closes the quoted
// string. NOTE(review): the closing-quote emit line is missing from this
// extraction.
6890 void VmaJsonWriter::EndString(
const char* pStr)
6892 VMA_ASSERT(m_InsideString);
6893 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6895 ContinueString(pStr);
6898 m_InsideString =
false;
// WriteNumber/WriteBool/WriteNull: standalone JSON values; the BeginValue()
// and m_SB emit lines are missing from this extraction.
6901 void VmaJsonWriter::WriteNumber(uint32_t n)
6903 VMA_ASSERT(!m_InsideString);
6908 void VmaJsonWriter::WriteNumber(uint64_t n)
6910 VMA_ASSERT(!m_InsideString);
6915 void VmaJsonWriter::WriteBool(
bool b)
6917 VMA_ASSERT(!m_InsideString);
6919 m_SB.Add(b ?
"true" :
"false");
6922 void VmaJsonWriter::WriteNull()
6924 VMA_ASSERT(!m_InsideString);
// BeginValue: comma/key-colon bookkeeping before each value. Inside an
// object, even valueCount means a key is expected (must be a string).
6929 void VmaJsonWriter::BeginValue(
bool isString)
6931 if(!m_Stack.empty())
6933 StackItem& currItem = m_Stack.back();
6934 if(currItem.type == COLLECTION_TYPE_OBJECT &&
6935 currItem.valueCount % 2 == 0)
6937 VMA_ASSERT(isString);
// Odd valueCount in an object: this value follows a key (emit lines for
// ':' / ',' separators are missing from this extraction).
6940 if(currItem.type == COLLECTION_TYPE_OBJECT &&
6941 currItem.valueCount % 2 != 0)
6945 else if(currItem.valueCount > 0)
6954 ++currItem.valueCount;
// WriteIndent: newline + one INDENT per nesting level; oneLess backs off a
// level for closing brackets. Skipped inside single-line collections.
6958 void VmaJsonWriter::WriteIndent(
bool oneLess)
6960 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
6964 size_t count = m_Stack.size();
6965 if(count > 0 && oneLess)
6969 for(
size_t i = 0; i < count; ++i)
// SetUserData: when the allocation was created with the user-data-as-string
// flag, the string is deep-copied into allocator-owned memory (and any old
// copy freed); otherwise the raw pointer is stored as-is.
6976 #endif // #if VMA_STATS_STRING_ENABLED 6980 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
6982 if(IsUserDataString())
6984 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
6986 FreeUserDataString(hAllocator);
6988 if(pUserData != VMA_NULL)
6990 const char*
const newStrSrc = (
char*)pUserData;
6991 const size_t newStrLen = strlen(newStrSrc);
// +1 copies the terminating null as well.
6992 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
6993 memcpy(newStrDst, newStrSrc, newStrLen + 1);
6994 m_pUserData = newStrDst;
6999 m_pUserData = pUserData;
// ChangeBlockAllocation: retargets a block allocation to another block/
// offset (used by defragmentation). If the allocation is persistently
// mapped, the mapping is migrated: unmap old block, map new one.
7003 void VmaAllocation_T::ChangeBlockAllocation(
7005 VmaDeviceMemoryBlock* block,
7006 VkDeviceSize offset)
7008 VMA_ASSERT(block != VMA_NULL);
7009 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7012 if(block != m_BlockAllocation.m_Block)
// Strip the persistent-map flag to get the plain reference count.
7014 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
7015 if(IsPersistentMap())
7017 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
7018 block->Map(hAllocator, mapRefCount, VMA_NULL);
7021 m_BlockAllocation.m_Block = block;
7022 m_BlockAllocation.m_Offset = offset;
// ChangeSize: NOTE(review): the assignment to m_Size is missing from this
// extraction — only the precondition assert is visible.
7025 void VmaAllocation_T::ChangeSize(VkDeviceSize newSize)
7027 VMA_ASSERT(newSize > 0);
7031 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
7033 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7034 m_BlockAllocation.m_Offset = newOffset;
// Accessors dispatching on the allocation type (block suballocation vs
// dedicated VkDeviceMemory). NOTE(review): the switch headers and several
// case/default lines are missing from this extraction.
7037 VkDeviceSize VmaAllocation_T::GetOffset()
const 7041 case ALLOCATION_TYPE_BLOCK:
7042 return m_BlockAllocation.m_Offset;
// Dedicated allocations own the whole VkDeviceMemory, so offset is 0
// (return line missing here — verify).
7043 case ALLOCATION_TYPE_DEDICATED:
7051 VkDeviceMemory VmaAllocation_T::GetMemory()
const 7055 case ALLOCATION_TYPE_BLOCK:
7056 return m_BlockAllocation.m_Block->GetDeviceMemory();
7057 case ALLOCATION_TYPE_DEDICATED:
7058 return m_DedicatedAllocation.m_hMemory;
7061 return VK_NULL_HANDLE;
7065 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 7069 case ALLOCATION_TYPE_BLOCK:
7070 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
7071 case ALLOCATION_TYPE_DEDICATED:
7072 return m_DedicatedAllocation.m_MemoryTypeIndex;
// GetMappedData: for a block allocation the block's mapped base pointer is
// offset by this allocation's offset; for dedicated, the stored pointer is
// returned directly (consistency-checked against the map count).
7079 void* VmaAllocation_T::GetMappedData()
const 7083 case ALLOCATION_TYPE_BLOCK:
7086 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
7087 VMA_ASSERT(pBlockData != VMA_NULL);
7088 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
7095 case ALLOCATION_TYPE_DEDICATED:
7096 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
7097 return m_DedicatedAllocation.m_pMappedData;
// Only block allocations can participate in the lost-allocation mechanism.
7104 bool VmaAllocation_T::CanBecomeLost()
const 7108 case ALLOCATION_TYPE_BLOCK:
7109 return m_BlockAllocation.m_CanBecomeLost;
7110 case ALLOCATION_TYPE_DEDICATED:
7118 VmaPool VmaAllocation_T::GetPool()
const 7120 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7121 return m_BlockAllocation.m_hPool;
// MakeLost: atomically retires this allocation by CAS-ing its last-use
// frame index to VMA_FRAME_INDEX_LOST, unless it was already lost or is
// still within the frame-in-use window. NOTE(review): the surrounding
// retry loop and return statements are missing from this extraction —
// the CAS is presumably retried on spurious failure; verify.
7124 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7126 VMA_ASSERT(CanBecomeLost());
7132 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
// Already lost — nothing to do.
7135 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Still potentially in use by in-flight frames — cannot be lost yet.
7140 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
7146 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Human-readable names indexed by VmaSuballocationType, used by the JSON
// dump. NOTE(review): the array contents are missing from this extraction.
7156 #if VMA_STATS_STRING_ENABLED 7159 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
// PrintParameters: emits this allocation's fields as JSON key/value pairs
// into an already-open object.
7168 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 7170 json.WriteString(
"Type");
7171 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
7173 json.WriteString(
"Size");
7174 json.WriteNumber(m_Size);
7176 if(m_pUserData != VMA_NULL)
7178 json.WriteString(
"UserData");
// String user data is written as a JSON string; raw pointers are written
// as a formatted pointer value.
7179 if(IsUserDataString())
7181 json.WriteString((
const char*)m_pUserData);
7186 json.ContinueString_Pointer(m_pUserData);
7191 json.WriteString(
"CreationFrameIndex");
7192 json.WriteNumber(m_CreationFrameIndex);
7194 json.WriteString(
"LastUseFrameIndex");
7195 json.WriteNumber(GetLastUseFrameIndex());
7197 if(m_BufferImageUsage != 0)
7199 json.WriteString(
"Usage");
7200 json.WriteNumber(m_BufferImageUsage);
// FreeUserDataString: releases the deep-copied user-data string allocated
// in SetUserData (length + 1 accounts for the null terminator).
7206 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
7208 VMA_ASSERT(IsUserDataString())
7209 if(m_pUserData != VMA_NULL)
7211 char*
const oldStr = (
char*)m_pUserData;
7212 const size_t oldStrLen = strlen(oldStr);
7213 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
7214 m_pUserData = VMA_NULL;
// BlockAllocMap/Unmap: adjust the 7-bit map reference count stored in
// m_MapCount (top bit is the persistent-map flag). The increment/decrement
// lines are missing from this extraction.
7218 void VmaAllocation_T::BlockAllocMap()
7220 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
// 0x7F: cap so the count never collides with the persistent-map flag bit.
7222 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7228 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
7232 void VmaAllocation_T::BlockAllocUnmap()
7234 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7236 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7242 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// DedicatedAllocMap: if already mapped, returns the cached pointer and
// bumps the count; otherwise calls vkMapMemory and caches the result.
7246 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
7248 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7252 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7254 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
7255 *ppData = m_DedicatedAllocation.m_pMappedData;
7261 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
7262 return VK_ERROR_MEMORY_MAP_FAILED;
7267 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
7268 hAllocator->m_hDevice,
7269 m_DedicatedAllocation.m_hMemory,
7274 if(result == VK_SUCCESS)
7276 m_DedicatedAllocation.m_pMappedData = *ppData;
// DedicatedAllocUnmap: decrements the count and, when it reaches zero,
// clears the cached pointer and calls vkUnmapMemory.
7283 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
7285 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7287 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7292 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
7293 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
7294 hAllocator->m_hDevice,
7295 m_DedicatedAllocation.m_hMemory);
7300 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// Serializes one VmaStatInfo as a JSON object: counters, byte totals, and
// min/avg/max summaries for allocation and unused-range sizes.
// NOTE(review): the WriteNumber(stat.*) lines and Begin/EndObject calls are
// missing from this extraction — only the key strings are visible.
7304 #if VMA_STATS_STRING_ENABLED 7306 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
7310 json.WriteString(
"Blocks");
7313 json.WriteString(
"Allocations");
7316 json.WriteString(
"UnusedRanges");
7319 json.WriteString(
"UsedBytes");
7322 json.WriteString(
"UnusedBytes");
// Allocation-size summary is emitted as a single-line sub-object.
7327 json.WriteString(
"AllocationSize");
7328 json.BeginObject(
true);
7329 json.WriteString(
"Min");
7331 json.WriteString(
"Avg");
7333 json.WriteString(
"Max");
7340 json.WriteString(
"UnusedRangeSize");
7341 json.BeginObject(
true);
7342 json.WriteString(
"Min");
7344 json.WriteString(
"Avg");
7346 json.WriteString(
"Max");
// Comparator used for the size-sorted free-suballocation list: orders list
// iterators by the size of the suballocation they point at, with a mixed
// overload for binary search against a plain size. NOTE(review): the
// "bool operator()(" header lines are missing from this extraction.
7354 #endif // #if VMA_STATS_STRING_ENABLED 7356 struct VmaSuballocationItemSizeLess
7359 const VmaSuballocationList::iterator lhs,
7360 const VmaSuballocationList::iterator rhs)
const 7362 return lhs->size < rhs->size;
// Heterogeneous overload: iterator vs raw size, for VmaBinaryFindFirstNotLess.
7365 const VmaSuballocationList::iterator lhs,
7366 VkDeviceSize rhsSize)
const 7368 return lhs->size < rhsSize;
// VmaBlockMetadata base: stores the allocation callbacks used for internal
// containers. NOTE(review): other member initializers appear to be missing
// from this extraction.
7376 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
7378 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
// PrintDetailedMap_Begin: opens the per-block JSON object with summary
// totals, then starts the "Suballocations" array that _Allocation/
// _UnusedRange entries are appended into.
7382 #if VMA_STATS_STRING_ENABLED 7384 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
7385 VkDeviceSize unusedBytes,
7386 size_t allocationCount,
7387 size_t unusedRangeCount)
const 7391 json.WriteString(
"TotalBytes");
7392 json.WriteNumber(GetSize());
7394 json.WriteString(
"UnusedBytes");
7395 json.WriteNumber(unusedBytes);
7397 json.WriteString(
"Allocations");
7398 json.WriteNumber((uint64_t)allocationCount);
7400 json.WriteString(
"UnusedRanges");
7401 json.WriteNumber((uint64_t)unusedRangeCount);
7403 json.WriteString(
"Suballocations");
// One array entry per live allocation: offset plus the allocation's own
// parameters (type, size, user data, ...).
7407 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
7408 VkDeviceSize offset,
7411 json.BeginObject(
true);
7413 json.WriteString(
"Offset");
7414 json.WriteNumber(offset);
7416 hAllocation->PrintParameters(json);
// One array entry per free range, typed as FREE.
7421 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
7422 VkDeviceSize offset,
7423 VkDeviceSize size)
const 7425 json.BeginObject(
true);
7427 json.WriteString(
"Offset");
7428 json.WriteNumber(offset);
7430 json.WriteString(
"Type");
7431 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
7433 json.WriteString(
"Size");
7434 json.WriteNumber(size);
// PrintDetailedMap_End: closes the array/object opened by _Begin (emit
// lines missing from this extraction).
7439 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
// Generic (free-list) metadata: a suballocation list plus a size-sorted
// vector of iterators to free suballocations, both using the allocator's
// callbacks. NOTE(review): other initializers (counts, sizes) are missing
// from this extraction.
const 7445 #endif // #if VMA_STATS_STRING_ENABLED 7450 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
7451 VmaBlockMetadata(hAllocator),
7454 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7455 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
7459 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
// Init: seeds the metadata with a single FREE suballocation covering the
// whole block, and registers it in the size-sorted free list.
7463 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
7465 VmaBlockMetadata::Init(size);
7468 m_SumFreeSize = size;
7470 VmaSuballocation suballoc = {};
7471 suballoc.offset = 0;
7472 suballoc.size = size;
7473 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7474 suballoc.hAllocation = VK_NULL_HANDLE;
7476 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
7477 m_Suballocations.push_back(suballoc);
// NOTE(review): the decrement stepping end() back to the inserted element
// is missing from this extraction.
7478 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
7480 m_FreeSuballocationsBySize.push_back(suballocItem);
// Validate: full consistency walk — offsets must be contiguous, free
// ranges must not be adjacent, registered free list must be sorted by size
// and match the recomputed totals. Returns via VMA_VALIDATE on failure.
7483 bool VmaBlockMetadata_Generic::Validate()
const 7485 VMA_VALIDATE(!m_Suballocations.empty());
// Running totals recomputed from scratch and compared to cached members.
7488 VkDeviceSize calculatedOffset = 0;
7490 uint32_t calculatedFreeCount = 0;
7492 VkDeviceSize calculatedSumFreeSize = 0;
7495 size_t freeSuballocationsToRegister = 0;
7497 bool prevFree =
false;
7499 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7500 suballocItem != m_Suballocations.cend();
7503 const VmaSuballocation& subAlloc = *suballocItem;
// Each suballocation must start exactly where the previous one ended.
7506 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
7508 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Two adjacent free ranges must have been merged.
7510 VMA_VALIDATE(!prevFree || !currFree);
7512 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
7516 calculatedSumFreeSize += subAlloc.size;
7517 ++calculatedFreeCount;
// Only free ranges above the threshold are kept in the sorted list.
7518 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7520 ++freeSuballocationsToRegister;
7524 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
7528 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
7529 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
// With debug margins enabled every allocation must be preceded by a free
// range (the margin itself).
7532 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
7535 calculatedOffset += subAlloc.size;
7536 prevFree = currFree;
7541 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
// The registered free list must be sorted ascending by size.
7543 VkDeviceSize lastSize = 0;
7544 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
7546 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
7549 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7551 VMA_VALIDATE(suballocItem->size >= lastSize);
7553 lastSize = suballocItem->size;
7557 VMA_VALIDATE(ValidateFreeSuballocationList());
7558 VMA_VALIDATE(calculatedOffset == GetSize());
7559 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
7560 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
// Largest free range = last element of the size-sorted list; 0 when no
// free range is registered (the return-0 line is missing from this
// extraction).
7565 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 7567 if(!m_FreeSuballocationsBySize.empty())
7569 return m_FreeSuballocationsBySize.back()->size;
7577 bool VmaBlockMetadata_Generic::IsEmpty()
const 7579 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// CalcAllocationStatInfo: aggregates per-suballocation sizes into a
// VmaStatInfo. NOTE(review): the accumulation lines inside the loop are
// missing from this extraction.
7582 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 7586 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7598 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7599 suballocItem != m_Suballocations.cend();
7602 const VmaSuballocation& suballoc = *suballocItem;
7603 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// AddPoolStats: adds this block's totals into the given pool statistics
// (remaining field updates missing from this extraction).
7616 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 7618 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7620 inoutStats.
size += GetSize();
// PrintDetailedMap: walks all suballocations, emitting a JSON entry per
// allocation and per unused range, wrapped by the base-class Begin/End.
7627 #if VMA_STATS_STRING_ENABLED 7629 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 7631 PrintDetailedMap_Begin(json,
7633 m_Suballocations.size() - (size_t)m_FreeCount,
7637 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7638 suballocItem != m_Suballocations.cend();
7639 ++suballocItem, ++i)
7641 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7643 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
7647 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
7651 PrintDetailedMap_End(json);
// CreateAllocationRequest: finds a place for a new suballocation. Three
// strategies are visible: best-fit via binary search on the size-sorted
// free list, MIN_OFFSET via a linear scan from the front, and worst-fit
// (iterating the free list from the largest end). If nothing fits and
// canMakeOtherLost is set, a second pass searches positions that would
// require evicting lost-capable allocations, choosing the cheapest by
// CalcCost(). NOTE(review): many condition/return lines are missing from
// this extraction — treat the structure below as partial.
7654 #endif // #if VMA_STATS_STRING_ENABLED 7656 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
7657 uint32_t currentFrameIndex,
7658 uint32_t frameInUseCount,
7659 VkDeviceSize bufferImageGranularity,
7660 VkDeviceSize allocSize,
7661 VkDeviceSize allocAlignment,
7663 VmaSuballocationType allocType,
7664 bool canMakeOtherLost,
7666 VmaAllocationRequest* pAllocationRequest)
7668 VMA_ASSERT(allocSize > 0);
7669 VMA_ASSERT(!upperAddress);
7670 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7671 VMA_ASSERT(pAllocationRequest != VMA_NULL);
7672 VMA_HEAVY_ASSERT(Validate());
// Early out: without eviction, total free space must at least cover the
// request plus both debug margins.
7675 if(canMakeOtherLost ==
false &&
7676 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
7682 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
7683 if(freeSuballocCount > 0)
// Best-fit: first free range with size >= allocSize + margins.
7688 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7689 m_FreeSuballocationsBySize.data(),
7690 m_FreeSuballocationsBySize.data() + freeSuballocCount,
7691 allocSize + 2 * VMA_DEBUG_MARGIN,
7692 VmaSuballocationItemSizeLess());
7693 size_t index = it - m_FreeSuballocationsBySize.data();
7694 for(; index < freeSuballocCount; ++index)
7699 bufferImageGranularity,
7703 m_FreeSuballocationsBySize[index],
7705 &pAllocationRequest->offset,
7706 &pAllocationRequest->itemsToMakeLostCount,
7707 &pAllocationRequest->sumFreeSize,
7708 &pAllocationRequest->sumItemSize)
7710 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// MIN_OFFSET strategy: linear scan of all suballocations from offset 0.
7715 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
7717 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7718 it != m_Suballocations.end();
7721 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
7724 bufferImageGranularity,
7730 &pAllocationRequest->offset,
7731 &pAllocationRequest->itemsToMakeLostCount,
7732 &pAllocationRequest->sumFreeSize,
7733 &pAllocationRequest->sumItemSize))
7735 pAllocationRequest->item = it;
// Worst-fit: walk the size-sorted list from the largest free range down.
7743 for(
size_t index = freeSuballocCount; index--; )
7748 bufferImageGranularity,
7752 m_FreeSuballocationsBySize[index],
7754 &pAllocationRequest->offset,
7755 &pAllocationRequest->itemsToMakeLostCount,
7756 &pAllocationRequest->sumFreeSize,
7757 &pAllocationRequest->sumItemSize))
7759 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Eviction pass: consider every position, allowing lost-capable
// allocations to be sacrificed; keep the cheapest candidate.
7766 if(canMakeOtherLost)
7770 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
7771 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
7773 VmaAllocationRequest tmpAllocRequest = {};
7774 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
7775 suballocIt != m_Suballocations.end();
7778 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
7779 suballocIt->hAllocation->CanBecomeLost())
7784 bufferImageGranularity,
7790 &tmpAllocRequest.offset,
7791 &tmpAllocRequest.itemsToMakeLostCount,
7792 &tmpAllocRequest.sumFreeSize,
7793 &tmpAllocRequest.sumItemSize))
7795 tmpAllocRequest.item = suballocIt;
7797 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost() ||
7800 *pAllocationRequest = tmpAllocRequest;
// Success if any eviction candidate was recorded.
7806 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// MakeRequestedAllocationsLost: walks forward from the request's item,
// evicting (making lost) allocations until the planned count is satisfied,
// freeing each as it goes. After the loop the item must again be a FREE
// suballocation large enough for the request.
7815 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
7816 uint32_t currentFrameIndex,
7817 uint32_t frameInUseCount,
7818 VmaAllocationRequest* pAllocationRequest)
7820 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Skip over free ranges — only occupied items can be made lost.
7822 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
7824 ++pAllocationRequest->item;
7826 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7827 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
7828 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
7829 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation merges neighbors and returns the merged free item.
7831 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
7832 --pAllocationRequest->itemsToMakeLostCount;
7840 VMA_HEAVY_ASSERT(Validate());
7841 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7842 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// MakeAllocationsLost: evicts every lost-capable allocation in the block
// that is old enough; returns how many were evicted.
7847 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7849 uint32_t lostAllocationCount = 0;
7850 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7851 it != m_Suballocations.end();
7854 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
7855 it->hAllocation->CanBecomeLost() &&
7856 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
7858 it = FreeSuballocation(it);
7859 ++lostAllocationCount;
7862 return lostAllocationCount;
// CheckCorruption: verifies the magic-value margins written before and
// after every live allocation; asserts and returns
// VK_ERROR_VALIDATION_FAILED_EXT on the first mismatch.
7865 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
7867 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7868 it != m_Suballocations.end();
7871 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
7873 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
7875 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
7876 return VK_ERROR_VALIDATION_FAILED_EXT;
7878 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
7880 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
7881 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request: converts the free
// suballocation at request.item into a used one of size allocSize at
// request.offset, splitting off free "padding" suballocations before and
// after if the request does not exactly fill the free range.
// NOTE(review): the parameter list is truncated in this excerpt
// (upperAddress and hAllocation are used below but their declarations,
// original lines 7893-7895, are elided) — confirm against the full source.
7889 void VmaBlockMetadata_Generic::Alloc(
7890 const VmaAllocationRequest& request,
7891 VmaSuballocationType type,
7892 VkDeviceSize allocSize,
// Upper-address allocation is only supported by the linear metadata.
7896 VMA_ASSERT(!upperAddress);
7897 VMA_ASSERT(request.item != m_Suballocations.end());
7898 VmaSuballocation& suballoc = *request.item;
7900 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7902 VMA_ASSERT(request.offset >= suballoc.offset);
// Unused space left at the beginning/end of the free range after alignment.
7903 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
7904 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
7905 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The item stops being free, so remove it from the by-size registry first.
7909 UnregisterFreeSuballocation(request.item);
7911 suballoc.offset = request.offset;
7912 suballoc.size = allocSize;
7913 suballoc.type = type;
7914 suballoc.hAllocation = hAllocation;
// Trailing padding becomes a new free suballocation inserted after the item.
7919 VmaSuballocation paddingSuballoc = {};
7920 paddingSuballoc.offset = request.offset + allocSize;
7921 paddingSuballoc.size = paddingEnd;
7922 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7923 VmaSuballocationList::iterator next = request.item;
7925 const VmaSuballocationList::iterator paddingEndItem =
7926 m_Suballocations.insert(next, paddingSuballoc);
7927 RegisterFreeSuballocation(paddingEndItem);
// Leading padding becomes a new free suballocation inserted before the item.
7933 VmaSuballocation paddingSuballoc = {};
7934 paddingSuballoc.offset = request.offset - paddingBegin;
7935 paddingSuballoc.size = paddingBegin;
7936 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7937 const VmaSuballocationList::iterator paddingBeginItem =
7938 m_Suballocations.insert(request.item, paddingSuballoc);
7939 RegisterFreeSuballocation(paddingBeginItem);
// Bookkeeping: one free item consumed; padding items re-add to m_FreeCount
// (the increments, original lines 7945-7951, are elided in this excerpt).
7943 m_FreeCount = m_FreeCount - 1;
7944 if(paddingBegin > 0)
7952 m_SumFreeSize -= allocSize;
// Frees the suballocation that holds the given allocation handle, by linear
// search over the suballocation list. Asserts if the handle is not found.
7955 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
7957 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7958 suballocItem != m_Suballocations.end();
7961 VmaSuballocation& suballoc = *suballocItem;
7962 if(suballoc.hAllocation == allocation)
// FreeSuballocation marks it free, merges neighbors, updates stats.
7964 FreeSuballocation(suballocItem);
7965 VMA_HEAVY_ASSERT(Validate());
// Reached only when no suballocation matched the handle.
7969 VMA_ASSERT(0 &&
"Not found!");
// Frees the suballocation that starts exactly at the given offset, by linear
// search. Asserts if no suballocation has that offset.
7972 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
7974 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7975 suballocItem != m_Suballocations.end();
7978 VmaSuballocation& suballoc = *suballocItem;
7979 if(suballoc.offset == offset)
7981 FreeSuballocation(suballocItem);
// Reached only when no suballocation matched the offset.
7985 VMA_ASSERT(0 &&
"Not found!");
// Attempts to resize an existing allocation in place to newSize.
// Shrinking: the freed tail either merges into a following free
// suballocation or becomes a new free suballocation. Growing: succeeds only
// if the immediately following suballocation is free and large enough.
// Returns true on success; asserts (and falls through) if alloc is not found.
// NOTE(review): several statements are elided in this excerpt (e.g. the
// advance of nextItem past suballocItem, and the return statements) —
// confirm control flow against the full source.
7988 bool VmaBlockMetadata_Generic::ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize)
7990 typedef VmaSuballocationList::iterator iter_type;
7991 for(iter_type suballocItem = m_Suballocations.begin();
7992 suballocItem != m_Suballocations.end();
7995 VmaSuballocation& suballoc = *suballocItem;
7996 if(suballoc.hAllocation == alloc)
7998 iter_type nextItem = suballocItem;
8002 VMA_ASSERT(newSize != alloc->GetSize() && newSize > 0);
// --- Shrinking path ---
8005 if(newSize < alloc->GetSize())
8007 const VkDeviceSize sizeDiff = suballoc.size - newSize;
8010 if(nextItem != m_Suballocations.end())
// Next item is free: grow it backwards to absorb the freed tail.
8013 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
// Must unregister/re-register because its size (sort key) changes.
8016 UnregisterFreeSuballocation(nextItem);
8017 nextItem->offset -= sizeDiff;
8018 nextItem->size += sizeDiff;
8019 RegisterFreeSuballocation(nextItem);
// Next item is used (or absent): insert a brand-new free suballocation
// covering the freed tail.
8025 VmaSuballocation newFreeSuballoc;
8026 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
8027 newFreeSuballoc.offset = suballoc.offset + newSize;
8028 newFreeSuballoc.size = sizeDiff;
8029 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8030 iter_type newFreeSuballocIt = m_Suballocations.insert(nextItem, newFreeSuballoc);
8031 RegisterFreeSuballocation(newFreeSuballocIt);
// Shrinking the last suballocation: append the free tail at the end.
8040 VmaSuballocation newFreeSuballoc;
8041 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
8042 newFreeSuballoc.offset = suballoc.offset + newSize;
8043 newFreeSuballoc.size = sizeDiff;
8044 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8045 m_Suballocations.push_back(newFreeSuballoc);
8047 iter_type newFreeSuballocIt = m_Suballocations.end();
8048 RegisterFreeSuballocation(--newFreeSuballocIt);
8053 suballoc.size = newSize;
8054 m_SumFreeSize += sizeDiff;
// --- Growing path ---
8059 const VkDeviceSize sizeDiff = newSize - suballoc.size;
8062 if(nextItem != m_Suballocations.end())
8065 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
// Not enough free space right after this allocation: fail.
8068 if(nextItem->size < sizeDiff + VMA_DEBUG_MARGIN)
// Free neighbor larger than needed: shift/shrink it forward.
8074 if(nextItem->size > sizeDiff)
8077 UnregisterFreeSuballocation(nextItem);
8078 nextItem->offset += sizeDiff;
8079 nextItem->size -= sizeDiff;
8080 RegisterFreeSuballocation(nextItem);
// Free neighbor consumed exactly: remove it entirely.
8086 UnregisterFreeSuballocation(nextItem);
8087 m_Suballocations.erase(nextItem);
8103 suballoc.size = newSize;
8104 m_SumFreeSize -= sizeDiff;
// Reached only when alloc was not found in this block.
8111 VMA_ASSERT(0 &&
"Not found!");
// Debug check of m_FreeSuballocationsBySize: every registered entry must be
// free, at least VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER bytes, and the
// vector must be sorted by ascending size (VMA_VALIDATE reports failures).
8115 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 8117 VkDeviceSize lastSize = 0;
8118 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
8120 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
8122 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
8123 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
// Sorted-ascending invariant enables binary search in Unregister.
8124 VMA_VALIDATE(it->size >= lastSize);
8125 lastSize = it->size;
// Core feasibility check: can an allocation of allocSize/allocAlignment/
// allocType be placed starting at suballocItem? Outputs the aligned offset
// (*pOffset) and, when canMakeOtherLost is true, how many existing
// allocations would have to be made lost (*itemsToMakeLostCount) plus the
// free/used byte sums the caller uses to rank candidates.
// Two code paths: canMakeOtherLost (may span multiple suballocations,
// sacrificing lost-able ones) and the plain path (single free suballocation).
// NOTE(review): return statements and some loop-advance lines are elided in
// this excerpt — confirm exact control flow against the full source.
8130 bool VmaBlockMetadata_Generic::CheckAllocation(
8131 uint32_t currentFrameIndex,
8132 uint32_t frameInUseCount,
8133 VkDeviceSize bufferImageGranularity,
8134 VkDeviceSize allocSize,
8135 VkDeviceSize allocAlignment,
8136 VmaSuballocationType allocType,
8137 VmaSuballocationList::const_iterator suballocItem,
8138 bool canMakeOtherLost,
8139 VkDeviceSize* pOffset,
8140 size_t* itemsToMakeLostCount,
8141 VkDeviceSize* pSumFreeSize,
8142 VkDeviceSize* pSumItemSize)
const 8144 VMA_ASSERT(allocSize > 0);
8145 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8146 VMA_ASSERT(suballocItem != m_Suballocations.cend());
8147 VMA_ASSERT(pOffset != VMA_NULL);
8149 *itemsToMakeLostCount = 0;
// ---- Path 1: allowed to make other allocations lost ----
8153 if(canMakeOtherLost)
8155 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8157 *pSumFreeSize = suballocItem->size;
// Starting item is used: it must itself be lost-able and stale enough.
8161 if(suballocItem->hAllocation->CanBecomeLost() &&
8162 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8164 ++*itemsToMakeLostCount;
8165 *pSumItemSize = suballocItem->size;
// Quick reject: not enough room from this offset to the end of the block.
8174 if(GetSize() - suballocItem->offset < allocSize)
8180 *pOffset = suballocItem->offset;
// Leave space for the debug margin before the allocation.
8183 if(VMA_DEBUG_MARGIN > 0)
8185 *pOffset += VMA_DEBUG_MARGIN;
8189 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// bufferImageGranularity: if a conflicting-type neighbor shares the same
// "page" before us, bump alignment up to the granularity.
8193 if(bufferImageGranularity > 1)
8195 bool bufferImageGranularityConflict =
false;
8196 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8197 while(prevSuballocItem != m_Suballocations.cbegin())
8200 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8201 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8203 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8205 bufferImageGranularityConflict =
true;
8213 if(bufferImageGranularityConflict)
8215 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Alignment pushed us wholly past this suballocation: not a candidate.
8221 if(*pOffset >= suballocItem->offset + suballocItem->size)
8227 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
8230 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
8232 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
8234 if(suballocItem->offset + totalSize > GetSize())
// Walk forward over as many suballocations as needed to cover totalSize,
// accumulating free bytes and counting lost-able used items.
8241 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
8242 if(totalSize > suballocItem->size)
8244 VkDeviceSize remainingSize = totalSize - suballocItem->size;
8245 while(remainingSize > 0)
8248 if(lastSuballocItem == m_Suballocations.cend())
8252 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8254 *pSumFreeSize += lastSuballocItem->size;
8258 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
8259 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
8260 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8262 ++*itemsToMakeLostCount;
8263 *pSumItemSize += lastSuballocItem->size;
8270 remainingSize = (lastSuballocItem->size < remainingSize) ?
8271 remainingSize - lastSuballocItem->size : 0;
// Conflicting-type allocations on the same page after us must also be
// lost-able, otherwise the placement is invalid.
8277 if(bufferImageGranularity > 1)
8279 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
8281 while(nextSuballocItem != m_Suballocations.cend())
8283 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8284 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8286 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8288 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
8289 if(nextSuballoc.hAllocation->CanBecomeLost() &&
8290 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8292 ++*itemsToMakeLostCount;
// ---- Path 2: plain check within a single free suballocation ----
8311 const VmaSuballocation& suballoc = *suballocItem;
8312 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8314 *pSumFreeSize = suballoc.size;
8317 if(suballoc.size < allocSize)
8323 *pOffset = suballoc.offset;
8326 if(VMA_DEBUG_MARGIN > 0)
8328 *pOffset += VMA_DEBUG_MARGIN;
8332 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Same granularity-vs-previous-neighbors adjustment as in path 1.
8336 if(bufferImageGranularity > 1)
8338 bool bufferImageGranularityConflict =
false;
8339 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8340 while(prevSuballocItem != m_Suballocations.cbegin())
8343 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8344 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8346 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8348 bufferImageGranularityConflict =
true;
8356 if(bufferImageGranularityConflict)
8358 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
8363 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
8366 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
// Must fit entirely (with margins) inside this single free suballocation.
8369 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Any conflicting-type neighbor on the same page after us makes this
// placement invalid in the plain path (nothing can be made lost here).
8376 if(bufferImageGranularity > 1)
8378 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
8380 while(nextSuballocItem != m_Suballocations.cend())
8382 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8383 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8385 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges the free suballocation at `item` with the free suballocation
// immediately following it: grows item by next's size and erases next.
// NOTE(review): the advance of nextItem (original line 8410) and the
// m_FreeCount decrement (8415) are elided in this excerpt.
8404 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
8406 VMA_ASSERT(item != m_Suballocations.end());
8407 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8409 VmaSuballocationList::iterator nextItem = item;
// Both items must be free for the merge to be valid.
8411 VMA_ASSERT(nextItem != m_Suballocations.end());
8412 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
8414 item->size += nextItem->size;
8416 m_Suballocations.erase(nextItem);
// Converts the given suballocation to free, updates aggregate counters, and
// coalesces it with adjacent free neighbors (previous and/or next). Returns
// the iterator of the resulting (possibly merged) free suballocation, freshly
// registered in the by-size free list.
8419 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
8422 VmaSuballocation& suballoc = *suballocItem;
8423 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8424 suballoc.hAllocation = VK_NULL_HANDLE;
8428 m_SumFreeSize += suballoc.size;
// Decide whether free neighbors exist on either side.
8431 bool mergeWithNext =
false;
8432 bool mergeWithPrev =
false;
8434 VmaSuballocationList::iterator nextItem = suballocItem;
8436 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
8438 mergeWithNext =
true;
8441 VmaSuballocationList::iterator prevItem = suballocItem;
8442 if(suballocItem != m_Suballocations.begin())
8445 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8447 mergeWithPrev =
true;
// Merge forward: next is erased, so unregister it first.
8453 UnregisterFreeSuballocation(nextItem);
8454 MergeFreeWithNext(suballocItem);
// Merge backward: prev's size changes, so unregister, merge, re-register.
8459 UnregisterFreeSuballocation(prevItem);
8460 MergeFreeWithNext(prevItem);
8461 RegisterFreeSuballocation(prevItem);
// No previous merge: register the item itself as the free entry.
8466 RegisterFreeSuballocation(suballocItem);
8467 return suballocItem;
// Inserts a free suballocation into m_FreeSuballocationsBySize, keeping that
// vector sorted by size. Items smaller than
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are deliberately not tracked.
8471 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
8473 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8474 VMA_ASSERT(item->size > 0);
8478 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8480 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8482 if(m_FreeSuballocationsBySize.empty())
8484 m_FreeSuballocationsBySize.push_back(item);
// Sorted insert preserves the ascending-size invariant.
8488 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a free suballocation from m_FreeSuballocationsBySize. Uses binary
// search to find the first entry of equal size, then scans forward through
// the run of equal-sized entries for the exact iterator. Asserts if absent.
8496 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
8498 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8499 VMA_ASSERT(item->size > 0);
8503 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Only items at or above the registration threshold are ever in the vector.
8505 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8507 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
8508 m_FreeSuballocationsBySize.data(),
8509 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
8511 VmaSuballocationItemSizeLess());
8512 for(
size_t index = it - m_FreeSuballocationsBySize.data();
8513 index < m_FreeSuballocationsBySize.size();
8516 if(m_FreeSuballocationsBySize[index] == item)
8518 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Once sizes stop matching we have left the equal-size run: item missing.
8521 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
8523 VMA_ASSERT(0 &&
"Not found.");
// Heuristic used by defragmentation: returns whether moving allocations in
// this block could create bufferImageGranularity conflicts. Scans all used
// suballocations, tracking the minimum alignment and whether any
// adjacent-type conflict already exists; also updates inOutPrevSuballocType
// so the caller can chain the check across blocks.
8529 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
8530 VkDeviceSize bufferImageGranularity,
8531 VmaSuballocationType& inOutPrevSuballocType)
// Granularity of 1 (or an empty block) can never conflict.
const 8533 if(bufferImageGranularity == 1 || IsEmpty())
8538 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
8539 bool typeConflictFound =
false;
8540 for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
8541 it != m_Suballocations.cend();
8544 const VmaSuballocationType suballocType = it->type;
8545 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
8547 minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
8548 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
8550 typeConflictFound =
true;
8552 inOutPrevSuballocType = suballocType;
// Conflict possible if types already conflict, or every allocation is
// aligned at least to the granularity (so page sharing cannot occur).
8556 return typeConflictFound || minAlignment >= bufferImageGranularity;
// Constructor: both suballocation vectors use the allocator's callbacks;
// vector 0 starts as the "1st" vector, the 2nd vector is empty, and all
// null-item (freed-in-place) counters start at zero.
8562 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
8563 VmaBlockMetadata(hAllocator),
8565 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8566 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8567 m_1stVectorIndex(0),
8568 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
8569 m_1stNullItemsBeginCount(0),
8570 m_1stNullItemsMiddleCount(0),
8571 m_2ndNullItemsCount(0)
// Destructor: nothing to release beyond what the members clean up themselves.
8575 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
// Initializes metadata for a block of the given size; the whole block starts
// out free, so the free-byte counter equals the block size.
8579 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
8581 VmaBlockMetadata::Init(size);
8582 m_SumFreeSize = size;
// Full consistency check of the linear allocator's state: vector/mode
// invariants, null-item counters, strictly increasing offsets with
// VMA_DEBUG_MARGIN spacing, per-allocation offset/size agreement, and that
// m_SumFreeSize equals block size minus the sum of used bytes.
// Uses VMA_VALIDATE, which reports and fails on the first violated condition.
8585 bool VmaBlockMetadata_Linear::Validate()
const 8587 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8588 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// 2nd vector is non-empty exactly when a non-EMPTY mode is active, and a
// ring buffer requires a non-empty 1st vector.
8590 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
8591 VMA_VALIDATE(!suballocations1st.empty() ||
8592 suballocations2nd.empty() ||
8593 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
8595 if(!suballocations1st.empty())
// First real (non-null) item must be live; last item must never be null.
8598 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
8600 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
8602 if(!suballocations2nd.empty())
8605 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
8608 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
8609 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
8611 VkDeviceSize sumUsedSize = 0;
8612 const size_t suballoc1stCount = suballocations1st.size();
8613 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Ring-buffer mode: 2nd vector occupies the region before the 1st vector.
8615 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8617 const size_t suballoc2ndCount = suballocations2nd.size();
8618 size_t nullItem2ndCount = 0;
8619 for(
size_t i = 0; i < suballoc2ndCount; ++i)
8621 const VmaSuballocation& suballoc = suballocations2nd[i];
8622 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Free flag and null handle must agree; offsets must not go backwards.
8624 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8625 VMA_VALIDATE(suballoc.offset >= offset);
8629 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8630 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8631 sumUsedSize += suballoc.size;
8638 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8641 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Leading null items of the 1st vector must all be free placeholders.
8644 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
8646 const VmaSuballocation& suballoc = suballocations1st[i];
8647 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
8648 suballoc.hAllocation == VK_NULL_HANDLE);
8651 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// Walk the live portion of the 1st vector with the same checks.
8653 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
8655 const VmaSuballocation& suballoc = suballocations1st[i];
8656 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8658 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8659 VMA_VALIDATE(suballoc.offset >= offset);
8660 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
8664 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8665 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8666 sumUsedSize += suballoc.size;
8673 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8675 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// Double-stack mode: 2nd vector grows down from the end of the block, so it
// is iterated in reverse to keep offsets increasing.
8677 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8679 const size_t suballoc2ndCount = suballocations2nd.size();
8680 size_t nullItem2ndCount = 0;
8681 for(
size_t i = suballoc2ndCount; i--; )
8683 const VmaSuballocation& suballoc = suballocations2nd[i];
8684 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8686 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8687 VMA_VALIDATE(suballoc.offset >= offset);
8691 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8692 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8693 sumUsedSize += suballoc.size;
8700 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8703 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
8706 VMA_VALIDATE(offset <= GetSize());
8707 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
// Number of live allocations: total items in both vectors minus the null
// (already-freed placeholder) items tracked by the three counters.
8712 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 8714 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
8715 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Largest contiguous free range, computed per 2nd-vector mode:
// EMPTY: max of the gap before the first 1st-vector item and the gap after
// the last; RING_BUFFER: the gap between the end of the 2nd vector and the
// start of the 1st; DOUBLE_STACK: the gap between the top of the 1st stack
// and the bottom of the 2nd (which grows down from the block end).
// NOTE(review): the empty-block early-out (original lines ~8721-8731) and
// some returns are elided in this excerpt.
8718 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 8720 const VkDeviceSize size = GetSize();
8732 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8734 switch(m_2ndVectorMode)
8736 case SECOND_VECTOR_EMPTY:
8742 const size_t suballocations1stCount = suballocations1st.size();
8743 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
8744 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
8745 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
// Free space before the first live item vs. after the last item.
8747 firstSuballoc.offset,
8748 size - (lastSuballoc.offset + lastSuballoc.size));
8752 case SECOND_VECTOR_RING_BUFFER:
8757 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8758 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
8759 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
8760 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
8764 case SECOND_VECTOR_DOUBLE_STACK:
8769 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// back() of the 2nd vector is the lowest-offset item of the upper stack.
8770 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
8771 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
8772 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Accumulates detailed statistics (allocation/unused-range counts and byte
// sums) into outInfo by sweeping the block front to back in three phases:
// (1) ring-buffer items of the 2nd vector up to the start of the 1st vector,
// (2) the 1st vector up to either the block end or the bottom of the 2nd
// stack, (3) in double-stack mode, the 2nd vector in reverse. lastOffset
// tracks the sweep position; gaps between it and the next live allocation
// are counted as unused ranges.
// NOTE(review): the lines that actually write outInfo fields are elided in
// this excerpt (gaps in the original numbering); only the sweep skeleton is
// visible here.
8782 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 8784 const VkDeviceSize size = GetSize();
8785 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8786 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8787 const size_t suballoc1stCount = suballocations1st.size();
8788 const size_t suballoc2ndCount = suballocations2nd.size();
8799 VkDeviceSize lastOffset = 0;
// Phase 1: ring-buffer portion (2nd vector lies before the 1st vector).
8801 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8803 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8804 size_t nextAlloc2ndIndex = 0;
8805 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed placeholder) items.
8808 while(nextAlloc2ndIndex < suballoc2ndCount &&
8809 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8811 ++nextAlloc2ndIndex;
8815 if(nextAlloc2ndIndex < suballoc2ndCount)
8817 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
8820 if(lastOffset < suballoc.offset)
8823 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8837 lastOffset = suballoc.offset + suballoc.size;
8838 ++nextAlloc2ndIndex;
// No more 2nd-vector items: remaining space up to the 1st vector is unused.
8844 if(lastOffset < freeSpace2ndTo1stEnd)
8846 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8854 lastOffset = freeSpace2ndTo1stEnd;
// Phase 2: 1st vector, ending at the block end or at the 2nd stack bottom.
8859 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8860 const VkDeviceSize freeSpace1stTo2ndEnd =
8861 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8862 while(lastOffset < freeSpace1stTo2ndEnd)
8865 while(nextAlloc1stIndex < suballoc1stCount &&
8866 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8868 ++nextAlloc1stIndex;
8872 if(nextAlloc1stIndex < suballoc1stCount)
8874 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8877 if(lastOffset < suballoc.offset)
8880 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8894 lastOffset = suballoc.offset + suballoc.size;
8895 ++nextAlloc1stIndex;
8901 if(lastOffset < freeSpace1stTo2ndEnd)
8903 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8911 lastOffset = freeSpace1stTo2ndEnd;
// Phase 3: double-stack upper stack, iterated in reverse (descending index
// equals ascending offset).
8915 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8917 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8918 while(lastOffset < size)
8921 while(nextAlloc2ndIndex != SIZE_MAX &&
8922 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8924 --nextAlloc2ndIndex;
8928 if(nextAlloc2ndIndex != SIZE_MAX)
8930 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8933 if(lastOffset < suballoc.offset)
8936 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8950 lastOffset = suballoc.offset + suballoc.size;
8951 --nextAlloc2ndIndex;
8957 if(lastOffset < size)
8959 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Adds this block's statistics into inoutStats (pool-level aggregation).
// Same three-phase front-to-back sweep as CalcAllocationStatInfo: ring-buffer
// portion of the 2nd vector, then the 1st vector, then (double-stack mode)
// the 2nd vector in reverse; gaps in the sweep are unused ranges.
// NOTE(review): most inoutStats field updates are elided in this excerpt
// (gaps in the original numbering); also nextAlloc2ndIndex here starts at
// m_1stNullItemsBeginCount rather than 0 as in CalcAllocationStatInfo —
// confirm against the full source whether that is intentional.
8975 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 8977 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8978 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8979 const VkDeviceSize size = GetSize();
8980 const size_t suballoc1stCount = suballocations1st.size();
8981 const size_t suballoc2ndCount = suballocations2nd.size();
8983 inoutStats.
size += size;
8985 VkDeviceSize lastOffset = 0;
// Phase 1: ring-buffer portion of the 2nd vector.
8987 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8989 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8990 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
8991 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed placeholder) items.
8994 while(nextAlloc2ndIndex < suballoc2ndCount &&
8995 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8997 ++nextAlloc2ndIndex;
9001 if(nextAlloc2ndIndex < suballoc2ndCount)
9003 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9006 if(lastOffset < suballoc.offset)
9009 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9020 lastOffset = suballoc.offset + suballoc.size;
9021 ++nextAlloc2ndIndex;
9026 if(lastOffset < freeSpace2ndTo1stEnd)
9029 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9036 lastOffset = freeSpace2ndTo1stEnd;
// Phase 2: the 1st vector, ending at block end or the 2nd stack bottom.
9041 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9042 const VkDeviceSize freeSpace1stTo2ndEnd =
9043 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9044 while(lastOffset < freeSpace1stTo2ndEnd)
9047 while(nextAlloc1stIndex < suballoc1stCount &&
9048 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9050 ++nextAlloc1stIndex;
9054 if(nextAlloc1stIndex < suballoc1stCount)
9056 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9059 if(lastOffset < suballoc.offset)
9062 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9073 lastOffset = suballoc.offset + suballoc.size;
9074 ++nextAlloc1stIndex;
9079 if(lastOffset < freeSpace1stTo2ndEnd)
9082 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9089 lastOffset = freeSpace1stTo2ndEnd;
// Phase 3: double-stack upper stack, reverse iteration.
9093 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9095 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9096 while(lastOffset < size)
9099 while(nextAlloc2ndIndex != SIZE_MAX &&
9100 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9102 --nextAlloc2ndIndex;
9106 if(nextAlloc2ndIndex != SIZE_MAX)
9108 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9111 if(lastOffset < suballoc.offset)
9114 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9125 lastOffset = suballoc.offset + suballoc.size;
9126 --nextAlloc2ndIndex;
9131 if(lastOffset < size)
9134 const VkDeviceSize unusedRangeSize = size - lastOffset;
9147 #if VMA_STATS_STRING_ENABLED 9148 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
// Emits a detailed JSON map of this linear block in two passes:
// pass 1 counts allocations, unused ranges and used bytes; pass 2 writes
// each allocation/unused range in offset order via the PrintDetailedMap_*
// helpers.
// NOTE(review): this chunk is a lossy extraction - braces and some
// statements are missing from view; comments annotate only visible logic.
const 9150 const VkDeviceSize size = GetSize();
9151 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9152 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9153 const size_t suballoc1stCount = suballocations1st.size();
9154 const size_t suballoc2ndCount = suballocations2nd.size();
// FIRST PASS: gather counts and byte totals.
9158 size_t unusedRangeCount = 0;
9159 VkDeviceSize usedBytes = 0;
9161 VkDeviceSize lastOffset = 0;
9163 size_t alloc2ndCount = 0;
// In ring-buffer mode the 2nd vector occupies [0, start of 1st vector).
9164 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9166 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9167 size_t nextAlloc2ndIndex = 0;
9168 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed) items.
9171 while(nextAlloc2ndIndex < suballoc2ndCount &&
9172 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9174 ++nextAlloc2ndIndex;
9178 if(nextAlloc2ndIndex < suballoc2ndCount)
9180 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// A gap before this suballocation counts as one unused range.
9183 if(lastOffset < suballoc.offset)
9192 usedBytes += suballoc.size;
9195 lastOffset = suballoc.offset + suballoc.size;
9196 ++nextAlloc2ndIndex;
// Trailing free space up to the start of the 1st vector.
9201 if(lastOffset < freeSpace2ndTo1stEnd)
9208 lastOffset = freeSpace2ndTo1stEnd;
// Walk the 1st vector, starting past its leading null items.
9213 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9214 size_t alloc1stCount = 0;
9215 const VkDeviceSize freeSpace1stTo2ndEnd =
9216 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9217 while(lastOffset < freeSpace1stTo2ndEnd)
9220 while(nextAlloc1stIndex < suballoc1stCount &&
9221 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9223 ++nextAlloc1stIndex;
9227 if(nextAlloc1stIndex < suballoc1stCount)
9229 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9232 if(lastOffset < suballoc.offset)
9241 usedBytes += suballoc.size;
9244 lastOffset = suballoc.offset + suballoc.size;
9245 ++nextAlloc1stIndex;
9250 if(lastOffset < size)
9257 lastOffset = freeSpace1stTo2ndEnd;
// In double-stack mode the 2nd vector grows down from the end of the block,
// so it is traversed back-to-front to stay in ascending offset order.
9261 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9263 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9264 while(lastOffset < size)
9267 while(nextAlloc2ndIndex != SIZE_MAX &&
9268 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9270 --nextAlloc2ndIndex;
9274 if(nextAlloc2ndIndex != SIZE_MAX)
9276 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9279 if(lastOffset < suballoc.offset)
9288 usedBytes += suballoc.size;
9291 lastOffset = suballoc.offset + suballoc.size;
9292 --nextAlloc2ndIndex;
9297 if(lastOffset < size)
9309 const VkDeviceSize unusedBytes = size - usedBytes;
9310 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
// SECOND PASS: same traversal, now emitting JSON for every range.
9315 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9317 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9318 size_t nextAlloc2ndIndex = 0;
9319 while(lastOffset < freeSpace2ndTo1stEnd)
9322 while(nextAlloc2ndIndex < suballoc2ndCount &&
9323 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9325 ++nextAlloc2ndIndex;
9329 if(nextAlloc2ndIndex < suballoc2ndCount)
9331 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9334 if(lastOffset < suballoc.offset)
9337 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9338 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9343 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9346 lastOffset = suballoc.offset + suballoc.size;
9347 ++nextAlloc2ndIndex;
9352 if(lastOffset < freeSpace2ndTo1stEnd)
9355 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9356 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9360 lastOffset = freeSpace2ndTo1stEnd;
9365 nextAlloc1stIndex = m_1stNullItemsBeginCount;
9366 while(lastOffset < freeSpace1stTo2ndEnd)
9369 while(nextAlloc1stIndex < suballoc1stCount &&
9370 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9372 ++nextAlloc1stIndex;
9376 if(nextAlloc1stIndex < suballoc1stCount)
9378 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9381 if(lastOffset < suballoc.offset)
9384 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9385 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9390 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9393 lastOffset = suballoc.offset + suballoc.size;
9394 ++nextAlloc1stIndex;
9399 if(lastOffset < freeSpace1stTo2ndEnd)
9402 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9403 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9407 lastOffset = freeSpace1stTo2ndEnd;
9411 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9413 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9414 while(lastOffset < size)
9417 while(nextAlloc2ndIndex != SIZE_MAX &&
9418 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9420 --nextAlloc2ndIndex;
9424 if(nextAlloc2ndIndex != SIZE_MAX)
9426 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9429 if(lastOffset < suballoc.offset)
9432 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9433 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9438 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9441 lastOffset = suballoc.offset + suballoc.size;
9442 --nextAlloc2ndIndex;
9447 if(lastOffset < size)
9450 const VkDeviceSize unusedRangeSize = size - lastOffset;
9451 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9460 PrintDetailedMap_End(json);
9462 #endif // #if VMA_STATS_STRING_ENABLED 9464 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
// Tries to find a place for a new allocation in this linear block.
// Three strategies, in order of the visible branches:
//   1. Upper-address (double stack): allocate downward from the end.
//   2. Lower address while 2nd vector is empty or a double stack: append
//      after the last item of the 1st vector.
//   3. Ring buffer: wrap around and append to the 2nd vector, optionally
//      making overlapping old allocations "lost" (canMakeOtherLost).
// On success fills *pAllocationRequest (offset, sumFreeSize, sumItemSize,
// itemsToMakeLostCount).
// NOTE(review): lossy extraction - braces/returns missing from view;
// comments annotate only visible logic.
9465 uint32_t currentFrameIndex,
9466 uint32_t frameInUseCount,
9467 VkDeviceSize bufferImageGranularity,
9468 VkDeviceSize allocSize,
9469 VkDeviceSize allocAlignment,
9471 VmaSuballocationType allocType,
9472 bool canMakeOtherLost,
9474 VmaAllocationRequest* pAllocationRequest)
9476 VMA_ASSERT(allocSize > 0);
9477 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9478 VMA_ASSERT(pAllocationRequest != VMA_NULL);
9479 VMA_HEAVY_ASSERT(Validate());
9481 const VkDeviceSize size = GetSize();
9482 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9483 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Upper-address allocation is incompatible with ring-buffer usage.
9487 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9489 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
9494 if(allocSize > size)
// STRATEGY 1: allocate growing down from the end of the block.
9498 VkDeviceSize resultBaseOffset = size - allocSize;
9499 if(!suballocations2nd.empty())
9501 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9502 resultBaseOffset = lastSuballoc.offset - allocSize;
9503 if(allocSize > lastSuballoc.offset)
9510 VkDeviceSize resultOffset = resultBaseOffset;
// Apply VMA_DEBUG_MARGIN below the previous top-of-stack item.
9513 if(VMA_DEBUG_MARGIN > 0)
9515 if(resultOffset < VMA_DEBUG_MARGIN)
9519 resultOffset -= VMA_DEBUG_MARGIN;
9523 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Respect bufferImageGranularity versus items above (2nd vector).
9527 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9529 bool bufferImageGranularityConflict =
false;
9530 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9532 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9533 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9535 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
9537 bufferImageGranularityConflict =
true;
9545 if(bufferImageGranularityConflict)
9547 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// Must not collide with the end of the 1st vector.
9552 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
9553 suballocations1st.back().offset + suballocations1st.back().size :
9555 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
9559 if(bufferImageGranularity > 1)
9561 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9563 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9564 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9566 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
9580 pAllocationRequest->offset = resultOffset;
9581 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
9582 pAllocationRequest->sumItemSize = 0;
9584 pAllocationRequest->itemsToMakeLostCount = 0;
// STRATEGY 2: append after the last item of the 1st vector.
9590 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9594 VkDeviceSize resultBaseOffset = 0;
9595 if(!suballocations1st.empty())
9597 const VmaSuballocation& lastSuballoc = suballocations1st.back();
9598 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9602 VkDeviceSize resultOffset = resultBaseOffset;
9605 if(VMA_DEBUG_MARGIN > 0)
9607 resultOffset += VMA_DEBUG_MARGIN;
9611 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
9615 if(bufferImageGranularity > 1 && !suballocations1st.empty())
9617 bool bufferImageGranularityConflict =
false;
9618 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9620 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9621 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9623 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9625 bufferImageGranularityConflict =
true;
9633 if(bufferImageGranularityConflict)
9635 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the bottom of the 2nd stack (double stack) or at size.
9639 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
9640 suballocations2nd.back().offset : size;
9643 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
9647 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9649 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9651 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9652 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9654 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
9668 pAllocationRequest->offset = resultOffset;
9669 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
9670 pAllocationRequest->sumItemSize = 0;
9672 pAllocationRequest->itemsToMakeLostCount = 0;
// STRATEGY 3: wrap around as a ring buffer, appending to the 2nd vector.
9679 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9681 VMA_ASSERT(!suballocations1st.empty());
9683 VkDeviceSize resultBaseOffset = 0;
9684 if(!suballocations2nd.empty())
9686 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9687 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9691 VkDeviceSize resultOffset = resultBaseOffset;
9694 if(VMA_DEBUG_MARGIN > 0)
9696 resultOffset += VMA_DEBUG_MARGIN;
9700 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
9704 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9706 bool bufferImageGranularityConflict =
false;
9707 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
9709 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
9710 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9712 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9714 bufferImageGranularityConflict =
true;
9722 if(bufferImageGranularityConflict)
9724 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
9728 pAllocationRequest->itemsToMakeLostCount = 0;
9729 pAllocationRequest->sumItemSize = 0;
9730 size_t index1st = m_1stNullItemsBeginCount;
// Count 1st-vector allocations overlapping the candidate range that can be
// sacrificed ("made lost") to make room.
9732 if(canMakeOtherLost)
9734 while(index1st < suballocations1st.size() &&
9735 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
9738 const VmaSuballocation& suballoc = suballocations1st[index1st];
9739 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
9745 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
9746 if(suballoc.hAllocation->CanBecomeLost() &&
9747 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9749 ++pAllocationRequest->itemsToMakeLostCount;
9750 pAllocationRequest->sumItemSize += suballoc.size;
// Also lose following items that share a page with the new allocation.
9762 if(bufferImageGranularity > 1)
9764 while(index1st < suballocations1st.size())
9766 const VmaSuballocation& suballoc = suballocations1st[index1st];
9767 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
9769 if(suballoc.hAllocation != VK_NULL_HANDLE)
9772 if(suballoc.hAllocation->CanBecomeLost() &&
9773 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9775 ++pAllocationRequest->itemsToMakeLostCount;
9776 pAllocationRequest->sumItemSize += suballoc.size;
// Candidate fits if it ends before the block end (no 1st item left) or
// before the next surviving 1st-vector item.
9795 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN < size) ||
9796 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
9800 if(bufferImageGranularity > 1)
9802 for(
size_t nextSuballocIndex = index1st;
9803 nextSuballocIndex < suballocations1st.size();
9804 nextSuballocIndex++)
9806 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
9807 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9809 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
9823 pAllocationRequest->offset = resultOffset;
9824 pAllocationRequest->sumFreeSize =
9825 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
9827 - pAllocationRequest->sumItemSize;
9837 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
// Makes lost the allocations counted in pAllocationRequest->itemsToMakeLostCount
// (computed by CreateAllocationRequest with canMakeOtherLost), scanning the
// 1st vector from its first non-null item.
// NOTE(review): lossy extraction - the tail of this function (loop close,
// index advance, return) is missing from view.
9838 uint32_t currentFrameIndex,
9839 uint32_t frameInUseCount,
9840 VmaAllocationRequest* pAllocationRequest)
// Nothing to do when the request did not need to sacrifice any allocation.
9842 if(pAllocationRequest->itemsToMakeLostCount == 0)
9847 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9849 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9850 size_t index1st = m_1stNullItemsBeginCount;
9851 size_t madeLostCount = 0;
9852 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
9854 VMA_ASSERT(index1st < suballocations1st.size());
9855 VmaSuballocation& suballoc = suballocations1st[index1st];
9856 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9858 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
9859 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
9860 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Convert the lost allocation's slot into a middle null item.
9862 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9863 suballoc.hAllocation = VK_NULL_HANDLE;
9864 m_SumFreeSize += suballoc.size;
9865 ++m_1stNullItemsMiddleCount;
9882 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// Makes lost every allocation in both vectors that can become lost for the
// given frame parameters. Returns the number of allocations made lost.
// Freed slots become null items (middle nulls in 1st, counted in 2nd).
9884 uint32_t lostAllocationCount = 0;
9886 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9887 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
9889 VmaSuballocation& suballoc = suballocations1st[i];
9890 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
9891 suballoc.hAllocation->CanBecomeLost() &&
9892 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9894 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9895 suballoc.hAllocation = VK_NULL_HANDLE;
9896 ++m_1stNullItemsMiddleCount;
9897 m_SumFreeSize += suballoc.size;
9898 ++lostAllocationCount;
9902 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9903 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
9905 VmaSuballocation& suballoc = suballocations2nd[i];
9906 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
9907 suballoc.hAllocation->CanBecomeLost() &&
9908 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9910 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9911 suballoc.hAllocation = VK_NULL_HANDLE;
9912 ++m_2ndNullItemsCount;
9913 ++lostAllocationCount;
// If anything was lost, the vectors need compaction/cleanup.
// NOTE(review): the cleanup call between these lines is missing from view.
9917 if(lostAllocationCount)
9922 return lostAllocationCount;
9925 VkResult VmaBlockMetadata_Linear::CheckCorruption(
// Validates the debug magic values written immediately before and after
// every live allocation in both suballocation vectors. pBlockData points at
// the mapped memory of this block. Returns VK_ERROR_VALIDATION_FAILED_EXT on
// the first corrupted margin found.
const void* pBlockData)
9927 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9928 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
9930 const VmaSuballocation& suballoc = suballocations1st[i];
9931 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Check the margin preceding the allocation.
9933 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
9935 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9936 return VK_ERROR_VALIDATION_FAILED_EXT;
// Check the margin following the allocation.
9938 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
9940 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9941 return VK_ERROR_VALIDATION_FAILED_EXT;
9946 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9947 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
9949 const VmaSuballocation& suballoc = suballocations2nd[i];
9950 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9952 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
9954 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9955 return VK_ERROR_VALIDATION_FAILED_EXT;
9957 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
9959 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9960 return VK_ERROR_VALIDATION_FAILED_EXT;
9968 void VmaBlockMetadata_Linear::Alloc(
// Commits an allocation request previously produced by
// CreateAllocationRequest: records the new suballocation in the proper
// vector and updates m_SumFreeSize and the vector mode.
// NOTE(review): lossy extraction - the branch selecting upper vs lower
// address (visible in the asserts below) is partially missing from view.
9969 const VmaAllocationRequest& request,
9970 VmaSuballocationType type,
9971 VkDeviceSize allocSize,
9975 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
// Upper-address case: push onto the 2nd vector used as a stack growing down.
9979 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
9980 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
9981 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9982 suballocations2nd.push_back(newSuballoc);
9983 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
9987 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// Lower-address case: either append to the 1st vector...
9990 if(suballocations1st.empty())
9992 suballocations1st.push_back(newSuballoc);
// ...or append past its current end...
9997 if(request.offset >= suballocations1st.back().offset + suballocations1st.back().size)
10000 VMA_ASSERT(request.offset + allocSize <= GetSize());
10001 suballocations1st.push_back(newSuballoc);
// ...or wrap around before the 1st vector's first item (ring buffer).
10004 else if(request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset)
10006 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10008 switch(m_2ndVectorMode)
10010 case SECOND_VECTOR_EMPTY:
// First wrapped allocation switches the block into ring-buffer mode.
10012 VMA_ASSERT(suballocations2nd.empty());
10013 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
10015 case SECOND_VECTOR_RING_BUFFER:
10017 VMA_ASSERT(!suballocations2nd.empty());
10019 case SECOND_VECTOR_DOUBLE_STACK:
10020 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
10026 suballocations2nd.push_back(newSuballoc);
10030 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
10035 m_SumFreeSize -= newSuballoc.size;
10038 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
10040 FreeAtOffset(allocation->GetOffset());
10043 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
// Frees the suballocation starting at the given offset. Fast paths handle
// the first item of the 1st vector and the last item of either vector; the
// general case binary-searches both vectors and marks the item as a null
// (free) slot, then runs CleanupAfterFree() to compact.
10045 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10046 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10048 if(!suballocations1st.empty())
// Fast path: the first live item of the 1st vector.
10051 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10052 if(firstSuballoc.offset == offset)
10054 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10055 firstSuballoc.hAllocation = VK_NULL_HANDLE;
10056 m_SumFreeSize += firstSuballoc.size;
10057 ++m_1stNullItemsBeginCount;
10058 CleanupAfterFree();
// Fast path: the last item of the 2nd vector (ring buffer or double stack).
10064 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
10065 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10067 VmaSuballocation& lastSuballoc = suballocations2nd.back();
10068 if(lastSuballoc.offset == offset)
10070 m_SumFreeSize += lastSuballoc.size;
10071 suballocations2nd.pop_back();
10072 CleanupAfterFree();
// Fast path: the last item of the 1st vector when no 2nd vector is in use.
10077 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
10079 VmaSuballocation& lastSuballoc = suballocations1st.back();
10080 if(lastSuballoc.offset == offset)
10082 m_SumFreeSize += lastSuballoc.size;
10083 suballocations1st.pop_back();
10084 CleanupAfterFree();
// General case: binary search the 1st vector (sorted by offset).
10091 VmaSuballocation refSuballoc;
10092 refSuballoc.offset = offset;
10094 SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
10095 suballocations1st.begin() + m_1stNullItemsBeginCount,
10096 suballocations1st.end(),
10098 if(it != suballocations1st.end())
10100 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10101 it->hAllocation = VK_NULL_HANDLE;
10102 ++m_1stNullItemsMiddleCount;
10103 m_SumFreeSize += it->size;
10104 CleanupAfterFree();
10109 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
// Search the 2nd vector; its sort order depends on the mode (ascending for
// ring buffer, descending for double stack).
10112 VmaSuballocation refSuballoc;
10113 refSuballoc.offset = offset;
10115 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
10116 VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
10117 VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
10118 if(it != suballocations2nd.end())
10120 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10121 it->hAllocation = VK_NULL_HANDLE;
10122 ++m_2ndNullItemsCount;
10123 m_SumFreeSize += it->size;
10124 CleanupAfterFree();
10129 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
10132 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 10134 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10135 const size_t suballocCount = AccessSuballocations1st().size();
10136 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
10139 void VmaBlockMetadata_Linear::CleanupAfterFree()
// Housekeeping after any free: strips null items from the edges of both
// vectors, optionally compacts the 1st vector (see ShouldCompact1st), and
// when the 1st vector drains completely, promotes the 2nd vector to become
// the new 1st vector by flipping m_1stVectorIndex.
10141 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10142 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// NOTE(review): the emptiness check guarding this reset branch is missing
// from view; the lines below reset the whole metadata to the empty state.
10146 suballocations1st.clear();
10147 suballocations2nd.clear();
10148 m_1stNullItemsBeginCount = 0;
10149 m_1stNullItemsMiddleCount = 0;
10150 m_2ndNullItemsCount = 0;
10151 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10155 const size_t suballoc1stCount = suballocations1st.size();
10156 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10157 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Convert leading middle-nulls of the 1st vector into begin-nulls.
10160 while(m_1stNullItemsBeginCount < suballoc1stCount &&
10161 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10163 ++m_1stNullItemsBeginCount;
10164 --m_1stNullItemsMiddleCount;
// Drop trailing null items of the 1st vector.
10168 while(m_1stNullItemsMiddleCount > 0 &&
10169 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
10171 --m_1stNullItemsMiddleCount;
10172 suballocations1st.pop_back();
// Drop trailing null items of the 2nd vector.
10176 while(m_2ndNullItemsCount > 0 &&
10177 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
10179 --m_2ndNullItemsCount;
10180 suballocations2nd.pop_back();
// Compact the 1st vector in place, dropping every null item.
10183 if(ShouldCompact1st())
10185 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
10186 size_t srcIndex = m_1stNullItemsBeginCount;
10187 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
10189 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
10193 if(dstIndex != srcIndex)
10195 suballocations1st[dstIndex] = suballocations1st[srcIndex];
10199 suballocations1st.resize(nonNullItemCount);
10200 m_1stNullItemsBeginCount = 0;
10201 m_1stNullItemsMiddleCount = 0;
// 2nd vector became empty: leave ring-buffer/double-stack mode.
10205 if(suballocations2nd.empty())
10207 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector became empty: swap the roles of the two vectors.
10211 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
10213 suballocations1st.clear();
10214 m_1stNullItemsBeginCount = 0;
10216 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
// The former 2nd vector (ring buffer) becomes the new 1st vector.
10219 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10220 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
10221 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
10222 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10224 ++m_1stNullItemsBeginCount;
10225 --m_1stNullItemsMiddleCount;
10227 m_2ndNullItemsCount = 0;
10228 m_1stVectorIndex ^= 1;
10233 VMA_HEAVY_ASSERT(Validate());
10240 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
// Constructs empty buddy-allocator metadata for the given allocator.
// NOTE(review): lossy extraction - some member initializers between
// m_AllocationCount and the body are missing from view.
VmaAllocator hAllocator) :
10241 VmaBlockMetadata(hAllocator),
10243 m_AllocationCount(0),
// Start with every per-level free list empty.
10247 memset(m_FreeList, 0,
sizeof(m_FreeList));
10250 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
10252 DeleteNode(m_Root);
10255 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
// Initializes buddy metadata for a block of the given size. Only the
// largest power-of-two prefix of the block is usable; the level count is
// derived from MAX_LEVELS and MIN_NODE_SIZE, and a single free root node
// covering the whole usable size is created.
// NOTE(review): lossy extraction - the level-count increment inside the
// while loop is missing from view.
10257 VmaBlockMetadata::Init(size);
// Usable size is rounded down to a power of 2; the rest is unusable.
10259 m_UsableSize = VmaPrevPow2(size);
10260 m_SumFreeSize = m_UsableSize;
10264 while(m_LevelCount < MAX_LEVELS &&
10265 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
// Root node: level 0, free, no parent, no buddy.
10270 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
10271 rootNode->offset = 0;
10272 rootNode->type = Node::TYPE_FREE;
10273 rootNode->parent = VMA_NULL;
10274 rootNode->buddy = VMA_NULL;
10277 AddToFreeListFront(0, rootNode);
10280 bool VmaBlockMetadata_Buddy::Validate()
// Debug validation: recursively checks the buddy tree via ValidateNode(),
// cross-checks the accumulated counters against the members, and verifies
// every per-level free list is well-formed (doubly linked, only free nodes,
// correct back pointer) and empty beyond m_LevelCount.
const 10283 ValidationContext ctx;
10284 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
10286 VMA_VALIDATE(
false &&
"ValidateNode failed.");
10288 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
10289 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
// Validate free lists for the levels actually in use.
10292 for(uint32_t level = 0; level < m_LevelCount; ++level)
10294 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
10295 m_FreeList[level].front->free.prev == VMA_NULL);
10297 for(Node* node = m_FreeList[level].front;
10299 node = node->free.next)
10301 VMA_VALIDATE(node->type == Node::TYPE_FREE);
10303 if(node->free.next == VMA_NULL)
10305 VMA_VALIDATE(m_FreeList[level].back == node);
10309 VMA_VALIDATE(node->free.next->free.prev == node);
// Levels beyond m_LevelCount must have empty free lists.
10315 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
10317 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
10323 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const 10325 for(uint32_t level = 0; level < m_LevelCount; ++level)
10327 if(m_FreeList[level].front != VMA_NULL)
10329 return LevelToNodeSize(level);
10335 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
// Fills outInfo with statistics for this block by walking the buddy tree
// (CalcAllocationStatInfoNode) and accounting the unusable tail of the
// block (size not covered by the power-of-two usable area) as unused space.
// NOTE(review): lossy extraction - the stat-initialization lines and the
// unusable-size accounting inside the final if are missing from view.
VmaStatInfo& outInfo)
const 10337 const VkDeviceSize unusableSize = GetUnusableSize();
10348 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
10350 if(unusableSize > 0)
10359 void VmaBlockMetadata_Buddy::AddPoolStats(
// Accumulates this block's totals into pool-wide statistics. The unusable
// tail of the block is reported as unused space.
// NOTE(review): lossy extraction - allocation/unused-range count updates
// and the body of the final if are missing from view.
VmaPoolStats& inoutStats)
const 10361 const VkDeviceSize unusableSize = GetUnusableSize();
10363 inoutStats.
size += GetSize();
10364 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
10369 if(unusableSize > 0)
10376 #if VMA_STATS_STRING_ENABLED 10378 void VmaBlockMetadata_Buddy::PrintDetailedMap(
// Writes a detailed JSON map of this buddy block: gathers stats first, then
// recursively prints the tree (PrintDetailedMapNode) and finally reports
// the unusable tail of the block as one unused range.
class VmaJsonWriter& json)
const 10382 CalcAllocationStatInfo(stat);
10384 PrintDetailedMap_Begin(
10390 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
10392 const VkDeviceSize unusableSize = GetUnusableSize();
10393 if(unusableSize > 0)
10395 PrintDetailedMap_UnusedRange(json,
10400 PrintDetailedMap_End(json);
10403 #endif // #if VMA_STATS_STRING_ENABLED 10405 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
// Finds a free node able to hold the requested allocation. The target level
// is derived from allocSize; levels are searched from targetLevel upward
// (larger nodes) for a suitably aligned free node. The chosen level is
// smuggled to Alloc() through pAllocationRequest->customData.
// Lost-allocation support does not apply to the buddy algorithm.
10406 uint32_t currentFrameIndex,
10407 uint32_t frameInUseCount,
10408 VkDeviceSize bufferImageGranularity,
10409 VkDeviceSize allocSize,
10410 VkDeviceSize allocAlignment,
10412 VmaSuballocationType allocType,
10413 bool canMakeOtherLost,
10415 VmaAllocationRequest* pAllocationRequest)
10417 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Conservatively satisfy bufferImageGranularity for mixed/unknown content
// by inflating both alignment and size to at least the granularity.
10421 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
10422 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
10423 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
10425 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
10426 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
10429 if(allocSize > m_UsableSize)
10434 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
// Search from the target level up to level 0 (largest nodes).
10435 for(uint32_t level = targetLevel + 1; level--; )
10437 for(Node* freeNode = m_FreeList[level].front;
10438 freeNode != VMA_NULL;
10439 freeNode = freeNode->free.next)
10441 if(freeNode->offset % allocAlignment == 0)
10443 pAllocationRequest->offset = freeNode->offset;
10444 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
10445 pAllocationRequest->sumItemSize = 0;
10446 pAllocationRequest->itemsToMakeLostCount = 0;
// Remember which level the node was found at for Alloc().
10447 pAllocationRequest->customData = (
void*)(uintptr_t)level;
10456 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
10457 uint32_t currentFrameIndex,
10458 uint32_t frameInUseCount,
10459 VmaAllocationRequest* pAllocationRequest)
10465 return pAllocationRequest->itemsToMakeLostCount == 0;
10468 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
10477 void VmaBlockMetadata_Buddy::Alloc(
// Commits an allocation request from CreateAllocationRequest: finds the
// chosen free node (level came through request.customData), splits nodes
// until the target level is reached, then marks the final node as an
// allocation and updates the counters.
10478 const VmaAllocationRequest& request,
10479 VmaSuballocationType type,
10480 VkDeviceSize allocSize,
10484 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10485 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Locate the free node with the requested offset in that level's free list.
10487 Node* currNode = m_FreeList[currLevel].front;
10488 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
10489 while(currNode->offset != request.offset)
10491 currNode = currNode->free.next;
10492 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split the node repeatedly until reaching the target level.
10496 while(currLevel < targetLevel)
// The node being split leaves the free list; its halves will be added.
10500 RemoveFromFreeList(currLevel, currNode);
10502 const uint32_t childrenLevel = currLevel + 1;
10505 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
10506 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
10508 leftChild->offset = currNode->offset;
10509 leftChild->type = Node::TYPE_FREE;
10510 leftChild->parent = currNode;
10511 leftChild->buddy = rightChild;
10513 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
10514 rightChild->type = Node::TYPE_FREE;
10515 rightChild->parent = currNode;
10516 rightChild->buddy = leftChild;
// Parent becomes a split node referencing only its left child; the right
// child is reachable as leftChild->buddy.
10519 currNode->type = Node::TYPE_SPLIT;
10520 currNode->split.leftChild = leftChild;
// Left child is pushed last so it is at the front (allocated first).
10523 AddToFreeListFront(childrenLevel, rightChild);
10524 AddToFreeListFront(childrenLevel, leftChild);
10529 currNode = m_FreeList[currLevel].front;
10538 VMA_ASSERT(currLevel == targetLevel &&
10539 currNode != VMA_NULL &&
10540 currNode->type == Node::TYPE_FREE);
10541 RemoveFromFreeList(currLevel, currNode);
// Convert the chosen free node into an allocation node.
10544 currNode->type = Node::TYPE_ALLOCATION;
10545 currNode->allocation.alloc = hAllocation;
10547 ++m_AllocationCount;
10549 m_SumFreeSize -= allocSize;
10552 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
10554 if(node->type == Node::TYPE_SPLIT)
10556 DeleteNode(node->split.leftChild->buddy);
10557 DeleteNode(node->split.leftChild);
10560 vma_delete(GetAllocationCallbacks(), node);
10563 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
// Recursively validates one node of the buddy tree against its expected
// parent, level and node size, accumulating free/allocation counts and the
// free-size total into ctx for cross-checking by Validate().
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const 10565 VMA_VALIDATE(level < m_LevelCount);
10566 VMA_VALIDATE(curr->parent == parent);
// Only the root (parent == null) has no buddy; buddies reference each other.
10567 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
10568 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
10571 case Node::TYPE_FREE:
// Free node: contributes its whole node size to the free total.
10573 ctx.calculatedSumFreeSize += levelNodeSize;
10574 ++ctx.calculatedFreeCount;
10576 case Node::TYPE_ALLOCATION:
// Allocation node: the slack beyond the allocation's size is free.
10577 ++ctx.calculatedAllocationCount;
10578 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
10579 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
10581 case Node::TYPE_SPLIT:
// Split node: recurse into both children at the next (smaller) level.
10583 const uint32_t childrenLevel = level + 1;
10584 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
10585 const Node*
const leftChild = curr->split.leftChild;
10586 VMA_VALIDATE(leftChild != VMA_NULL);
10587 VMA_VALIDATE(leftChild->offset == curr->offset);
10588 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
10590 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
10592 const Node*
const rightChild = leftChild->buddy;
10593 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
10594 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
10596 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
10607 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const 10610 uint32_t level = 0;
10611 VkDeviceSize currLevelNodeSize = m_UsableSize;
10612 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
10613 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
10616 currLevelNodeSize = nextLevelNodeSize;
10617 nextLevelNodeSize = currLevelNodeSize >> 1;
// Frees the allocation at `offset`: walks the buddy tree from the root to
// the leaf holding the allocation, marks it FREE, then merges it upward
// with its buddy while both halves are free.
// NOTE(review): this excerpt is elided/garbled — several lines (braces,
// else-branch, level bookkeeping while walking up) are missing from this
// view; visible code lines are preserved verbatim below.
10622 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
// Descend from root toward the leaf that covers `offset`.
10625 Node* node = m_Root;
10626 VkDeviceSize nodeOffset = 0;
10627 uint32_t level = 0;
10628 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
10629 while(node->type == Node::TYPE_SPLIT)
10631 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
// Left child if offset lies in the front half, else right child
// (the left child's buddy) with the node offset advanced by half.
10632 if(offset < nodeOffset + nextLevelSize)
10634 node = node->split.leftChild;
10638 node = node->split.leftChild->buddy;
10639 nodeOffset += nextLevelSize;
10642 levelNodeSize = nextLevelSize;
// The walk must end on the exact allocated leaf; `alloc` may be
// VK_NULL_HANDLE when freeing purely by offset.
10645 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
10646 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
// Update counters: one less live allocation, its bytes return to free.
10649 --m_AllocationCount;
10650 m_SumFreeSize += alloc->GetSize();
10652 node->type = Node::TYPE_FREE;
// Merge upward: while this node's buddy is also free, collapse both
// into the parent (assumes elided lines advance `node` to the parent
// and decrement `level` — TODO confirm against the full source).
10655 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
10657 RemoveFromFreeList(level, node->buddy);
10658 Node*
const parent = node->parent;
10660 vma_delete(GetAllocationCallbacks(), node->buddy);
10661 vma_delete(GetAllocationCallbacks(), node);
10662 parent->type = Node::TYPE_FREE;
// Finally put the (possibly merged) free node on its level's free list.
10670 AddToFreeListFront(level, node);
// Recursively accumulates per-node statistics (allocation sizes, unused
// ranges) into `outInfo` for the subtree rooted at `node`.
// NOTE(review): this excerpt is elided — the switch header, the statistic
// update statements inside each case, and braces are missing from this
// view; visible code lines are preserved verbatim below.
10673 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
// FREE node: the whole node counts as one unused range (update elided).
const 10677 case Node::TYPE_FREE:
// ALLOCATION leaf: record the allocation; the tail beyond the
// allocation's size counts as an unused range when non-zero.
10683 case Node::TYPE_ALLOCATION:
10685 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
10691 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
10692 if(unusedRangeSize > 0)
// SPLIT node: recurse into both children at half the node size.
10701 case Node::TYPE_SPLIT:
10703 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
10704 const Node*
const leftChild = node->split.leftChild;
10705 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
10706 const Node*
const rightChild = leftChild->buddy;
10707 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
10715 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
10717 VMA_ASSERT(node->type == Node::TYPE_FREE);
10720 Node*
const frontNode = m_FreeList[level].front;
10721 if(frontNode == VMA_NULL)
10723 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
10724 node->free.prev = node->free.next = VMA_NULL;
10725 m_FreeList[level].front = m_FreeList[level].back = node;
10729 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
10730 node->free.prev = VMA_NULL;
10731 node->free.next = frontNode;
10732 frontNode->free.prev = node;
10733 m_FreeList[level].front = node;
10737 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
10739 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
10742 if(node->free.prev == VMA_NULL)
10744 VMA_ASSERT(m_FreeList[level].front == node);
10745 m_FreeList[level].front = node->free.next;
10749 Node*
const prevFreeNode = node->free.prev;
10750 VMA_ASSERT(prevFreeNode->free.next == node);
10751 prevFreeNode->free.next = node->free.next;
10755 if(node->free.next == VMA_NULL)
10757 VMA_ASSERT(m_FreeList[level].back == node);
10758 m_FreeList[level].back = node->free.prev;
10762 Node*
const nextFreeNode = node->free.next;
10763 VMA_ASSERT(nextFreeNode->free.prev == node);
10764 nextFreeNode->free.prev = node->free.prev;
// Recursively emits the detailed JSON map for the subtree rooted at
// `node`: unused ranges for FREE nodes, allocation entries (plus any
// unused tail) for ALLOCATION leaves, and both children for SPLIT nodes.
// NOTE(review): this excerpt is elided — the switch header, break
// statements and braces are missing from this view; visible code lines
// are preserved verbatim below.
#if VMA_STATS_STRING_ENABLED 10769 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
// FREE node: the whole node is one unused range.
const 10773 case Node::TYPE_FREE:
10774 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
// ALLOCATION leaf: print the allocation, then its unused tail if the
// allocation does not fill the whole node.
10776 case Node::TYPE_ALLOCATION:
10778 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
10779 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
10780 if(allocSize < levelNodeSize)
10782 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
// SPLIT node: recurse into both children at half the node size.
10786 case Node::TYPE_SPLIT:
10788 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
10789 const Node*
const leftChild = node->split.leftChild;
10790 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
10791 const Node*
const rightChild = leftChild->buddy;
10792 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
// Constructor: initializes the block to an empty, un-Init()ed state.
// NOTE(review): some member initializers are elided from this view
// (embedded numbering skips 10808 and 10810 — presumably an id and a map
// count — TODO confirm against the full source); visible lines verbatim.
#endif // #if VMA_STATS_STRING_ENABLED 10805 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
10806 m_pMetadata(VMA_NULL),
10807 m_MemoryTypeIndex(UINT32_MAX),
10809 m_hMemory(VK_NULL_HANDLE),
10811 m_pMappedData(VMA_NULL)
// Binds freshly allocated VkDeviceMemory to this block and creates the
// metadata object whose concrete type depends on `algorithm`
// (Linear / Buddy / Generic).
// NOTE(review): the switch/if that selects between the three metadata
// types is elided from this view; visible code lines preserved verbatim.
10815 void VmaDeviceMemoryBlock::Init(
10817 uint32_t newMemoryTypeIndex,
10818 VkDeviceMemory newMemory,
10819 VkDeviceSize newSize,
10821 uint32_t algorithm)
// A block can be initialized only once.
10823 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
10825 m_MemoryTypeIndex = newMemoryTypeIndex;
10827 m_hMemory = newMemory;
// Metadata type selection (selector elided): linear algorithm...
10832 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
// ...buddy algorithm...
10835 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
// ...default generic algorithm.
10841 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
// Hand the whole block size to the metadata bookkeeping.
10843 m_pMetadata->Init(newSize);
10846 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
10850 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
10852 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
10853 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
10854 m_hMemory = VK_NULL_HANDLE;
10856 vma_delete(allocator, m_pMetadata);
10857 m_pMetadata = VMA_NULL;
10860 bool VmaDeviceMemoryBlock::Validate()
const 10862 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
10863 (m_pMetadata->GetSize() != 0));
10865 return m_pMetadata->Validate();
10868 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
10870 void* pData =
nullptr;
10871 VkResult res = Map(hAllocator, 1, &pData);
10872 if(res != VK_SUCCESS)
10877 res = m_pMetadata->CheckCorruption(pData);
10879 Unmap(hAllocator, 1);
// Reference-counted persistent mapping of the block's VkDeviceMemory.
// If already mapped, bumps the count and returns the cached pointer;
// otherwise calls vkMapMemory. `ppData` may be VMA_NULL.
// NOTE(review): this excerpt is elided — the count==0 early-out, the
// vkMapMemory argument list, and the return statements are missing from
// this view; visible code lines are preserved verbatim below.
10884 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
// Mapping state is protected by the block's own mutex.
10891 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10892 if(m_MapCount != 0)
// Already mapped: just add references and reuse the cached pointer.
10894 m_MapCount += count;
10895 VMA_ASSERT(m_pMappedData != VMA_NULL);
10896 if(ppData != VMA_NULL)
10898 *ppData = m_pMappedData;
// First mapping: call into Vulkan (argument list elided here).
10904 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
10905 hAllocator->m_hDevice,
10911 if(result == VK_SUCCESS)
10913 if(ppData != VMA_NULL)
10915 *ppData = m_pMappedData;
10917 m_MapCount = count;
// Decrements the block's map reference count by `count` and calls
// vkUnmapMemory when it reaches zero. Unbalanced unmaps trigger an assert.
// NOTE(review): a count==0 early-out appears to be elided from this view;
// visible code lines are preserved verbatim below.
10923 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
10930 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10931 if(m_MapCount >= count)
10933 m_MapCount -= count;
// Last reference gone: actually unmap the device memory.
10934 if(m_MapCount == 0)
10936 m_pMappedData = VMA_NULL;
10937 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
// Caller tried to unmap more times than it mapped.
10942 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
10946 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
10948 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
10949 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
10952 VkResult res = Map(hAllocator, 1, &pData);
10953 if(res != VK_SUCCESS)
10958 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
10959 VmaWriteMagicValue(pData, allocOffset + allocSize);
10961 Unmap(hAllocator, 1);
10966 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
10968 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
10969 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
10972 VkResult res = Map(hAllocator, 1, &pData);
10973 if(res != VK_SUCCESS)
10978 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
10980 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
10982 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
10984 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
10987 Unmap(hAllocator, 1);
// Binds a caller's VkBuffer to this block's memory at the allocation's
// offset, serialized by the block mutex (vkBindBufferMemory on the same
// VkDeviceMemory must not race).
// NOTE(review): the parameter list and the memory/buffer arguments of the
// vkBindBufferMemory call are elided from this view; visible code lines
// are preserved verbatim below.
10992 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
// The allocation must be a block suballocation living in this block.
10997 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
10998 hAllocation->GetBlock() ==
this);
101000 is elided; the bind itself is guarded by the block mutex:
11000 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11001 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
11002 hAllocator->m_hDevice,
11005 hAllocation->GetOffset());
// Binds a caller's VkImage to this block's memory at the allocation's
// offset; mirror of BindBufferMemory, serialized by the same block mutex.
// NOTE(review): the parameter list and the memory/image arguments of the
// vkBindImageMemory call are elided from this view; visible code lines
// are preserved verbatim below.
11008 VkResult VmaDeviceMemoryBlock::BindImageMemory(
// The allocation must be a block suballocation living in this block.
11013 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11014 hAllocation->GetBlock() ==
this);
11016 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11017 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
11018 hAllocator->m_hDevice,
11021 hAllocation->GetOffset());
// NOTE(review): orphaned fragments — the enclosing function headers/bodies
// are elided from this view. The memset presumably belongs to a stat-info
// initializer that zeroes a VmaStatInfo out-parameter; the following
// signature is a postprocessing helper whose body is elided. Preserved
// verbatim below — TODO confirm against the full source.
11026 memset(&outInfo, 0,
sizeof(outInfo));
11045 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool_T constructor: forwards VmaPoolCreateInfo fields into the
// embedded VmaBlockVector. A zero createInfo.blockSize means "use the
// allocator's preferred block size" and marks the size as non-explicit.
// NOTE(review): parts of the parameter and initializer lists (and the
// destructor body) are elided from this view; visible lines verbatim.
11053 VmaPool_T::VmaPool_T(
11056 VkDeviceSize preferredBlockSize) :
11059 createInfo.memoryTypeIndex,
// Fall back to the allocator-computed preferred size when 0.
11060 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
11061 createInfo.minBlockCount,
11062 createInfo.maxBlockCount,
11064 createInfo.frameInUseCount,
// explicitBlockSize flag: true only when the user fixed the size.
11066 createInfo.blockSize != 0,
// Destructor (body elided in this view).
11072 VmaPool_T::~VmaPool_T()
// VmaBlockVector constructor: stores the allocator handle and all pool
// parameters, and prepares an empty block array using the allocator's
// allocation callbacks.
// NOTE(review): a couple of parameters (e.g. the isCustomPool flag used
// by m_IsCustomPool) and trailing initializers are elided from this view;
// visible lines are preserved verbatim below.
#if VMA_STATS_STRING_ENABLED 11078 #endif // #if VMA_STATS_STRING_ENABLED 11080 VmaBlockVector::VmaBlockVector(
11082 uint32_t memoryTypeIndex,
11083 VkDeviceSize preferredBlockSize,
11084 size_t minBlockCount,
11085 size_t maxBlockCount,
11086 VkDeviceSize bufferImageGranularity,
11087 uint32_t frameInUseCount,
11089 bool explicitBlockSize,
11090 uint32_t algorithm) :
11091 m_hAllocator(hAllocator),
11092 m_MemoryTypeIndex(memoryTypeIndex),
11093 m_PreferredBlockSize(preferredBlockSize),
11094 m_MinBlockCount(minBlockCount),
11095 m_MaxBlockCount(maxBlockCount),
11096 m_BufferImageGranularity(bufferImageGranularity),
11097 m_FrameInUseCount(frameInUseCount),
11098 m_IsCustomPool(isCustomPool),
11099 m_ExplicitBlockSize(explicitBlockSize),
11100 m_Algorithm(algorithm),
11101 m_HasEmptyBlock(false),
11102 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
11107 VmaBlockVector::~VmaBlockVector()
11109 for(
size_t i = m_Blocks.size(); i--; )
11111 m_Blocks[i]->Destroy(m_hAllocator);
11112 vma_delete(m_hAllocator, m_Blocks[i]);
11116 VkResult VmaBlockVector::CreateMinBlocks()
11118 for(
size_t i = 0; i < m_MinBlockCount; ++i)
11120 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
11121 if(res != VK_SUCCESS)
// Fills `pStats` with aggregated statistics from all blocks, under a
// shared (read) lock.
// NOTE(review): the zero-initialization of pStats' fields is elided from
// this view (numbering skips 11134-11141); visible lines verbatim.
11129 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
11131 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
11133 const size_t blockCount = m_Blocks.size();
// Accumulate each block's metadata statistics into the out-struct.
11142 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11144 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11145 VMA_ASSERT(pBlock);
11146 VMA_HEAVY_ASSERT(pBlock->Validate());
11147 pBlock->m_pMetadata->AddPoolStats(*pStats);
11151 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 11153 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
11154 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
11155 (VMA_DEBUG_MARGIN > 0) &&
11156 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
// Upper bound on retry passes when allocations may be made lost.
11159 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Main allocation routine of the block vector. Strategy, in order:
// 1) try existing blocks (last block first, then forward or backward scan),
// 2) create a new block (shrinking its size up to 3 times on failure when
//    the block size is not user-fixed),
// 3) if allowed, find the cheapest way to make other allocations lost and
//    retry up to VMA_ALLOCATION_TRY_COUNT times.
// NOTE(review): this excerpt is heavily elided — parameter lists, many
// argument lists (AllocateFromBlock / CreateAllocationRequest /
// InitBlockAllocation), braces, and several conditions are missing from
// this view; visible code lines are preserved verbatim below.
11161 VkResult VmaBlockVector::Allocate(
11163 uint32_t currentFrameIndex,
11165 VkDeviceSize alignment,
11167 VmaSuballocationType suballocType,
// New blocks may be created only while below the max block count
// (additional conditions elided).
11174 const bool canCreateNewBlock =
11176 (m_Blocks.size() < m_MaxBlockCount);
11183 canMakeOtherLost =
false;
// Upper-address allocation is only valid in specific configurations
// (condition partially elided).
11187 if(isUpperAddress &&
11190 return VK_ERROR_FEATURE_NOT_PRESENT;
11204 return VK_ERROR_FEATURE_NOT_PRESENT;
// A request larger than a whole block (including both debug margins)
// can never succeed here.
11208 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
11210 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11213 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11220 if(!canMakeOtherLost || canCreateNewBlock)
// Fast path: try the most recently used (last) block first.
11229 if(!m_Blocks.empty())
11231 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
11232 VMA_ASSERT(pCurrBlock);
11233 VkResult res = AllocateFromBlock(
11244 if(res == VK_SUCCESS)
11246 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
// Forward scan over all existing blocks.
11256 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11258 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11259 VMA_ASSERT(pCurrBlock);
11260 VkResult res = AllocateFromBlock(
11271 if(res == VK_SUCCESS)
11273 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// Backward scan variant (strategy selection between the two is elided).
11281 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11283 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11284 VMA_ASSERT(pCurrBlock);
11285 VkResult res = AllocateFromBlock(
11296 if(res == VK_SUCCESS)
11298 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// No existing block fits: create a new one.
11306 if(canCreateNewBlock)
11309 VkDeviceSize newBlockSize = m_PreferredBlockSize;
11310 uint32_t newBlockSizeShift = 0;
11311 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// With a non-fixed block size, start smaller than preferred when the
// pool is young: halve while still bigger than any existing block and
// at least twice the request.
11313 if(!m_ExplicitBlockSize)
11316 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
11317 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
11319 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11320 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
11322 newBlockSize = smallerNewBlockSize;
11323 ++newBlockSizeShift;
11332 size_t newBlockIndex = 0;
11333 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On allocation failure keep halving (while the request still fits)
// and retry, up to the shift limit.
11335 if(!m_ExplicitBlockSize)
11337 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
11339 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11340 if(smallerNewBlockSize >= size)
11342 newBlockSize = smallerNewBlockSize;
11343 ++newBlockSizeShift;
11344 res = CreateBlock(newBlockSize, &newBlockIndex);
11353 if(res == VK_SUCCESS)
11355 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
11356 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
// Allocating from a freshly created block is expected to succeed.
11358 res = AllocateFromBlock(
11369 if(res == VK_SUCCESS)
11371 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
11377 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Last resort: make other (lost-able) allocations lost and retry.
11384 if(canMakeOtherLost)
11386 uint32_t tryIndex = 0;
11387 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
11389 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
11390 VmaAllocationRequest bestRequest = {};
11391 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Forward scan: pick the request with the lowest "cost" (bytes lost).
11397 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11399 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11400 VMA_ASSERT(pCurrBlock);
11401 VmaAllocationRequest currRequest = {};
11402 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11405 m_BufferImageGranularity,
11414 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11415 if(pBestRequestBlock == VMA_NULL ||
11416 currRequestCost < bestRequestCost)
11418 pBestRequestBlock = pCurrBlock;
11419 bestRequest = currRequest;
11420 bestRequestCost = currRequestCost;
// Cost 0 means nothing needs to be lost — stop searching.
11422 if(bestRequestCost == 0)
// Backward scan variant (strategy selection elided).
11433 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11435 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11436 VMA_ASSERT(pCurrBlock);
11437 VmaAllocationRequest currRequest = {};
11438 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11441 m_BufferImageGranularity,
11450 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11451 if(pBestRequestBlock == VMA_NULL ||
11452 currRequestCost < bestRequestCost ||
11455 pBestRequestBlock = pCurrBlock;
11456 bestRequest = currRequest;
11457 bestRequestCost = currRequestCost;
11459 if(bestRequestCost == 0 ||
// Commit to the best candidate found in this pass.
11469 if(pBestRequestBlock != VMA_NULL)
11473 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
11474 if(res != VK_SUCCESS)
// The request may have been invalidated concurrently; on failure the
// outer loop retries (flow partially elided).
11480 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
11486 if(pBestRequestBlock->m_pMetadata->IsEmpty())
11488 m_HasEmptyBlock =
false;
// Construct the allocation object and register it in the metadata.
11491 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
11492 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, isUpperAddress, *pAllocation);
11493 (*pAllocation)->InitBlockAllocation(
11496 bestRequest.offset,
11502 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
11503 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
11504 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// Optionally fill fresh memory with a debug pattern.
11505 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11507 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Optionally surround the allocation with corruption-detection markers.
11509 if(IsCorruptionDetectionEnabled())
11511 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
11512 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Exhausted the retry budget: too much churn to make progress.
11527 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
11529 return VK_ERROR_TOO_MANY_OBJECTS;
11533 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees `hAllocation` back to its block. Under the write lock it validates
// corruption markers, drops any persistent mapping, frees the metadata
// entry, and manages the "at most one empty block" policy; the actual
// destruction of a fully empty surplus block happens after the lock is
// released.
// NOTE(review): this excerpt is elided — the parameter list, the lock
// scope braces, and some else-branches are missing from this view;
// visible code lines are preserved verbatim below.
11536 void VmaBlockVector::Free(
// Deferred outside the lock to keep the critical section short.
11539 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
11543 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11545 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
// Check the debug margins around the allocation before releasing it.
11547 if(IsCorruptionDetectionEnabled())
11549 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
11550 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
// Persistently mapped allocations hold one map reference on the block.
11553 if(hAllocation->IsPersistentMap())
11555 pBlock->Unmap(m_hAllocator, 1);
11558 pBlock->m_pMetadata->Free(hAllocation);
11559 VMA_HEAVY_ASSERT(pBlock->Validate());
11561 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// Keep at most one empty block alive (above the minimum block count).
11564 if(pBlock->m_pMetadata->IsEmpty())
11567 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
11569 pBlockToDelete = pBlock;
11575 m_HasEmptyBlock =
true;
// This block became non-empty; if the last block is the empty one and
// surplus, schedule it for deletion instead.
11580 else if(m_HasEmptyBlock)
11582 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
11583 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
11585 pBlockToDelete = pLastBlock;
11586 m_Blocks.pop_back();
11587 m_HasEmptyBlock =
false;
11591 IncrementallySortBlocks();
// Destruction of the VkDeviceMemory happens outside the mutex.
11596 if(pBlockToDelete != VMA_NULL)
11598 VMA_DEBUG_LOG(
" Deleted empty allocation");
11599 pBlockToDelete->Destroy(m_hAllocator);
11600 vma_delete(m_hAllocator, pBlockToDelete);
11604 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 11606 VkDeviceSize result = 0;
11607 for(
size_t i = m_Blocks.size(); i--; )
11609 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
11610 if(result >= m_PreferredBlockSize)
11618 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
11620 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11622 if(m_Blocks[blockIndex] == pBlock)
11624 VmaVectorRemove(m_Blocks, blockIndex);
11631 void VmaBlockVector::IncrementallySortBlocks()
11636 for(
size_t i = 1; i < m_Blocks.size(); ++i)
11638 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
11640 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Attempts one allocation from a specific block: builds an allocation
// request via the block's metadata, and on success maps (if persistent),
// constructs the VmaAllocation, fills debug patterns and corruption
// markers. Returns VK_ERROR_OUT_OF_DEVICE_MEMORY when the block cannot
// satisfy the request.
// NOTE(review): this excerpt is elided — several parameters, the
// CreateAllocationRequest argument list, and braces are missing from this
// view; visible code lines are preserved verbatim below.
11647 VkResult VmaBlockVector::AllocateFromBlock(
11648 VmaDeviceMemoryBlock* pBlock,
11650 uint32_t currentFrameIndex,
11652 VkDeviceSize alignment,
11655 VmaSuballocationType suballocType,
11664 VmaAllocationRequest currRequest = {};
11665 if(pBlock->m_pMetadata->CreateAllocationRequest(
11668 m_BufferImageGranularity,
// This path never makes other allocations lost.
11678 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistent mapping takes one map reference on the block.
11682 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
11683 if(res != VK_SUCCESS)
// The block is about to become non-empty.
11690 if(pBlock->m_pMetadata->IsEmpty())
11692 m_HasEmptyBlock =
false;
// Construct the allocation object and register it in the metadata.
11695 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
11696 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, isUpperAddress, *pAllocation);
11697 (*pAllocation)->InitBlockAllocation(
11700 currRequest.offset,
11706 VMA_HEAVY_ASSERT(pBlock->Validate());
11707 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
// Optional debug fill of freshly created allocations.
11708 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11710 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Optional corruption-detection markers around the allocation.
11712 if(IsCorruptionDetectionEnabled())
11714 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
11715 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Request could not be created: block is full / unsuitable.
11719 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates a new VkDeviceMemory of `blockSize`, wraps it in a
// VmaDeviceMemoryBlock, appends it to the array, and optionally reports
// the new block's index via `pNewBlockIndex`.
// NOTE(review): the failure check after AllocateVulkanMemory and most of
// the pBlock->Init argument list are elided from this view; visible code
// lines are preserved verbatim below.
11722 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
11724 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
11725 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
11726 allocInfo.allocationSize = blockSize;
11727 VkDeviceMemory mem = VK_NULL_HANDLE;
11728 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
// (Error-return on res < 0 elided here.)
// New VmaDeviceMemoryBlock takes ownership of `mem` via Init (args elided).
11737 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
11742 allocInfo.allocationSize,
11746 m_Blocks.push_back(pBlock);
11747 if(pNewBlockIndex != VMA_NULL)
11749 *pNewBlockIndex = m_Blocks.size() - 1;
// Executes defragmentation moves on the CPU via memcpy between mapped
// blocks: (1) maps every block touched by a move (remembering which maps
// it created), (2) for each move invalidates the source range if the
// memory type is non-coherent, copies the bytes, rewrites corruption
// markers, and flushes the destination range, (3) unmaps blocks it mapped.
// NOTE(review): this excerpt is elided — struct definitions, braces, the
// memcpy call header and isNonCoherent guards are missing from this view;
// visible code lines are preserved verbatim below.
11755 void VmaBlockVector::ApplyDefragmentationMovesCpu(
11756 class VmaBlockVectorDefragmentationContext* pDefragCtx,
11757 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
11759 const size_t blockCount = m_Blocks.size();
11760 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
// Per-block flags for this operation.
11764 BLOCK_FLAG_USED = 0x00000001,
11765 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
11773 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
11774 blockInfo(blockCount, VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
11775 memset(blockInfo.data(), 0, blockCount *
sizeof(BlockInfo));
// Mark every block that participates in at least one move.
11778 const size_t moveCount = moves.size();
11779 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
11781 const VmaDefragmentationMove& move = moves[moveIndex];
11782 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
11783 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
11786 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Map all used blocks; remember which ones we mapped ourselves so they
// can be unmapped afterwards.
11789 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
11791 BlockInfo& currBlockInfo = blockInfo[blockIndex];
11792 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
11793 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
11795 currBlockInfo.pMappedData = pBlock->GetMappedData();
// Not already persistently mapped: map it just for defragmentation.
11797 if(currBlockInfo.pMappedData == VMA_NULL)
11799 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
11800 if(pDefragCtx->res == VK_SUCCESS)
11802 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
// Perform the actual data moves.
11809 if(pDefragCtx->res == VK_SUCCESS)
11811 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
11812 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
11814 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
11816 const VmaDefragmentationMove& move = moves[moveIndex];
11818 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
11819 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
11821 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
// Non-coherent memory: invalidate the source range, aligned to
// nonCoherentAtomSize and clamped to the block size (guard elided).
11826 VmaDeviceMemoryBlock*
const pSrcBlock = m_Blocks[move.srcBlockIndex];
11827 memRange.memory = pSrcBlock->GetDeviceMemory();
11828 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
11829 memRange.size = VMA_MIN(
11830 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
11831 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
11832 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// The memcpy itself (call header elided): dst <- src, move.size bytes.
11837 reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
11838 reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
11839 static_cast<size_t>(move.size));
// Re-stamp corruption markers around the moved allocation.
11841 if(IsCorruptionDetectionEnabled())
11843 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
11844 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
// Non-coherent memory: flush the destination range (guard elided).
11850 VmaDeviceMemoryBlock*
const pDstBlock = m_Blocks[move.dstBlockIndex];
11851 memRange.memory = pDstBlock->GetDeviceMemory();
11852 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
11853 memRange.size = VMA_MIN(
11854 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
11855 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
11856 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// Unmap only the blocks this function mapped (reverse order).
11863 for(
size_t blockIndex = blockCount; blockIndex--; )
11865 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
11866 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
11868 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
11869 pBlock->Unmap(m_hAllocator, 1);
// Records defragmentation moves into `commandBuffer` for GPU execution:
// creates a transfer-src/dst VkBuffer spanning each participating block,
// binds it to the block's memory, and records vkCmdCopyBuffer per move.
// Buffers are destroyed later in DefragmentationEnd.
// NOTE(review): this excerpt is elided — braces and the VkBufferCopy
// member initializers are missing from this view; visible code lines are
// preserved verbatim below.
11874 void VmaBlockVector::ApplyDefragmentationMovesGpu(
11875 class VmaBlockVectorDefragmentationContext* pDefragCtx,
11876 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
11877 VkCommandBuffer commandBuffer)
11879 const size_t blockCount = m_Blocks.size();
11881 pDefragCtx->blockContexts.resize(blockCount);
11882 memset(pDefragCtx->blockContexts.data(), 0, blockCount *
sizeof(VmaBlockDefragmentationContext));
// Mark every block that participates in at least one move.
11885 const size_t moveCount = moves.size();
11886 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
11888 const VmaDefragmentationMove& move = moves[moveIndex];
11889 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
11890 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
11893 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Create one whole-block buffer per used block and bind it at offset 0.
11897 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
11898 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
11899 VK_BUFFER_USAGE_TRANSFER_DST_BIT;
11901 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
11903 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
11904 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
11905 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
11907 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
11908 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
11909 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
11910 if(pDefragCtx->res == VK_SUCCESS)
11912 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
11913 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
// Record one copy region per move (region initializers elided).
11920 if(pDefragCtx->res == VK_SUCCESS)
11922 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
11923 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
11925 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
11927 const VmaDefragmentationMove& move = moves[moveIndex];
11929 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
11930 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
11932 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
11934 VkBufferCopy region = {
// BUG(review): "®ion" below is mojibake — "&reg" was collapsed into
// the '®' character; the call should pass `&region`. Preserved
// verbatim here because surrounding elision prevents a safe rewrite.
11938 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
11939 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, ®ion);
// Copies were recorded but not yet executed: context stays NOT_READY
// until the caller submits and completes the command buffer.
11944 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
11946 pDefragCtx->res = VK_NOT_READY;
// NOTE(review): the enclosing function header is elided from this view —
// judging by the `pDefragmentationStats` parameter this is presumably the
// body of VmaBlockVector::FreeEmptyBlocks (TODO confirm). The visible
// logic scans blocks from the back, destroys empty blocks while the count
// stays above m_MinBlockCount (recording freed bytes in the stats), and
// re-derives the m_HasEmptyBlock flag. Visible lines preserved verbatim.
11952 m_HasEmptyBlock =
false;
11953 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11955 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
11956 if(pBlock->m_pMetadata->IsEmpty())
// Above the minimum: this empty block can be destroyed outright.
11958 if(m_Blocks.size() > m_MinBlockCount)
11960 if(pDefragmentationStats != VMA_NULL)
11963 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
11966 VmaVectorRemove(m_Blocks, blockIndex);
11967 pBlock->Destroy(m_hAllocator);
11968 vma_delete(m_hAllocator, pBlock);
// At the minimum: keep the empty block and remember we have one.
11972 m_HasEmptyBlock =
true;
// Writes this block vector's state as a JSON object: pool parameters
// (or preferred block size for default pools) followed by a "Blocks"
// object with one entry per block keyed by block id.
// NOTE(review): this excerpt is elided — the custom-pool/default-pool
// branch selector, closing EndObject calls and braces are missing from
// this view; visible code lines are preserved verbatim below.
#if VMA_STATS_STRING_ENABLED 11980 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
11982 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
11984 json.BeginObject();
// Custom-pool branch (selector elided): emit full pool parameters.
11988 json.WriteString(
"MemoryTypeIndex");
11989 json.WriteNumber(m_MemoryTypeIndex);
11991 json.WriteString(
"BlockSize");
11992 json.WriteNumber(m_PreferredBlockSize);
11994 json.WriteString(
"BlockCount");
11995 json.BeginObject(
true);
11996 if(m_MinBlockCount > 0)
11998 json.WriteString(
"Min");
11999 json.WriteNumber((uint64_t)m_MinBlockCount);
12001 if(m_MaxBlockCount < SIZE_MAX)
12003 json.WriteString(
"Max");
12004 json.WriteNumber((uint64_t)m_MaxBlockCount);
12006 json.WriteString(
"Cur");
12007 json.WriteNumber((uint64_t)m_Blocks.size());
12010 if(m_FrameInUseCount > 0)
12012 json.WriteString(
"FrameInUseCount");
12013 json.WriteNumber(m_FrameInUseCount);
12016 if(m_Algorithm != 0)
12018 json.WriteString(
"Algorithm");
12019 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
// Default-pool branch (selector elided): only the preferred size.
12024 json.WriteString(
"PreferredBlockSize");
12025 json.WriteNumber(m_PreferredBlockSize);
// Per-block detailed maps keyed by block id.
12028 json.WriteString(
"Blocks");
12029 json.BeginObject();
12030 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12032 json.BeginString();
12033 json.ContinueString(m_Blocks[i]->GetId());
12036 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// Runs one defragmentation pass over this block vector. Chooses CPU vs
// GPU execution from the remaining budgets and the memory type's
// properties, takes the write lock (released later in
// DefragmentationEnd), computes moves via the context's algorithm,
// subtracts the consumed budget, and applies the moves on the chosen path.
// NOTE(review): this excerpt is elided — some parameters and braces are
// missing from this view; visible code lines are preserved verbatim below.
12043 #endif // #if VMA_STATS_STRING_ENABLED 12045 void VmaBlockVector::Defragment(
12046 class VmaBlockVectorDefragmentationContext* pCtx,
12048 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
12049 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
12050 VkCommandBuffer commandBuffer)
12052 pCtx->res = VK_SUCCESS;
12054 const VkMemoryPropertyFlags memPropFlags =
12055 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
// CPU defragmentation needs host-visible memory plus remaining budget;
// GPU defragmentation only needs remaining budget.
12056 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
12057 (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
12058 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0;
12061 if(canDefragmentOnCpu || canDefragmentOnGpu)
12063 bool defragmentOnGpu;
// Only one option available: take it.
12065 if(canDefragmentOnGpu != canDefragmentOnCpu)
12067 defragmentOnGpu = canDefragmentOnGpu;
// Both available: prefer GPU for device-local memory or integrated GPUs.
12072 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
12073 m_hAllocator->IsIntegratedGpu();
// CPU memmove-style copies tolerate overlap; GPU buffer copies do not.
12076 bool overlappingMoveSupported = !defragmentOnGpu;
// Lock is held across the whole operation; DefragmentationEnd unlocks.
12078 if(m_hAllocator->m_UseMutex)
12080 m_Mutex.LockWrite();
12081 pCtx->mutexLocked =
true;
12084 pCtx->Begin(overlappingMoveSupported);
// Compute the move list within the budget for the chosen path.
12088 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
12089 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
12090 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves =
12091 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()));
12092 pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);
// Subtract what the algorithm actually consumed from the budget.
12095 if(pStats != VMA_NULL)
12097 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
12098 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
12101 VMA_ASSERT(bytesMoved <= maxBytesToMove);
12102 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
12103 if(defragmentOnGpu)
12105 maxGpuBytesToMove -= bytesMoved;
12106 maxGpuAllocationsToMove -= allocationsMoved;
12110 maxCpuBytesToMove -= bytesMoved;
12111 maxCpuAllocationsToMove -= allocationsMoved;
// Apply the computed moves on the chosen execution path.
12115 if(pCtx->res >= VK_SUCCESS)
12117 if(defragmentOnGpu)
12119 ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
12123 ApplyDefragmentationMovesCpu(pCtx, moves);
// Finishes a defragmentation pass: destroys the temporary VkBuffers
// created for GPU moves, frees now-empty surplus blocks on success, and
// releases the write lock taken in Defragment().
// NOTE(review): a stats parameter (`pStats`, used by FreeEmptyBlocks) is
// elided from the visible parameter list; visible lines verbatim.
12129 void VmaBlockVector::DefragmentationEnd(
12130 class VmaBlockVectorDefragmentationContext* pCtx,
// Destroy per-block staging buffers in reverse order.
12134 for(
size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
12136 VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
12137 if(blockCtx.hBuffer)
12139 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
12140 m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
// Reclaim blocks emptied by the moves (only if the pass succeeded).
12144 if(pCtx->res >= VK_SUCCESS)
12146 FreeEmptyBlocks(pStats);
// Release the lock acquired in Defragment().
12149 if(pCtx->mutexLocked)
12151 VMA_ASSERT(m_hAllocator->m_UseMutex);
12152 m_Mutex.UnlockWrite();
// Returns the total number of live allocations across all blocks of this
// vector, by summing each block's metadata GetAllocationCount().
// NOTE(review): the accumulator declaration and return statement were elided
// from this chunk (numbering jumps 12156 -> 12159).
12156 size_t VmaBlockVector::CalcAllocationCount()
const 12159 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12161 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
// Conservatively checks whether any block could contain a buffer/image
// granularity conflict. Trivially impossible when granularity == 1.
// lastSuballocType carries state across blocks so adjacent suballocations of
// differing type can be detected by the per-block metadata query.
// NOTE(review): early-return bodies are elided from this chunk.
12166 bool VmaBlockVector::IsBufferImageGranularityConflictPossible()
const 12168 if(m_BufferImageGranularity == 1)
12172 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
12173 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
12175 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[i];
// Only the generic (default) algorithm is expected here; the cast below
// relies on it.
12176 VMA_ASSERT(m_Algorithm == 0);
12177 VmaBlockMetadata_Generic*
const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
12178 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
// Marks allocations in all blocks as lost for the given frame, under the
// vector's write lock. The per-block metadata decides which allocations
// qualify based on currentFrameIndex and m_FrameInUseCount.
// Optionally reports the total count via pLostAllocationCount.
12186 void VmaBlockVector::MakePoolAllocationsLost(
12187 uint32_t currentFrameIndex,
12188 size_t* pLostAllocationCount)
12190 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12191 size_t lostAllocationCount = 0;
12192 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12194 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12195 VMA_ASSERT(pBlock);
12196 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
// Output parameter is optional.
12198 if(pLostAllocationCount != VMA_NULL)
12200 *pLostAllocationCount = lostAllocationCount;
// Validates corruption-detection margins in every block. Returns
// VK_ERROR_FEATURE_NOT_PRESENT when corruption detection is not enabled for
// this vector; otherwise checks each block under a read lock and propagates
// the first failure (success-path return is elided from this chunk).
12204 VkResult VmaBlockVector::CheckCorruption()
12206 if(!IsCorruptionDetectionEnabled())
12208 return VK_ERROR_FEATURE_NOT_PRESENT;
12211 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12212 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12214 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12215 VMA_ASSERT(pBlock);
12216 VkResult res = pBlock->CheckCorruption(m_hAllocator);
12217 if(res != VK_SUCCESS)
// Accumulates this vector's per-block allocation statistics into pStats,
// folding each block's stat-info into the global total, the stats for this
// memory type, and the stats for the owning memory heap.
// Runs under a read lock; blocks are not modified.
12225 void VmaBlockVector::AddStats(
VmaStats* pStats)
12227 const uint32_t memTypeIndex = m_MemoryTypeIndex;
12228 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
12230 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12232 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12234 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12235 VMA_ASSERT(pBlock);
12236 VMA_HEAVY_ASSERT(pBlock->Validate());
// NOTE(review): allocationStatInfo's declaration is elided from this chunk.
12238 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
12239 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12240 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12241 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor of the generic (slow but general) defragmentation algorithm.
// Snapshots the block vector: one heap-allocated BlockInfo per existing block,
// remembering each block's original index, then sorts the snapshot by block
// pointer so AddAllocation() can binary-search it (see BlockPointerLess).
12248 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
12250 VmaBlockVector* pBlockVector,
12251 uint32_t currentFrameIndex,
12252 bool overlappingMoveSupported) :
12253 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12254 m_AllAllocations(false),
12255 m_AllocationCount(0),
12257 m_AllocationsMoved(0),
12258 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
12261 const size_t blockCount = m_pBlockVector->m_Blocks.size();
12262 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12264 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
12265 pBlockInfo->m_OriginalBlockIndex = blockIndex;
12266 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
12267 m_Blocks.push_back(pBlockInfo);
// Sort by VmaDeviceMemoryBlock pointer to enable binary search later.
12271 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Destructor: releases the BlockInfo objects allocated in the constructor,
// iterating backwards (order is irrelevant; reverse loop avoids re-reading
// size()).
12274 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
12276 for(
size_t i = m_Blocks.size(); i--; )
12278 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers a single allocation as a candidate to be moved. Lost allocations
// are skipped. The owning block is located via binary search over m_Blocks
// (sorted by block pointer in the constructor), and the allocation (with its
// optional pChanged output flag) is appended to that block's list.
12282 void VmaDefragmentationAlgorithm_Generic::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
12285 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
12287 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
12288 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
12289 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
12291 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
12292 (*it)->m_Allocations.push_back(allocInfo);
// Counted regardless of branch structure visible here (some lines elided).
12299 ++m_AllocationCount;
// One round of the generic defragmentation: walks candidate allocations from
// the last block backwards and tries to re-place each one into an earlier
// block (or earlier offset) via CreateAllocationRequest + MoveMakesSense.
// Successful placements are recorded in `moves` and applied immediately to
// the metadata (Alloc in dst, FreeAtOffset in src, ChangeBlockAllocation).
// Stops when maxBytesToMove / maxAllocationsToMove would be exceeded.
// NOTE(review): many control-flow lines (returns, else branches, braces) are
// elided from this chunk; code left byte-identical.
12303 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
12304 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12305 VkDeviceSize maxBytesToMove,
12306 uint32_t maxAllocationsToMove)
12308 if(m_Blocks.empty())
12321 size_t srcBlockMinIndex = 0;
// Start from the last (most free, per sort) block, with srcAllocIndex set to
// SIZE_MAX as a sentinel meaning "pick the last allocation of this block".
12334 size_t srcBlockIndex = m_Blocks.size() - 1;
12335 size_t srcAllocIndex = SIZE_MAX;
// Find the next source allocation, walking to earlier blocks when the
// current one has none left.
12341 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
12343 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
12346 if(srcBlockIndex == srcBlockMinIndex)
12353 srcAllocIndex = SIZE_MAX;
12358 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
12362 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
12363 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
12365 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
12366 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
12367 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
12368 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try every destination block at or before the source block.
12371 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
12373 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
12374 VmaAllocationRequest dstAllocRequest;
12375 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
12376 m_CurrentFrameIndex,
12377 m_pBlockVector->GetFrameInUseCount(),
12378 m_pBlockVector->GetBufferImageGranularity(),
12385 &dstAllocRequest) &&
12387 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
// Moves must never require making other allocations lost.
12389 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Respect the caller-imposed budget before committing the move.
12392 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
12393 (m_BytesMoved + size > maxBytesToMove))
12398 VmaDefragmentationMove move;
12399 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
12400 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
12401 move.srcOffset = srcOffset;
12402 move.dstOffset = dstAllocRequest.offset;
12404 moves.push_back(move);
// Commit the move in metadata: allocate at destination, free at source,
// repoint the allocation handle to its new block/offset.
12406 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
12411 allocInfo.m_hAllocation);
12412 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
12414 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
// Report per-allocation change to the caller, if requested.
12416 if(allocInfo.m_pChanged != VMA_NULL)
12418 *allocInfo.m_pChanged = VK_TRUE;
12421 ++m_AllocationsMoved;
12422 m_BytesMoved += size;
12424 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the next source candidate (earlier allocation or earlier block).
12432 if(srcAllocIndex > 0)
12438 if(srcBlockIndex > 0)
12441 srcAllocIndex = SIZE_MAX;
// Counts how many snapshot blocks contain at least one non-movable
// allocation (m_HasNonMovableAllocations flag computed elsewhere).
// NOTE(review): accumulator declaration and return are elided from this chunk.
12451 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount()
const 12454 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12456 if(m_Blocks[i]->m_HasNonMovableAllocations)
// Entry point of the generic algorithm. Prepares per-block allocation lists
// (gathering every non-free suballocation when m_AllAllocations is set),
// computes the non-movable flag, sorts allocations within each block by
// descending offset, sorts blocks by destination preference, then runs up to
// `roundCount` passes of DefragmentRound until the budget is hit or a round
// fails.
12464 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
12465 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12466 VkDeviceSize maxBytesToMove,
12467 uint32_t maxAllocationsToMove)
// Nothing to do when no allocations were registered and AddAll wasn't used.
12469 if(!m_AllAllocations && m_AllocationCount == 0)
12474 const size_t blockCount = m_Blocks.size();
12475 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12477 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
// "Defragment everything" mode: pull every used suballocation from the
// block's metadata instead of relying on AddAllocation calls.
12479 if(m_AllAllocations)
12481 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
12482 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
12483 it != pMetadata->m_Suballocations.end();
12486 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
12488 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
12489 pBlockInfo->m_Allocations.push_back(allocInfo);
12494 pBlockInfo->CalcHasNonMovableAllocations();
// Processing allocations from highest offset first empties block tails.
12498 pBlockInfo->SortAllocationsByOffsetDescending();
12504 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Two rounds are executed at most; a round returning non-success stops early.
12507 const uint32_t roundCount = 2;
12510 VkResult result = VK_SUCCESS;
12511 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
12513 result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
// Heuristic: a move is worthwhile only if it goes to an earlier block, or —
// within the same block — to a lower offset. Moving to a later block or a
// higher offset would not compact memory.
// NOTE(review): the return statements are elided from this chunk.
12519 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
12520 size_t dstBlockIndex, VkDeviceSize dstOffset,
12521 size_t srcBlockIndex, VkDeviceSize srcOffset)
12523 if(dstBlockIndex < srcBlockIndex)
12527 if(dstBlockIndex > srcBlockIndex)
12531 if(dstOffset < srcOffset)
// Constructor of the fast defragmentation algorithm. Only initializes
// members; the asserted precondition documents that this algorithm assumes
// no debug margins between allocations (it compacts suballocations tightly).
12541 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
12543 VmaBlockVector* pBlockVector,
12544 uint32_t currentFrameIndex,
12545 bool overlappingMoveSupported) :
12546 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12547 m_OverlappingMoveSupported(overlappingMoveSupported),
12548 m_AllocationCount(0),
12549 m_AllAllocations(false),
12551 m_AllocationsMoved(0),
12552 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
// The fast path cannot preserve VMA_DEBUG_MARGIN padding between allocations.
12554 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
// Destructor — no owned resources beyond members with their own destructors.
12558 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
// Fast defragmentation: requires ALL allocations to participate (see the
// first assert). Strategy:
//  1. PreprocessMetadata() strips free suballocations from each block's list.
//  2. Blocks are sorted by ascending sum of free space; allocations are then
//     compacted front-to-back into the emptiest-first order, tracking a
//     single rolling destination cursor (dstBlockInfoIndex/dstOffset).
//  3. Gaps that get skipped are registered in a FreeSpaceDatabase and can be
//     reused by later (smaller) allocations.
//  4. PostprocessMetadata() rebuilds free-list metadata afterwards.
// Three placement cases below: reuse registered free space (same block vs
// earlier block), or append at the rolling cursor (same block vs earlier
// block).
// NOTE(review): numerous braces/else-arms and a few statements are elided
// from this chunk (embedded numbering jumps); code left byte-identical.
12562 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
12563 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12564 VkDeviceSize maxBytesToMove,
12565 uint32_t maxAllocationsToMove)
// Fast mode only works when every allocation was registered (or AddAll used).
12567 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
12569 const size_t blockCount = m_pBlockVector->GetBlockCount();
12570 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
12575 PreprocessMetadata();
// Sort block indices by ascending free size so fuller blocks come first.
12579 m_BlockInfos.resize(blockCount);
12580 for(
size_t i = 0; i < blockCount; ++i)
12582 m_BlockInfos[i].origBlockIndex = i;
12585 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
12586 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
12587 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
// Database of skipped gaps that later allocations may still fit into.
12592 FreeSpaceDatabase freeSpaceDb;
// Rolling destination cursor: block (in sorted order) + offset within it.
12594 size_t dstBlockInfoIndex = 0;
12595 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
12596 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
12597 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
12598 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
12599 VkDeviceSize dstOffset = 0;
12602 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
12604 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
12605 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
12606 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
12607 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
12608 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
12610 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
12611 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
12612 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
// Budget check: stop the whole pass when limits would be exceeded.
12613 if(m_AllocationsMoved == maxAllocationsToMove ||
12614 m_BytesMoved + srcAllocSize > maxBytesToMove)
12619 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
// Case A: a previously registered gap can hold this allocation.
12622 size_t freeSpaceInfoIndex;
12623 VkDeviceSize dstAllocOffset;
12624 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
12625 freeSpaceInfoIndex, dstAllocOffset))
12627 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
12628 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
12629 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
12630 VkDeviceSize freeSpaceBlockSize = pFreeSpaceMetadata->GetSize();
// A1: gap is in the same block — just shift the suballocation down.
12633 if(freeSpaceInfoIndex == srcBlockInfoIndex)
12635 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
12639 VmaSuballocation suballoc = *srcSuballocIt;
12640 suballoc.offset = dstAllocOffset;
12641 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
12642 m_BytesMoved += srcAllocSize;
12643 ++m_AllocationsMoved;
// Re-insert at the new position: erase here, InsertSuballoc below keeps
// the list sorted by offset.
12645 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
12647 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
12648 srcSuballocIt = nextSuballocIt;
12650 InsertSuballoc(pFreeSpaceMetadata, suballoc);
12652 VmaDefragmentationMove move = {
12653 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
12654 srcAllocOffset, dstAllocOffset,
12656 moves.push_back(move);
// A2: gap is in an earlier block — move the allocation across blocks.
12663 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
12665 VmaSuballocation suballoc = *srcSuballocIt;
12666 suballoc.offset = dstAllocOffset;
12667 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
12668 m_BytesMoved += srcAllocSize;
12669 ++m_AllocationsMoved;
12671 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
12673 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
12674 srcSuballocIt = nextSuballocIt;
12676 InsertSuballoc(pFreeSpaceMetadata, suballoc);
12678 VmaDefragmentationMove move = {
12679 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
12680 srcAllocOffset, dstAllocOffset,
12682 moves.push_back(move);
// Case B: place at the rolling cursor, aligned for this allocation.
12687 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
// Advance the destination cursor to the next block while the allocation
// doesn't fit, registering the leftover tail as reusable free space.
12690 while(dstBlockInfoIndex < srcBlockInfoIndex &&
12691 dstAllocOffset + srcAllocSize > dstBlockSize)
12694 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
12696 ++dstBlockInfoIndex;
12697 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
12698 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
12699 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
12700 dstBlockSize = pDstMetadata->GetSize();
12702 dstAllocOffset = 0;
// B1: destination is the same block as the source.
12706 if(dstBlockInfoIndex == srcBlockInfoIndex)
12708 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
// Same-block move may overlap its own source range; only allowed when
// the device supports overlapping copies, and only worth it when the
// shift distance is large enough relative to the size (1/64 heuristic).
12710 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
12712 bool skipOver = overlap;
12713 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
12717 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
// Skipped: register the gap before this allocation and leave it in place.
12722 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
12724 dstOffset = srcAllocOffset + srcAllocSize;
// Moved in place: rewrite offset, advance cursor past it.
12730 srcSuballocIt->offset = dstAllocOffset;
12731 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
12732 dstOffset = dstAllocOffset + srcAllocSize;
12733 m_BytesMoved += srcAllocSize;
12734 ++m_AllocationsMoved;
12736 VmaDefragmentationMove move = {
12737 srcOrigBlockIndex, dstOrigBlockIndex,
12738 srcAllocOffset, dstAllocOffset,
12740 moves.push_back(move);
// B2: destination is an earlier block — append to its suballocation list.
12748 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
12749 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
12751 VmaSuballocation suballoc = *srcSuballocIt;
12752 suballoc.offset = dstAllocOffset;
12753 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
12754 dstOffset = dstAllocOffset + srcAllocSize;
12755 m_BytesMoved += srcAllocSize;
12756 ++m_AllocationsMoved;
12758 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
12760 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
12761 srcSuballocIt = nextSuballocIt;
// push_back is valid: cursor moves strictly forward within the dst block.
12763 pDstMetadata->m_Suballocations.push_back(suballoc);
12765 VmaDefragmentationMove move = {
12766 srcOrigBlockIndex, dstOrigBlockIndex,
12767 srcAllocOffset, dstAllocOffset,
12769 moves.push_back(move);
12775 m_BlockInfos.clear();
// Rebuild free-suballocation bookkeeping invalidated by the pass above.
12777 PostprocessMetadata();
// Puts every block's metadata into a transient state used by the fast pass:
// free-count is zeroed, sum-free is reset to the whole block size, the
// by-size free list is cleared, and all FREE suballocations are erased so
// the list contains only used suballocations. PostprocessMetadata() restores
// a consistent state afterwards.
12782 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
12784 const size_t blockCount = m_pBlockVector->GetBlockCount();
12785 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12787 VmaBlockMetadata_Generic*
const pMetadata =
12788 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
12789 pMetadata->m_FreeCount = 0;
12790 pMetadata->m_SumFreeSize = pMetadata->GetSize();
12791 pMetadata->m_FreeSuballocationsBySize.clear();
12792 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
12793 it != pMetadata->m_Suballocations.end(); )
// Drop FREE entries; iterator is saved before erase (increment elided here).
12795 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
12797 VmaSuballocationList::iterator nextIt = it;
12799 pMetadata->m_Suballocations.erase(it);
// Restores consistent metadata after the fast pass: for each block, walks the
// (now used-only, offset-sorted) suballocation list and re-creates FREE
// entries for every gap — between allocations and at the block tail — while
// recomputing m_FreeCount and m_SumFreeSize. Free entries at or above
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are re-registered in the
// by-size list, which is then re-sorted.
12810 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
12812 const size_t blockCount = m_pBlockVector->GetBlockCount();
12813 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12815 VmaBlockMetadata_Generic*
const pMetadata =
12816 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
12817 const VkDeviceSize blockSize = pMetadata->GetSize();
// Completely emptied block: a single FREE suballocation spans it all.
12820 if(pMetadata->m_Suballocations.empty())
12822 pMetadata->m_FreeCount = 1;
12824 VmaSuballocation suballoc = {
12828 VMA_SUBALLOCATION_TYPE_FREE };
12829 pMetadata->m_Suballocations.push_back(suballoc);
12830 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
// General case: scan used suballocations in offset order, filling gaps.
12835 VkDeviceSize offset = 0;
12836 VmaSuballocationList::iterator it;
12837 for(it = pMetadata->m_Suballocations.begin();
12838 it != pMetadata->m_Suballocations.end();
// List must contain only used entries (PreprocessMetadata removed FREE ones)
// and must be sorted by offset.
12841 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
12842 VMA_ASSERT(it->offset >= offset);
// Gap before this suballocation -> insert a FREE entry covering it.
12845 if(it->offset > offset)
12847 ++pMetadata->m_FreeCount;
12848 const VkDeviceSize freeSize = it->offset - offset;
12849 VmaSuballocation suballoc = {
12853 VMA_SUBALLOCATION_TYPE_FREE };
12854 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
12855 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
12857 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
12861 pMetadata->m_SumFreeSize -= it->size;
12862 offset = it->offset + it->size;
// Trailing gap at the end of the block.
12866 if(offset < blockSize)
12868 ++pMetadata->m_FreeCount;
12869 const VkDeviceSize freeSize = blockSize - offset;
12870 VmaSuballocation suballoc = {
12874 VMA_SUBALLOCATION_TYPE_FREE };
12875 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
12876 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
// NOTE(review): uses `>` here vs `>=` for the preceding-gap case above —
// looks inconsistent in the original; confirm against upstream VMA.
12877 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
12879 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
12884 pMetadata->m_FreeSuballocationsBySize.begin(),
12885 pMetadata->m_FreeSuballocationsBySize.end(),
12886 VmaSuballocationItemSizeLess());
12889 VMA_HEAVY_ASSERT(pMetadata->Validate());
// Inserts `suballoc` into pMetadata's suballocation list keeping it sorted by
// offset: linear scan for the first entry not below suballoc.offset, then
// insert before it. (Loop-advance/break lines are elided from this chunk.)
12893 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc)
12896 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
12897 while(it != pMetadata->m_Suballocations.end())
12899 if(it->offset < suballoc.offset)
12904 pMetadata->m_Suballocations.insert(it, suballoc);
// Per-block-vector defragmentation context. Pure member initialization; the
// concrete algorithm (m_pAlgorithm) is created later in Begin().
12910 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
12913 VmaBlockVector* pBlockVector,
12914 uint32_t currFrameIndex,
12915 uint32_t algorithmFlags) :
12917 mutexLocked(false),
12918 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
12919 m_hAllocator(hAllocator),
12920 m_hCustomPool(hCustomPool),
12921 m_pBlockVector(pBlockVector),
12922 m_CurrFrameIndex(currFrameIndex),
12923 m_AlgorithmFlags(algorithmFlags),
12924 m_pAlgorithm(VMA_NULL),
12925 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
12926 m_AllAllocations(false)
// Destructor: deletes the algorithm object created in Begin() (vma_delete is
// a no-op for VMA_NULL-style unset pointers per the allocator's conventions —
// confirm against vma_delete's definition).
12930 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
12932 vma_delete(m_hAllocator, m_pAlgorithm);
// Queues one allocation (plus its optional per-allocation "changed" output
// flag) to be handed to the algorithm when Begin() runs.
12935 void VmaBlockVectorDefragmentationContext::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
12937 AllocInfo info = { hAlloc, pChanged };
12938 m_Allocations.push_back(info);
// Chooses and instantiates the defragmentation algorithm, then feeds it the
// queued allocations. The fast algorithm is used only when all allocations
// participate, there is no debug margin, and no buffer/image granularity
// conflict is possible; otherwise the generic algorithm is used.
12941 void VmaBlockVectorDefragmentationContext::Begin(
bool overlappingMoveSupported)
12943 const bool allAllocations = m_AllAllocations ||
12944 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
// Conditions under which the fast path is valid (some condition lines are
// elided from this chunk — `allAllocations` is presumably among them).
12956 if(VMA_DEBUG_MARGIN == 0 &&
12958 !m_pBlockVector->IsBufferImageGranularityConflictPossible())
12960 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
12961 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
12965 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
12966 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
// Either register everything at once, or the explicit queue from
// AddAllocation().
12971 m_pAlgorithm->AddAll();
12975 for(
size_t i = 0, count = m_Allocations.size(); i < count; ++i)
12977 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
// Top-level defragmentation context spanning the default pools (fixed array,
// one slot per memory type) and any custom pools (growable vector). The
// default-pool slots start out null.
12985 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
12987 uint32_t currFrameIndex,
12990 m_hAllocator(hAllocator),
12991 m_CurrFrameIndex(currFrameIndex),
12994 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
12996 memset(m_DefaultPoolContexts, 0,
sizeof(m_DefaultPoolContexts));
// Destructor ends defragmentation on every sub-context (custom pools first,
// then per-memory-type default pools), letting each block vector clean up
// and unlock, then deletes the sub-context objects.
12999 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
13001 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13003 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
13004 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13005 vma_delete(m_hAllocator, pBlockVectorCtx);
13007 for(
size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
13009 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
// Default-pool slots may be null (only created on demand in AddAllocations).
13010 if(pBlockVectorCtx)
13012 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13013 vma_delete(m_hAllocator, pBlockVectorCtx);
// Registers whole custom pools for defragmentation. Pools using a non-default
// algorithm (GetAlgorithm() != 0) are skipped. An existing context for the
// pool is reused (reverse linear search); otherwise a new one is created and
// stored. All of the pool's allocations are then included via AddAll().
13018 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount,
VmaPool* pPools)
13020 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13022 VmaPool pool = pPools[poolIndex];
// Only pools with the default (generic) algorithm can be defragmented.
13025 if(pool->m_BlockVector.GetAlgorithm() == 0)
13027 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
13029 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13031 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
13033 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13038 if(!pBlockVectorDefragCtx)
13040 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13043 &pool->m_BlockVector,
13046 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
13049 pBlockVectorDefragCtx->AddAll();
// Registers individual allocations for defragmentation. Only block-type,
// non-lost allocations are eligible. Each allocation is routed to the
// context for its owning custom pool (created on demand, reusing an existing
// one when present) or to the per-memory-type default-pool context (also
// created on demand). The optional pAllocationsChanged array receives a
// per-allocation VK_TRUE/VK_FALSE "was moved" flag.
13054 void VmaDefragmentationContext_T::AddAllocations(
13055 uint32_t allocationCount,
13057 VkBool32* pAllocationsChanged)
13060 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
// NOTE(review): the line reading hAlloc from the input array is elided here.
13063 VMA_ASSERT(hAlloc);
13065 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
13067 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
13069 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
13071 const VmaPool hAllocPool = hAlloc->GetPool();
// Allocation belongs to a custom pool.
13073 if(hAllocPool != VK_NULL_HANDLE)
// Only default-algorithm pools are supported, same as in AddPools().
13076 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
13078 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13080 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
13082 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13086 if(!pBlockVectorDefragCtx)
13088 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13091 &hAllocPool->m_BlockVector,
13094 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
// Allocation belongs to a default pool, keyed by memory type index.
13101 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
13102 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
13103 if(!pBlockVectorDefragCtx)
13105 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13108 m_hAllocator->m_pBlockVectors[memTypeIndex],
13111 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
13115 if(pBlockVectorDefragCtx)
13117 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
13118 &pAllocationsChanged[allocIndex] : VMA_NULL;
13119 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
// Runs defragmentation on every registered sub-context: first the default
// pools (per memory type), then the custom pools. Without a command buffer
// the GPU budgets are zeroed so only CPU-side moves happen. The shared
// CPU/GPU byte and allocation budgets are passed down to (and decremented
// inside) each block vector's Defragment(). The first non-success
// sub-context result is propagated and stops further processing.
13125 VkResult VmaDefragmentationContext_T::Defragment(
13126 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
13127 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
// No command buffer => GPU-side defragmentation is disabled.
13135 if(commandBuffer == VK_NULL_HANDLE)
13137 maxGpuBytesToMove = 0;
13138 maxGpuAllocationsToMove = 0;
13141 VkResult res = VK_SUCCESS;
// Default pools, one per memory type; loop stops on first failure.
13144 for(uint32_t memTypeIndex = 0;
13145 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
13148 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
13149 if(pBlockVectorCtx)
13151 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
13152 pBlockVectorCtx->GetBlockVector()->Defragment(
13155 maxCpuBytesToMove, maxCpuAllocationsToMove,
13156 maxGpuBytesToMove, maxGpuAllocationsToMove,
13158 if(pBlockVectorCtx->res != VK_SUCCESS)
13160 res = pBlockVectorCtx->res;
// Custom pools.
13166 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
13167 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
13170 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
13171 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
13172 pBlockVectorCtx->GetBlockVector()->Defragment(
13175 maxCpuBytesToMove, maxCpuAllocationsToMove,
13176 maxGpuBytesToMove, maxGpuAllocationsToMove,
13178 if(pBlockVectorCtx->res != VK_SUCCESS)
13180 res = pBlockVectorCtx->res;
13190 #if VMA_RECORDING_ENABLED 13192 VmaRecorder::VmaRecorder() :
// NOTE(review): fragment — the surrounding declarations are elided from this
// chunk. The first line appears to be the tail of VmaRecorder's constructor
// initializer list; the rest is presumably VmaRecorder::Init (its signature
// with `useMutex` and `settings` parameters is not visible) — confirm
// against upstream VMA. Init captures the QueryPerformanceCounter time base,
// opens the recording file with fopen_s, and writes the CSV header
// ("Vulkan Memory Allocator,Calls recording" + format version "1,4").
13197 m_StartCounter(INT64_MAX)
13203 m_UseMutex = useMutex;
13204 m_Flags = settings.
flags;
13206 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
13207 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
13210 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
// Failing to open the recording file aborts initialization.
13213 return VK_ERROR_INITIALIZATION_FAILED;
13217 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
13218 fprintf(m_File,
"%s\n",
"1,4");
// Destructor: closes the recording file if one was opened (the fclose call
// itself is elided from this chunk).
13223 VmaRecorder::~VmaRecorder()
13225 if(m_File != VMA_NULL)
// Appends a "vmaCreateAllocator" CSV record (thread id, timestamp, frame
// index) to the recording file, under the file mutex.
13231 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
13233 CallParams callParams;
13234 GetBasicParams(callParams);
13236 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13237 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Appends a "vmaDestroyAllocator" CSV record, under the file mutex.
13241 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
13243 CallParams callParams;
13244 GetBasicParams(callParams);
13246 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13247 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// NOTE(review): fragment — the signature of this function is elided from the
// chunk; per the format string it is presumably VmaRecorder::RecordCreatePool.
// Appends a "vmaCreatePool" CSV record with the pool creation parameters
// (the argument lines after the format string are elided).
13253 CallParams callParams;
13254 GetBasicParams(callParams);
13256 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13257 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaDestroyPool" CSV record identifying the pool by pointer.
13268 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
13270 CallParams callParams;
13271 GetBasicParams(callParams);
13273 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13274 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaAllocateMemory" CSV record: memory requirements, allocation
// create-info fields, and the user-data string (several argument lines are
// elided from this chunk).
13279 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
13280 const VkMemoryRequirements& vkMemReq,
13284 CallParams callParams;
13285 GetBasicParams(callParams);
13287 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13288 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13289 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13291 vkMemReq.alignment,
13292 vkMemReq.memoryTypeBits,
13300 userDataStr.GetString());
// Appends a "vmaAllocateMemoryForBuffer" CSV record, additionally logging the
// dedicated-allocation requirement/preference flags.
13304 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
13305 const VkMemoryRequirements& vkMemReq,
13306 bool requiresDedicatedAllocation,
13307 bool prefersDedicatedAllocation,
13311 CallParams callParams;
13312 GetBasicParams(callParams);
13314 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13315 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13316 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13318 vkMemReq.alignment,
13319 vkMemReq.memoryTypeBits,
13320 requiresDedicatedAllocation ? 1 : 0,
13321 prefersDedicatedAllocation ? 1 : 0,
13329 userDataStr.GetString());
// Appends a "vmaAllocateMemoryForImage" CSV record — same shape as the
// buffer variant above.
13333 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
13334 const VkMemoryRequirements& vkMemReq,
13335 bool requiresDedicatedAllocation,
13336 bool prefersDedicatedAllocation,
13340 CallParams callParams;
13341 GetBasicParams(callParams);
13343 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13344 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13345 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13347 vkMemReq.alignment,
13348 vkMemReq.memoryTypeBits,
13349 requiresDedicatedAllocation ? 1 : 0,
13350 prefersDedicatedAllocation ? 1 : 0,
13358 userDataStr.GetString());
// Appends a "vmaFreeMemory" CSV record identifying the allocation by pointer.
13362 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
13365 CallParams callParams;
13366 GetBasicParams(callParams);
13368 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13369 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaResizeAllocation" CSV record with the allocation pointer and
// the requested new size.
13374 void VmaRecorder::RecordResizeAllocation(
13375 uint32_t frameIndex,
13377 VkDeviceSize newSize)
13379 CallParams callParams;
13380 GetBasicParams(callParams);
13382 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13383 fprintf(m_File,
"%u,%.3f,%u,vmaResizeAllocation,%p,%llu\n", callParams.threadId, callParams.time, frameIndex,
13384 allocation, newSize);
// Appends a "vmaSetAllocationUserData" CSV record; the user data is rendered
// through UserDataString (string vs pointer formatting).
13388 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
13390 const void* pUserData)
13392 CallParams callParams;
13393 GetBasicParams(callParams);
13395 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13396 UserDataString userDataStr(
13399 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13401 userDataStr.GetString());
// Appends a "vmaCreateLostAllocation" CSV record.
13405 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
13408 CallParams callParams;
13409 GetBasicParams(callParams);
13411 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13412 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaMapMemory" CSV record.
13417 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
13420 CallParams callParams;
13421 GetBasicParams(callParams);
13423 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13424 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaUnmapMemory" CSV record.
13429 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
13432 CallParams callParams;
13433 GetBasicParams(callParams);
13435 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13436 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaFlushAllocation" CSV record (allocation pointer, offset,
// size).
13441 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
13442 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13444 CallParams callParams;
13445 GetBasicParams(callParams);
13447 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13448 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaInvalidateAllocation" CSV record (allocation pointer, offset,
// size).
13455 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
13456 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13458 CallParams callParams;
13459 GetBasicParams(callParams);
13461 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13462 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaCreateBuffer" CSV record: buffer create-info fields plus the
// allocation create-info fields and user-data string (some argument lines
// are elided from this chunk).
13469 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
13470 const VkBufferCreateInfo& bufCreateInfo,
13474 CallParams callParams;
13475 GetBasicParams(callParams);
13477 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13478 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13479 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13480 bufCreateInfo.flags,
13481 bufCreateInfo.size,
13482 bufCreateInfo.usage,
13483 bufCreateInfo.sharingMode,
13484 allocCreateInfo.
flags,
13485 allocCreateInfo.
usage,
13489 allocCreateInfo.
pool,
13491 userDataStr.GetString());
// Appends a "vmaCreateImage" CSV record: full image create-info (type,
// format, extent, mips, layers, samples, tiling, usage, sharing, layout)
// plus the allocation create-info fields and user-data string.
13495 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
13496 const VkImageCreateInfo& imageCreateInfo,
13500 CallParams callParams;
13501 GetBasicParams(callParams);
13503 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13504 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13505 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13506 imageCreateInfo.flags,
13507 imageCreateInfo.imageType,
13508 imageCreateInfo.format,
13509 imageCreateInfo.extent.width,
13510 imageCreateInfo.extent.height,
13511 imageCreateInfo.extent.depth,
13512 imageCreateInfo.mipLevels,
13513 imageCreateInfo.arrayLayers,
13514 imageCreateInfo.samples,
13515 imageCreateInfo.tiling,
13516 imageCreateInfo.usage,
13517 imageCreateInfo.sharingMode,
13518 imageCreateInfo.initialLayout,
13519 allocCreateInfo.
flags,
13520 allocCreateInfo.
usage,
13524 allocCreateInfo.
pool,
13526 userDataStr.GetString());
// Appends a "vmaDestroyBuffer" CSV record.
13530 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
13533 CallParams callParams;
13534 GetBasicParams(callParams);
13536 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13537 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaDestroyImage" CSV record.
13542 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
13545 CallParams callParams;
13546 GetBasicParams(callParams);
13548 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13549 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaTouchAllocation" CSV record.
13554 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
13557 CallParams callParams;
13558 GetBasicParams(callParams);
13560 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13561 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaGetAllocationInfo" CSV record.
13566 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
13569 CallParams callParams;
13570 GetBasicParams(callParams);
13572 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13573 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaMakePoolAllocationsLost" CSV record.
13578 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
13581 CallParams callParams;
13582 GetBasicParams(callParams);
13584 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13585 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// NOTE(review): fragment — the enclosing definition (presumably the
// VmaRecorder::UserDataString constructor; signature elided) renders
// allocation user data for CSV output: either interpreted as a C string
// (when the relevant flag selects string user data — the condition line is
// elided) or formatted as a raw pointer via sprintf_s.
13592 if(pUserData != VMA_NULL)
13596 m_Str = (
const char*)pUserData;
13600 sprintf_s(m_PtrStr,
"%p", pUserData);
// Writes the "Config,Begin" ... "Config,End" section of the recording
// file: physical-device properties/limits, memory heaps and types, the
// dedicated-allocation extension state, and the compile-time VMA_DEBUG_*
// macro values, so a replay tool can reproduce the environment.
13610 void VmaRecorder::WriteConfiguration(
13611 const VkPhysicalDeviceProperties& devProps,
13612 const VkPhysicalDeviceMemoryProperties& memProps,
13613 bool dedicatedAllocationExtensionEnabled)
13615 fprintf(m_File,
"Config,Begin\n");
// Physical-device identification.
13617 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
13618 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
13619 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
13620 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
13621 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
13622 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
// Limits that affect allocator behavior.
13624 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
13625 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
13626 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// Memory heaps.
13628 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
13629 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
13631 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
13632 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
// Memory types.
13634 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
13635 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
13637 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
13638 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
// Extension and compile-time configuration.
13641 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
13643 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
13644 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
13645 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
13646 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
13647 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
13648 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
13649 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
13650 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
13651 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
13653 fprintf(m_File,
"Config,End\n");
// Fills the per-record CSV prefix: the Win32 thread id and the elapsed
// time in seconds since the recorder started (QueryPerformanceCounter
// ticks relative to m_StartCounter, scaled by the counter frequency
// m_Freq). Windows-only (compiled under VMA_RECORDING_ENABLED).
13656 void VmaRecorder::GetBasicParams(CallParams& outParams)
13658 outParams.threadId = GetCurrentThreadId();
13660 LARGE_INTEGER counter;
13661 QueryPerformanceCounter(&counter);
13662 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
// VmaRecorder::Flush (body elided by the extraction), followed by the
// VmaAllocator_T constructor. NOTE(review): the constructor's signature
// and many interior lines (braces, several member initializers and
// arguments) are on elided lines; the leading numbers are extraction
// artifacts.
13665 void VmaRecorder::Flush()
13673 #endif // #if VMA_RECORDING_ENABLED 13681 m_hDevice(pCreateInfo->device),
13682 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
// Fall back to empty callbacks when the user supplied none.
13683 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
13684 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
13685 m_PreferredLargeHeapBlockSize(0),
13686 m_PhysicalDevice(pCreateInfo->physicalDevice),
13687 m_CurrentFrameIndex(0),
13688 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
13691 ,m_pRecorder(VMA_NULL)
// Corruption detection requires the debug margin to hold whole uint32_t
// marker values.
13694 if(VMA_DEBUG_DETECT_CORRUPTION)
13697 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
13702 #if !(VMA_DEDICATED_ALLOCATION) 13705 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
// Zero-initialize POD members and the per-type vectors of owned objects.
13709 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
13710 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
13711 memset(&m_MemProps, 0,
sizeof(m_MemProps));
13713 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
13714 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// VK_WHOLE_SIZE marks "no limit" for a heap.
13716 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
13718 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
// Query device properties through the (possibly user-imported) function
// pointers.
13729 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
13730 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
13732 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
13733 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
13734 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
13735 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
// Apply user-provided per-heap size limits, also clamping the reported
// heap sizes.
13742 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
13744 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
13745 if(limit != VK_WHOLE_SIZE)
13747 m_HeapSizeLimit[heapIndex] = limit;
13748 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
13750 m_MemProps.memoryHeaps[heapIndex].size = limit;
// Create one default block vector and one dedicated-allocation list per
// memory type.
13756 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13758 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
13760 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
13763 preferredBlockSize,
13766 GetBufferImageGranularity(),
13773 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
13780 VkResult res = VK_SUCCESS;
// Optional call recording: set up the recorder and write the config
// header; without VMA_RECORDING_ENABLED the feature is reported as
// unsupported.
13785 #if VMA_RECORDING_ENABLED 13786 m_pRecorder = vma_new(
this, VmaRecorder)();
13788 if(res != VK_SUCCESS)
13792 m_pRecorder->WriteConfiguration(
13793 m_PhysicalDeviceProperties,
13795 m_UseKhrDedicatedAllocation);
13796 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
13798 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
13799 return VK_ERROR_FEATURE_NOT_PRESENT;
// Destructor: records allocator destruction (when recording is enabled),
// then destroys the per-memory-type dedicated-allocation lists and block
// vectors. NOTE(review): braces and some lines are elided by the
// extraction.
13806 VmaAllocator_T::~VmaAllocator_T()
13808 #if VMA_RECORDING_ENABLED 13809 if(m_pRecorder != VMA_NULL)
13811 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
13812 vma_delete(
this, m_pRecorder);
// All user-created pools must already be destroyed.
13816 VMA_ASSERT(m_Pools.empty());
// Iterate in reverse over memory types, freeing owned vectors.
13818 for(
size_t i = GetMemoryTypeCount(); i--; )
13820 vma_delete(
this, m_pDedicatedAllocations[i]);
13821 vma_delete(
this, m_pBlockVectors[i]);
// Populates m_VulkanFunctions: first from statically linked Vulkan
// prototypes (when VMA_STATIC_VULKAN_FUNCTIONS == 1), with the two
// KHR-dedicated-allocation entry points fetched via vkGetDeviceProcAddr;
// then overrides with any non-null user-provided pointers; finally
// asserts that every required pointer is set.
13825 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
// 1) Static prototypes as defaults.
13827 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 13828 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
13829 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
13830 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
13831 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
13832 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
13833 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
13834 m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
13835 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
13836 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
13837 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
13838 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
13839 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
13840 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
13841 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
13842 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
13843 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
13844 m_VulkanFunctions.vkCmdCopyBuffer = &vkCmdCopyBuffer;
// KHR extension entry points have no static prototypes — resolve them
// from the device.
13845 #if VMA_DEDICATED_ALLOCATION 13846 if(m_UseKhrDedicatedAllocation)
13848 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
13849 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
13850 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
13851 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// 2) User-supplied pointers override the defaults, field by field.
13853 #endif // #if VMA_DEDICATED_ALLOCATION 13854 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 13856 #define VMA_COPY_IF_NOT_NULL(funcName) \ 13857 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 13859 if(pVulkanFunctions != VMA_NULL)
13861 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
13862 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
13863 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
13864 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
13865 VMA_COPY_IF_NOT_NULL(vkMapMemory);
13866 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
13867 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
13868 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
13869 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
13870 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
13871 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
13872 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
13873 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
13874 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
13875 VMA_COPY_IF_NOT_NULL(vkCreateImage);
13876 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
13877 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
13878 #if VMA_DEDICATED_ALLOCATION 13879 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
13880 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// 3) Validate: every required function pointer must now be non-null.
13884 #undef VMA_COPY_IF_NOT_NULL 13888 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
13889 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
13890 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
13891 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
13892 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
13893 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
13894 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
13895 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
13896 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
13897 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
13898 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
13899 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
13900 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
13901 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
13902 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
13903 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
13904 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
// The KHR pair is only required when the extension is in use.
13905 #if VMA_DEDICATED_ALLOCATION 13906 if(m_UseKhrDedicatedAllocation)
13908 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
13909 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
13914 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
13916 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
13917 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
13918 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
13919 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates memory of one specific Vulkan memory type: first tries (or is
// forced into) a dedicated VkDeviceMemory allocation, otherwise allocates
// from the per-type block vector, falling back to dedicated memory when
// block allocation fails. NOTE(review): many lines (parameters, branch
// conditions, arguments, braces) are elided by the extraction.
13922 VkResult VmaAllocator_T::AllocateMemoryOfType(
13924 VkDeviceSize alignment,
13925 bool dedicatedAllocation,
13926 VkBuffer dedicatedBuffer,
13927 VkImage dedicatedImage,
13929 uint32_t memTypeIndex,
13930 VmaSuballocationType suballocType,
13933 VMA_ASSERT(pAllocation != VMA_NULL);
13934 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Host-visibility check against the chosen memory type's property flags.
13940 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
13945 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
13946 VMA_ASSERT(blockVector);
// Prefer dedicated memory for forced/requested cases or when the request
// is large relative to the preferred block size.
13948 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
13949 bool preferDedicatedMemory =
13950 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
13951 dedicatedAllocation ||
13953 size > preferredBlockSize / 2;
13955 if(preferDedicatedMemory &&
13957 finalCreateInfo.
pool == VK_NULL_HANDLE)
13966 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
13970 return AllocateDedicatedMemory(
// Normal path: suballocate from the block vector.
13984 VkResult res = blockVector->Allocate(
13986 m_CurrentFrameIndex.load(),
13992 if(res == VK_SUCCESS)
14000 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: dedicated allocation when block allocation failed.
14004 res = AllocateDedicatedMemory(
14010 finalCreateInfo.pUserData,
14014 if(res == VK_SUCCESS)
14017 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
14023 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Creates a dedicated VkDeviceMemory for a single allocation: builds
// VkMemoryAllocateInfo (chaining VkMemoryDedicatedAllocateInfoKHR when the
// KHR extension is in use), allocates, optionally maps persistently,
// registers the allocation in the per-type dedicated list and fills it
// with the debug pattern. NOTE(review): several lines (parameters, map
// arguments, braces, returns) are elided by the extraction.
14030 VkResult VmaAllocator_T::AllocateDedicatedMemory(
14032 VmaSuballocationType suballocType,
14033 uint32_t memTypeIndex,
14035 bool isUserDataString,
14037 VkBuffer dedicatedBuffer,
14038 VkImage dedicatedImage,
14041 VMA_ASSERT(pAllocation);
14043 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
14044 allocInfo.memoryTypeIndex = memTypeIndex;
14045 allocInfo.allocationSize = size;
// Chain dedicated-allocation info for exactly one of buffer/image.
14047 #if VMA_DEDICATED_ALLOCATION 14048 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
14049 if(m_UseKhrDedicatedAllocation)
14051 if(dedicatedBuffer != VK_NULL_HANDLE)
14053 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
14054 dedicatedAllocInfo.buffer = dedicatedBuffer;
14055 allocInfo.pNext = &dedicatedAllocInfo;
14057 else if(dedicatedImage != VK_NULL_HANDLE)
14059 dedicatedAllocInfo.image = dedicatedImage;
14060 allocInfo.pNext = &dedicatedAllocInfo;
14063 #endif // #if VMA_DEDICATED_ALLOCATION 14066 VkDeviceMemory hMemory = VK_NULL_HANDLE;
14067 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
14070 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Optional persistent mapping; on failure the fresh memory is released.
14074 void* pMappedData = VMA_NULL;
14077 res = (*m_VulkanFunctions.vkMapMemory)(
14086 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
14087 FreeVulkanMemory(memTypeIndex, size, hMemory);
// Wrap the raw memory in a VmaAllocation_T and record bookkeeping.
14092 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
14093 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
14094 (*pAllocation)->SetUserData(
this, pUserData);
14095 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14097 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Register under the per-type mutex, keeping the vector sorted.
14102 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14103 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
14104 VMA_ASSERT(pDedicatedAllocations);
14105 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
14108 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries memory requirements for a buffer. With VK_KHR_dedicated_allocation
// enabled it uses vkGetBufferMemoryRequirements2KHR and also reports
// whether a dedicated allocation is required/preferred; otherwise it falls
// back to the core query and reports false for both flags.
14113 void VmaAllocator_T::GetBufferMemoryRequirements(
14115 VkMemoryRequirements& memReq,
14116 bool& requiresDedicatedAllocation,
14117 bool& prefersDedicatedAllocation)
const 14119 #if VMA_DEDICATED_ALLOCATION 14120 if(m_UseKhrDedicatedAllocation)
14122 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
14123 memReqInfo.buffer = hBuffer;
// Chain VkMemoryDedicatedRequirementsKHR to receive the two flags.
14125 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14127 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14128 memReq2.pNext = &memDedicatedReq;
14130 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14132 memReq = memReq2.memoryRequirements;
14133 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14134 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core function, no dedicated-allocation information.
14137 #endif // #if VMA_DEDICATED_ALLOCATION 14139 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
14140 requiresDedicatedAllocation =
false;
14141 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR with VkMemoryDedicatedRequirementsKHR
// when the extension is enabled, otherwise the core query with both
// dedicated flags reported false.
14145 void VmaAllocator_T::GetImageMemoryRequirements(
14147 VkMemoryRequirements& memReq,
14148 bool& requiresDedicatedAllocation,
14149 bool& prefersDedicatedAllocation)
const 14151 #if VMA_DEDICATED_ALLOCATION 14152 if(m_UseKhrDedicatedAllocation)
14154 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
14155 memReqInfo.image = hImage;
14157 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14159 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14160 memReq2.pNext = &memDedicatedReq;
14162 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14164 memReq = memReq2.memoryRequirements;
14165 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14166 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
14169 #endif // #if VMA_DEDICATED_ALLOCATION 14171 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
14172 requiresDedicatedAllocation =
false;
14173 prefersDedicatedAllocation =
false;
// Top-level allocation entry: validates flag combinations, routes
// pool-based requests to the pool's block vector, otherwise picks a memory
// type and calls AllocateMemoryOfType, retrying with the failed type
// removed from the candidate mask. NOTE(review): many lines (flag checks,
// FindMemoryTypeIndex calls, arguments, braces) are elided by the
// extraction.
14177 VkResult VmaAllocator_T::AllocateMemory(
14178 const VkMemoryRequirements& vkMemReq,
14179 bool requiresDedicatedAllocation,
14180 bool prefersDedicatedAllocation,
14181 VkBuffer dedicatedBuffer,
14182 VkImage dedicatedImage,
14184 VmaSuballocationType suballocType,
14187 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
// Zero-size allocations are rejected outright.
14189 if(vkMemReq.size == 0)
14191 return VK_ERROR_VALIDATION_FAILED_EXT;
// Mutually exclusive / invalid flag combinations.
14196 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
14197 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14202 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
14203 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// A required dedicated allocation cannot be combined with
// never-allocate or a custom pool.
14205 if(requiresDedicatedAllocation)
14209 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
14210 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14212 if(createInfo.
pool != VK_NULL_HANDLE)
14214 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
14215 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14218 if((createInfo.
pool != VK_NULL_HANDLE) &&
14221 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
14222 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Pool path: allocate from the pool's block vector, honoring its memory
// type's minimum alignment.
14225 if(createInfo.
pool != VK_NULL_HANDLE)
14227 const VkDeviceSize alignmentForPool = VMA_MAX(
14228 vkMemReq.alignment,
14229 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
14230 return createInfo.
pool->m_BlockVector.Allocate(
14232 m_CurrentFrameIndex.load(),
// Default path: iterate candidate memory types from the requirements
// bitmask.
14242 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
14243 uint32_t memTypeIndex = UINT32_MAX;
14245 if(res == VK_SUCCESS)
14247 VkDeviceSize alignmentForMemType = VMA_MAX(
14248 vkMemReq.alignment,
14249 GetMemoryTypeMinAlignment(memTypeIndex));
14251 res = AllocateMemoryOfType(
14253 alignmentForMemType,
14254 requiresDedicatedAllocation || prefersDedicatedAllocation,
14262 if(res == VK_SUCCESS)
// Remove the failed type from the mask and retry with the next best.
14272 memoryTypeBits &= ~(1u << memTypeIndex);
14275 if(res == VK_SUCCESS)
14277 alignmentForMemType = VMA_MAX(
14278 vkMemReq.alignment,
14279 GetMemoryTypeMinAlignment(memTypeIndex));
14281 res = AllocateMemoryOfType(
14283 alignmentForMemType,
14284 requiresDedicatedAllocation || prefersDedicatedAllocation,
14292 if(res == VK_SUCCESS)
14302 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation: optionally overwrites its bytes with the destroyed
// pattern, returns block suballocations to their owning block vector
// (custom pool's or the default per-type one), frees dedicated memory,
// then destroys the VmaAllocation_T object itself. NOTE(review): braces
// and some lines are elided by the extraction.
14313 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
14315 VMA_ASSERT(allocation);
// TouchAllocation returning true means the allocation is not lost.
14317 if(TouchAllocation(allocation))
14319 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14321 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
14324 switch(allocation->GetType())
14326 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14328 VmaBlockVector* pBlockVector = VMA_NULL;
14329 VmaPool hPool = allocation->GetPool();
// Custom pool allocations go back to the pool's block vector;
// otherwise use the default vector for the memory type.
14330 if(hPool != VK_NULL_HANDLE)
14332 pBlockVector = &hPool->m_BlockVector;
14336 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
14337 pBlockVector = m_pBlockVectors[memTypeIndex];
14339 pBlockVector->Free(allocation);
14342 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14343 FreeDedicatedMemory(allocation);
// Release user data and the allocation object last.
14350 allocation->SetUserData(
this, VMA_NULL);
14351 vma_delete(
this, allocation);
// Attempts an in-place resize of an existing allocation. Only block
// suballocations can be resized (via the block metadata); dedicated
// allocations report VK_ERROR_FEATURE_NOT_PRESENT. Zero size or a lost
// allocation is a validation error. NOTE(review): braces and the
// early-return for the equal-size case are on elided lines.
14354 VkResult VmaAllocator_T::ResizeAllocation(
14356 VkDeviceSize newSize)
14358 if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
14360 return VK_ERROR_VALIDATION_FAILED_EXT;
// Same size: nothing to do (success path on an elided line).
14362 if(newSize == alloc->GetSize())
14367 switch(alloc->GetType())
14369 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14370 return VK_ERROR_FEATURE_NOT_PRESENT;
14371 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// The block metadata decides whether the suballocation can grow/shrink
// in place; on success the allocation records its new size.
14372 if(alloc->GetBlock()->m_pMetadata->ResizeAllocation(alloc, newSize))
14374 alloc->ChangeSize(newSize);
14375 VMA_HEAVY_ASSERT(alloc->GetBlock()->m_pMetadata->Validate());
14380 return VK_ERROR_OUT_OF_POOL_MEMORY;
14384 return VK_ERROR_VALIDATION_FAILED_EXT;
// Aggregates statistics over all default block vectors, all custom pools,
// and all dedicated allocations, then post-processes totals, per-type and
// per-heap figures. NOTE(review): braces and a few lines are elided by the
// extraction.
14388 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Reset the output structure before accumulating.
14391 InitStatInfo(pStats->
total);
14392 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
14394 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default per-memory-type block vectors.
14398 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14400 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
14401 VMA_ASSERT(pBlockVector);
14402 pBlockVector->AddStats(pStats);
// Custom pools (read lock on the pool list).
14407 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
14408 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
14410 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
// Dedicated allocations, per memory type under the per-type read lock.
14415 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14417 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14418 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14419 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
14420 VMA_ASSERT(pDedicatedAllocVector);
14421 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
14424 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
14425 VmaAddStatInfo(pStats->
total, allocationStatInfo);
14426 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
14427 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages/mins/maxes from the accumulated sums.
14432 VmaPostprocessCalcStatInfo(pStats->
total);
14433 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
14434 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
14435 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
14436 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// AMD's PCI vendor ID (4098 == 0x1002), as reported in
// VkPhysicalDeviceProperties::vendorID.
14439 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Starts a defragmentation pass: creates a VmaDefragmentationContext_T,
// registers the requested allocations, and runs Defragment. Unless the
// result is VK_NOT_READY (work still pending for the caller), the context
// is destroyed again before returning. NOTE(review): parameters and
// several argument lines are elided by the extraction.
14441 VkResult VmaAllocator_T::DefragmentationBegin(
14451 *pContext = vma_new(
this, VmaDefragmentationContext_T)(
14452 this, m_CurrentFrameIndex.load(), info.
flags, pStats);
14455 (*pContext)->AddAllocations(
14458 VkResult res = (*pContext)->Defragment(
// VK_NOT_READY keeps the context alive for a later DefragmentationEnd.
14463 if(res != VK_NOT_READY)
14465 vma_delete(
this, *pContext);
14466 *pContext = VMA_NULL;
// Finishes a defragmentation pass by destroying the context object.
// NOTE(review): the parameter list and return are on elided lines.
14472 VkResult VmaAllocator_T::DefragmentationEnd(
14475 vma_delete(
this, context);
// Body of VmaAllocator_T::GetAllocationInfo (signature on elided lines).
// For allocations that can become lost it snapshots the current and
// last-use frame indices and fills VmaAllocationInfo accordingly (lost
// allocations report a null/zeroed location), bumping the last-use frame
// via compare-exchange in a retry loop. For ordinary allocations it simply
// copies the fields out. NOTE(review): braces, loop headers and some lines
// are elided by the extraction.
14481 if(hAllocation->CanBecomeLost())
14487 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
14488 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost allocation: report size/userdata but no usable memory location.
14491 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
14495 pAllocationInfo->
offset = 0;
14496 pAllocationInfo->
size = hAllocation->GetSize();
14498 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report the real location.
14501 else if(localLastUseFrameIndex == localCurrFrameIndex)
14503 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
14504 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
14505 pAllocationInfo->
offset = hAllocation->GetOffset();
14506 pAllocationInfo->
size = hAllocation->GetSize();
14508 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise try to advance last-use to the current frame and retry.
14513 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
14515 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost path: under VMA_STATS_STRING_ENABLED the last-use frame is
// still kept up to date for statistics.
14522 #if VMA_STATS_STRING_ENABLED 14523 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
14524 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
14527 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
14528 if(localLastUseFrameIndex == localCurrFrameIndex)
14534 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
14536 localLastUseFrameIndex = localCurrFrameIndex;
// Copy all fields straight from the allocation.
14542 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
14543 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
14544 pAllocationInfo->
offset = hAllocation->GetOffset();
14545 pAllocationInfo->
size = hAllocation->GetSize();
14546 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
14547 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks an allocation as used in the current frame. For allocations that
// can become lost it returns false when already lost, true when the
// last-use frame equals the current frame, otherwise compare-exchanges the
// last-use frame forward in a retry loop. Non-lost allocations only update
// the frame index for statistics. NOTE(review): braces, loop headers and
// return statements are on elided lines.
14551 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
14554 if(hAllocation->CanBecomeLost())
14556 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
14557 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost: caller must not use the allocation (false path on elided line).
14560 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
14564 else if(localLastUseFrameIndex == localCurrFrameIndex)
// Race with other threads: advance last-use atomically and retry.
14570 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
14572 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost allocations: frame-index bookkeeping only, for stats.
14579 #if VMA_STATS_STRING_ENABLED 14580 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
14581 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
14584 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
14585 if(localLastUseFrameIndex == localCurrFrameIndex)
14591 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
14593 localLastUseFrameIndex = localCurrFrameIndex;
// Body of VmaAllocator_T::CreatePool (signature and parameter validation
// on elided lines): computes the preferred block size for the pool's
// memory type, constructs the VmaPool_T, creates its minimum block count,
// and registers the pool (with a fresh id) in the sorted pool list.
14605 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
// Validation of the create info failed (condition on elided lines).
14615 return VK_ERROR_INITIALIZATION_FAILED;
14618 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
14620 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
// Pre-create the pool's minimum number of blocks; roll back on failure.
14622 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
14623 if(res != VK_SUCCESS)
14625 vma_delete(
this, *pPool);
// Register under the pools write lock, keeping the vector sorted.
14632 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
14633 (*pPool)->SetId(m_NextPoolId++);
14634 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Unregisters a pool from the sorted pool list (under the write lock) and
// destroys it. NOTE(review): braces are on elided lines.
14640 void VmaAllocator_T::DestroyPool(
VmaPool pool)
14644 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
14645 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
14646 VMA_ASSERT(success &&
"Pool not found in Allocator.");
14649 vma_delete(
this, pool);
// Body of VmaAllocator_T::GetPoolStats (signature on elided lines):
// delegates to the pool's block vector.
14654 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Atomically publishes the application's current frame index, used by the
// lost-allocation machinery.
14657 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
14659 m_CurrentFrameIndex.store(frameIndex);
// Marks eligible allocations in the given pool as lost, relative to the
// current frame index; delegates to the pool's block vector and reports
// the count through pLostAllocationCount.
14662 void VmaAllocator_T::MakePoolAllocationsLost(
14664 size_t* pLostAllocationCount)
14666 hPool->m_BlockVector.MakePoolAllocationsLost(
14667 m_CurrentFrameIndex.load(),
14668 pLostAllocationCount);
// Runs margin-corruption detection over a single pool's block vector.
14671 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
14673 return hPool->m_BlockVector.CheckCorruption();
// Runs corruption detection over every default block vector whose memory
// type is in memoryTypeBits, and over every custom pool matching the mask.
// Starts from VK_ERROR_FEATURE_NOT_PRESENT (nothing checkable) and
// upgrades to VK_SUCCESS when at least one vector was checked cleanly.
// NOTE(review): switch bodies and braces are partially on elided lines.
14676 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
14678 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default per-type block vectors.
14681 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14683 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
14685 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
14686 VMA_ASSERT(pBlockVector);
14687 VkResult localRes = pBlockVector->CheckCorruption();
14690 case VK_ERROR_FEATURE_NOT_PRESENT:
14693 finalRes = VK_SUCCESS;
// Custom pools, under the pools read lock.
14703 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
14704 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
14706 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
14708 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
14711 case VK_ERROR_FEATURE_NOT_PRESENT:
14714 finalRes = VK_SUCCESS;
// Creates a dummy allocation that is permanently in the "lost" state
// (frame index VMA_FRAME_INDEX_LOST, no user-data string).
14726 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
14728 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
14729 (*pAllocation)->InitLost();
// Wraps vkAllocateMemory, enforcing the optional per-heap size budget:
// when a limit is active for the target heap, the allocation only proceeds
// (and the budget is decremented) if enough budget remains; otherwise it
// fails with VK_ERROR_OUT_OF_DEVICE_MEMORY. Invokes the user's
// pfnAllocate callback on success. NOTE(review): the declaration of `res`,
// braces and the final return are on elided lines.
14732 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
14734 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
// Budgeted path: check-and-decrement under the heap-limit mutex.
14737 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
14739 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
14740 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
14742 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
14743 if(res == VK_SUCCESS)
14745 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
14750 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Unbudgeted path: call straight through.
14755 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
// User notification callback on successful device allocation.
14758 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
14760 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
14766 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
14768 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
14770 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
14773 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
14775 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
14776 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
14778 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
14779 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation for CPU access. Lost-capable allocations cannot be
// mapped. Block suballocations map the whole owning block (ref-counted)
// and offset the returned pointer; dedicated allocations map their own
// VkDeviceMemory. NOTE(review): braces and the default switch arm are
// partially on elided lines.
14783 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
14785 if(hAllocation->CanBecomeLost())
14787 return VK_ERROR_MEMORY_MAP_FAILED;
14790 switch(hAllocation->GetType())
14792 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14794 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
14795 char *pBytes = VMA_NULL;
// Map the whole block once; the block keeps a map reference count.
14796 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
14797 if(res == VK_SUCCESS)
// Returned pointer is offset to this suballocation within the block.
14799 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
14800 hAllocation->BlockAllocMap();
14804 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14805 return hAllocation->DedicatedAllocMap(
this, ppData);
14808 return VK_ERROR_MEMORY_MAP_FAILED;
// Body of VmaAllocator_T::Unmap (signature on elided lines): reverses Map.
// Block suballocations decrement their map count and unmap the owning
// block; dedicated allocations unmap their own memory.
14814 switch(hAllocation->GetType())
14816 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14818 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
14819 hAllocation->BlockAllocUnmap();
14820 pBlock->Unmap(
this, 1);
14823 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14824 hAllocation->DedicatedAllocUnmap(
this);
// Binds a VkBuffer to the memory behind an allocation.
// Dedicated allocations bind directly via vkBindBufferMemory; block
// allocations delegate to the block so the allocation's offset is applied
// (offset argument lines were dropped by the extraction).
14831 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
14833 VkResult res = VK_SUCCESS;
14834 switch(hAllocation->GetType())
14836 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14837 res = GetVulkanFunctions().vkBindBufferMemory(
14840 hAllocation->GetMemory(),
14843 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14845 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
14846 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
14847 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
// Binds a VkImage to the memory behind an allocation — same structure as
// BindBufferMemory above.
14856 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
14858 VkResult res = VK_SUCCESS;
14859 switch(hAllocation->GetType())
14861 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14862 res = GetVulkanFunctions().vkBindImageMemory(
14865 hAllocation->GetMemory(),
14868 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14870 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
14871 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
14872 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
// Flushes or invalidates a sub-range of an allocation on non-coherent memory.
// Builds a VkMappedMemoryRange whose offset/size are expanded to multiples of
// nonCoherentAtomSize (a Vulkan validity requirement), clamped so the range
// never exceeds the allocation (dedicated) or the owning block (block alloc).
// No-op for coherent memory types or when size == 0.
14881 void VmaAllocator_T::FlushOrInvalidateAllocation(
14883 VkDeviceSize offset, VkDeviceSize size,
14884 VMA_CACHE_OPERATION op)
14886 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
14887 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
14889 const VkDeviceSize allocationSize = hAllocation->GetSize();
14890 VMA_ASSERT(offset <= allocationSize);
14892 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
14894 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
14895 memRange.memory = hAllocation->GetMemory();
14897 switch(hAllocation->GetType())
// Dedicated allocation: range is relative to the whole VkDeviceMemory object.
14899 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14900 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
14901 if(size == VK_WHOLE_SIZE)
14903 memRange.size = allocationSize - memRange.offset;
14907 VMA_ASSERT(offset + size <= allocationSize);
// Align the size up, but never past the end of the allocation.
14908 memRange.size = VMA_MIN(
14909 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
14910 allocationSize - memRange.offset);
// Block allocation: compute the range within the allocation first, then shift
// it by the allocation's offset inside the block and clamp to the block size.
14914 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14917 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
14918 if(size == VK_WHOLE_SIZE)
14920 size = allocationSize - offset;
14924 VMA_ASSERT(offset + size <= allocationSize);
14926 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
// The allocation offset is already atom-aligned by the allocator, so the sum stays aligned.
14929 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
14930 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
14931 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
14932 memRange.offset += allocationOffset;
14933 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
// Dispatch to the requested cache operation (switch header lost in extraction).
14944 case VMA_CACHE_FLUSH:
14945 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
14947 case VMA_CACHE_INVALIDATE:
14948 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
// Removes a dedicated allocation from the per-memory-type registry (under a
// write lock) and releases its VkDeviceMemory via FreeVulkanMemory.
14957 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
14959 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
14961 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
// Registry is a sorted vector of pointers; removal must find the exact entry.
14963 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14964 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
14965 VMA_ASSERT(pDedicatedAllocations);
14966 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
14967 VMA_ASSERT(success);
14970 VkDeviceMemory hMemory = allocation->GetMemory();
14982 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
14984 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Debug helper: fills a newly created/destroyed allocation with a byte
// pattern. Only acts when VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, the
// allocation cannot become lost, and its memory type is host-visible
// (otherwise it cannot be mapped for writing).
14987 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
14989 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
14990 !hAllocation->CanBecomeLost() &&
14991 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
14993 void* pData = VMA_NULL;
14994 VkResult res = Map(hAllocation, &pData);
14995 if(res == VK_SUCCESS)
14997 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
// Flush so the pattern is visible even on non-coherent memory, then unmap.
14998 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
14999 Unmap(hAllocation);
15003 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
// Writes the detailed JSON map of all memory owned by the allocator:
// 1) "DedicatedAllocations" — per memory type, each dedicated allocation;
// 2) "DefaultPools" — per memory type, the default block vectors;
// 3) "Pools" — every custom pool, keyed by pool id.
// Section objects are opened lazily, only when the first non-empty entry is found.
#if VMA_STATS_STRING_ENABLED 15010 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
15012 bool dedicatedAllocationsStarted =
false;
15013 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
// Read lock is sufficient: we only enumerate, never modify the registry.
15015 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15016 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
15017 VMA_ASSERT(pDedicatedAllocVector);
15018 if(pDedicatedAllocVector->empty() ==
false)
// Open the "DedicatedAllocations" object on first non-empty type.
15020 if(dedicatedAllocationsStarted ==
false)
15022 dedicatedAllocationsStarted =
true;
15023 json.WriteString(
"DedicatedAllocations");
15024 json.BeginObject();
// Key is "Type <index>" built via BeginString/ContinueString.
15027 json.BeginString(
"Type ");
15028 json.ContinueString(memTypeIndex);
15033 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
15035 json.BeginObject(
true);
15037 hAlloc->PrintParameters(json);
15044 if(dedicatedAllocationsStarted)
// Section 2: default (non-custom-pool) block vectors per memory type.
15050 bool allocationsStarted =
false;
15051 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15053 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
15055 if(allocationsStarted ==
false)
15057 allocationsStarted =
true;
15058 json.WriteString(
"DefaultPools");
15059 json.BeginObject();
15062 json.BeginString(
"Type ");
15063 json.ContinueString(memTypeIndex);
15066 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
15069 if(allocationsStarted)
// Section 3: custom pools, protected by the pools mutex; keyed by pool id.
15077 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15078 const size_t poolCount = m_Pools.size();
15081 json.WriteString(
"Pools");
15082 json.BeginObject();
15083 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
15085 json.BeginString();
15086 json.ContinueString(m_Pools[poolIndex]->GetId());
15089 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
// --- Public C API wrappers begin here. Function signatures were largely lost
// in the extraction; each body is tagged with a comment naming the entry point. ---
// vmaCreateAllocator: allocator object construction happens above (lines lost);
// this validates arguments and runs Init on the new allocator.
15096 #endif // #if VMA_STATS_STRING_ENABLED 15105 VMA_ASSERT(pCreateInfo && pAllocator);
15106 VMA_DEBUG_LOG(
"vmaCreateAllocator");
15108 return (*pAllocator)->Init(pCreateInfo);
// vmaDestroyAllocator: copies the callbacks out first, because vma_delete
// destroys the object that owns them.
15114 if(allocator != VK_NULL_HANDLE)
15116 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
15117 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
15118 vma_delete(&allocationCallbacks, allocator);
// vmaGetPhysicalDeviceProperties: returns a pointer into the allocator's cached copy.
15124 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
15126 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
15127 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
// vmaGetMemoryProperties: same pattern, cached VkPhysicalDeviceMemoryProperties.
15132 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
15134 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
15135 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
// vmaGetMemoryTypeProperties: bounds-checked lookup of one memory type's flags.
15140 uint32_t memoryTypeIndex,
15141 VkMemoryPropertyFlags* pFlags)
15143 VMA_ASSERT(allocator && pFlags);
15144 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
15145 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
// vmaSetCurrentFrameIndex: VMA_FRAME_INDEX_LOST is reserved and rejected.
15150 uint32_t frameIndex)
15152 VMA_ASSERT(allocator);
15153 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
15155 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15157 allocator->SetCurrentFrameIndex(frameIndex);
// vmaCalculateStats: thin forward to the allocator.
15164 VMA_ASSERT(allocator && pStats);
15165 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15166 allocator->CalculateStats(pStats);
// vmaBuildStatsString: serializes allocator statistics to a JSON string.
// Layout: "Total" stats, then per-heap objects (size, flags, stats, and the
// memory types belonging to that heap with their property flags), and — when
// detailedMap is VK_TRUE — the full detailed map. The result is returned as a
// heap string that the caller must release with vmaFreeStatsString.
15169 #if VMA_STATS_STRING_ENABLED 15173 char** ppStatsString,
15174 VkBool32 detailedMap)
15176 VMA_ASSERT(allocator && ppStatsString);
15177 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15179 VmaStringBuilder sb(allocator);
15181 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
15182 json.BeginObject();
15185 allocator->CalculateStats(&stats);
15187 json.WriteString(
"Total");
15188 VmaPrintStatInfo(json, stats.
total);
// One object per memory heap.
15190 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
15192 json.BeginString(
"Heap ");
15193 json.ContinueString(heapIndex);
15195 json.BeginObject();
15197 json.WriteString(
"Size");
15198 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
15200 json.WriteString(
"Flags");
15201 json.BeginArray(
true);
15202 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
15204 json.WriteString(
"DEVICE_LOCAL");
15210 json.WriteString(
"Stats");
15211 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
// Nested: the memory types that live in this heap.
15214 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
15216 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
15218 json.BeginString(
"Type ");
15219 json.ContinueString(typeIndex);
15222 json.BeginObject();
// Property flags rendered as a string array, one entry per set bit.
15224 json.WriteString(
"Flags");
15225 json.BeginArray(
true);
15226 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
15227 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
15229 json.WriteString(
"DEVICE_LOCAL");
15231 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15233 json.WriteString(
"HOST_VISIBLE");
15235 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
15237 json.WriteString(
"HOST_COHERENT");
15239 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
15241 json.WriteString(
"HOST_CACHED");
15243 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
15245 json.WriteString(
"LAZILY_ALLOCATED");
15251 json.WriteString(
"Stats");
15252 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
15261 if(detailedMap == VK_TRUE)
15263 allocator->PrintDetailedMap(json);
// Copy the builder contents into a NUL-terminated buffer owned by the caller.
15269 const size_t len = sb.GetLength();
15270 char*
const pChars = vma_new_array(allocator,
char, len + 1);
15273 memcpy(pChars, sb.GetData(), len);
15275 pChars[len] =
'\0';
15276 *ppStatsString = pChars;
// vmaFreeStatsString: releases a string produced by vmaBuildStatsString.
// The array length is recovered via strlen (+1 for the terminator), so the
// string must not have been modified in between.
15281 char* pStatsString)
15283 if(pStatsString != VMA_NULL)
15285 VMA_ASSERT(allocator);
15286 size_t len = strlen(pStatsString);
15287 vma_delete_array(allocator, pStatsString, len + 1);
// vmaFindMemoryTypeIndex: picks the best memory type among those allowed by
// memoryTypeBits. Translates VmaMemoryUsage into required/preferred
// VkMemoryPropertyFlags, then scans all types, rejecting any that miss a
// required flag and scoring the rest by the number of missing preferred flags
// (lower cost wins). Returns VK_ERROR_FEATURE_NOT_PRESENT when nothing fits.
15291 #endif // #if VMA_STATS_STRING_ENABLED 15298 uint32_t memoryTypeBits,
15300 uint32_t* pMemoryTypeIndex)
15302 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15303 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15304 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15311 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
15312 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
// Condition lost in extraction — presumably triggered by a mapped-flag request.
15317 preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
// Map the high-level usage enum onto concrete property flags.
// NOTE(review): case labels were dropped by the extraction; branches below
// correspond to GPU_ONLY / CPU_ONLY / CPU_TO_GPU / GPU_TO_CPU respectively.
15321 switch(pAllocationCreateInfo->
usage)
15326 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15328 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15332 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
15335 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15336 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15338 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15342 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15343 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
// Scan all memory types; memTypeBit walks in lockstep with memTypeIndex.
15349 *pMemoryTypeIndex = UINT32_MAX;
15350 uint32_t minCost = UINT32_MAX;
15351 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
15352 memTypeIndex < allocator->GetMemoryTypeCount();
15353 ++memTypeIndex, memTypeBit <<= 1)
15356 if((memTypeBit & memoryTypeBits) != 0)
15358 const VkMemoryPropertyFlags currFlags =
15359 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
// All required flags must be present in the candidate type.
15361 if((requiredFlags & ~currFlags) == 0)
// Cost = number of preferred flags the candidate lacks.
15364 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
15366 if(currCost < minCost)
15368 *pMemoryTypeIndex = memTypeIndex;
15373 minCost = currCost;
15378 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
// vmaFindMemoryTypeIndexForBufferInfo: creates a temporary VkBuffer only to
// query its VkMemoryRequirements, forwards memoryTypeBits to
// vmaFindMemoryTypeIndex (call line lost in extraction), then destroys the
// temporary buffer. No memory is allocated.
15383 const VkBufferCreateInfo* pBufferCreateInfo,
15385 uint32_t* pMemoryTypeIndex)
15387 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15388 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
15389 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15390 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15392 const VkDevice hDev = allocator->m_hDevice;
15393 VkBuffer hBuffer = VK_NULL_HANDLE;
15394 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
15395 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
15396 if(res == VK_SUCCESS)
15398 VkMemoryRequirements memReq = {};
15399 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
15400 hDev, hBuffer, &memReq);
15404 memReq.memoryTypeBits,
15405 pAllocationCreateInfo,
// Temporary buffer served its purpose; destroy it regardless of the lookup result.
15408 allocator->GetVulkanFunctions().vkDestroyBuffer(
15409 hDev, hBuffer, allocator->GetAllocationCallbacks());
// vmaFindMemoryTypeIndexForImageInfo: identical pattern with a temporary VkImage.
15416 const VkImageCreateInfo* pImageCreateInfo,
15418 uint32_t* pMemoryTypeIndex)
15420 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15421 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
15422 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15423 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15425 const VkDevice hDev = allocator->m_hDevice;
15426 VkImage hImage = VK_NULL_HANDLE;
15427 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
15428 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
15429 if(res == VK_SUCCESS)
15431 VkMemoryRequirements memReq = {};
15432 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
15433 hDev, hImage, &memReq);
15437 memReq.memoryTypeBits,
15438 pAllocationCreateInfo,
15441 allocator->GetVulkanFunctions().vkDestroyImage(
15442 hDev, hImage, allocator->GetAllocationCallbacks());
// vmaCreatePool: forwards to VmaAllocator_T::CreatePool and, when recording is
// compiled in and active, records the call for later replay.
15452 VMA_ASSERT(allocator && pCreateInfo && pPool);
15454 VMA_DEBUG_LOG(
"vmaCreatePool");
15456 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15458 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
15460 #if VMA_RECORDING_ENABLED 15461 if(allocator->GetRecorder() != VMA_NULL)
15463 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
// vmaDestroyPool: null pool is a silent no-op; otherwise record (if enabled) and destroy.
15474 VMA_ASSERT(allocator);
15476 if(pool == VK_NULL_HANDLE)
15481 VMA_DEBUG_LOG(
"vmaDestroyPool");
15483 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15485 #if VMA_RECORDING_ENABLED 15486 if(allocator->GetRecorder() != VMA_NULL)
15488 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
15492 allocator->DestroyPool(pool);
// vmaGetPoolStats: thin forward.
15500 VMA_ASSERT(allocator && pool && pPoolStats);
15502 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15504 allocator->GetPoolStats(pool, pPoolStats);
// vmaMakePoolAllocationsLost: records first (if enabled), then forwards.
15510 size_t* pLostAllocationCount)
15512 VMA_ASSERT(allocator && pool);
15514 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15516 #if VMA_RECORDING_ENABLED 15517 if(allocator->GetRecorder() != VMA_NULL)
15519 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
15523 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
// vmaCheckPoolCorruption: thin forward returning the corruption-check result.
15528 VMA_ASSERT(allocator && pool);
15530 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15532 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
15534 return allocator->CheckPoolCorruption(pool);
// vmaAllocateMemory: general-purpose allocation from explicit
// VkMemoryRequirements. Delegates to VmaAllocator_T::AllocateMemory with
// suballocation type UNKNOWN, optionally records the call, and fills
// *pAllocationInfo on success when the caller asked for it.
15539 const VkMemoryRequirements* pVkMemoryRequirements,
15544 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
15546 VMA_DEBUG_LOG(
"vmaAllocateMemory");
15548 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15550 VkResult result = allocator->AllocateMemory(
15551 *pVkMemoryRequirements,
15557 VMA_SUBALLOCATION_TYPE_UNKNOWN,
15560 #if VMA_RECORDING_ENABLED 15561 if(allocator->GetRecorder() != VMA_NULL)
15563 allocator->GetRecorder()->RecordAllocateMemory(
15564 allocator->GetCurrentFrameIndex(),
15565 *pVkMemoryRequirements,
15571 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
15573 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// vmaAllocateMemoryForBuffer: queries the buffer's requirements (including
// dedicated-allocation preference) and allocates with suballocation type BUFFER.
15586 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
15588 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
15590 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15592 VkMemoryRequirements vkMemReq = {};
15593 bool requiresDedicatedAllocation =
false;
15594 bool prefersDedicatedAllocation =
false;
15595 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
15596 requiresDedicatedAllocation,
15597 prefersDedicatedAllocation);
15599 VkResult result = allocator->AllocateMemory(
15601 requiresDedicatedAllocation,
15602 prefersDedicatedAllocation,
15606 VMA_SUBALLOCATION_TYPE_BUFFER,
15609 #if VMA_RECORDING_ENABLED 15610 if(allocator->GetRecorder() != VMA_NULL)
15612 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
15613 allocator->GetCurrentFrameIndex(),
15615 requiresDedicatedAllocation,
15616 prefersDedicatedAllocation,
15622 if(pAllocationInfo && result == VK_SUCCESS)
15624 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// vmaAllocateMemoryForImage: same as above with IMAGE_UNKNOWN suballocation
// type (tiling not known from a bare VkImage handle).
15637 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
15639 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
15641 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15643 VkMemoryRequirements vkMemReq = {};
15644 bool requiresDedicatedAllocation =
false;
15645 bool prefersDedicatedAllocation =
false;
15646 allocator->GetImageMemoryRequirements(image, vkMemReq,
15647 requiresDedicatedAllocation, prefersDedicatedAllocation);
15649 VkResult result = allocator->AllocateMemory(
15651 requiresDedicatedAllocation,
15652 prefersDedicatedAllocation,
15656 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
15659 #if VMA_RECORDING_ENABLED 15660 if(allocator->GetRecorder() != VMA_NULL)
15662 allocator->GetRecorder()->RecordAllocateMemoryForImage(
15663 allocator->GetCurrentFrameIndex(),
15665 requiresDedicatedAllocation,
15666 prefersDedicatedAllocation,
15672 if(pAllocationInfo && result == VK_SUCCESS)
15674 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// vmaFreeMemory: null allocation is a silent no-op (symmetrical with create
// paths that may return VK_NULL_HANDLE); records, then frees.
15684 VMA_ASSERT(allocator);
15686 if(allocation == VK_NULL_HANDLE)
15691 VMA_DEBUG_LOG(
"vmaFreeMemory");
15693 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15695 #if VMA_RECORDING_ENABLED 15696 if(allocator->GetRecorder() != VMA_NULL)
15698 allocator->GetRecorder()->RecordFreeMemory(
15699 allocator->GetCurrentFrameIndex(),
15704 allocator->FreeMemory(allocation);
// vmaResizeAllocation: records (if enabled), then attempts the in-place resize.
15710 VkDeviceSize newSize)
15712 VMA_ASSERT(allocator && allocation);
15714 VMA_DEBUG_LOG(
"vmaResizeAllocation");
15716 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15718 #if VMA_RECORDING_ENABLED 15719 if(allocator->GetRecorder() != VMA_NULL)
15721 allocator->GetRecorder()->RecordResizeAllocation(
15722 allocator->GetCurrentFrameIndex(),
15728 return allocator->ResizeAllocation(allocation, newSize);
// vmaGetAllocationInfo: record + forward.
15736 VMA_ASSERT(allocator && allocation && pAllocationInfo);
15738 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15740 #if VMA_RECORDING_ENABLED 15741 if(allocator->GetRecorder() != VMA_NULL)
15743 allocator->GetRecorder()->RecordGetAllocationInfo(
15744 allocator->GetCurrentFrameIndex(),
15749 allocator->GetAllocationInfo(allocation, pAllocationInfo);
// vmaTouchAllocation: record + forward, returns whether the allocation is still valid.
15756 VMA_ASSERT(allocator && allocation);
15758 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15760 #if VMA_RECORDING_ENABLED 15761 if(allocator->GetRecorder() != VMA_NULL)
15763 allocator->GetRecorder()->RecordTouchAllocation(
15764 allocator->GetCurrentFrameIndex(),
15769 return allocator->TouchAllocation(allocation);
// vmaSetAllocationUserData: sets user data first, then records the call.
15777 VMA_ASSERT(allocator && allocation);
15779 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15781 allocation->SetUserData(allocator, pUserData);
15783 #if VMA_RECORDING_ENABLED 15784 if(allocator->GetRecorder() != VMA_NULL)
15786 allocator->GetRecorder()->RecordSetAllocationUserData(
15787 allocator->GetCurrentFrameIndex(),
// vmaCreateLostAllocation: produces an allocation handle that is already lost.
15798 VMA_ASSERT(allocator && pAllocation);
15800 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
15802 allocator->CreateLostAllocation(pAllocation);
15804 #if VMA_RECORDING_ENABLED 15805 if(allocator->GetRecorder() != VMA_NULL)
15807 allocator->GetRecorder()->RecordCreateLostAllocation(
15808 allocator->GetCurrentFrameIndex(),
// vmaMapMemory: forward to VmaAllocator_T::Map, then record.
15819 VMA_ASSERT(allocator && allocation && ppData);
15821 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15823 VkResult res = allocator->Map(allocation, ppData);
15825 #if VMA_RECORDING_ENABLED 15826 if(allocator->GetRecorder() != VMA_NULL)
15828 allocator->GetRecorder()->RecordMapMemory(
15829 allocator->GetCurrentFrameIndex(),
// vmaUnmapMemory: record first, then unmap.
15841 VMA_ASSERT(allocator && allocation);
15843 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15845 #if VMA_RECORDING_ENABLED 15846 if(allocator->GetRecorder() != VMA_NULL)
15848 allocator->GetRecorder()->RecordUnmapMemory(
15849 allocator->GetCurrentFrameIndex(),
15854 allocator->Unmap(allocation);
// vmaFlushAllocation: flushes host writes on non-coherent memory, then records.
15859 VMA_ASSERT(allocator && allocation);
15861 VMA_DEBUG_LOG(
"vmaFlushAllocation");
15863 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15865 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
15867 #if VMA_RECORDING_ENABLED 15868 if(allocator->GetRecorder() != VMA_NULL)
15870 allocator->GetRecorder()->RecordFlushAllocation(
15871 allocator->GetCurrentFrameIndex(),
15872 allocation, offset, size);
// vmaInvalidateAllocation: same pattern with VMA_CACHE_INVALIDATE.
15879 VMA_ASSERT(allocator && allocation);
15881 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
15883 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15885 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
15887 #if VMA_RECORDING_ENABLED 15888 if(allocator->GetRecorder() != VMA_NULL)
15890 allocator->GetRecorder()->RecordInvalidateAllocation(
15891 allocator->GetCurrentFrameIndex(),
15892 allocation, offset, size);
// vmaCheckCorruption: thin forward over the given memory-type mask.
15899 VMA_ASSERT(allocator);
15901 VMA_DEBUG_LOG(
"vmaCheckCorruption");
15903 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15905 return allocator->CheckCorruption(memoryTypeBits);
// vmaDefragment (deprecated): most of the body was lost in extraction; what
// remains shows optional-info handling and a VK_NOT_READY branch.
15911 size_t allocationCount,
15912 VkBool32* pAllocationsChanged,
15922 if(pDefragmentationInfo != VMA_NULL)
15936 if(res == VK_NOT_READY)
// vmaDefragmentationBegin: forwards; result context is returned via *pContext.
15949 VMA_ASSERT(allocator && pInfo && pContext);
15953 VMA_DEBUG_LOG(
"vmaDefragmentationBegin");
15955 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15957 return allocator->DefragmentationBegin(*pInfo, pStats, pContext);
// vmaDefragmentationEnd: null context is accepted (no-op path lost in extraction).
15964 VMA_ASSERT(allocator);
15966 VMA_DEBUG_LOG(
"vmaDefragmentationEnd");
15968 if(context != VK_NULL_HANDLE)
15970 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15972 return allocator->DefragmentationEnd(context);
// vmaBindBufferMemory / vmaBindImageMemory: thin forwards to the allocator.
15985 VMA_ASSERT(allocator && allocation && buffer);
15987 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
15989 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15991 return allocator->BindBufferMemory(allocation, buffer);
15999 VMA_ASSERT(allocator && allocation && image);
16001 VMA_DEBUG_LOG(
"vmaBindImageMemory");
16003 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16005 return allocator->BindImageMemory(allocation, image);
// vmaCreateBuffer: creates the VkBuffer, allocates suitable memory, binds the
// two, and optionally fills *pAllocationInfo. On any failure after a partial
// success, it rolls back (frees the allocation and/or destroys the buffer) so
// the caller never receives half-initialized handles.
16010 const VkBufferCreateInfo* pBufferCreateInfo,
16016 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
// Zero-size buffers are rejected up front rather than passed to Vulkan.
16018 if(pBufferCreateInfo->size == 0)
16020 return VK_ERROR_VALIDATION_FAILED_EXT;
16023 VMA_DEBUG_LOG(
"vmaCreateBuffer");
16025 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16027 *pBuffer = VK_NULL_HANDLE;
16028 *pAllocation = VK_NULL_HANDLE;
16031 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
16032 allocator->m_hDevice,
16034 allocator->GetAllocationCallbacks(),
16039 VkMemoryRequirements vkMemReq = {};
16040 bool requiresDedicatedAllocation =
false;
16041 bool prefersDedicatedAllocation =
false;
16042 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
16043 requiresDedicatedAllocation, prefersDedicatedAllocation);
// Sanity-check the driver-reported alignment against the device's minimum
// offset alignments for each buffer usage present.
16047 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
16049 VMA_ASSERT(vkMemReq.alignment %
16050 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
16052 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
16054 VMA_ASSERT(vkMemReq.alignment %
16055 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
16057 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
16059 VMA_ASSERT(vkMemReq.alignment %
16060 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
16064 res = allocator->AllocateMemory(
16066 requiresDedicatedAllocation,
16067 prefersDedicatedAllocation,
16070 *pAllocationCreateInfo,
16071 VMA_SUBALLOCATION_TYPE_BUFFER,
16074 #if VMA_RECORDING_ENABLED 16075 if(allocator->GetRecorder() != VMA_NULL)
16077 allocator->GetRecorder()->RecordCreateBuffer(
16078 allocator->GetCurrentFrameIndex(),
16079 *pBufferCreateInfo,
16080 *pAllocationCreateInfo,
16088 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
16092 #if VMA_STATS_STRING_ENABLED 16093 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
16095 if(pAllocationInfo != VMA_NULL)
16097 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// Rollback path: bind failed — free memory, then destroy the buffer.
16102 allocator->FreeMemory(*pAllocation);
16103 *pAllocation = VK_NULL_HANDLE;
16104 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16105 *pBuffer = VK_NULL_HANDLE;
// Rollback path: allocation failed — destroy the buffer only.
16108 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16109 *pBuffer = VK_NULL_HANDLE;
// vmaDestroyBuffer: both handles may independently be null; records first.
16120 VMA_ASSERT(allocator);
16122 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
16127 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
16129 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16131 #if VMA_RECORDING_ENABLED 16132 if(allocator->GetRecorder() != VMA_NULL)
16134 allocator->GetRecorder()->RecordDestroyBuffer(
16135 allocator->GetCurrentFrameIndex(),
16140 if(buffer != VK_NULL_HANDLE)
16142 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
16145 if(allocation != VK_NULL_HANDLE)
16147 allocator->FreeMemory(allocation);
// vmaCreateImage: counterpart of vmaCreateBuffer for VkImage — create image,
// allocate memory (suballocation type chosen from tiling), bind, optional
// info; rolls back partial work on failure.
16153 const VkImageCreateInfo* pImageCreateInfo,
16159 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
// Degenerate extents/mips/layers are rejected before touching Vulkan.
16161 if(pImageCreateInfo->extent.width == 0 ||
16162 pImageCreateInfo->extent.height == 0 ||
16163 pImageCreateInfo->extent.depth == 0 ||
16164 pImageCreateInfo->mipLevels == 0 ||
16165 pImageCreateInfo->arrayLayers == 0)
16167 return VK_ERROR_VALIDATION_FAILED_EXT;
16170 VMA_DEBUG_LOG(
"vmaCreateImage");
16172 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16174 *pImage = VK_NULL_HANDLE;
16175 *pAllocation = VK_NULL_HANDLE;
16178 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
16179 allocator->m_hDevice,
16181 allocator->GetAllocationCallbacks(),
// Optimal vs linear tiling matters for buffer-image granularity bookkeeping.
16185 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
16186 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
16187 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
16190 VkMemoryRequirements vkMemReq = {};
16191 bool requiresDedicatedAllocation =
false;
16192 bool prefersDedicatedAllocation =
false;
16193 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
16194 requiresDedicatedAllocation, prefersDedicatedAllocation);
16196 res = allocator->AllocateMemory(
16198 requiresDedicatedAllocation,
16199 prefersDedicatedAllocation,
16202 *pAllocationCreateInfo,
16206 #if VMA_RECORDING_ENABLED 16207 if(allocator->GetRecorder() != VMA_NULL)
16209 allocator->GetRecorder()->RecordCreateImage(
16210 allocator->GetCurrentFrameIndex(),
16212 *pAllocationCreateInfo,
16220 res = allocator->BindImageMemory(*pAllocation, *pImage);
16224 #if VMA_STATS_STRING_ENABLED 16225 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
16227 if(pAllocationInfo != VMA_NULL)
16229 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// Rollback: bind failed — free memory, destroy image.
16234 allocator->FreeMemory(*pAllocation);
16235 *pAllocation = VK_NULL_HANDLE;
16236 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
16237 *pImage = VK_NULL_HANDLE;
// Rollback: allocation failed — destroy image only.
16240 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
16241 *pImage = VK_NULL_HANDLE;
// vmaDestroyImage: both handles may independently be null; records first.
16252 VMA_ASSERT(allocator);
16254 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
16259 VMA_DEBUG_LOG(
"vmaDestroyImage");
16261 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16263 #if VMA_RECORDING_ENABLED 16264 if(allocator->GetRecorder() != VMA_NULL)
16266 allocator->GetRecorder()->RecordDestroyImage(
16267 allocator->GetCurrentFrameIndex(),
16272 if(image != VK_NULL_HANDLE)
16274 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
16276 if(allocation != VK_NULL_HANDLE)
16278 allocator->FreeMemory(allocation);
16282 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1707
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:2010
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1765
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side, like memcpy(), memmove().
Definition: vk_mem_alloc.h:2762
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Deprecated. Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
Definition: vk_mem_alloc.h:1739
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2335
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1719
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1967
Definition: vk_mem_alloc.h:2070
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:2715
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1711
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2435
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1762
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2798
Use this flag if you always allocate only buffers and linear images or only optimal images out of this pool, so Buffer-Image Granularity can be ignored.
Definition: vk_mem_alloc.h:2224
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1606
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInfo::frameInUseCount back from now.
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2316
Definition: vk_mem_alloc.h:2047
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:2718
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1700
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2123
Definition: vk_mem_alloc.h:1994
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1774
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2252
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1.
Definition: vk_mem_alloc.h:1828
Description of an Allocator to be created.
Definition: vk_mem_alloc.h:1759
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1998
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1900
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1716
VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:2752
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1899
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2802
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1791
VmaStatInfo total
Definition: vk_mem_alloc.h:1909
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2810
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2107
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2793
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1717
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1642
Represents the main, initialized object of this library.
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1768
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2266
Definition: vk_mem_alloc.h:2260
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:1723
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1835
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2445
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1712
VkResult vmaDefragmentationBegin(VmaAllocator allocator, const VmaDefragmentationInfo2 *pInfo, VmaDefragmentationStats *pStats, VmaDefragmentationContext *pContext)
Begins defragmentation process.
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1737
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2144
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2286
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:2322
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1698
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2269
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2767
VmaMemoryUsage
Definition: vk_mem_alloc.h:1945
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:2727
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2788
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2806
Definition: vk_mem_alloc.h:1984
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2131
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1715
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
VkResult vmaDefragmentationEnd(VmaAllocator allocator, VmaDefragmentationContext context)
Ends defragmentation process.
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1905
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1648
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:2706
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
Definition: vk_mem_alloc.h:2704
Definition: vk_mem_alloc.h:2091
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:2733
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1669
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1741
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1674
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2808
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2118
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2332
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1708
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1888
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes. Optional.
Definition: vk_mem_alloc.h:2281
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1661
Definition: vk_mem_alloc.h:2256
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:2054
Represents an opaque object describing a started defragmentation process.
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1901
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1665
Definition: vk_mem_alloc.h:2081
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2272
Definition: vk_mem_alloc.h:1993
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1714
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2113
Definition: vk_mem_alloc.h:2104
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1891
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1710
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2294
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1777
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2325
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2102
VkDeviceSize maxCpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2757
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2137
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1816
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1907
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:2034
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1900
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1721
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1747
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use...
Definition: vk_mem_alloc.h:2703
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:2781
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1663
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1720
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2308
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1713
Definition: vk_mem_alloc.h:2065
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of an Allocator to be created.
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1755
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2459
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1771
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1900
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1897
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2313
Parameters for defragmentation.
Definition: vk_mem_alloc.h:2712
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions...
Definition: vk_mem_alloc.h:2074
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:2440
Definition: vk_mem_alloc.h:2088
Definition: vk_mem_alloc.h:2100
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2804
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1706
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1895
Definition: vk_mem_alloc.h:1950
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2262
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1744
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1893
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1718
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1722
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2021
Definition: vk_mem_alloc.h:2095
Definition: vk_mem_alloc.h:1977
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2454
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1696
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1709
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2241
VkResult vmaResizeAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize newSize)
Tries to resize an allocation in place, if there is enough free memory after it.
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2421
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:2085
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2206
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1901
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
Definition: vk_mem_alloc.h:2060
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1731
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1908
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2319
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1901
struct VmaDefragmentationInfo2 VmaDefragmentationInfo2
Parameters for defragmentation.
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side...
Definition: vk_mem_alloc.h:2772
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2426
uint32_t poolCount
Number of pools in pPools array.
Definition: vk_mem_alloc.h:2736