23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1648 #ifndef VMA_RECORDING_ENABLED 1650 #define VMA_RECORDING_ENABLED 1 1652 #define VMA_RECORDING_ENABLED 0 1657 #define NOMINMAX // For windows.h 1661 #include <vulkan/vulkan.h> 1664 #if VMA_RECORDING_ENABLED 1665 #include <windows.h> 1668 #if !defined(VMA_DEDICATED_ALLOCATION) 1669 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1670 #define VMA_DEDICATED_ALLOCATION 1 1672 #define VMA_DEDICATED_ALLOCATION 0 1690 uint32_t memoryType,
1691 VkDeviceMemory memory,
1696 uint32_t memoryType,
1697 VkDeviceMemory memory,
1770 #if VMA_DEDICATED_ALLOCATION 1771 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1772 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1899 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1907 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1917 uint32_t memoryTypeIndex,
1918 VkMemoryPropertyFlags* pFlags);
1930 uint32_t frameIndex);
1963 #ifndef VMA_STATS_STRING_ENABLED 1964 #define VMA_STATS_STRING_ENABLED 1 1967 #if VMA_STATS_STRING_ENABLED 1974 char** ppStatsString,
1975 VkBool32 detailedMap);
1979 char* pStatsString);
1981 #endif // #if VMA_STATS_STRING_ENABLED 2214 uint32_t memoryTypeBits,
2216 uint32_t* pMemoryTypeIndex);
2232 const VkBufferCreateInfo* pBufferCreateInfo,
2234 uint32_t* pMemoryTypeIndex);
2250 const VkImageCreateInfo* pImageCreateInfo,
2252 uint32_t* pMemoryTypeIndex);
2424 size_t* pLostAllocationCount);
2523 const VkMemoryRequirements* pVkMemoryRequirements,
2549 const VkMemoryRequirements* pVkMemoryRequirements,
2551 size_t allocationCount,
2596 size_t allocationCount,
2622 VkDeviceSize newSize);
2991 size_t allocationCount,
2992 VkBool32* pAllocationsChanged,
3058 const VkBufferCreateInfo* pBufferCreateInfo,
3083 const VkImageCreateInfo* pImageCreateInfo,
3109 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 3112 #if defined(__cplusplus) && defined(__INTELLISENSE__) 3113 #define VMA_IMPLEMENTATION 3116 #ifdef VMA_IMPLEMENTATION 3117 #undef VMA_IMPLEMENTATION 3139 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 3140 #define VMA_STATIC_VULKAN_FUNCTIONS 1 3152 #if VMA_USE_STL_CONTAINERS 3153 #define VMA_USE_STL_VECTOR 1 3154 #define VMA_USE_STL_UNORDERED_MAP 1 3155 #define VMA_USE_STL_LIST 1 3158 #ifndef VMA_USE_STL_SHARED_MUTEX 3160 #if __cplusplus >= 201703L 3161 #define VMA_USE_STL_SHARED_MUTEX 1 3165 #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L 3166 #define VMA_USE_STL_SHARED_MUTEX 1 3168 #define VMA_USE_STL_SHARED_MUTEX 0 3172 #if VMA_USE_STL_VECTOR 3176 #if VMA_USE_STL_UNORDERED_MAP 3177 #include <unordered_map> 3180 #if VMA_USE_STL_LIST 3189 #include <algorithm> 3195 #define VMA_NULL nullptr 3198 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16) 3200 void *aligned_alloc(
size_t alignment,
size_t size)
3203 if(alignment <
sizeof(
void*))
3205 alignment =
sizeof(
void*);
3208 return memalign(alignment, size);
3210 #elif defined(__APPLE__) || defined(__ANDROID__) 3212 void *aligned_alloc(
size_t alignment,
size_t size)
3215 if(alignment <
sizeof(
void*))
3217 alignment =
sizeof(
void*);
3221 if(posix_memalign(&pointer, alignment, size) == 0)
3235 #define VMA_ASSERT(expr) assert(expr) 3237 #define VMA_ASSERT(expr) 3243 #ifndef VMA_HEAVY_ASSERT 3245 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 3247 #define VMA_HEAVY_ASSERT(expr) 3251 #ifndef VMA_ALIGN_OF 3252 #define VMA_ALIGN_OF(type) (__alignof(type)) 3255 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 3257 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 3259 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 3263 #ifndef VMA_SYSTEM_FREE 3265 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 3267 #define VMA_SYSTEM_FREE(ptr) free(ptr) 3272 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 3276 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 3280 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 3284 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 3287 #ifndef VMA_DEBUG_LOG 3288 #define VMA_DEBUG_LOG(format, ...) 3298 #if VMA_STATS_STRING_ENABLED 3299 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
3301 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Formats num as decimal text into outStr (at most strLen bytes including the
// terminating NUL), used by the JSON statistics writer.
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}
// Formats a pointer value into outStr (at most strLen bytes including the
// terminating NUL). The exact "%p" representation is platform-dependent.
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
3317 void Lock() { m_Mutex.lock(); }
3318 void Unlock() { m_Mutex.unlock(); }
3322 #define VMA_MUTEX VmaMutex 3326 #ifndef VMA_RW_MUTEX 3327 #if VMA_USE_STL_SHARED_MUTEX 3329 #include <shared_mutex> 3333 void LockRead() { m_Mutex.lock_shared(); }
3334 void UnlockRead() { m_Mutex.unlock_shared(); }
3335 void LockWrite() { m_Mutex.lock(); }
3336 void UnlockWrite() { m_Mutex.unlock(); }
3338 std::shared_mutex m_Mutex;
3340 #define VMA_RW_MUTEX VmaRWMutex 3341 #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600 3347 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
3348 void LockRead() { AcquireSRWLockShared(&m_Lock); }
3349 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
3350 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
3351 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
3355 #define VMA_RW_MUTEX VmaRWMutex 3361 void LockRead() { m_Mutex.Lock(); }
3362 void UnlockRead() { m_Mutex.Unlock(); }
3363 void LockWrite() { m_Mutex.Lock(); }
3364 void UnlockWrite() { m_Mutex.Unlock(); }
3368 #define VMA_RW_MUTEX VmaRWMutex 3369 #endif // #if VMA_USE_STL_SHARED_MUTEX 3370 #endif // #ifndef VMA_RW_MUTEX 3380 #ifndef VMA_ATOMIC_UINT32 3381 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 3384 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 3389 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 3392 #ifndef VMA_DEBUG_ALIGNMENT 3397 #define VMA_DEBUG_ALIGNMENT (1) 3400 #ifndef VMA_DEBUG_MARGIN 3405 #define VMA_DEBUG_MARGIN (0) 3408 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 3413 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 3416 #ifndef VMA_DEBUG_DETECT_CORRUPTION 3422 #define VMA_DEBUG_DETECT_CORRUPTION (0) 3425 #ifndef VMA_DEBUG_GLOBAL_MUTEX 3430 #define VMA_DEBUG_GLOBAL_MUTEX (0) 3433 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 3438 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 3441 #ifndef VMA_SMALL_HEAP_MAX_SIZE 3442 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 3446 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 3447 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 3451 #ifndef VMA_CLASS_NO_COPY 3452 #define VMA_CLASS_NO_COPY(className) \ 3454 className(const className&) = delete; \ 3455 className& operator=(const className&) = delete; 3458 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
// Frame-index value marking an allocation as lost.
static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;

// Magic number written into debug margins and later checked to detect memory corruption.
static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;

// Byte patterns used to fill allocation contents on create/destroy when
// VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled.
static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;

// Internal allocation-strategy flags start at this bit offset so they do not
// collide with the public strategy flags.
static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
3472 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
3473 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in the given 32-bit value (population count).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    // Brian Kernighan's method: each iteration clears the lowest set bit,
    // so the loop runs once per set bit.
    uint32_t count = 0;
    while(v != 0)
    {
        v &= v - 1;
        ++count;
    }
    return count;
}
// Aligns val up to the nearest multiple of align.
// Works for any positive align, not only powers of 2.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T bumped = val + align - 1;
    return bumped / align * align;
}
// Aligns val down to the nearest multiple of align.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    const T units = val / align;
    return units * align;
}
// Division with mathematical rounding to the nearest number (halves round up).
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
// Returns true when x is a power of 2. Note: also returns true for x == 0
// (preserved original behavior) - callers are expected to pass x > 0.
template <typename T>
inline bool VmaIsPow2(T x)
{
    const T withoutLowestBit = x & (x - 1);
    return withoutLowestBit == 0;
}
3520 static inline uint32_t VmaNextPow2(uint32_t v)
3531 static inline uint64_t VmaNextPow2(uint64_t v)
3545 static inline uint32_t VmaPrevPow2(uint32_t v)
3555 static inline uint64_t VmaPrevPow2(uint64_t v)
3567 static inline bool VmaStrIsEmpty(
const char* pStr)
3569 return pStr == VMA_NULL || *pStr ==
'\0';
3572 #if VMA_STATS_STRING_ENABLED 3574 static const char* VmaAlgorithmToStr(uint32_t algorithm)
3590 #endif // #if VMA_STATS_STRING_ENABLED 3594 template<
typename Iterator,
typename Compare>
3595 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
3597 Iterator centerValue = end; --centerValue;
3598 Iterator insertIndex = beg;
3599 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
3601 if(cmp(*memTypeIndex, *centerValue))
3603 if(insertIndex != memTypeIndex)
3605 VMA_SWAP(*memTypeIndex, *insertIndex);
3610 if(insertIndex != centerValue)
3612 VMA_SWAP(*insertIndex, *centerValue);
3617 template<
typename Iterator,
typename Compare>
3618 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
3622 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
3623 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
3624 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
3628 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 3630 #endif // #ifndef VMA_SORT 3639 static inline bool VmaBlocksOnSamePage(
3640 VkDeviceSize resourceAOffset,
3641 VkDeviceSize resourceASize,
3642 VkDeviceSize resourceBOffset,
3643 VkDeviceSize pageSize)
3645 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3646 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3647 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3648 VkDeviceSize resourceBStart = resourceBOffset;
3649 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3650 return resourceAEndPage == resourceBStartPage;
// Category of a suballocation - used to enforce the bufferImageGranularity
// separation rules between buffers and images.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,          // Unused range.
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,       // Resource kind unknown - treated conservatively.
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // Image with unknown tiling.
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
3670 static inline bool VmaIsBufferImageGranularityConflict(
3671 VmaSuballocationType suballocType1,
3672 VmaSuballocationType suballocType2)
3674 if(suballocType1 > suballocType2)
3676 VMA_SWAP(suballocType1, suballocType2);
3679 switch(suballocType1)
3681 case VMA_SUBALLOCATION_TYPE_FREE:
3683 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3685 case VMA_SUBALLOCATION_TYPE_BUFFER:
3687 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3688 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3689 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3691 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3692 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3693 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3694 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3696 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3697 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3705 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3707 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3708 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3709 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3711 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3715 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3717 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3718 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3719 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3721 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3732 VMA_CLASS_NO_COPY(VmaMutexLock)
3734 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex =
true) :
3735 m_pMutex(useMutex ? &mutex : VMA_NULL)
3736 {
if(m_pMutex) { m_pMutex->Lock(); } }
3738 {
if(m_pMutex) { m_pMutex->Unlock(); } }
3740 VMA_MUTEX* m_pMutex;
3744 struct VmaMutexLockRead
3746 VMA_CLASS_NO_COPY(VmaMutexLockRead)
3748 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
3749 m_pMutex(useMutex ? &mutex : VMA_NULL)
3750 {
if(m_pMutex) { m_pMutex->LockRead(); } }
3751 ~VmaMutexLockRead() {
if(m_pMutex) { m_pMutex->UnlockRead(); } }
3753 VMA_RW_MUTEX* m_pMutex;
3757 struct VmaMutexLockWrite
3759 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
3761 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex) :
3762 m_pMutex(useMutex ? &mutex : VMA_NULL)
3763 {
if(m_pMutex) { m_pMutex->LockWrite(); } }
3764 ~VmaMutexLockWrite() {
if(m_pMutex) { m_pMutex->UnlockWrite(); } }
3766 VMA_RW_MUTEX* m_pMutex;
3769 #if VMA_DEBUG_GLOBAL_MUTEX 3770 static VMA_MUTEX gDebugGlobalMutex;
3771 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3773 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3777 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over the sorted range [beg, end): returns an iterator to the
// first element that is NOT less than key (like std::lower_bound), or end when
// every element is less than key.
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpLess cmp)
{
    size_t lo = 0, hi = (end - beg);
    while(lo < hi)
    {
        const size_t mid = (lo + hi) / 2;
        if(cmp(*(beg + mid), key))
        {
            lo = mid + 1;
        }
        else
        {
            hi = mid;
        }
    }
    return beg + lo;
}
3812 template<
typename T>
3813 static bool VmaValidatePointerArray(uint32_t count,
const T* arr)
3815 for(uint32_t i = 0; i < count; ++i)
3817 const T iPtr = arr[i];
3818 if(iPtr == VMA_NULL)
3822 for(uint32_t j = i + 1; j < count; ++j)
3836 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3838 if((pAllocationCallbacks != VMA_NULL) &&
3839 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3841 return (*pAllocationCallbacks->pfnAllocation)(
3842 pAllocationCallbacks->pUserData,
3845 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3849 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3853 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3855 if((pAllocationCallbacks != VMA_NULL) &&
3856 (pAllocationCallbacks->pfnFree != VMA_NULL))
3858 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3862 VMA_SYSTEM_FREE(ptr);
3866 template<
typename T>
3867 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3869 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3872 template<
typename T>
3873 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3875 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
3878 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3880 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3882 template<
typename T>
3883 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
3886 VmaFree(pAllocationCallbacks, ptr);
3889 template<
typename T>
3890 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
3894 for(
size_t i = count; i--; )
3898 VmaFree(pAllocationCallbacks, ptr);
3903 template<
typename T>
3904 class VmaStlAllocator
3907 const VkAllocationCallbacks*
const m_pCallbacks;
3908 typedef T value_type;
3910 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
3911 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
3913 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3914 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
3916 template<
typename U>
3917 bool operator==(
const VmaStlAllocator<U>& rhs)
const 3919 return m_pCallbacks == rhs.m_pCallbacks;
3921 template<
typename U>
3922 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 3924 return m_pCallbacks != rhs.m_pCallbacks;
3927 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
3930 #if VMA_USE_STL_VECTOR 3932 #define VmaVector std::vector 3934 template<
typename T,
typename allocatorT>
3935 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
3937 vec.insert(vec.begin() + index, item);
3940 template<
typename T,
typename allocatorT>
3941 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
3943 vec.erase(vec.begin() + index);
3946 #else // #if VMA_USE_STL_VECTOR 3951 template<
typename T,
typename AllocatorT>
3955 typedef T value_type;
3957 VmaVector(
const AllocatorT& allocator) :
3958 m_Allocator(allocator),
3965 VmaVector(
size_t count,
const AllocatorT& allocator) :
3966 m_Allocator(allocator),
3967 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
3973 VmaVector(
const VmaVector<T, AllocatorT>& src) :
3974 m_Allocator(src.m_Allocator),
3975 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
3976 m_Count(src.m_Count),
3977 m_Capacity(src.m_Count)
3981 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
3987 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3990 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
3994 resize(rhs.m_Count);
3997 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
4003 bool empty()
const {
return m_Count == 0; }
4004 size_t size()
const {
return m_Count; }
4005 T* data() {
return m_pArray; }
4006 const T* data()
const {
return m_pArray; }
4008 T& operator[](
size_t index)
4010 VMA_HEAVY_ASSERT(index < m_Count);
4011 return m_pArray[index];
4013 const T& operator[](
size_t index)
const 4015 VMA_HEAVY_ASSERT(index < m_Count);
4016 return m_pArray[index];
4021 VMA_HEAVY_ASSERT(m_Count > 0);
4024 const T& front()
const 4026 VMA_HEAVY_ASSERT(m_Count > 0);
4031 VMA_HEAVY_ASSERT(m_Count > 0);
4032 return m_pArray[m_Count - 1];
4034 const T& back()
const 4036 VMA_HEAVY_ASSERT(m_Count > 0);
4037 return m_pArray[m_Count - 1];
4040 void reserve(
size_t newCapacity,
bool freeMemory =
false)
4042 newCapacity = VMA_MAX(newCapacity, m_Count);
4044 if((newCapacity < m_Capacity) && !freeMemory)
4046 newCapacity = m_Capacity;
4049 if(newCapacity != m_Capacity)
4051 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
4054 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
4056 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4057 m_Capacity = newCapacity;
4058 m_pArray = newArray;
4062 void resize(
size_t newCount,
bool freeMemory =
false)
4064 size_t newCapacity = m_Capacity;
4065 if(newCount > m_Capacity)
4067 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
4071 newCapacity = newCount;
4074 if(newCapacity != m_Capacity)
4076 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
4077 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
4078 if(elementsToCopy != 0)
4080 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
4082 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4083 m_Capacity = newCapacity;
4084 m_pArray = newArray;
4090 void clear(
bool freeMemory =
false)
4092 resize(0, freeMemory);
4095 void insert(
size_t index,
const T& src)
4097 VMA_HEAVY_ASSERT(index <= m_Count);
4098 const size_t oldCount = size();
4099 resize(oldCount + 1);
4100 if(index < oldCount)
4102 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
4104 m_pArray[index] = src;
4107 void remove(
size_t index)
4109 VMA_HEAVY_ASSERT(index < m_Count);
4110 const size_t oldCount = size();
4111 if(index < oldCount - 1)
4113 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
4115 resize(oldCount - 1);
4118 void push_back(
const T& src)
4120 const size_t newIndex = size();
4121 resize(newIndex + 1);
4122 m_pArray[newIndex] = src;
4127 VMA_HEAVY_ASSERT(m_Count > 0);
4131 void push_front(
const T& src)
4138 VMA_HEAVY_ASSERT(m_Count > 0);
4142 typedef T* iterator;
4144 iterator begin() {
return m_pArray; }
4145 iterator end() {
return m_pArray + m_Count; }
4148 AllocatorT m_Allocator;
4154 template<
typename T,
typename allocatorT>
4155 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
4157 vec.insert(index, item);
4160 template<
typename T,
typename allocatorT>
4161 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
4166 #endif // #if VMA_USE_STL_VECTOR 4168 template<
typename CmpLess,
typename VectorT>
4169 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
4171 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4173 vector.data() + vector.size(),
4175 CmpLess()) - vector.data();
4176 VmaVectorInsert(vector, indexToInsert, value);
4177 return indexToInsert;
// Removes the first element equivalent to value (under CmpLess) from a sorted
// vector. Returns true if an element was found and removed.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    // Equivalence check: neither element orders before the other.
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
4198 template<
typename CmpLess,
typename IterT,
typename KeyT>
4199 IterT VmaVectorFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value)
4202 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
4203 beg, end, value, comparator);
4205 (!comparator(*it, value) && !comparator(value, *it)))
4220 template<
typename T>
4221 class VmaPoolAllocator
4223 VMA_CLASS_NO_COPY(VmaPoolAllocator)
4225 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
4226 ~VmaPoolAllocator();
4234 uint32_t NextFreeIndex;
4242 uint32_t FirstFreeIndex;
4245 const VkAllocationCallbacks* m_pAllocationCallbacks;
4246 const uint32_t m_FirstBlockCapacity;
4247 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
4249 ItemBlock& CreateNewBlock();
4252 template<
typename T>
4253 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
4254 m_pAllocationCallbacks(pAllocationCallbacks),
4255 m_FirstBlockCapacity(firstBlockCapacity),
4256 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
4258 VMA_ASSERT(m_FirstBlockCapacity > 1);
4261 template<
typename T>
4262 VmaPoolAllocator<T>::~VmaPoolAllocator()
4267 template<
typename T>
4268 void VmaPoolAllocator<T>::Clear()
4270 for(
size_t i = m_ItemBlocks.size(); i--; )
4271 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
4272 m_ItemBlocks.clear();
4275 template<
typename T>
4276 T* VmaPoolAllocator<T>::Alloc()
4278 for(
size_t i = m_ItemBlocks.size(); i--; )
4280 ItemBlock& block = m_ItemBlocks[i];
4282 if(block.FirstFreeIndex != UINT32_MAX)
4284 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
4285 block.FirstFreeIndex = pItem->NextFreeIndex;
4286 return &pItem->Value;
4291 ItemBlock& newBlock = CreateNewBlock();
4292 Item*
const pItem = &newBlock.pItems[0];
4293 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
4294 return &pItem->Value;
4297 template<
typename T>
4298 void VmaPoolAllocator<T>::Free(T* ptr)
4301 for(
size_t i = m_ItemBlocks.size(); i--; )
4303 ItemBlock& block = m_ItemBlocks[i];
4307 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
4310 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
4312 const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
4313 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
4314 block.FirstFreeIndex = index;
4318 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
4321 template<
typename T>
4322 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
4324 const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
4325 m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
4327 const ItemBlock newBlock = {
4328 vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
4332 m_ItemBlocks.push_back(newBlock);
4335 for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
4336 newBlock.pItems[i].NextFreeIndex = i + 1;
4337 newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
4338 return m_ItemBlocks.back();
4344 #if VMA_USE_STL_LIST 4346 #define VmaList std::list 4348 #else // #if VMA_USE_STL_LIST 4350 template<
typename T>
4359 template<
typename T>
4362 VMA_CLASS_NO_COPY(VmaRawList)
4364 typedef VmaListItem<T> ItemType;
4366 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
4370 size_t GetCount()
const {
return m_Count; }
4371 bool IsEmpty()
const {
return m_Count == 0; }
4373 ItemType* Front() {
return m_pFront; }
4374 const ItemType* Front()
const {
return m_pFront; }
4375 ItemType* Back() {
return m_pBack; }
4376 const ItemType* Back()
const {
return m_pBack; }
4378 ItemType* PushBack();
4379 ItemType* PushFront();
4380 ItemType* PushBack(
const T& value);
4381 ItemType* PushFront(
const T& value);
4386 ItemType* InsertBefore(ItemType* pItem);
4388 ItemType* InsertAfter(ItemType* pItem);
4390 ItemType* InsertBefore(ItemType* pItem,
const T& value);
4391 ItemType* InsertAfter(ItemType* pItem,
const T& value);
4393 void Remove(ItemType* pItem);
4396 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
4397 VmaPoolAllocator<ItemType> m_ItemAllocator;
4403 template<
typename T>
4404 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
4405 m_pAllocationCallbacks(pAllocationCallbacks),
4406 m_ItemAllocator(pAllocationCallbacks, 128),
4413 template<
typename T>
4414 VmaRawList<T>::~VmaRawList()
4420 template<
typename T>
4421 void VmaRawList<T>::Clear()
4423 if(IsEmpty() ==
false)
4425 ItemType* pItem = m_pBack;
4426 while(pItem != VMA_NULL)
4428 ItemType*
const pPrevItem = pItem->pPrev;
4429 m_ItemAllocator.Free(pItem);
4432 m_pFront = VMA_NULL;
4438 template<
typename T>
4439 VmaListItem<T>* VmaRawList<T>::PushBack()
4441 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4442 pNewItem->pNext = VMA_NULL;
4445 pNewItem->pPrev = VMA_NULL;
4446 m_pFront = pNewItem;
4452 pNewItem->pPrev = m_pBack;
4453 m_pBack->pNext = pNewItem;
4460 template<
typename T>
4461 VmaListItem<T>* VmaRawList<T>::PushFront()
4463 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4464 pNewItem->pPrev = VMA_NULL;
4467 pNewItem->pNext = VMA_NULL;
4468 m_pFront = pNewItem;
4474 pNewItem->pNext = m_pFront;
4475 m_pFront->pPrev = pNewItem;
4476 m_pFront = pNewItem;
4482 template<
typename T>
4483 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
4485 ItemType*
const pNewItem = PushBack();
4486 pNewItem->Value = value;
4490 template<
typename T>
4491 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
4493 ItemType*
const pNewItem = PushFront();
4494 pNewItem->Value = value;
4498 template<
typename T>
4499 void VmaRawList<T>::PopBack()
4501 VMA_HEAVY_ASSERT(m_Count > 0);
4502 ItemType*
const pBackItem = m_pBack;
4503 ItemType*
const pPrevItem = pBackItem->pPrev;
4504 if(pPrevItem != VMA_NULL)
4506 pPrevItem->pNext = VMA_NULL;
4508 m_pBack = pPrevItem;
4509 m_ItemAllocator.Free(pBackItem);
4513 template<
typename T>
4514 void VmaRawList<T>::PopFront()
4516 VMA_HEAVY_ASSERT(m_Count > 0);
4517 ItemType*
const pFrontItem = m_pFront;
4518 ItemType*
const pNextItem = pFrontItem->pNext;
4519 if(pNextItem != VMA_NULL)
4521 pNextItem->pPrev = VMA_NULL;
4523 m_pFront = pNextItem;
4524 m_ItemAllocator.Free(pFrontItem);
4528 template<
typename T>
4529 void VmaRawList<T>::Remove(ItemType* pItem)
4531 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4532 VMA_HEAVY_ASSERT(m_Count > 0);
4534 if(pItem->pPrev != VMA_NULL)
4536 pItem->pPrev->pNext = pItem->pNext;
4540 VMA_HEAVY_ASSERT(m_pFront == pItem);
4541 m_pFront = pItem->pNext;
4544 if(pItem->pNext != VMA_NULL)
4546 pItem->pNext->pPrev = pItem->pPrev;
4550 VMA_HEAVY_ASSERT(m_pBack == pItem);
4551 m_pBack = pItem->pPrev;
4554 m_ItemAllocator.Free(pItem);
4558 template<
typename T>
4559 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4561 if(pItem != VMA_NULL)
4563 ItemType*
const prevItem = pItem->pPrev;
4564 ItemType*
const newItem = m_ItemAllocator.Alloc();
4565 newItem->pPrev = prevItem;
4566 newItem->pNext = pItem;
4567 pItem->pPrev = newItem;
4568 if(prevItem != VMA_NULL)
4570 prevItem->pNext = newItem;
4574 VMA_HEAVY_ASSERT(m_pFront == pItem);
4584 template<
typename T>
4585 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4587 if(pItem != VMA_NULL)
4589 ItemType*
const nextItem = pItem->pNext;
4590 ItemType*
const newItem = m_ItemAllocator.Alloc();
4591 newItem->pNext = nextItem;
4592 newItem->pPrev = pItem;
4593 pItem->pNext = newItem;
4594 if(nextItem != VMA_NULL)
4596 nextItem->pPrev = newItem;
4600 VMA_HEAVY_ASSERT(m_pBack == pItem);
4610 template<
typename T>
4611 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4613 ItemType*
const newItem = InsertBefore(pItem);
4614 newItem->Value = value;
4618 template<
typename T>
4619 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4621 ItemType*
const newItem = InsertAfter(pItem);
4622 newItem->Value = value;
4626 template<
typename T,
typename AllocatorT>
4629 VMA_CLASS_NO_COPY(VmaList)
4640 T& operator*()
const 4642 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4643 return m_pItem->Value;
4645 T* operator->()
const 4647 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4648 return &m_pItem->Value;
4651 iterator& operator++()
4653 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4654 m_pItem = m_pItem->pNext;
4657 iterator& operator--()
4659 if(m_pItem != VMA_NULL)
4661 m_pItem = m_pItem->pPrev;
4665 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4666 m_pItem = m_pList->Back();
4671 iterator operator++(
int)
4673 iterator result = *
this;
4677 iterator operator--(
int)
4679 iterator result = *
this;
4684 bool operator==(
const iterator& rhs)
const 4686 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4687 return m_pItem == rhs.m_pItem;
4689 bool operator!=(
const iterator& rhs)
const 4691 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4692 return m_pItem != rhs.m_pItem;
4696 VmaRawList<T>* m_pList;
4697 VmaListItem<T>* m_pItem;
4699 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4705 friend class VmaList<T, AllocatorT>;
4708 class const_iterator
4717 const_iterator(
const iterator& src) :
4718 m_pList(src.m_pList),
4719 m_pItem(src.m_pItem)
4723 const T& operator*()
const 4725 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4726 return m_pItem->Value;
4728 const T* operator->()
const 4730 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4731 return &m_pItem->Value;
4734 const_iterator& operator++()
4736 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4737 m_pItem = m_pItem->pNext;
4740 const_iterator& operator--()
4742 if(m_pItem != VMA_NULL)
4744 m_pItem = m_pItem->pPrev;
4748 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4749 m_pItem = m_pList->Back();
4754 const_iterator operator++(
int)
4756 const_iterator result = *
this;
4760 const_iterator operator--(
int)
4762 const_iterator result = *
this;
4767 bool operator==(
const const_iterator& rhs)
const 4769 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4770 return m_pItem == rhs.m_pItem;
4772 bool operator!=(
const const_iterator& rhs)
const 4774 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4775 return m_pItem != rhs.m_pItem;
4779 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4785 const VmaRawList<T>* m_pList;
4786 const VmaListItem<T>* m_pItem;
4788 friend class VmaList<T, AllocatorT>;
4791 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4793 bool empty()
const {
return m_RawList.IsEmpty(); }
4794 size_t size()
const {
return m_RawList.GetCount(); }
4796 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4797 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4799 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4800 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4802 void clear() { m_RawList.Clear(); }
4803 void push_back(
const T& value) { m_RawList.PushBack(value); }
4804 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4805 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4808 VmaRawList<T> m_RawList;
4811 #endif // #if VMA_USE_STL_LIST 4819 #if VMA_USE_STL_UNORDERED_MAP 4821 #define VmaPair std::pair 4823 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4824 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4826 #else // #if VMA_USE_STL_UNORDERED_MAP 4828 template<
typename T1,
typename T2>
4834 VmaPair() : first(), second() { }
4835 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
4841 template<
typename KeyT,
typename ValueT>
4845 typedef VmaPair<KeyT, ValueT> PairType;
4846 typedef PairType* iterator;
4848 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4850 iterator begin() {
return m_Vector.begin(); }
4851 iterator end() {
return m_Vector.end(); }
4853 void insert(
const PairType& pair);
4854 iterator find(
const KeyT& key);
4855 void erase(iterator it);
4858 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4861 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4863 template<
typename FirstT,
typename SecondT>
4864 struct VmaPairFirstLess
4866 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4868 return lhs.first < rhs.first;
4870 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4872 return lhs.first < rhsFirst;
4876 template<
typename KeyT,
typename ValueT>
4877 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4879 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4881 m_Vector.data() + m_Vector.size(),
4883 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4884 VmaVectorInsert(m_Vector, indexToInsert, pair);
4887 template<
typename KeyT,
typename ValueT>
4888 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4890 PairType* it = VmaBinaryFindFirstNotLess(
4892 m_Vector.data() + m_Vector.size(),
4894 VmaPairFirstLess<KeyT, ValueT>());
4895 if((it != m_Vector.end()) && (it->first == key))
4901 return m_Vector.end();
4905 template<
typename KeyT,
typename ValueT>
4906 void VmaMap<KeyT, ValueT>::erase(iterator it)
4908 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4911 #endif // #if VMA_USE_STL_UNORDERED_MAP 4917 class VmaDeviceMemoryBlock;
4919 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
4921 struct VmaAllocation_T
4924 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4928 FLAG_USER_DATA_STRING = 0x01,
4932 enum ALLOCATION_TYPE
4934 ALLOCATION_TYPE_NONE,
4935 ALLOCATION_TYPE_BLOCK,
4936 ALLOCATION_TYPE_DEDICATED,
4944 void Ctor(uint32_t currentFrameIndex,
bool userDataString)
4948 m_pUserData = VMA_NULL;
4949 m_LastUseFrameIndex = currentFrameIndex;
4950 m_Type = (uint8_t)ALLOCATION_TYPE_NONE;
4951 m_SuballocationType = (uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN;
4953 m_Flags = userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0;
4955 #if VMA_STATS_STRING_ENABLED 4956 m_CreationFrameIndex = currentFrameIndex;
4957 m_BufferImageUsage = 0;
4963 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4966 VMA_ASSERT(m_pUserData == VMA_NULL);
4969 void InitBlockAllocation(
4970 VmaDeviceMemoryBlock* block,
4971 VkDeviceSize offset,
4972 VkDeviceSize alignment,
4974 VmaSuballocationType suballocationType,
4978 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4979 VMA_ASSERT(block != VMA_NULL);
4980 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4981 m_Alignment = alignment;
4983 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4984 m_SuballocationType = (uint8_t)suballocationType;
4985 m_BlockAllocation.m_Block = block;
4986 m_BlockAllocation.m_Offset = offset;
4987 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
4992 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4993 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
4994 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4995 m_BlockAllocation.m_Block = VMA_NULL;
4996 m_BlockAllocation.m_Offset = 0;
4997 m_BlockAllocation.m_CanBecomeLost =
true;
5000 void ChangeBlockAllocation(
5002 VmaDeviceMemoryBlock* block,
5003 VkDeviceSize offset);
5005 void ChangeSize(VkDeviceSize newSize);
5006 void ChangeOffset(VkDeviceSize newOffset);
5009 void InitDedicatedAllocation(
5010 uint32_t memoryTypeIndex,
5011 VkDeviceMemory hMemory,
5012 VmaSuballocationType suballocationType,
5016 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5017 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
5018 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
5021 m_SuballocationType = (uint8_t)suballocationType;
5022 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
5023 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
5024 m_DedicatedAllocation.m_hMemory = hMemory;
5025 m_DedicatedAllocation.m_pMappedData = pMappedData;
5028 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
5029 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
5030 VkDeviceSize GetSize()
const {
return m_Size; }
5031 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
5032 void* GetUserData()
const {
return m_pUserData; }
5033 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
5034 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
5036 VmaDeviceMemoryBlock* GetBlock()
const 5038 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5039 return m_BlockAllocation.m_Block;
5041 VkDeviceSize GetOffset()
const;
5042 VkDeviceMemory GetMemory()
const;
5043 uint32_t GetMemoryTypeIndex()
const;
5044 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
5045 void* GetMappedData()
const;
5046 bool CanBecomeLost()
const;
5048 uint32_t GetLastUseFrameIndex()
const 5050 return m_LastUseFrameIndex.load();
5052 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
5054 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
5064 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5066 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
5068 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
5079 void BlockAllocMap();
5080 void BlockAllocUnmap();
5081 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
5084 #if VMA_STATS_STRING_ENABLED 5085 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
5086 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
5088 void InitBufferImageUsage(uint32_t bufferImageUsage)
5090 VMA_ASSERT(m_BufferImageUsage == 0);
5091 m_BufferImageUsage = bufferImageUsage;
5094 void PrintParameters(
class VmaJsonWriter& json)
const;
5098 VkDeviceSize m_Alignment;
5099 VkDeviceSize m_Size;
5101 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
5103 uint8_t m_SuballocationType;
5110 struct BlockAllocation
5112 VmaDeviceMemoryBlock* m_Block;
5113 VkDeviceSize m_Offset;
5114 bool m_CanBecomeLost;
5118 struct DedicatedAllocation
5120 uint32_t m_MemoryTypeIndex;
5121 VkDeviceMemory m_hMemory;
5122 void* m_pMappedData;
5128 BlockAllocation m_BlockAllocation;
5130 DedicatedAllocation m_DedicatedAllocation;
5133 #if VMA_STATS_STRING_ENABLED 5134 uint32_t m_CreationFrameIndex;
5135 uint32_t m_BufferImageUsage;
5145 struct VmaSuballocation
5147 VkDeviceSize offset;
5150 VmaSuballocationType type;
5154 struct VmaSuballocationOffsetLess
5156 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5158 return lhs.offset < rhs.offset;
5161 struct VmaSuballocationOffsetGreater
5163 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5165 return lhs.offset > rhs.offset;
5169 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
5172 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
5174 enum class VmaAllocationRequestType
5196 struct VmaAllocationRequest
5198 VkDeviceSize offset;
5199 VkDeviceSize sumFreeSize;
5200 VkDeviceSize sumItemSize;
5201 VmaSuballocationList::iterator item;
5202 size_t itemsToMakeLostCount;
5204 VmaAllocationRequestType type;
5206 VkDeviceSize CalcCost()
const 5208 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
5216 class VmaBlockMetadata
5220 virtual ~VmaBlockMetadata() { }
5221 virtual void Init(VkDeviceSize size) { m_Size = size; }
5224 virtual bool Validate()
const = 0;
5225 VkDeviceSize GetSize()
const {
return m_Size; }
5226 virtual size_t GetAllocationCount()
const = 0;
5227 virtual VkDeviceSize GetSumFreeSize()
const = 0;
5228 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
5230 virtual bool IsEmpty()
const = 0;
5232 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
5234 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
5236 #if VMA_STATS_STRING_ENABLED 5237 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
5243 virtual bool CreateAllocationRequest(
5244 uint32_t currentFrameIndex,
5245 uint32_t frameInUseCount,
5246 VkDeviceSize bufferImageGranularity,
5247 VkDeviceSize allocSize,
5248 VkDeviceSize allocAlignment,
5250 VmaSuballocationType allocType,
5251 bool canMakeOtherLost,
5254 VmaAllocationRequest* pAllocationRequest) = 0;
5256 virtual bool MakeRequestedAllocationsLost(
5257 uint32_t currentFrameIndex,
5258 uint32_t frameInUseCount,
5259 VmaAllocationRequest* pAllocationRequest) = 0;
5261 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
5263 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
5267 const VmaAllocationRequest& request,
5268 VmaSuballocationType type,
5269 VkDeviceSize allocSize,
5274 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
5277 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize) {
return false; }
5280 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
5282 #if VMA_STATS_STRING_ENABLED 5283 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
5284 VkDeviceSize unusedBytes,
5285 size_t allocationCount,
5286 size_t unusedRangeCount)
const;
5287 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
5288 VkDeviceSize offset,
5290 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
5291 VkDeviceSize offset,
5292 VkDeviceSize size)
const;
5293 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
5297 VkDeviceSize m_Size;
5298 const VkAllocationCallbacks* m_pAllocationCallbacks;
5301 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 5302 VMA_ASSERT(0 && "Validation failed: " #cond); \ 5306 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
5308 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
5311 virtual ~VmaBlockMetadata_Generic();
5312 virtual void Init(VkDeviceSize size);
5314 virtual bool Validate()
const;
5315 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
5316 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5317 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5318 virtual bool IsEmpty()
const;
5320 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5321 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5323 #if VMA_STATS_STRING_ENABLED 5324 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5327 virtual bool CreateAllocationRequest(
5328 uint32_t currentFrameIndex,
5329 uint32_t frameInUseCount,
5330 VkDeviceSize bufferImageGranularity,
5331 VkDeviceSize allocSize,
5332 VkDeviceSize allocAlignment,
5334 VmaSuballocationType allocType,
5335 bool canMakeOtherLost,
5337 VmaAllocationRequest* pAllocationRequest);
5339 virtual bool MakeRequestedAllocationsLost(
5340 uint32_t currentFrameIndex,
5341 uint32_t frameInUseCount,
5342 VmaAllocationRequest* pAllocationRequest);
5344 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5346 virtual VkResult CheckCorruption(
const void* pBlockData);
5349 const VmaAllocationRequest& request,
5350 VmaSuballocationType type,
5351 VkDeviceSize allocSize,
5355 virtual void FreeAtOffset(VkDeviceSize offset);
5357 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize);
5362 bool IsBufferImageGranularityConflictPossible(
5363 VkDeviceSize bufferImageGranularity,
5364 VmaSuballocationType& inOutPrevSuballocType)
const;
5367 friend class VmaDefragmentationAlgorithm_Generic;
5368 friend class VmaDefragmentationAlgorithm_Fast;
5370 uint32_t m_FreeCount;
5371 VkDeviceSize m_SumFreeSize;
5372 VmaSuballocationList m_Suballocations;
5375 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
5377 bool ValidateFreeSuballocationList()
const;
5381 bool CheckAllocation(
5382 uint32_t currentFrameIndex,
5383 uint32_t frameInUseCount,
5384 VkDeviceSize bufferImageGranularity,
5385 VkDeviceSize allocSize,
5386 VkDeviceSize allocAlignment,
5387 VmaSuballocationType allocType,
5388 VmaSuballocationList::const_iterator suballocItem,
5389 bool canMakeOtherLost,
5390 VkDeviceSize* pOffset,
5391 size_t* itemsToMakeLostCount,
5392 VkDeviceSize* pSumFreeSize,
5393 VkDeviceSize* pSumItemSize)
const;
5395 void MergeFreeWithNext(VmaSuballocationList::iterator item);
5399 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
5402 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
5405 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
5486 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
5488 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
5491 virtual ~VmaBlockMetadata_Linear();
5492 virtual void Init(VkDeviceSize size);
5494 virtual bool Validate()
const;
5495 virtual size_t GetAllocationCount()
const;
5496 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5497 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5498 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
5500 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5501 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5503 #if VMA_STATS_STRING_ENABLED 5504 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5507 virtual bool CreateAllocationRequest(
5508 uint32_t currentFrameIndex,
5509 uint32_t frameInUseCount,
5510 VkDeviceSize bufferImageGranularity,
5511 VkDeviceSize allocSize,
5512 VkDeviceSize allocAlignment,
5514 VmaSuballocationType allocType,
5515 bool canMakeOtherLost,
5517 VmaAllocationRequest* pAllocationRequest);
5519 virtual bool MakeRequestedAllocationsLost(
5520 uint32_t currentFrameIndex,
5521 uint32_t frameInUseCount,
5522 VmaAllocationRequest* pAllocationRequest);
5524 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5526 virtual VkResult CheckCorruption(
const void* pBlockData);
5529 const VmaAllocationRequest& request,
5530 VmaSuballocationType type,
5531 VkDeviceSize allocSize,
5535 virtual void FreeAtOffset(VkDeviceSize offset);
5545 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
5547 enum SECOND_VECTOR_MODE
5549 SECOND_VECTOR_EMPTY,
5554 SECOND_VECTOR_RING_BUFFER,
5560 SECOND_VECTOR_DOUBLE_STACK,
5563 VkDeviceSize m_SumFreeSize;
5564 SuballocationVectorType m_Suballocations0, m_Suballocations1;
5565 uint32_t m_1stVectorIndex;
5566 SECOND_VECTOR_MODE m_2ndVectorMode;
5568 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5569 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5570 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5571 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5574 size_t m_1stNullItemsBeginCount;
5576 size_t m_1stNullItemsMiddleCount;
5578 size_t m_2ndNullItemsCount;
5580 bool ShouldCompact1st()
const;
5581 void CleanupAfterFree();
5583 bool CreateAllocationRequest_LowerAddress(
5584 uint32_t currentFrameIndex,
5585 uint32_t frameInUseCount,
5586 VkDeviceSize bufferImageGranularity,
5587 VkDeviceSize allocSize,
5588 VkDeviceSize allocAlignment,
5589 VmaSuballocationType allocType,
5590 bool canMakeOtherLost,
5592 VmaAllocationRequest* pAllocationRequest);
5593 bool CreateAllocationRequest_UpperAddress(
5594 uint32_t currentFrameIndex,
5595 uint32_t frameInUseCount,
5596 VkDeviceSize bufferImageGranularity,
5597 VkDeviceSize allocSize,
5598 VkDeviceSize allocAlignment,
5599 VmaSuballocationType allocType,
5600 bool canMakeOtherLost,
5602 VmaAllocationRequest* pAllocationRequest);
5616 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5618 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5621 virtual ~VmaBlockMetadata_Buddy();
5622 virtual void Init(VkDeviceSize size);
5624 virtual bool Validate()
const;
5625 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
5626 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5627 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5628 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5630 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5631 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5633 #if VMA_STATS_STRING_ENABLED 5634 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5637 virtual bool CreateAllocationRequest(
5638 uint32_t currentFrameIndex,
5639 uint32_t frameInUseCount,
5640 VkDeviceSize bufferImageGranularity,
5641 VkDeviceSize allocSize,
5642 VkDeviceSize allocAlignment,
5644 VmaSuballocationType allocType,
5645 bool canMakeOtherLost,
5647 VmaAllocationRequest* pAllocationRequest);
5649 virtual bool MakeRequestedAllocationsLost(
5650 uint32_t currentFrameIndex,
5651 uint32_t frameInUseCount,
5652 VmaAllocationRequest* pAllocationRequest);
5654 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5656 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
5659 const VmaAllocationRequest& request,
5660 VmaSuballocationType type,
5661 VkDeviceSize allocSize,
5664 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5665 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
5668 static const VkDeviceSize MIN_NODE_SIZE = 32;
5669 static const size_t MAX_LEVELS = 30;
5671 struct ValidationContext
5673 size_t calculatedAllocationCount;
5674 size_t calculatedFreeCount;
5675 VkDeviceSize calculatedSumFreeSize;
5677 ValidationContext() :
5678 calculatedAllocationCount(0),
5679 calculatedFreeCount(0),
5680 calculatedSumFreeSize(0) { }
5685 VkDeviceSize offset;
5715 VkDeviceSize m_UsableSize;
5716 uint32_t m_LevelCount;
5722 } m_FreeList[MAX_LEVELS];
5724 size_t m_AllocationCount;
5728 VkDeviceSize m_SumFreeSize;
5730 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5731 void DeleteNode(Node* node);
5732 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
5733 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
5734 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
5736 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5737 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
5741 void AddToFreeListFront(uint32_t level, Node* node);
5745 void RemoveFromFreeList(uint32_t level, Node* node);
5747 #if VMA_STATS_STRING_ENABLED 5748 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
5758 class VmaDeviceMemoryBlock
5760 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5762 VmaBlockMetadata* m_pMetadata;
5766 ~VmaDeviceMemoryBlock()
5768 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5769 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5776 uint32_t newMemoryTypeIndex,
5777 VkDeviceMemory newMemory,
5778 VkDeviceSize newSize,
5780 uint32_t algorithm);
5784 VmaPool GetParentPool()
const {
return m_hParentPool; }
5785 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5786 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5787 uint32_t GetId()
const {
return m_Id; }
5788 void* GetMappedData()
const {
return m_pMappedData; }
5791 bool Validate()
const;
5796 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
5799 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5800 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5802 VkResult BindBufferMemory(
5806 VkResult BindImageMemory(
5813 uint32_t m_MemoryTypeIndex;
5815 VkDeviceMemory m_hMemory;
5823 uint32_t m_MapCount;
5824 void* m_pMappedData;
5827 struct VmaPointerLess
5829 bool operator()(
const void* lhs,
const void* rhs)
const 5835 struct VmaDefragmentationMove
5837 size_t srcBlockIndex;
5838 size_t dstBlockIndex;
5839 VkDeviceSize srcOffset;
5840 VkDeviceSize dstOffset;
5844 class VmaDefragmentationAlgorithm;
5852 struct VmaBlockVector
5854 VMA_CLASS_NO_COPY(VmaBlockVector)
5859 uint32_t memoryTypeIndex,
5860 VkDeviceSize preferredBlockSize,
5861 size_t minBlockCount,
5862 size_t maxBlockCount,
5863 VkDeviceSize bufferImageGranularity,
5864 uint32_t frameInUseCount,
5866 bool explicitBlockSize,
5867 uint32_t algorithm);
5870 VkResult CreateMinBlocks();
5872 VmaPool GetParentPool()
const {
return m_hParentPool; }
5873 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5874 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5875 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5876 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5877 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5881 bool IsEmpty()
const {
return m_Blocks.empty(); }
5882 bool IsCorruptionDetectionEnabled()
const;
5885 uint32_t currentFrameIndex,
5887 VkDeviceSize alignment,
5889 VmaSuballocationType suballocType,
5890 size_t allocationCount,
5899 #if VMA_STATS_STRING_ENABLED 5900 void PrintDetailedMap(
class VmaJsonWriter& json);
5903 void MakePoolAllocationsLost(
5904 uint32_t currentFrameIndex,
5905 size_t* pLostAllocationCount);
5906 VkResult CheckCorruption();
5910 class VmaBlockVectorDefragmentationContext* pCtx,
5912 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
5913 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
5914 VkCommandBuffer commandBuffer);
5915 void DefragmentationEnd(
5916 class VmaBlockVectorDefragmentationContext* pCtx,
5922 size_t GetBlockCount()
const {
return m_Blocks.size(); }
5923 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
5924 size_t CalcAllocationCount()
const;
5925 bool IsBufferImageGranularityConflictPossible()
const;
5928 friend class VmaDefragmentationAlgorithm_Generic;
5932 const uint32_t m_MemoryTypeIndex;
5933 const VkDeviceSize m_PreferredBlockSize;
5934 const size_t m_MinBlockCount;
5935 const size_t m_MaxBlockCount;
5936 const VkDeviceSize m_BufferImageGranularity;
5937 const uint32_t m_FrameInUseCount;
5938 const bool m_IsCustomPool;
5939 const bool m_ExplicitBlockSize;
5940 const uint32_t m_Algorithm;
5944 bool m_HasEmptyBlock;
5945 VMA_RW_MUTEX m_Mutex;
5947 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
5948 uint32_t m_NextBlockId;
5950 VkDeviceSize CalcMaxBlockSize()
const;
5953 void Remove(VmaDeviceMemoryBlock* pBlock);
5957 void IncrementallySortBlocks();
5959 VkResult AllocatePage(
5960 uint32_t currentFrameIndex,
5962 VkDeviceSize alignment,
5964 VmaSuballocationType suballocType,
5968 VkResult AllocateFromBlock(
5969 VmaDeviceMemoryBlock* pBlock,
5970 uint32_t currentFrameIndex,
5972 VkDeviceSize alignment,
5975 VmaSuballocationType suballocType,
5979 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
5982 void ApplyDefragmentationMovesCpu(
5983 class VmaBlockVectorDefragmentationContext* pDefragCtx,
5984 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
5986 void ApplyDefragmentationMovesGpu(
5987 class VmaBlockVectorDefragmentationContext* pDefragCtx,
5988 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
5989 VkCommandBuffer commandBuffer);
6000 VMA_CLASS_NO_COPY(VmaPool_T)
6002 VmaBlockVector m_BlockVector;
6007 VkDeviceSize preferredBlockSize);
6010 uint32_t GetId()
const {
return m_Id; }
6011 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
6013 #if VMA_STATS_STRING_ENABLED 6028 class VmaDefragmentationAlgorithm
6030 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
6032 VmaDefragmentationAlgorithm(
6034 VmaBlockVector* pBlockVector,
6035 uint32_t currentFrameIndex) :
6036 m_hAllocator(hAllocator),
6037 m_pBlockVector(pBlockVector),
6038 m_CurrentFrameIndex(currentFrameIndex)
6041 virtual ~VmaDefragmentationAlgorithm()
6045 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) = 0;
6046 virtual void AddAll() = 0;
6048 virtual VkResult Defragment(
6049 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6050 VkDeviceSize maxBytesToMove,
6051 uint32_t maxAllocationsToMove) = 0;
6053 virtual VkDeviceSize GetBytesMoved()
const = 0;
6054 virtual uint32_t GetAllocationsMoved()
const = 0;
6058 VmaBlockVector*
const m_pBlockVector;
6059 const uint32_t m_CurrentFrameIndex;
6061 struct AllocationInfo
6064 VkBool32* m_pChanged;
6067 m_hAllocation(VK_NULL_HANDLE),
6068 m_pChanged(VMA_NULL)
6072 m_hAllocation(hAlloc),
6073 m_pChanged(pChanged)
6079 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
6081 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
6083 VmaDefragmentationAlgorithm_Generic(
6085 VmaBlockVector* pBlockVector,
6086 uint32_t currentFrameIndex,
6087 bool overlappingMoveSupported);
6088 virtual ~VmaDefragmentationAlgorithm_Generic();
6090 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6091 virtual void AddAll() { m_AllAllocations =
true; }
6093 virtual VkResult Defragment(
6094 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6095 VkDeviceSize maxBytesToMove,
6096 uint32_t maxAllocationsToMove);
6098 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6099 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6102 uint32_t m_AllocationCount;
6103 bool m_AllAllocations;
6105 VkDeviceSize m_BytesMoved;
6106 uint32_t m_AllocationsMoved;
6108 struct AllocationInfoSizeGreater
6110 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 6112 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
6116 struct AllocationInfoOffsetGreater
6118 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 6120 return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
6126 size_t m_OriginalBlockIndex;
6127 VmaDeviceMemoryBlock* m_pBlock;
6128 bool m_HasNonMovableAllocations;
6129 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
6131 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
6132 m_OriginalBlockIndex(SIZE_MAX),
6134 m_HasNonMovableAllocations(true),
6135 m_Allocations(pAllocationCallbacks)
6139 void CalcHasNonMovableAllocations()
6141 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
6142 const size_t defragmentAllocCount = m_Allocations.size();
6143 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
6146 void SortAllocationsBySizeDescending()
6148 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
6151 void SortAllocationsByOffsetDescending()
6153 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
6157 struct BlockPointerLess
6159 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 6161 return pLhsBlockInfo->m_pBlock < pRhsBlock;
6163 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6165 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
6171 struct BlockInfoCompareMoveDestination
6173 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6175 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
6179 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
6183 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
6191 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
6192 BlockInfoVector m_Blocks;
6194 VkResult DefragmentRound(
6195 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6196 VkDeviceSize maxBytesToMove,
6197 uint32_t maxAllocationsToMove);
6199 size_t CalcBlocksWithNonMovableCount()
const;
6201 static bool MoveMakesSense(
6202 size_t dstBlockIndex, VkDeviceSize dstOffset,
6203 size_t srcBlockIndex, VkDeviceSize srcOffset);
6206 class VmaDefragmentationAlgorithm_Fast :
public VmaDefragmentationAlgorithm
6208 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
6210 VmaDefragmentationAlgorithm_Fast(
6212 VmaBlockVector* pBlockVector,
6213 uint32_t currentFrameIndex,
6214 bool overlappingMoveSupported);
6215 virtual ~VmaDefragmentationAlgorithm_Fast();
6217 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
6218 virtual void AddAll() { m_AllAllocations =
true; }
6220 virtual VkResult Defragment(
6221 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6222 VkDeviceSize maxBytesToMove,
6223 uint32_t maxAllocationsToMove);
6225 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6226 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6231 size_t origBlockIndex;
6234 class FreeSpaceDatabase
6240 s.blockInfoIndex = SIZE_MAX;
6241 for(
size_t i = 0; i < MAX_COUNT; ++i)
6243 m_FreeSpaces[i] = s;
6247 void Register(
size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
6249 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6255 size_t bestIndex = SIZE_MAX;
6256 for(
size_t i = 0; i < MAX_COUNT; ++i)
6259 if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
6264 if(m_FreeSpaces[i].size < size &&
6265 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
6271 if(bestIndex != SIZE_MAX)
6273 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
6274 m_FreeSpaces[bestIndex].offset = offset;
6275 m_FreeSpaces[bestIndex].size = size;
6279 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
6280 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
6282 size_t bestIndex = SIZE_MAX;
6283 VkDeviceSize bestFreeSpaceAfter = 0;
6284 for(
size_t i = 0; i < MAX_COUNT; ++i)
6287 if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
6289 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
6291 if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
6293 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
6295 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
6298 bestFreeSpaceAfter = freeSpaceAfter;
6304 if(bestIndex != SIZE_MAX)
6306 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
6307 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
6309 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6312 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
6313 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
6314 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
6319 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
6329 static const size_t MAX_COUNT = 4;
6333 size_t blockInfoIndex;
6334 VkDeviceSize offset;
6336 } m_FreeSpaces[MAX_COUNT];
6339 const bool m_OverlappingMoveSupported;
6341 uint32_t m_AllocationCount;
6342 bool m_AllAllocations;
6344 VkDeviceSize m_BytesMoved;
6345 uint32_t m_AllocationsMoved;
6347 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
6349 void PreprocessMetadata();
6350 void PostprocessMetadata();
6351 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc);
6354 struct VmaBlockDefragmentationContext
6358 BLOCK_FLAG_USED = 0x00000001,
6363 VmaBlockDefragmentationContext() :
6365 hBuffer(VK_NULL_HANDLE)
6370 class VmaBlockVectorDefragmentationContext
6372 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
6376 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
6378 VmaBlockVectorDefragmentationContext(
6381 VmaBlockVector* pBlockVector,
6382 uint32_t currFrameIndex,
6384 ~VmaBlockVectorDefragmentationContext();
6386 VmaPool GetCustomPool()
const {
return m_hCustomPool; }
6387 VmaBlockVector* GetBlockVector()
const {
return m_pBlockVector; }
6388 VmaDefragmentationAlgorithm* GetAlgorithm()
const {
return m_pAlgorithm; }
6390 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6391 void AddAll() { m_AllAllocations =
true; }
6393 void Begin(
bool overlappingMoveSupported);
6400 VmaBlockVector*
const m_pBlockVector;
6401 const uint32_t m_CurrFrameIndex;
6402 const uint32_t m_AlgorithmFlags;
6404 VmaDefragmentationAlgorithm* m_pAlgorithm;
6412 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
6413 bool m_AllAllocations;
6416 struct VmaDefragmentationContext_T
6419 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
6421 VmaDefragmentationContext_T(
6423 uint32_t currFrameIndex,
6426 ~VmaDefragmentationContext_T();
6428 void AddPools(uint32_t poolCount,
VmaPool* pPools);
6429 void AddAllocations(
6430 uint32_t allocationCount,
6432 VkBool32* pAllocationsChanged);
6440 VkResult Defragment(
6441 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
6442 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
6447 const uint32_t m_CurrFrameIndex;
6448 const uint32_t m_Flags;
6451 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
6453 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
6456 #if VMA_RECORDING_ENABLED 6463 void WriteConfiguration(
6464 const VkPhysicalDeviceProperties& devProps,
6465 const VkPhysicalDeviceMemoryProperties& memProps,
6466 bool dedicatedAllocationExtensionEnabled);
6469 void RecordCreateAllocator(uint32_t frameIndex);
6470 void RecordDestroyAllocator(uint32_t frameIndex);
6471 void RecordCreatePool(uint32_t frameIndex,
6474 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
6475 void RecordAllocateMemory(uint32_t frameIndex,
6476 const VkMemoryRequirements& vkMemReq,
6479 void RecordAllocateMemoryPages(uint32_t frameIndex,
6480 const VkMemoryRequirements& vkMemReq,
6482 uint64_t allocationCount,
6484 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
6485 const VkMemoryRequirements& vkMemReq,
6486 bool requiresDedicatedAllocation,
6487 bool prefersDedicatedAllocation,
6490 void RecordAllocateMemoryForImage(uint32_t frameIndex,
6491 const VkMemoryRequirements& vkMemReq,
6492 bool requiresDedicatedAllocation,
6493 bool prefersDedicatedAllocation,
6496 void RecordFreeMemory(uint32_t frameIndex,
6498 void RecordFreeMemoryPages(uint32_t frameIndex,
6499 uint64_t allocationCount,
6501 void RecordResizeAllocation(
6502 uint32_t frameIndex,
6504 VkDeviceSize newSize);
6505 void RecordSetAllocationUserData(uint32_t frameIndex,
6507 const void* pUserData);
6508 void RecordCreateLostAllocation(uint32_t frameIndex,
6510 void RecordMapMemory(uint32_t frameIndex,
6512 void RecordUnmapMemory(uint32_t frameIndex,
6514 void RecordFlushAllocation(uint32_t frameIndex,
6515 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6516 void RecordInvalidateAllocation(uint32_t frameIndex,
6517 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6518 void RecordCreateBuffer(uint32_t frameIndex,
6519 const VkBufferCreateInfo& bufCreateInfo,
6522 void RecordCreateImage(uint32_t frameIndex,
6523 const VkImageCreateInfo& imageCreateInfo,
6526 void RecordDestroyBuffer(uint32_t frameIndex,
6528 void RecordDestroyImage(uint32_t frameIndex,
6530 void RecordTouchAllocation(uint32_t frameIndex,
6532 void RecordGetAllocationInfo(uint32_t frameIndex,
6534 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
6536 void RecordDefragmentationBegin(uint32_t frameIndex,
6539 void RecordDefragmentationEnd(uint32_t frameIndex,
6549 class UserDataString
6553 const char* GetString()
const {
return m_Str; }
6563 VMA_MUTEX m_FileMutex;
6565 int64_t m_StartCounter;
6567 void GetBasicParams(CallParams& outParams);
6570 template<
typename T>
6571 void PrintPointerList(uint64_t count,
const T* pItems)
6575 fprintf(m_File,
"%p", pItems[0]);
6576 for(uint64_t i = 1; i < count; ++i)
6578 fprintf(m_File,
" %p", pItems[i]);
6583 void PrintPointerList(uint64_t count,
const VmaAllocation* pItems);
6587 #endif // #if VMA_RECORDING_ENABLED 6592 class VmaAllocationObjectAllocator
6594 VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
6596 VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks);
6603 VmaPoolAllocator<VmaAllocation_T> m_Allocator;
6607 struct VmaAllocator_T
6609 VMA_CLASS_NO_COPY(VmaAllocator_T)
6612 bool m_UseKhrDedicatedAllocation;
6614 bool m_AllocationCallbacksSpecified;
6615 VkAllocationCallbacks m_AllocationCallbacks;
6617 VmaAllocationObjectAllocator m_AllocationObjectAllocator;
6620 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
6621 VMA_MUTEX m_HeapSizeLimitMutex;
6623 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
6624 VkPhysicalDeviceMemoryProperties m_MemProps;
6627 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
6630 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
6631 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
6632 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
6638 const VkAllocationCallbacks* GetAllocationCallbacks()
const 6640 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
6644 return m_VulkanFunctions;
6647 VkDeviceSize GetBufferImageGranularity()
const 6650 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
6651 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
6654 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
6655 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
6657 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 6659 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
6660 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
6663 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 6665 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
6666 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
6669 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 6671 return IsMemoryTypeNonCoherent(memTypeIndex) ?
6672 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
6673 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
6676 bool IsIntegratedGpu()
const 6678 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
6681 #if VMA_RECORDING_ENABLED 6682 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
6685 void GetBufferMemoryRequirements(
6687 VkMemoryRequirements& memReq,
6688 bool& requiresDedicatedAllocation,
6689 bool& prefersDedicatedAllocation)
const;
6690 void GetImageMemoryRequirements(
6692 VkMemoryRequirements& memReq,
6693 bool& requiresDedicatedAllocation,
6694 bool& prefersDedicatedAllocation)
const;
6697 VkResult AllocateMemory(
6698 const VkMemoryRequirements& vkMemReq,
6699 bool requiresDedicatedAllocation,
6700 bool prefersDedicatedAllocation,
6701 VkBuffer dedicatedBuffer,
6702 VkImage dedicatedImage,
6704 VmaSuballocationType suballocType,
6705 size_t allocationCount,
6710 size_t allocationCount,
6713 VkResult ResizeAllocation(
6715 VkDeviceSize newSize);
6717 void CalculateStats(
VmaStats* pStats);
6719 #if VMA_STATS_STRING_ENABLED 6720 void PrintDetailedMap(
class VmaJsonWriter& json);
6723 VkResult DefragmentationBegin(
6727 VkResult DefragmentationEnd(
6734 void DestroyPool(
VmaPool pool);
6737 void SetCurrentFrameIndex(uint32_t frameIndex);
6738 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
6740 void MakePoolAllocationsLost(
6742 size_t* pLostAllocationCount);
6743 VkResult CheckPoolCorruption(
VmaPool hPool);
6744 VkResult CheckCorruption(uint32_t memoryTypeBits);
6748 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
6749 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
6754 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
6755 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
6757 void FlushOrInvalidateAllocation(
6759 VkDeviceSize offset, VkDeviceSize size,
6760 VMA_CACHE_OPERATION op);
6762 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
6765 VkDeviceSize m_PreferredLargeHeapBlockSize;
6767 VkPhysicalDevice m_PhysicalDevice;
6768 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
6770 VMA_RW_MUTEX m_PoolsMutex;
6772 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
6773 uint32_t m_NextPoolId;
6777 #if VMA_RECORDING_ENABLED 6778 VmaRecorder* m_pRecorder;
6783 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
6785 VkResult AllocateMemoryOfType(
6787 VkDeviceSize alignment,
6788 bool dedicatedAllocation,
6789 VkBuffer dedicatedBuffer,
6790 VkImage dedicatedImage,
6792 uint32_t memTypeIndex,
6793 VmaSuballocationType suballocType,
6794 size_t allocationCount,
6798 VkResult AllocateDedicatedMemoryPage(
6800 VmaSuballocationType suballocType,
6801 uint32_t memTypeIndex,
6802 const VkMemoryAllocateInfo& allocInfo,
6804 bool isUserDataString,
6809 VkResult AllocateDedicatedMemory(
6811 VmaSuballocationType suballocType,
6812 uint32_t memTypeIndex,
6814 bool isUserDataString,
6816 VkBuffer dedicatedBuffer,
6817 VkImage dedicatedImage,
6818 size_t allocationCount,
6828 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
6830 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
6833 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
6835 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
6838 template<
typename T>
6841 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
6844 template<
typename T>
6845 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
6847 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
6850 template<
typename T>
6851 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
6856 VmaFree(hAllocator, ptr);
6860 template<
typename T>
6861 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
6865 for(
size_t i = count; i--; )
6867 VmaFree(hAllocator, ptr);
6874 #if VMA_STATS_STRING_ENABLED 6876 class VmaStringBuilder
6879 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
6880 size_t GetLength()
const {
return m_Data.size(); }
6881 const char* GetData()
const {
return m_Data.data(); }
6883 void Add(
char ch) { m_Data.push_back(ch); }
6884 void Add(
const char* pStr);
6885 void AddNewLine() { Add(
'\n'); }
6886 void AddNumber(uint32_t num);
6887 void AddNumber(uint64_t num);
6888 void AddPointer(
const void* ptr);
6891 VmaVector< char, VmaStlAllocator<char> > m_Data;
6894 void VmaStringBuilder::Add(
const char* pStr)
6896 const size_t strLen = strlen(pStr);
6899 const size_t oldCount = m_Data.size();
6900 m_Data.resize(oldCount + strLen);
6901 memcpy(m_Data.data() + oldCount, pStr, strLen);
6905 void VmaStringBuilder::AddNumber(uint32_t num)
6908 VmaUint32ToStr(buf,
sizeof(buf), num);
6912 void VmaStringBuilder::AddNumber(uint64_t num)
6915 VmaUint64ToStr(buf,
sizeof(buf), num);
6919 void VmaStringBuilder::AddPointer(
const void* ptr)
6922 VmaPtrToStr(buf,
sizeof(buf), ptr);
6926 #endif // #if VMA_STATS_STRING_ENABLED 6931 #if VMA_STATS_STRING_ENABLED 6935 VMA_CLASS_NO_COPY(VmaJsonWriter)
6937 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
6940 void BeginObject(
bool singleLine =
false);
6943 void BeginArray(
bool singleLine =
false);
6946 void WriteString(
const char* pStr);
6947 void BeginString(
const char* pStr = VMA_NULL);
6948 void ContinueString(
const char* pStr);
6949 void ContinueString(uint32_t n);
6950 void ContinueString(uint64_t n);
6951 void ContinueString_Pointer(
const void* ptr);
6952 void EndString(
const char* pStr = VMA_NULL);
6954 void WriteNumber(uint32_t n);
6955 void WriteNumber(uint64_t n);
6956 void WriteBool(
bool b);
6960 static const char*
const INDENT;
6962 enum COLLECTION_TYPE
6964 COLLECTION_TYPE_OBJECT,
6965 COLLECTION_TYPE_ARRAY,
6969 COLLECTION_TYPE type;
6970 uint32_t valueCount;
6971 bool singleLineMode;
6974 VmaStringBuilder& m_SB;
6975 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
6976 bool m_InsideString;
6978 void BeginValue(
bool isString);
6979 void WriteIndent(
bool oneLess =
false);
6982 const char*
const VmaJsonWriter::INDENT =
" ";
6984 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
6986 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
6987 m_InsideString(false)
6991 VmaJsonWriter::~VmaJsonWriter()
6993 VMA_ASSERT(!m_InsideString);
6994 VMA_ASSERT(m_Stack.empty());
6997 void VmaJsonWriter::BeginObject(
bool singleLine)
6999 VMA_ASSERT(!m_InsideString);
7005 item.type = COLLECTION_TYPE_OBJECT;
7006 item.valueCount = 0;
7007 item.singleLineMode = singleLine;
7008 m_Stack.push_back(item);
7011 void VmaJsonWriter::EndObject()
7013 VMA_ASSERT(!m_InsideString);
7018 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
7022 void VmaJsonWriter::BeginArray(
bool singleLine)
7024 VMA_ASSERT(!m_InsideString);
7030 item.type = COLLECTION_TYPE_ARRAY;
7031 item.valueCount = 0;
7032 item.singleLineMode = singleLine;
7033 m_Stack.push_back(item);
7036 void VmaJsonWriter::EndArray()
7038 VMA_ASSERT(!m_InsideString);
7043 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
7047 void VmaJsonWriter::WriteString(
const char* pStr)
7053 void VmaJsonWriter::BeginString(
const char* pStr)
7055 VMA_ASSERT(!m_InsideString);
7059 m_InsideString =
true;
7060 if(pStr != VMA_NULL && pStr[0] !=
'\0')
7062 ContinueString(pStr);
7066 void VmaJsonWriter::ContinueString(
const char* pStr)
7068 VMA_ASSERT(m_InsideString);
7070 const size_t strLen = strlen(pStr);
7071 for(
size_t i = 0; i < strLen; ++i)
7104 VMA_ASSERT(0 &&
"Character not currently supported.");
7110 void VmaJsonWriter::ContinueString(uint32_t n)
7112 VMA_ASSERT(m_InsideString);
7116 void VmaJsonWriter::ContinueString(uint64_t n)
7118 VMA_ASSERT(m_InsideString);
7122 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
7124 VMA_ASSERT(m_InsideString);
7125 m_SB.AddPointer(ptr);
7128 void VmaJsonWriter::EndString(
const char* pStr)
7130 VMA_ASSERT(m_InsideString);
7131 if(pStr != VMA_NULL && pStr[0] !=
'\0')
7133 ContinueString(pStr);
7136 m_InsideString =
false;
7139 void VmaJsonWriter::WriteNumber(uint32_t n)
7141 VMA_ASSERT(!m_InsideString);
7146 void VmaJsonWriter::WriteNumber(uint64_t n)
7148 VMA_ASSERT(!m_InsideString);
7153 void VmaJsonWriter::WriteBool(
bool b)
7155 VMA_ASSERT(!m_InsideString);
7157 m_SB.Add(b ?
"true" :
"false");
7160 void VmaJsonWriter::WriteNull()
7162 VMA_ASSERT(!m_InsideString);
7167 void VmaJsonWriter::BeginValue(
bool isString)
7169 if(!m_Stack.empty())
7171 StackItem& currItem = m_Stack.back();
7172 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7173 currItem.valueCount % 2 == 0)
7175 VMA_ASSERT(isString);
7178 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7179 currItem.valueCount % 2 != 0)
7183 else if(currItem.valueCount > 0)
7192 ++currItem.valueCount;
7196 void VmaJsonWriter::WriteIndent(
bool oneLess)
7198 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
7202 size_t count = m_Stack.size();
7203 if(count > 0 && oneLess)
7207 for(
size_t i = 0; i < count; ++i)
7214 #endif // #if VMA_STATS_STRING_ENABLED 7218 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
7220 if(IsUserDataString())
7222 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
7224 FreeUserDataString(hAllocator);
7226 if(pUserData != VMA_NULL)
7228 const char*
const newStrSrc = (
char*)pUserData;
7229 const size_t newStrLen = strlen(newStrSrc);
7230 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
7231 memcpy(newStrDst, newStrSrc, newStrLen + 1);
7232 m_pUserData = newStrDst;
7237 m_pUserData = pUserData;
7241 void VmaAllocation_T::ChangeBlockAllocation(
7243 VmaDeviceMemoryBlock* block,
7244 VkDeviceSize offset)
7246 VMA_ASSERT(block != VMA_NULL);
7247 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7250 if(block != m_BlockAllocation.m_Block)
7252 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
7253 if(IsPersistentMap())
7255 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
7256 block->Map(hAllocator, mapRefCount, VMA_NULL);
7259 m_BlockAllocation.m_Block = block;
7260 m_BlockAllocation.m_Offset = offset;
7263 void VmaAllocation_T::ChangeSize(VkDeviceSize newSize)
7265 VMA_ASSERT(newSize > 0);
7269 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
7271 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7272 m_BlockAllocation.m_Offset = newOffset;
7275 VkDeviceSize VmaAllocation_T::GetOffset()
const 7279 case ALLOCATION_TYPE_BLOCK:
7280 return m_BlockAllocation.m_Offset;
7281 case ALLOCATION_TYPE_DEDICATED:
7289 VkDeviceMemory VmaAllocation_T::GetMemory()
const 7293 case ALLOCATION_TYPE_BLOCK:
7294 return m_BlockAllocation.m_Block->GetDeviceMemory();
7295 case ALLOCATION_TYPE_DEDICATED:
7296 return m_DedicatedAllocation.m_hMemory;
7299 return VK_NULL_HANDLE;
7303 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 7307 case ALLOCATION_TYPE_BLOCK:
7308 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
7309 case ALLOCATION_TYPE_DEDICATED:
7310 return m_DedicatedAllocation.m_MemoryTypeIndex;
7317 void* VmaAllocation_T::GetMappedData()
const 7321 case ALLOCATION_TYPE_BLOCK:
7324 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
7325 VMA_ASSERT(pBlockData != VMA_NULL);
7326 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
7333 case ALLOCATION_TYPE_DEDICATED:
7334 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
7335 return m_DedicatedAllocation.m_pMappedData;
7342 bool VmaAllocation_T::CanBecomeLost()
const 7346 case ALLOCATION_TYPE_BLOCK:
7347 return m_BlockAllocation.m_CanBecomeLost;
7348 case ALLOCATION_TYPE_DEDICATED:
7356 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7358 VMA_ASSERT(CanBecomeLost());
7364 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
7367 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7372 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
7378 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
7388 #if VMA_STATS_STRING_ENABLED 7391 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
7400 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 7402 json.WriteString(
"Type");
7403 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
7405 json.WriteString(
"Size");
7406 json.WriteNumber(m_Size);
7408 if(m_pUserData != VMA_NULL)
7410 json.WriteString(
"UserData");
7411 if(IsUserDataString())
7413 json.WriteString((
const char*)m_pUserData);
7418 json.ContinueString_Pointer(m_pUserData);
7423 json.WriteString(
"CreationFrameIndex");
7424 json.WriteNumber(m_CreationFrameIndex);
7426 json.WriteString(
"LastUseFrameIndex");
7427 json.WriteNumber(GetLastUseFrameIndex());
7429 if(m_BufferImageUsage != 0)
7431 json.WriteString(
"Usage");
7432 json.WriteNumber(m_BufferImageUsage);
7438 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
7440 VMA_ASSERT(IsUserDataString());
7441 if(m_pUserData != VMA_NULL)
7443 char*
const oldStr = (
char*)m_pUserData;
7444 const size_t oldStrLen = strlen(oldStr);
7445 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
7446 m_pUserData = VMA_NULL;
7450 void VmaAllocation_T::BlockAllocMap()
7452 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7454 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7460 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
7464 void VmaAllocation_T::BlockAllocUnmap()
7466 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7468 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7474 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
7478 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
7480 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7484 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7486 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
7487 *ppData = m_DedicatedAllocation.m_pMappedData;
7493 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
7494 return VK_ERROR_MEMORY_MAP_FAILED;
7499 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
7500 hAllocator->m_hDevice,
7501 m_DedicatedAllocation.m_hMemory,
7506 if(result == VK_SUCCESS)
7508 m_DedicatedAllocation.m_pMappedData = *ppData;
7515 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
7517 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7519 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7524 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
7525 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
7526 hAllocator->m_hDevice,
7527 m_DedicatedAllocation.m_hMemory);
7532 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
7536 #if VMA_STATS_STRING_ENABLED 7538 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
7542 json.WriteString(
"Blocks");
7545 json.WriteString(
"Allocations");
7548 json.WriteString(
"UnusedRanges");
7551 json.WriteString(
"UsedBytes");
7554 json.WriteString(
"UnusedBytes");
7559 json.WriteString(
"AllocationSize");
7560 json.BeginObject(
true);
7561 json.WriteString(
"Min");
7563 json.WriteString(
"Avg");
7565 json.WriteString(
"Max");
7572 json.WriteString(
"UnusedRangeSize");
7573 json.BeginObject(
true);
7574 json.WriteString(
"Min");
7576 json.WriteString(
"Avg");
7578 json.WriteString(
"Max");
7586 #endif // #if VMA_STATS_STRING_ENABLED 7588 struct VmaSuballocationItemSizeLess
7591 const VmaSuballocationList::iterator lhs,
7592 const VmaSuballocationList::iterator rhs)
const 7594 return lhs->size < rhs->size;
7597 const VmaSuballocationList::iterator lhs,
7598 VkDeviceSize rhsSize)
const 7600 return lhs->size < rhsSize;
7608 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
7610 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
7614 #if VMA_STATS_STRING_ENABLED 7616 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
7617 VkDeviceSize unusedBytes,
7618 size_t allocationCount,
7619 size_t unusedRangeCount)
const 7623 json.WriteString(
"TotalBytes");
7624 json.WriteNumber(GetSize());
7626 json.WriteString(
"UnusedBytes");
7627 json.WriteNumber(unusedBytes);
7629 json.WriteString(
"Allocations");
7630 json.WriteNumber((uint64_t)allocationCount);
7632 json.WriteString(
"UnusedRanges");
7633 json.WriteNumber((uint64_t)unusedRangeCount);
7635 json.WriteString(
"Suballocations");
7639 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
7640 VkDeviceSize offset,
7643 json.BeginObject(
true);
7645 json.WriteString(
"Offset");
7646 json.WriteNumber(offset);
7648 hAllocation->PrintParameters(json);
7653 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
7654 VkDeviceSize offset,
7655 VkDeviceSize size)
const 7657 json.BeginObject(
true);
7659 json.WriteString(
"Offset");
7660 json.WriteNumber(offset);
7662 json.WriteString(
"Type");
7663 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
7665 json.WriteString(
"Size");
7666 json.WriteNumber(size);
7671 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const 7677 #endif // #if VMA_STATS_STRING_ENABLED 7682 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
7683 VmaBlockMetadata(hAllocator),
7686 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7687 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
7691 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
7695 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
7697 VmaBlockMetadata::Init(size);
7700 m_SumFreeSize = size;
7702 VmaSuballocation suballoc = {};
7703 suballoc.offset = 0;
7704 suballoc.size = size;
7705 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7706 suballoc.hAllocation = VK_NULL_HANDLE;
7708 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
7709 m_Suballocations.push_back(suballoc);
7710 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
7712 m_FreeSuballocationsBySize.push_back(suballocItem);
7715 bool VmaBlockMetadata_Generic::Validate()
const 7717 VMA_VALIDATE(!m_Suballocations.empty());
7720 VkDeviceSize calculatedOffset = 0;
7722 uint32_t calculatedFreeCount = 0;
7724 VkDeviceSize calculatedSumFreeSize = 0;
7727 size_t freeSuballocationsToRegister = 0;
7729 bool prevFree =
false;
7731 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7732 suballocItem != m_Suballocations.cend();
7735 const VmaSuballocation& subAlloc = *suballocItem;
7738 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
7740 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
7742 VMA_VALIDATE(!prevFree || !currFree);
7744 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
7748 calculatedSumFreeSize += subAlloc.size;
7749 ++calculatedFreeCount;
7750 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7752 ++freeSuballocationsToRegister;
7756 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
7760 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
7761 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
7764 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
7767 calculatedOffset += subAlloc.size;
7768 prevFree = currFree;
7773 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
7775 VkDeviceSize lastSize = 0;
7776 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
7778 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
7781 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7783 VMA_VALIDATE(suballocItem->size >= lastSize);
7785 lastSize = suballocItem->size;
7789 VMA_VALIDATE(ValidateFreeSuballocationList());
7790 VMA_VALIDATE(calculatedOffset == GetSize());
7791 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
7792 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
7797 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 7799 if(!m_FreeSuballocationsBySize.empty())
7801 return m_FreeSuballocationsBySize.back()->size;
7809 bool VmaBlockMetadata_Generic::IsEmpty()
const 7811 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
7814 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 7818 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7830 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7831 suballocItem != m_Suballocations.cend();
7834 const VmaSuballocation& suballoc = *suballocItem;
7835 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
7848 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 7850 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7852 inoutStats.
size += GetSize();
7859 #if VMA_STATS_STRING_ENABLED 7861 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 7863 PrintDetailedMap_Begin(json,
7865 m_Suballocations.size() - (size_t)m_FreeCount,
7869 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7870 suballocItem != m_Suballocations.cend();
7871 ++suballocItem, ++i)
7873 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7875 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
7879 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
7883 PrintDetailedMap_End(json);
7886 #endif // #if VMA_STATS_STRING_ENABLED 7888 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
7889 uint32_t currentFrameIndex,
7890 uint32_t frameInUseCount,
7891 VkDeviceSize bufferImageGranularity,
7892 VkDeviceSize allocSize,
7893 VkDeviceSize allocAlignment,
7895 VmaSuballocationType allocType,
7896 bool canMakeOtherLost,
7898 VmaAllocationRequest* pAllocationRequest)
7900 VMA_ASSERT(allocSize > 0);
7901 VMA_ASSERT(!upperAddress);
7902 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7903 VMA_ASSERT(pAllocationRequest != VMA_NULL);
7904 VMA_HEAVY_ASSERT(Validate());
7906 pAllocationRequest->type = VmaAllocationRequestType::Normal;
7909 if(canMakeOtherLost ==
false &&
7910 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
7916 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
7917 if(freeSuballocCount > 0)
7922 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7923 m_FreeSuballocationsBySize.data(),
7924 m_FreeSuballocationsBySize.data() + freeSuballocCount,
7925 allocSize + 2 * VMA_DEBUG_MARGIN,
7926 VmaSuballocationItemSizeLess());
7927 size_t index = it - m_FreeSuballocationsBySize.data();
7928 for(; index < freeSuballocCount; ++index)
7933 bufferImageGranularity,
7937 m_FreeSuballocationsBySize[index],
7939 &pAllocationRequest->offset,
7940 &pAllocationRequest->itemsToMakeLostCount,
7941 &pAllocationRequest->sumFreeSize,
7942 &pAllocationRequest->sumItemSize))
7944 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
7949 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
7951 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7952 it != m_Suballocations.end();
7955 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
7958 bufferImageGranularity,
7964 &pAllocationRequest->offset,
7965 &pAllocationRequest->itemsToMakeLostCount,
7966 &pAllocationRequest->sumFreeSize,
7967 &pAllocationRequest->sumItemSize))
7969 pAllocationRequest->item = it;
7977 for(
size_t index = freeSuballocCount; index--; )
7982 bufferImageGranularity,
7986 m_FreeSuballocationsBySize[index],
7988 &pAllocationRequest->offset,
7989 &pAllocationRequest->itemsToMakeLostCount,
7990 &pAllocationRequest->sumFreeSize,
7991 &pAllocationRequest->sumItemSize))
7993 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
8000 if(canMakeOtherLost)
8005 VmaAllocationRequest tmpAllocRequest = {};
8006 tmpAllocRequest.type = VmaAllocationRequestType::Normal;
8007 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
8008 suballocIt != m_Suballocations.end();
8011 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
8012 suballocIt->hAllocation->CanBecomeLost())
8017 bufferImageGranularity,
8023 &tmpAllocRequest.offset,
8024 &tmpAllocRequest.itemsToMakeLostCount,
8025 &tmpAllocRequest.sumFreeSize,
8026 &tmpAllocRequest.sumItemSize))
8030 *pAllocationRequest = tmpAllocRequest;
8031 pAllocationRequest->item = suballocIt;
8034 if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
8036 *pAllocationRequest = tmpAllocRequest;
8037 pAllocationRequest->item = suballocIt;
8050 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
8051 uint32_t currentFrameIndex,
8052 uint32_t frameInUseCount,
8053 VmaAllocationRequest* pAllocationRequest)
8055 VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);
8057 while(pAllocationRequest->itemsToMakeLostCount > 0)
8059 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
8061 ++pAllocationRequest->item;
8063 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
8064 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
8065 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
8066 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8068 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
8069 --pAllocationRequest->itemsToMakeLostCount;
8077 VMA_HEAVY_ASSERT(Validate());
8078 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
8079 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
8084 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
8086 uint32_t lostAllocationCount = 0;
8087 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8088 it != m_Suballocations.end();
8091 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
8092 it->hAllocation->CanBecomeLost() &&
8093 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8095 it = FreeSuballocation(it);
8096 ++lostAllocationCount;
8099 return lostAllocationCount;
8102 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
8104 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8105 it != m_Suballocations.end();
8108 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
8110 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
8112 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
8113 return VK_ERROR_VALIDATION_FAILED_EXT;
8115 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
8117 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8118 return VK_ERROR_VALIDATION_FAILED_EXT;
8126 void VmaBlockMetadata_Generic::Alloc(
8127 const VmaAllocationRequest& request,
8128 VmaSuballocationType type,
8129 VkDeviceSize allocSize,
8132 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
8133 VMA_ASSERT(request.item != m_Suballocations.end());
8134 VmaSuballocation& suballoc = *request.item;
8136 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8138 VMA_ASSERT(request.offset >= suballoc.offset);
8139 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
8140 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
8141 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
8145 UnregisterFreeSuballocation(request.item);
8147 suballoc.offset = request.offset;
8148 suballoc.size = allocSize;
8149 suballoc.type = type;
8150 suballoc.hAllocation = hAllocation;
8155 VmaSuballocation paddingSuballoc = {};
8156 paddingSuballoc.offset = request.offset + allocSize;
8157 paddingSuballoc.size = paddingEnd;
8158 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8159 VmaSuballocationList::iterator next = request.item;
8161 const VmaSuballocationList::iterator paddingEndItem =
8162 m_Suballocations.insert(next, paddingSuballoc);
8163 RegisterFreeSuballocation(paddingEndItem);
8169 VmaSuballocation paddingSuballoc = {};
8170 paddingSuballoc.offset = request.offset - paddingBegin;
8171 paddingSuballoc.size = paddingBegin;
8172 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8173 const VmaSuballocationList::iterator paddingBeginItem =
8174 m_Suballocations.insert(request.item, paddingSuballoc);
8175 RegisterFreeSuballocation(paddingBeginItem);
8179 m_FreeCount = m_FreeCount - 1;
8180 if(paddingBegin > 0)
8188 m_SumFreeSize -= allocSize;
8191 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
8193 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8194 suballocItem != m_Suballocations.end();
8197 VmaSuballocation& suballoc = *suballocItem;
8198 if(suballoc.hAllocation == allocation)
8200 FreeSuballocation(suballocItem);
8201 VMA_HEAVY_ASSERT(Validate());
8205 VMA_ASSERT(0 &&
"Not found!");
8208 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
8210 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8211 suballocItem != m_Suballocations.end();
8214 VmaSuballocation& suballoc = *suballocItem;
8215 if(suballoc.offset == offset)
8217 FreeSuballocation(suballocItem);
8221 VMA_ASSERT(0 &&
"Not found!");
// Attempts to resize allocation `alloc` in place to `newSize`.
// Shrinking always succeeds (the reclaimed tail becomes/extends a free item);
// growing succeeds only when a free successor item can absorb the difference,
// including VMA_DEBUG_MARGIN. Returns true on success.
// NOTE(review): this extraction elided brace-only and some `else`/`return`
// lines; the code lines below are preserved verbatim.
8224 bool VmaBlockMetadata_Generic::ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize)
8226 typedef VmaSuballocationList::iterator iter_type;
// Linear search for the suballocation owning `alloc`.
8227 for(iter_type suballocItem = m_Suballocations.begin();
8228 suballocItem != m_Suballocations.end();
8231 VmaSuballocation& suballoc = *suballocItem;
8232 if(suballoc.hAllocation == alloc)
8234 iter_type nextItem = suballocItem;
// Caller guarantees an actual size change to a positive size.
8238 VMA_ASSERT(newSize != alloc->GetSize() && newSize > 0);
// --- Shrinking path ---
8241 if(newSize < alloc->GetSize())
8243 const VkDeviceSize sizeDiff = suballoc.size - newSize;
8246 if(nextItem != m_Suballocations.end())
// Next item is free: grow it backwards to absorb the reclaimed bytes.
8249 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8252 UnregisterFreeSuballocation(nextItem);
8253 nextItem->offset -= sizeDiff;
8254 nextItem->size += sizeDiff;
8255 RegisterFreeSuballocation(nextItem);
// Next item is used: insert a brand-new free item after the allocation.
8261 VmaSuballocation newFreeSuballoc;
8262 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
8263 newFreeSuballoc.offset = suballoc.offset + newSize;
8264 newFreeSuballoc.size = sizeDiff;
8265 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8266 iter_type newFreeSuballocIt = m_Suballocations.insert(nextItem, newFreeSuballoc);
8267 RegisterFreeSuballocation(newFreeSuballocIt);
// Allocation is the last item: append a new free item at the end of the list.
8276 VmaSuballocation newFreeSuballoc;
8277 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
8278 newFreeSuballoc.offset = suballoc.offset + newSize;
8279 newFreeSuballoc.size = sizeDiff;
8280 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8281 m_Suballocations.push_back(newFreeSuballoc);
8283 iter_type newFreeSuballocIt = m_Suballocations.end();
8284 RegisterFreeSuballocation(--newFreeSuballocIt);
8289 suballoc.size = newSize;
8290 m_SumFreeSize += sizeDiff;
// --- Growing path ---
8295 const VkDeviceSize sizeDiff = newSize - suballoc.size;
8298 if(nextItem != m_Suballocations.end())
8301 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
// Not enough free space behind the allocation (incl. debug margin) -> fail.
8304 if(nextItem->size < sizeDiff + VMA_DEBUG_MARGIN)
// More free space than needed: shift and shrink the free successor.
8310 if(nextItem->size > sizeDiff)
8313 UnregisterFreeSuballocation(nextItem);
8314 nextItem->offset += sizeDiff;
8315 nextItem->size -= sizeDiff;
8316 RegisterFreeSuballocation(nextItem);
// Exactly the needed amount: the free successor disappears entirely.
8322 UnregisterFreeSuballocation(nextItem);
8323 m_Suballocations.erase(nextItem);
8339 suballoc.size = newSize;
8340 m_SumFreeSize -= sizeDiff;
// Reached only if `alloc` was not found in this block.
8347 VMA_ASSERT(0 &&
"Not found!");
8351 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 8353 VkDeviceSize lastSize = 0;
8354 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
8356 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
8358 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
8359 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
8360 VMA_VALIDATE(it->size >= lastSize);
8361 lastSize = it->size;
// Checks whether an allocation of allocSize/allocAlignment/allocType can be
// placed starting at suballocItem. Outputs the chosen *pOffset and, when
// canMakeOtherLost is true, also how many existing allocations would have to
// be made lost (*itemsToMakeLostCount) plus free/used byte tallies.
// NOTE(review): this extraction elided brace-only and some `else`/`return`
// lines; the code lines below are preserved verbatim.
8366 bool VmaBlockMetadata_Generic::CheckAllocation(
8367 uint32_t currentFrameIndex,
8368 uint32_t frameInUseCount,
8369 VkDeviceSize bufferImageGranularity,
8370 VkDeviceSize allocSize,
8371 VkDeviceSize allocAlignment,
8372 VmaSuballocationType allocType,
8373 VmaSuballocationList::const_iterator suballocItem,
8374 bool canMakeOtherLost,
8375 VkDeviceSize* pOffset,
8376 size_t* itemsToMakeLostCount,
8377 VkDeviceSize* pSumFreeSize,
8378 VkDeviceSize* pSumItemSize)
const 8380 VMA_ASSERT(allocSize > 0);
8381 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8382 VMA_ASSERT(suballocItem != m_Suballocations.cend());
8383 VMA_ASSERT(pOffset != VMA_NULL);
8385 *itemsToMakeLostCount = 0;
// --- Branch 1: starting item may be used; other allocations may be made lost. ---
8389 if(canMakeOtherLost)
8391 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8393 *pSumFreeSize = suballocItem->size;
// Starting item is used: it must itself be losable and old enough.
8397 if(suballocItem->hAllocation->CanBecomeLost() &&
8398 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8400 ++*itemsToMakeLostCount;
8401 *pSumItemSize = suballocItem->size;
// Remaining size of the whole block from this offset is too small -> fail early.
8410 if(GetSize() - suballocItem->offset < allocSize)
8416 *pOffset = suballocItem->offset;
// Leave room for the debug margin before the allocation.
8419 if(VMA_DEBUG_MARGIN > 0)
8421 *pOffset += VMA_DEBUG_MARGIN;
8425 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Respect bufferImageGranularity against preceding allocations on the same page.
8429 if(bufferImageGranularity > 1)
8431 bool bufferImageGranularityConflict =
false;
8432 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8433 while(prevSuballocItem != m_Suballocations.cbegin())
8436 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8437 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8439 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8441 bufferImageGranularityConflict =
true;
8449 if(bufferImageGranularityConflict)
8451 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
8457 if(*pOffset >= suballocItem->offset + suballocItem->size)
8463 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
8466 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
8468 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
// Required range would run past the end of the block -> fail.
8470 if(suballocItem->offset + totalSize > GetSize())
// Walk forward through successor items until totalSize bytes are accounted for,
// summing free bytes and counting losable used allocations along the way.
8477 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
8478 if(totalSize > suballocItem->size)
8480 VkDeviceSize remainingSize = totalSize - suballocItem->size;
8481 while(remainingSize > 0)
8484 if(lastSuballocItem == m_Suballocations.cend())
8488 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8490 *pSumFreeSize += lastSuballocItem->size;
8494 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
8495 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
8496 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8498 ++*itemsToMakeLostCount;
8499 *pSumItemSize += lastSuballocItem->size;
8506 remainingSize = (lastSuballocItem->size < remainingSize) ?
8507 remainingSize - lastSuballocItem->size : 0;
// Following allocations on the same page with conflicting type must also be lost.
8513 if(bufferImageGranularity > 1)
8515 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
8517 while(nextSuballocItem != m_Suballocations.cend())
8519 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8520 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8522 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8524 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
8525 if(nextSuballoc.hAllocation->CanBecomeLost() &&
8526 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8528 ++*itemsToMakeLostCount;
// --- Branch 2: starting item must already be free; nothing is made lost. ---
8547 const VmaSuballocation& suballoc = *suballocItem;
8548 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8550 *pSumFreeSize = suballoc.size;
// The free item alone must be big enough.
8553 if(suballoc.size < allocSize)
8559 *pOffset = suballoc.offset;
8562 if(VMA_DEBUG_MARGIN > 0)
8564 *pOffset += VMA_DEBUG_MARGIN;
8568 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Same granularity check against preceding allocations as in branch 1.
8572 if(bufferImageGranularity > 1)
8574 bool bufferImageGranularityConflict =
false;
8575 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8576 while(prevSuballocItem != m_Suballocations.cbegin())
8579 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8580 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8582 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8584 bufferImageGranularityConflict =
true;
8592 if(bufferImageGranularityConflict)
8594 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
8599 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
8602 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
// Padding + allocation + end margin must fit inside this single free item.
8605 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// A following allocation on the same page with conflicting type -> cannot place here.
8612 if(bufferImageGranularity > 1)
8614 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
8616 while(nextSuballocItem != m_Suballocations.cend())
8618 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8619 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8621 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8640 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
8642 VMA_ASSERT(item != m_Suballocations.end());
8643 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8645 VmaSuballocationList::iterator nextItem = item;
8647 VMA_ASSERT(nextItem != m_Suballocations.end());
8648 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
8650 item->size += nextItem->size;
8652 m_Suballocations.erase(nextItem);
8655 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
8658 VmaSuballocation& suballoc = *suballocItem;
8659 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8660 suballoc.hAllocation = VK_NULL_HANDLE;
8664 m_SumFreeSize += suballoc.size;
8667 bool mergeWithNext =
false;
8668 bool mergeWithPrev =
false;
8670 VmaSuballocationList::iterator nextItem = suballocItem;
8672 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
8674 mergeWithNext =
true;
8677 VmaSuballocationList::iterator prevItem = suballocItem;
8678 if(suballocItem != m_Suballocations.begin())
8681 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8683 mergeWithPrev =
true;
8689 UnregisterFreeSuballocation(nextItem);
8690 MergeFreeWithNext(suballocItem);
8695 UnregisterFreeSuballocation(prevItem);
8696 MergeFreeWithNext(prevItem);
8697 RegisterFreeSuballocation(prevItem);
8702 RegisterFreeSuballocation(suballocItem);
8703 return suballocItem;
8707 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
8709 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8710 VMA_ASSERT(item->size > 0);
8714 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8716 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8718 if(m_FreeSuballocationsBySize.empty())
8720 m_FreeSuballocationsBySize.push_back(item);
8724 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
8732 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
8734 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8735 VMA_ASSERT(item->size > 0);
8739 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8741 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8743 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
8744 m_FreeSuballocationsBySize.data(),
8745 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
8747 VmaSuballocationItemSizeLess());
8748 for(
size_t index = it - m_FreeSuballocationsBySize.data();
8749 index < m_FreeSuballocationsBySize.size();
8752 if(m_FreeSuballocationsBySize[index] == item)
8754 VmaVectorRemove(m_FreeSuballocationsBySize, index);
8757 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
8759 VMA_ASSERT(0 &&
"Not found.");
8765 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
8766 VkDeviceSize bufferImageGranularity,
8767 VmaSuballocationType& inOutPrevSuballocType)
const 8769 if(bufferImageGranularity == 1 || IsEmpty())
8774 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
8775 bool typeConflictFound =
false;
8776 for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
8777 it != m_Suballocations.cend();
8780 const VmaSuballocationType suballocType = it->type;
8781 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
8783 minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
8784 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
8786 typeConflictFound =
true;
8788 inOutPrevSuballocType = suballocType;
8792 return typeConflictFound || minAlignment >= bufferImageGranularity;
8798 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
8799 VmaBlockMetadata(hAllocator),
8801 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8802 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8803 m_1stVectorIndex(0),
8804 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
8805 m_1stNullItemsBeginCount(0),
8806 m_1stNullItemsMiddleCount(0),
8807 m_2ndNullItemsCount(0)
8811 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
8815 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
8817 VmaBlockMetadata::Init(size);
8818 m_SumFreeSize = size;
// Consistency check of the linear (ring-buffer / double-stack) metadata:
// vector/mode agreement, null-item counters, monotonically increasing offsets
// with VMA_DEBUG_MARGIN gaps, and the m_SumFreeSize invariant.
// NOTE(review): this extraction elided brace-only and some `else` lines
// (e.g. the nullItem count increments); code lines below are verbatim.
8821 bool VmaBlockMetadata_Linear::Validate()
const 8823 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8824 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// 2nd vector is used iff the mode says so.
8826 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
8827 VMA_VALIDATE(!suballocations1st.empty() ||
8828 suballocations2nd.empty() ||
8829 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
// Boundary items of non-empty vectors must be real allocations, not nulls.
8831 if(!suballocations1st.empty())
8834 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
8836 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
8838 if(!suballocations2nd.empty())
8841 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
8844 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
8845 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
8847 VkDeviceSize sumUsedSize = 0;
8848 const size_t suballoc1stCount = suballocations1st.size();
8849 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Ring-buffer mode: 2nd vector occupies the space before the 1st vector.
8851 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8853 const size_t suballoc2ndCount = suballocations2nd.size();
8854 size_t nullItem2ndCount = 0;
8855 for(
size_t i = 0; i < suballoc2ndCount; ++i)
8857 const VmaSuballocation& suballoc = suballocations2nd[i];
8858 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8860 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8861 VMA_VALIDATE(suballoc.offset >= offset);
8865 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8866 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8867 sumUsedSize += suballoc.size;
8874 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8877 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Leading null items of the 1st vector must be genuinely empty.
8880 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
8882 const VmaSuballocation& suballoc = suballocations1st[i];
8883 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
8884 suballoc.hAllocation == VK_NULL_HANDLE);
8887 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// Remaining 1st-vector items: verify per-item invariants and count middle nulls.
8889 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
8891 const VmaSuballocation& suballoc = suballocations1st[i];
8892 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8894 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8895 VMA_VALIDATE(suballoc.offset >= offset);
8896 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
8900 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8901 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8902 sumUsedSize += suballoc.size;
8909 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8911 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// Double-stack mode: 2nd vector grows downward from the end, iterate in reverse.
8913 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8915 const size_t suballoc2ndCount = suballocations2nd.size();
8916 size_t nullItem2ndCount = 0;
8917 for(
size_t i = suballoc2ndCount; i--; )
8919 const VmaSuballocation& suballoc = suballocations2nd[i];
8920 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8922 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8923 VMA_VALIDATE(suballoc.offset >= offset);
8927 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8928 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8929 sumUsedSize += suballoc.size;
8936 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8939 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Global invariants: offsets stay inside the block; free-size counter matches.
8942 VMA_VALIDATE(offset <= GetSize());
8943 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
8948 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 8950 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
8951 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
8954 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 8956 const VkDeviceSize size = GetSize();
8968 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8970 switch(m_2ndVectorMode)
8972 case SECOND_VECTOR_EMPTY:
8978 const size_t suballocations1stCount = suballocations1st.size();
8979 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
8980 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
8981 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
8983 firstSuballoc.offset,
8984 size - (lastSuballoc.offset + lastSuballoc.size));
8988 case SECOND_VECTOR_RING_BUFFER:
8993 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8994 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
8995 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
8996 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
9000 case SECOND_VECTOR_DOUBLE_STACK:
9005 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9006 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
9007 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
9008 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Fills outInfo with per-block statistics by walking the address space in
// order: (ring-buffer 2nd items) -> 1st items -> (double-stack 2nd items),
// classifying each byte range as an allocation or an unused gap.
// NOTE(review): this extraction elided brace-only lines and the actual
// outInfo-update statements between the visible lines; code is verbatim.
9018 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 9020 const VkDeviceSize size = GetSize();
9021 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9022 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9023 const size_t suballoc1stCount = suballocations1st.size();
9024 const size_t suballoc2ndCount = suballocations2nd.size();
// Cursor over the block's address space, advanced past each processed range.
9035 VkDeviceSize lastOffset = 0;
// Pass 1 (ring-buffer mode only): 2nd-vector allocations before the 1st vector.
9037 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9039 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9040 size_t nextAlloc2ndIndex = 0;
9041 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed) placeholder items.
9044 while(nextAlloc2ndIndex < suballoc2ndCount &&
9045 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9047 ++nextAlloc2ndIndex;
9051 if(nextAlloc2ndIndex < suballoc2ndCount)
9053 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
9056 if(lastOffset < suballoc.offset)
9059 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9073 lastOffset = suballoc.offset + suballoc.size;
9074 ++nextAlloc2ndIndex;
// Trailing gap up to the start of the 1st vector.
9080 if(lastOffset < freeSpace2ndTo1stEnd)
9082 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9090 lastOffset = freeSpace2ndTo1stEnd;
// Pass 2: 1st-vector allocations, up to block end or the top of the 2nd stack.
9095 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9096 const VkDeviceSize freeSpace1stTo2ndEnd =
9097 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9098 while(lastOffset < freeSpace1stTo2ndEnd)
9101 while(nextAlloc1stIndex < suballoc1stCount &&
9102 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9104 ++nextAlloc1stIndex;
9108 if(nextAlloc1stIndex < suballoc1stCount)
9110 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9113 if(lastOffset < suballoc.offset)
9116 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9130 lastOffset = suballoc.offset + suballoc.size;
9131 ++nextAlloc1stIndex;
9137 if(lastOffset < freeSpace1stTo2ndEnd)
9139 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9147 lastOffset = freeSpace1stTo2ndEnd;
// Pass 3 (double-stack mode only): 2nd-vector allocations, iterated top-down.
9151 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9153 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9154 while(lastOffset < size)
9157 while(nextAlloc2ndIndex != SIZE_MAX &&
9158 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9160 --nextAlloc2ndIndex;
9164 if(nextAlloc2ndIndex != SIZE_MAX)
9166 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9169 if(lastOffset < suballoc.offset)
9172 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9186 lastOffset = suballoc.offset + suballoc.size;
9187 --nextAlloc2ndIndex;
// Final gap up to the end of the block.
9193 if(lastOffset < size)
9195 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Accumulates this block's statistics into inoutStats using the same
// three-pass address-space walk as CalcAllocationStatInfo:
// (ring-buffer 2nd) -> 1st -> (double-stack 2nd).
// NOTE(review): this extraction elided brace-only lines and the actual
// inoutStats-update statements between the visible lines; code is verbatim.
9211 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 9213 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9214 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9215 const VkDeviceSize size = GetSize();
9216 const size_t suballoc1stCount = suballocations1st.size();
9217 const size_t suballoc2ndCount = suballocations2nd.size();
// The whole block contributes to the pool's total size.
9219 inoutStats.
size += size;
9221 VkDeviceSize lastOffset = 0;
// Pass 1 (ring-buffer mode only): 2nd-vector allocations before the 1st vector.
9223 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9225 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
// NOTE(review): start index differs from CalcAllocationStatInfo (0 there);
// preserved verbatim — confirm against upstream if this block is reworked.
9226 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
9227 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed) placeholder items.
9230 while(nextAlloc2ndIndex < suballoc2ndCount &&
9231 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9233 ++nextAlloc2ndIndex;
9237 if(nextAlloc2ndIndex < suballoc2ndCount)
9239 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
9242 if(lastOffset < suballoc.offset)
9245 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9256 lastOffset = suballoc.offset + suballoc.size;
9257 ++nextAlloc2ndIndex;
9262 if(lastOffset < freeSpace2ndTo1stEnd)
9265 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9272 lastOffset = freeSpace2ndTo1stEnd;
// Pass 2: 1st-vector allocations, up to block end or the top of the 2nd stack.
9277 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9278 const VkDeviceSize freeSpace1stTo2ndEnd =
9279 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9280 while(lastOffset < freeSpace1stTo2ndEnd)
9283 while(nextAlloc1stIndex < suballoc1stCount &&
9284 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9286 ++nextAlloc1stIndex;
9290 if(nextAlloc1stIndex < suballoc1stCount)
9292 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9295 if(lastOffset < suballoc.offset)
9298 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9309 lastOffset = suballoc.offset + suballoc.size;
9310 ++nextAlloc1stIndex;
9315 if(lastOffset < freeSpace1stTo2ndEnd)
9318 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9325 lastOffset = freeSpace1stTo2ndEnd;
// Pass 3 (double-stack mode only): 2nd-vector allocations, iterated top-down.
9329 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9331 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9332 while(lastOffset < size)
9335 while(nextAlloc2ndIndex != SIZE_MAX &&
9336 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9338 --nextAlloc2ndIndex;
9342 if(nextAlloc2ndIndex != SIZE_MAX)
9344 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9347 if(lastOffset < suballoc.offset)
9350 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9361 lastOffset = suballoc.offset + suballoc.size;
9362 --nextAlloc2ndIndex;
// Final gap up to the end of the block.
9367 if(lastOffset < size)
9370 const VkDeviceSize unusedRangeSize = size - lastOffset;
9383 #if VMA_STATS_STRING_ENABLED 9384 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 9386 const VkDeviceSize size = GetSize();
9387 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9388 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9389 const size_t suballoc1stCount = suballocations1st.size();
9390 const size_t suballoc2ndCount = suballocations2nd.size();
9394 size_t unusedRangeCount = 0;
9395 VkDeviceSize usedBytes = 0;
9397 VkDeviceSize lastOffset = 0;
9399 size_t alloc2ndCount = 0;
9400 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9402 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9403 size_t nextAlloc2ndIndex = 0;
9404 while(lastOffset < freeSpace2ndTo1stEnd)
9407 while(nextAlloc2ndIndex < suballoc2ndCount &&
9408 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9410 ++nextAlloc2ndIndex;
9414 if(nextAlloc2ndIndex < suballoc2ndCount)
9416 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9419 if(lastOffset < suballoc.offset)
9428 usedBytes += suballoc.size;
9431 lastOffset = suballoc.offset + suballoc.size;
9432 ++nextAlloc2ndIndex;
9437 if(lastOffset < freeSpace2ndTo1stEnd)
9444 lastOffset = freeSpace2ndTo1stEnd;
9449 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9450 size_t alloc1stCount = 0;
9451 const VkDeviceSize freeSpace1stTo2ndEnd =
9452 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9453 while(lastOffset < freeSpace1stTo2ndEnd)
9456 while(nextAlloc1stIndex < suballoc1stCount &&
9457 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9459 ++nextAlloc1stIndex;
9463 if(nextAlloc1stIndex < suballoc1stCount)
9465 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9468 if(lastOffset < suballoc.offset)
9477 usedBytes += suballoc.size;
9480 lastOffset = suballoc.offset + suballoc.size;
9481 ++nextAlloc1stIndex;
9486 if(lastOffset < size)
9493 lastOffset = freeSpace1stTo2ndEnd;
9497 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9499 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9500 while(lastOffset < size)
9503 while(nextAlloc2ndIndex != SIZE_MAX &&
9504 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9506 --nextAlloc2ndIndex;
9510 if(nextAlloc2ndIndex != SIZE_MAX)
9512 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9515 if(lastOffset < suballoc.offset)
9524 usedBytes += suballoc.size;
9527 lastOffset = suballoc.offset + suballoc.size;
9528 --nextAlloc2ndIndex;
9533 if(lastOffset < size)
9545 const VkDeviceSize unusedBytes = size - usedBytes;
9546 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
9551 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9553 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9554 size_t nextAlloc2ndIndex = 0;
9555 while(lastOffset < freeSpace2ndTo1stEnd)
9558 while(nextAlloc2ndIndex < suballoc2ndCount &&
9559 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9561 ++nextAlloc2ndIndex;
9565 if(nextAlloc2ndIndex < suballoc2ndCount)
9567 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9570 if(lastOffset < suballoc.offset)
9573 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9574 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9579 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9582 lastOffset = suballoc.offset + suballoc.size;
9583 ++nextAlloc2ndIndex;
9588 if(lastOffset < freeSpace2ndTo1stEnd)
9591 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9592 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9596 lastOffset = freeSpace2ndTo1stEnd;
9601 nextAlloc1stIndex = m_1stNullItemsBeginCount;
9602 while(lastOffset < freeSpace1stTo2ndEnd)
9605 while(nextAlloc1stIndex < suballoc1stCount &&
9606 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9608 ++nextAlloc1stIndex;
9612 if(nextAlloc1stIndex < suballoc1stCount)
9614 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9617 if(lastOffset < suballoc.offset)
9620 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9621 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9626 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9629 lastOffset = suballoc.offset + suballoc.size;
9630 ++nextAlloc1stIndex;
9635 if(lastOffset < freeSpace1stTo2ndEnd)
9638 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9639 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9643 lastOffset = freeSpace1stTo2ndEnd;
9647 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9649 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9650 while(lastOffset < size)
9653 while(nextAlloc2ndIndex != SIZE_MAX &&
9654 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9656 --nextAlloc2ndIndex;
9660 if(nextAlloc2ndIndex != SIZE_MAX)
9662 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9665 if(lastOffset < suballoc.offset)
9668 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9669 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9674 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9677 lastOffset = suballoc.offset + suballoc.size;
9678 --nextAlloc2ndIndex;
9683 if(lastOffset < size)
9686 const VkDeviceSize unusedRangeSize = size - lastOffset;
9687 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9696 PrintDetailedMap_End(json);
// Validates basic preconditions and dispatches to the upper-address
// (double-stack) or lower-address variant of the request-creation logic.
//
// NOTE(review): the declaration lines for the `upperAddress` and `strategy`
// parameters (referenced below) were dropped by this extraction — confirm
// against the original signature.
9698 #endif // #if VMA_STATS_STRING_ENABLED 9700 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
9701 uint32_t currentFrameIndex,
9702 uint32_t frameInUseCount,
9703 VkDeviceSize bufferImageGranularity,
9704 VkDeviceSize allocSize,
9705 VkDeviceSize allocAlignment,
9707 VmaSuballocationType allocType,
9708 bool canMakeOtherLost,
9710 VmaAllocationRequest* pAllocationRequest)
9712 VMA_ASSERT(allocSize > 0);
9713 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9714 VMA_ASSERT(pAllocationRequest != VMA_NULL);
9715 VMA_HEAVY_ASSERT(Validate());
// Both branches forward every argument unchanged; only the placement
// direction differs.
9716 return upperAddress ?
9717 CreateAllocationRequest_UpperAddress(
9718 currentFrameIndex, frameInUseCount, bufferImageGranularity,
9719 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
9720 CreateAllocationRequest_LowerAddress(
9721 currentFrameIndex, frameInUseCount, bufferImageGranularity,
9722 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
// Tries to place an allocation at the top of the block, growing the 2nd
// suballocation vector downwards (double-stack usage). On success fills
// *pAllocationRequest with type UpperAddress and returns (presumably) true;
// returns false when the allocation cannot fit.
//
// NOTE(review): braces and several statements (returns, loop bodies) are
// missing from this extraction; comments describe the visible logic only.
9725 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
9726 uint32_t currentFrameIndex,
9727 uint32_t frameInUseCount,
9728 VkDeviceSize bufferImageGranularity,
9729 VkDeviceSize allocSize,
9730 VkDeviceSize allocAlignment,
9731 VmaSuballocationType allocType,
9732 bool canMakeOtherLost,
9734 VmaAllocationRequest* pAllocationRequest)
9736 const VkDeviceSize size = GetSize();
9737 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9738 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Upper-address allocation is incompatible with ring-buffer usage.
9740 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9742 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
9747 if(allocSize > size)
// Candidate base offset: below the lowest existing 2nd-vector item, or at
// the very top of the block if the 2nd vector is empty.
9751 VkDeviceSize resultBaseOffset = size - allocSize;
9752 if(!suballocations2nd.empty())
9754 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9755 resultBaseOffset = lastSuballoc.offset - allocSize;
9756 if(allocSize > lastSuballoc.offset)
9763 VkDeviceSize resultOffset = resultBaseOffset;
// Reserve the debug margin below the candidate offset, then align DOWN
// (allocation grows toward lower addresses).
9766 if(VMA_DEBUG_MARGIN > 0)
9768 if(resultOffset < VMA_DEBUG_MARGIN)
9772 resultOffset -= VMA_DEBUG_MARGIN;
9776 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Respect bufferImageGranularity against previously placed 2nd-vector
// items that would share a "page" with this allocation.
9780 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9782 bool bufferImageGranularityConflict =
false;
9783 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9785 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9786 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9788 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
9790 bufferImageGranularityConflict =
true;
9798 if(bufferImageGranularityConflict)
9800 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// There must be free space between the end of the 1st vector and the
// chosen offset (including the debug margin).
9805 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
9806 suballocations1st.back().offset + suballocations1st.back().size :
9808 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
// Also check granularity conflicts against the top of the 1st vector.
9812 if(bufferImageGranularity > 1)
9814 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9816 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9817 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9819 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success: fill the request. Upper-address requests never make other
// allocations lost.
9833 pAllocationRequest->offset = resultOffset;
9834 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
9835 pAllocationRequest->sumItemSize = 0;
9837 pAllocationRequest->itemsToMakeLostCount = 0;
9838 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
// Tries to place an allocation growing upward from low addresses: either at
// the end of the 1st vector (2nd vector empty or used as double stack), or —
// in ring-buffer mode — at the end of the 2nd vector, optionally making
// existing lost-able allocations lost to create room.
//
// NOTE(review): braces and several statements (returns, loop increments)
// are missing from this extraction; comments describe visible logic only.
9845 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
9846 uint32_t currentFrameIndex,
9847 uint32_t frameInUseCount,
9848 VkDeviceSize bufferImageGranularity,
9849 VkDeviceSize allocSize,
9850 VkDeviceSize allocAlignment,
9851 VmaSuballocationType allocType,
9852 bool canMakeOtherLost,
9854 VmaAllocationRequest* pAllocationRequest)
9856 const VkDeviceSize size = GetSize();
9857 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9858 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// CASE A: append to the end of the 1st vector.
9860 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9864 VkDeviceSize resultBaseOffset = 0;
9865 if(!suballocations1st.empty())
9867 const VmaSuballocation& lastSuballoc = suballocations1st.back();
9868 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9872 VkDeviceSize resultOffset = resultBaseOffset;
// Apply debug margin and alignment UP (allocation grows upward here).
9875 if(VMA_DEBUG_MARGIN > 0)
9877 resultOffset += VMA_DEBUG_MARGIN;
9881 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// bufferImageGranularity conflicts against preceding 1st-vector items.
9885 if(bufferImageGranularity > 1 && !suballocations1st.empty())
9887 bool bufferImageGranularityConflict =
false;
9888 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9890 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9891 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9893 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9895 bufferImageGranularityConflict =
true;
9903 if(bufferImageGranularityConflict)
9905 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the bottom of the upper stack (double-stack mode) or
// at the block end.
9909 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
9910 suballocations2nd.back().offset : size;
9913 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
// Also verify granularity against the upper-stack items above us.
9917 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9919 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9921 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9922 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9924 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success: request of type EndOf1st; nothing needs to be made lost.
9938 pAllocationRequest->offset = resultOffset;
9939 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
9940 pAllocationRequest->sumItemSize = 0;
9942 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
9943 pAllocationRequest->itemsToMakeLostCount = 0;
// CASE B: wrap around — append to the end of the 2nd vector (starting or
// continuing ring-buffer mode).
9950 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9952 VMA_ASSERT(!suballocations1st.empty());
9954 VkDeviceSize resultBaseOffset = 0;
9955 if(!suballocations2nd.empty())
9957 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9958 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9962 VkDeviceSize resultOffset = resultBaseOffset;
9965 if(VMA_DEBUG_MARGIN > 0)
9967 resultOffset += VMA_DEBUG_MARGIN;
9971 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
9975 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9977 bool bufferImageGranularityConflict =
false;
9978 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
9980 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
9981 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9983 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9985 bufferImageGranularityConflict =
true;
9993 if(bufferImageGranularityConflict)
9995 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
9999 pAllocationRequest->itemsToMakeLostCount = 0;
10000 pAllocationRequest->sumItemSize = 0;
10001 size_t index1st = m_1stNullItemsBeginCount;
// Optionally consume lost-able 1st-vector allocations that overlap the
// space we need ahead of us in the ring.
10003 if(canMakeOtherLost)
10005 while(index1st < suballocations1st.size() &&
10006 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
10009 const VmaSuballocation& suballoc = suballocations1st[index1st];
10010 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
10016 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
// Only allocations old enough (outside the frame-in-use window) may be lost.
10017 if(suballoc.hAllocation->CanBecomeLost() &&
10018 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10020 ++pAllocationRequest->itemsToMakeLostCount;
10021 pAllocationRequest->sumItemSize += suballoc.size;
// Additional items may have to be lost purely due to granularity conflicts
// on the shared page boundary.
10033 if(bufferImageGranularity > 1)
10035 while(index1st < suballocations1st.size())
10037 const VmaSuballocation& suballoc = suballocations1st[index1st];
10038 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
10040 if(suballoc.hAllocation != VK_NULL_HANDLE)
10043 if(suballoc.hAllocation->CanBecomeLost() &&
10044 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10046 ++pAllocationRequest->itemsToMakeLostCount;
10047 pAllocationRequest->sumItemSize += suballoc.size;
// Special unsupported case: request would run past the end of the block
// while consuming the whole 1st vector.
10065 if(index1st == suballocations1st.size() &&
10066 resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
10069 VMA_DEBUG_LOG(
"Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
// Fits either before the end of the block or before the next surviving
// 1st-vector item.
10074 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
10075 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
10079 if(bufferImageGranularity > 1)
10081 for(
size_t nextSuballocIndex = index1st;
10082 nextSuballocIndex < suballocations1st.size();
10083 nextSuballocIndex++)
10085 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
10086 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10088 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success: request of type EndOf2nd.
10102 pAllocationRequest->offset = resultOffset;
10103 pAllocationRequest->sumFreeSize =
10104 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
10106 - pAllocationRequest->sumItemSize;
10107 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
// Marks as lost the allocations counted in pAllocationRequest->
// itemsToMakeLostCount, walking the 1st vector from its first non-null item
// and wrapping into the 2nd vector in ring-buffer mode.
//
// NOTE(review): extraction dropped braces and some statements (e.g. the
// index increment and return paths); comments describe visible logic only.
10116 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
10117 uint32_t currentFrameIndex,
10118 uint32_t frameInUseCount,
10119 VmaAllocationRequest* pAllocationRequest)
// Nothing to do when the request required no allocations to be lost.
10121 if(pAllocationRequest->itemsToMakeLostCount == 0)
// Only ring-buffer (or empty-2nd) usage can produce lost-item requests.
10126 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
10129 SuballocationVectorType* suballocations = &AccessSuballocations1st();
10130 size_t index = m_1stNullItemsBeginCount;
10131 size_t madeLostCount = 0;
10132 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
// When the 1st vector is exhausted, continue in the 2nd (ring wrap).
10134 if(index == suballocations->size())
10138 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10140 suballocations = &AccessSuballocations2nd();
10144 VMA_ASSERT(!suballocations->empty());
10146 VmaSuballocation& suballoc = (*suballocations)[index];
10147 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10149 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
10150 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
10151 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Convert the lost allocation into a free item and update bookkeeping.
10153 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10154 suballoc.hAllocation = VK_NULL_HANDLE;
10155 m_SumFreeSize += suballoc.size;
10156 if(suballocations == &AccessSuballocations1st())
10158 ++m_1stNullItemsMiddleCount;
10162 ++m_2ndNullItemsCount;
// Compact/normalize internal vectors after freeing.
10174 CleanupAfterFree();
// Makes lost every allocation in this block that is allowed to become lost
// (based on currentFrameIndex/frameInUseCount), in both suballocation
// vectors. Returns the number of allocations made lost.
10180 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
10182 uint32_t lostAllocationCount = 0;
// Pass 1: the 1st vector, skipping the leading null items.
10184 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10185 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10187 VmaSuballocation& suballoc = suballocations1st[i];
10188 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10189 suballoc.hAllocation->CanBecomeLost() &&
10190 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10192 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10193 suballoc.hAllocation = VK_NULL_HANDLE;
10194 ++m_1stNullItemsMiddleCount;
10195 m_SumFreeSize += suballoc.size;
10196 ++lostAllocationCount;
// Pass 2: the whole 2nd vector.
10200 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10201 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10203 VmaSuballocation& suballoc = suballocations2nd[i];
10204 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10205 suballoc.hAllocation->CanBecomeLost() &&
10206 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10208 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10209 suballoc.hAllocation = VK_NULL_HANDLE;
10210 ++m_2ndNullItemsCount;
10211 m_SumFreeSize += suballoc.size;
10212 ++lostAllocationCount;
// Normalize the vectors only if something actually changed.
10216 if(lostAllocationCount)
10218 CleanupAfterFree();
10221 return lostAllocationCount;
// Validates the magic-value guard bytes written VMA_DEBUG_MARGIN before and
// immediately after every live allocation in both suballocation vectors.
// pBlockData points at the mapped memory of this block. Returns
// VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted guard found.
10224 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
10226 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10227 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10229 const VmaSuballocation& suballoc = suballocations1st[i];
10230 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Guard region before the allocation.
10232 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10234 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10235 return VK_ERROR_VALIDATION_FAILED_EXT;
// Guard region after the allocation.
10237 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10239 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10240 return VK_ERROR_VALIDATION_FAILED_EXT;
// Same checks for the 2nd vector.
10245 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10246 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10248 const VmaSuballocation& suballoc = suballocations2nd[i];
10249 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10251 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10253 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10254 return VK_ERROR_VALIDATION_FAILED_EXT;
10256 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10258 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10259 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously created allocation request: records the new
// suballocation in the appropriate vector according to request.type and
// updates m_2ndVectorMode / m_SumFreeSize.
//
// NOTE(review): the `hAllocation` parameter referenced below is declared on
// a line dropped by this extraction; `break` statements and braces between
// cases are likewise not visible.
10267 void VmaBlockMetadata_Linear::Alloc(
10268 const VmaAllocationRequest& request,
10269 VmaSuballocationType type,
10270 VkDeviceSize allocSize,
10273 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
10275 switch(request.type)
// Double-stack: push onto the 2nd vector growing down from the top.
10277 case VmaAllocationRequestType::UpperAddress:
10279 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
10280 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
10281 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10282 suballocations2nd.push_back(newSuballoc);
10283 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
// Append past the last item of the 1st vector.
10286 case VmaAllocationRequestType::EndOf1st:
10288 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10290 VMA_ASSERT(suballocations1st.empty() ||
10291 request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
10293 VMA_ASSERT(request.offset + allocSize <= GetSize());
10295 suballocations1st.push_back(newSuballoc);
// Ring-buffer wrap: append to the 2nd vector, below the 1st vector's
// first live item.
10298 case VmaAllocationRequestType::EndOf2nd:
10300 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10302 VMA_ASSERT(!suballocations1st.empty() &&
10303 request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
10304 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10306 switch(m_2ndVectorMode)
10308 case SECOND_VECTOR_EMPTY:
// First wrap-around switches the block into ring-buffer mode.
10310 VMA_ASSERT(suballocations2nd.empty());
10311 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
10313 case SECOND_VECTOR_RING_BUFFER:
10315 VMA_ASSERT(!suballocations2nd.empty());
10317 case SECOND_VECTOR_DOUBLE_STACK:
10318 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
10324 suballocations2nd.push_back(newSuballoc);
10328 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
10331 m_SumFreeSize -= newSuballoc.size;
10334 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
10336 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation starting at `offset`. Fast paths handle the first
// live item of the 1st vector and the last item of either vector; otherwise
// a binary search locates the item in the (offset-sorted) vectors.
//
// NOTE(review): braces and early-return lines were dropped by this
// extraction; comments describe the visible logic only.
10339 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
10341 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10342 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Fast path: the first live item of the 1st vector — just extend the
// leading null-item run.
10344 if(!suballocations1st.empty())
10347 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10348 if(firstSuballoc.offset == offset)
10350 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10351 firstSuballoc.hAllocation = VK_NULL_HANDLE;
10352 m_SumFreeSize += firstSuballoc.size;
10353 ++m_1stNullItemsBeginCount;
10354 CleanupAfterFree();
// Fast path: the most recently pushed item of the 2nd vector (top of the
// upper stack, or newest ring item).
10360 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
10361 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10363 VmaSuballocation& lastSuballoc = suballocations2nd.back();
10364 if(lastSuballoc.offset == offset)
10366 m_SumFreeSize += lastSuballoc.size;
10367 suballocations2nd.pop_back();
10368 CleanupAfterFree();
// Fast path: the last item of the 1st vector when no 2nd vector exists.
10373 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
10375 VmaSuballocation& lastSuballoc = suballocations1st.back();
10376 if(lastSuballoc.offset == offset)
10378 m_SumFreeSize += lastSuballoc.size;
10379 suballocations1st.pop_back();
10380 CleanupAfterFree();
// Slow path: binary search the 1st vector (sorted ascending by offset).
10387 VmaSuballocation refSuballoc;
10388 refSuballoc.offset = offset;
10390 SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
10391 suballocations1st.begin() + m_1stNullItemsBeginCount,
10392 suballocations1st.end(),
10394 if(it != suballocations1st.end())
10396 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10397 it->hAllocation = VK_NULL_HANDLE;
10398 ++m_1stNullItemsMiddleCount;
10399 m_SumFreeSize += it->size;
10400 CleanupAfterFree();
// Slow path: binary search the 2nd vector — ascending in ring-buffer mode,
// descending in double-stack mode, hence the two comparators.
10405 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
10408 VmaSuballocation refSuballoc;
10409 refSuballoc.offset = offset;
10411 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
10412 VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
10413 VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
10414 if(it != suballocations2nd.end())
10416 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10417 it->hAllocation = VK_NULL_HANDLE;
10418 ++m_2ndNullItemsCount;
10419 m_SumFreeSize += it->size;
10420 CleanupAfterFree();
// Reaching here means the offset matched no live suballocation — a caller bug.
10425 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
10428 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 10430 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10431 const size_t suballocCount = AccessSuballocations1st().size();
10432 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Normalizes internal state after any free: trims null items from the edges
// of both vectors, optionally compacts the 1st vector, and when the 1st
// vector becomes empty promotes the 2nd vector (ring-buffer mode) to be the
// new 1st vector by flipping m_1stVectorIndex.
//
// NOTE(review): the guard around the initial clear() (presumably an
// IsEmpty() check in the original file) and several braces were dropped by
// this extraction.
10435 void VmaBlockMetadata_Linear::CleanupAfterFree()
10437 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10438 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Whole block free: reset everything to the pristine state.
10442 suballocations1st.clear();
10443 suballocations2nd.clear();
10444 m_1stNullItemsBeginCount = 0;
10445 m_1stNullItemsMiddleCount = 0;
10446 m_2ndNullItemsCount = 0;
10447 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10451 const size_t suballoc1stCount = suballocations1st.size();
10452 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10453 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Grow the leading null run over middle null items that now touch it.
10456 while(m_1stNullItemsBeginCount < suballoc1stCount &&
10457 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10459 ++m_1stNullItemsBeginCount;
10460 --m_1stNullItemsMiddleCount;
// Pop trailing null items off the 1st vector.
10464 while(m_1stNullItemsMiddleCount > 0 &&
10465 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
10467 --m_1stNullItemsMiddleCount;
10468 suballocations1st.pop_back();
// Pop trailing null items off the 2nd vector.
10472 while(m_2ndNullItemsCount > 0 &&
10473 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
10475 --m_2ndNullItemsCount;
10476 suballocations2nd.pop_back();
// Remove leading null items of the 2nd vector (shifts elements).
10480 while(m_2ndNullItemsCount > 0 &&
10481 suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
10483 --m_2ndNullItemsCount;
10484 suballocations2nd.remove(0);
// Compact the 1st vector in place when the null/live ratio warrants it.
10487 if(ShouldCompact1st())
10489 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
10490 size_t srcIndex = m_1stNullItemsBeginCount;
10491 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
10493 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
10497 if(dstIndex != srcIndex)
10499 suballocations1st[dstIndex] = suballocations1st[srcIndex];
10503 suballocations1st.resize(nonNullItemCount);
10504 m_1stNullItemsBeginCount = 0;
10505 m_1stNullItemsMiddleCount = 0;
// 2nd vector fully emptied: drop out of ring/stack mode.
10509 if(suballocations2nd.empty())
10511 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector has no live items left.
10515 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
10517 suballocations1st.clear();
10518 m_1stNullItemsBeginCount = 0;
10520 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
// Swap roles: the 2nd vector becomes the new 1st vector.
10523 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10524 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
10525 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
10526 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10528 ++m_1stNullItemsBeginCount;
10529 --m_1stNullItemsMiddleCount;
10531 m_2ndNullItemsCount = 0;
// Flipping the index is what actually exchanges the two vectors.
10532 m_1stVectorIndex ^= 1;
10537 VMA_HEAVY_ASSERT(Validate());
// Constructor: initializes the buddy-allocator metadata with zero
// allocations and empty per-level free lists. Further setup (root node,
// level count) happens in Init().
// NOTE(review): parts of the member-initializer list were dropped by this
// extraction.
10544 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
10545 VmaBlockMetadata(hAllocator),
10547 m_AllocationCount(0),
10551 memset(m_FreeList, 0,
sizeof(m_FreeList));
10554 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
10556 DeleteNode(m_Root);
// Initializes the buddy allocator for a block of `size` bytes. Only the
// largest power-of-two prefix of the block is usable; the level count is
// derived from MAX_LEVELS and MIN_NODE_SIZE, and a single free root node
// covering the whole usable size is created.
// NOTE(review): the loop body incrementing m_LevelCount and the root-node
// assignment to m_Root are on lines dropped by this extraction.
10559 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
10561 VmaBlockMetadata::Init(size);
// Round down to a power of two — the buddy scheme cannot address the rest.
10563 m_UsableSize = VmaPrevPow2(size);
10564 m_SumFreeSize = m_UsableSize;
10568 while(m_LevelCount < MAX_LEVELS &&
10569 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
10574 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
10575 rootNode->offset = 0;
10576 rootNode->type = Node::TYPE_FREE;
10577 rootNode->parent = VMA_NULL;
10578 rootNode->buddy = VMA_NULL;
10581 AddToFreeListFront(0, rootNode);
// Debug validation: recursively checks the whole buddy tree, then verifies
// that the per-level free lists are well-formed doubly-linked lists of FREE
// nodes and that levels beyond m_LevelCount are unused.
10584 bool VmaBlockMetadata_Buddy::Validate()
const 10587 ValidationContext ctx;
10588 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
10590 VMA_VALIDATE(
false &&
"ValidateNode failed.");
// Tree walk must reproduce the cached aggregate counters exactly.
10592 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
10593 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
10596 for(uint32_t level = 0; level < m_LevelCount; ++level)
// Head of each list has no predecessor.
10598 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
10599 m_FreeList[level].front->free.prev == VMA_NULL);
10601 for(Node* node = m_FreeList[level].front;
10603 node = node->free.next)
10605 VMA_VALIDATE(node->type == Node::TYPE_FREE);
10607 if(node->free.next == VMA_NULL)
10609 VMA_VALIDATE(m_FreeList[level].back == node);
// Forward/backward links must agree.
10613 VMA_VALIDATE(node->free.next->free.prev == node);
// Levels past the configured depth must stay empty.
10619 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
10621 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
// Returns the size of the largest free node. Levels are ordered from
// largest node size (level 0) down, so the first non-empty free list wins.
// NOTE(review): the fallback return for a fully allocated block is on a
// line dropped by this extraction.
10627 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const 10629 for(uint32_t level = 0; level < m_LevelCount; ++level)
10631 if(m_FreeList[level].front != VMA_NULL)
10633 return LevelToNodeSize(level);
// Fills outInfo with statistics for this block by recursing over the buddy
// tree; the tail of the block beyond the usable power-of-two prefix is
// accounted as an additional unused range.
// NOTE(review): the outInfo field initialization lines were dropped by this
// extraction.
10639 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 10641 const VkDeviceSize unusableSize = GetUnusableSize();
10652 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
10654 if(unusableSize > 0)
// Accumulates this block's totals into inoutStats. The unusable tail (block
// size minus usable power-of-two size) is reported as unused space.
10663 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const 10665 const VkDeviceSize unusableSize = GetUnusableSize();
10667 inoutStats.
size += GetSize();
// Unused = tracked free bytes plus the permanently unusable tail.
10668 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
10673 if(unusableSize > 0)
// Writes a JSON dump of this buddy block: summary stats, every node of the
// tree (via PrintDetailedMapNode), and the unusable tail as one final
// unused range.
10680 #if VMA_STATS_STRING_ENABLED 10682 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const 10686 CalcAllocationStatInfo(stat);
10688 PrintDetailedMap_Begin(
10694 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
10696 const VkDeviceSize unusableSize = GetUnusableSize();
10697 if(unusableSize > 0)
// The tail past the usable power-of-two prefix is never allocatable.
10699 PrintDetailedMap_UnusedRange(json,
10704 PrintDetailedMap_End(json);
// Finds a free node that can hold (allocSize, allocAlignment) and fills
// *pAllocationRequest, storing the chosen level in customData. Searches
// from the target level toward larger nodes; splitting down to the target
// level happens later in Alloc().
// NOTE(review): the `upperAddress`/`strategy` parameter lines and the
// return statements were dropped by this extraction.
10707 #endif // #if VMA_STATS_STRING_ENABLED 10709 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
10710 uint32_t currentFrameIndex,
10711 uint32_t frameInUseCount,
10712 VkDeviceSize bufferImageGranularity,
10713 VkDeviceSize allocSize,
10714 VkDeviceSize allocAlignment,
10716 VmaSuballocationType allocType,
10717 bool canMakeOtherLost,
10719 VmaAllocationRequest* pAllocationRequest)
10721 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Conservatively widen size/alignment to bufferImageGranularity for
// allocation types whose image/buffer nature is unknown or optimal-tiled,
// since the buddy layout cannot check per-neighbor conflicts.
10725 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
10726 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
10727 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
10729 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
10730 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
10733 if(allocSize > m_UsableSize)
10738 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
// Walk from targetLevel up to level 0 (note: `level--` iterates downward
// through indices, i.e. toward larger node sizes).
10739 for(uint32_t level = targetLevel + 1; level--; )
10741 for(Node* freeNode = m_FreeList[level].front;
10742 freeNode != VMA_NULL;
10743 freeNode = freeNode->free.next)
// Node offsets are power-of-two multiples, so a plain modulo test
// suffices for the alignment requirement.
10745 if(freeNode->offset % allocAlignment == 0)
10747 pAllocationRequest->type = VmaAllocationRequestType::Normal;
10748 pAllocationRequest->offset = freeNode->offset;
10749 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
10750 pAllocationRequest->sumItemSize = 0;
10751 pAllocationRequest->itemsToMakeLostCount = 0;
// Remember which level the chosen node sits at for Alloc().
10752 pAllocationRequest->customData = (
void*)(uintptr_t)level;
// The buddy algorithm does not support lost allocations; a request is only
// satisfiable when it requires making nothing lost.
10761 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
10762 uint32_t currentFrameIndex,
10763 uint32_t frameInUseCount,
10764 VmaAllocationRequest* pAllocationRequest)
10770 return pAllocationRequest->itemsToMakeLostCount == 0;
// Lost allocations are unsupported by the buddy algorithm.
// NOTE(review): the function body (presumably returning 0) is on lines
// dropped by this extraction.
10773 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// Commits a buddy allocation request: locates the free node chosen by
// CreateAllocationRequest (its level came through request.customData),
// splits it repeatedly until a node of the target level is obtained, then
// marks that node as allocated.
// NOTE(review): the `hAllocation` parameter line, `++currLevel`, and
// several braces were dropped by this extraction.
10782 void VmaBlockMetadata_Buddy::Alloc(
10783 const VmaAllocationRequest& request,
10784 VmaSuballocationType type,
10785 VkDeviceSize allocSize,
10788 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
10790 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10791 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Re-find the node with the requested offset in that level's free list.
10793 Node* currNode = m_FreeList[currLevel].front;
10794 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
10795 while(currNode->offset != request.offset)
10797 currNode = currNode->free.next;
10798 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split until the node size matches the target level.
10802 while(currLevel < targetLevel)
10806 RemoveFromFreeList(currLevel, currNode);
10808 const uint32_t childrenLevel = currLevel + 1;
10811 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
10812 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
10814 leftChild->offset = currNode->offset;
10815 leftChild->type = Node::TYPE_FREE;
10816 leftChild->parent = currNode;
10817 leftChild->buddy = rightChild;
10819 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
10820 rightChild->type = Node::TYPE_FREE;
10821 rightChild->parent = currNode;
10822 rightChild->buddy = leftChild;
// The parent becomes a SPLIT node; only leftChild is stored (right is
// reachable as leftChild->buddy).
10825 currNode->type = Node::TYPE_SPLIT;
10826 currNode->split.leftChild = leftChild;
// Left is pushed last so it ends up at the list front and is picked next.
10829 AddToFreeListFront(childrenLevel, rightChild);
10830 AddToFreeListFront(childrenLevel, leftChild);
10835 currNode = m_FreeList[currLevel].front;
10844 VMA_ASSERT(currLevel == targetLevel &&
10845 currNode != VMA_NULL &&
10846 currNode->type == Node::TYPE_FREE);
10847 RemoveFromFreeList(currLevel, currNode);
10850 currNode->type = Node::TYPE_ALLOCATION;
10851 currNode->allocation.alloc = hAllocation;
10853 ++m_AllocationCount;
10855 m_SumFreeSize -= allocSize;
10858 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
10860 if(node->type == Node::TYPE_SPLIT)
10862 DeleteNode(node->split.leftChild->buddy);
10863 DeleteNode(node->split.leftChild);
10866 vma_delete(GetAllocationCallbacks(), node);
// Recursive invariant check for one buddy-tree node: parent/buddy linkage,
// then per-type checks — FREE nodes contribute their level size to the free
// total, ALLOCATION nodes must carry a valid handle, SPLIT nodes recurse
// into both children and verify their offsets.
// NOTE(review): the `switch(curr->type)` line and `break`s are on lines
// dropped by this extraction.
10869 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const 10871 VMA_VALIDATE(level < m_LevelCount);
10872 VMA_VALIDATE(curr->parent == parent);
// Exactly the root has no buddy.
10873 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
10874 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
10877 case Node::TYPE_FREE:
10879 ctx.calculatedSumFreeSize += levelNodeSize;
10880 ++ctx.calculatedFreeCount;
10882 case Node::TYPE_ALLOCATION:
10883 ++ctx.calculatedAllocationCount;
// Slack between node size and actual allocation size counts as free.
10884 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
10885 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
10887 case Node::TYPE_SPLIT:
10889 const uint32_t childrenLevel = level + 1;
10890 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
10891 const Node*
const leftChild = curr->split.leftChild;
10892 VMA_VALIDATE(leftChild != VMA_NULL);
// Left child starts where the parent starts ...
10893 VMA_VALIDATE(leftChild->offset == curr->offset);
10894 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
10896 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
10898 const Node*
const rightChild = leftChild->buddy;
// ... and the right child starts exactly one half-node further.
10899 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
10900 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
10902 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
10913 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const 10916 uint32_t level = 0;
10917 VkDeviceSize currLevelNodeSize = m_UsableSize;
10918 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
10919 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
10922 currLevelNodeSize = nextLevelNodeSize;
10923 nextLevelNodeSize = currLevelNodeSize >> 1;
10928 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
10931 Node* node = m_Root;
10932 VkDeviceSize nodeOffset = 0;
10933 uint32_t level = 0;
10934 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
10935 while(node->type == Node::TYPE_SPLIT)
10937 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
10938 if(offset < nodeOffset + nextLevelSize)
10940 node = node->split.leftChild;
10944 node = node->split.leftChild->buddy;
10945 nodeOffset += nextLevelSize;
10948 levelNodeSize = nextLevelSize;
10951 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
10952 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
10955 --m_AllocationCount;
10956 m_SumFreeSize += alloc->GetSize();
10958 node->type = Node::TYPE_FREE;
10961 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
10963 RemoveFromFreeList(level, node->buddy);
10964 Node*
const parent = node->parent;
10966 vma_delete(GetAllocationCallbacks(), node->buddy);
10967 vma_delete(GetAllocationCallbacks(), node);
10968 parent->type = Node::TYPE_FREE;
10976 AddToFreeListFront(level, node);
10979 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const 10983 case Node::TYPE_FREE:
10989 case Node::TYPE_ALLOCATION:
10991 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
10997 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
10998 if(unusedRangeSize > 0)
11007 case Node::TYPE_SPLIT:
11009 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11010 const Node*
const leftChild = node->split.leftChild;
11011 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
11012 const Node*
const rightChild = leftChild->buddy;
11013 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
11021 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
11023 VMA_ASSERT(node->type == Node::TYPE_FREE);
11026 Node*
const frontNode = m_FreeList[level].front;
11027 if(frontNode == VMA_NULL)
11029 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
11030 node->free.prev = node->free.next = VMA_NULL;
11031 m_FreeList[level].front = m_FreeList[level].back = node;
11035 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
11036 node->free.prev = VMA_NULL;
11037 node->free.next = frontNode;
11038 frontNode->free.prev = node;
11039 m_FreeList[level].front = node;
11043 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
11045 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
11048 if(node->free.prev == VMA_NULL)
11050 VMA_ASSERT(m_FreeList[level].front == node);
11051 m_FreeList[level].front = node->free.next;
11055 Node*
const prevFreeNode = node->free.prev;
11056 VMA_ASSERT(prevFreeNode->free.next == node);
11057 prevFreeNode->free.next = node->free.next;
11061 if(node->free.next == VMA_NULL)
11063 VMA_ASSERT(m_FreeList[level].back == node);
11064 m_FreeList[level].back = node->free.prev;
11068 Node*
const nextFreeNode = node->free.next;
11069 VMA_ASSERT(nextFreeNode->free.prev == node);
11070 nextFreeNode->free.prev = node->free.prev;
11074 #if VMA_STATS_STRING_ENABLED 11075 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const 11079 case Node::TYPE_FREE:
11080 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
11082 case Node::TYPE_ALLOCATION:
11084 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
11085 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
11086 if(allocSize < levelNodeSize)
11088 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
11092 case Node::TYPE_SPLIT:
11094 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11095 const Node*
const leftChild = node->split.leftChild;
11096 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
11097 const Node*
const rightChild = leftChild->buddy;
11098 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
11105 #endif // #if VMA_STATS_STRING_ENABLED 11111 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
11112 m_pMetadata(VMA_NULL),
11113 m_MemoryTypeIndex(UINT32_MAX),
11115 m_hMemory(VK_NULL_HANDLE),
11117 m_pMappedData(VMA_NULL)
11121 void VmaDeviceMemoryBlock::Init(
11124 uint32_t newMemoryTypeIndex,
11125 VkDeviceMemory newMemory,
11126 VkDeviceSize newSize,
11128 uint32_t algorithm)
11130 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
11132 m_hParentPool = hParentPool;
11133 m_MemoryTypeIndex = newMemoryTypeIndex;
11135 m_hMemory = newMemory;
11140 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
11143 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
11149 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
11151 m_pMetadata->Init(newSize);
11154 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
11158 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
11160 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
11161 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
11162 m_hMemory = VK_NULL_HANDLE;
11164 vma_delete(allocator, m_pMetadata);
11165 m_pMetadata = VMA_NULL;
11168 bool VmaDeviceMemoryBlock::Validate()
const 11170 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
11171 (m_pMetadata->GetSize() != 0));
11173 return m_pMetadata->Validate();
11176 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
11178 void* pData =
nullptr;
11179 VkResult res = Map(hAllocator, 1, &pData);
11180 if(res != VK_SUCCESS)
11185 res = m_pMetadata->CheckCorruption(pData);
11187 Unmap(hAllocator, 1);
11192 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
11199 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11200 if(m_MapCount != 0)
11202 m_MapCount += count;
11203 VMA_ASSERT(m_pMappedData != VMA_NULL);
11204 if(ppData != VMA_NULL)
11206 *ppData = m_pMappedData;
11212 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
11213 hAllocator->m_hDevice,
11219 if(result == VK_SUCCESS)
11221 if(ppData != VMA_NULL)
11223 *ppData = m_pMappedData;
11225 m_MapCount = count;
11231 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
11238 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11239 if(m_MapCount >= count)
11241 m_MapCount -= count;
11242 if(m_MapCount == 0)
11244 m_pMappedData = VMA_NULL;
11245 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
11250 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
11254 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11256 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11257 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11260 VkResult res = Map(hAllocator, 1, &pData);
11261 if(res != VK_SUCCESS)
11266 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
11267 VmaWriteMagicValue(pData, allocOffset + allocSize);
11269 Unmap(hAllocator, 1);
11274 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11276 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11277 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11280 VkResult res = Map(hAllocator, 1, &pData);
11281 if(res != VK_SUCCESS)
11286 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
11288 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
11290 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
11292 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
11295 Unmap(hAllocator, 1);
11300 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
11305 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11306 hAllocation->GetBlock() ==
this);
11308 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11309 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
11310 hAllocator->m_hDevice,
11313 hAllocation->GetOffset());
11316 VkResult VmaDeviceMemoryBlock::BindImageMemory(
11321 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11322 hAllocation->GetBlock() ==
this);
11324 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11325 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
11326 hAllocator->m_hDevice,
11329 hAllocation->GetOffset());
11334 memset(&outInfo, 0,
sizeof(outInfo));
11353 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
11361 VmaPool_T::VmaPool_T(
11364 VkDeviceSize preferredBlockSize) :
11368 createInfo.memoryTypeIndex,
11369 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
11370 createInfo.minBlockCount,
11371 createInfo.maxBlockCount,
11373 createInfo.frameInUseCount,
11375 createInfo.blockSize != 0,
11381 VmaPool_T::~VmaPool_T()
11385 #if VMA_STATS_STRING_ENABLED 11387 #endif // #if VMA_STATS_STRING_ENABLED 11389 VmaBlockVector::VmaBlockVector(
11392 uint32_t memoryTypeIndex,
11393 VkDeviceSize preferredBlockSize,
11394 size_t minBlockCount,
11395 size_t maxBlockCount,
11396 VkDeviceSize bufferImageGranularity,
11397 uint32_t frameInUseCount,
11399 bool explicitBlockSize,
11400 uint32_t algorithm) :
11401 m_hAllocator(hAllocator),
11402 m_hParentPool(hParentPool),
11403 m_MemoryTypeIndex(memoryTypeIndex),
11404 m_PreferredBlockSize(preferredBlockSize),
11405 m_MinBlockCount(minBlockCount),
11406 m_MaxBlockCount(maxBlockCount),
11407 m_BufferImageGranularity(bufferImageGranularity),
11408 m_FrameInUseCount(frameInUseCount),
11409 m_IsCustomPool(isCustomPool),
11410 m_ExplicitBlockSize(explicitBlockSize),
11411 m_Algorithm(algorithm),
11412 m_HasEmptyBlock(false),
11413 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
11418 VmaBlockVector::~VmaBlockVector()
11420 for(
size_t i = m_Blocks.size(); i--; )
11422 m_Blocks[i]->Destroy(m_hAllocator);
11423 vma_delete(m_hAllocator, m_Blocks[i]);
11427 VkResult VmaBlockVector::CreateMinBlocks()
11429 for(
size_t i = 0; i < m_MinBlockCount; ++i)
11431 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
11432 if(res != VK_SUCCESS)
11440 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
11442 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
11444 const size_t blockCount = m_Blocks.size();
11453 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11455 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11456 VMA_ASSERT(pBlock);
11457 VMA_HEAVY_ASSERT(pBlock->Validate());
11458 pBlock->m_pMetadata->AddPoolStats(*pStats);
11462 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 11464 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
11465 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
11466 (VMA_DEBUG_MARGIN > 0) &&
11468 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
11471 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
11473 VkResult VmaBlockVector::Allocate(
11474 uint32_t currentFrameIndex,
11476 VkDeviceSize alignment,
11478 VmaSuballocationType suballocType,
11479 size_t allocationCount,
11483 VkResult res = VK_SUCCESS;
11485 if(IsCorruptionDetectionEnabled())
11487 size = VmaAlignUp<VkDeviceSize>(size,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
11488 alignment = VmaAlignUp<VkDeviceSize>(alignment,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
11492 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11493 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
11495 res = AllocatePage(
11501 pAllocations + allocIndex);
11502 if(res != VK_SUCCESS)
11509 if(res != VK_SUCCESS)
11512 while(allocIndex--)
11514 Free(pAllocations[allocIndex]);
11516 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
11522 VkResult VmaBlockVector::AllocatePage(
11523 uint32_t currentFrameIndex,
11525 VkDeviceSize alignment,
11527 VmaSuballocationType suballocType,
11534 const bool canCreateNewBlock =
11536 (m_Blocks.size() < m_MaxBlockCount);
11543 canMakeOtherLost =
false;
11547 if(isUpperAddress &&
11550 return VK_ERROR_FEATURE_NOT_PRESENT;
11564 return VK_ERROR_FEATURE_NOT_PRESENT;
11568 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
11570 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11578 if(!canMakeOtherLost || canCreateNewBlock)
11587 if(!m_Blocks.empty())
11589 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
11590 VMA_ASSERT(pCurrBlock);
11591 VkResult res = AllocateFromBlock(
11601 if(res == VK_SUCCESS)
11603 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
11613 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11615 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11616 VMA_ASSERT(pCurrBlock);
11617 VkResult res = AllocateFromBlock(
11627 if(res == VK_SUCCESS)
11629 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
11637 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11639 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11640 VMA_ASSERT(pCurrBlock);
11641 VkResult res = AllocateFromBlock(
11651 if(res == VK_SUCCESS)
11653 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
11661 if(canCreateNewBlock)
11664 VkDeviceSize newBlockSize = m_PreferredBlockSize;
11665 uint32_t newBlockSizeShift = 0;
11666 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
11668 if(!m_ExplicitBlockSize)
11671 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
11672 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
11674 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11675 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
11677 newBlockSize = smallerNewBlockSize;
11678 ++newBlockSizeShift;
11687 size_t newBlockIndex = 0;
11688 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
11690 if(!m_ExplicitBlockSize)
11692 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
11694 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11695 if(smallerNewBlockSize >= size)
11697 newBlockSize = smallerNewBlockSize;
11698 ++newBlockSizeShift;
11699 res = CreateBlock(newBlockSize, &newBlockIndex);
11708 if(res == VK_SUCCESS)
11710 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
11711 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
11713 res = AllocateFromBlock(
11723 if(res == VK_SUCCESS)
11725 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
11731 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11738 if(canMakeOtherLost)
11740 uint32_t tryIndex = 0;
11741 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
11743 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
11744 VmaAllocationRequest bestRequest = {};
11745 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
11751 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11753 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11754 VMA_ASSERT(pCurrBlock);
11755 VmaAllocationRequest currRequest = {};
11756 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11759 m_BufferImageGranularity,
11768 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11769 if(pBestRequestBlock == VMA_NULL ||
11770 currRequestCost < bestRequestCost)
11772 pBestRequestBlock = pCurrBlock;
11773 bestRequest = currRequest;
11774 bestRequestCost = currRequestCost;
11776 if(bestRequestCost == 0)
11787 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11789 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11790 VMA_ASSERT(pCurrBlock);
11791 VmaAllocationRequest currRequest = {};
11792 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11795 m_BufferImageGranularity,
11804 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11805 if(pBestRequestBlock == VMA_NULL ||
11806 currRequestCost < bestRequestCost ||
11809 pBestRequestBlock = pCurrBlock;
11810 bestRequest = currRequest;
11811 bestRequestCost = currRequestCost;
11813 if(bestRequestCost == 0 ||
11823 if(pBestRequestBlock != VMA_NULL)
11827 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
11828 if(res != VK_SUCCESS)
11834 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
11840 if(pBestRequestBlock->m_pMetadata->IsEmpty())
11842 m_HasEmptyBlock =
false;
11845 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
11846 (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
11847 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
11848 (*pAllocation)->InitBlockAllocation(
11850 bestRequest.offset,
11856 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
11857 VMA_DEBUG_LOG(
" Returned from existing block");
11858 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
11859 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11861 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
11863 if(IsCorruptionDetectionEnabled())
11865 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
11866 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
11881 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
11883 return VK_ERROR_TOO_MANY_OBJECTS;
11887 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11890 void VmaBlockVector::Free(
11893 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
11897 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11899 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
11901 if(IsCorruptionDetectionEnabled())
11903 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
11904 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
11907 if(hAllocation->IsPersistentMap())
11909 pBlock->Unmap(m_hAllocator, 1);
11912 pBlock->m_pMetadata->Free(hAllocation);
11913 VMA_HEAVY_ASSERT(pBlock->Validate());
11915 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
11918 if(pBlock->m_pMetadata->IsEmpty())
11921 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
11923 pBlockToDelete = pBlock;
11929 m_HasEmptyBlock =
true;
11934 else if(m_HasEmptyBlock)
11936 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
11937 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
11939 pBlockToDelete = pLastBlock;
11940 m_Blocks.pop_back();
11941 m_HasEmptyBlock =
false;
11945 IncrementallySortBlocks();
11950 if(pBlockToDelete != VMA_NULL)
11952 VMA_DEBUG_LOG(
" Deleted empty allocation");
11953 pBlockToDelete->Destroy(m_hAllocator);
11954 vma_delete(m_hAllocator, pBlockToDelete);
11958 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 11960 VkDeviceSize result = 0;
11961 for(
size_t i = m_Blocks.size(); i--; )
11963 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
11964 if(result >= m_PreferredBlockSize)
11972 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
11974 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11976 if(m_Blocks[blockIndex] == pBlock)
11978 VmaVectorRemove(m_Blocks, blockIndex);
11985 void VmaBlockVector::IncrementallySortBlocks()
11990 for(
size_t i = 1; i < m_Blocks.size(); ++i)
11992 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
11994 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
12001 VkResult VmaBlockVector::AllocateFromBlock(
12002 VmaDeviceMemoryBlock* pBlock,
12003 uint32_t currentFrameIndex,
12005 VkDeviceSize alignment,
12008 VmaSuballocationType suballocType,
12017 VmaAllocationRequest currRequest = {};
12018 if(pBlock->m_pMetadata->CreateAllocationRequest(
12021 m_BufferImageGranularity,
12031 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
12035 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
12036 if(res != VK_SUCCESS)
12043 if(pBlock->m_pMetadata->IsEmpty())
12045 m_HasEmptyBlock =
false;
12048 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
12049 (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
12050 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
12051 (*pAllocation)->InitBlockAllocation(
12053 currRequest.offset,
12059 VMA_HEAVY_ASSERT(pBlock->Validate());
12060 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
12061 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12063 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
12065 if(IsCorruptionDetectionEnabled())
12067 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
12068 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
12072 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12075 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
12077 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
12078 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
12079 allocInfo.allocationSize = blockSize;
12080 VkDeviceMemory mem = VK_NULL_HANDLE;
12081 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
12090 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
12096 allocInfo.allocationSize,
12100 m_Blocks.push_back(pBlock);
12101 if(pNewBlockIndex != VMA_NULL)
12103 *pNewBlockIndex = m_Blocks.size() - 1;
12109 void VmaBlockVector::ApplyDefragmentationMovesCpu(
12110 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12111 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
12113 const size_t blockCount = m_Blocks.size();
12114 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
12118 BLOCK_FLAG_USED = 0x00000001,
12119 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
12127 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
12128 blockInfo(blockCount, VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
12129 memset(blockInfo.data(), 0, blockCount *
sizeof(BlockInfo));
12132 const size_t moveCount = moves.size();
12133 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12135 const VmaDefragmentationMove& move = moves[moveIndex];
12136 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
12137 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
12140 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
12143 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12145 BlockInfo& currBlockInfo = blockInfo[blockIndex];
12146 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12147 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
12149 currBlockInfo.pMappedData = pBlock->GetMappedData();
12151 if(currBlockInfo.pMappedData == VMA_NULL)
12153 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
12154 if(pDefragCtx->res == VK_SUCCESS)
12156 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
12163 if(pDefragCtx->res == VK_SUCCESS)
12165 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
12166 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
12168 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12170 const VmaDefragmentationMove& move = moves[moveIndex];
12172 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
12173 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
12175 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
12180 VmaDeviceMemoryBlock*
const pSrcBlock = m_Blocks[move.srcBlockIndex];
12181 memRange.memory = pSrcBlock->GetDeviceMemory();
12182 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
12183 memRange.size = VMA_MIN(
12184 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
12185 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
12186 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
12191 reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
12192 reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
12193 static_cast<size_t>(move.size));
12195 if(IsCorruptionDetectionEnabled())
12197 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
12198 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
12204 VmaDeviceMemoryBlock*
const pDstBlock = m_Blocks[move.dstBlockIndex];
12205 memRange.memory = pDstBlock->GetDeviceMemory();
12206 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
12207 memRange.size = VMA_MIN(
12208 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
12209 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
12210 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
12217 for(
size_t blockIndex = blockCount; blockIndex--; )
12219 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
12220 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
12222 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12223 pBlock->Unmap(m_hAllocator, 1);
12228 void VmaBlockVector::ApplyDefragmentationMovesGpu(
12229 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12230 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12231 VkCommandBuffer commandBuffer)
12233 const size_t blockCount = m_Blocks.size();
12235 pDefragCtx->blockContexts.resize(blockCount);
12236 memset(pDefragCtx->blockContexts.data(), 0, blockCount *
sizeof(VmaBlockDefragmentationContext));
12239 const size_t moveCount = moves.size();
12240 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12242 const VmaDefragmentationMove& move = moves[moveIndex];
12243 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12244 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12247 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
12251 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
12252 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
12253 VK_BUFFER_USAGE_TRANSFER_DST_BIT;
12255 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12257 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
12258 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12259 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
12261 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
12262 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
12263 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
12264 if(pDefragCtx->res == VK_SUCCESS)
12266 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
12267 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
12274 if(pDefragCtx->res == VK_SUCCESS)
12276 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
12277 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
12279 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12281 const VmaDefragmentationMove& move = moves[moveIndex];
12283 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
12284 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
12286 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
12288 VkBufferCopy region = {
12292 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
12293 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, ®ion);
12298 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
12300 pDefragCtx->res = VK_NOT_READY;
12306 m_HasEmptyBlock =
false;
12307 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
12309 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12310 if(pBlock->m_pMetadata->IsEmpty())
12312 if(m_Blocks.size() > m_MinBlockCount)
12314 if(pDefragmentationStats != VMA_NULL)
12317 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
12320 VmaVectorRemove(m_Blocks, blockIndex);
12321 pBlock->Destroy(m_hAllocator);
12322 vma_delete(m_hAllocator, pBlock);
12326 m_HasEmptyBlock =
true;
// Serializes this block vector's configuration and per-block contents as JSON.
// Takes a read lock; the two halves of the branch structure (custom pool vs.
// default pool fields) were partially dropped in this copy — TODO confirm
// against the original file.
12332 #if VMA_STATS_STRING_ENABLED 12334 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
12336 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12338 json.BeginObject();
12342 json.WriteString(
"MemoryTypeIndex");
12343 json.WriteNumber(m_MemoryTypeIndex);
12345 json.WriteString(
"BlockSize");
12346 json.WriteNumber(m_PreferredBlockSize);
12348 json.WriteString(
"BlockCount");
12349 json.BeginObject(
true);
12350 if(m_MinBlockCount > 0)
12352 json.WriteString(
"Min");
12353 json.WriteNumber((uint64_t)m_MinBlockCount);
12355 if(m_MaxBlockCount < SIZE_MAX)
12357 json.WriteString(
"Max");
12358 json.WriteNumber((uint64_t)m_MaxBlockCount);
12360 json.WriteString(
"Cur");
12361 json.WriteNumber((uint64_t)m_Blocks.size());
12364 if(m_FrameInUseCount > 0)
12366 json.WriteString(
"FrameInUseCount");
12367 json.WriteNumber(m_FrameInUseCount);
12370 if(m_Algorithm != 0)
12372 json.WriteString(
"Algorithm");
12373 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
12378 json.WriteString(
"PreferredBlockSize");
12379 json.WriteNumber(m_PreferredBlockSize);
12382 json.WriteString(
"Blocks");
12383 json.BeginObject();
// Each block is keyed by its numeric id; its metadata prints the suballocation map.
12384 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12386 json.BeginString();
12387 json.ContinueString(m_Blocks[i]->GetId());
12390 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// Runs one defragmentation pass over this block vector.
// Chooses CPU (memcpy through mapped memory) or GPU (vkCmdCopyBuffer) strategy,
// asks the context's algorithm for a list of moves, decrements the caller's
// remaining byte/allocation budgets, then applies the moves.
12397 #endif // #if VMA_STATS_STRING_ENABLED 12399 void VmaBlockVector::Defragment(
12400 class VmaBlockVectorDefragmentationContext* pCtx,
12402 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
12403 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
12404 VkCommandBuffer commandBuffer)
12406 pCtx->res = VK_SUCCESS;
12408 const VkMemoryPropertyFlags memPropFlags =
12409 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
12410 const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
12411 const bool isHostCoherent = (memPropFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0;
// CPU path requires host-visible memory (condition line partially dropped in this copy).
12413 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
12415 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
12416 !IsCorruptionDetectionEnabled();
12419 if(canDefragmentOnCpu || canDefragmentOnGpu)
12421 bool defragmentOnGpu;
// Exactly one strategy available: take it. Otherwise prefer GPU for
// device-local memory or integrated GPUs.
12423 if(canDefragmentOnGpu != canDefragmentOnCpu)
12425 defragmentOnGpu = canDefragmentOnGpu;
12430 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
12431 m_hAllocator->IsIntegratedGpu();
// GPU copies of overlapping ranges are not allowed; CPU memmove-style copies are.
12434 bool overlappingMoveSupported = !defragmentOnGpu;
12436 if(m_hAllocator->m_UseMutex)
12438 m_Mutex.LockWrite();
12439 pCtx->mutexLocked =
true;
12442 pCtx->Begin(overlappingMoveSupported);
12446 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
12447 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
12448 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves =
12449 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()));
12450 pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);
// Charge what was actually moved against the caller's remaining budget.
12453 if(pStats != VMA_NULL)
12455 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
12456 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
12459 VMA_ASSERT(bytesMoved <= maxBytesToMove);
12460 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
12461 if(defragmentOnGpu)
12463 maxGpuBytesToMove -= bytesMoved;
12464 maxGpuAllocationsToMove -= allocationsMoved;
12468 maxCpuBytesToMove -= bytesMoved;
12469 maxCpuAllocationsToMove -= allocationsMoved;
12473 if(pCtx->res >= VK_SUCCESS)
12475 if(defragmentOnGpu)
12477 ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
12481 ApplyDefragmentationMovesCpu(pCtx, moves);
// Finalizes a defragmentation pass: destroys the temporary VkBuffers created
// for GPU copies, frees blocks emptied by the pass (on success), and releases
// the write lock taken in Defragment().
12487 void VmaBlockVector::DefragmentationEnd(
12488 class VmaBlockVectorDefragmentationContext* pCtx,
// Destroy in reverse order of creation.
12492 for(
size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
12494 VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
12495 if(blockCtx.hBuffer)
12497 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
12498 m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
12502 if(pCtx->res >= VK_SUCCESS)
12504 FreeEmptyBlocks(pStats);
// Unlock only if Defragment() actually locked (mutex use is optional).
12507 if(pCtx->mutexLocked)
12509 VMA_ASSERT(m_hAllocator->m_UseMutex);
12510 m_Mutex.UnlockWrite();
12514 size_t VmaBlockVector::CalcAllocationCount()
const 12517 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12519 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
12524 bool VmaBlockVector::IsBufferImageGranularityConflictPossible()
const 12526 if(m_BufferImageGranularity == 1)
12530 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
12531 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
12533 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[i];
12534 VMA_ASSERT(m_Algorithm == 0);
12535 VmaBlockMetadata_Generic*
const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
12536 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
12544 void VmaBlockVector::MakePoolAllocationsLost(
12545 uint32_t currentFrameIndex,
12546 size_t* pLostAllocationCount)
12548 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12549 size_t lostAllocationCount = 0;
12550 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12552 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12553 VMA_ASSERT(pBlock);
12554 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
12556 if(pLostAllocationCount != VMA_NULL)
12558 *pLostAllocationCount = lostAllocationCount;
12562 VkResult VmaBlockVector::CheckCorruption()
12564 if(!IsCorruptionDetectionEnabled())
12566 return VK_ERROR_FEATURE_NOT_PRESENT;
12569 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12570 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12572 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12573 VMA_ASSERT(pBlock);
12574 VkResult res = pBlock->CheckCorruption(m_hAllocator);
12575 if(res != VK_SUCCESS)
12583 void VmaBlockVector::AddStats(
VmaStats* pStats)
12585 const uint32_t memTypeIndex = m_MemoryTypeIndex;
12586 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
12588 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12590 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12592 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12593 VMA_ASSERT(pBlock);
12594 VMA_HEAVY_ASSERT(pBlock->Validate());
12596 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
12597 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12598 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12599 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
12606 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
12608 VmaBlockVector* pBlockVector,
12609 uint32_t currentFrameIndex,
12610 bool overlappingMoveSupported) :
12611 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12612 m_AllAllocations(false),
12613 m_AllocationCount(0),
12615 m_AllocationsMoved(0),
12616 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
12619 const size_t blockCount = m_pBlockVector->m_Blocks.size();
12620 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12622 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
12623 pBlockInfo->m_OriginalBlockIndex = blockIndex;
12624 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
12625 m_Blocks.push_back(pBlockInfo);
12629 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
12632 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
12634 for(
size_t i = m_Blocks.size(); i--; )
12636 vma_delete(m_hAllocator, m_Blocks[i]);
12640 void VmaDefragmentationAlgorithm_Generic::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
12643 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
12645 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
12646 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
12647 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
12649 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
12650 (*it)->m_Allocations.push_back(allocInfo);
12657 ++m_AllocationCount;
// One round of the generic defragmentation algorithm: repeatedly takes the
// last candidate allocation from the last non-empty block and tries to place
// it earlier (lower block index / lower offset), appending a move record for
// each successful relocation until the byte/allocation budget is exhausted.
// NOTE(review): several control-flow lines (returns, else branches, braces)
// were dropped from this copy — confirm against the original before editing.
12661 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
12662 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12663 VkDeviceSize maxBytesToMove,
12664 uint32_t maxAllocationsToMove)
12666 if(m_Blocks.empty())
12679 size_t srcBlockMinIndex = 0;
// Start from the last allocation of the last block and walk backwards.
12692 size_t srcBlockIndex = m_Blocks.size() - 1;
12693 size_t srcAllocIndex = SIZE_MAX;
12699 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
12701 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
12704 if(srcBlockIndex == srcBlockMinIndex)
12711 srcAllocIndex = SIZE_MAX;
12716 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
12720 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
12721 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
12723 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
12724 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
12725 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
12726 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try each destination block from the front up to (and including) the source block.
12729 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
12731 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
12732 VmaAllocationRequest dstAllocRequest;
12733 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
12734 m_CurrentFrameIndex,
12735 m_pBlockVector->GetFrameInUseCount(),
12736 m_pBlockVector->GetBufferImageGranularity(),
12743 &dstAllocRequest) &&
12745 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
12747 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check: stop the round when limits would be exceeded.
12750 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
12751 (m_BytesMoved + size > maxBytesToMove))
12756 VmaDefragmentationMove move;
12757 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
12758 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
12759 move.srcOffset = srcOffset;
12760 move.dstOffset = dstAllocRequest.offset;
12762 moves.push_back(move);
// Commit the move in metadata and re-point the allocation at its new block/offset.
12764 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
12768 allocInfo.m_hAllocation);
12769 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
12771 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
12773 if(allocInfo.m_pChanged != VMA_NULL)
12775 *allocInfo.m_pChanged = VK_TRUE;
12778 ++m_AllocationsMoved;
12779 m_BytesMoved += size;
12781 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the previous candidate (previous allocation, or previous block).
12789 if(srcAllocIndex > 0)
12795 if(srcBlockIndex > 0)
12798 srcAllocIndex = SIZE_MAX;
12808 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount()
const 12811 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12813 if(m_Blocks[i]->m_HasNonMovableAllocations)
// Entry point of the generic algorithm: populates per-block candidate lists
// (either all allocations or only those added via AddAllocation), sorts
// blocks by suitability as a move destination, then runs up to two rounds of
// DefragmentRound. Early-out lines dropped from this copy — TODO confirm.
12821 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
12822 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12823 VkDeviceSize maxBytesToMove,
12824 uint32_t maxAllocationsToMove)
// Nothing registered to move: nothing to do.
12826 if(!m_AllAllocations && m_AllocationCount == 0)
12831 const size_t blockCount = m_Blocks.size();
12832 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12834 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
// In "all allocations" mode, harvest every non-free suballocation from metadata.
12836 if(m_AllAllocations)
12838 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
12839 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
12840 it != pMetadata->m_Suballocations.end();
12843 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
12845 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
12846 pBlockInfo->m_Allocations.push_back(allocInfo);
12851 pBlockInfo->CalcHasNonMovableAllocations();
// Process candidates back-to-front within each block.
12855 pBlockInfo->SortAllocationsByOffsetDescending();
12861 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Execute defragmentation rounds (the default is 2).
12864 const uint32_t roundCount = 2;
12867 VkResult result = VK_SUCCESS;
12868 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
12870 result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
12876 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
12877 size_t dstBlockIndex, VkDeviceSize dstOffset,
12878 size_t srcBlockIndex, VkDeviceSize srcOffset)
12880 if(dstBlockIndex < srcBlockIndex)
12884 if(dstBlockIndex > srcBlockIndex)
12888 if(dstOffset < srcOffset)
12898 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
12900 VmaBlockVector* pBlockVector,
12901 uint32_t currentFrameIndex,
12902 bool overlappingMoveSupported) :
12903 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12904 m_OverlappingMoveSupported(overlappingMoveSupported),
12905 m_AllocationCount(0),
12906 m_AllAllocations(false),
12908 m_AllocationsMoved(0),
12909 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
12911 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
12915 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
// Fast algorithm: compacts all allocations towards the front of the block
// list in a single sweep, maintaining a small database of free gaps
// (freeSpaceDb) that later allocations can be dropped into. Operates directly
// on VmaBlockMetadata_Generic internals between Preprocess/PostprocessMetadata.
// NOTE(review): many brace/else/brief lines were dropped from this copy —
// confirm control flow against the original before editing.
12919 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
12920 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12921 VkDeviceSize maxBytesToMove,
12922 uint32_t maxAllocationsToMove)
12924 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
12926 const size_t blockCount = m_pBlockVector->GetBlockCount();
12927 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
12932 PreprocessMetadata();
// Sort blocks ascending by amount of free space: fullest blocks become
// destinations first.
12936 m_BlockInfos.resize(blockCount);
12937 for(
size_t i = 0; i < blockCount; ++i)
12939 m_BlockInfos[i].origBlockIndex = i;
12942 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
12943 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
12944 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
12949 FreeSpaceDatabase freeSpaceDb;
// Destination cursor: block + running offset where the next allocation lands.
12951 size_t dstBlockInfoIndex = 0;
12952 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
12953 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
12954 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
12955 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
12956 VkDeviceSize dstOffset = 0;
12959 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
12961 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
12962 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
12963 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
12964 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
12965 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
12967 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
12968 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
12969 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
// Stop the whole sweep when the move budget is exhausted.
12970 if(m_AllocationsMoved == maxAllocationsToMove ||
12971 m_BytesMoved + srcAllocSize > maxBytesToMove)
12976 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
// First preference: fill a previously-registered free gap.
12979 size_t freeSpaceInfoIndex;
12980 VkDeviceSize dstAllocOffset;
12981 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
12982 freeSpaceInfoIndex, dstAllocOffset))
12984 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
12985 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
12986 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
// Same block: only the offset changes.
12989 if(freeSpaceInfoIndex == srcBlockInfoIndex)
12991 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
12995 VmaSuballocation suballoc = *srcSuballocIt;
12996 suballoc.offset = dstAllocOffset;
12997 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
12998 m_BytesMoved += srcAllocSize;
12999 ++m_AllocationsMoved;
13001 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13003 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13004 srcSuballocIt = nextSuballocIt;
13006 InsertSuballoc(pFreeSpaceMetadata, suballoc);
13008 VmaDefragmentationMove move = {
13009 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
13010 srcAllocOffset, dstAllocOffset,
13012 moves.push_back(move);
// Different (earlier) block: relocate the allocation to the gap's block.
13019 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
13021 VmaSuballocation suballoc = *srcSuballocIt;
13022 suballoc.offset = dstAllocOffset;
13023 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
13024 m_BytesMoved += srcAllocSize;
13025 ++m_AllocationsMoved;
13027 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13029 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13030 srcSuballocIt = nextSuballocIt;
13032 InsertSuballoc(pFreeSpaceMetadata, suballoc);
13034 VmaDefragmentationMove move = {
13035 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
13036 srcAllocOffset, dstAllocOffset,
13038 moves.push_back(move);
// No gap fits: place at the current destination cursor.
13043 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
// Advance destination block while the allocation doesn't fit; register the
// leftover tail of each exhausted block as a free gap.
13046 while(dstBlockInfoIndex < srcBlockInfoIndex &&
13047 dstAllocOffset + srcAllocSize > dstBlockSize)
13050 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
13052 ++dstBlockInfoIndex;
13053 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
13054 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
13055 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
13056 dstBlockSize = pDstMetadata->GetSize();
13058 dstAllocOffset = 0;
// Same block as source: move only when the overlap heuristic says it's cheap.
13062 if(dstBlockInfoIndex == srcBlockInfoIndex)
13064 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
13066 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
13068 bool skipOver = overlap;
13069 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
// Heuristic: moving doesn't pay off when the shift is tiny relative to size.
13073 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
13078 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
13080 dstOffset = srcAllocOffset + srcAllocSize;
13086 srcSuballocIt->offset = dstAllocOffset;
13087 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
13088 dstOffset = dstAllocOffset + srcAllocSize;
13089 m_BytesMoved += srcAllocSize;
13090 ++m_AllocationsMoved;
13092 VmaDefragmentationMove move = {
13093 srcOrigBlockIndex, dstOrigBlockIndex,
13094 srcAllocOffset, dstAllocOffset,
13096 moves.push_back(move);
// Destination is an earlier block: transfer the suballocation there.
13104 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
13105 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
13107 VmaSuballocation suballoc = *srcSuballocIt;
13108 suballoc.offset = dstAllocOffset;
13109 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
13110 dstOffset = dstAllocOffset + srcAllocSize;
13111 m_BytesMoved += srcAllocSize;
13112 ++m_AllocationsMoved;
13114 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13116 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13117 srcSuballocIt = nextSuballocIt;
13119 pDstMetadata->m_Suballocations.push_back(suballoc);
13121 VmaDefragmentationMove move = {
13122 srcOrigBlockIndex, dstOrigBlockIndex,
13123 srcAllocOffset, dstAllocOffset,
13125 moves.push_back(move);
13131 m_BlockInfos.clear();
// Rebuild consistent free-list metadata after direct suballocation surgery.
13133 PostprocessMetadata();
// Strips all FREE suballocations and free-list bookkeeping from every block's
// metadata so Defragment() can treat each block as an append-only list of
// used suballocations. PostprocessMetadata() reconstructs the free entries.
13138 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
13140 const size_t blockCount = m_pBlockVector->GetBlockCount();
13141 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13143 VmaBlockMetadata_Generic*
const pMetadata =
13144 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
// Reset free-space accounting; it is recomputed in PostprocessMetadata().
13145 pMetadata->m_FreeCount = 0;
13146 pMetadata->m_SumFreeSize = pMetadata->GetSize();
13147 pMetadata->m_FreeSuballocationsBySize.clear();
13148 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13149 it != pMetadata->m_Suballocations.end(); )
// Remove FREE entries; iterator advance on erase was dropped from this copy.
13151 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
13153 VmaSuballocationList::iterator nextIt = it;
13155 pMetadata->m_Suballocations.erase(it);
// Rebuilds each block's free-space metadata after the compaction sweep:
// inserts FREE suballocations into every gap between used entries (and the
// trailing gap), recomputes m_FreeCount / m_SumFreeSize, and re-sorts the
// by-size free list. Validates the result under VMA_HEAVY_ASSERT.
13166 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
13168 const size_t blockCount = m_pBlockVector->GetBlockCount();
13169 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13171 VmaBlockMetadata_Generic*
const pMetadata =
13172 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
13173 const VkDeviceSize blockSize = pMetadata->GetSize();
// Completely emptied block: one FREE suballocation spanning the whole block.
13176 if(pMetadata->m_Suballocations.empty())
13178 pMetadata->m_FreeCount = 1;
13180 VmaSuballocation suballoc = {
13184 VMA_SUBALLOCATION_TYPE_FREE };
13185 pMetadata->m_Suballocations.push_back(suballoc);
13186 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
// Otherwise walk used suballocations in offset order, filling gaps.
13191 VkDeviceSize offset = 0;
13192 VmaSuballocationList::iterator it;
13193 for(it = pMetadata->m_Suballocations.begin();
13194 it != pMetadata->m_Suballocations.end();
13197 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
13198 VMA_ASSERT(it->offset >= offset);
// Gap before this suballocation: insert a FREE entry covering it.
13201 if(it->offset > offset)
13203 ++pMetadata->m_FreeCount;
13204 const VkDeviceSize freeSize = it->offset - offset;
13205 VmaSuballocation suballoc = {
13209 VMA_SUBALLOCATION_TYPE_FREE };
13210 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
13211 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13213 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
13217 pMetadata->m_SumFreeSize -= it->size;
13218 offset = it->offset + it->size;
// Trailing gap after the last used suballocation.
13222 if(offset < blockSize)
13224 ++pMetadata->m_FreeCount;
13225 const VkDeviceSize freeSize = blockSize - offset;
13226 VmaSuballocation suballoc = {
13230 VMA_SUBALLOCATION_TYPE_FREE };
13231 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
13232 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
// NOTE(review): '>' here vs '>=' above for the registration threshold —
// looks inconsistent; confirm against the original file.
13233 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13235 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
13240 pMetadata->m_FreeSuballocationsBySize.begin(),
13241 pMetadata->m_FreeSuballocationsBySize.end(),
13242 VmaSuballocationItemSizeLess());
13245 VMA_HEAVY_ASSERT(pMetadata->Validate());
13249 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc)
13252 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13253 while(it != pMetadata->m_Suballocations.end())
13255 if(it->offset < suballoc.offset)
13260 pMetadata->m_Suballocations.insert(it, suballoc);
13266 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
13269 VmaBlockVector* pBlockVector,
13270 uint32_t currFrameIndex,
13271 uint32_t algorithmFlags) :
13273 mutexLocked(false),
13274 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
13275 m_hAllocator(hAllocator),
13276 m_hCustomPool(hCustomPool),
13277 m_pBlockVector(pBlockVector),
13278 m_CurrFrameIndex(currFrameIndex),
13279 m_AlgorithmFlags(algorithmFlags),
13280 m_pAlgorithm(VMA_NULL),
13281 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
13282 m_AllAllocations(false)
13286 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
13288 vma_delete(m_hAllocator, m_pAlgorithm);
13291 void VmaBlockVectorDefragmentationContext::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
13293 AllocInfo info = { hAlloc, pChanged };
13294 m_Allocations.push_back(info);
13297 void VmaBlockVectorDefragmentationContext::Begin(
bool overlappingMoveSupported)
13299 const bool allAllocations = m_AllAllocations ||
13300 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
13312 if(VMA_DEBUG_MARGIN == 0 &&
13314 !m_pBlockVector->IsBufferImageGranularityConflictPossible())
13316 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
13317 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13321 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
13322 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13327 m_pAlgorithm->AddAll();
13331 for(
size_t i = 0, count = m_Allocations.size(); i < count; ++i)
13333 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
// Top-level defragmentation context constructor. Stores allocator, frame
// index and per-call state; zeroes the fixed array of per-memory-type default
// pool contexts. NOTE(review): parameter lines (flags, pStats) and their
// initializers were dropped from this copy — confirm against the original.
13341 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
13343 uint32_t currFrameIndex,
13346 m_hAllocator(hAllocator),
13347 m_CurrFrameIndex(currFrameIndex),
13350 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
// No default-pool context exists until an allocation of that memory type is added.
13352 memset(m_DefaultPoolContexts, 0,
sizeof(m_DefaultPoolContexts));
13355 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
13357 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13359 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
13360 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13361 vma_delete(m_hAllocator, pBlockVectorCtx);
13363 for(
size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
13365 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
13366 if(pBlockVectorCtx)
13368 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13369 vma_delete(m_hAllocator, pBlockVectorCtx);
// Registers whole custom pools for defragmentation. Pools using a non-default
// algorithm are skipped. A context is created per pool on first sight (the
// vma_new argument lines were dropped from this copy — confirm against the
// original), then AddAll() marks every allocation in the pool as movable.
13374 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount,
VmaPool* pPools)
13376 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13378 VmaPool pool = pPools[poolIndex];
// Pools with a custom algorithm (linear/buddy) are not defragmented.
13381 if(pool->m_BlockVector.GetAlgorithm() == 0)
13383 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
// Reuse an existing context for this pool if one was already created.
13385 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13387 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
13389 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13394 if(!pBlockVectorDefragCtx)
13396 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13399 &pool->m_BlockVector,
13402 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
13405 pBlockVectorDefragCtx->AddAll();
// Registers individual allocations for defragmentation. Only block-type,
// non-lost allocations participate. Each allocation is routed to the context
// of its owning custom pool, or to the default-pool context of its memory
// type (created lazily). pAllocationsChanged, when non-null, receives a
// per-allocation VK_TRUE/VK_FALSE "was moved" flag.
13410 void VmaDefragmentationContext_T::AddAllocations(
13411 uint32_t allocationCount,
13413 VkBool32* pAllocationsChanged)
13416 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
13419 VMA_ASSERT(hAlloc);
// Dedicated allocations and lost allocations cannot be defragmented.
13421 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
13423 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
13425 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
13427 const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
// Allocation belongs to a custom pool.
13429 if(hAllocPool != VK_NULL_HANDLE)
13432 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
13434 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13436 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
13438 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13442 if(!pBlockVectorDefragCtx)
13444 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13447 &hAllocPool->m_BlockVector,
13450 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
// Allocation belongs to a default pool: one context per memory type.
13457 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
13458 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
13459 if(!pBlockVectorDefragCtx)
13461 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13464 m_hAllocator->m_pBlockVectors[memTypeIndex],
13467 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
13471 if(pBlockVectorDefragCtx)
13473 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
13474 &pAllocationsChanged[allocIndex] : VMA_NULL;
13475 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
// Runs defragmentation over every registered block-vector context: first the
// default (per-memory-type) contexts, then the custom-pool contexts. Each
// context shares the same CPU/GPU move budgets. Stops early as soon as any
// context reports a result < VK_SUCCESS.
// NOTE(review): extraction dropped some lines here (loop increments, braces,
// trailing call arguments); annotated as-is.
13481 VkResult VmaDefragmentationContext_T::Defragment(
13482 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
13483 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
// Without a command buffer no GPU-side copies are possible, so the GPU
// budget is zeroed and only CPU (memcpy) defragmentation can happen.
13491 if(commandBuffer == VK_NULL_HANDLE)
13493 maxGpuBytesToMove = 0;
13494 maxGpuAllocationsToMove = 0;
13497 VkResult res = VK_SUCCESS;
// Pass 1: default pools, one block vector per memory type.
// "res >= VK_SUCCESS" keeps looping on success codes but aborts on errors.
13500 for(uint32_t memTypeIndex = 0;
13501 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
13504 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
13505 if(pBlockVectorCtx)
13507 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
13508 pBlockVectorCtx->GetBlockVector()->Defragment(
13511 maxCpuBytesToMove, maxCpuAllocationsToMove,
13512 maxGpuBytesToMove, maxGpuAllocationsToMove,
13514 if(pBlockVectorCtx->res != VK_SUCCESS)
13516 res = pBlockVectorCtx->res;
// Pass 2: custom pools registered via AddPools()/AddAllocations().
13522 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
13523 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
13526 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
13527 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
13528 pBlockVectorCtx->GetBlockVector()->Defragment(
13531 maxCpuBytesToMove, maxCpuAllocationsToMove,
13532 maxGpuBytesToMove, maxGpuAllocationsToMove,
13534 if(pBlockVectorCtx->res != VK_SUCCESS)
13536 res = pBlockVectorCtx->res;
// VmaRecorder: writes a CSV log of allocator API calls to a file (Windows-only;
// uses fopen_s / QueryPerformanceCounter). The constructor initializes the
// timestamp baseline; the init code below (presumably VmaRecorder::Init —
// its signature was lost in extraction, TODO confirm) opens the output file
// and writes the two-line CSV header with format version "1,5".
#if VMA_RECORDING_ENABLED 13548 VmaRecorder::VmaRecorder() :
13553 m_StartCounter(INT64_MAX)
13559 m_UseMutex = useMutex;
13560 m_Flags = settings.
flags;
// High-resolution timer baseline used by GetBasicParams() to compute
// per-call timestamps relative to recorder creation.
13562 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
13563 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
// "wb" — binary mode so line endings in the CSV are exactly what fprintf emits.
13566 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
13569 return VK_ERROR_INITIALIZATION_FAILED;
// CSV header: file magic line, then recording format version.
13573 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
13574 fprintf(m_File,
"%s\n",
"1,5");
// Destructor: closes the log file if it was successfully opened
// (closing code not visible here — presumably fclose; TODO confirm).
13579 VmaRecorder::~VmaRecorder()
13581 if(m_File != VMA_NULL)
// Record* methods share one pattern: capture thread id + timestamp via
// GetBasicParams(), lock the file mutex (if enabled), and append one CSV line
// "<threadId>,<time>,<frameIndex>,<functionName>[,args...]".
13587 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
13589 CallParams callParams;
13590 GetBasicParams(callParams);
13592 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13593 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
13597 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
13599 CallParams callParams;
13600 GetBasicParams(callParams);
13602 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13603 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// NOTE(review): the signature of the next method (format string says
// vmaCreatePool) was lost in extraction, as were the fprintf arguments after
// the format string — presumably the VmaPoolCreateInfo fields; TODO confirm.
13609 CallParams callParams;
13610 GetBasicParams(callParams);
13612 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13613 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
13624 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
13626 CallParams callParams;
13627 GetBasicParams(callParams);
13629 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13630 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// CSV writers for the four allocation entry points. Each logs the Vulkan
// memory requirements, the VmaAllocationCreateInfo fields, and the resulting
// allocation handle(s). UserDataString converts pUserData into either the
// user string (when the string flag is set) or a "%p" pointer spelling.
// NOTE(review): several fprintf argument lines were lost in extraction;
// remaining lines are annotated as-is.
13635 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
13636 const VkMemoryRequirements& vkMemReq,
13640 CallParams callParams;
13641 GetBasicParams(callParams);
13643 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13644 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13645 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13647 vkMemReq.alignment,
13648 vkMemReq.memoryTypeBits,
13656 userDataStr.GetString());
// Multi-page variant: the allocation handles are emitted as a space-separated
// pointer list between the fixed columns and the user-data column.
13660 void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
13661 const VkMemoryRequirements& vkMemReq,
13663 uint64_t allocationCount,
13666 CallParams callParams;
13667 GetBasicParams(callParams);
13669 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13670 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13671 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
13673 vkMemReq.alignment,
13674 vkMemReq.memoryTypeBits,
13681 PrintPointerList(allocationCount, pAllocations);
13682 fprintf(m_File,
",%s\n", userDataStr.GetString());
// Buffer variant: additionally logs the dedicated-allocation hints as 0/1.
13686 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
13687 const VkMemoryRequirements& vkMemReq,
13688 bool requiresDedicatedAllocation,
13689 bool prefersDedicatedAllocation,
13693 CallParams callParams;
13694 GetBasicParams(callParams);
13696 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13697 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13698 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13700 vkMemReq.alignment,
13701 vkMemReq.memoryTypeBits,
13702 requiresDedicatedAllocation ? 1 : 0,
13703 prefersDedicatedAllocation ? 1 : 0,
13711 userDataStr.GetString());
// Image variant: same columns as the buffer variant.
13715 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
13716 const VkMemoryRequirements& vkMemReq,
13717 bool requiresDedicatedAllocation,
13718 bool prefersDedicatedAllocation,
13722 CallParams callParams;
13723 GetBasicParams(callParams);
13725 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13726 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13727 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13729 vkMemReq.alignment,
13730 vkMemReq.memoryTypeBits,
13731 requiresDedicatedAllocation ? 1 : 0,
13732 prefersDedicatedAllocation ? 1 : 0,
13740 userDataStr.GetString());
// Simple single-argument CSV writers. All follow the same shape: capture
// thread id + timestamp, lock the file mutex, append one line naming the API
// call plus the handle (and, where applicable, extra scalar arguments).
// NOTE(review): trailing fprintf argument lines (the handle being logged)
// were lost in extraction for several of these; annotated as-is.
13744 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
13747 CallParams callParams;
13748 GetBasicParams(callParams);
13750 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13751 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
13756 void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
13757 uint64_t allocationCount,
13760 CallParams callParams;
13761 GetBasicParams(callParams);
13763 VmaMutexLock lock(m_FileMutex, m_UseMutex);
// Freed handles are emitted as a space-separated pointer list.
13764 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
13765 PrintPointerList(allocationCount, pAllocations);
13766 fprintf(m_File,
"\n");
13770 void VmaRecorder::RecordResizeAllocation(
13771 uint32_t frameIndex,
13773 VkDeviceSize newSize)
13775 CallParams callParams;
13776 GetBasicParams(callParams);
13778 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13779 fprintf(m_File,
"%u,%.3f,%u,vmaResizeAllocation,%p,%llu\n", callParams.threadId, callParams.time, frameIndex,
13780 allocation, newSize);
13784 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
13786 const void* pUserData)
13788 CallParams callParams;
13789 GetBasicParams(callParams);
13791 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13792 UserDataString userDataStr(
13795 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13797 userDataStr.GetString());
13801 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
13804 CallParams callParams;
13805 GetBasicParams(callParams);
13807 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13808 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
13813 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
13816 CallParams callParams;
13817 GetBasicParams(callParams);
13819 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13820 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
13825 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
13828 CallParams callParams;
13829 GetBasicParams(callParams);
13831 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13832 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
13837 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
13838 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13840 CallParams callParams;
13841 GetBasicParams(callParams);
13843 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13844 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
13851 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
13852 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13854 CallParams callParams;
13855 GetBasicParams(callParams);
13857 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13858 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Logs vmaCreateBuffer: the VkBufferCreateInfo fields, then the
// VmaAllocationCreateInfo fields, the resulting handles, and the user data.
// NOTE(review): some fprintf argument lines were lost in extraction.
13865 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
13866 const VkBufferCreateInfo& bufCreateInfo,
13870 CallParams callParams;
13871 GetBasicParams(callParams);
13873 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13874 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13875 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13876 bufCreateInfo.flags,
13877 bufCreateInfo.size,
13878 bufCreateInfo.usage,
13879 bufCreateInfo.sharingMode,
13880 allocCreateInfo.
flags,
13881 allocCreateInfo.
usage,
13885 allocCreateInfo.
pool,
13887 userDataStr.GetString());
// Logs vmaCreateImage: the full VkImageCreateInfo (type, format, extent,
// mips, layers, samples, tiling, usage, sharing, initial layout), then the
// VmaAllocationCreateInfo fields, handles, and user data.
13891 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
13892 const VkImageCreateInfo& imageCreateInfo,
13896 CallParams callParams;
13897 GetBasicParams(callParams);
13899 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13900 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13901 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13902 imageCreateInfo.flags,
13903 imageCreateInfo.imageType,
13904 imageCreateInfo.format,
13905 imageCreateInfo.extent.width,
13906 imageCreateInfo.extent.height,
13907 imageCreateInfo.extent.depth,
13908 imageCreateInfo.mipLevels,
13909 imageCreateInfo.arrayLayers,
13910 imageCreateInfo.samples,
13911 imageCreateInfo.tiling,
13912 imageCreateInfo.usage,
13913 imageCreateInfo.sharingMode,
13914 imageCreateInfo.initialLayout,
13915 allocCreateInfo.
flags,
13916 allocCreateInfo.
usage,
13920 allocCreateInfo.
pool,
13922 userDataStr.GetString());
// Handle-only CSV writers for destroy/touch/query calls, plus the two
// defragmentation bracketing records. NOTE(review): the fprintf argument
// carrying the logged handle was lost in extraction for several of these.
13926 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
13929 CallParams callParams;
13930 GetBasicParams(callParams);
13932 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13933 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
13938 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
13941 CallParams callParams;
13942 GetBasicParams(callParams);
13944 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13945 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
13950 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
13953 CallParams callParams;
13954 GetBasicParams(callParams);
13956 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13957 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
13962 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
13965 CallParams callParams;
13966 GetBasicParams(callParams);
13968 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13969 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
13974 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
13977 CallParams callParams;
13978 GetBasicParams(callParams);
13980 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13981 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// Begin record: writes the defragmentation flags, then (in code not fully
// visible here) a list field, then the CPU/GPU budgets and handles.
13986 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
13990 CallParams callParams;
13991 GetBasicParams(callParams);
13993 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13994 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
13997 fprintf(m_File,
",");
13999 fprintf(m_File,
",%llu,%u,%llu,%u,%p,%p\n",
14009 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
14012 CallParams callParams;
14013 GetBasicParams(callParams);
14015 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14016 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
// Fragment of the UserDataString constructor (its signature was lost in
// extraction — presumably VmaRecorder::UserDataString::UserDataString(
// VmaAllocationCreateFlags, const void*); TODO confirm). When pUserData is
// non-null it is either treated as a C string directly, or rendered as a
// "%p" pointer spelling into the fixed-size m_PtrStr buffer.
14023 if(pUserData != VMA_NULL)
14027 m_Str = (
const char*)pUserData;
14031 sprintf_s(m_PtrStr,
"%p", pUserData);
// Writes the "Config,Begin" .. "Config,End" section at the top of the
// recording: physical-device identity and limits, the full memory heap/type
// layout, whether VK_KHR_dedicated_allocation is enabled, and the values of
// the VMA_DEBUG_* / size macros the library was compiled with. Replay tools
// use this to reproduce the environment.
14041 void VmaRecorder::WriteConfiguration(
14042 const VkPhysicalDeviceProperties& devProps,
14043 const VkPhysicalDeviceMemoryProperties& memProps,
14044 bool dedicatedAllocationExtensionEnabled)
14046 fprintf(m_File,
"Config,Begin\n");
14048 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
14049 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
14050 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
14051 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
14052 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
14053 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
14055 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
14056 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
14057 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// Memory heaps: size and flags per heap.
14059 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
14060 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
14062 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
14063 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
// Memory types: owning heap index and property flags per type.
14065 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
14066 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
14068 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
14069 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
14072 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
// Compile-time configuration macros, normalized to 0/1 or VkDeviceSize.
14074 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
14075 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
14076 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
14077 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
14078 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
14079 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
14080 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
14081 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
14082 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
14084 fprintf(m_File,
"Config,End\n");
// Fills the per-call CSV prefix: calling thread id (Win32 GetCurrentThreadId)
// and seconds elapsed since recorder creation, derived from
// QueryPerformanceCounter relative to m_StartCounter / m_Freq.
14087 void VmaRecorder::GetBasicParams(CallParams& outParams)
14089 outParams.threadId = GetCurrentThreadId();
14091 LARGE_INTEGER counter;
14092 QueryPerformanceCounter(&counter);
14093 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
// Writes `count` allocation handles as "%p" values separated by single
// spaces (first element unseparated, the rest prefixed with " ").
14096 void VmaRecorder::PrintPointerList(uint64_t count,
const VmaAllocation* pItems)
14100 fprintf(m_File,
"%p", pItems[0]);
14101 for(uint64_t i = 1; i < count; ++i)
14103 fprintf(m_File,
" %p", pItems[i]);
// Flush(): body not visible here — presumably flushes m_File; TODO confirm.
14108 void VmaRecorder::Flush()
// Thread-safe pool allocator for VmaAllocation_T objects, built on a
// VmaPoolAllocator with blocks of 1024 items. Alloc/Free serialize on
// m_Mutex. (The Allocate() signature line is not visible in this extraction;
// the two statements after the constructor are presumably its body.)
14116 #endif // #if VMA_RECORDING_ENABLED 14121 VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks) :
14122 m_Allocator(pAllocationCallbacks, 1024)
14128 VmaMutexLock mutexLock(m_Mutex);
14129 return m_Allocator.Alloc();
14132 void VmaAllocationObjectAllocator::Free(
VmaAllocation hAlloc)
14134 VmaMutexLock mutexLock(m_Mutex);
14135 m_Allocator.Free(hAlloc);
// VmaAllocator_T constructor (signature line lost in extraction; this is its
// member-initializer list and body). Responsibilities visible below:
// capture device/physical-device handles and allocation callbacks, zero all
// cached state, import Vulkan function pointers, query device properties and
// memory properties, apply per-heap size limits, create one default
// VmaBlockVector and dedicated-allocation list per memory type, and
// optionally start the call recorder.
14144 m_hDevice(pCreateInfo->device),
14145 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
14146 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
14147 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
14148 m_AllocationObjectAllocator(&m_AllocationCallbacks),
14149 m_PreferredLargeHeapBlockSize(0),
14150 m_PhysicalDevice(pCreateInfo->physicalDevice),
14151 m_CurrentFrameIndex(0),
14152 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
14155 ,m_pRecorder(VMA_NULL)
// Corruption detection requires the margin to be a whole number of the
// uint32_t magic values written into it.
14158 if(VMA_DEBUG_DETECT_CORRUPTION)
14161 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
// Fail loudly if the caller requested dedicated-allocation support while it
// was compiled out.
14166 #if !(VMA_DEDICATED_ALLOCATION) 14169 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
14173 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
14174 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
14175 memset(&m_MemProps, 0,
sizeof(m_MemProps));
14177 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
14178 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
14180 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
14182 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
14193 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
14194 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
14196 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
14197 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
14198 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
14199 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
// Optional caller-provided per-heap budget: clamp both our limit array and
// the cached heap size so later block-size math respects it.
14206 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
14208 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
14209 if(limit != VK_WHOLE_SIZE)
14211 m_HeapSizeLimit[heapIndex] = limit;
14212 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
14214 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default block vector + dedicated-allocation vector per memory type.
14220 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14222 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
14224 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
14228 preferredBlockSize,
14231 GetBufferImageGranularity(),
14238 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
14245 VkResult res = VK_SUCCESS;
// Recording requested: only available when compiled with
// VMA_RECORDING_ENABLED, otherwise the request is a hard error.
14250 #if VMA_RECORDING_ENABLED 14251 m_pRecorder = vma_new(
this, VmaRecorder)();
14253 if(res != VK_SUCCESS)
14257 m_pRecorder->WriteConfiguration(
14258 m_PhysicalDeviceProperties,
14260 m_UseKhrDedicatedAllocation);
14261 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
14263 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
14264 return VK_ERROR_FEATURE_NOT_PRESENT;
// Destructor: records destruction and deletes the recorder (when recording is
// enabled), asserts that all custom pools were destroyed, then tears down the
// per-memory-type dedicated-allocation vectors and block vectors in reverse
// order, asserting that no dedicated allocations were leaked.
14271 VmaAllocator_T::~VmaAllocator_T()
14273 #if VMA_RECORDING_ENABLED 14274 if(m_pRecorder != VMA_NULL)
14276 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
14277 vma_delete(
this, m_pRecorder);
14281 VMA_ASSERT(m_Pools.empty());
14283 for(
size_t i = GetMemoryTypeCount(); i--; )
14285 if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
14287 VMA_ASSERT(0 &&
"Unfreed dedicated allocations found.");
14290 vma_delete(
this, m_pDedicatedAllocations[i]);
14291 vma_delete(
this, m_pBlockVectors[i]);
// Populates m_VulkanFunctions in three stages:
// 1. When statically linked (VMA_STATIC_VULKAN_FUNCTIONS == 1), take the
//    global Vulkan prototypes, fetching the KHR dedicated-allocation entry
//    points via vkGetDeviceProcAddr when that extension is in use.
// 2. Overwrite any entry the caller supplied via pVulkanFunctions (each copy
//    guarded by a non-null check through VMA_COPY_IF_NOT_NULL).
// 3. Assert every required pointer ended up non-null.
14295 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
14297 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14298 m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
14299 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
14300 m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
14301 m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
14302 m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
14303 m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
14304 m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
14305 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
14306 m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
14307 m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
14308 m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
14309 m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
14310 m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
14311 m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
14312 m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
14313 m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
14314 m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
// Extension entry points are device-level, hence vkGetDeviceProcAddr.
14315 #if VMA_DEDICATED_ALLOCATION 14316 if(m_UseKhrDedicatedAllocation)
14318 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
14319 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
14320 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
14321 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
14323 #endif // #if VMA_DEDICATED_ALLOCATION 14324 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14326 #define VMA_COPY_IF_NOT_NULL(funcName) \ 14327 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 14329 if(pVulkanFunctions != VMA_NULL)
14331 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
14332 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
14333 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
14334 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
14335 VMA_COPY_IF_NOT_NULL(vkMapMemory);
14336 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
14337 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
14338 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
14339 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
14340 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
14341 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
14342 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
14343 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
14344 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
14345 VMA_COPY_IF_NOT_NULL(vkCreateImage);
14346 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
14347 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
14348 #if VMA_DEDICATED_ALLOCATION 14349 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
14350 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Final validation: after static import and user overrides, every pointer
// the allocator will call must be set.
14354 #undef VMA_COPY_IF_NOT_NULL 14358 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
14359 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
14360 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
14361 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
14362 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
14363 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
14364 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
14365 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
14366 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
14367 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
14368 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
14369 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
14370 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
14371 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
14372 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
14373 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
14374 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
14375 #if VMA_DEDICATED_ALLOCATION 14376 if(m_UseKhrDedicatedAllocation)
14378 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
14379 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
14384 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
14386 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14387 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
14388 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
14389 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates one or more allocations from a specific memory type. Strategy:
// prefer a dedicated VkDeviceMemory when forced (debug macro), explicitly
// requested, or when the request is larger than half the preferred block
// size; otherwise try the memory type's block vector first and fall back to
// dedicated memory on failure (unless NEVER_ALLOCATE semantics forbid it).
// NOTE(review): many argument/condition lines were dropped by extraction;
// annotated as-is.
14392 VkResult VmaAllocator_T::AllocateMemoryOfType(
14394 VkDeviceSize alignment,
14395 bool dedicatedAllocation,
14396 VkBuffer dedicatedBuffer,
14397 VkImage dedicatedImage,
14399 uint32_t memTypeIndex,
14400 VmaSuballocationType suballocType,
14401 size_t allocationCount,
14404 VMA_ASSERT(pAllocations != VMA_NULL);
14405 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
// (Condition truncated) — involves the memory type not being HOST_VISIBLE.
14411 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
14416 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
14417 VMA_ASSERT(blockVector);
14419 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
// Heuristic: requests bigger than half a block would fragment the block
// vector, so they go to dedicated memory.
14420 bool preferDedicatedMemory =
14421 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
14422 dedicatedAllocation ||
14424 size > preferredBlockSize / 2;
// Dedicated path is only taken for the default pool (custom pools manage
// their own block vectors).
14426 if(preferDedicatedMemory &&
14428 finalCreateInfo.
pool == VK_NULL_HANDLE)
14437 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14441 return AllocateDedicatedMemory(
// Normal path: sub-allocate from the block vector.
14456 VkResult res = blockVector->Allocate(
14457 m_CurrentFrameIndex.load(),
14464 if(res == VK_SUCCESS)
14472 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Block-vector allocation failed: fall back to dedicated memory.
14476 res = AllocateDedicatedMemory(
14482 finalCreateInfo.pUserData,
14487 if(res == VK_SUCCESS)
14490 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
14496 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates `allocationCount` dedicated VkDeviceMemory blocks of `size` from
// `memTypeIndex`. When VK_KHR_dedicated_allocation is in use, chains a
// VkMemoryDedicatedAllocateInfoKHR naming the buffer or image the memory is
// dedicated to. On success the new allocations are registered (sorted) in the
// per-type dedicated-allocations list; on partial failure all pages allocated
// so far are rolled back and the output array is zeroed.
14503 VkResult VmaAllocator_T::AllocateDedicatedMemory(
14505 VmaSuballocationType suballocType,
14506 uint32_t memTypeIndex,
14508 bool isUserDataString,
14510 VkBuffer dedicatedBuffer,
14511 VkImage dedicatedImage,
14512 size_t allocationCount,
14515 VMA_ASSERT(allocationCount > 0 && pAllocations);
14517 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
14518 allocInfo.memoryTypeIndex = memTypeIndex;
14519 allocInfo.allocationSize = size;
// At most one of dedicatedBuffer / dedicatedImage may be set.
14521 #if VMA_DEDICATED_ALLOCATION 14522 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
14523 if(m_UseKhrDedicatedAllocation)
14525 if(dedicatedBuffer != VK_NULL_HANDLE)
14527 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
14528 dedicatedAllocInfo.buffer = dedicatedBuffer;
14529 allocInfo.pNext = &dedicatedAllocInfo;
14531 else if(dedicatedImage != VK_NULL_HANDLE)
14533 dedicatedAllocInfo.image = dedicatedImage;
14534 allocInfo.pNext = &dedicatedAllocInfo;
// Allocate page by page; stop at the first failure.
14537 #endif // #if VMA_DEDICATED_ALLOCATION 14540 VkResult res = VK_SUCCESS;
14541 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14543 res = AllocateDedicatedMemoryPage(
14551 pAllocations + allocIndex);
14552 if(res != VK_SUCCESS)
// Success: publish all new allocations under the per-type mutex, kept
// sorted by pointer for fast lookup/removal.
14558 if(res == VK_SUCCESS)
14562 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14563 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
14564 VMA_ASSERT(pDedicatedAllocations);
14565 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14567 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
14571 VMA_DEBUG_LOG(
" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
// Failure: free every page allocated before the failing one, then zero the
// output array so the caller sees no partial results.
14576 while(allocIndex--)
14579 VkDeviceMemory hMemory = currAlloc->GetMemory();
14591 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
14593 currAlloc->SetUserData(
this, VMA_NULL);
14595 m_AllocationObjectAllocator.Free(currAlloc);
14598 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates a single dedicated VkDeviceMemory page and wraps it in a
// VmaAllocation_T: allocate the Vulkan memory, optionally map it persistently
// (mapping branch condition not visible here), then construct the allocation
// object, attach user data, and optionally fill the memory with the
// "created" debug pattern. On map failure the freshly allocated memory is
// released before returning.
14604 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
14606 VmaSuballocationType suballocType,
14607 uint32_t memTypeIndex,
14608 const VkMemoryAllocateInfo& allocInfo,
14610 bool isUserDataString,
14614 VkDeviceMemory hMemory = VK_NULL_HANDLE;
14615 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
14618 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
14622 void* pMappedData = VMA_NULL;
14625 res = (*m_VulkanFunctions.vkMapMemory)(
// Mapping failed: release the just-allocated memory so nothing leaks.
14634 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
14635 FreeVulkanMemory(memTypeIndex, size, hMemory);
14640 *pAllocation = m_AllocationObjectAllocator.Allocate();
14641 (*pAllocation)->Ctor(m_CurrentFrameIndex.load(), isUserDataString);
14642 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
14643 (*pAllocation)->SetUserData(
this, pUserData);
// Debug aid: stamp newly created allocations with a recognizable pattern.
14644 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14646 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Queries memory requirements for a buffer. When VK_KHR_dedicated_allocation
// is enabled and in use, calls vkGetBufferMemoryRequirements2KHR with a
// chained VkMemoryDedicatedRequirementsKHR to also learn whether the driver
// requires/prefers a dedicated allocation; otherwise falls back to the core
// vkGetBufferMemoryRequirements and reports both hints as false.
14652 void VmaAllocator_T::GetBufferMemoryRequirements(
14654 VkMemoryRequirements& memReq,
14655 bool& requiresDedicatedAllocation,
14656 bool& prefersDedicatedAllocation)
const 14658 #if VMA_DEDICATED_ALLOCATION 14659 if(m_UseKhrDedicatedAllocation)
14661 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
14662 memReqInfo.buffer = hBuffer;
14664 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14666 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14667 memReq2.pNext = &memDedicatedReq;
14669 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14671 memReq = memReq2.memoryRequirements;
// VkBool32 -> bool conversion done explicitly against VK_FALSE.
14672 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14673 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core query, no dedicated-allocation information available.
14676 #endif // #if VMA_DEDICATED_ALLOCATION 14678 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
14679 requiresDedicatedAllocation =
false;
14680 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR + VkMemoryDedicatedRequirementsKHR when
// the dedicated-allocation extension is in use, otherwise the core query
// with both dedicated-allocation hints reported as false.
14684 void VmaAllocator_T::GetImageMemoryRequirements(
14686 VkMemoryRequirements& memReq,
14687 bool& requiresDedicatedAllocation,
14688 bool& prefersDedicatedAllocation)
const 14690 #if VMA_DEDICATED_ALLOCATION 14691 if(m_UseKhrDedicatedAllocation)
14693 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
14694 memReqInfo.image = hImage;
14696 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14698 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14699 memReq2.pNext = &memDedicatedReq;
14701 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14703 memReq = memReq2.memoryRequirements;
14704 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14705 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core query, no dedicated-allocation information available.
14708 #endif // #if VMA_DEDICATED_ALLOCATION 14710 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
14711 requiresDedicatedAllocation =
false;
14712 prefersDedicatedAllocation =
false;
// Central allocation entry point: validates the request, routes it either to
// an explicit custom pool or to the default block vectors, and retries with
// other eligible memory types on failure.
// NOTE(review): this chunk is an extraction with interior lines elided
// (createInfo parameter, flag-validation conditions, call argument lists);
// comments below describe only what the visible lines establish.
14716 VkResult VmaAllocator_T::AllocateMemory(
14717 const VkMemoryRequirements& vkMemReq,
14718 bool requiresDedicatedAllocation,
14719 bool prefersDedicatedAllocation,
14720 VkBuffer dedicatedBuffer,
14721 VkImage dedicatedImage,
14723 VmaSuballocationType suballocType,
14724 size_t allocationCount,
// Zero out the output array first so callers see null handles on failure.
14727 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Vulkan guarantees alignment is a power of two; size 0 is a caller bug.
14729 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
14731 if(vkMemReq.size == 0)
14733 return VK_ERROR_VALIDATION_FAILED_EXT;
// Reject mutually exclusive flag combinations (conditions elided above each assert).
14738 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
14739 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14744 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
14745 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// A required dedicated allocation is incompatible with NEVER_ALLOCATE and
// with custom pools.
14747 if(requiresDedicatedAllocation)
14751 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
14752 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14754 if(createInfo.
pool != VK_NULL_HANDLE)
14756 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
14757 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14760 if((createInfo.
pool != VK_NULL_HANDLE) &&
14763 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
14764 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom-pool path: honor both the resource alignment and the memory type's
// minimum alignment, then delegate entirely to the pool's block vector.
14767 if(createInfo.
pool != VK_NULL_HANDLE)
14769 const VkDeviceSize alignmentForPool = VMA_MAX(
14770 vkMemReq.alignment,
14771 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
14772 return createInfo.
pool->m_BlockVector.Allocate(
14773 m_CurrentFrameIndex.load(),
// Default path: pick a memory type from vkMemReq.memoryTypeBits, allocate,
// and on failure mask out the failed type and retry with the next candidate.
14784 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
14785 uint32_t memTypeIndex = UINT32_MAX;
14787 if(res == VK_SUCCESS)
14789 VkDeviceSize alignmentForMemType = VMA_MAX(
14790 vkMemReq.alignment,
14791 GetMemoryTypeMinAlignment(memTypeIndex));
14793 res = AllocateMemoryOfType(
14795 alignmentForMemType,
14796 requiresDedicatedAllocation || prefersDedicatedAllocation,
14805 if(res == VK_SUCCESS)
// Remove the failed memory type from the candidate set before retrying.
14815 memoryTypeBits &= ~(1u << memTypeIndex);
14818 if(res == VK_SUCCESS)
14820 alignmentForMemType = VMA_MAX(
14821 vkMemReq.alignment,
14822 GetMemoryTypeMinAlignment(memTypeIndex));
14824 res = AllocateMemoryOfType(
14826 alignmentForMemType,
14827 requiresDedicatedAllocation || prefersDedicatedAllocation,
14836 if(res == VK_SUCCESS)
// No memory type could satisfy the request.
14846 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an array of allocations in reverse order. For each live allocation:
// optionally fills it with a "destroyed" debug pattern, returns it to its
// owning block vector (custom pool or default) or frees its dedicated memory,
// then destroys the VmaAllocation object itself.
14857 void VmaAllocator_T::FreeMemory(
14858 size_t allocationCount,
14861 VMA_ASSERT(pAllocations);
// Iterate backwards (allocationCount-1 .. 0).
14863 for(
size_t allocIndex = allocationCount; allocIndex--; )
14867 if(allocation != VK_NULL_HANDLE)
// TouchAllocation returns whether the allocation is still valid (not lost);
// only then is there real memory to fill/free.
14869 if(TouchAllocation(allocation))
14871 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14873 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED)
14876 switch(allocation->GetType())
14878 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// A block allocation belongs either to a custom pool's block vector or,
// when GetParentPool() is null, to the default vector of its memory type.
14880 VmaBlockVector* pBlockVector = VMA_NULL;
14881 VmaPool hPool = allocation->GetBlock()->GetParentPool();
14882 if(hPool != VK_NULL_HANDLE)
14884 pBlockVector = &hPool->m_BlockVector;
14888 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
14889 pBlockVector = m_pBlockVectors[memTypeIndex];
14891 pBlockVector->Free(allocation);
14894 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14895 FreeDedicatedMemory(allocation);
// Finally destroy the allocation object regardless of its type.
14902 allocation->SetUserData(
this, VMA_NULL);
14903 allocation->Dtor();
14904 m_AllocationObjectAllocator.Free(allocation);
14909 VkResult VmaAllocator_T::ResizeAllocation(
14911 VkDeviceSize newSize)
14913 if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
14915 return VK_ERROR_VALIDATION_FAILED_EXT;
14917 if(newSize == alloc->GetSize())
14922 switch(alloc->GetType())
14924 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14925 return VK_ERROR_FEATURE_NOT_PRESENT;
14926 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14927 if(alloc->GetBlock()->m_pMetadata->ResizeAllocation(alloc, newSize))
14929 alloc->ChangeSize(newSize);
14930 VMA_HEAVY_ASSERT(alloc->GetBlock()->m_pMetadata->Validate());
14935 return VK_ERROR_OUT_OF_POOL_MEMORY;
14939 return VK_ERROR_VALIDATION_FAILED_EXT;
// Aggregates allocator-wide statistics into *pStats: initializes all
// per-type/per-heap entries, accumulates stats from the default block
// vectors, all custom pools, and every dedicated allocation, then
// post-processes (averages etc.) each entry.
14943 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Initialize total and every per-memory-type / per-heap slot.
14946 InitStatInfo(pStats->
total);
14947 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
14949 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default block vectors, one per memory type.
14953 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14955 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
14956 VMA_ASSERT(pBlockVector);
14957 pBlockVector->AddStats(pStats);
// Custom pools, guarded by the pools mutex (read lock is sufficient).
14962 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
14963 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
14965 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
// Dedicated allocations, per memory type, each list under its own mutex.
14970 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14972 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14973 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14974 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
14975 VMA_ASSERT(pDedicatedAllocVector);
14976 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
// Each dedicated allocation contributes to the total, its memory type,
// and its heap. (Declaration of allocationStatInfo elided by extraction.)
14979 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
14980 VmaAddStatInfo(pStats->
total, allocationStatInfo);
14981 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
14982 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Post-process every filled entry (derives averages from sums).
14987 VmaPostprocessCalcStatInfo(pStats->
total);
14988 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
14989 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
14990 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
14991 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// 4098 == 0x1002, AMD's PCI vendor ID. Usage is not visible in this chunk;
// presumably compared against VkPhysicalDeviceProperties::vendorID — verify at call sites.
14994 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Starts a defragmentation pass: creates a context object, registers the
// allocations to process, and runs Defragment(). The context is kept alive
// (for DefragmentationEnd) only while the pass is incomplete (VK_NOT_READY);
// otherwise it is destroyed immediately.
// NOTE(review): parameter list and several argument lists elided by extraction.
14996 VkResult VmaAllocator_T::DefragmentationBegin(
15006 *pContext = vma_new(
this, VmaDefragmentationContext_T)(
15007 this, m_CurrentFrameIndex.load(), info.
flags, pStats);
15010 (*pContext)->AddAllocations(
15013 VkResult res = (*pContext)->Defragment(
// Pass finished (success or failure other than "in progress"): free the context.
15018 if(res != VK_NOT_READY)
15020 vma_delete(
this, *pContext);
15021 *pContext = VMA_NULL;
15027 VkResult VmaAllocator_T::DefragmentationEnd(
15030 vma_delete(
this, context);
// Body of VmaAllocator_T::GetAllocationInfo (signature elided by extraction):
// fills *pAllocationInfo for hAllocation. For allocations that can become
// lost, it also "touches" them via a compare-exchange loop on the last-use
// frame index; a lost allocation reports null memory/zero offset.
15036 if(hAllocation->CanBecomeLost())
15042 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15043 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Allocation already lost: report size/userdata but no memory or offset.
15046 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
15050 pAllocationInfo->
offset = 0;
15051 pAllocationInfo->
size = hAllocation->GetSize();
15053 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report full info, no CAS needed.
15056 else if(localLastUseFrameIndex == localCurrFrameIndex)
15058 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
15059 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
15060 pAllocationInfo->
offset = hAllocation->GetOffset();
15061 pAllocationInfo->
size = hAllocation->GetSize();
15063 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise try to advance last-use to the current frame; on CAS failure
// another thread changed it, so re-read and loop (loop structure elided).
15068 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15070 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost allocations: when stats-string support is compiled in, still
// advance the last-use frame index so usage shows up in statistics.
15077 #if VMA_STATS_STRING_ENABLED 15078 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15079 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15082 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
15083 if(localLastUseFrameIndex == localCurrFrameIndex)
15089 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15091 localLastUseFrameIndex = localCurrFrameIndex;
// Plain path: copy all parameters, including the current mapped pointer.
15097 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
15098 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
15099 pAllocationInfo->
offset = hAllocation->GetOffset();
15100 pAllocationInfo->
size = hAllocation->GetSize();
15101 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
15102 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks hAllocation as used in the current frame. Returns whether the
// allocation is still valid (false once it is lost). Mirrors the CAS loop in
// GetAllocationInfo but without filling any info struct.
// NOTE(review): return statements inside the branches were elided by extraction.
15106 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
15109 if(hAllocation->CanBecomeLost())
15111 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15112 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Already lost, or already touched this frame: nothing left to do.
15115 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
15119 else if(localLastUseFrameIndex == localCurrFrameIndex)
// Try to advance last-use to the current frame; retry on CAS failure.
15125 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15127 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost allocations: with stats-string support, still record the touch
// so per-frame usage statistics stay accurate.
15134 #if VMA_STATS_STRING_ENABLED 15135 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15136 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15139 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
15140 if(localLastUseFrameIndex == localCurrFrameIndex)
15146 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15148 localLastUseFrameIndex = localCurrFrameIndex;
// Body of VmaAllocator_T::CreatePool (signature and createInfo normalization
// elided by extraction): validates/normalizes the create info, constructs the
// VmaPool_T with a preferred block size, pre-creates its minimum blocks, and
// registers the pool (with a fresh id) in the sorted m_Pools list.
15160 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
// Invalid create info (condition elided).
15170 return VK_ERROR_INITIALIZATION_FAILED;
15173 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
15175 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
// Eagerly create the pool's minimum number of blocks; destroy the pool on failure.
15177 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
15178 if(res != VK_SUCCESS)
15180 vma_delete(
this, *pPool);
// Register the new pool under the pools write lock.
15187 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15188 (*pPool)->SetId(m_NextPoolId++);
15189 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
15195 void VmaAllocator_T::DestroyPool(
VmaPool pool)
15199 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15200 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
15201 VMA_ASSERT(success &&
"Pool not found in Allocator.");
15204 vma_delete(
this, pool);
// Body of VmaAllocator_T::GetPoolStats (signature elided by extraction):
// statistics gathering is delegated entirely to the pool's block vector.
15209 pool->m_BlockVector.GetPoolStats(pPoolStats);
15212 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
15214 m_CurrentFrameIndex.store(frameIndex);
15217 void VmaAllocator_T::MakePoolAllocationsLost(
15219 size_t* pLostAllocationCount)
15221 hPool->m_BlockVector.MakePoolAllocationsLost(
15222 m_CurrentFrameIndex.load(),
15223 pLostAllocationCount);
15226 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
15228 return hPool->m_BlockVector.CheckCorruption();
// Runs corruption checks over every default block vector and custom pool
// whose memory type is selected by memoryTypeBits. Starts from
// VK_ERROR_FEATURE_NOT_PRESENT and upgrades to VK_SUCCESS once at least one
// vector actually supports (and passes) the check.
// NOTE(review): switch scaffolding and error-propagation branches elided by extraction.
15231 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
15233 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default block vectors for each selected memory type.
15236 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15238 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
15240 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
15241 VMA_ASSERT(pBlockVector);
15242 VkResult localRes = pBlockVector->CheckCorruption();
// FEATURE_NOT_PRESENT means "margin checks disabled here" and is not fatal.
15245 case VK_ERROR_FEATURE_NOT_PRESENT:
15248 finalRes = VK_SUCCESS;
// Custom pools, under the pools read lock, filtered by memory type.
15258 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15259 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
15261 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
15263 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
15266 case VK_ERROR_FEATURE_NOT_PRESENT:
15269 finalRes = VK_SUCCESS;
15281 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
15283 *pAllocation = m_AllocationObjectAllocator.Allocate();
15284 (*pAllocation)->Ctor(VMA_FRAME_INDEX_LOST,
false);
15285 (*pAllocation)->InitLost();
15288 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
15290 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
15293 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15295 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15296 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
15298 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15299 if(res == VK_SUCCESS)
15301 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
15306 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
15311 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15314 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
15316 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
15322 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
15324 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
15326 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
15329 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
15331 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
15332 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15334 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15335 m_HeapSizeLimit[heapIndex] += size;
15339 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
15341 if(hAllocation->CanBecomeLost())
15343 return VK_ERROR_MEMORY_MAP_FAILED;
15346 switch(hAllocation->GetType())
15348 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15350 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15351 char *pBytes = VMA_NULL;
15352 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
15353 if(res == VK_SUCCESS)
15355 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
15356 hAllocation->BlockAllocMap();
15360 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15361 return hAllocation->DedicatedAllocMap(
this, ppData);
15364 return VK_ERROR_MEMORY_MAP_FAILED;
// Body of VmaAllocator_T::Unmap (signature elided by extraction): undoes one
// Map() call, dispatching on the allocation type.
15370 switch(hAllocation->GetType())
15372 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// Drop this allocation's map count first, then release one reference on
// the shared block mapping.
15374 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15375 hAllocation->BlockAllocUnmap();
15376 pBlock->Unmap(
this, 1);
15379 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15380 hAllocation->DedicatedAllocUnmap(
this);
15387 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
15389 VkResult res = VK_SUCCESS;
15390 switch(hAllocation->GetType())
15392 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15393 res = GetVulkanFunctions().vkBindBufferMemory(
15396 hAllocation->GetMemory(),
15399 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15401 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15402 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
15403 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
15412 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
15414 VkResult res = VK_SUCCESS;
15415 switch(hAllocation->GetType())
15417 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15418 res = GetVulkanFunctions().vkBindImageMemory(
15421 hAllocation->GetMemory(),
15424 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15426 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15427 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
15428 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
// Flushes or invalidates a sub-range of hAllocation's memory, but only when
// the memory type is non-coherent (coherent memory needs neither). The range
// is expanded to nonCoherentAtomSize boundaries as the Vulkan spec requires,
// and clamped to the allocation/block size.
15437 void VmaAllocator_T::FlushOrInvalidateAllocation(
15439 VkDeviceSize offset, VkDeviceSize size,
15440 VMA_CACHE_OPERATION op)
15442 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
// Skip entirely for empty ranges and for coherent memory types.
15443 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
15445 const VkDeviceSize allocationSize = hAllocation->GetSize();
15446 VMA_ASSERT(offset <= allocationSize);
15448 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
15450 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
15451 memRange.memory = hAllocation->GetMemory();
15453 switch(hAllocation->GetType())
15455 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
// Dedicated memory: align offset down, then size up, clamped to the
// allocation end.
15456 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15457 if(size == VK_WHOLE_SIZE)
15459 memRange.size = allocationSize - memRange.offset;
15463 VMA_ASSERT(offset + size <= allocationSize);
15464 memRange.size = VMA_MIN(
15465 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
15466 allocationSize - memRange.offset);
15470 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// Block suballocation: compute the aligned range relative to the
// allocation first...
15473 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15474 if(size == VK_WHOLE_SIZE)
15476 size = allocationSize - offset;
15480 VMA_ASSERT(offset + size <= allocationSize);
15482 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
// ...then shift by the allocation's offset inside the block (which is
// itself atom-aligned) and clamp to the block size.
15485 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
15486 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
15487 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
15488 memRange.offset += allocationOffset;
15489 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
// Dispatch the chosen cache operation on the computed range.
15500 case VMA_CACHE_FLUSH:
15501 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
15503 case VMA_CACHE_INVALIDATE:
15504 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
15513 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
15515 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
15517 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
15519 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15520 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
15521 VMA_ASSERT(pDedicatedAllocations);
15522 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
15523 VMA_ASSERT(success);
15526 VkDeviceMemory hMemory = allocation->GetMemory();
15538 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
15540 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
15543 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
15545 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
15546 !hAllocation->CanBecomeLost() &&
15547 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15549 void* pData = VMA_NULL;
15550 VkResult res = Map(hAllocation, &pData);
15551 if(res == VK_SUCCESS)
15553 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
15554 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
15555 Unmap(hAllocation);
15559 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
// Writes the detailed JSON map of the allocator's state: dedicated
// allocations per memory type, the default block vectors, and all custom
// pools. Sections are opened lazily so empty ones are omitted.
// NOTE(review): matching json.End*()/EndObject() calls elided by extraction.
15564 #if VMA_STATS_STRING_ENABLED 15566 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
// "DedicatedAllocations" section: one "Type N" array per non-empty list.
15568 bool dedicatedAllocationsStarted =
false;
15569 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15571 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15572 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
15573 VMA_ASSERT(pDedicatedAllocVector);
15574 if(pDedicatedAllocVector->empty() ==
false)
// Open the section only when the first non-empty list is found.
15576 if(dedicatedAllocationsStarted ==
false)
15578 dedicatedAllocationsStarted =
true;
15579 json.WriteString(
"DedicatedAllocations");
15580 json.BeginObject();
15583 json.BeginString(
"Type ");
15584 json.ContinueString(memTypeIndex);
15589 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
15591 json.BeginObject(
true);
15593 hAlloc->PrintParameters(json);
15600 if(dedicatedAllocationsStarted)
// "DefaultPools" section: one entry per non-empty default block vector.
15606 bool allocationsStarted =
false;
15607 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15609 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
15611 if(allocationsStarted ==
false)
15613 allocationsStarted =
true;
15614 json.WriteString(
"DefaultPools");
15615 json.BeginObject();
15618 json.BeginString(
"Type ");
15619 json.ContinueString(memTypeIndex);
15622 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
15625 if(allocationsStarted)
// "Pools" section: custom pools keyed by their numeric id, under the pools
// read lock.
15633 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15634 const size_t poolCount = m_Pools.size();
15637 json.WriteString(
"Pools");
15638 json.BeginObject();
15639 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
15641 json.BeginString();
15642 json.ContinueString(m_Pools[poolIndex]->GetId());
15645 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
// Body of vmaCreateAllocator (signature and object construction elided by
// extraction): validates arguments and initializes the newly created
// allocator from the user's create info.
15652 #endif // #if VMA_STATS_STRING_ENABLED 15661 VMA_ASSERT(pCreateInfo && pAllocator);
15662 VMA_DEBUG_LOG(
"vmaCreateAllocator");
15664 return (*pAllocator)->Init(pCreateInfo);
// Body of vmaDestroyAllocator (header elided by extraction). Null handle is
// a no-op, matching Vulkan's destroy conventions.
15670 if(allocator != VK_NULL_HANDLE)
15672 VMA_DEBUG_LOG(
"vmaDestroyAllocator")
// Copy the callbacks to a local first: the allocator object (which owns
// them) is about to be destroyed by vma_delete.
15673 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
15674 vma_delete(&allocationCallbacks, allocator);
// Body of vmaGetPhysicalDeviceProperties: returns a pointer to the
// allocator's cached VkPhysicalDeviceProperties (no Vulkan call).
15680 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
15682 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
15683 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
// Body of vmaGetMemoryProperties: returns a pointer to the allocator's
// cached VkPhysicalDeviceMemoryProperties (no Vulkan call).
15688 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
15690 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
15691 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
// Body of vmaGetMemoryTypeProperties: copies the property flags of one
// memory type out of the cached memory properties. Index is range-checked
// by assertion only.
15696 uint32_t memoryTypeIndex,
15697 VkMemoryPropertyFlags* pFlags)
15699 VMA_ASSERT(allocator && pFlags);
15700 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
15701 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
// Body of vmaSetCurrentFrameIndex: forwards the frame index to the
// allocator. VMA_FRAME_INDEX_LOST is reserved and rejected by assertion.
15706 uint32_t frameIndex)
15708 VMA_ASSERT(allocator);
15709 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
15711 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15713 allocator->SetCurrentFrameIndex(frameIndex);
// Body of vmaCalculateStats (header elided by extraction): thin validated
// wrapper over VmaAllocator_T::CalculateStats.
15720 VMA_ASSERT(allocator && pStats);
15721 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15722 allocator->CalculateStats(pStats);
// Builds a JSON statistics string: overall totals, then one object per
// memory heap (size, flags, stats, and its memory types with their property
// flags and stats), optionally followed by the allocator's detailed map.
// The result is returned in *ppStatsString as a NUL-terminated buffer that
// the caller must release with vmaFreeStatsString.
// NOTE(review): matching json.End*() calls and the VmaStats declaration were
// elided by extraction.
15725 #if VMA_STATS_STRING_ENABLED 15729 char** ppStatsString,
15730 VkBool32 detailedMap)
15732 VMA_ASSERT(allocator && ppStatsString);
15733 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15735 VmaStringBuilder sb(allocator);
15737 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
15738 json.BeginObject();
15741 allocator->CalculateStats(&stats);
// Overall totals.
15743 json.WriteString(
"Total");
15744 VmaPrintStatInfo(json, stats.
total);
// One object per memory heap.
15746 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
15748 json.BeginString(
"Heap ");
15749 json.ContinueString(heapIndex);
15751 json.BeginObject();
15753 json.WriteString(
"Size");
15754 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
15756 json.WriteString(
"Flags");
15757 json.BeginArray(
true);
15758 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
15760 json.WriteString(
"DEVICE_LOCAL");
15766 json.WriteString(
"Stats");
15767 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
// Nested: every memory type belonging to this heap.
15770 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
15772 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
15774 json.BeginString(
"Type ");
15775 json.ContinueString(typeIndex);
15778 json.BeginObject();
// Memory property flags rendered as an array of flag names.
15780 json.WriteString(
"Flags");
15781 json.BeginArray(
true);
15782 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
15783 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
15785 json.WriteString(
"DEVICE_LOCAL");
15787 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15789 json.WriteString(
"HOST_VISIBLE");
15791 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
15793 json.WriteString(
"HOST_COHERENT");
15795 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
15797 json.WriteString(
"HOST_CACHED");
15799 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
15801 json.WriteString(
"LAZILY_ALLOCATED");
15807 json.WriteString(
"Stats");
15808 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
// Optional detailed per-allocation map.
15817 if(detailedMap == VK_TRUE)
15819 allocator->PrintDetailedMap(json);
// Copy the builder's buffer into a caller-owned, NUL-terminated array.
15825 const size_t len = sb.GetLength();
15826 char*
const pChars = vma_new_array(allocator,
char, len + 1);
15829 memcpy(pChars, sb.GetData(), len);
15831 pChars[len] =
'\0';
15832 *ppStatsString = pChars;
15837 char* pStatsString)
15839 if(pStatsString != VMA_NULL)
15841 VMA_ASSERT(allocator);
15842 size_t len = strlen(pStatsString);
15843 vma_delete_array(allocator, pStatsString, len + 1);
15847 #endif // #if VMA_STATS_STRING_ENABLED 15854 uint32_t memoryTypeBits,
15856 uint32_t* pMemoryTypeIndex)
15858 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15859 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15860 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15867 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
15868 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
15873 preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15877 switch(pAllocationCreateInfo->
usage)
15882 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15884 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15888 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
15891 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15892 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15894 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15898 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15899 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
15905 *pMemoryTypeIndex = UINT32_MAX;
15906 uint32_t minCost = UINT32_MAX;
15907 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
15908 memTypeIndex < allocator->GetMemoryTypeCount();
15909 ++memTypeIndex, memTypeBit <<= 1)
15912 if((memTypeBit & memoryTypeBits) != 0)
15914 const VkMemoryPropertyFlags currFlags =
15915 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
15917 if((requiredFlags & ~currFlags) == 0)
15920 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
15922 if(currCost < minCost)
15924 *pMemoryTypeIndex = memTypeIndex;
15929 minCost = currCost;
15934 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
15939 const VkBufferCreateInfo* pBufferCreateInfo,
15941 uint32_t* pMemoryTypeIndex)
15943 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15944 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
15945 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15946 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15948 const VkDevice hDev = allocator->m_hDevice;
15949 VkBuffer hBuffer = VK_NULL_HANDLE;
15950 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
15951 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
15952 if(res == VK_SUCCESS)
15954 VkMemoryRequirements memReq = {};
15955 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
15956 hDev, hBuffer, &memReq);
15960 memReq.memoryTypeBits,
15961 pAllocationCreateInfo,
15964 allocator->GetVulkanFunctions().vkDestroyBuffer(
15965 hDev, hBuffer, allocator->GetAllocationCallbacks());
15972 const VkImageCreateInfo* pImageCreateInfo,
15974 uint32_t* pMemoryTypeIndex)
15976 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15977 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
15978 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15979 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15981 const VkDevice hDev = allocator->m_hDevice;
15982 VkImage hImage = VK_NULL_HANDLE;
15983 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
15984 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
15985 if(res == VK_SUCCESS)
15987 VkMemoryRequirements memReq = {};
15988 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
15989 hDev, hImage, &memReq);
15993 memReq.memoryTypeBits,
15994 pAllocationCreateInfo,
15997 allocator->GetVulkanFunctions().vkDestroyImage(
15998 hDev, hImage, allocator->GetAllocationCallbacks());
16008 VMA_ASSERT(allocator && pCreateInfo && pPool);
16010 VMA_DEBUG_LOG(
"vmaCreatePool");
16012 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16014 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
16016 #if VMA_RECORDING_ENABLED 16017 if(allocator->GetRecorder() != VMA_NULL)
16019 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
16030 VMA_ASSERT(allocator);
16032 if(pool == VK_NULL_HANDLE)
16037 VMA_DEBUG_LOG(
"vmaDestroyPool");
16039 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16041 #if VMA_RECORDING_ENABLED 16042 if(allocator->GetRecorder() != VMA_NULL)
16044 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
16048 allocator->DestroyPool(pool);
16056 VMA_ASSERT(allocator && pool && pPoolStats);
16058 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16060 allocator->GetPoolStats(pool, pPoolStats);
16066 size_t* pLostAllocationCount)
16068 VMA_ASSERT(allocator && pool);
16070 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16072 #if VMA_RECORDING_ENABLED 16073 if(allocator->GetRecorder() != VMA_NULL)
16075 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
16079 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
16084 VMA_ASSERT(allocator && pool);
16086 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16088 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
16090 return allocator->CheckPoolCorruption(pool);
16095 const VkMemoryRequirements* pVkMemoryRequirements,
16100 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
16102 VMA_DEBUG_LOG(
"vmaAllocateMemory");
16104 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16106 VkResult result = allocator->AllocateMemory(
16107 *pVkMemoryRequirements,
16113 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16117 #if VMA_RECORDING_ENABLED 16118 if(allocator->GetRecorder() != VMA_NULL)
16120 allocator->GetRecorder()->RecordAllocateMemory(
16121 allocator->GetCurrentFrameIndex(),
16122 *pVkMemoryRequirements,
16128 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
16130 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16138 const VkMemoryRequirements* pVkMemoryRequirements,
16140 size_t allocationCount,
16144 if(allocationCount == 0)
16149 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
16151 VMA_DEBUG_LOG(
"vmaAllocateMemoryPages");
16153 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16155 VkResult result = allocator->AllocateMemory(
16156 *pVkMemoryRequirements,
16162 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16166 #if VMA_RECORDING_ENABLED 16167 if(allocator->GetRecorder() != VMA_NULL)
16169 allocator->GetRecorder()->RecordAllocateMemoryPages(
16170 allocator->GetCurrentFrameIndex(),
16171 *pVkMemoryRequirements,
16173 (uint64_t)allocationCount,
16178 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
16180 for(
size_t i = 0; i < allocationCount; ++i)
16182 allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
16196 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16198 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
16200 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16202 VkMemoryRequirements vkMemReq = {};
16203 bool requiresDedicatedAllocation =
false;
16204 bool prefersDedicatedAllocation =
false;
16205 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
16206 requiresDedicatedAllocation,
16207 prefersDedicatedAllocation);
16209 VkResult result = allocator->AllocateMemory(
16211 requiresDedicatedAllocation,
16212 prefersDedicatedAllocation,
16216 VMA_SUBALLOCATION_TYPE_BUFFER,
16220 #if VMA_RECORDING_ENABLED 16221 if(allocator->GetRecorder() != VMA_NULL)
16223 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
16224 allocator->GetCurrentFrameIndex(),
16226 requiresDedicatedAllocation,
16227 prefersDedicatedAllocation,
16233 if(pAllocationInfo && result == VK_SUCCESS)
16235 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16248 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16250 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
16252 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16254 VkMemoryRequirements vkMemReq = {};
16255 bool requiresDedicatedAllocation =
false;
16256 bool prefersDedicatedAllocation =
false;
16257 allocator->GetImageMemoryRequirements(image, vkMemReq,
16258 requiresDedicatedAllocation, prefersDedicatedAllocation);
16260 VkResult result = allocator->AllocateMemory(
16262 requiresDedicatedAllocation,
16263 prefersDedicatedAllocation,
16267 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
16271 #if VMA_RECORDING_ENABLED 16272 if(allocator->GetRecorder() != VMA_NULL)
16274 allocator->GetRecorder()->RecordAllocateMemoryForImage(
16275 allocator->GetCurrentFrameIndex(),
16277 requiresDedicatedAllocation,
16278 prefersDedicatedAllocation,
16284 if(pAllocationInfo && result == VK_SUCCESS)
16286 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16296 VMA_ASSERT(allocator);
16298 if(allocation == VK_NULL_HANDLE)
16303 VMA_DEBUG_LOG(
"vmaFreeMemory");
16305 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16307 #if VMA_RECORDING_ENABLED 16308 if(allocator->GetRecorder() != VMA_NULL)
16310 allocator->GetRecorder()->RecordFreeMemory(
16311 allocator->GetCurrentFrameIndex(),
16316 allocator->FreeMemory(
16323 size_t allocationCount,
16326 if(allocationCount == 0)
16331 VMA_ASSERT(allocator);
16333 VMA_DEBUG_LOG(
"vmaFreeMemoryPages");
16335 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16337 #if VMA_RECORDING_ENABLED 16338 if(allocator->GetRecorder() != VMA_NULL)
16340 allocator->GetRecorder()->RecordFreeMemoryPages(
16341 allocator->GetCurrentFrameIndex(),
16342 (uint64_t)allocationCount,
16347 allocator->FreeMemory(allocationCount, pAllocations);
16353 VkDeviceSize newSize)
16355 VMA_ASSERT(allocator && allocation);
16357 VMA_DEBUG_LOG(
"vmaResizeAllocation");
16359 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16361 #if VMA_RECORDING_ENABLED 16362 if(allocator->GetRecorder() != VMA_NULL)
16364 allocator->GetRecorder()->RecordResizeAllocation(
16365 allocator->GetCurrentFrameIndex(),
16371 return allocator->ResizeAllocation(allocation, newSize);
16379 VMA_ASSERT(allocator && allocation && pAllocationInfo);
16381 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16383 #if VMA_RECORDING_ENABLED 16384 if(allocator->GetRecorder() != VMA_NULL)
16386 allocator->GetRecorder()->RecordGetAllocationInfo(
16387 allocator->GetCurrentFrameIndex(),
16392 allocator->GetAllocationInfo(allocation, pAllocationInfo);
16399 VMA_ASSERT(allocator && allocation);
16401 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16403 #if VMA_RECORDING_ENABLED 16404 if(allocator->GetRecorder() != VMA_NULL)
16406 allocator->GetRecorder()->RecordTouchAllocation(
16407 allocator->GetCurrentFrameIndex(),
16412 return allocator->TouchAllocation(allocation);
16420 VMA_ASSERT(allocator && allocation);
16422 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16424 allocation->SetUserData(allocator, pUserData);
16426 #if VMA_RECORDING_ENABLED 16427 if(allocator->GetRecorder() != VMA_NULL)
16429 allocator->GetRecorder()->RecordSetAllocationUserData(
16430 allocator->GetCurrentFrameIndex(),
16441 VMA_ASSERT(allocator && pAllocation);
16443 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
16445 allocator->CreateLostAllocation(pAllocation);
16447 #if VMA_RECORDING_ENABLED 16448 if(allocator->GetRecorder() != VMA_NULL)
16450 allocator->GetRecorder()->RecordCreateLostAllocation(
16451 allocator->GetCurrentFrameIndex(),
16462 VMA_ASSERT(allocator && allocation && ppData);
16464 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16466 VkResult res = allocator->Map(allocation, ppData);
16468 #if VMA_RECORDING_ENABLED 16469 if(allocator->GetRecorder() != VMA_NULL)
16471 allocator->GetRecorder()->RecordMapMemory(
16472 allocator->GetCurrentFrameIndex(),
16484 VMA_ASSERT(allocator && allocation);
16486 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16488 #if VMA_RECORDING_ENABLED 16489 if(allocator->GetRecorder() != VMA_NULL)
16491 allocator->GetRecorder()->RecordUnmapMemory(
16492 allocator->GetCurrentFrameIndex(),
16497 allocator->Unmap(allocation);
16502 VMA_ASSERT(allocator && allocation);
16504 VMA_DEBUG_LOG(
"vmaFlushAllocation");
16506 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16508 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
16510 #if VMA_RECORDING_ENABLED 16511 if(allocator->GetRecorder() != VMA_NULL)
16513 allocator->GetRecorder()->RecordFlushAllocation(
16514 allocator->GetCurrentFrameIndex(),
16515 allocation, offset, size);
16522 VMA_ASSERT(allocator && allocation);
16524 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
16526 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16528 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
16530 #if VMA_RECORDING_ENABLED 16531 if(allocator->GetRecorder() != VMA_NULL)
16533 allocator->GetRecorder()->RecordInvalidateAllocation(
16534 allocator->GetCurrentFrameIndex(),
16535 allocation, offset, size);
16542 VMA_ASSERT(allocator);
16544 VMA_DEBUG_LOG(
"vmaCheckCorruption");
16546 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16548 return allocator->CheckCorruption(memoryTypeBits);
16554 size_t allocationCount,
16555 VkBool32* pAllocationsChanged,
16565 if(pDefragmentationInfo != VMA_NULL)
16579 if(res == VK_NOT_READY)
16592 VMA_ASSERT(allocator && pInfo && pContext);
16603 VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->
poolCount, pInfo->
pPools));
16605 VMA_DEBUG_LOG(
"vmaDefragmentationBegin");
16607 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16609 VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);
16611 #if VMA_RECORDING_ENABLED 16612 if(allocator->GetRecorder() != VMA_NULL)
16614 allocator->GetRecorder()->RecordDefragmentationBegin(
16615 allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
16626 VMA_ASSERT(allocator);
16628 VMA_DEBUG_LOG(
"vmaDefragmentationEnd");
16630 if(context != VK_NULL_HANDLE)
16632 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16634 #if VMA_RECORDING_ENABLED 16635 if(allocator->GetRecorder() != VMA_NULL)
16637 allocator->GetRecorder()->RecordDefragmentationEnd(
16638 allocator->GetCurrentFrameIndex(), context);
16642 return allocator->DefragmentationEnd(context);
16655 VMA_ASSERT(allocator && allocation && buffer);
16657 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
16659 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16661 return allocator->BindBufferMemory(allocation, buffer);
16669 VMA_ASSERT(allocator && allocation && image);
16671 VMA_DEBUG_LOG(
"vmaBindImageMemory");
16673 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16675 return allocator->BindImageMemory(allocation, image);
16680 const VkBufferCreateInfo* pBufferCreateInfo,
16686 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
16688 if(pBufferCreateInfo->size == 0)
16690 return VK_ERROR_VALIDATION_FAILED_EXT;
16693 VMA_DEBUG_LOG(
"vmaCreateBuffer");
16695 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16697 *pBuffer = VK_NULL_HANDLE;
16698 *pAllocation = VK_NULL_HANDLE;
16701 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
16702 allocator->m_hDevice,
16704 allocator->GetAllocationCallbacks(),
16709 VkMemoryRequirements vkMemReq = {};
16710 bool requiresDedicatedAllocation =
false;
16711 bool prefersDedicatedAllocation =
false;
16712 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
16713 requiresDedicatedAllocation, prefersDedicatedAllocation);
16717 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
16719 VMA_ASSERT(vkMemReq.alignment %
16720 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
16722 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
16724 VMA_ASSERT(vkMemReq.alignment %
16725 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
16727 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
16729 VMA_ASSERT(vkMemReq.alignment %
16730 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
16734 res = allocator->AllocateMemory(
16736 requiresDedicatedAllocation,
16737 prefersDedicatedAllocation,
16740 *pAllocationCreateInfo,
16741 VMA_SUBALLOCATION_TYPE_BUFFER,
16745 #if VMA_RECORDING_ENABLED 16746 if(allocator->GetRecorder() != VMA_NULL)
16748 allocator->GetRecorder()->RecordCreateBuffer(
16749 allocator->GetCurrentFrameIndex(),
16750 *pBufferCreateInfo,
16751 *pAllocationCreateInfo,
16761 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
16766 #if VMA_STATS_STRING_ENABLED 16767 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
16769 if(pAllocationInfo != VMA_NULL)
16771 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16776 allocator->FreeMemory(
16779 *pAllocation = VK_NULL_HANDLE;
16780 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16781 *pBuffer = VK_NULL_HANDLE;
16784 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16785 *pBuffer = VK_NULL_HANDLE;
16796 VMA_ASSERT(allocator);
16798 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
16803 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
16805 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16807 #if VMA_RECORDING_ENABLED 16808 if(allocator->GetRecorder() != VMA_NULL)
16810 allocator->GetRecorder()->RecordDestroyBuffer(
16811 allocator->GetCurrentFrameIndex(),
16816 if(buffer != VK_NULL_HANDLE)
16818 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
16821 if(allocation != VK_NULL_HANDLE)
16823 allocator->FreeMemory(
16831 const VkImageCreateInfo* pImageCreateInfo,
16837 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
16839 if(pImageCreateInfo->extent.width == 0 ||
16840 pImageCreateInfo->extent.height == 0 ||
16841 pImageCreateInfo->extent.depth == 0 ||
16842 pImageCreateInfo->mipLevels == 0 ||
16843 pImageCreateInfo->arrayLayers == 0)
16845 return VK_ERROR_VALIDATION_FAILED_EXT;
16848 VMA_DEBUG_LOG(
"vmaCreateImage");
16850 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16852 *pImage = VK_NULL_HANDLE;
16853 *pAllocation = VK_NULL_HANDLE;
16856 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
16857 allocator->m_hDevice,
16859 allocator->GetAllocationCallbacks(),
16863 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
16864 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
16865 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
16868 VkMemoryRequirements vkMemReq = {};
16869 bool requiresDedicatedAllocation =
false;
16870 bool prefersDedicatedAllocation =
false;
16871 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
16872 requiresDedicatedAllocation, prefersDedicatedAllocation);
16874 res = allocator->AllocateMemory(
16876 requiresDedicatedAllocation,
16877 prefersDedicatedAllocation,
16880 *pAllocationCreateInfo,
16885 #if VMA_RECORDING_ENABLED 16886 if(allocator->GetRecorder() != VMA_NULL)
16888 allocator->GetRecorder()->RecordCreateImage(
16889 allocator->GetCurrentFrameIndex(),
16891 *pAllocationCreateInfo,
16901 res = allocator->BindImageMemory(*pAllocation, *pImage);
16906 #if VMA_STATS_STRING_ENABLED 16907 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
16909 if(pAllocationInfo != VMA_NULL)
16911 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16916 allocator->FreeMemory(
16919 *pAllocation = VK_NULL_HANDLE;
16920 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
16921 *pImage = VK_NULL_HANDLE;
16924 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
16925 *pImage = VK_NULL_HANDLE;
16936 VMA_ASSERT(allocator);
16938 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
16943 VMA_DEBUG_LOG(
"vmaDestroyImage");
16945 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16947 #if VMA_RECORDING_ENABLED 16948 if(allocator->GetRecorder() != VMA_NULL)
16950 allocator->GetRecorder()->RecordDestroyImage(
16951 allocator->GetCurrentFrameIndex(),
16956 if(image != VK_NULL_HANDLE)
16958 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
16960 if(allocation != VK_NULL_HANDLE)
16962 allocator->FreeMemory(
16968 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1753
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:2053
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1811
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side,...
Definition: vk_mem_alloc.h:2856
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Deprecated. Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
Definition: vk_mem_alloc.h:1785
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2384
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1765
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:2015
Definition: vk_mem_alloc.h:2119
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:2809
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1757
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2484
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1808
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2892
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2273
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1652
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2365
Definition: vk_mem_alloc.h:2090
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:2812
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1746
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2172
Definition: vk_mem_alloc.h:2042
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1820
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2301
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1.
Definition: vk_mem_alloc.h:1874
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1805
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2046
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1946
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1762
VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:2846
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1945
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2896
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(),...
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1837
VmaStatInfo total
Definition: vk_mem_alloc.h:1955
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2904
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2156
Definition: vk_mem_alloc.h:2114
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2887
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1763
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1688
Represents main object of this library initialized.
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1814
void vmaFreeMemoryPages(VmaAllocator allocator, size_t allocationCount, VmaAllocation *pAllocations)
Frees memory and destroys multiple allocations.
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2315
Definition: vk_mem_alloc.h:2309
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:1769
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1881
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2494
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1758
VkResult vmaDefragmentationBegin(VmaAllocator allocator, const VmaDefragmentationInfo2 *pInfo, VmaDefragmentationStats *pStats, VmaDefragmentationContext *pContext)
Begins defragmentation process.
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1783
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2193
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2335
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost.
Definition: vk_mem_alloc.h:2371
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1744
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2318
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2861
VmaMemoryUsage
Definition: vk_mem_alloc.h:1993
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:2821
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2882
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects.
Definition: vk_mem_alloc.h:2900
Definition: vk_mem_alloc.h:2032
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2180
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1761
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
VkResult vmaDefragmentationEnd(VmaAllocator allocator, VmaDefragmentationContext context)
Ends defragmentation process.
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1951
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1694
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:2800
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
Definition: vk_mem_alloc.h:2798
Definition: vk_mem_alloc.h:2140
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:2827
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1715
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1787
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1720
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2902
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2167
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2381
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1754
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1934
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes....
Definition: vk_mem_alloc.h:2330
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1707
Definition: vk_mem_alloc.h:2305
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:2097
Opaque object that represents a started defragmentation process.
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1947
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1711
Definition: vk_mem_alloc.h:2130
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2321
Definition: vk_mem_alloc.h:2041
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1760
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2162
Definition: vk_mem_alloc.h:2153
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1937
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1756
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2343
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1823
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2374
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2151
VkDeviceSize maxCpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2851
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2186
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1862
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1953
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
Definition: vk_mem_alloc.h:2077
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1946
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1767
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1793
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use.
Definition: vk_mem_alloc.h:2797
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:2875
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1709
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1766
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2357
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1759
Definition: vk_mem_alloc.h:2108
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of an Allocator to be created.
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1801
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2508
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB....
Definition: vk_mem_alloc.h:1817
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1946
VkResult vmaAllocateMemoryPages(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, size_t allocationCount, VmaAllocation *pAllocations, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation for multiple allocation objects at once.
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1943
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2362
Parameters for defragmentation.
Definition: vk_mem_alloc.h:2806
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions.
Definition: vk_mem_alloc.h:2123
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory,...
Definition: vk_mem_alloc.h:2489
Definition: vk_mem_alloc.h:2137
Definition: vk_mem_alloc.h:2149
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2898
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1752
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1941
Definition: vk_mem_alloc.h:1998
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2311
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1790
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1939
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1764
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1768
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2064
Definition: vk_mem_alloc.h:2144
Definition: vk_mem_alloc.h:2025
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2503
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1742
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1755
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2290
VkResult vmaResizeAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize newSize)
Tries to resize an allocation in place, if there is enough free memory after it.
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2470
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:2134
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2255
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1947
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame.
Definition: vk_mem_alloc.h:2103
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1777
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1954
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2368
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1947
struct VmaDefragmentationInfo2 VmaDefragmentationInfo2
Parameters for defragmentation.
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side,...
Definition: vk_mem_alloc.h:2866
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2475
uint32_t poolCount
Number of pools in the pPools array.
Definition: vk_mem_alloc.h:2830