23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1648 #ifndef VMA_RECORDING_ENABLED 1650 #define VMA_RECORDING_ENABLED 1 1652 #define VMA_RECORDING_ENABLED 0 1657 #define NOMINMAX // For windows.h 1661 #include <vulkan/vulkan.h> 1664 #if VMA_RECORDING_ENABLED 1665 #include <windows.h> 1668 #if !defined(VMA_DEDICATED_ALLOCATION) 1669 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1670 #define VMA_DEDICATED_ALLOCATION 1 1672 #define VMA_DEDICATED_ALLOCATION 0 1690 uint32_t memoryType,
1691 VkDeviceMemory memory,
1696 uint32_t memoryType,
1697 VkDeviceMemory memory,
1770 #if VMA_DEDICATED_ALLOCATION 1771 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1772 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1899 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1907 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1917 uint32_t memoryTypeIndex,
1918 VkMemoryPropertyFlags* pFlags);
1930 uint32_t frameIndex);
1963 #ifndef VMA_STATS_STRING_ENABLED 1964 #define VMA_STATS_STRING_ENABLED 1 1967 #if VMA_STATS_STRING_ENABLED 1974 char** ppStatsString,
1975 VkBool32 detailedMap);
1979 char* pStatsString);
1981 #endif // #if VMA_STATS_STRING_ENABLED 2214 uint32_t memoryTypeBits,
2216 uint32_t* pMemoryTypeIndex);
2232 const VkBufferCreateInfo* pBufferCreateInfo,
2234 uint32_t* pMemoryTypeIndex);
2250 const VkImageCreateInfo* pImageCreateInfo,
2252 uint32_t* pMemoryTypeIndex);
2424 size_t* pLostAllocationCount);
2523 const VkMemoryRequirements* pVkMemoryRequirements,
2549 const VkMemoryRequirements* pVkMemoryRequirements,
2551 size_t allocationCount,
2596 size_t allocationCount,
2622 VkDeviceSize newSize);
2999 size_t allocationCount,
3000 VkBool32* pAllocationsChanged,
3066 const VkBufferCreateInfo* pBufferCreateInfo,
3091 const VkImageCreateInfo* pImageCreateInfo,
3117 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 3120 #if defined(__cplusplus) && defined(__INTELLISENSE__) 3121 #define VMA_IMPLEMENTATION 3124 #ifdef VMA_IMPLEMENTATION 3125 #undef VMA_IMPLEMENTATION 3147 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 3148 #define VMA_STATIC_VULKAN_FUNCTIONS 1 3160 #if VMA_USE_STL_CONTAINERS 3161 #define VMA_USE_STL_VECTOR 1 3162 #define VMA_USE_STL_UNORDERED_MAP 1 3163 #define VMA_USE_STL_LIST 1 3166 #ifndef VMA_USE_STL_SHARED_MUTEX 3168 #if __cplusplus >= 201703L 3169 #define VMA_USE_STL_SHARED_MUTEX 1 3173 #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L 3174 #define VMA_USE_STL_SHARED_MUTEX 1 3176 #define VMA_USE_STL_SHARED_MUTEX 0 3180 #if VMA_USE_STL_VECTOR 3184 #if VMA_USE_STL_UNORDERED_MAP 3185 #include <unordered_map> 3188 #if VMA_USE_STL_LIST 3197 #include <algorithm> 3203 #define VMA_NULL nullptr 3206 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16) 3208 void *aligned_alloc(
size_t alignment,
size_t size)
3211 if(alignment <
sizeof(
void*))
3213 alignment =
sizeof(
void*);
3216 return memalign(alignment, size);
3218 #elif defined(__APPLE__) || defined(__ANDROID__) 3220 void *aligned_alloc(
size_t alignment,
size_t size)
3223 if(alignment <
sizeof(
void*))
3225 alignment =
sizeof(
void*);
3229 if(posix_memalign(&pointer, alignment, size) == 0)
// Normal assert to check for programmer errors, especially in Debug configuration.
#ifndef VMA_ASSERT
    #ifdef _DEBUG
        #define VMA_ASSERT(expr) assert(expr)
    #else
        #define VMA_ASSERT(expr)
    #endif
#endif

// Assert that will be called very often, like inside data structures e.g. operator[].
// Making it non-empty can make program slow.
#ifndef VMA_HEAVY_ASSERT
    #ifdef _DEBUG
        #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr)
    #else
        #define VMA_HEAVY_ASSERT(expr)
    #endif
#endif

#ifndef VMA_ALIGN_OF
    #define VMA_ALIGN_OF(type) (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
    #if defined(_WIN32)
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment)))
    #else
        #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) ))
    #endif
#endif

#ifndef VMA_SYSTEM_FREE
    #if defined(_WIN32)
        #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr)
    #else
        #define VMA_SYSTEM_FREE(ptr) free(ptr)
    #endif
#endif

#ifndef VMA_MIN
    #define VMA_MIN(v1, v2) (std::min((v1), (v2)))
#endif

#ifndef VMA_MAX
    #define VMA_MAX(v1, v2) (std::max((v1), (v2)))
#endif

#ifndef VMA_SWAP
    #define VMA_SWAP(v1, v2) std::swap((v1), (v2))
#endif

#ifndef VMA_SORT
    #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp)
#endif

#ifndef VMA_DEBUG_LOG
    #define VMA_DEBUG_LOG(format, ...)
    /*
    // Define this macro to a printf-like function to enable debug logging.
    */
#endif

// Helpers that print numbers/pointers into caller-provided buffers for the
// JSON stats-string writer.
#if VMA_STATS_STRING_ENABLED

static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
{
    snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
}
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}

#endif // #if VMA_STATS_STRING_ENABLED
// Plain mutex wrapper; overridable by defining VMA_MUTEX before inclusion.
#ifndef VMA_MUTEX
    class VmaMutex
    {
    public:
        void Lock() { m_Mutex.lock(); }
        void Unlock() { m_Mutex.unlock(); }
    private:
        std::mutex m_Mutex;
    };
    #define VMA_MUTEX VmaMutex
#endif

// Read-write mutex: "read" locks are shared, "write" locks are exclusive.
#ifndef VMA_RW_MUTEX
    #if VMA_USE_STL_SHARED_MUTEX
        // Use std::shared_mutex from C++17.
        #include <shared_mutex>
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.lock_shared(); }
            void UnlockRead() { m_Mutex.unlock_shared(); }
            void LockWrite() { m_Mutex.lock(); }
            void UnlockWrite() { m_Mutex.unlock(); }
        private:
            std::shared_mutex m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600
        // Use SRWLOCK from WinAPI (available since Windows Vista).
        class VmaRWMutex
        {
        public:
            VmaRWMutex() { InitializeSRWLock(&m_Lock); }
            void LockRead() { AcquireSRWLockShared(&m_Lock); }
            void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
            void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
            void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
        private:
            SRWLOCK m_Lock;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #else
        // Less efficient fallback: a normal mutex, exclusive for readers too.
        class VmaRWMutex
        {
        public:
            void LockRead() { m_Mutex.Lock(); }
            void UnlockRead() { m_Mutex.Unlock(); }
            void LockWrite() { m_Mutex.Lock(); }
            void UnlockWrite() { m_Mutex.Unlock(); }
        private:
            VMA_MUTEX m_Mutex;
        };
        #define VMA_RW_MUTEX VmaRWMutex
    #endif // #if VMA_USE_STL_SHARED_MUTEX
#endif // #ifndef VMA_RW_MUTEX
3469 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
3471 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
3472 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
3478 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
3480 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
3481 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (population count), using the
// classic parallel bit-summing trick. The final `return c;` was missing.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
// Aligns given value up to nearest multiply of align value. For example:
// VmaAlignUp(11, 8) = 16. Use types like uint32_t, uint64_t as T.
// Note: align must be a positive value; val + align - 1 must not overflow T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}
// Aligns given value down to nearest multiply of align value. For example:
// VmaAlignDown(11, 8) = 8. Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}
// Division with mathematical rounding to nearest number (round half up for
// non-negative integers).
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
// Returns true if given number is a power of two.
// T must be unsigned integer number or signed integer but always nonnegative.
// Note: for 0 this function returns true — callers must not rely on 0 being
// rejected.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}
// Returns smallest power of 2 greater or equal to v (32-bit version).
// Body restored: bit-smearing fills all bits below the highest set bit,
// then the increment carries into the next power of two.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v++;
    return v;
}
// Returns smallest power of 2 greater or equal to v (64-bit version).
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v++;
    return v;
}
// Returns largest power of 2 less or equal to v (32-bit version).
// Bit-smearing fills all bits below the highest set bit; XOR with the
// half-shifted value leaves only that highest bit.
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
// Returns largest power of 2 less or equal to v (64-bit version).
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}
#ifndef VMA_NULL
   // Value used as null pointer. Define it to e.g.: nullptr, NULL, 0, (void*)0.
   #define VMA_NULL   nullptr
#endif

// Returns true if a string is null or zero-length.
static inline bool VmaStrIsEmpty(const char* pStr)
{
    return pStr == VMA_NULL || *pStr == '\0';
}
// Maps a pool algorithm flag to a human-readable name for stats-string output.
// NOTE(review): only the declaration is visible in this chunk — the switch body
// is elsewhere; confirm against the full file before editing.
3580 #if VMA_STATS_STRING_ENABLED 3582 static const char* VmaAlgorithmToStr(uint32_t algorithm)
3598 #endif // #if VMA_STATS_STRING_ENABLED 3602 template<
typename Iterator,
typename Compare>
3603 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
3605 Iterator centerValue = end; --centerValue;
3606 Iterator insertIndex = beg;
3607 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
3609 if(cmp(*memTypeIndex, *centerValue))
3611 if(insertIndex != memTypeIndex)
3613 VMA_SWAP(*memTypeIndex, *insertIndex);
3618 if(insertIndex != centerValue)
3620 VMA_SWAP(*insertIndex, *centerValue);
3625 template<
typename Iterator,
typename Compare>
3626 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
3630 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
3631 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
3632 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
3636 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 3638 #endif // #ifndef VMA_SORT 3647 static inline bool VmaBlocksOnSamePage(
3648 VkDeviceSize resourceAOffset,
3649 VkDeviceSize resourceASize,
3650 VkDeviceSize resourceBOffset,
3651 VkDeviceSize pageSize)
3653 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3654 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3655 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3656 VkDeviceSize resourceBStart = resourceBOffset;
3657 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3658 return resourceAEndPage == resourceBStartPage;
// Type of a suballocation within a memory block; used to decide whether two
// neighboring suballocations conflict w.r.t. bufferImageGranularity.
// Restored the missing terminating `};`.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,          // Unused region.
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,       // Allocated, resource kind unknown.
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // Image with unknown tiling.
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
3678 static inline bool VmaIsBufferImageGranularityConflict(
3679 VmaSuballocationType suballocType1,
3680 VmaSuballocationType suballocType2)
3682 if(suballocType1 > suballocType2)
3684 VMA_SWAP(suballocType1, suballocType2);
3687 switch(suballocType1)
3689 case VMA_SUBALLOCATION_TYPE_FREE:
3691 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3693 case VMA_SUBALLOCATION_TYPE_BUFFER:
3695 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3696 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3697 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3699 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3700 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3701 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3702 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3704 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3705 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3713 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3715 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3716 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3717 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3719 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3723 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3725 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3726 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3727 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3729 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3740 VMA_CLASS_NO_COPY(VmaMutexLock)
3742 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex =
true) :
3743 m_pMutex(useMutex ? &mutex : VMA_NULL)
3744 {
if(m_pMutex) { m_pMutex->Lock(); } }
3746 {
if(m_pMutex) { m_pMutex->Unlock(); } }
3748 VMA_MUTEX* m_pMutex;
3752 struct VmaMutexLockRead
3754 VMA_CLASS_NO_COPY(VmaMutexLockRead)
3756 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
3757 m_pMutex(useMutex ? &mutex : VMA_NULL)
3758 {
if(m_pMutex) { m_pMutex->LockRead(); } }
3759 ~VmaMutexLockRead() {
if(m_pMutex) { m_pMutex->UnlockRead(); } }
3761 VMA_RW_MUTEX* m_pMutex;
3765 struct VmaMutexLockWrite
3767 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
3769 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex) :
3770 m_pMutex(useMutex ? &mutex : VMA_NULL)
3771 {
if(m_pMutex) { m_pMutex->LockWrite(); } }
3772 ~VmaMutexLockWrite() {
if(m_pMutex) { m_pMutex->UnlockWrite(); } }
3774 VMA_RW_MUTEX* m_pMutex;
3777 #if VMA_DEBUG_GLOBAL_MUTEX 3778 static VMA_MUTEX gDebugGlobalMutex;
3779 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3781 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3785 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Performs binary search on a sorted [beg, end) range and returns iterator to
the first element that is not less than key (like std::lower_bound).
Cmp should return true if first argument is less than second argument.
Returned value is the found element, if present in the collection, or place
where new element with value (key) should be inserted.
The while-loop, else branch and return were missing and are restored.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpLess cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
#ifndef VMA_NULL
   // Value used as null pointer.
   #define VMA_NULL   nullptr
#endif

// Returns true if all pointers in the array are not-null and unique.
// Warning! O(count^2) — intended for VMA_HEAVY_ASSERT-style validation only.
// The inner comparison and both return statements were missing.
template<typename T>
static bool VmaValidatePointerArray(uint32_t count, const T* arr)
{
    for(uint32_t i = 0; i < count; ++i)
    {
        const T iPtr = arr[i];
        if(iPtr == VMA_NULL)
        {
            return false;
        }
        for(uint32_t j = i + 1; j < count; ++j)
        {
            if(arr[j] == iPtr)
            {
                return false;
            }
        }
    }
    return true;
}
3844 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3846 if((pAllocationCallbacks != VMA_NULL) &&
3847 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3849 return (*pAllocationCallbacks->pfnAllocation)(
3850 pAllocationCallbacks->pUserData,
3853 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3857 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3861 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3863 if((pAllocationCallbacks != VMA_NULL) &&
3864 (pAllocationCallbacks->pfnFree != VMA_NULL))
3866 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3870 VMA_SYSTEM_FREE(ptr);
3874 template<
typename T>
3875 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3877 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3880 template<
typename T>
3881 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3883 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
3886 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3888 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3890 template<
typename T>
3891 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
3894 VmaFree(pAllocationCallbacks, ptr);
3897 template<
typename T>
3898 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
3902 for(
size_t i = count; i--; )
3906 VmaFree(pAllocationCallbacks, ptr);
3911 template<
typename T>
3912 class VmaStlAllocator
3915 const VkAllocationCallbacks*
const m_pCallbacks;
3916 typedef T value_type;
3918 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
3919 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
3921 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3922 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
3924 template<
typename U>
3925 bool operator==(
const VmaStlAllocator<U>& rhs)
const 3927 return m_pCallbacks == rhs.m_pCallbacks;
3929 template<
typename U>
3930 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 3932 return m_pCallbacks != rhs.m_pCallbacks;
3935 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
3938 #if VMA_USE_STL_VECTOR 3940 #define VmaVector std::vector 3942 template<
typename T,
typename allocatorT>
3943 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
3945 vec.insert(vec.begin() + index, item);
// Removes the element at position `index` from a std::vector.
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
3954 #else // #if VMA_USE_STL_VECTOR 3959 template<
typename T,
typename AllocatorT>
3963 typedef T value_type;
3965 VmaVector(
const AllocatorT& allocator) :
3966 m_Allocator(allocator),
3973 VmaVector(
size_t count,
const AllocatorT& allocator) :
3974 m_Allocator(allocator),
3975 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
3981 VmaVector(
const VmaVector<T, AllocatorT>& src) :
3982 m_Allocator(src.m_Allocator),
3983 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
3984 m_Count(src.m_Count),
3985 m_Capacity(src.m_Count)
3989 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
3995 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3998 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
4002 resize(rhs.m_Count);
4005 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
4011 bool empty()
const {
return m_Count == 0; }
4012 size_t size()
const {
return m_Count; }
4013 T* data() {
return m_pArray; }
4014 const T* data()
const {
return m_pArray; }
4016 T& operator[](
size_t index)
4018 VMA_HEAVY_ASSERT(index < m_Count);
4019 return m_pArray[index];
4021 const T& operator[](
size_t index)
const 4023 VMA_HEAVY_ASSERT(index < m_Count);
4024 return m_pArray[index];
4029 VMA_HEAVY_ASSERT(m_Count > 0);
4032 const T& front()
const 4034 VMA_HEAVY_ASSERT(m_Count > 0);
4039 VMA_HEAVY_ASSERT(m_Count > 0);
4040 return m_pArray[m_Count - 1];
4042 const T& back()
const 4044 VMA_HEAVY_ASSERT(m_Count > 0);
4045 return m_pArray[m_Count - 1];
4048 void reserve(
size_t newCapacity,
bool freeMemory =
false)
4050 newCapacity = VMA_MAX(newCapacity, m_Count);
4052 if((newCapacity < m_Capacity) && !freeMemory)
4054 newCapacity = m_Capacity;
4057 if(newCapacity != m_Capacity)
4059 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
4062 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
4064 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4065 m_Capacity = newCapacity;
4066 m_pArray = newArray;
4070 void resize(
size_t newCount,
bool freeMemory =
false)
4072 size_t newCapacity = m_Capacity;
4073 if(newCount > m_Capacity)
4075 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
4079 newCapacity = newCount;
4082 if(newCapacity != m_Capacity)
4084 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
4085 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
4086 if(elementsToCopy != 0)
4088 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
4090 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4091 m_Capacity = newCapacity;
4092 m_pArray = newArray;
4098 void clear(
bool freeMemory =
false)
4100 resize(0, freeMemory);
4103 void insert(
size_t index,
const T& src)
4105 VMA_HEAVY_ASSERT(index <= m_Count);
4106 const size_t oldCount = size();
4107 resize(oldCount + 1);
4108 if(index < oldCount)
4110 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
4112 m_pArray[index] = src;
4115 void remove(
size_t index)
4117 VMA_HEAVY_ASSERT(index < m_Count);
4118 const size_t oldCount = size();
4119 if(index < oldCount - 1)
4121 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
4123 resize(oldCount - 1);
4126 void push_back(
const T& src)
4128 const size_t newIndex = size();
4129 resize(newIndex + 1);
4130 m_pArray[newIndex] = src;
4135 VMA_HEAVY_ASSERT(m_Count > 0);
4139 void push_front(
const T& src)
4146 VMA_HEAVY_ASSERT(m_Count > 0);
4150 typedef T* iterator;
4152 iterator begin() {
return m_pArray; }
4153 iterator end() {
return m_pArray + m_Count; }
4156 AllocatorT m_Allocator;
4162 template<
typename T,
typename allocatorT>
4163 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
4165 vec.insert(index, item);
4168 template<
typename T,
typename allocatorT>
4169 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
4174 #endif // #if VMA_USE_STL_VECTOR 4176 template<
typename CmpLess,
typename VectorT>
4177 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
4179 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4181 vector.data() + vector.size(),
4183 CmpLess()) - vector.data();
4184 VmaVectorInsert(vector, indexToInsert, value);
4185 return indexToInsert;
// Removes the element equal to `value` from a sorted vector, if present.
// Returns true if an element was removed. The return statements were dropped.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    // Equality expressed via the comparator: neither element is less than the other.
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
4206 template<
typename CmpLess,
typename IterT,
typename KeyT>
4207 IterT VmaVectorFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value)
4210 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
4211 beg, end, value, comparator);
4213 (!comparator(*it, value) && !comparator(value, *it)))
4228 template<
typename T>
4229 class VmaPoolAllocator
4231 VMA_CLASS_NO_COPY(VmaPoolAllocator)
4233 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
4234 ~VmaPoolAllocator();
4242 uint32_t NextFreeIndex;
4250 uint32_t FirstFreeIndex;
4253 const VkAllocationCallbacks* m_pAllocationCallbacks;
4254 const uint32_t m_FirstBlockCapacity;
4255 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
4257 ItemBlock& CreateNewBlock();
4260 template<
typename T>
4261 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
4262 m_pAllocationCallbacks(pAllocationCallbacks),
4263 m_FirstBlockCapacity(firstBlockCapacity),
4264 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
4266 VMA_ASSERT(m_FirstBlockCapacity > 1);
4269 template<
typename T>
4270 VmaPoolAllocator<T>::~VmaPoolAllocator()
4275 template<
typename T>
4276 void VmaPoolAllocator<T>::Clear()
4278 for(
size_t i = m_ItemBlocks.size(); i--; )
4279 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
4280 m_ItemBlocks.clear();
4283 template<
typename T>
4284 T* VmaPoolAllocator<T>::Alloc()
4286 for(
size_t i = m_ItemBlocks.size(); i--; )
4288 ItemBlock& block = m_ItemBlocks[i];
4290 if(block.FirstFreeIndex != UINT32_MAX)
4292 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
4293 block.FirstFreeIndex = pItem->NextFreeIndex;
4294 return &pItem->Value;
4299 ItemBlock& newBlock = CreateNewBlock();
4300 Item*
const pItem = &newBlock.pItems[0];
4301 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
4302 return &pItem->Value;
4305 template<
typename T>
4306 void VmaPoolAllocator<T>::Free(T* ptr)
4309 for(
size_t i = m_ItemBlocks.size(); i--; )
4311 ItemBlock& block = m_ItemBlocks[i];
4315 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
4318 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
4320 const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
4321 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
4322 block.FirstFreeIndex = index;
4326 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
4329 template<
typename T>
4330 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
4332 const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
4333 m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
4335 const ItemBlock newBlock = {
4336 vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
4340 m_ItemBlocks.push_back(newBlock);
4343 for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
4344 newBlock.pItems[i].NextFreeIndex = i + 1;
4345 newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
4346 return m_ItemBlocks.back();
4352 #if VMA_USE_STL_LIST 4354 #define VmaList std::list 4356 #else // #if VMA_USE_STL_LIST 4358 template<
typename T>
4367 template<
typename T>
4370 VMA_CLASS_NO_COPY(VmaRawList)
4372 typedef VmaListItem<T> ItemType;
4374 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
4378 size_t GetCount()
const {
return m_Count; }
4379 bool IsEmpty()
const {
return m_Count == 0; }
4381 ItemType* Front() {
return m_pFront; }
4382 const ItemType* Front()
const {
return m_pFront; }
4383 ItemType* Back() {
return m_pBack; }
4384 const ItemType* Back()
const {
return m_pBack; }
4386 ItemType* PushBack();
4387 ItemType* PushFront();
4388 ItemType* PushBack(
const T& value);
4389 ItemType* PushFront(
const T& value);
4394 ItemType* InsertBefore(ItemType* pItem);
4396 ItemType* InsertAfter(ItemType* pItem);
4398 ItemType* InsertBefore(ItemType* pItem,
const T& value);
4399 ItemType* InsertAfter(ItemType* pItem,
const T& value);
4401 void Remove(ItemType* pItem);
4404 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
4405 VmaPoolAllocator<ItemType> m_ItemAllocator;
4411 template<
typename T>
4412 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
4413 m_pAllocationCallbacks(pAllocationCallbacks),
4414 m_ItemAllocator(pAllocationCallbacks, 128),
4421 template<
typename T>
4422 VmaRawList<T>::~VmaRawList()
4428 template<
typename T>
4429 void VmaRawList<T>::Clear()
4431 if(IsEmpty() ==
false)
4433 ItemType* pItem = m_pBack;
4434 while(pItem != VMA_NULL)
4436 ItemType*
const pPrevItem = pItem->pPrev;
4437 m_ItemAllocator.Free(pItem);
4440 m_pFront = VMA_NULL;
4446 template<
typename T>
4447 VmaListItem<T>* VmaRawList<T>::PushBack()
4449 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4450 pNewItem->pNext = VMA_NULL;
4453 pNewItem->pPrev = VMA_NULL;
4454 m_pFront = pNewItem;
4460 pNewItem->pPrev = m_pBack;
4461 m_pBack->pNext = pNewItem;
4468 template<
typename T>
4469 VmaListItem<T>* VmaRawList<T>::PushFront()
4471 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4472 pNewItem->pPrev = VMA_NULL;
4475 pNewItem->pNext = VMA_NULL;
4476 m_pFront = pNewItem;
4482 pNewItem->pNext = m_pFront;
4483 m_pFront->pPrev = pNewItem;
4484 m_pFront = pNewItem;
4490 template<
typename T>
4491 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
4493 ItemType*
const pNewItem = PushBack();
4494 pNewItem->Value = value;
4498 template<
typename T>
4499 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
4501 ItemType*
const pNewItem = PushFront();
4502 pNewItem->Value = value;
4506 template<
typename T>
4507 void VmaRawList<T>::PopBack()
4509 VMA_HEAVY_ASSERT(m_Count > 0);
4510 ItemType*
const pBackItem = m_pBack;
4511 ItemType*
const pPrevItem = pBackItem->pPrev;
4512 if(pPrevItem != VMA_NULL)
4514 pPrevItem->pNext = VMA_NULL;
4516 m_pBack = pPrevItem;
4517 m_ItemAllocator.Free(pBackItem);
4521 template<
typename T>
4522 void VmaRawList<T>::PopFront()
4524 VMA_HEAVY_ASSERT(m_Count > 0);
4525 ItemType*
const pFrontItem = m_pFront;
4526 ItemType*
const pNextItem = pFrontItem->pNext;
4527 if(pNextItem != VMA_NULL)
4529 pNextItem->pPrev = VMA_NULL;
4531 m_pFront = pNextItem;
4532 m_ItemAllocator.Free(pFrontItem);
4536 template<
typename T>
4537 void VmaRawList<T>::Remove(ItemType* pItem)
4539 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4540 VMA_HEAVY_ASSERT(m_Count > 0);
4542 if(pItem->pPrev != VMA_NULL)
4544 pItem->pPrev->pNext = pItem->pNext;
4548 VMA_HEAVY_ASSERT(m_pFront == pItem);
4549 m_pFront = pItem->pNext;
4552 if(pItem->pNext != VMA_NULL)
4554 pItem->pNext->pPrev = pItem->pPrev;
4558 VMA_HEAVY_ASSERT(m_pBack == pItem);
4559 m_pBack = pItem->pPrev;
4562 m_ItemAllocator.Free(pItem);
4566 template<
typename T>
4567 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4569 if(pItem != VMA_NULL)
4571 ItemType*
const prevItem = pItem->pPrev;
4572 ItemType*
const newItem = m_ItemAllocator.Alloc();
4573 newItem->pPrev = prevItem;
4574 newItem->pNext = pItem;
4575 pItem->pPrev = newItem;
4576 if(prevItem != VMA_NULL)
4578 prevItem->pNext = newItem;
4582 VMA_HEAVY_ASSERT(m_pFront == pItem);
4592 template<
typename T>
4593 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4595 if(pItem != VMA_NULL)
4597 ItemType*
const nextItem = pItem->pNext;
4598 ItemType*
const newItem = m_ItemAllocator.Alloc();
4599 newItem->pNext = nextItem;
4600 newItem->pPrev = pItem;
4601 pItem->pNext = newItem;
4602 if(nextItem != VMA_NULL)
4604 nextItem->pPrev = newItem;
4608 VMA_HEAVY_ASSERT(m_pBack == pItem);
4618 template<
typename T>
4619 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4621 ItemType*
const newItem = InsertBefore(pItem);
4622 newItem->Value = value;
4626 template<
typename T>
4627 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4629 ItemType*
const newItem = InsertAfter(pItem);
4630 newItem->Value = value;
4634 template<
typename T,
typename AllocatorT>
4637 VMA_CLASS_NO_COPY(VmaList)
4648 T& operator*()
const 4650 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4651 return m_pItem->Value;
4653 T* operator->()
const 4655 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4656 return &m_pItem->Value;
4659 iterator& operator++()
4661 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4662 m_pItem = m_pItem->pNext;
4665 iterator& operator--()
4667 if(m_pItem != VMA_NULL)
4669 m_pItem = m_pItem->pPrev;
4673 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4674 m_pItem = m_pList->Back();
4679 iterator operator++(
int)
4681 iterator result = *
this;
4685 iterator operator--(
int)
4687 iterator result = *
this;
4692 bool operator==(
const iterator& rhs)
const 4694 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4695 return m_pItem == rhs.m_pItem;
4697 bool operator!=(
const iterator& rhs)
const 4699 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4700 return m_pItem != rhs.m_pItem;
4704 VmaRawList<T>* m_pList;
4705 VmaListItem<T>* m_pItem;
4707 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4713 friend class VmaList<T, AllocatorT>;
4716 class const_iterator
4725 const_iterator(
const iterator& src) :
4726 m_pList(src.m_pList),
4727 m_pItem(src.m_pItem)
4731 const T& operator*()
const 4733 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4734 return m_pItem->Value;
4736 const T* operator->()
const 4738 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4739 return &m_pItem->Value;
4742 const_iterator& operator++()
4744 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4745 m_pItem = m_pItem->pNext;
4748 const_iterator& operator--()
4750 if(m_pItem != VMA_NULL)
4752 m_pItem = m_pItem->pPrev;
4756 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4757 m_pItem = m_pList->Back();
4762 const_iterator operator++(
int)
4764 const_iterator result = *
this;
4768 const_iterator operator--(
int)
4770 const_iterator result = *
this;
4775 bool operator==(
const const_iterator& rhs)
const 4777 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4778 return m_pItem == rhs.m_pItem;
4780 bool operator!=(
const const_iterator& rhs)
const 4782 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4783 return m_pItem != rhs.m_pItem;
4787 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4793 const VmaRawList<T>* m_pList;
4794 const VmaListItem<T>* m_pItem;
4796 friend class VmaList<T, AllocatorT>;
// VmaList public interface: a thin STL-like facade over VmaRawList.
// All operations forward directly; end() is represented by a VMA_NULL item.
4799 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4801 bool empty()
const {
return m_RawList.IsEmpty(); }
4802 size_t size()
const {
return m_RawList.GetCount(); }
4804 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4805 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4807 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4808 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4810 void clear() { m_RawList.Clear(); }
4811 void push_back(
const T& value) { m_RawList.PushBack(value); }
// erase/insert operate on the iterator's underlying list item directly.
4812 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4813 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
// Underlying intrusive doubly-linked list that owns the nodes.
4816 VmaRawList<T> m_RawList;
4819 #endif // #if VMA_USE_STL_LIST 4827 #if VMA_USE_STL_UNORDERED_MAP 4829 #define VmaPair std::pair 4831 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4832 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4834 #else // #if VMA_USE_STL_UNORDERED_MAP 4836 template<
// VmaPair<T1, T2>: minimal std::pair substitute used when
// VMA_USE_STL_UNORDERED_MAP is disabled.
// NOTE(review): the struct header and member declarations are not visible
// in this garbled chunk; only the constructors remain.
typename T1,
typename T2>
4842 VmaPair() : first(), second() { }
4843 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
// VmaMap<KeyT, ValueT>: associative container backed by a VmaVector of
// key/value pairs kept sorted by key (see insert/find definitions below).
// Used only when VMA_USE_STL_UNORDERED_MAP is disabled.
// NOTE(review): the class header line is not visible in this garbled chunk.
4849 template<
typename KeyT,
typename ValueT>
4853 typedef VmaPair<KeyT, ValueT> PairType;
// Iterators are raw pointers into the contiguous vector storage.
4854 typedef PairType* iterator;
4856 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4858 iterator begin() {
return m_Vector.begin(); }
4859 iterator end() {
return m_Vector.end(); }
4861 void insert(
const PairType& pair);
4862 iterator find(
const KeyT& key);
4863 void erase(iterator it);
// Sorted storage; binary search keeps find() at O(log n).
4866 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4869 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4871 template<
typename FirstT,
typename SecondT>
4872 struct VmaPairFirstLess
4874 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4876 return lhs.first < rhs.first;
4878 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4880 return lhs.first < rhsFirst;
4884 template<
typename KeyT,
typename ValueT>
4885 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4887 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4889 m_Vector.data() + m_Vector.size(),
4891 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4892 VmaVectorInsert(m_Vector, indexToInsert, pair);
4895 template<
typename KeyT,
typename ValueT>
4896 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4898 PairType* it = VmaBinaryFindFirstNotLess(
4900 m_Vector.data() + m_Vector.size(),
4902 VmaPairFirstLess<KeyT, ValueT>());
4903 if((it != m_Vector.end()) && (it->first == key))
4909 return m_Vector.end();
4913 template<
typename KeyT,
typename ValueT>
4914 void VmaMap<KeyT, ValueT>::erase(iterator it)
4916 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4919 #endif // #if VMA_USE_STL_UNORDERED_MAP 4925 class VmaDeviceMemoryBlock;
// CPU cache maintenance direction for mapped memory ranges — presumably
// maps to vkFlushMappedMemoryRanges / vkInvalidateMappedMemoryRanges
// (TODO confirm against the *CacheOperation implementation, not visible here).
4927 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
// Internal representation of a single allocation (what a VmaAllocation
// handle points to). An allocation is either a suballocation inside a
// VmaDeviceMemoryBlock (BLOCK) or its own VkDeviceMemory (DEDICATED);
// the two cases share storage via the union-like members at the bottom.
4929 struct VmaAllocation_T
// High bit of m_MapCount marks a persistently mapped allocation.
4932 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4936 FLAG_USER_DATA_STRING = 0x01,
4940 enum ALLOCATION_TYPE
4942 ALLOCATION_TYPE_NONE,
4943 ALLOCATION_TYPE_BLOCK,
4944 ALLOCATION_TYPE_DEDICATED,
// Two-phase init (Ctor instead of a constructor) — resets to NONE state;
// InitBlockAllocation/InitDedicatedAllocation complete the setup.
4952 void Ctor(uint32_t currentFrameIndex,
bool userDataString)
4956 m_pUserData = VMA_NULL;
4957 m_LastUseFrameIndex = currentFrameIndex;
4958 m_Type = (uint8_t)ALLOCATION_TYPE_NONE;
4959 m_SuballocationType = (uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN;
4961 m_Flags = userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0;
4963 #if VMA_STATS_STRING_ENABLED 4964 m_CreationFrameIndex = currentFrameIndex;
4965 m_BufferImageUsage = 0;
// Destructor-time invariants: unmapped (ignoring the persistent-map flag)
// and user data already released via SetUserData(..., VMA_NULL).
4971 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4974 VMA_ASSERT(m_pUserData == VMA_NULL);
// Bind this allocation to a suballocation inside an existing memory block.
4977 void InitBlockAllocation(
4978 VmaDeviceMemoryBlock* block,
4979 VkDeviceSize offset,
4980 VkDeviceSize alignment,
4982 VmaSuballocationType suballocationType,
4986 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4987 VMA_ASSERT(block != VMA_NULL);
4988 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4989 m_Alignment = alignment;
4991 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4992 m_SuballocationType = (uint8_t)suballocationType;
4993 m_BlockAllocation.m_Block = block;
4994 m_BlockAllocation.m_Offset = offset;
4995 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
// "Lost" sentinel initialization: a BLOCK allocation with no block, used
// for lost-allocation placeholders (frame index must already be LOST).
5000 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5001 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
5002 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
5003 m_BlockAllocation.m_Block = VMA_NULL;
5004 m_BlockAllocation.m_Offset = 0;
5005 m_BlockAllocation.m_CanBecomeLost =
true;
// Re-point an existing block allocation (used by defragmentation).
5008 void ChangeBlockAllocation(
5010 VmaDeviceMemoryBlock* block,
5011 VkDeviceSize offset);
5013 void ChangeSize(VkDeviceSize newSize);
5014 void ChangeOffset(VkDeviceSize newOffset);
// Bind this allocation to its own dedicated VkDeviceMemory object.
5017 void InitDedicatedAllocation(
5018 uint32_t memoryTypeIndex,
5019 VkDeviceMemory hMemory,
5020 VmaSuballocationType suballocationType,
5024 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5025 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
5026 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
5029 m_SuballocationType = (uint8_t)suballocationType;
5030 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
5031 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
5032 m_DedicatedAllocation.m_hMemory = hMemory;
5033 m_DedicatedAllocation.m_pMappedData = pMappedData;
// Simple accessors.
5036 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
5037 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
5038 VkDeviceSize GetSize()
const {
return m_Size; }
5039 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
5040 void* GetUserData()
const {
return m_pUserData; }
5041 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
5042 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
// Only valid for BLOCK allocations.
5044 VmaDeviceMemoryBlock* GetBlock()
const 5046 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5047 return m_BlockAllocation.m_Block;
5049 VkDeviceSize GetOffset()
const;
5050 VkDeviceMemory GetMemory()
const;
5051 uint32_t GetMemoryTypeIndex()
const;
5052 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
5053 void* GetMappedData()
const;
5054 bool CanBecomeLost()
const;
// Lost-allocation bookkeeping uses an atomic frame index so concurrent
// touch/make-lost races are resolved with compare-exchange.
5056 uint32_t GetLastUseFrameIndex()
const 5058 return m_LastUseFrameIndex.load();
5060 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
5062 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
5072 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5074 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
5076 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
// Map/unmap helpers split by allocation type.
5087 void BlockAllocMap();
5088 void BlockAllocUnmap();
5089 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
// Extra bookkeeping for JSON statistics dumps.
5092 #if VMA_STATS_STRING_ENABLED 5093 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
5094 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
5096 void InitBufferImageUsage(uint32_t bufferImageUsage)
5098 VMA_ASSERT(m_BufferImageUsage == 0);
5099 m_BufferImageUsage = bufferImageUsage;
5102 void PrintParameters(
class VmaJsonWriter& json)
const;
// Data members.
5106 VkDeviceSize m_Alignment;
5107 VkDeviceSize m_Size;
5109 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
5111 uint8_t m_SuballocationType;
// Per-type payloads; which one is active depends on m_Type.
5118 struct BlockAllocation
5120 VmaDeviceMemoryBlock* m_Block;
5121 VkDeviceSize m_Offset;
5122 bool m_CanBecomeLost;
5126 struct DedicatedAllocation
5128 uint32_t m_MemoryTypeIndex;
5129 VkDeviceMemory m_hMemory;
5130 void* m_pMappedData;
5136 BlockAllocation m_BlockAllocation;
5138 DedicatedAllocation m_DedicatedAllocation;
5141 #if VMA_STATS_STRING_ENABLED 5142 uint32_t m_CreationFrameIndex;
5143 uint32_t m_BufferImageUsage;
// One region inside a device memory block: either used by an allocation
// or free, as described by `type`. NOTE(review): some member lines
// (e.g. a size field) are not visible in this garbled chunk.
5153 struct VmaSuballocation
5155 VkDeviceSize offset;
5158 VmaSuballocationType type;
5162 struct VmaSuballocationOffsetLess
5164 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5166 return lhs.offset < rhs.offset;
5169 struct VmaSuballocationOffsetGreater
5171 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5173 return lhs.offset > rhs.offset;
// Doubly-linked list of suballocations describing a block's layout.
5177 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Fixed cost (in bytes) charged per allocation that would have to be made
// lost to satisfy a request; used by VmaAllocationRequest::CalcCost below.
5180 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// NOTE(review): the enumerator list of this enum class is not visible in
// this garbled chunk.
5182 enum class VmaAllocationRequestType
5204 struct VmaAllocationRequest
5206 VkDeviceSize offset;
5207 VkDeviceSize sumFreeSize;
5208 VkDeviceSize sumItemSize;
5209 VmaSuballocationList::iterator item;
5210 size_t itemsToMakeLostCount;
5212 VmaAllocationRequestType type;
5214 VkDeviceSize CalcCost()
const 5216 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// Abstract base for per-block bookkeeping of used/free regions. Concrete
// strategies below: Generic (free-list), Linear (ring/stack), Buddy.
5224 class VmaBlockMetadata
5228 virtual ~VmaBlockMetadata() { }
5229 virtual void Init(VkDeviceSize size) { m_Size = size; }
// Validation and statistics queries.
5232 virtual bool Validate()
const = 0;
5233 VkDeviceSize GetSize()
const {
return m_Size; }
5234 virtual size_t GetAllocationCount()
const = 0;
5235 virtual VkDeviceSize GetSumFreeSize()
const = 0;
5236 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
5238 virtual bool IsEmpty()
const = 0;
5240 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
5242 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
5244 #if VMA_STATS_STRING_ENABLED 5245 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
// Core placement query: find where an allocation of the given
// size/alignment/type could go; fills *pAllocationRequest on success.
5251 virtual bool CreateAllocationRequest(
5252 uint32_t currentFrameIndex,
5253 uint32_t frameInUseCount,
5254 VkDeviceSize bufferImageGranularity,
5255 VkDeviceSize allocSize,
5256 VkDeviceSize allocAlignment,
5258 VmaSuballocationType allocType,
5259 bool canMakeOtherLost,
5262 VmaAllocationRequest* pAllocationRequest) = 0;
// Lost-allocation support for blocks with canBecomeLost allocations.
5264 virtual bool MakeRequestedAllocationsLost(
5265 uint32_t currentFrameIndex,
5266 uint32_t frameInUseCount,
5267 VmaAllocationRequest* pAllocationRequest) = 0;
5269 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
5271 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
// Commit a previously computed request (Alloc) / release a region (Free*).
5275 const VmaAllocationRequest& request,
5276 VmaSuballocationType type,
5277 VkDeviceSize allocSize,
5282 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
// In-place grow/shrink; default says "unsupported".
5285 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize) {
return false; }
5288 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
// Shared helpers for the derived classes' JSON dumps.
5290 #if VMA_STATS_STRING_ENABLED 5291 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
5292 VkDeviceSize unusedBytes,
5293 size_t allocationCount,
5294 size_t unusedRangeCount)
const;
5295 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
5296 VkDeviceSize offset,
5298 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
5299 VkDeviceSize offset,
5300 VkDeviceSize size)
const;
5301 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
5305 VkDeviceSize m_Size;
5306 const VkAllocationCallbacks* m_pAllocationCallbacks;
// VMA_VALIDATE: assert-and-report helper used throughout Validate() methods.
// VmaBlockMetadata_Generic: the default strategy — a suballocation list
// plus a size-sorted index of free ranges for best-fit searches.
5309 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 5310 VMA_ASSERT(0 && "Validation failed: " #cond); \ 5314 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
5316 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
5319 virtual ~VmaBlockMetadata_Generic();
5320 virtual void Init(VkDeviceSize size);
5322 virtual bool Validate()
const;
// Count of used suballocations = all entries minus the free ones.
5323 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
5324 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5325 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5326 virtual bool IsEmpty()
const;
5328 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5329 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5331 #if VMA_STATS_STRING_ENABLED 5332 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// VmaBlockMetadata interface (see base class for semantics).
5335 virtual bool CreateAllocationRequest(
5336 uint32_t currentFrameIndex,
5337 uint32_t frameInUseCount,
5338 VkDeviceSize bufferImageGranularity,
5339 VkDeviceSize allocSize,
5340 VkDeviceSize allocAlignment,
5342 VmaSuballocationType allocType,
5343 bool canMakeOtherLost,
5345 VmaAllocationRequest* pAllocationRequest);
5347 virtual bool MakeRequestedAllocationsLost(
5348 uint32_t currentFrameIndex,
5349 uint32_t frameInUseCount,
5350 VmaAllocationRequest* pAllocationRequest);
5352 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5354 virtual VkResult CheckCorruption(
const void* pBlockData);
5357 const VmaAllocationRequest& request,
5358 VmaSuballocationType type,
5359 VkDeviceSize allocSize,
5363 virtual void FreeAtOffset(VkDeviceSize offset);
5365 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize);
5370 bool IsBufferImageGranularityConflictPossible(
5371 VkDeviceSize bufferImageGranularity,
5372 VmaSuballocationType& inOutPrevSuballocType)
const;
5375 friend class VmaDefragmentationAlgorithm_Generic;
5376 friend class VmaDefragmentationAlgorithm_Fast;
5378 uint32_t m_FreeCount;
5379 VkDeviceSize m_SumFreeSize;
5380 VmaSuballocationList m_Suballocations;
// Iterators to free suballocations, kept sorted by size for best-fit.
5383 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
5385 bool ValidateFreeSuballocationList()
const;
// Tests whether a request can be satisfied starting at suballocItem,
// accounting for bufferImageGranularity and lost-allocation candidates.
5389 bool CheckAllocation(
5390 uint32_t currentFrameIndex,
5391 uint32_t frameInUseCount,
5392 VkDeviceSize bufferImageGranularity,
5393 VkDeviceSize allocSize,
5394 VkDeviceSize allocAlignment,
5395 VmaSuballocationType allocType,
5396 VmaSuballocationList::const_iterator suballocItem,
5397 bool canMakeOtherLost,
5398 VkDeviceSize* pOffset,
5399 size_t* itemsToMakeLostCount,
5400 VkDeviceSize* pSumFreeSize,
5401 VkDeviceSize* pSumItemSize)
const;
// Free-list maintenance helpers.
5403 void MergeFreeWithNext(VmaSuballocationList::iterator item);
5407 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
5410 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
5413 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// Linear allocator strategy: two suballocation vectors that act as a
// double stack or ring buffer depending on m_2ndVectorMode.
5494 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
5496 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
5499 virtual ~VmaBlockMetadata_Linear();
5500 virtual void Init(VkDeviceSize size);
5502 virtual bool Validate()
const;
5503 virtual size_t GetAllocationCount()
const;
5504 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5505 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5506 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
5508 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5509 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5511 #if VMA_STATS_STRING_ENABLED 5512 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// VmaBlockMetadata interface (see base class for semantics).
5515 virtual bool CreateAllocationRequest(
5516 uint32_t currentFrameIndex,
5517 uint32_t frameInUseCount,
5518 VkDeviceSize bufferImageGranularity,
5519 VkDeviceSize allocSize,
5520 VkDeviceSize allocAlignment,
5522 VmaSuballocationType allocType,
5523 bool canMakeOtherLost,
5525 VmaAllocationRequest* pAllocationRequest);
5527 virtual bool MakeRequestedAllocationsLost(
5528 uint32_t currentFrameIndex,
5529 uint32_t frameInUseCount,
5530 VmaAllocationRequest* pAllocationRequest);
5532 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5534 virtual VkResult CheckCorruption(
const void* pBlockData);
5537 const VmaAllocationRequest& request,
5538 VmaSuballocationType type,
5539 VkDeviceSize allocSize,
5543 virtual void FreeAtOffset(VkDeviceSize offset);
5553 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
// How the 2nd vector is being used relative to the 1st.
5555 enum SECOND_VECTOR_MODE
5557 SECOND_VECTOR_EMPTY,
5562 SECOND_VECTOR_RING_BUFFER,
5568 SECOND_VECTOR_DOUBLE_STACK,
5571 VkDeviceSize m_SumFreeSize;
// The two backing vectors; which one is "1st" alternates via
// m_1stVectorIndex (they are swapped during compaction).
5572 SuballocationVectorType m_Suballocations0, m_Suballocations1;
5573 uint32_t m_1stVectorIndex;
5574 SECOND_VECTOR_MODE m_2ndVectorMode;
5576 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5577 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5578 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5579 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
// Counts of null (freed-in-place) items awaiting compaction.
5582 size_t m_1stNullItemsBeginCount;
5584 size_t m_1stNullItemsMiddleCount;
5586 size_t m_2ndNullItemsCount;
5588 bool ShouldCompact1st()
const;
5589 void CleanupAfterFree();
// Placement searches split by direction (lower = stack bottom / ring,
// upper = top of the double stack).
5591 bool CreateAllocationRequest_LowerAddress(
5592 uint32_t currentFrameIndex,
5593 uint32_t frameInUseCount,
5594 VkDeviceSize bufferImageGranularity,
5595 VkDeviceSize allocSize,
5596 VkDeviceSize allocAlignment,
5597 VmaSuballocationType allocType,
5598 bool canMakeOtherLost,
5600 VmaAllocationRequest* pAllocationRequest);
5601 bool CreateAllocationRequest_UpperAddress(
5602 uint32_t currentFrameIndex,
5603 uint32_t frameInUseCount,
5604 VkDeviceSize bufferImageGranularity,
5605 VkDeviceSize allocSize,
5606 VkDeviceSize allocAlignment,
5607 VmaSuballocationType allocType,
5608 bool canMakeOtherLost,
5610 VmaAllocationRequest* pAllocationRequest);
// Buddy allocator strategy: a binary tree of power-of-two nodes over the
// usable (power-of-two) portion of the block, with per-level free lists.
5624 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5626 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5629 virtual ~VmaBlockMetadata_Buddy();
5630 virtual void Init(VkDeviceSize size);
5632 virtual bool Validate()
const;
5633 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
// Reported free size includes the tail beyond the power-of-two usable size.
5634 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5635 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5636 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5638 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5639 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5641 #if VMA_STATS_STRING_ENABLED 5642 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// VmaBlockMetadata interface (see base class for semantics).
5645 virtual bool CreateAllocationRequest(
5646 uint32_t currentFrameIndex,
5647 uint32_t frameInUseCount,
5648 VkDeviceSize bufferImageGranularity,
5649 VkDeviceSize allocSize,
5650 VkDeviceSize allocAlignment,
5652 VmaSuballocationType allocType,
5653 bool canMakeOtherLost,
5655 VmaAllocationRequest* pAllocationRequest);
5657 virtual bool MakeRequestedAllocationsLost(
5658 uint32_t currentFrameIndex,
5659 uint32_t frameInUseCount,
5660 VmaAllocationRequest* pAllocationRequest);
5662 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Corruption detection is not supported by the buddy strategy.
5664 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
5667 const VmaAllocationRequest& request,
5668 VmaSuballocationType type,
5669 VkDeviceSize allocSize,
5672 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5673 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
5676 static const VkDeviceSize MIN_NODE_SIZE = 32;
5677 static const size_t MAX_LEVELS = 30;
// Accumulators checked against the stored counters during Validate().
5679 struct ValidationContext
5681 size_t calculatedAllocationCount;
5682 size_t calculatedFreeCount;
5683 VkDeviceSize calculatedSumFreeSize;
5685 ValidationContext() :
5686 calculatedAllocationCount(0),
5687 calculatedFreeCount(0),
5688 calculatedSumFreeSize(0) { }
5693 VkDeviceSize offset;
// m_UsableSize is the largest power of two <= block size; the remainder
// is unusable by the buddy tree (see GetUnusableSize()).
5723 VkDeviceSize m_UsableSize;
5724 uint32_t m_LevelCount;
5730 } m_FreeList[MAX_LEVELS];
5732 size_t m_AllocationCount;
5736 VkDeviceSize m_SumFreeSize;
5738 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5739 void DeleteNode(Node* node);
5740 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
5741 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
// Each level halves the node size: level 0 is the whole usable block.
5742 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
5744 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5745 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
// Per-level free-list maintenance.
5749 void AddToFreeListFront(uint32_t level, Node* node);
5753 void RemoveFromFreeList(uint32_t level, Node* node);
5755 #if VMA_STATS_STRING_ENABLED 5756 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
// Wraps a single VkDeviceMemory object and the metadata describing its
// suballocations. Mapping is reference-counted via m_MapCount.
5766 class VmaDeviceMemoryBlock
5768 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5770 VmaBlockMetadata* m_pMetadata;
// Destructor invariants: fully unmapped and memory already released
// (Destroy must have been called).
5774 ~VmaDeviceMemoryBlock()
5776 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5777 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// Two-phase init: takes ownership of an already-allocated VkDeviceMemory.
5784 uint32_t newMemoryTypeIndex,
5785 VkDeviceMemory newMemory,
5786 VkDeviceSize newSize,
5788 uint32_t algorithm);
5792 VmaPool GetParentPool()
const {
return m_hParentPool; }
5793 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5794 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5795 uint32_t GetId()
const {
return m_Id; }
5796 void* GetMappedData()
const {
return m_pMappedData; }
5799 bool Validate()
const;
// Reference-counted map: `count` is added to m_MapCount.
5804 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
// Corruption-detection margins written/checked around an allocation.
5807 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5808 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5810 VkResult BindBufferMemory(
5814 VkResult BindImageMemory(
5821 uint32_t m_MemoryTypeIndex;
5823 VkDeviceMemory m_hMemory;
5831 uint32_t m_MapCount;
5832 void* m_pMappedData;
// VmaPointerLess: raw-pointer ordering functor for sorted containers.
// NOTE(review): its return statement is not visible in this garbled chunk.
5835 struct VmaPointerLess
5837 bool operator()(
const void* lhs,
// VmaDefragmentationMove: one planned relocation — which block/offset an
// allocation moves from and to.
const void* rhs)
const 5843 struct VmaDefragmentationMove
5845 size_t srcBlockIndex;
5846 size_t dstBlockIndex;
5847 VkDeviceSize srcOffset;
5848 VkDeviceSize dstOffset;
5852 class VmaDefragmentationAlgorithm;
// A growable set of VmaDeviceMemoryBlock of one memory type — the backing
// store for either a custom pool or one of the allocator's default pools.
// Guarded by m_Mutex (read-write).
5860 struct VmaBlockVector
5862 VMA_CLASS_NO_COPY(VmaBlockVector)
5867 uint32_t memoryTypeIndex,
5868 VkDeviceSize preferredBlockSize,
5869 size_t minBlockCount,
5870 size_t maxBlockCount,
5871 VkDeviceSize bufferImageGranularity,
5872 uint32_t frameInUseCount,
5874 bool explicitBlockSize,
5875 uint32_t algorithm);
// Pre-creates m_MinBlockCount empty blocks.
5878 VkResult CreateMinBlocks();
5880 VmaPool GetParentPool()
const {
return m_hParentPool; }
5881 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5882 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5883 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5884 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5885 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5889 bool IsEmpty()
const {
return m_Blocks.empty(); }
5890 bool IsCorruptionDetectionEnabled()
const;
// Allocate `allocationCount` allocations (multi-page entry point).
5893 uint32_t currentFrameIndex,
5895 VkDeviceSize alignment,
5897 VmaSuballocationType suballocType,
5898 size_t allocationCount,
5907 #if VMA_STATS_STRING_ENABLED 5908 void PrintDetailedMap(
class VmaJsonWriter& json);
5911 void MakePoolAllocationsLost(
5912 uint32_t currentFrameIndex,
5913 size_t* pLostAllocationCount);
5914 VkResult CheckCorruption();
// Defragmentation driver: separate CPU/GPU move budgets; GPU moves are
// recorded into `commandBuffer`.
5918 class VmaBlockVectorDefragmentationContext* pCtx,
5920 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
5921 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
5922 VkCommandBuffer commandBuffer);
5923 void DefragmentationEnd(
5924 class VmaBlockVectorDefragmentationContext* pCtx,
5930 size_t GetBlockCount()
const {
return m_Blocks.size(); }
5931 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
5932 size_t CalcAllocationCount()
const;
5933 bool IsBufferImageGranularityConflictPossible()
const;
5936 friend class VmaDefragmentationAlgorithm_Generic;
// Immutable configuration captured at construction.
5940 const uint32_t m_MemoryTypeIndex;
5941 const VkDeviceSize m_PreferredBlockSize;
5942 const size_t m_MinBlockCount;
5943 const size_t m_MaxBlockCount;
5944 const VkDeviceSize m_BufferImageGranularity;
5945 const uint32_t m_FrameInUseCount;
5946 const bool m_IsCustomPool;
5947 const bool m_ExplicitBlockSize;
5948 const uint32_t m_Algorithm;
// True while one fully-empty block is being kept around for reuse.
5952 bool m_HasEmptyBlock;
5953 VMA_RW_MUTEX m_Mutex;
5955 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
5956 uint32_t m_NextBlockId;
5958 VkDeviceSize CalcMaxBlockSize()
const;
5961 void Remove(VmaDeviceMemoryBlock* pBlock);
// Keeps m_Blocks approximately sorted so emptier blocks are tried last.
5965 void IncrementallySortBlocks();
// Single-allocation step used by the multi-page entry point.
5967 VkResult AllocatePage(
5968 uint32_t currentFrameIndex,
5970 VkDeviceSize alignment,
5972 VmaSuballocationType suballocType,
5976 VkResult AllocateFromBlock(
5977 VmaDeviceMemoryBlock* pBlock,
5978 uint32_t currentFrameIndex,
5980 VkDeviceSize alignment,
5983 VmaSuballocationType suballocType,
5987 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// Apply planned defragmentation moves via CPU memcpy or GPU copies.
5990 void ApplyDefragmentationMovesCpu(
5991 class VmaBlockVectorDefragmentationContext* pDefragCtx,
5992 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
5994 void ApplyDefragmentationMovesGpu(
5995 class VmaBlockVectorDefragmentationContext* pDefragCtx,
5996 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
5997 VkCommandBuffer commandBuffer);
// VmaPool_T: a custom memory pool — owns one VmaBlockVector. The id is
// write-once (assert enforces SetId is called only while still 0).
// NOTE(review): the class header line is not visible in this garbled chunk.
6008 VMA_CLASS_NO_COPY(VmaPool_T)
6010 VmaBlockVector m_BlockVector;
6015 VkDeviceSize preferredBlockSize);
6018 uint32_t GetId()
const {
return m_Id; }
6019 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
// Abstract strategy for defragmenting one VmaBlockVector. Subclasses plan
// moves (Defragment) and report how much was moved.
6021 #if VMA_STATS_STRING_ENABLED 6036 class VmaDefragmentationAlgorithm
6038 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
6040 VmaDefragmentationAlgorithm(
6042 VmaBlockVector* pBlockVector,
6043 uint32_t currentFrameIndex) :
6044 m_hAllocator(hAllocator),
6045 m_pBlockVector(pBlockVector),
6046 m_CurrentFrameIndex(currentFrameIndex)
6049 virtual ~VmaDefragmentationAlgorithm()
// Register individual allocations, or everything in the block vector.
6053 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) = 0;
6054 virtual void AddAll() = 0;
// Plan moves into `moves`, bounded by byte and count budgets.
6056 virtual VkResult Defragment(
6057 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6058 VkDeviceSize maxBytesToMove,
6059 uint32_t maxAllocationsToMove) = 0;
6061 virtual VkDeviceSize GetBytesMoved()
const = 0;
6062 virtual uint32_t GetAllocationsMoved()
const = 0;
6066 VmaBlockVector*
const m_pBlockVector;
6067 const uint32_t m_CurrentFrameIndex;
// (allocation, optional changed-flag) pair tracked by the algorithms.
6069 struct AllocationInfo
6072 VkBool32* m_pChanged;
6075 m_hAllocation(VK_NULL_HANDLE),
6076 m_pChanged(VMA_NULL)
6080 m_hAllocation(hAlloc),
6081 m_pChanged(pChanged)
// General-purpose defragmentation: groups registered allocations per
// block, then iteratively moves them toward preferred destination blocks.
6087 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
6089 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
6091 VmaDefragmentationAlgorithm_Generic(
6093 VmaBlockVector* pBlockVector,
6094 uint32_t currentFrameIndex,
6095 bool overlappingMoveSupported);
6096 virtual ~VmaDefragmentationAlgorithm_Generic();
6098 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6099 virtual void AddAll() { m_AllAllocations =
true; }
6101 virtual VkResult Defragment(
6102 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6103 VkDeviceSize maxBytesToMove,
6104 uint32_t maxAllocationsToMove);
6106 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6107 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6110 uint32_t m_AllocationCount;
6111 bool m_AllAllocations;
6113 VkDeviceSize m_BytesMoved;
6114 uint32_t m_AllocationsMoved;
// Sort predicates used to pick which allocations to move first.
6116 struct AllocationInfoSizeGreater
6118 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 6120 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
6124 struct AllocationInfoOffsetGreater
6126 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 6128 return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
// Per-block bookkeeping: the block, its registered allocations, and
// whether it also holds allocations not registered for defragmentation.
6134 size_t m_OriginalBlockIndex;
6135 VmaDeviceMemoryBlock* m_pBlock;
6136 bool m_HasNonMovableAllocations;
6137 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
6139 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
6140 m_OriginalBlockIndex(SIZE_MAX),
6142 m_HasNonMovableAllocations(true),
6143 m_Allocations(pAllocationCallbacks)
// Non-movable iff the block holds more allocations than were registered.
6147 void CalcHasNonMovableAllocations()
6149 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
6150 const size_t defragmentAllocCount = m_Allocations.size();
6151 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
6154 void SortAllocationsBySizeDescending()
6156 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
6159 void SortAllocationsByOffsetDescending()
6161 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
// Orderings over BlockInfo used for lookup and destination selection.
6165 struct BlockPointerLess
6167 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 6169 return pLhsBlockInfo->m_pBlock < pRhsBlock;
6171 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6173 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Preferred move destinations first: blocks with non-movable allocations,
// then blocks with less free space.
6179 struct BlockInfoCompareMoveDestination
6181 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6183 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
6187 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
6191 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
6199 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
6200 BlockInfoVector m_Blocks;
// One pass of move planning within the byte/count budgets.
6202 VkResult DefragmentRound(
6203 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6204 VkDeviceSize maxBytesToMove,
6205 uint32_t maxAllocationsToMove);
6207 size_t CalcBlocksWithNonMovableCount()
const;
6209 static bool MoveMakesSense(
6210 size_t dstBlockIndex, VkDeviceSize dstOffset,
6211 size_t srcBlockIndex, VkDeviceSize srcOffset);
// Fast defragmentation: single pass that repacks all allocations, using a
// small fixed-size database of free ranges instead of full per-block sort.
6214 class VmaDefragmentationAlgorithm_Fast :
public VmaDefragmentationAlgorithm
6216 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
6218 VmaDefragmentationAlgorithm_Fast(
6220 VmaBlockVector* pBlockVector,
6221 uint32_t currentFrameIndex,
6222 bool overlappingMoveSupported);
6223 virtual ~VmaDefragmentationAlgorithm_Fast();
// Individual registration is ignored beyond counting; this algorithm
// effectively processes whole blocks.
6225 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
6226 virtual void AddAll() { m_AllAllocations =
true; }
6228 virtual VkResult Defragment(
6229 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6230 VkDeviceSize maxBytesToMove,
6231 uint32_t maxAllocationsToMove);
6233 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6234 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6239 size_t origBlockIndex;
// Fixed-capacity (MAX_COUNT) registry of the largest free ranges seen;
// Fetch() hands out a range that fits a given size/alignment.
6242 class FreeSpaceDatabase
// Initialize all slots to "unused" (blockInfoIndex == SIZE_MAX).
6248 s.blockInfoIndex = SIZE_MAX;
6249 for(
size_t i = 0; i < MAX_COUNT; ++i)
6251 m_FreeSpaces[i] = s;
// Record a free range, evicting the smallest tracked one if needed.
6255 void Register(
size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
6257 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6263 size_t bestIndex = SIZE_MAX;
6264 for(
size_t i = 0; i < MAX_COUNT; ++i)
// An empty slot is always the best choice.
6267 if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
6272 if(m_FreeSpaces[i].size < size &&
6273 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
6279 if(bestIndex != SIZE_MAX)
6281 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
6282 m_FreeSpaces[bestIndex].offset = offset;
6283 m_FreeSpaces[bestIndex].size = size;
// Find a tracked range that can hold `size` at `alignment`; on success
// returns its location and shrinks or releases the slot.
6287 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
6288 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
6290 size_t bestIndex = SIZE_MAX;
6291 VkDeviceSize bestFreeSpaceAfter = 0;
6292 for(
size_t i = 0; i < MAX_COUNT; ++i)
6295 if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
6297 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
// Fits after alignment?
6299 if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
6301 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
6303 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
6306 bestFreeSpaceAfter = freeSpaceAfter;
6312 if(bestIndex != SIZE_MAX)
6314 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
6315 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
// Keep the remainder if it is still big enough to be worth tracking.
6317 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6320 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
6321 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
6322 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
6327 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
6337 static const size_t MAX_COUNT = 4;
6341 size_t blockInfoIndex;
6342 VkDeviceSize offset;
6344 } m_FreeSpaces[MAX_COUNT];
6347 const bool m_OverlappingMoveSupported;
6349 uint32_t m_AllocationCount;
6350 bool m_AllAllocations;
6352 VkDeviceSize m_BytesMoved;
6353 uint32_t m_AllocationsMoved;
6355 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
// Metadata is temporarily rewritten around the repacking pass.
6357 void PreprocessMetadata();
6358 void PostprocessMetadata();
6359 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc);
6362 struct VmaBlockDefragmentationContext
6366 BLOCK_FLAG_USED = 0x00000001,
6371 VmaBlockDefragmentationContext() :
6373 hBuffer(VK_NULL_HANDLE)
6378 class VmaBlockVectorDefragmentationContext
6380 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
6384 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
6386 VmaBlockVectorDefragmentationContext(
6389 VmaBlockVector* pBlockVector,
6390 uint32_t currFrameIndex,
6392 ~VmaBlockVectorDefragmentationContext();
6394 VmaPool GetCustomPool()
const {
return m_hCustomPool; }
6395 VmaBlockVector* GetBlockVector()
const {
return m_pBlockVector; }
6396 VmaDefragmentationAlgorithm* GetAlgorithm()
const {
return m_pAlgorithm; }
6398 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6399 void AddAll() { m_AllAllocations =
true; }
6401 void Begin(
bool overlappingMoveSupported);
6408 VmaBlockVector*
const m_pBlockVector;
6409 const uint32_t m_CurrFrameIndex;
6410 const uint32_t m_AlgorithmFlags;
6412 VmaDefragmentationAlgorithm* m_pAlgorithm;
6420 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
6421 bool m_AllAllocations;
6424 struct VmaDefragmentationContext_T
6427 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
6429 VmaDefragmentationContext_T(
6431 uint32_t currFrameIndex,
6434 ~VmaDefragmentationContext_T();
6436 void AddPools(uint32_t poolCount,
VmaPool* pPools);
6437 void AddAllocations(
6438 uint32_t allocationCount,
6440 VkBool32* pAllocationsChanged);
6448 VkResult Defragment(
6449 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
6450 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
6455 const uint32_t m_CurrFrameIndex;
6456 const uint32_t m_Flags;
6459 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
6461 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
6464 #if VMA_RECORDING_ENABLED 6471 void WriteConfiguration(
6472 const VkPhysicalDeviceProperties& devProps,
6473 const VkPhysicalDeviceMemoryProperties& memProps,
6474 bool dedicatedAllocationExtensionEnabled);
6477 void RecordCreateAllocator(uint32_t frameIndex);
6478 void RecordDestroyAllocator(uint32_t frameIndex);
6479 void RecordCreatePool(uint32_t frameIndex,
6482 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
6483 void RecordAllocateMemory(uint32_t frameIndex,
6484 const VkMemoryRequirements& vkMemReq,
6487 void RecordAllocateMemoryPages(uint32_t frameIndex,
6488 const VkMemoryRequirements& vkMemReq,
6490 uint64_t allocationCount,
6492 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
6493 const VkMemoryRequirements& vkMemReq,
6494 bool requiresDedicatedAllocation,
6495 bool prefersDedicatedAllocation,
6498 void RecordAllocateMemoryForImage(uint32_t frameIndex,
6499 const VkMemoryRequirements& vkMemReq,
6500 bool requiresDedicatedAllocation,
6501 bool prefersDedicatedAllocation,
6504 void RecordFreeMemory(uint32_t frameIndex,
6506 void RecordFreeMemoryPages(uint32_t frameIndex,
6507 uint64_t allocationCount,
6509 void RecordResizeAllocation(
6510 uint32_t frameIndex,
6512 VkDeviceSize newSize);
6513 void RecordSetAllocationUserData(uint32_t frameIndex,
6515 const void* pUserData);
6516 void RecordCreateLostAllocation(uint32_t frameIndex,
6518 void RecordMapMemory(uint32_t frameIndex,
6520 void RecordUnmapMemory(uint32_t frameIndex,
6522 void RecordFlushAllocation(uint32_t frameIndex,
6523 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6524 void RecordInvalidateAllocation(uint32_t frameIndex,
6525 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6526 void RecordCreateBuffer(uint32_t frameIndex,
6527 const VkBufferCreateInfo& bufCreateInfo,
6530 void RecordCreateImage(uint32_t frameIndex,
6531 const VkImageCreateInfo& imageCreateInfo,
6534 void RecordDestroyBuffer(uint32_t frameIndex,
6536 void RecordDestroyImage(uint32_t frameIndex,
6538 void RecordTouchAllocation(uint32_t frameIndex,
6540 void RecordGetAllocationInfo(uint32_t frameIndex,
6542 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
6544 void RecordDefragmentationBegin(uint32_t frameIndex,
6547 void RecordDefragmentationEnd(uint32_t frameIndex,
6557 class UserDataString
6561 const char* GetString()
const {
return m_Str; }
6571 VMA_MUTEX m_FileMutex;
6573 int64_t m_StartCounter;
6575 void GetBasicParams(CallParams& outParams);
6578 template<
typename T>
6579 void PrintPointerList(uint64_t count,
const T* pItems)
6583 fprintf(m_File,
"%p", pItems[0]);
6584 for(uint64_t i = 1; i < count; ++i)
6586 fprintf(m_File,
" %p", pItems[i]);
6591 void PrintPointerList(uint64_t count,
const VmaAllocation* pItems);
6595 #endif // #if VMA_RECORDING_ENABLED 6600 class VmaAllocationObjectAllocator
6602 VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
6604 VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks);
6611 VmaPoolAllocator<VmaAllocation_T> m_Allocator;
6615 struct VmaAllocator_T
6617 VMA_CLASS_NO_COPY(VmaAllocator_T)
6620 bool m_UseKhrDedicatedAllocation;
6622 bool m_AllocationCallbacksSpecified;
6623 VkAllocationCallbacks m_AllocationCallbacks;
6625 VmaAllocationObjectAllocator m_AllocationObjectAllocator;
6628 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
6629 VMA_MUTEX m_HeapSizeLimitMutex;
6631 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
6632 VkPhysicalDeviceMemoryProperties m_MemProps;
6635 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
6638 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
6639 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
6640 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
6646 const VkAllocationCallbacks* GetAllocationCallbacks()
const 6648 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
6652 return m_VulkanFunctions;
6655 VkDeviceSize GetBufferImageGranularity()
const 6658 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
6659 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
6662 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
6663 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
6665 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 6667 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
6668 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
6671 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 6673 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
6674 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
6677 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 6679 return IsMemoryTypeNonCoherent(memTypeIndex) ?
6680 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
6681 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
6684 bool IsIntegratedGpu()
const 6686 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
6689 #if VMA_RECORDING_ENABLED 6690 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
6693 void GetBufferMemoryRequirements(
6695 VkMemoryRequirements& memReq,
6696 bool& requiresDedicatedAllocation,
6697 bool& prefersDedicatedAllocation)
const;
6698 void GetImageMemoryRequirements(
6700 VkMemoryRequirements& memReq,
6701 bool& requiresDedicatedAllocation,
6702 bool& prefersDedicatedAllocation)
const;
6705 VkResult AllocateMemory(
6706 const VkMemoryRequirements& vkMemReq,
6707 bool requiresDedicatedAllocation,
6708 bool prefersDedicatedAllocation,
6709 VkBuffer dedicatedBuffer,
6710 VkImage dedicatedImage,
6712 VmaSuballocationType suballocType,
6713 size_t allocationCount,
6718 size_t allocationCount,
6721 VkResult ResizeAllocation(
6723 VkDeviceSize newSize);
6725 void CalculateStats(
VmaStats* pStats);
6727 #if VMA_STATS_STRING_ENABLED 6728 void PrintDetailedMap(
class VmaJsonWriter& json);
6731 VkResult DefragmentationBegin(
6735 VkResult DefragmentationEnd(
6742 void DestroyPool(
VmaPool pool);
6745 void SetCurrentFrameIndex(uint32_t frameIndex);
6746 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
6748 void MakePoolAllocationsLost(
6750 size_t* pLostAllocationCount);
6751 VkResult CheckPoolCorruption(
VmaPool hPool);
6752 VkResult CheckCorruption(uint32_t memoryTypeBits);
6756 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
6757 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
6762 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
6763 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
6765 void FlushOrInvalidateAllocation(
6767 VkDeviceSize offset, VkDeviceSize size,
6768 VMA_CACHE_OPERATION op);
6770 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
6773 VkDeviceSize m_PreferredLargeHeapBlockSize;
6775 VkPhysicalDevice m_PhysicalDevice;
6776 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
6778 VMA_RW_MUTEX m_PoolsMutex;
6780 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
6781 uint32_t m_NextPoolId;
6785 #if VMA_RECORDING_ENABLED 6786 VmaRecorder* m_pRecorder;
6791 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
6793 VkResult AllocateMemoryOfType(
6795 VkDeviceSize alignment,
6796 bool dedicatedAllocation,
6797 VkBuffer dedicatedBuffer,
6798 VkImage dedicatedImage,
6800 uint32_t memTypeIndex,
6801 VmaSuballocationType suballocType,
6802 size_t allocationCount,
6806 VkResult AllocateDedicatedMemoryPage(
6808 VmaSuballocationType suballocType,
6809 uint32_t memTypeIndex,
6810 const VkMemoryAllocateInfo& allocInfo,
6812 bool isUserDataString,
6817 VkResult AllocateDedicatedMemory(
6819 VmaSuballocationType suballocType,
6820 uint32_t memTypeIndex,
6822 bool isUserDataString,
6824 VkBuffer dedicatedBuffer,
6825 VkImage dedicatedImage,
6826 size_t allocationCount,
6836 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
6838 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
6841 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
6843 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
6846 template<
typename T>
6849 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
6852 template<
typename T>
6853 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
6855 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
6858 template<
typename T>
6859 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
6864 VmaFree(hAllocator, ptr);
6868 template<
typename T>
6869 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
6873 for(
size_t i = count; i--; )
6875 VmaFree(hAllocator, ptr);
6882 #if VMA_STATS_STRING_ENABLED 6884 class VmaStringBuilder
6887 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
6888 size_t GetLength()
const {
return m_Data.size(); }
6889 const char* GetData()
const {
return m_Data.data(); }
6891 void Add(
char ch) { m_Data.push_back(ch); }
6892 void Add(
const char* pStr);
6893 void AddNewLine() { Add(
'\n'); }
6894 void AddNumber(uint32_t num);
6895 void AddNumber(uint64_t num);
6896 void AddPointer(
const void* ptr);
6899 VmaVector< char, VmaStlAllocator<char> > m_Data;
// Appends the zero-terminated string pStr to the internal character buffer.
// Only the characters themselves are stored - no terminating zero is copied
// (m_Data is a raw character vector, its length is the string length).
6902 void VmaStringBuilder::Add(
const char* pStr)
6904 const size_t strLen = strlen(pStr);
// Grow the vector once by strLen, then copy the characters into the new tail.
6907 const size_t oldCount = m_Data.size();
6908 m_Data.resize(oldCount + strLen);
6909 memcpy(m_Data.data() + oldCount, pStr, strLen);
6913 void VmaStringBuilder::AddNumber(uint32_t num)
6916 VmaUint32ToStr(buf,
sizeof(buf), num);
6920 void VmaStringBuilder::AddNumber(uint64_t num)
6923 VmaUint64ToStr(buf,
sizeof(buf), num);
6927 void VmaStringBuilder::AddPointer(
const void* ptr)
6930 VmaPtrToStr(buf,
sizeof(buf), ptr);
6934 #endif // #if VMA_STATS_STRING_ENABLED 6939 #if VMA_STATS_STRING_ENABLED 6943 VMA_CLASS_NO_COPY(VmaJsonWriter)
6945 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
6948 void BeginObject(
bool singleLine =
false);
6951 void BeginArray(
bool singleLine =
false);
6954 void WriteString(
const char* pStr);
6955 void BeginString(
const char* pStr = VMA_NULL);
6956 void ContinueString(
const char* pStr);
6957 void ContinueString(uint32_t n);
6958 void ContinueString(uint64_t n);
6959 void ContinueString_Pointer(
const void* ptr);
6960 void EndString(
const char* pStr = VMA_NULL);
6962 void WriteNumber(uint32_t n);
6963 void WriteNumber(uint64_t n);
6964 void WriteBool(
bool b);
6968 static const char*
const INDENT;
6970 enum COLLECTION_TYPE
6972 COLLECTION_TYPE_OBJECT,
6973 COLLECTION_TYPE_ARRAY,
6977 COLLECTION_TYPE type;
6978 uint32_t valueCount;
6979 bool singleLineMode;
6982 VmaStringBuilder& m_SB;
6983 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
6984 bool m_InsideString;
6986 void BeginValue(
bool isString);
6987 void WriteIndent(
bool oneLess =
false);
6990 const char*
const VmaJsonWriter::INDENT =
" ";
6992 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
6994 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
6995 m_InsideString(false)
6999 VmaJsonWriter::~VmaJsonWriter()
7001 VMA_ASSERT(!m_InsideString);
7002 VMA_ASSERT(m_Stack.empty());
7005 void VmaJsonWriter::BeginObject(
bool singleLine)
7007 VMA_ASSERT(!m_InsideString);
7013 item.type = COLLECTION_TYPE_OBJECT;
7014 item.valueCount = 0;
7015 item.singleLineMode = singleLine;
7016 m_Stack.push_back(item);
7019 void VmaJsonWriter::EndObject()
7021 VMA_ASSERT(!m_InsideString);
7026 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
7030 void VmaJsonWriter::BeginArray(
bool singleLine)
7032 VMA_ASSERT(!m_InsideString);
7038 item.type = COLLECTION_TYPE_ARRAY;
7039 item.valueCount = 0;
7040 item.singleLineMode = singleLine;
7041 m_Stack.push_back(item);
7044 void VmaJsonWriter::EndArray()
7046 VMA_ASSERT(!m_InsideString);
7051 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
7055 void VmaJsonWriter::WriteString(
const char* pStr)
7061 void VmaJsonWriter::BeginString(
const char* pStr)
7063 VMA_ASSERT(!m_InsideString);
7067 m_InsideString =
true;
7068 if(pStr != VMA_NULL && pStr[0] !=
'\0')
7070 ContinueString(pStr);
7074 void VmaJsonWriter::ContinueString(
const char* pStr)
7076 VMA_ASSERT(m_InsideString);
7078 const size_t strLen = strlen(pStr);
7079 for(
size_t i = 0; i < strLen; ++i)
7112 VMA_ASSERT(0 &&
"Character not currently supported.");
7118 void VmaJsonWriter::ContinueString(uint32_t n)
7120 VMA_ASSERT(m_InsideString);
7124 void VmaJsonWriter::ContinueString(uint64_t n)
7126 VMA_ASSERT(m_InsideString);
7130 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
7132 VMA_ASSERT(m_InsideString);
7133 m_SB.AddPointer(ptr);
7136 void VmaJsonWriter::EndString(
const char* pStr)
7138 VMA_ASSERT(m_InsideString);
7139 if(pStr != VMA_NULL && pStr[0] !=
'\0')
7141 ContinueString(pStr);
7144 m_InsideString =
false;
7147 void VmaJsonWriter::WriteNumber(uint32_t n)
7149 VMA_ASSERT(!m_InsideString);
7154 void VmaJsonWriter::WriteNumber(uint64_t n)
7156 VMA_ASSERT(!m_InsideString);
7161 void VmaJsonWriter::WriteBool(
bool b)
7163 VMA_ASSERT(!m_InsideString);
7165 m_SB.Add(b ?
"true" :
"false");
7168 void VmaJsonWriter::WriteNull()
7170 VMA_ASSERT(!m_InsideString);
7175 void VmaJsonWriter::BeginValue(
bool isString)
7177 if(!m_Stack.empty())
7179 StackItem& currItem = m_Stack.back();
7180 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7181 currItem.valueCount % 2 == 0)
7183 VMA_ASSERT(isString);
7186 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7187 currItem.valueCount % 2 != 0)
7191 else if(currItem.valueCount > 0)
7200 ++currItem.valueCount;
7204 void VmaJsonWriter::WriteIndent(
bool oneLess)
7206 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
7210 size_t count = m_Stack.size();
7211 if(count > 0 && oneLess)
7215 for(
size_t i = 0; i < count; ++i)
// Replaces this allocation's user data. In "user data is string" mode the
// previously owned heap copy is freed and, when pUserData is non-null, a
// fresh heap copy of the incoming zero-terminated string is made; otherwise
// the raw pointer is stored as-is (not owned by the allocation).
7222 #endif // #if VMA_STATS_STRING_ENABLED 7226 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
7228 if(IsUserDataString())
// Passing back the currently stored string pointer would be freed below and
// then read from - only NULL or a different pointer is legal here.
7230 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
7232 FreeUserDataString(hAllocator);
7234 if(pUserData != VMA_NULL)
7236 const char*
const newStrSrc = (
char*)pUserData;
7237 const size_t newStrLen = strlen(newStrSrc);
// The copy includes the terminating zero: newStrLen + 1 characters.
7238 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
7239 memcpy(newStrDst, newStrSrc, newStrLen + 1);
7240 m_pUserData = newStrDst;
// Non-string mode: store the caller's pointer without taking ownership.
7245 m_pUserData = pUserData;
// Re-points a block allocation at a (possibly different) device-memory block
// and offset - used e.g. when the allocation is moved during defragmentation.
7249 void VmaAllocation_T::ChangeBlockAllocation(
7251 VmaDeviceMemoryBlock* block,
7252 VkDeviceSize offset)
7254 VMA_ASSERT(block != VMA_NULL);
7255 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
// When moving to a different block, transfer this allocation's map reference
// count: unmap the old block and map the new one the same number of times.
7258 if(block != m_BlockAllocation.m_Block)
7260 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
// NOTE(review): the statement adjusting mapRefCount for a persistent map
// (between original lines 7261 and 7263) appears elided here - confirm
// against the full source.
7261 if(IsPersistentMap())
7263 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
7264 block->Map(hAllocator, mapRefCount, VMA_NULL);
7267 m_BlockAllocation.m_Block = block;
7268 m_BlockAllocation.m_Offset = offset;
7271 void VmaAllocation_T::ChangeSize(VkDeviceSize newSize)
7273 VMA_ASSERT(newSize > 0);
7277 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
7279 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7280 m_BlockAllocation.m_Offset = newOffset;
7283 VkDeviceSize VmaAllocation_T::GetOffset()
const 7287 case ALLOCATION_TYPE_BLOCK:
7288 return m_BlockAllocation.m_Offset;
7289 case ALLOCATION_TYPE_DEDICATED:
7297 VkDeviceMemory VmaAllocation_T::GetMemory()
const 7301 case ALLOCATION_TYPE_BLOCK:
7302 return m_BlockAllocation.m_Block->GetDeviceMemory();
7303 case ALLOCATION_TYPE_DEDICATED:
7304 return m_DedicatedAllocation.m_hMemory;
7307 return VK_NULL_HANDLE;
7311 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 7315 case ALLOCATION_TYPE_BLOCK:
7316 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
7317 case ALLOCATION_TYPE_DEDICATED:
7318 return m_DedicatedAllocation.m_MemoryTypeIndex;
7325 void* VmaAllocation_T::GetMappedData()
const 7329 case ALLOCATION_TYPE_BLOCK:
7332 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
7333 VMA_ASSERT(pBlockData != VMA_NULL);
7334 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
7341 case ALLOCATION_TYPE_DEDICATED:
7342 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
7343 return m_DedicatedAllocation.m_pMappedData;
7350 bool VmaAllocation_T::CanBecomeLost()
const 7354 case ALLOCATION_TYPE_BLOCK:
7355 return m_BlockAllocation.m_CanBecomeLost;
7356 case ALLOCATION_TYPE_DEDICATED:
7364 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7366 VMA_ASSERT(CanBecomeLost());
7372 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
7375 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7380 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
7386 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
7396 #if VMA_STATS_STRING_ENABLED 7399 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
7408 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 7410 json.WriteString(
"Type");
7411 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
7413 json.WriteString(
"Size");
7414 json.WriteNumber(m_Size);
7416 if(m_pUserData != VMA_NULL)
7418 json.WriteString(
"UserData");
7419 if(IsUserDataString())
7421 json.WriteString((
const char*)m_pUserData);
7426 json.ContinueString_Pointer(m_pUserData);
7431 json.WriteString(
"CreationFrameIndex");
7432 json.WriteNumber(m_CreationFrameIndex);
7434 json.WriteString(
"LastUseFrameIndex");
7435 json.WriteNumber(GetLastUseFrameIndex());
7437 if(m_BufferImageUsage != 0)
7439 json.WriteString(
"Usage");
7440 json.WriteNumber(m_BufferImageUsage);
7446 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
7448 VMA_ASSERT(IsUserDataString());
7449 if(m_pUserData != VMA_NULL)
7451 char*
const oldStr = (
char*)m_pUserData;
7452 const size_t oldStrLen = strlen(oldStr);
7453 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
7454 m_pUserData = VMA_NULL;
// Increments the map reference count of a block allocation. The counter is
// kept in m_MapCount with the MAP_COUNT_FLAG_PERSISTENT_MAP flag bit masked
// out, so at most 0x7F simultaneous mappings are representable.
7458 void VmaAllocation_T::BlockAllocMap()
7460 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7462 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
// Counter saturated - mapping again would overflow into the flag bits.
7468 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
// Decrements the map reference count of a block allocation. Asserts if the
// allocation is not currently mapped (unbalanced Unmap is a usage error).
7472 void VmaAllocation_T::BlockAllocUnmap()
7474 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
// Only legal when the flag-masked map counter is non-zero.
7476 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7482 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
7486 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
7488 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7492 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7494 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
7495 *ppData = m_DedicatedAllocation.m_pMappedData;
7501 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
7502 return VK_ERROR_MEMORY_MAP_FAILED;
7507 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
7508 hAllocator->m_hDevice,
7509 m_DedicatedAllocation.m_hMemory,
7514 if(result == VK_SUCCESS)
7516 m_DedicatedAllocation.m_pMappedData = *ppData;
// Unmaps a dedicated (non-block) allocation: drops a map reference and,
// when appropriate, calls vkUnmapMemory on the dedicated VkDeviceMemory.
7523 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
7525 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7527 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7532 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
// Unmap through the allocator's function-pointer table so user-supplied or
// dynamically fetched Vulkan entry points are honored.
7533 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
7534 hAllocator->m_hDevice,
7535 m_DedicatedAllocation.m_hMemory);
// Unbalanced unmap is a usage error.
7540 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
7544 #if VMA_STATS_STRING_ENABLED 7546 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
7550 json.WriteString(
"Blocks");
7553 json.WriteString(
"Allocations");
7556 json.WriteString(
"UnusedRanges");
7559 json.WriteString(
"UsedBytes");
7562 json.WriteString(
"UnusedBytes");
7567 json.WriteString(
"AllocationSize");
7568 json.BeginObject(
true);
7569 json.WriteString(
"Min");
7571 json.WriteString(
"Avg");
7573 json.WriteString(
"Max");
7580 json.WriteString(
"UnusedRangeSize");
7581 json.BeginObject(
true);
7582 json.WriteString(
"Min");
7584 json.WriteString(
"Avg");
7586 json.WriteString(
"Max");
7594 #endif // #if VMA_STATS_STRING_ENABLED 7596 struct VmaSuballocationItemSizeLess
7599 const VmaSuballocationList::iterator lhs,
7600 const VmaSuballocationList::iterator rhs)
const 7602 return lhs->size < rhs->size;
7605 const VmaSuballocationList::iterator lhs,
7606 VkDeviceSize rhsSize)
const 7608 return lhs->size < rhsSize;
7616 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
7618 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
7622 #if VMA_STATS_STRING_ENABLED 7624 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
7625 VkDeviceSize unusedBytes,
7626 size_t allocationCount,
7627 size_t unusedRangeCount)
const 7631 json.WriteString(
"TotalBytes");
7632 json.WriteNumber(GetSize());
7634 json.WriteString(
"UnusedBytes");
7635 json.WriteNumber(unusedBytes);
7637 json.WriteString(
"Allocations");
7638 json.WriteNumber((uint64_t)allocationCount);
7640 json.WriteString(
"UnusedRanges");
7641 json.WriteNumber((uint64_t)unusedRangeCount);
7643 json.WriteString(
"Suballocations");
7647 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
7648 VkDeviceSize offset,
7651 json.BeginObject(
true);
7653 json.WriteString(
"Offset");
7654 json.WriteNumber(offset);
7656 hAllocation->PrintParameters(json);
7661 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
7662 VkDeviceSize offset,
7663 VkDeviceSize size)
const 7665 json.BeginObject(
true);
7667 json.WriteString(
"Offset");
7668 json.WriteNumber(offset);
7670 json.WriteString(
"Type");
7671 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
7673 json.WriteString(
"Size");
7674 json.WriteNumber(size);
7679 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const 7685 #endif // #if VMA_STATS_STRING_ENABLED 7690 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
7691 VmaBlockMetadata(hAllocator),
7694 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7695 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
7699 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
// Initializes metadata for a freshly created memory block of `size` bytes:
// the entire block becomes a single free suballocation, registered in the
// by-size list of free ranges.
7703 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
7705 VmaBlockMetadata::Init(size);
7708 m_SumFreeSize = size;
// One free suballocation covering [0, size).
7710 VmaSuballocation suballoc = {};
7711 suballoc.offset = 0;
7712 suballoc.size = size;
7713 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7714 suballoc.hAllocation = VK_NULL_HANDLE;
7716 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
7717 m_Suballocations.push_back(suballoc);
7718 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
// NOTE(review): suballocItem equals end() here; the decrement that makes it
// point at the inserted element appears elided from this view - confirm.
7720 m_FreeSuballocationsBySize.push_back(suballocItem);
7723 bool VmaBlockMetadata_Generic::Validate()
const 7725 VMA_VALIDATE(!m_Suballocations.empty());
7728 VkDeviceSize calculatedOffset = 0;
7730 uint32_t calculatedFreeCount = 0;
7732 VkDeviceSize calculatedSumFreeSize = 0;
7735 size_t freeSuballocationsToRegister = 0;
7737 bool prevFree =
false;
7739 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7740 suballocItem != m_Suballocations.cend();
7743 const VmaSuballocation& subAlloc = *suballocItem;
7746 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
7748 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
7750 VMA_VALIDATE(!prevFree || !currFree);
7752 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
7756 calculatedSumFreeSize += subAlloc.size;
7757 ++calculatedFreeCount;
7758 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7760 ++freeSuballocationsToRegister;
7764 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
7768 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
7769 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
7772 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
7775 calculatedOffset += subAlloc.size;
7776 prevFree = currFree;
7781 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
7783 VkDeviceSize lastSize = 0;
7784 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
7786 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
7789 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7791 VMA_VALIDATE(suballocItem->size >= lastSize);
7793 lastSize = suballocItem->size;
7797 VMA_VALIDATE(ValidateFreeSuballocationList());
7798 VMA_VALIDATE(calculatedOffset == GetSize());
7799 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
7800 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
7805 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 7807 if(!m_FreeSuballocationsBySize.empty())
7809 return m_FreeSuballocationsBySize.back()->size;
7817 bool VmaBlockMetadata_Generic::IsEmpty()
const 7819 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
7822 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 7826 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7838 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7839 suballocItem != m_Suballocations.cend();
7842 const VmaSuballocation& suballoc = *suballocItem;
7843 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
7856 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 7858 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7860 inoutStats.
size += GetSize();
7867 #if VMA_STATS_STRING_ENABLED 7869 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 7871 PrintDetailedMap_Begin(json,
7873 m_Suballocations.size() - (size_t)m_FreeCount,
7877 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7878 suballocItem != m_Suballocations.cend();
7879 ++suballocItem, ++i)
7881 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7883 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
7887 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
7891 PrintDetailedMap_End(json);
7894 #endif // #if VMA_STATS_STRING_ENABLED 7896 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
7897 uint32_t currentFrameIndex,
7898 uint32_t frameInUseCount,
7899 VkDeviceSize bufferImageGranularity,
7900 VkDeviceSize allocSize,
7901 VkDeviceSize allocAlignment,
7903 VmaSuballocationType allocType,
7904 bool canMakeOtherLost,
7906 VmaAllocationRequest* pAllocationRequest)
7908 VMA_ASSERT(allocSize > 0);
7909 VMA_ASSERT(!upperAddress);
7910 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7911 VMA_ASSERT(pAllocationRequest != VMA_NULL);
7912 VMA_HEAVY_ASSERT(Validate());
7914 pAllocationRequest->type = VmaAllocationRequestType::Normal;
7917 if(canMakeOtherLost ==
false &&
7918 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
7924 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
7925 if(freeSuballocCount > 0)
7930 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7931 m_FreeSuballocationsBySize.data(),
7932 m_FreeSuballocationsBySize.data() + freeSuballocCount,
7933 allocSize + 2 * VMA_DEBUG_MARGIN,
7934 VmaSuballocationItemSizeLess());
7935 size_t index = it - m_FreeSuballocationsBySize.data();
7936 for(; index < freeSuballocCount; ++index)
7941 bufferImageGranularity,
7945 m_FreeSuballocationsBySize[index],
7947 &pAllocationRequest->offset,
7948 &pAllocationRequest->itemsToMakeLostCount,
7949 &pAllocationRequest->sumFreeSize,
7950 &pAllocationRequest->sumItemSize))
7952 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
7957 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
7959 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7960 it != m_Suballocations.end();
7963 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
7966 bufferImageGranularity,
7972 &pAllocationRequest->offset,
7973 &pAllocationRequest->itemsToMakeLostCount,
7974 &pAllocationRequest->sumFreeSize,
7975 &pAllocationRequest->sumItemSize))
7977 pAllocationRequest->item = it;
7985 for(
size_t index = freeSuballocCount; index--; )
7990 bufferImageGranularity,
7994 m_FreeSuballocationsBySize[index],
7996 &pAllocationRequest->offset,
7997 &pAllocationRequest->itemsToMakeLostCount,
7998 &pAllocationRequest->sumFreeSize,
7999 &pAllocationRequest->sumItemSize))
8001 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
8008 if(canMakeOtherLost)
8013 VmaAllocationRequest tmpAllocRequest = {};
8014 tmpAllocRequest.type = VmaAllocationRequestType::Normal;
8015 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
8016 suballocIt != m_Suballocations.end();
8019 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
8020 suballocIt->hAllocation->CanBecomeLost())
8025 bufferImageGranularity,
8031 &tmpAllocRequest.offset,
8032 &tmpAllocRequest.itemsToMakeLostCount,
8033 &tmpAllocRequest.sumFreeSize,
8034 &tmpAllocRequest.sumItemSize))
8038 *pAllocationRequest = tmpAllocRequest;
8039 pAllocationRequest->item = suballocIt;
8042 if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
8044 *pAllocationRequest = tmpAllocRequest;
8045 pAllocationRequest->item = suballocIt;
8058 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
8059 uint32_t currentFrameIndex,
8060 uint32_t frameInUseCount,
8061 VmaAllocationRequest* pAllocationRequest)
8063 VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);
8065 while(pAllocationRequest->itemsToMakeLostCount > 0)
8067 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
8069 ++pAllocationRequest->item;
8071 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
8072 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
8073 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
8074 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8076 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
8077 --pAllocationRequest->itemsToMakeLostCount;
8085 VMA_HEAVY_ASSERT(Validate());
8086 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
8087 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Walks every suballocation and turns each used allocation that can become
// lost (and is old enough per frameInUseCount) into free space.
// Returns the number of allocations that were made lost.
8092 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
8094 uint32_t lostAllocationCount = 0;
8095 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8096 it != m_Suballocations.end();
8099 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
8100 it->hAllocation->CanBecomeLost() &&
8101 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation returns an iterator (presumably to the resulting free
// range) which is assigned back so iteration continues safely after the
// list is modified.
8103 it = FreeSuballocation(it);
8104 ++lostAllocationCount;
8107 return lostAllocationCount;
// Scans every used suballocation in the mapped block data and validates the
// magic-value margins written immediately before and after it. Returns
// VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted margin found.
8110 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
8112 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8113 it != m_Suballocations.end();
8116 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
// Check the margin immediately preceding the allocation.
8118 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
8120 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
8121 return VK_ERROR_VALIDATION_FAILED_EXT;
// Check the margin immediately following the allocation.
8123 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
8125 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8126 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a Normal allocation request computed earlier: converts the chosen
// free suballocation into an occupied one of allocSize at request.offset, and
// re-inserts any leftover space before/after it as new free suballocations.
8134 void VmaBlockMetadata_Generic::Alloc(
8135 const VmaAllocationRequest& request,
8136 VmaSuballocationType type,
8137 VkDeviceSize allocSize,
8140 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
8141 VMA_ASSERT(request.item != m_Suballocations.end());
8142 VmaSuballocation& suballoc = *request.item;
// The target suballocation must currently be free.
8144 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8146 VMA_ASSERT(request.offset >= suballoc.offset);
// paddingBegin: space skipped for alignment/margin; paddingEnd: unused tail.
8147 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
8148 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
8149 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// Remove from the by-size registry before mutating offset/size in place.
8153 UnregisterFreeSuballocation(request.item);
8155 suballoc.offset = request.offset;
8156 suballoc.size = allocSize;
8157 suballoc.type = type;
8158 suballoc.hAllocation = hAllocation;
// If there is leftover space after the allocation, register it as a new
// free suballocation placed right after request.item.
8163 VmaSuballocation paddingSuballoc = {};
8164 paddingSuballoc.offset = request.offset + allocSize;
8165 paddingSuballoc.size = paddingEnd;
8166 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8167 VmaSuballocationList::iterator next = request.item;
8169 const VmaSuballocationList::iterator paddingEndItem =
8170 m_Suballocations.insert(next, paddingSuballoc);
8171 RegisterFreeSuballocation(paddingEndItem);
// Likewise for leftover space before the allocation (alignment padding),
// inserted right before request.item.
8177 VmaSuballocation paddingSuballoc = {};
8178 paddingSuballoc.offset = request.offset - paddingBegin;
8179 paddingSuballoc.size = paddingBegin;
8180 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8181 const VmaSuballocationList::iterator paddingBeginItem =
8182 m_Suballocations.insert(request.item, paddingSuballoc);
8183 RegisterFreeSuballocation(paddingBeginItem);
// Bookkeeping: one free range consumed, padding ranges added back, and the
// total free size shrinks by exactly allocSize.
8187 m_FreeCount = m_FreeCount - 1;
8188 if(paddingBegin > 0)
8196 m_SumFreeSize -= allocSize;
8199 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
8201 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8202 suballocItem != m_Suballocations.end();
8205 VmaSuballocation& suballoc = *suballocItem;
8206 if(suballoc.hAllocation == allocation)
8208 FreeSuballocation(suballocItem);
8209 VMA_HEAVY_ASSERT(Validate());
8213 VMA_ASSERT(0 &&
"Not found!");
8216 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
8218 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8219 suballocItem != m_Suballocations.end();
8222 VmaSuballocation& suballoc = *suballocItem;
8223 if(suballoc.offset == offset)
8225 FreeSuballocation(suballocItem);
8229 VMA_ASSERT(0 &&
"Not found!");
// Shrinks or grows allocation `alloc` in place to `newSize` without moving it.
// Shrinking always succeeds (the released tail becomes/extends a free range);
// growing succeeds only if the immediately following suballocation is free
// and can donate the extra space (respecting the debug margin).
// Returns true on success, false if growing is not possible in place.
8232 bool VmaBlockMetadata_Generic::ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize)
8234 typedef VmaSuballocationList::iterator iter_type;
8235 for(iter_type suballocItem = m_Suballocations.begin();
8236 suballocItem != m_Suballocations.end();
8239 VmaSuballocation& suballoc = *suballocItem;
8240 if(suballoc.hAllocation == alloc)
// nextItem: the suballocation right after `alloc`, if any.
8242 iter_type nextItem = suballocItem;
8246 VMA_ASSERT(newSize != alloc->GetSize() && newSize > 0);
// --- Shrinking ---
8249 if(newSize < alloc->GetSize())
8251 const VkDeviceSize sizeDiff = suballoc.size - newSize;
8254 if(nextItem != m_Suballocations.end())
// Next is free: extend it backwards to absorb the released space.
8257 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8260 UnregisterFreeSuballocation(nextItem);
8261 nextItem->offset -= sizeDiff;
8262 nextItem->size += sizeDiff;
8263 RegisterFreeSuballocation(nextItem);
// Next is occupied: insert a brand-new free range in between.
8269 VmaSuballocation newFreeSuballoc;
8270 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
8271 newFreeSuballoc.offset = suballoc.offset + newSize;
8272 newFreeSuballoc.size = sizeDiff;
8273 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8274 iter_type newFreeSuballocIt = m_Suballocations.insert(nextItem, newFreeSuballoc);
8275 RegisterFreeSuballocation(newFreeSuballocIt);
// `alloc` was the last suballocation: append the freed tail at the end.
8284 VmaSuballocation newFreeSuballoc;
8285 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
8286 newFreeSuballoc.offset = suballoc.offset + newSize;
8287 newFreeSuballoc.size = sizeDiff;
8288 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8289 m_Suballocations.push_back(newFreeSuballoc);
8291 iter_type newFreeSuballocIt = m_Suballocations.end();
8292 RegisterFreeSuballocation(--newFreeSuballocIt);
8297 suballoc.size = newSize;
8298 m_SumFreeSize += sizeDiff;
// --- Growing ---
8303 const VkDeviceSize sizeDiff = newSize - suballoc.size;
8306 if(nextItem != m_Suballocations.end())
8309 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
// The following free range is too small to donate sizeDiff plus margin: fail.
8312 if(nextItem->size < sizeDiff + VMA_DEBUG_MARGIN)
// Strictly larger: shave sizeDiff off the front of the free range.
8318 if(nextItem->size > sizeDiff)
8321 UnregisterFreeSuballocation(nextItem);
8322 nextItem->offset += sizeDiff;
8323 nextItem->size -= sizeDiff;
8324 RegisterFreeSuballocation(nextItem);
// Exactly equal: the free range is consumed entirely and removed.
8330 UnregisterFreeSuballocation(nextItem);
8331 m_Suballocations.erase(nextItem);
8347 suballoc.size = newSize;
8348 m_SumFreeSize -= sizeDiff;
8355 VMA_ASSERT(0 &&
"Not found!");
// Sanity-checks m_FreeSuballocationsBySize: every registered entry must be a
// free suballocation, at least the registration threshold in size, and the
// vector must be sorted ascending by size (required by the binary searches).
8359 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 8361 VkDeviceSize lastSize = 0;
8362 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
8364 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
8366 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
8367 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
// Ascending order check against the previous element.
8368 VMA_VALIDATE(it->size >= lastSize);
8369 lastSize = it->size;
// Checks whether an allocation of allocSize/allocAlignment/allocType can be
// placed starting at suballocItem. On success fills *pOffset with the chosen
// aligned offset, and (when canMakeOtherLost) the cost outputs:
// *itemsToMakeLostCount, *pSumFreeSize, *pSumItemSize.
// Two distinct paths: canMakeOtherLost == true may span several consecutive
// suballocations (making occupied ones lost); otherwise the allocation must
// fit inside the single free suballocation at suballocItem.
8374 bool VmaBlockMetadata_Generic::CheckAllocation(
8375 uint32_t currentFrameIndex,
8376 uint32_t frameInUseCount,
8377 VkDeviceSize bufferImageGranularity,
8378 VkDeviceSize allocSize,
8379 VkDeviceSize allocAlignment,
8380 VmaSuballocationType allocType,
8381 VmaSuballocationList::const_iterator suballocItem,
8382 bool canMakeOtherLost,
8383 VkDeviceSize* pOffset,
8384 size_t* itemsToMakeLostCount,
8385 VkDeviceSize* pSumFreeSize,
8386 VkDeviceSize* pSumItemSize)
const 8388 VMA_ASSERT(allocSize > 0);
8389 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8390 VMA_ASSERT(suballocItem != m_Suballocations.cend());
8391 VMA_ASSERT(pOffset != VMA_NULL);
8393 *itemsToMakeLostCount = 0;
// ---------- Path 1: may make other allocations lost ----------
8397 if(canMakeOtherLost)
8399 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8401 *pSumFreeSize = suballocItem->size;
// Starting on an occupied item: it must itself be lost-able and stale.
8405 if(suballocItem->hAllocation->CanBecomeLost() &&
8406 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8408 ++*itemsToMakeLostCount;
8409 *pSumItemSize = suballocItem->size;
// Quick reject: not enough room left in the whole block from this offset.
8418 if(GetSize() - suballocItem->offset < allocSize)
// Start of the candidate region; then apply debug margin and alignment.
8424 *pOffset = suballocItem->offset;
8427 if(VMA_DEBUG_MARGIN > 0)
8429 *pOffset += VMA_DEBUG_MARGIN;
8433 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// bufferImageGranularity: if a previous suballocation on the same "page"
// holds a conflicting resource kind, bump alignment up to the granularity.
8437 if(bufferImageGranularity > 1)
8439 bool bufferImageGranularityConflict =
false;
8440 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8441 while(prevSuballocItem != m_Suballocations.cbegin())
8444 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8445 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8447 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8449 bufferImageGranularityConflict =
true;
8457 if(bufferImageGranularityConflict)
8459 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Alignment pushed the offset past the starting suballocation: fail.
8465 if(*pOffset >= suballocItem->offset + suballocItem->size)
8471 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
8474 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
// Total span the allocation needs, measured from suballocItem->offset.
8476 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
8478 if(suballocItem->offset + totalSize > GetSize())
// Walk forward over as many suballocations as needed to cover totalSize,
// accumulating free space and the cost of allocations to make lost.
8485 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
8486 if(totalSize > suballocItem->size)
8488 VkDeviceSize remainingSize = totalSize - suballocItem->size;
8489 while(remainingSize > 0)
8492 if(lastSuballocItem == m_Suballocations.cend())
8496 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8498 *pSumFreeSize += lastSuballocItem->size;
8502 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
// Occupied items along the way must be lost-able and stale, else fail.
8503 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
8504 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8506 ++*itemsToMakeLostCount;
8507 *pSumItemSize += lastSuballocItem->size;
8514 remainingSize = (lastSuballocItem->size < remainingSize) ?
8515 remainingSize - lastSuballocItem->size : 0;
// Granularity check against following suballocations: any conflicting
// neighbor on the same page must also be made lost.
8521 if(bufferImageGranularity > 1)
8523 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
8525 while(nextSuballocItem != m_Suballocations.cend())
8527 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8528 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8530 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8532 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
8533 if(nextSuballoc.hAllocation->CanBecomeLost() &&
8534 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8536 ++*itemsToMakeLostCount;
// ---------- Path 2: must fit in this single free suballocation ----------
8555 const VmaSuballocation& suballoc = *suballocItem;
8556 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8558 *pSumFreeSize = suballoc.size;
// Early reject on raw size before doing alignment work.
8561 if(suballoc.size < allocSize)
8567 *pOffset = suballoc.offset;
8570 if(VMA_DEBUG_MARGIN > 0)
8572 *pOffset += VMA_DEBUG_MARGIN;
8576 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Same backwards granularity scan as in path 1.
8580 if(bufferImageGranularity > 1)
8582 bool bufferImageGranularityConflict =
false;
8583 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8584 while(prevSuballocItem != m_Suballocations.cbegin())
8587 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8588 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8590 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8592 bufferImageGranularityConflict =
true;
8600 if(bufferImageGranularityConflict)
8602 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
8607 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
8610 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
// Padding + payload + end margin must all fit inside this free range.
8613 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward granularity scan: a conflicting following resource on the same
// page makes this placement unusable (nothing can be made lost here).
8620 if(bufferImageGranularity > 1)
8622 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
8624 while(nextSuballocItem != m_Suballocations.cend())
8626 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8627 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8629 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8648 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
8650 VMA_ASSERT(item != m_Suballocations.end());
8651 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8653 VmaSuballocationList::iterator nextItem = item;
8655 VMA_ASSERT(nextItem != m_Suballocations.end());
8656 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
8658 item->size += nextItem->size;
8660 m_Suballocations.erase(nextItem);
// Marks suballocItem free, merges it with adjacent free neighbors, updates
// m_SumFreeSize and the by-size registry, and returns an iterator to the
// resulting (possibly merged) free suballocation.
8663 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
8666 VmaSuballocation& suballoc = *suballocItem;
8667 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8668 suballoc.hAllocation = VK_NULL_HANDLE;
8672 m_SumFreeSize += suballoc.size;
// Determine which neighbors are free and therefore mergeable.
8675 bool mergeWithNext =
false;
8676 bool mergeWithPrev =
false;
8678 VmaSuballocationList::iterator nextItem = suballocItem;
8680 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
8682 mergeWithNext =
true;
8685 VmaSuballocationList::iterator prevItem = suballocItem;
8686 if(suballocItem != m_Suballocations.begin())
8689 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8691 mergeWithPrev =
true;
// Merge order matters: first absorb the next item into suballocItem, then
// fold suballocItem into prevItem. Neighbors are unregistered from the
// by-size list before their sizes change, and the survivor re-registered.
8697 UnregisterFreeSuballocation(nextItem);
8698 MergeFreeWithNext(suballocItem);
8703 UnregisterFreeSuballocation(prevItem);
8704 MergeFreeWithNext(prevItem);
8705 RegisterFreeSuballocation(prevItem);
// No merge with previous: suballocItem itself is the surviving free range.
8710 RegisterFreeSuballocation(suballocItem);
8711 return suballocItem;
8715 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
8717 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8718 VMA_ASSERT(item->size > 0);
8722 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8724 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8726 if(m_FreeSuballocationsBySize.empty())
8728 m_FreeSuballocationsBySize.push_back(item);
8732 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes `item` from m_FreeSuballocationsBySize. Items below the
// registration threshold were never added, so they are skipped. Lookup is a
// binary search to the first entry of equal size, then a linear scan over
// the run of equal-sized entries to find the exact iterator.
8740 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
8742 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8743 VMA_ASSERT(item->size > 0);
8747 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8749 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
// Binary search: first element whose size is not less than item->size.
8751 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
8752 m_FreeSuballocationsBySize.data(),
8753 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
8755 VmaSuballocationItemSizeLess());
// Scan forward through equal-sized entries for the exact match.
8756 for(
size_t index = it - m_FreeSuballocationsBySize.data();
8757 index < m_FreeSuballocationsBySize.size();
8760 if(m_FreeSuballocationsBySize[index] == item)
8762 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Leaving the equal-size run without a match means the item was never
// registered — a bookkeeping bug.
8765 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
8767 VMA_ASSERT(0 &&
"Not found.");
// Heuristic used by defragmentation: returns true if mixing resource kinds in
// this block could ever violate bufferImageGranularity. Trivially false when
// the granularity is 1 or the block is empty. Otherwise scans all occupied
// suballocations, tracking the minimum alignment seen and whether two
// adjacent-in-order suballocations have conflicting types.
// inOutPrevSuballocType carries the last seen type across blocks.
8773 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
8774 VkDeviceSize bufferImageGranularity,
8775 VmaSuballocationType& inOutPrevSuballocType)
const 8777 if(bufferImageGranularity == 1 || IsEmpty())
8782 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
8783 bool typeConflictFound =
false;
8784 for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
8785 it != m_Suballocations.cend();
8788 const VmaSuballocationType suballocType = it->type;
8789 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
8791 minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
8792 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
8794 typeConflictFound =
true;
8796 inOutPrevSuballocType = suballocType;
// Conflict is possible if types actually conflict, or if any allocation's
// alignment is below the granularity (so repacking could co-locate them).
8800 return typeConflictFound || minAlignment >= bufferImageGranularity;
// Constructor: starts with both suballocation vectors empty, vector 0 acting
// as the "1st" vector, the 2nd vector unused (SECOND_VECTOR_EMPTY), and all
// null-item (freed-but-not-compacted) counters at zero.
8806 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
8807 VmaBlockMetadata(hAllocator),
8809 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8810 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8811 m_1stVectorIndex(0),
8812 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
8813 m_1stNullItemsBeginCount(0),
8814 m_1stNullItemsMiddleCount(0),
8815 m_2ndNullItemsCount(0)
// Destructor: nothing to release manually; members clean up via RAII.
8819 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
// Initializes metadata for a block of the given size; with no allocations
// yet, the whole block counts as free.
8823 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
8825 VmaBlockMetadata::Init(size);
8826 m_SumFreeSize = size;
// Full consistency check of the linear allocator's invariants: vector/mode
// agreement, null-item counters, strictly increasing offsets, hAllocation
// back-pointers, and that m_SumFreeSize matches block size minus used bytes.
8829 bool VmaBlockMetadata_Linear::Validate()
const 8831 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8832 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// 2nd vector is empty exactly when its mode says so; in ring-buffer mode
// the 1st vector may not be empty while the 2nd has entries.
8834 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
8835 VMA_VALIDATE(!suballocations1st.empty() ||
8836 suballocations2nd.empty() ||
8837 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
8839 if(!suballocations1st.empty())
// First real (non-null) item and the last item must be live allocations.
8842 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
8844 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
8846 if(!suballocations2nd.empty())
8849 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
// Null-item counters can never exceed the vectors they describe.
8852 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
8853 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
8855 VkDeviceSize sumUsedSize = 0;
8856 const size_t suballoc1stCount = suballocations1st.size();
// `offset` tracks the minimum offset the next suballocation may start at.
8857 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Ring-buffer mode: 2nd vector occupies the low end of the block, before
// the 1st vector's first item.
8859 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8861 const size_t suballoc2ndCount = suballocations2nd.size();
8862 size_t nullItem2ndCount = 0;
8863 for(
size_t i = 0; i < suballoc2ndCount; ++i)
8865 const VmaSuballocation& suballoc = suballocations2nd[i];
8866 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8868 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8869 VMA_VALIDATE(suballoc.offset >= offset);
8873 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8874 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8875 sumUsedSize += suballoc.size;
8882 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8885 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Leading null items of the 1st vector must be free placeholders.
8888 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
8890 const VmaSuballocation& suballoc = suballocations1st[i];
8891 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
8892 suballoc.hAllocation == VK_NULL_HANDLE);
8895 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// Remaining 1st-vector items: offsets keep increasing, data stays coherent.
8897 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
8899 const VmaSuballocation& suballoc = suballocations1st[i];
8900 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8902 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8903 VMA_VALIDATE(suballoc.offset >= offset);
8904 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
8908 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8909 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8910 sumUsedSize += suballoc.size;
8917 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8919 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// Double-stack mode: 2nd vector grows downward from the end of the block,
// so iterate it in reverse to keep offsets increasing.
8921 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8923 const size_t suballoc2ndCount = suballocations2nd.size();
8924 size_t nullItem2ndCount = 0;
8925 for(
size_t i = suballoc2ndCount; i--; )
8927 const VmaSuballocation& suballoc = suballocations2nd[i];
8928 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8930 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8931 VMA_VALIDATE(suballoc.offset >= offset);
8935 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8936 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8937 sumUsedSize += suballoc.size;
8944 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8947 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Global accounting checks.
8950 VMA_VALIDATE(offset <= GetSize());
8951 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
// Number of live allocations: total items in both vectors minus the null
// (freed placeholder) items tracked by the counters.
8956 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 8958 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
8959 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Returns (an estimate of) the largest contiguous free range, depending on
// how the second vector is being used.
8962 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 8964 const VkDeviceSize size = GetSize();
8976 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8978 switch(m_2ndVectorMode)
// Only the 1st vector in use: free space is before its first item or after
// its last item.
8980 case SECOND_VECTOR_EMPTY:
8986 const size_t suballocations1stCount = suballocations1st.size();
8987 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
8988 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
8989 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
8991 firstSuballoc.offset,
8992 size - (lastSuballoc.offset + lastSuballoc.size));
// Ring buffer: the free gap lies between the end of the 2nd vector and the
// start of the 1st vector.
8996 case SECOND_VECTOR_RING_BUFFER:
9001 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9002 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
9003 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
9004 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
// Double stack: the free gap lies between the top of the 1st (bottom) stack
// and the top of the 2nd (upper) stack.
9008 case SECOND_VECTOR_DOUBLE_STACK:
9013 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9014 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
9015 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
9016 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Fills outInfo with allocation/unused-range statistics by walking the block
// in increasing-offset order: ring-buffer 2nd vector first (low offsets),
// then the 1st vector, then (in double-stack mode) the 2nd vector backwards.
// `lastOffset` tracks the end of the last range processed so the gaps
// between ranges can be accounted as unused.
9026 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 9028 const VkDeviceSize size = GetSize();
9029 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9030 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9031 const size_t suballoc1stCount = suballocations1st.size();
9032 const size_t suballoc2ndCount = suballocations2nd.size();
9043 VkDeviceSize lastOffset = 0;
// Pass 1 (ring buffer only): 2nd-vector allocations occupy the region below
// the 1st vector's first item.
9045 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9047 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9048 size_t nextAlloc2ndIndex = 0;
9049 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed placeholder) items.
9052 while(nextAlloc2ndIndex < suballoc2ndCount &&
9053 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9055 ++nextAlloc2ndIndex;
9059 if(nextAlloc2ndIndex < suballoc2ndCount)
9061 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
9064 if(lastOffset < suballoc.offset)
9067 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9081 lastOffset = suballoc.offset + suballoc.size;
9082 ++nextAlloc2ndIndex;
// No more allocations: the remainder up to the boundary is unused.
9088 if(lastOffset < freeSpace2ndTo1stEnd)
9090 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9098 lastOffset = freeSpace2ndTo1stEnd;
// Pass 2: 1st-vector allocations, up to the bottom of the upper stack (in
// double-stack mode) or the end of the block otherwise.
9103 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9104 const VkDeviceSize freeSpace1stTo2ndEnd =
9105 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9106 while(lastOffset < freeSpace1stTo2ndEnd)
9109 while(nextAlloc1stIndex < suballoc1stCount &&
9110 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9112 ++nextAlloc1stIndex;
9116 if(nextAlloc1stIndex < suballoc1stCount)
9118 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9121 if(lastOffset < suballoc.offset)
9124 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9138 lastOffset = suballoc.offset + suballoc.size;
9139 ++nextAlloc1stIndex;
9145 if(lastOffset < freeSpace1stTo2ndEnd)
9147 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9155 lastOffset = freeSpace1stTo2ndEnd;
// Pass 3 (double stack only): upper-stack allocations, iterated from the
// back of the 2nd vector (lowest offset) towards its front.
9159 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9161 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9162 while(lastOffset < size)
9165 while(nextAlloc2ndIndex != SIZE_MAX &&
9166 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9168 --nextAlloc2ndIndex;
9172 if(nextAlloc2ndIndex != SIZE_MAX)
9174 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9177 if(lastOffset < suballoc.offset)
9180 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9194 lastOffset = suballoc.offset + suballoc.size;
9195 --nextAlloc2ndIndex;
9201 if(lastOffset < size)
9203 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Accumulates this block's statistics into inoutStats, walking the block in
// increasing-offset order exactly like CalcAllocationStatInfo: ring-buffer
// 2nd vector, then 1st vector, then (double-stack mode) 2nd vector backwards.
9219 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 9221 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9222 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9223 const VkDeviceSize size = GetSize();
9224 const size_t suballoc1stCount = suballocations1st.size();
9225 const size_t suballoc2ndCount = suballocations2nd.size();
9227 inoutStats.
size += size;
9229 VkDeviceSize lastOffset = 0;
// Pass 1 (ring buffer only): 2nd-vector allocations below the 1st vector.
9231 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9233 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
// NOTE(review): the parallel loop in CalcAllocationStatInfo starts this same
// 2nd-vector walk at index 0, but here it starts at m_1stNullItemsBeginCount,
// which is a 1st-vector counter. Looks like a copy-paste bug that can skip
// leading 2nd-vector allocations — verify against upstream VMA.
9234 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
9235 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed placeholder) items.
9238 while(nextAlloc2ndIndex < suballoc2ndCount &&
9239 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9241 ++nextAlloc2ndIndex;
9245 if(nextAlloc2ndIndex < suballoc2ndCount)
9247 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
9250 if(lastOffset < suballoc.offset)
9253 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9264 lastOffset = suballoc.offset + suballoc.size;
9265 ++nextAlloc2ndIndex;
9270 if(lastOffset < freeSpace2ndTo1stEnd)
9273 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9280 lastOffset = freeSpace2ndTo1stEnd;
// Pass 2: 1st-vector allocations up to the upper stack (or end of block).
9285 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9286 const VkDeviceSize freeSpace1stTo2ndEnd =
9287 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9288 while(lastOffset < freeSpace1stTo2ndEnd)
9291 while(nextAlloc1stIndex < suballoc1stCount &&
9292 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9294 ++nextAlloc1stIndex;
9298 if(nextAlloc1stIndex < suballoc1stCount)
9300 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9303 if(lastOffset < suballoc.offset)
9306 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9317 lastOffset = suballoc.offset + suballoc.size;
9318 ++nextAlloc1stIndex;
9323 if(lastOffset < freeSpace1stTo2ndEnd)
9326 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9333 lastOffset = freeSpace1stTo2ndEnd;
// Pass 3 (double stack only): upper-stack allocations, back of the 2nd
// vector (lowest offset) first.
9337 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9339 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9340 while(lastOffset < size)
9343 while(nextAlloc2ndIndex != SIZE_MAX &&
9344 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9346 --nextAlloc2ndIndex;
9350 if(nextAlloc2ndIndex != SIZE_MAX)
9352 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9355 if(lastOffset < suballoc.offset)
9358 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9369 lastOffset = suballoc.offset + suballoc.size;
9370 --nextAlloc2ndIndex;
9375 if(lastOffset < size)
9378 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Emits a detailed JSON map of this linear (ring-buffer / double-stack) block.
// Two passes over the suballocation vectors: the first pass counts allocations,
// unused ranges and used bytes (needed up front for PrintDetailedMap_Begin);
// the second pass walks the same regions again and prints each allocation and
// unused range via the PrintDetailedMap_* helpers.
// NOTE(review): this file's extraction dropped brace-only/blank lines (the
// embedded original line numbers are non-contiguous); code text kept as-is.
9391 #if VMA_STATS_STRING_ENABLED 9392 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 9394 const VkDeviceSize size = GetSize();
9395 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9396 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9397 const size_t suballoc1stCount = suballocations1st.size();
9398 const size_t suballoc2ndCount = suballocations2nd.size();
9402 size_t unusedRangeCount = 0;
9403 VkDeviceSize usedBytes = 0;
9405 VkDeviceSize lastOffset = 0;
// FIRST PASS, part 1: count allocations in the 2nd vector when it serves as a
// ring buffer — those occupy [0, freeSpace2ndTo1stEnd) before the 1st vector.
9407 size_t alloc2ndCount = 0;
9408 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9410 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9411 size_t nextAlloc2ndIndex = 0;
9412 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed/lost) items between real allocations.
9415 while(nextAlloc2ndIndex < suballoc2ndCount &&
9416 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9418 ++nextAlloc2ndIndex;
9422 if(nextAlloc2ndIndex < suballoc2ndCount)
9424 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9427 if(lastOffset < suballoc.offset)
9436 usedBytes += suballoc.size;
9439 lastOffset = suballoc.offset + suballoc.size;
9440 ++nextAlloc2ndIndex;
9445 if(lastOffset < freeSpace2ndTo1stEnd)
9452 lastOffset = freeSpace2ndTo1stEnd;
// FIRST PASS, part 2: count allocations in the 1st vector, starting past the
// null-item prefix; its region ends at the bottom of the 2nd stack (or size).
9457 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9458 size_t alloc1stCount = 0;
9459 const VkDeviceSize freeSpace1stTo2ndEnd =
9460 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9461 while(lastOffset < freeSpace1stTo2ndEnd)
9464 while(nextAlloc1stIndex < suballoc1stCount &&
9465 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9467 ++nextAlloc1stIndex;
9471 if(nextAlloc1stIndex < suballoc1stCount)
9473 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9476 if(lastOffset < suballoc.offset)
9485 usedBytes += suballoc.size;
9488 lastOffset = suballoc.offset + suballoc.size;
9489 ++nextAlloc1stIndex;
9494 if(lastOffset < size)
9501 lastOffset = freeSpace1stTo2ndEnd;
// FIRST PASS, part 3: count allocations in the 2nd vector when it is a double
// stack — iterated back-to-front so offsets are visited in ascending order.
9505 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9507 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9508 while(lastOffset < size)
9511 while(nextAlloc2ndIndex != SIZE_MAX &&
9512 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9514 --nextAlloc2ndIndex;
9518 if(nextAlloc2ndIndex != SIZE_MAX)
9520 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9523 if(lastOffset < suballoc.offset)
9532 usedBytes += suballoc.size;
9535 lastOffset = suballoc.offset + suballoc.size;
9536 --nextAlloc2ndIndex;
9541 if(lastOffset < size)
// Summary header for the JSON map, then SECOND PASS: print, mirroring the
// three regions walked above (lastOffset is presumably reset to 0 in elided
// lines between the passes — TODO confirm against the full source).
9553 const VkDeviceSize unusedBytes = size - usedBytes;
9554 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
9559 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9561 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9562 size_t nextAlloc2ndIndex = 0;
9563 while(lastOffset < freeSpace2ndTo1stEnd)
9566 while(nextAlloc2ndIndex < suballoc2ndCount &&
9567 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9569 ++nextAlloc2ndIndex;
9573 if(nextAlloc2ndIndex < suballoc2ndCount)
9575 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9578 if(lastOffset < suballoc.offset)
9581 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9582 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9587 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9590 lastOffset = suballoc.offset + suballoc.size;
9591 ++nextAlloc2ndIndex;
9596 if(lastOffset < freeSpace2ndTo1stEnd)
9599 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9600 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9604 lastOffset = freeSpace2ndTo1stEnd;
// Print allocations and gaps from the 1st vector.
9609 nextAlloc1stIndex = m_1stNullItemsBeginCount;
9610 while(lastOffset < freeSpace1stTo2ndEnd)
9613 while(nextAlloc1stIndex < suballoc1stCount &&
9614 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9616 ++nextAlloc1stIndex;
9620 if(nextAlloc1stIndex < suballoc1stCount)
9622 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9625 if(lastOffset < suballoc.offset)
9628 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9629 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9634 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9637 lastOffset = suballoc.offset + suballoc.size;
9638 ++nextAlloc1stIndex;
9643 if(lastOffset < freeSpace1stTo2ndEnd)
9646 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9647 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9651 lastOffset = freeSpace1stTo2ndEnd;
// Print allocations from the top part of the double stack (back-to-front).
9655 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9657 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9658 while(lastOffset < size)
9661 while(nextAlloc2ndIndex != SIZE_MAX &&
9662 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9664 --nextAlloc2ndIndex;
9668 if(nextAlloc2ndIndex != SIZE_MAX)
9670 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9673 if(lastOffset < suballoc.offset)
9676 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9677 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9682 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9685 lastOffset = suballoc.offset + suballoc.size;
9686 --nextAlloc2ndIndex;
9691 if(lastOffset < size)
9694 const VkDeviceSize unusedRangeSize = size - lastOffset;
9695 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
// Close the JSON map.
9704 PrintDetailedMap_End(json);
// Entry point for allocation requests on a linear block: validates arguments,
// then dispatches to the upper-address (double-stack top) or lower-address
// implementation with identical parameters.
// NOTE(review): the visible parameter list is missing lines (original 9714 and
// 9717) — the body references `upperAddress` and `strategy`, presumably
// declared there; confirm against the full source.
9706 #endif // #if VMA_STATS_STRING_ENABLED 9708 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
9709 uint32_t currentFrameIndex,
9710 uint32_t frameInUseCount,
9711 VkDeviceSize bufferImageGranularity,
9712 VkDeviceSize allocSize,
9713 VkDeviceSize allocAlignment,
9715 VmaSuballocationType allocType,
9716 bool canMakeOtherLost,
9718 VmaAllocationRequest* pAllocationRequest)
// Preconditions: non-zero size, a real allocation type, and a valid out-param.
9720 VMA_ASSERT(allocSize > 0);
9721 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9722 VMA_ASSERT(pAllocationRequest != VMA_NULL);
9723 VMA_HEAVY_ASSERT(Validate());
9724 return upperAddress ?
9725 CreateAllocationRequest_UpperAddress(
9726 currentFrameIndex, frameInUseCount, bufferImageGranularity,
9727 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
9728 CreateAllocationRequest_LowerAddress(
9729 currentFrameIndex, frameInUseCount, bufferImageGranularity,
9730 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
// Tries to place an allocation at the top of the block, growing the 2nd vector
// downward as a stack. Fails when the block is used as a ring buffer, when the
// request doesn't fit below the previous top allocation, or when it would
// collide with the end of the 1st vector.
// NOTE(review): parameter lines 9741 (and possibly a `strategy` param) are
// elided by extraction; code text kept as-is.
9733 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
9734 uint32_t currentFrameIndex,
9735 uint32_t frameInUseCount,
9736 VkDeviceSize bufferImageGranularity,
9737 VkDeviceSize allocSize,
9738 VkDeviceSize allocAlignment,
9739 VmaSuballocationType allocType,
9740 bool canMakeOtherLost,
9742 VmaAllocationRequest* pAllocationRequest)
9744 const VkDeviceSize size = GetSize();
9745 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9746 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Upper-address allocation is only valid in double-stack mode.
9748 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9750 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
9755 if(allocSize > size)
// Start from the top of the block, or just below the lowest 2nd-stack item.
9759 VkDeviceSize resultBaseOffset = size - allocSize;
9760 if(!suballocations2nd.empty())
9762 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9763 resultBaseOffset = lastSuballoc.offset - allocSize;
9764 if(allocSize > lastSuballoc.offset)
9771 VkDeviceSize resultOffset = resultBaseOffset;
// Leave room for the debug margin below the allocation, then align DOWN
// (upper-address allocations grow toward lower offsets).
9774 if(VMA_DEBUG_MARGIN > 0)
9776 if(resultOffset < VMA_DEBUG_MARGIN)
9780 resultOffset -= VMA_DEBUG_MARGIN;
9784 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Respect bufferImageGranularity against neighbors already in the 2nd stack.
9788 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9790 bool bufferImageGranularityConflict =
false;
9791 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9793 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9794 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9796 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
9798 bufferImageGranularityConflict =
true;
9806 if(bufferImageGranularityConflict)
9808 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity)
// The candidate range must not overlap the end of the 1st vector.
9813 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
9814 suballocations1st.back().offset + suballocations1st.back().size :
9816 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
// Also check granularity conflicts against the 1st vector's last items.
9820 if(bufferImageGranularity > 1)
9822 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9824 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9825 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9827 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success: fill the request. Nothing needs to be made lost on this path.
9841 pAllocationRequest->offset = resultOffset;
9842 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
9843 pAllocationRequest->sumItemSize = 0;
9845 pAllocationRequest->itemsToMakeLostCount = 0;
9846 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
// Tries to place an allocation at a lower address. Two strategies:
//  1) append to the end of the 1st vector (when the 2nd vector is empty or a
//     double stack), bounded above by the bottom of the 2nd stack;
//  2) append to the end of the 2nd vector acting as a ring buffer (when the
//     2nd vector is empty or already a ring buffer), optionally making
//     existing lost-able allocations from the 1st vector lost to create room.
// NOTE(review): an elided parameter line (original 9861, presumably
// `strategy`) is missing from the visible list; code text kept as-is.
9853 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
9854 uint32_t currentFrameIndex,
9855 uint32_t frameInUseCount,
9856 VkDeviceSize bufferImageGranularity,
9857 VkDeviceSize allocSize,
9858 VkDeviceSize allocAlignment,
9859 VmaSuballocationType allocType,
9860 bool canMakeOtherLost,
9862 VmaAllocationRequest* pAllocationRequest)
9864 const VkDeviceSize size = GetSize();
9865 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9866 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Strategy 1: try to grow the 1st vector upward.
9868 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9872 VkDeviceSize resultBaseOffset = 0;
9873 if(!suballocations1st.empty())
9875 const VmaSuballocation& lastSuballoc = suballocations1st.back();
9876 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9880 VkDeviceSize resultOffset = resultBaseOffset;
// Debug margin before the allocation, then align UP.
9883 if(VMA_DEBUG_MARGIN > 0)
9885 resultOffset += VMA_DEBUG_MARGIN;
9889 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// bufferImageGranularity check against preceding items in the 1st vector.
9893 if(bufferImageGranularity > 1 && !suballocations1st.empty())
9895 bool bufferImageGranularityConflict =
false;
9896 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9898 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9899 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9901 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9903 bufferImageGranularityConflict =
true;
9911 if(bufferImageGranularityConflict)
9913 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the bottom of the 2nd stack, or at the block's end.
9917 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
9918 suballocations2nd.back().offset : size;
9921 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
// Granularity check against following items in the 2nd (double-stack) vector.
9925 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9927 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9929 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9930 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9932 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success on strategy 1.
9946 pAllocationRequest->offset = resultOffset;
9947 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
9948 pAllocationRequest->sumItemSize = 0;
9950 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
9951 pAllocationRequest->itemsToMakeLostCount = 0;
// Strategy 2: wrap around — grow the 2nd vector as a ring buffer, below the
// start of the 1st vector's live items.
9958 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9960 VMA_ASSERT(!suballocations1st.empty());
9962 VkDeviceSize resultBaseOffset = 0;
9963 if(!suballocations2nd.empty())
9965 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9966 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9970 VkDeviceSize resultOffset = resultBaseOffset;
9973 if(VMA_DEBUG_MARGIN > 0)
9975 resultOffset += VMA_DEBUG_MARGIN;
9979 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
9983 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9985 bool bufferImageGranularityConflict =
false;
9986 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
9988 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
9989 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9991 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9993 bufferImageGranularityConflict =
true;
10001 if(bufferImageGranularityConflict)
10003 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
10007 pAllocationRequest->itemsToMakeLostCount = 0;
10008 pAllocationRequest->sumItemSize = 0;
10009 size_t index1st = m_1stNullItemsBeginCount;
// Optionally count 1st-vector allocations that would have to be made lost
// (i.e. those overlapping the candidate range and old enough per
// frameInUseCount) to make this wrap-around placement possible.
10011 if(canMakeOtherLost)
10013 while(index1st < suballocations1st.size() &&
10014 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
10017 const VmaSuballocation& suballoc = suballocations1st[index1st];
10018 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
10024 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
10025 if(suballoc.hAllocation->CanBecomeLost() &&
10026 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10028 ++pAllocationRequest->itemsToMakeLostCount;
10029 pAllocationRequest->sumItemSize += suballoc.size;
// Items on the same memory page as the new allocation's end may also have
// to be made lost due to bufferImageGranularity.
10041 if(bufferImageGranularity > 1)
10043 while(index1st < suballocations1st.size())
10045 const VmaSuballocation& suballoc = suballocations1st[index1st];
10046 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
10048 if(suballoc.hAllocation != VK_NULL_HANDLE)
10051 if(suballoc.hAllocation->CanBecomeLost() &&
10052 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
10054 ++pAllocationRequest->itemsToMakeLostCount;
10055 pAllocationRequest->sumItemSize += suballoc.size;
// Special case not supported: request would spill past the end of the block.
10073 if(index1st == suballocations1st.size() &&
10074 resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
10077 VMA_DEBUG_LOG(
"Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
// The candidate fits either before the next surviving 1st-vector item or
// within the block's tail.
10082 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
10083 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
10087 if(bufferImageGranularity > 1)
10089 for(
size_t nextSuballocIndex = index1st;
10090 nextSuballocIndex < suballocations1st.size();
10091 nextSuballocIndex++)
10093 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
10094 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10096 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success on strategy 2.
10110 pAllocationRequest->offset = resultOffset;
10111 pAllocationRequest->sumFreeSize =
10112 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
10114 - pAllocationRequest->sumItemSize;
10115 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
// Makes lost the allocations counted in pAllocationRequest->itemsToMakeLostCount,
// walking the 1st vector from its first live item and, in ring-buffer mode,
// continuing into the 2nd vector. Updates the null-item counters and free size,
// then compacts via CleanupAfterFree().
10124 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
10125 uint32_t currentFrameIndex,
10126 uint32_t frameInUseCount,
10127 VmaAllocationRequest* pAllocationRequest)
// Nothing to do if the request needs no items made lost.
10129 if(pAllocationRequest->itemsToMakeLostCount == 0)
10134 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
10137 SuballocationVectorType* suballocations = &AccessSuballocations1st();
10138 size_t index = m_1stNullItemsBeginCount;
10139 size_t madeLostCount = 0;
10140 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
// Reached the end of the 1st vector: switch to the 2nd in ring-buffer mode.
10142 if(index == suballocations->size())
10146 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10148 suballocations = &AccessSuballocations2nd();
10152 VMA_ASSERT(!suballocations->empty());
10154 VmaSuballocation& suballoc = (*suballocations)[index];
10155 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10157 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
10158 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
10159 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Turn the suballocation into a null (free) item and track which vector it
// belonged to so the corresponding null-item counter stays consistent.
10161 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10162 suballoc.hAllocation = VK_NULL_HANDLE;
10163 m_SumFreeSize += suballoc.size;
10164 if(suballocations == &AccessSuballocations1st())
10166 ++m_1stNullItemsMiddleCount;
10170 ++m_2ndNullItemsCount;
10182 CleanupAfterFree();
// Makes lost every allocation in both vectors that CanBecomeLost() and whose
// MakeLost() succeeds for the given frame window. Returns how many were lost;
// compacts the vectors afterwards if anything changed.
10188 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
10190 uint32_t lostAllocationCount = 0;
// 1st vector: skip the null-item prefix.
10192 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10193 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10195 VmaSuballocation& suballoc = suballocations1st[i];
10196 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10197 suballoc.hAllocation->CanBecomeLost() &&
10198 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10200 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10201 suballoc.hAllocation = VK_NULL_HANDLE;
10202 ++m_1stNullItemsMiddleCount;
10203 m_SumFreeSize += suballoc.size;
10204 ++lostAllocationCount;
// 2nd vector: full scan.
10208 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10209 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10211 VmaSuballocation& suballoc = suballocations2nd[i];
10212 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10213 suballoc.hAllocation->CanBecomeLost() &&
10214 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10216 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10217 suballoc.hAllocation = VK_NULL_HANDLE;
10218 ++m_2ndNullItemsCount;
10219 m_SumFreeSize += suballoc.size;
10220 ++lostAllocationCount;
10224 if(lostAllocationCount)
10226 CleanupAfterFree();
10229 return lostAllocationCount;
// Validates the debug magic values written VMA_DEBUG_MARGIN bytes before and
// immediately after every live allocation in both vectors. Returns
// VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted margin found.
10232 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
10234 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10235 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10237 const VmaSuballocation& suballoc = suballocations1st[i];
10238 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10240 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10242 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10243 return VK_ERROR_VALIDATION_FAILED_EXT;
10245 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10247 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10248 return VK_ERROR_VALIDATION_FAILED_EXT;
// Same checks for the 2nd vector.
10253 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10254 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10256 const VmaSuballocation& suballoc = suballocations2nd[i];
10257 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10259 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10261 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10262 return VK_ERROR_VALIDATION_FAILED_EXT;
10264 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10266 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10267 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously created allocation request: builds the new
// VmaSuballocation and appends it to the proper vector according to
// request.type, updating m_2ndVectorMode and m_SumFreeSize.
// NOTE(review): extraction dropped a parameter line (original 10279/10280,
// presumably `VmaAllocation hAllocation`) — the body uses `hAllocation`;
// confirm against the full source.
10275 void VmaBlockMetadata_Linear::Alloc(
10276 const VmaAllocationRequest& request,
10277 VmaSuballocationType type,
10278 VkDeviceSize allocSize,
10281 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
10283 switch(request.type)
// Upper address: push onto the 2nd vector and switch it to double-stack mode.
10285 case VmaAllocationRequestType::UpperAddress:
10287 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
10288 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
10289 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10290 suballocations2nd.push_back(newSuballoc);
10291 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
// End of 1st vector: must come after the current last item and fit in block.
10294 case VmaAllocationRequestType::EndOf1st:
10296 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10298 VMA_ASSERT(suballocations1st.empty() ||
10299 request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
10301 VMA_ASSERT(request.offset + allocSize <= GetSize());
10303 suballocations1st.push_back(newSuballoc);
// End of 2nd vector (ring buffer wrap-around): must fit below the first live
// item of the 1st vector; transitions the 2nd vector into ring-buffer mode.
10306 case VmaAllocationRequestType::EndOf2nd:
10308 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10310 VMA_ASSERT(!suballocations1st.empty() &&
10311 request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
10312 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10314 switch(m_2ndVectorMode)
10316 case SECOND_VECTOR_EMPTY:
10318 VMA_ASSERT(suballocations2nd.empty());
10319 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
10321 case SECOND_VECTOR_RING_BUFFER:
10323 VMA_ASSERT(!suballocations2nd.empty());
10325 case SECOND_VECTOR_DOUBLE_STACK:
10326 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
10332 suballocations2nd.push_back(newSuballoc);
10336 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
10339 m_SumFreeSize -= newSuballoc.size;
// Frees an allocation by delegating to FreeAtOffset with its stored offset.
10342 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
10344 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation at the given offset. Fast paths first: the first
// live item of the 1st vector, the last item of the 2nd vector (ring buffer or
// double stack), or the last item of the 1st vector. Otherwise binary-searches
// the middle of the 1st vector, then the 2nd vector. Asserts if not found.
10347 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
10349 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10350 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Fast path: first live item of the 1st vector — just extend the null prefix.
10352 if(!suballocations1st.empty())
10355 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10356 if(firstSuballoc.offset == offset)
10358 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10359 firstSuballoc.hAllocation = VK_NULL_HANDLE;
10360 m_SumFreeSize += firstSuballoc.size;
10361 ++m_1stNullItemsBeginCount;
10362 CleanupAfterFree();
// Fast path: last item of the 2nd vector can simply be popped.
10368 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
10369 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10371 VmaSuballocation& lastSuballoc = suballocations2nd.back();
10372 if(lastSuballoc.offset == offset)
10374 m_SumFreeSize += lastSuballoc.size;
10375 suballocations2nd.pop_back();
10376 CleanupAfterFree();
// Fast path: last item of the 1st vector (only safe when 2nd is empty).
10381 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
10383 VmaSuballocation& lastSuballoc = suballocations1st.back();
10384 if(lastSuballoc.offset == offset)
10386 m_SumFreeSize += lastSuballoc.size;
10387 suballocations1st.pop_back();
10388 CleanupAfterFree();
// Slow path: binary search in the 1st vector (sorted by ascending offset);
// found items become null "middle" items rather than being erased.
10395 VmaSuballocation refSuballoc;
10396 refSuballoc.offset = offset;
10398 SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
10399 suballocations1st.begin() + m_1stNullItemsBeginCount,
10400 suballocations1st.end(),
10402 if(it != suballocations1st.end())
10404 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10405 it->hAllocation = VK_NULL_HANDLE;
10406 ++m_1stNullItemsMiddleCount;
10407 m_SumFreeSize += it->size;
10408 CleanupAfterFree();
// Slow path: binary search in the 2nd vector — ascending order in ring-buffer
// mode, descending (OffsetGreater) in double-stack mode.
10413 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
10416 VmaSuballocation refSuballoc;
10417 refSuballoc.offset = offset;
10419 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
10420 VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
10421 VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
10422 if(it != suballocations2nd.end())
10424 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10425 it->hAllocation = VK_NULL_HANDLE;
10426 ++m_2ndNullItemsCount;
10427 m_SumFreeSize += it->size;
10428 CleanupAfterFree();
10433 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
// Heuristic: compact the 1st vector when it has more than 32 items and null
// items make up a large share (nullItems*2 >= liveItems*3) of it.
10436 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 10438 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10439 const size_t suballocCount = AccessSuballocations1st().size();
10440 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Housekeeping after any free/lost operation: trims null items from the edges
// of both vectors, optionally compacts the 1st vector, and — when the 1st
// vector has emptied — promotes the ring-buffer 2nd vector into the 1st role.
// NOTE(review): an elided line (original ~10448, presumably an `if(IsEmpty())`
// guard) precedes the full-reset branch below — confirm against full source.
10443 void VmaBlockMetadata_Linear::CleanupAfterFree()
10445 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10446 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Full reset branch: everything freed, clear both vectors and all counters.
10450 suballocations1st.clear();
10451 suballocations2nd.clear();
10452 m_1stNullItemsBeginCount = 0;
10453 m_1stNullItemsMiddleCount = 0;
10454 m_2ndNullItemsCount = 0;
10455 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10459 const size_t suballoc1stCount = suballocations1st.size();
10460 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10461 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Grow the null prefix over leading null middle items of the 1st vector.
10464 while(m_1stNullItemsBeginCount < suballoc1stCount &&
10465 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10467 ++m_1stNullItemsBeginCount;
10468 --m_1stNullItemsMiddleCount;
// Pop trailing null items off the end of the 1st vector.
10472 while(m_1stNullItemsMiddleCount > 0 &&
10473 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
10475 --m_1stNullItemsMiddleCount;
10476 suballocations1st.pop_back();
// Pop trailing null items off the end of the 2nd vector.
10480 while(m_2ndNullItemsCount > 0 &&
10481 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
10483 --m_2ndNullItemsCount;
10484 suballocations2nd.pop_back();
// Remove leading null items of the 2nd vector (shifts elements; O(n) each).
10488 while(m_2ndNullItemsCount > 0 &&
10489 suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
10491 --m_2ndNullItemsCount;
10492 suballocations2nd.remove(0);
// Compact the 1st vector in place when the null-item ratio is high enough.
10495 if(ShouldCompact1st())
10497 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
10498 size_t srcIndex = m_1stNullItemsBeginCount;
10499 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
10501 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
10505 if(dstIndex != srcIndex)
10507 suballocations1st[dstIndex] = suballocations1st[srcIndex];
10511 suballocations1st.resize(nonNullItemCount);
10512 m_1stNullItemsBeginCount = 0;
10513 m_1stNullItemsMiddleCount = 0;
// An emptied 2nd vector returns to EMPTY mode.
10517 if(suballocations2nd.empty())
10519 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// If the 1st vector has no live items, either drop it entirely or — in
// ring-buffer mode — swap roles so the 2nd vector becomes the new 1st.
10523 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
10525 suballocations1st.clear();
10526 m_1stNullItemsBeginCount = 0;
10528 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10531 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10532 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
10533 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
10534 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10536 ++m_1stNullItemsBeginCount;
10537 --m_1stNullItemsMiddleCount;
10539 m_2ndNullItemsCount = 0;
// Flip which internal vector plays the "1st" role.
10540 m_1stVectorIndex ^= 1;
10545 VMA_HEAVY_ASSERT(Validate());
// Constructor: initializes base metadata and zeroes the per-level free lists.
// NOTE(review): extraction dropped some member initializers between original
// lines 10555 and 10559 (counters/pointers initialized in the elided lines).
10552 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
10553 VmaBlockMetadata(hAllocator),
10555 m_AllocationCount(0),
10559 memset(m_FreeList, 0,
sizeof(m_FreeList));
// Destructor: recursively frees the whole buddy tree starting at the root.
10562 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
10564 DeleteNode(m_Root);
// Initializes the buddy allocator for a block of the given size: usable size
// is the largest power of two <= size, level count is derived from
// MAX_LEVELS/MIN_NODE_SIZE, and a single free root node covers everything.
10567 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
10569 VmaBlockMetadata::Init(size);
// Any remainder beyond the power-of-two prefix is unusable by this algorithm.
10571 m_UsableSize = VmaPrevPow2(size);
10572 m_SumFreeSize = m_UsableSize;
10576 while(m_LevelCount < MAX_LEVELS &&
10577 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
// Create the root node spanning the whole usable range and put it on the
// level-0 free list.
10582 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
10583 rootNode->offset = 0;
10584 rootNode->type = Node::TYPE_FREE;
10585 rootNode->parent = VMA_NULL;
10586 rootNode->buddy = VMA_NULL;
10589 AddToFreeListFront(0, rootNode);
// Debug validation: recursively checks the node tree, then cross-checks the
// allocation count and free-size totals, and verifies every free-list is a
// well-formed doubly linked list of TYPE_FREE nodes (and empty beyond
// m_LevelCount).
10592 bool VmaBlockMetadata_Buddy::Validate()
const 10595 ValidationContext ctx;
10596 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
10598 VMA_VALIDATE(
false &&
"ValidateNode failed.");
10600 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
10601 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
// Per-level free-list integrity: head has no prev, links are symmetric,
// tail pointer matches the last node.
10604 for(uint32_t level = 0; level < m_LevelCount; ++level)
10606 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
10607 m_FreeList[level].front->free.prev == VMA_NULL);
10609 for(Node* node = m_FreeList[level].front;
10611 node = node->free.next)
10613 VMA_VALIDATE(node->type == Node::TYPE_FREE);
10615 if(node->free.next == VMA_NULL)
10617 VMA_VALIDATE(m_FreeList[level].back == node);
10621 VMA_VALIDATE(node->free.next->free.prev == node);
// Unused levels must hold no nodes.
10627 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
10629 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
// Largest contiguous free range = node size of the shallowest level that has
// any free node (levels are scanned from largest node size downward).
10635 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const 10637 for(uint32_t level = 0; level < m_LevelCount; ++level)
10639 if(m_FreeList[level].front != VMA_NULL)
10641 return LevelToNodeSize(level);
// Fills outInfo by recursing over the node tree; the unusable tail (size minus
// power-of-two usable size) is accounted separately when present.
// NOTE(review): lines initializing outInfo and handling the unusableSize
// branch body are elided by extraction.
10647 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 10649 const VkDeviceSize unusableSize = GetUnusableSize();
10660 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
10662 if(unusableSize > 0)
// Accumulates this block's totals into pool-wide stats; the unusable tail is
// counted as unused space.
// NOTE(review): the body of the unusableSize branch is elided by extraction.
10671 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const 10673 const VkDeviceSize unusableSize = GetUnusableSize();
10675 inoutStats.
size += GetSize();
10676 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
10681 if(unusableSize > 0)
// JSON dump for the buddy allocator: computes stats for the header, prints the
// node tree recursively, then reports the unusable tail as an unused range.
10688 #if VMA_STATS_STRING_ENABLED 10690 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const 10694 CalcAllocationStatInfo(stat);
10696 PrintDetailedMap_Begin(
10702 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
10704 const VkDeviceSize unusableSize = GetUnusableSize();
10705 if(unusableSize > 0)
10707 PrintDetailedMap_UnusedRange(json,
10712 PrintDetailedMap_End(json);
// Buddy allocation request: rounds size/alignment up to bufferImageGranularity
// for image-like types, then scans free lists from the target level up toward
// the root for a suitably aligned free node. The chosen level is stashed in
// customData for Alloc() to split down to the target level.
// NOTE(review): parameter lines 10723/10726 (presumably `upperAddress` and
// `strategy`) are elided; the body references `upperAddress`.
10715 #endif // #if VMA_STATS_STRING_ENABLED 10717 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
10718 uint32_t currentFrameIndex,
10719 uint32_t frameInUseCount,
10720 VkDeviceSize bufferImageGranularity,
10721 VkDeviceSize allocSize,
10722 VkDeviceSize allocAlignment,
10724 VmaSuballocationType allocType,
10725 bool canMakeOtherLost,
10727 VmaAllocationRequest* pAllocationRequest)
10729 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Conservatively enlarge image-type requests so granularity conflicts with
// neighbors are impossible within a node.
10733 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
10734 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
10735 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
10737 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
10738 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
10741 if(allocSize > m_UsableSize)
// Search from the deepest level that can hold allocSize up to the root.
10746 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10747 for(uint32_t level = targetLevel + 1; level--; )
10749 for(Node* freeNode = m_FreeList[level].front;
10750 freeNode != VMA_NULL;
10751 freeNode = freeNode->free.next)
10753 if(freeNode->offset % allocAlignment == 0)
10755 pAllocationRequest->type = VmaAllocationRequestType::Normal;
10756 pAllocationRequest->offset = freeNode->offset;
10757 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
10758 pAllocationRequest->sumItemSize = 0;
10759 pAllocationRequest->itemsToMakeLostCount = 0;
// The level found is passed through customData to Alloc().
10760 pAllocationRequest->customData = (
void*)(uintptr_t)level;
// The buddy algorithm does not support making allocations lost: the request
// succeeds only if it needed nothing made lost in the first place.
10769 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
10770 uint32_t currentFrameIndex,
10771 uint32_t frameInUseCount,
10772 VmaAllocationRequest* pAllocationRequest)
10778 return pAllocationRequest->itemsToMakeLostCount == 0;
// Lost allocations are unsupported by the buddy algorithm; the (elided) body
// presumably returns 0 — confirm against the full source.
10781 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// Commits a buddy allocation: finds the free node chosen by
// CreateAllocationRequest (level from request.customData, matched by offset),
// splits it repeatedly until reaching the target level, then marks the final
// node as an allocation and updates counters.
// NOTE(review): a parameter line (original 10794/10795, presumably
// `VmaAllocation hAllocation`) is elided — the body assigns `hAllocation`.
10790 void VmaBlockMetadata_Buddy::Alloc(
10791 const VmaAllocationRequest& request,
10792 VmaSuballocationType type,
10793 VkDeviceSize allocSize,
10796 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
10798 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10799 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Locate the exact free node by offset on the recorded level's free list.
10801 Node* currNode = m_FreeList[currLevel].front;
10802 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
10803 while(currNode->offset != request.offset)
10805 currNode = currNode->free.next;
10806 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split down: each iteration replaces currNode with two half-size buddies.
10810 while(currLevel < targetLevel)
10814 RemoveFromFreeList(currLevel, currNode);
10816 const uint32_t childrenLevel = currLevel + 1;
10819 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
10820 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
10822 leftChild->offset = currNode->offset;
10823 leftChild->type = Node::TYPE_FREE;
10824 leftChild->parent = currNode;
10825 leftChild->buddy = rightChild;
10827 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
10828 rightChild->type = Node::TYPE_FREE;
10829 rightChild->parent = currNode;
10830 rightChild->buddy = leftChild;
// The split node keeps only a pointer to its left child; the right child is
// reachable via leftChild->buddy.
10833 currNode->type = Node::TYPE_SPLIT;
10834 currNode->split.leftChild = leftChild;
10837 AddToFreeListFront(childrenLevel, rightChild);
10838 AddToFreeListFront(childrenLevel, leftChild);
10843 currNode = m_FreeList[currLevel].front;
// Claim the target-level node for this allocation.
10852 VMA_ASSERT(currLevel == targetLevel &&
10853 currNode != VMA_NULL &&
10854 currNode->type == Node::TYPE_FREE);
10855 RemoveFromFreeList(currLevel, currNode);
10858 currNode->type = Node::TYPE_ALLOCATION;
10859 currNode->allocation.alloc = hAllocation;
10861 ++m_AllocationCount;
10863 m_SumFreeSize -= allocSize;
// Recursively deletes a node and, for split nodes, both children (the right
// child is reached through leftChild->buddy).
10866 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
10868 if(node->type == Node::TYPE_SPLIT)
10870 DeleteNode(node->split.leftChild->buddy);
10871 DeleteNode(node->split.leftChild);
10874 vma_delete(GetAllocationCallbacks(), node);
// Recursive tree validation: checks parent/buddy symmetry, accumulates free
// and allocation statistics into ctx, and for split nodes verifies child
// offsets before recursing into both halves.
// NOTE(review): the `switch(curr->type)` line is elided by extraction (the
// case labels below imply it).
10877 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const 10879 VMA_VALIDATE(level < m_LevelCount);
10880 VMA_VALIDATE(curr->parent == parent);
// Only the root (parent == NULL) has no buddy; buddy links must be mutual.
10881 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
10882 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
10885 case Node::TYPE_FREE:
10887 ctx.calculatedSumFreeSize += levelNodeSize;
10888 ++ctx.calculatedFreeCount;
10890 case Node::TYPE_ALLOCATION:
10891 ++ctx.calculatedAllocationCount;
// Internal fragmentation within the node counts as free space.
10892 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
10893 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
10895 case Node::TYPE_SPLIT:
10897 const uint32_t childrenLevel = level + 1;
10898 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
10899 const Node*
const leftChild = curr->split.leftChild;
10900 VMA_VALIDATE(leftChild != VMA_NULL);
10901 VMA_VALIDATE(leftChild->offset == curr->offset);
10902 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
10904 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
10906 const Node*
const rightChild = leftChild->buddy;
10907 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
10908 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
10910 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
10921 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const 10924 uint32_t level = 0;
10925 VkDeviceSize currLevelNodeSize = m_UsableSize;
10926 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
10927 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
10930 currLevelNodeSize = nextLevelNodeSize;
10931 nextLevelNodeSize = currLevelNodeSize >> 1;
10936 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
10939 Node* node = m_Root;
10940 VkDeviceSize nodeOffset = 0;
10941 uint32_t level = 0;
10942 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
10943 while(node->type == Node::TYPE_SPLIT)
10945 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
10946 if(offset < nodeOffset + nextLevelSize)
10948 node = node->split.leftChild;
10952 node = node->split.leftChild->buddy;
10953 nodeOffset += nextLevelSize;
10956 levelNodeSize = nextLevelSize;
10959 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
10960 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
10963 --m_AllocationCount;
10964 m_SumFreeSize += alloc->GetSize();
10966 node->type = Node::TYPE_FREE;
10969 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
10971 RemoveFromFreeList(level, node->buddy);
10972 Node*
const parent = node->parent;
10974 vma_delete(GetAllocationCallbacks(), node->buddy);
10975 vma_delete(GetAllocationCallbacks(), node);
10976 parent->type = Node::TYPE_FREE;
10984 AddToFreeListFront(level, node);
// Accumulates per-node statistics into `outInfo`, recursing through split nodes.
// NOTE(review): several lines appear elided in this extract — the
// `switch(node->type)` header, the statistic updates inside the FREE and
// ALLOCATION cases, and the `break`s — verify against the full file.
10987 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const 10991 case Node::TYPE_FREE:
10997 case Node::TYPE_ALLOCATION:
10999 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Any tail of the node not covered by the allocation counts as an unused range.
11005 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
11006 if(unusedRangeSize > 0)
// Split node: recurse into both half-sized children.
11015 case Node::TYPE_SPLIT:
11017 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11018 const Node*
const leftChild = node->split.leftChild;
11019 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
11020 const Node*
const rightChild = leftChild->buddy;
11021 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
11029 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
11031 VMA_ASSERT(node->type == Node::TYPE_FREE);
11034 Node*
const frontNode = m_FreeList[level].front;
11035 if(frontNode == VMA_NULL)
11037 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
11038 node->free.prev = node->free.next = VMA_NULL;
11039 m_FreeList[level].front = m_FreeList[level].back = node;
11043 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
11044 node->free.prev = VMA_NULL;
11045 node->free.next = frontNode;
11046 frontNode->free.prev = node;
11047 m_FreeList[level].front = node;
11051 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
11053 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
11056 if(node->free.prev == VMA_NULL)
11058 VMA_ASSERT(m_FreeList[level].front == node);
11059 m_FreeList[level].front = node->free.next;
11063 Node*
const prevFreeNode = node->free.prev;
11064 VMA_ASSERT(prevFreeNode->free.next == node);
11065 prevFreeNode->free.next = node->free.next;
11069 if(node->free.next == VMA_NULL)
11071 VMA_ASSERT(m_FreeList[level].back == node);
11072 m_FreeList[level].back = node->free.prev;
11076 Node*
const nextFreeNode = node->free.next;
11077 VMA_ASSERT(nextFreeNode->free.prev == node);
11078 nextFreeNode->free.prev = node->free.prev;
// Emits one buddy-tree node into the detailed JSON map: free nodes as unused
// ranges, allocations (plus any unused tail), and recursion for split nodes.
// NOTE(review): the `switch(node->type)` header and `break`s appear elided in
// this extract — verify against the full file.
#if VMA_STATS_STRING_ENABLED 11083 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const 11087 case Node::TYPE_FREE:
11088 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
11090 case Node::TYPE_ALLOCATION:
11092 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
11093 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// The node may be larger than the allocation it holds; report the tail as unused.
11094 if(allocSize < levelNodeSize)
11096 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
// Split node: recurse into both half-sized children.
11100 case Node::TYPE_SPLIT:
11102 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11103 const Node*
const leftChild = node->split.leftChild;
11104 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
11105 const Node*
const rightChild = leftChild->buddy;
11106 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
11113 #endif // #if VMA_STATS_STRING_ENABLED 11119 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
11120 m_pMetadata(VMA_NULL),
11121 m_MemoryTypeIndex(UINT32_MAX),
11123 m_hMemory(VK_NULL_HANDLE),
11125 m_pMappedData(VMA_NULL)
11129 void VmaDeviceMemoryBlock::Init(
11132 uint32_t newMemoryTypeIndex,
11133 VkDeviceMemory newMemory,
11134 VkDeviceSize newSize,
11136 uint32_t algorithm)
11138 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
11140 m_hParentPool = hParentPool;
11141 m_MemoryTypeIndex = newMemoryTypeIndex;
11143 m_hMemory = newMemory;
11148 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
11151 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
11157 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
11159 m_pMetadata->Init(newSize);
11162 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
11166 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
11168 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
11169 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
11170 m_hMemory = VK_NULL_HANDLE;
11172 vma_delete(allocator, m_pMetadata);
11173 m_pMetadata = VMA_NULL;
11176 bool VmaDeviceMemoryBlock::Validate()
const 11178 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
11179 (m_pMetadata->GetSize() != 0));
11181 return m_pMetadata->Validate();
11184 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
11186 void* pData =
nullptr;
11187 VkResult res = Map(hAllocator, 1, &pData);
11188 if(res != VK_SUCCESS)
11193 res = m_pMetadata->CheckCorruption(pData);
11195 Unmap(hAllocator, 1);
// Maps the block's device memory into host address space, with reference
// counting: nested Map() calls share a single vkMapMemory mapping and only
// bump m_MapCount. Optionally returns the mapped pointer through ppData.
// NOTE(review): several lines appear elided in this extract (e.g. an early
// return for count == 0, part of the vkMapMemory argument list, and the
// return statements) — verify against the full file before editing.
11200 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
// Serialize against other Map/Unmap/Bind calls on this block.
11207 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11208 if(m_MapCount != 0)
// Already mapped: just add the new references and hand out the cached pointer.
11210 m_MapCount += count;
11211 VMA_ASSERT(m_pMappedData != VMA_NULL);
11212 if(ppData != VMA_NULL)
11214 *ppData = m_pMappedData;
// First mapping: call through the allocator's Vulkan dispatch table.
11220 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
11221 hAllocator->m_hDevice,
11227 if(result == VK_SUCCESS)
11229 if(ppData != VMA_NULL)
11231 *ppData = m_pMappedData;
11233 m_MapCount = count;
11239 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
11246 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11247 if(m_MapCount >= count)
11249 m_MapCount -= count;
11250 if(m_MapCount == 0)
11252 m_pMappedData = VMA_NULL;
11253 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
11258 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
11262 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11264 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11265 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11268 VkResult res = Map(hAllocator, 1, &pData);
11269 if(res != VK_SUCCESS)
11274 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
11275 VmaWriteMagicValue(pData, allocOffset + allocSize);
11277 Unmap(hAllocator, 1);
11282 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11284 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11285 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11288 VkResult res = Map(hAllocator, 1, &pData);
11289 if(res != VK_SUCCESS)
11294 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
11296 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
11298 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
11300 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
11303 Unmap(hAllocator, 1);
// Binds a buffer to this block's device memory at the allocation's offset,
// under the block mutex so that vkBindBufferMemory is never called
// concurrently on the same VkDeviceMemory.
// NOTE(review): the parameter list (allocator/allocation/buffer handles) and
// the memory-handle argument of the vkBindBufferMemory call appear elided in
// this extract — verify against the full file.
11308 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
// The allocation must be a block allocation that lives in this very block.
11313 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11314 hAllocation->GetBlock() ==
this);
11316 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11317 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
11318 hAllocator->m_hDevice,
11321 hAllocation->GetOffset());
// Binds an image to this block's device memory at the allocation's offset,
// under the block mutex so that vkBindImageMemory is never called
// concurrently on the same VkDeviceMemory.
// NOTE(review): the parameter list (allocator/allocation/image handles) and
// the memory-handle argument of the vkBindImageMemory call appear elided in
// this extract — verify against the full file.
11324 VkResult VmaDeviceMemoryBlock::BindImageMemory(
// The allocation must be a block allocation that lives in this very block.
11329 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11330 hAllocation->GetBlock() ==
this);
11332 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11333 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
11334 hAllocator->m_hDevice,
11337 hAllocation->GetOffset());
11342 memset(&outInfo, 0,
sizeof(outInfo));
11361 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
11369 VmaPool_T::VmaPool_T(
11372 VkDeviceSize preferredBlockSize) :
11376 createInfo.memoryTypeIndex,
11377 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
11378 createInfo.minBlockCount,
11379 createInfo.maxBlockCount,
11381 createInfo.frameInUseCount,
11383 createInfo.blockSize != 0,
11389 VmaPool_T::~VmaPool_T()
11393 #if VMA_STATS_STRING_ENABLED 11395 #endif // #if VMA_STATS_STRING_ENABLED 11397 VmaBlockVector::VmaBlockVector(
11400 uint32_t memoryTypeIndex,
11401 VkDeviceSize preferredBlockSize,
11402 size_t minBlockCount,
11403 size_t maxBlockCount,
11404 VkDeviceSize bufferImageGranularity,
11405 uint32_t frameInUseCount,
11407 bool explicitBlockSize,
11408 uint32_t algorithm) :
11409 m_hAllocator(hAllocator),
11410 m_hParentPool(hParentPool),
11411 m_MemoryTypeIndex(memoryTypeIndex),
11412 m_PreferredBlockSize(preferredBlockSize),
11413 m_MinBlockCount(minBlockCount),
11414 m_MaxBlockCount(maxBlockCount),
11415 m_BufferImageGranularity(bufferImageGranularity),
11416 m_FrameInUseCount(frameInUseCount),
11417 m_IsCustomPool(isCustomPool),
11418 m_ExplicitBlockSize(explicitBlockSize),
11419 m_Algorithm(algorithm),
11420 m_HasEmptyBlock(false),
11421 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
11426 VmaBlockVector::~VmaBlockVector()
11428 for(
size_t i = m_Blocks.size(); i--; )
11430 m_Blocks[i]->Destroy(m_hAllocator);
11431 vma_delete(m_hAllocator, m_Blocks[i]);
11435 VkResult VmaBlockVector::CreateMinBlocks()
11437 for(
size_t i = 0; i < m_MinBlockCount; ++i)
11439 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
11440 if(res != VK_SUCCESS)
// Fills *pStats with aggregate statistics over all blocks in this vector,
// holding the vector's mutex for shared (read) access.
// NOTE(review): the lines that zero-initialize *pStats before accumulation
// appear elided in this extract — without them AddPoolStats would accumulate
// into uninitialized fields; verify against the full file.
11448 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
11450 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
11452 const size_t blockCount = m_Blocks.size();
// Each block's metadata adds its own numbers into the running totals.
11461 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11463 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11464 VMA_ASSERT(pBlock);
11465 VMA_HEAVY_ASSERT(pBlock->Validate());
11466 pBlock->m_pMetadata->AddPoolStats(*pStats);
11470 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 11472 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
11473 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
11474 (VMA_DEBUG_MARGIN > 0) &&
11476 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
11479 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
11481 VkResult VmaBlockVector::Allocate(
11482 uint32_t currentFrameIndex,
11484 VkDeviceSize alignment,
11486 VmaSuballocationType suballocType,
11487 size_t allocationCount,
11491 VkResult res = VK_SUCCESS;
11493 if(IsCorruptionDetectionEnabled())
11495 size = VmaAlignUp<VkDeviceSize>(size,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
11496 alignment = VmaAlignUp<VkDeviceSize>(alignment,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
11500 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11501 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
11503 res = AllocatePage(
11509 pAllocations + allocIndex);
11510 if(res != VK_SUCCESS)
11517 if(res != VK_SUCCESS)
11520 while(allocIndex--)
11522 Free(pAllocations[allocIndex]);
11524 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
11530 VkResult VmaBlockVector::AllocatePage(
11531 uint32_t currentFrameIndex,
11533 VkDeviceSize alignment,
11535 VmaSuballocationType suballocType,
11542 const bool canCreateNewBlock =
11544 (m_Blocks.size() < m_MaxBlockCount);
11551 canMakeOtherLost =
false;
11555 if(isUpperAddress &&
11558 return VK_ERROR_FEATURE_NOT_PRESENT;
11572 return VK_ERROR_FEATURE_NOT_PRESENT;
11576 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
11578 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11586 if(!canMakeOtherLost || canCreateNewBlock)
11595 if(!m_Blocks.empty())
11597 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
11598 VMA_ASSERT(pCurrBlock);
11599 VkResult res = AllocateFromBlock(
11609 if(res == VK_SUCCESS)
11611 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
11621 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11623 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11624 VMA_ASSERT(pCurrBlock);
11625 VkResult res = AllocateFromBlock(
11635 if(res == VK_SUCCESS)
11637 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
11645 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11647 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11648 VMA_ASSERT(pCurrBlock);
11649 VkResult res = AllocateFromBlock(
11659 if(res == VK_SUCCESS)
11661 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
11669 if(canCreateNewBlock)
11672 VkDeviceSize newBlockSize = m_PreferredBlockSize;
11673 uint32_t newBlockSizeShift = 0;
11674 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
11676 if(!m_ExplicitBlockSize)
11679 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
11680 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
11682 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11683 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
11685 newBlockSize = smallerNewBlockSize;
11686 ++newBlockSizeShift;
11695 size_t newBlockIndex = 0;
11696 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
11698 if(!m_ExplicitBlockSize)
11700 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
11702 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11703 if(smallerNewBlockSize >= size)
11705 newBlockSize = smallerNewBlockSize;
11706 ++newBlockSizeShift;
11707 res = CreateBlock(newBlockSize, &newBlockIndex);
11716 if(res == VK_SUCCESS)
11718 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
11719 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
11721 res = AllocateFromBlock(
11731 if(res == VK_SUCCESS)
11733 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
11739 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11746 if(canMakeOtherLost)
11748 uint32_t tryIndex = 0;
11749 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
11751 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
11752 VmaAllocationRequest bestRequest = {};
11753 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
11759 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11761 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11762 VMA_ASSERT(pCurrBlock);
11763 VmaAllocationRequest currRequest = {};
11764 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11767 m_BufferImageGranularity,
11776 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11777 if(pBestRequestBlock == VMA_NULL ||
11778 currRequestCost < bestRequestCost)
11780 pBestRequestBlock = pCurrBlock;
11781 bestRequest = currRequest;
11782 bestRequestCost = currRequestCost;
11784 if(bestRequestCost == 0)
11795 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11797 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11798 VMA_ASSERT(pCurrBlock);
11799 VmaAllocationRequest currRequest = {};
11800 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11803 m_BufferImageGranularity,
11812 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11813 if(pBestRequestBlock == VMA_NULL ||
11814 currRequestCost < bestRequestCost ||
11817 pBestRequestBlock = pCurrBlock;
11818 bestRequest = currRequest;
11819 bestRequestCost = currRequestCost;
11821 if(bestRequestCost == 0 ||
11831 if(pBestRequestBlock != VMA_NULL)
11835 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
11836 if(res != VK_SUCCESS)
11842 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
11848 if(pBestRequestBlock->m_pMetadata->IsEmpty())
11850 m_HasEmptyBlock =
false;
11853 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
11854 (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
11855 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
11856 (*pAllocation)->InitBlockAllocation(
11858 bestRequest.offset,
11864 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
11865 VMA_DEBUG_LOG(
" Returned from existing block");
11866 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
11867 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11869 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
11871 if(IsCorruptionDetectionEnabled())
11873 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
11874 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
11889 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
11891 return VK_ERROR_TOO_MANY_OBJECTS;
11895 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Returns an allocation to its owning block: validates debug margins, unmaps
// a persistent mapping, frees the suballocation, and (under the write lock)
// decides whether an empty block should be destroyed. At most one empty block
// is kept alive to absorb future allocations; the actual VkDeviceMemory free
// happens after the lock is released.
// NOTE(review): the parameter list and several closing/opening lines appear
// elided in this extract — verify against the full file before editing.
11898 void VmaBlockVector::Free(
11901 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope for the write lock; the block is destroyed outside of it.
11905 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11907 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
11909 if(IsCorruptionDetectionEnabled())
11911 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
11912 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
// A persistently mapped allocation holds one map reference on its block.
11915 if(hAllocation->IsPersistentMap())
11917 pBlock->Unmap(m_hAllocator, 1);
11920 pBlock->m_pMetadata->Free(hAllocation);
11921 VMA_HEAVY_ASSERT(pBlock->Validate());
11923 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
// This block just became empty.
11926 if(pBlock->m_pMetadata->IsEmpty())
// Already had another empty block? Then this one is surplus — schedule it
// for deletion (but never drop below m_MinBlockCount blocks).
11929 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
11931 pBlockToDelete = pBlock;
11937 m_HasEmptyBlock =
true;
// Block still has allocations, but an empty block exists elsewhere: if the
// last (most-free, thanks to sorting) block is empty, it can be released.
11942 else if(m_HasEmptyBlock)
11944 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
11945 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
11947 pBlockToDelete = pLastBlock;
11948 m_Blocks.pop_back();
11949 m_HasEmptyBlock =
false;
11953 IncrementallySortBlocks();
// Destruction of the VkDeviceMemory is deferred until the mutex is released.
11958 if(pBlockToDelete != VMA_NULL)
11960 VMA_DEBUG_LOG(
" Deleted empty allocation");
11961 pBlockToDelete->Destroy(m_hAllocator);
11962 vma_delete(m_hAllocator, pBlockToDelete);
11966 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 11968 VkDeviceSize result = 0;
11969 for(
size_t i = m_Blocks.size(); i--; )
11971 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
11972 if(result >= m_PreferredBlockSize)
11980 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
11982 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11984 if(m_Blocks[blockIndex] == pBlock)
11986 VmaVectorRemove(m_Blocks, blockIndex);
11993 void VmaBlockVector::IncrementallySortBlocks()
11998 for(
size_t i = 1; i < m_Blocks.size(); ++i)
12000 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
12002 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
12009 VkResult VmaBlockVector::AllocateFromBlock(
12010 VmaDeviceMemoryBlock* pBlock,
12011 uint32_t currentFrameIndex,
12013 VkDeviceSize alignment,
12016 VmaSuballocationType suballocType,
12025 VmaAllocationRequest currRequest = {};
12026 if(pBlock->m_pMetadata->CreateAllocationRequest(
12029 m_BufferImageGranularity,
12039 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
12043 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
12044 if(res != VK_SUCCESS)
12051 if(pBlock->m_pMetadata->IsEmpty())
12053 m_HasEmptyBlock =
false;
12056 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
12057 (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
12058 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
12059 (*pAllocation)->InitBlockAllocation(
12061 currRequest.offset,
12067 VMA_HEAVY_ASSERT(pBlock->Validate());
12068 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
12069 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12071 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
12073 if(IsCorruptionDetectionEnabled())
12075 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
12076 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
12080 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12083 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
12085 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
12086 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
12087 allocInfo.allocationSize = blockSize;
12088 VkDeviceMemory mem = VK_NULL_HANDLE;
12089 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
12098 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
12104 allocInfo.allocationSize,
12108 m_Blocks.push_back(pBlock);
12109 if(pNewBlockIndex != VMA_NULL)
12111 *pNewBlockIndex = m_Blocks.size() - 1;
12117 void VmaBlockVector::ApplyDefragmentationMovesCpu(
12118 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12119 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
12121 const size_t blockCount = m_Blocks.size();
12122 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
12126 BLOCK_FLAG_USED = 0x00000001,
12127 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
12135 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
12136 blockInfo(blockCount, VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
12137 memset(blockInfo.data(), 0, blockCount *
sizeof(BlockInfo));
12140 const size_t moveCount = moves.size();
12141 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12143 const VmaDefragmentationMove& move = moves[moveIndex];
12144 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
12145 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
12148 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
12151 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12153 BlockInfo& currBlockInfo = blockInfo[blockIndex];
12154 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12155 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
12157 currBlockInfo.pMappedData = pBlock->GetMappedData();
12159 if(currBlockInfo.pMappedData == VMA_NULL)
12161 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
12162 if(pDefragCtx->res == VK_SUCCESS)
12164 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
12171 if(pDefragCtx->res == VK_SUCCESS)
12173 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
12174 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
12176 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12178 const VmaDefragmentationMove& move = moves[moveIndex];
12180 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
12181 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
12183 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
12188 VmaDeviceMemoryBlock*
const pSrcBlock = m_Blocks[move.srcBlockIndex];
12189 memRange.memory = pSrcBlock->GetDeviceMemory();
12190 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
12191 memRange.size = VMA_MIN(
12192 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
12193 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
12194 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
12199 reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
12200 reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
12201 static_cast<size_t>(move.size));
12203 if(IsCorruptionDetectionEnabled())
12205 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
12206 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
12212 VmaDeviceMemoryBlock*
const pDstBlock = m_Blocks[move.dstBlockIndex];
12213 memRange.memory = pDstBlock->GetDeviceMemory();
12214 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
12215 memRange.size = VMA_MIN(
12216 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
12217 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
12218 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
12225 for(
size_t blockIndex = blockCount; blockIndex--; )
12227 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
12228 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
12230 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12231 pBlock->Unmap(m_hAllocator, 1);
12236 void VmaBlockVector::ApplyDefragmentationMovesGpu(
12237 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12238 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12239 VkCommandBuffer commandBuffer)
12241 const size_t blockCount = m_Blocks.size();
12243 pDefragCtx->blockContexts.resize(blockCount);
12244 memset(pDefragCtx->blockContexts.data(), 0, blockCount *
sizeof(VmaBlockDefragmentationContext));
12247 const size_t moveCount = moves.size();
12248 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12250 const VmaDefragmentationMove& move = moves[moveIndex];
12251 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12252 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12255 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
12259 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
12260 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
12261 VK_BUFFER_USAGE_TRANSFER_DST_BIT;
12263 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12265 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
12266 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12267 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
12269 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
12270 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
12271 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
12272 if(pDefragCtx->res == VK_SUCCESS)
12274 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
12275 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
12282 if(pDefragCtx->res == VK_SUCCESS)
12284 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
12285 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
12287 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12289 const VmaDefragmentationMove& move = moves[moveIndex];
12291 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
12292 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
12294 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
12296 VkBufferCopy region = {
12300 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
12301 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, ®ion);
12306 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
12308 pDefragCtx->res = VK_NOT_READY;
12314 m_HasEmptyBlock =
false;
12315 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
12317 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12318 if(pBlock->m_pMetadata->IsEmpty())
12320 if(m_Blocks.size() > m_MinBlockCount)
12322 if(pDefragmentationStats != VMA_NULL)
12325 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
12328 VmaVectorRemove(m_Blocks, blockIndex);
12329 pBlock->Destroy(m_hAllocator);
12330 vma_delete(m_hAllocator, pBlock);
12334 m_HasEmptyBlock =
true;
// Serializes this block vector's configuration and per-block metadata as JSON.
// Takes a read lock on m_Mutex (when the allocator was created with mutexes enabled),
// so it is safe to call concurrently with allocations.
// NOTE(review): this excerpt is lossy — several closing json.EndObject()/brace lines
// appear to have been elided upstream; the visible calls below are only the key/value writes.
12340 #if VMA_STATS_STRING_ENABLED 12342 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
12344 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12346 json.BeginObject();
// Basic identification of this vector: which Vulkan memory type it allocates from,
// and the preferred size of each VkDeviceMemory block.
12350 json.WriteString(
"MemoryTypeIndex");
12351 json.WriteNumber(m_MemoryTypeIndex);
12353 json.WriteString(
"BlockSize");
12354 json.WriteNumber(m_PreferredBlockSize);
// Nested "BlockCount" object: Min/Max are emitted only when they constrain anything
// (Min > 0, Max below SIZE_MAX); "Cur" is always emitted.
12356 json.WriteString(
"BlockCount");
12357 json.BeginObject(
true);
12358 if(m_MinBlockCount > 0)
12360 json.WriteString(
"Min");
12361 json.WriteNumber((uint64_t)m_MinBlockCount);
12363 if(m_MaxBlockCount < SIZE_MAX)
12365 json.WriteString(
"Max");
12366 json.WriteNumber((uint64_t)m_MaxBlockCount);
12368 json.WriteString(
"Cur");
12369 json.WriteNumber((uint64_t)m_Blocks.size());
// Optional fields, emitted only when non-default.
12372 if(m_FrameInUseCount > 0)
12374 json.WriteString(
"FrameInUseCount");
12375 json.WriteNumber(m_FrameInUseCount);
12378 if(m_Algorithm != 0)
12380 json.WriteString(
"Algorithm");
12381 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
12386 json.WriteString(
"PreferredBlockSize");
12387 json.WriteNumber(m_PreferredBlockSize);
// "Blocks" object: one entry per device memory block, keyed by the block's id,
// with the block's detailed suballocation map as the value.
12390 json.WriteString(
"Blocks");
12391 json.BeginObject();
12392 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12394 json.BeginString();
12395 json.ContinueString(m_Blocks[i]->GetId());
12398 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// Runs one defragmentation pass over this block vector.
// - pCtx: per-vector defragmentation context; receives the result in pCtx->res.
// - maxCpu*/maxGpu* are in-out budgets: the bytes/allocations actually moved are
//   subtracted so subsequent vectors share the caller's overall budget.
// - commandBuffer: used for the GPU path (buffer-to-buffer copies recorded into it).
// Decides CPU vs GPU path from the memory type's property flags, locks m_Mutex for
// writing (unlock happens later in DefragmentationEnd), runs the chosen algorithm,
// then applies the computed moves.
12405 #endif // #if VMA_STATS_STRING_ENABLED 12407 void VmaBlockVector::Defragment(
12408 class VmaBlockVectorDefragmentationContext* pCtx,
12410 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
12411 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
12412 VkCommandBuffer commandBuffer)
12414 pCtx->res = VK_SUCCESS;
12416 const VkMemoryPropertyFlags memPropFlags =
12417 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
12418 const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
12419 const bool isHostCoherent = (memPropFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0;
// CPU defrag needs a nonzero budget (and — per the trailing '&&' — further conditions
// that were elided from this excerpt, presumably host visibility; TODO confirm upstream).
// GPU defrag additionally requires corruption detection to be off, because the
// GPU copy path cannot maintain the debug margins/markers.
12421 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
12423 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
12424 !IsCorruptionDetectionEnabled();
12427 if(canDefragmentOnCpu || canDefragmentOnGpu)
12429 bool defragmentOnGpu;
// If only one path is possible, take it; otherwise prefer GPU for device-local
// memory (or on integrated GPUs where the distinction is moot).
12431 if(canDefragmentOnGpu != canDefragmentOnCpu)
12433 defragmentOnGpu = canDefragmentOnGpu;
12438 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
12439 m_hAllocator->IsIntegratedGpu();
// Overlapping src/dst ranges are only safe on the CPU path (memmove semantics);
// vkCmdCopyBuffer regions must not overlap.
12442 bool overlappingMoveSupported = !defragmentOnGpu;
// Write lock is taken here and intentionally NOT released in this function —
// DefragmentationEnd() unlocks it via pCtx->mutexLocked.
12444 if(m_hAllocator->m_UseMutex)
12446 m_Mutex.LockWrite();
12447 pCtx->mutexLocked =
true;
12450 pCtx->Begin(overlappingMoveSupported);
// Run the algorithm against the budget of whichever path was chosen.
12454 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
12455 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
12456 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves =
12457 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()))
12458 pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);
// Deduct what was actually moved from the caller's remaining budget.
12461 if(pStats != VMA_NULL)
12463 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
12464 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
12467 VMA_ASSERT(bytesMoved <= maxBytesToMove);
12468 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
12469 if(defragmentOnGpu)
12471 maxGpuBytesToMove -= bytesMoved;
12472 maxGpuAllocationsToMove -= allocationsMoved;
12476 maxCpuBytesToMove -= bytesMoved;
12477 maxCpuAllocationsToMove -= allocationsMoved;
// Apply the planned moves: record copies into commandBuffer (GPU) or memcpy (CPU).
12481 if(pCtx->res >= VK_SUCCESS)
12483 if(defragmentOnGpu)
12485 ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
12489 ApplyDefragmentationMovesCpu(pCtx, moves);
// Finishes a defragmentation pass started by Defragment():
// destroys the temporary VkBuffers created per block for the GPU copy path,
// frees blocks that became empty (on success), and releases the write lock
// that Defragment() left held (tracked via pCtx->mutexLocked).
12495 void VmaBlockVector::DefragmentationEnd(
12496 class VmaBlockVectorDefragmentationContext* pCtx,
// Iterate in reverse so destruction order mirrors creation order.
12500 for(
size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
12502 VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
12503 if(blockCtx.hBuffer)
12505 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
12506 m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
// Only reclaim empty blocks if the pass did not fail.
12510 if(pCtx->res >= VK_SUCCESS)
12512 FreeEmptyBlocks(pStats);
// Pairs with m_Mutex.LockWrite() in Defragment().
12515 if(pCtx->mutexLocked)
12517 VMA_ASSERT(m_hAllocator->m_UseMutex);
12518 m_Mutex.UnlockWrite();
// Returns the total number of live allocations across all blocks in this vector,
// by summing each block's metadata count. (The accumulator declaration was elided
// from this excerpt; only the summation loop is visible.)
12522 size_t VmaBlockVector::CalcAllocationCount()
const 12525 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12527 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
// Returns whether any block could suffer a buffer/image granularity conflict,
// i.e. a linear and a non-linear resource sharing the same granularity page.
// A granularity of 1 can never conflict (early-out). Scans each block's
// generic metadata, carrying the last suballocation type across blocks.
12532 bool VmaBlockVector::IsBufferImageGranularityConflictPossible()
const 12534 if(m_BufferImageGranularity == 1)
12538 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
12539 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
12541 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[i];
// Only the default (generic) algorithm is supported here, which makes the
// downcast to VmaBlockMetadata_Generic safe.
12542 VMA_ASSERT(m_Algorithm == 0);
12543 VmaBlockMetadata_Generic*
const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
12544 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
// Marks allocations in this pool as lost if they have not been used within
// m_FrameInUseCount frames relative to currentFrameIndex.
// Optionally reports the number of allocations made lost via pLostAllocationCount.
// Takes the write lock because block metadata is mutated.
12552 void VmaBlockVector::MakePoolAllocationsLost(
12553 uint32_t currentFrameIndex,
12554 size_t* pLostAllocationCount)
12556 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12557 size_t lostAllocationCount = 0;
12558 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12560 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12561 VMA_ASSERT(pBlock);
12562 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
// Output parameter is optional.
12564 if(pLostAllocationCount != VMA_NULL)
12566 *pLostAllocationCount = lostAllocationCount;
// Validates the debug margins of every block in this vector.
// Returns VK_ERROR_FEATURE_NOT_PRESENT when corruption detection is not enabled
// for this memory type; otherwise propagates the first per-block failure.
12570 VkResult VmaBlockVector::CheckCorruption()
12572 if(!IsCorruptionDetectionEnabled())
12574 return VK_ERROR_FEATURE_NOT_PRESENT;
// Read lock suffices: checking does not mutate metadata.
12577 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12578 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12580 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12581 VMA_ASSERT(pBlock);
12582 VkResult res = pBlock->CheckCorruption(m_hAllocator);
12583 if(res != VK_SUCCESS)
// Accumulates this vector's per-block statistics into pStats, under three views:
// global total, per-memory-type, and per-memory-heap (heap resolved from the type).
// Takes a read lock; block metadata is only read.
12591 void VmaBlockVector::AddStats(
VmaStats* pStats)
12593 const uint32_t memTypeIndex = m_MemoryTypeIndex;
12594 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
12596 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12598 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12600 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12601 VMA_ASSERT(pBlock);
12602 VMA_HEAVY_ASSERT(pBlock->Validate());
// Each block contributes one VmaStatInfo, folded into all three aggregates.
12604 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
12605 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12606 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12607 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Generic (conservative, CPU-friendly) defragmentation algorithm.
// The constructor snapshots every block of the target vector into a BlockInfo
// (remembering each block's original index so moves can reference it) and sorts
// the infos by block pointer to enable binary search in AddAllocation().
12614 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
12616 VmaBlockVector* pBlockVector,
12617 uint32_t currentFrameIndex,
12618 bool overlappingMoveSupported) :
12619 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12620 m_AllocationCount(0),
12621 m_AllAllocations(false),
12623 m_AllocationsMoved(0),
12624 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
12627 const size_t blockCount = m_pBlockVector->m_Blocks.size();
12628 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
// BlockInfos are heap-allocated through the allocator's callbacks; freed in the dtor.
12630 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
12631 pBlockInfo->m_OriginalBlockIndex = blockIndex;
12632 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
12633 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so AddAllocation can binary-search for the owning block.
12637 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Releases the BlockInfo objects allocated in the constructor (reverse order).
12640 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
12642 for(
size_t i = m_Blocks.size(); i--; )
12644 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers a single allocation as a candidate for moving.
// Lost allocations are skipped. The owning block is located by binary search
// over m_Blocks (sorted by block pointer in the constructor). pChanged, if
// non-null, will be set to VK_TRUE when the allocation is actually moved.
12648 void VmaDefragmentationAlgorithm_Generic::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
12651 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
12653 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
12654 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
12655 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
12657 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
12658 (*it)->m_Allocations.push_back(allocInfo);
// Counter tracks registered candidates; validated against the vector elsewhere.
12665 ++m_AllocationCount;
// One round of the generic algorithm: walks source allocations from the last
// block backwards and tries to re-place each into an earlier (or same) block at
// a lower offset, appending a VmaDefragmentationMove per successful relocation.
// Stops when the byte or allocation budget would be exceeded.
// NOTE(review): excerpt is lossy — several return statements and loop-control
// lines were elided; only the core flow is visible below.
12669 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
12670 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12671 VkDeviceSize maxBytesToMove,
12672 uint32_t maxAllocationsToMove)
12674 if(m_Blocks.empty())
12687 size_t srcBlockMinIndex = 0;
// Start from the last block; srcAllocIndex == SIZE_MAX means "pick the last
// allocation of the current block" (see the while loop below).
12700 size_t srcBlockIndex = m_Blocks.size() - 1;
12701 size_t srcAllocIndex = SIZE_MAX;
// Advance to the next non-empty source block / valid allocation index,
// moving to earlier blocks when the current one is exhausted.
12707 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
12709 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
12712 if(srcBlockIndex == srcBlockMinIndex)
12719 srcAllocIndex = SIZE_MAX;
12724 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
12728 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
12729 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
// Properties of the allocation being considered for relocation.
12731 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
12732 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
12733 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
12734 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to and including the source block.
12737 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
12739 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
12740 VmaAllocationRequest dstAllocRequest;
12741 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
12742 m_CurrentFrameIndex,
12743 m_pBlockVector->GetFrameInUseCount(),
12744 m_pBlockVector->GetBufferImageGranularity(),
12751 &dstAllocRequest) &&
// MoveMakesSense filters out moves that would not improve packing.
12753 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
12755 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Respect the caller's budget before committing the move.
12758 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
12759 (m_BytesMoved + size > maxBytesToMove))
// Record the planned move using ORIGINAL block indices (m_Blocks is sorted,
// so local indices differ from the block vector's).
12764 VmaDefragmentationMove move;
12765 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
12766 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
12767 move.srcOffset = srcOffset;
12768 move.dstOffset = dstAllocRequest.offset;
12770 moves.push_back(move);
// Commit in metadata immediately: allocate at destination, free at source,
// and repoint the allocation handle to its new block/offset.
12772 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
12776 allocInfo.m_hAllocation);
12777 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
12779 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
12781 if(allocInfo.m_pChanged != VMA_NULL)
12783 *allocInfo.m_pChanged = VK_TRUE;
12786 ++m_AllocationsMoved;
12787 m_BytesMoved += size;
12789 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Step to the previous allocation / previous block for the next iteration.
12797 if(srcAllocIndex > 0)
12803 if(srcBlockIndex > 0)
12806 srcAllocIndex = SIZE_MAX;
// Counts blocks that contain at least one allocation which cannot be moved
// (m_HasNonMovableAllocations, computed in Defragment()).
12816 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount()
const 12819 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12821 if(m_Blocks[i]->m_HasNonMovableAllocations)
// Entry point of the generic algorithm. Prepares per-block candidate lists
// (either everything, when m_AllAllocations is set, or what AddAllocation
// registered), sorts blocks by suitability as a move destination, then runs
// up to two DefragmentRound passes within the given budgets.
12829 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
12830 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12831 VkDeviceSize maxBytesToMove,
12832 uint32_t maxAllocationsToMove)
// Nothing to do if no candidates were registered and "all" mode is off.
12834 if(!m_AllAllocations && m_AllocationCount == 0)
12839 const size_t blockCount = m_Blocks.size();
12840 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12842 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
// "All allocations" mode: harvest every non-free suballocation from the
// block's generic metadata as a movable candidate.
12844 if(m_AllAllocations)
12846 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
12847 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
12848 it != pMetadata->m_Suballocations.end();
12851 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
12853 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
12854 pBlockInfo->m_Allocations.push_back(allocInfo);
12859 pBlockInfo->CalcHasNonMovableAllocations();
// Process allocations from the end of each block first (highest offsets).
12863 pBlockInfo->SortAllocationsByOffsetDescending();
// Reorder blocks so the most attractive destinations come first.
12869 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// A second round can pick up moves enabled by the first round's frees.
12872 const uint32_t roundCount = 2;
12875 VkResult result = VK_SUCCESS;
12876 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
12878 result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
// Heuristic: a move improves packing only if it goes to an earlier block,
// or stays in the same block but at a lower offset. Moving to a later block
// never makes sense. (Return statements were elided from this excerpt.)
12884 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
12885 size_t dstBlockIndex, VkDeviceSize dstOffset,
12886 size_t srcBlockIndex, VkDeviceSize srcOffset)
12888 if(dstBlockIndex < srcBlockIndex)
12892 if(dstBlockIndex > srcBlockIndex)
12896 if(dstOffset < srcOffset)
// Fast defragmentation algorithm: compacts suballocations by rewriting block
// metadata wholesale (see Defragment/Preprocess/PostprocessMetadata). Only valid
// when no debug margin is configured, hence the assert.
12906 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
12908 VmaBlockVector* pBlockVector,
12909 uint32_t currentFrameIndex,
12910 bool overlappingMoveSupported) :
12911 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12912 m_OverlappingMoveSupported(overlappingMoveSupported),
12913 m_AllocationCount(0),
12914 m_AllAllocations(false),
12916 m_AllocationsMoved(0),
12917 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
// Fast path assumes no per-allocation debug margins exist in the metadata.
12919 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
// Trivial destructor: m_BlockInfos cleans itself up via its allocator.
12923 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
// Fast compaction: processes blocks ordered by free space (emptiest first as
// sources), sliding every suballocation down to the lowest feasible offset in
// the current destination block, spilling into later destination blocks as they
// fill, and recycling skipped gaps through a small FreeSpaceDatabase.
// Emits one VmaDefragmentationMove per relocation; actual data copies are
// performed later by the caller (CPU memmove or GPU vkCmdCopyBuffer).
// Requires that ALL allocations participate (asserted below), because metadata
// is rebuilt wholesale in Preprocess/PostprocessMetadata.
// NOTE(review): excerpt is lossy — some braces/continuation lines were elided.
12927 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
12928 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12929 VkDeviceSize maxBytesToMove,
12930 uint32_t maxAllocationsToMove)
12932 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
12934 const size_t blockCount = m_pBlockVector->GetBlockCount();
12935 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
// Strip FREE suballocations from metadata so only real allocations remain.
12940 PreprocessMetadata();
// Sort block indices by ascending free space: fullest blocks become
// destinations first, emptiest blocks are drained first.
12944 m_BlockInfos.resize(blockCount);
12945 for(
size_t i = 0; i < blockCount; ++i)
12947 m_BlockInfos[i].origBlockIndex = i;
12950 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
12951 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
12952 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
// Records gaps left behind when an allocation could not fit, so later
// (smaller) allocations may still be placed into them.
12957 FreeSpaceDatabase freeSpaceDb;
// Destination cursor: block index (in sorted order), its metadata, and the
// running write offset within it.
12959 size_t dstBlockInfoIndex = 0;
12960 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
12961 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
12962 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
12963 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
12964 VkDeviceSize dstOffset = 0;
12967 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
12969 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
12970 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
12971 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
12972 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
12973 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
12975 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
12976 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
12977 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
// Budget check: stop the whole pass once either limit would be exceeded.
12978 if(m_AllocationsMoved == maxAllocationsToMove ||
12979 m_BytesMoved + srcAllocSize > maxBytesToMove)
12984 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
// First, try to place the allocation in a previously recorded gap.
12987 size_t freeSpaceInfoIndex;
12988 VkDeviceSize dstAllocOffset;
12989 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
12990 freeSpaceInfoIndex, dstAllocOffset))
12992 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
12993 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
12994 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
// Gap is in the SAME block: move within the block (offset change only).
12997 if(freeSpaceInfoIndex == srcBlockInfoIndex)
12999 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
13003 VmaSuballocation suballoc = *srcSuballocIt;
13004 suballoc.offset = dstAllocOffset;
13005 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
13006 m_BytesMoved += srcAllocSize;
13007 ++m_AllocationsMoved;
// Re-link the suballocation at its new position in the list.
13009 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13011 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13012 srcSuballocIt = nextSuballocIt;
13014 InsertSuballoc(pFreeSpaceMetadata, suballoc);
13016 VmaDefragmentationMove move = {
13017 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
13018 srcAllocOffset, dstAllocOffset,
13020 moves.push_back(move);
// Gap is in an EARLIER block: migrate the suballocation across blocks.
13027 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
13029 VmaSuballocation suballoc = *srcSuballocIt;
13030 suballoc.offset = dstAllocOffset;
13031 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
13032 m_BytesMoved += srcAllocSize;
13033 ++m_AllocationsMoved;
13035 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13037 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13038 srcSuballocIt = nextSuballocIt;
13040 InsertSuballoc(pFreeSpaceMetadata, suballoc);
13042 VmaDefragmentationMove move = {
13043 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
13044 srcAllocOffset, dstAllocOffset,
13046 moves.push_back(move);
// No recorded gap fits: place at the destination cursor (aligned up).
13051 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
// Advance to the next destination block while the allocation does not fit,
// registering the unused tail of each exhausted block as reusable free space.
13054 while(dstBlockInfoIndex < srcBlockInfoIndex &&
13055 dstAllocOffset + srcAllocSize > dstBlockSize)
13058 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
13060 ++dstBlockInfoIndex;
13061 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
13062 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
13063 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
13064 dstBlockSize = pDstMetadata->GetSize();
13066 dstAllocOffset = 0;
// Destination caught up with the source block: move within the same block.
13070 if(dstBlockInfoIndex == srcBlockInfoIndex)
13072 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
13074 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
// Overlapping same-block copies are only done when the backend supports
// them AND the shift is large enough to be worthwhile (>= size/64);
// otherwise the allocation stays put and the gap is recorded.
13076 bool skipOver = overlap;
13077 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
13081 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
13086 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
13088 dstOffset = srcAllocOffset + srcAllocSize;
// In-place shift within the same block.
13094 srcSuballocIt->offset = dstAllocOffset;
13095 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
13096 dstOffset = dstAllocOffset + srcAllocSize;
13097 m_BytesMoved += srcAllocSize;
13098 ++m_AllocationsMoved;
13100 VmaDefragmentationMove move = {
13101 srcOrigBlockIndex, dstOrigBlockIndex,
13102 srcAllocOffset, dstAllocOffset,
13104 moves.push_back(move);
// Destination is an earlier block: append the suballocation there.
13112 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
13113 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
13115 VmaSuballocation suballoc = *srcSuballocIt;
13116 suballoc.offset = dstAllocOffset;
13117 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
13118 dstOffset = dstAllocOffset + srcAllocSize;
13119 m_BytesMoved += srcAllocSize;
13120 ++m_AllocationsMoved;
13122 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13124 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13125 srcSuballocIt = nextSuballocIt;
13127 pDstMetadata->m_Suballocations.push_back(suballoc);
13129 VmaDefragmentationMove move = {
13130 srcOrigBlockIndex, dstOrigBlockIndex,
13131 srcAllocOffset, dstAllocOffset,
13133 moves.push_back(move);
13139 m_BlockInfos.clear();
// Rebuild free-suballocation bookkeeping that Preprocess stripped.
13141 PostprocessMetadata();
// Prepares each block's generic metadata for wholesale compaction:
// resets free counters, clears the by-size free list, and erases all FREE
// suballocations so the lists contain only real allocations. The free-space
// bookkeeping is reconstructed afterwards by PostprocessMetadata().
13146 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
13148 const size_t blockCount = m_pBlockVector->GetBlockCount();
13149 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13151 VmaBlockMetadata_Generic*
const pMetadata =
13152 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
// Temporarily treat the whole block as free; fixed up in Postprocess.
13153 pMetadata->m_FreeCount = 0;
13154 pMetadata->m_SumFreeSize = pMetadata->GetSize();
13155 pMetadata->m_FreeSuballocationsBySize.clear();
13156 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13157 it != pMetadata->m_Suballocations.end(); )
// Erase FREE entries while iterating (nextIt keeps the iterator valid).
13159 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
13161 VmaSuballocationList::iterator nextIt = it;
13163 pMetadata->m_Suballocations.erase(it);
// Rebuilds each block's free-space bookkeeping after compaction:
// reinserts FREE suballocations into the gaps between (now densely packed)
// allocations, recomputes m_FreeCount / m_SumFreeSize, and repopulates and
// re-sorts m_FreeSuballocationsBySize. Inverse of PreprocessMetadata().
13174 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
13176 const size_t blockCount = m_pBlockVector->GetBlockCount();
13177 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13179 VmaBlockMetadata_Generic*
const pMetadata =
13180 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
13181 const VkDeviceSize blockSize = pMetadata->GetSize();
// Block drained completely: represent it as one block-sized FREE suballocation.
13184 if(pMetadata->m_Suballocations.empty())
13186 pMetadata->m_FreeCount = 1;
13188 VmaSuballocation suballoc = {
13192 VMA_SUBALLOCATION_TYPE_FREE };
13193 pMetadata->m_Suballocations.push_back(suballoc);
13194 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
// Otherwise walk the packed allocations in offset order, inserting a FREE
// entry for every gap found before an allocation.
13199 VkDeviceSize offset = 0;
13200 VmaSuballocationList::iterator it;
13201 for(it = pMetadata->m_Suballocations.begin();
13202 it != pMetadata->m_Suballocations.end();
// After Preprocess, only non-FREE entries remain, in ascending offsets.
13205 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
13206 VMA_ASSERT(it->offset >= offset);
13209 if(it->offset > offset)
13211 ++pMetadata->m_FreeCount;
13212 const VkDeviceSize freeSize = it->offset - offset;
13213 VmaSuballocation suballoc = {
13217 VMA_SUBALLOCATION_TYPE_FREE };
13218 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
// Small free ranges are not indexed by size (same policy as normal allocs).
13219 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13221 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
13225 pMetadata->m_SumFreeSize -= it->size;
13226 offset = it->offset + it->size;
// Trailing gap after the last allocation, up to the end of the block.
13230 if(offset < blockSize)
13232 ++pMetadata->m_FreeCount;
13233 const VkDeviceSize freeSize = blockSize - offset;
13234 VmaSuballocation suballoc = {
13238 VMA_SUBALLOCATION_TYPE_FREE };
13239 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
13240 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
// NOTE(review): uses '>' here vs '>=' for the preceding-gap case above —
// looks inconsistent; confirm against upstream before changing.
13241 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13243 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
// Restore the by-size ordering invariant of the free list.
13248 pMetadata->m_FreeSuballocationsBySize.begin(),
13249 pMetadata->m_FreeSuballocationsBySize.end(),
13250 VmaSuballocationItemSizeLess());
13253 VMA_HEAVY_ASSERT(pMetadata->Validate());
// Inserts a suballocation into pMetadata's list, keeping ascending offset order.
// Linear scan is acceptable here: called only during defragmentation.
13257 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc)
13260 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13261 while(it != pMetadata->m_Suballocations.end())
13263 if(it->offset < suballoc.offset)
13268 pMetadata->m_Suballocations.insert(it, suballoc);
// Per-block-vector defragmentation context. Pure member initialization:
// records the target vector (and its custom pool, if any), the frame index,
// and the requested algorithm flags. The algorithm object itself is created
// lazily in Begin().
13274 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
13277 VmaBlockVector* pBlockVector,
13278 uint32_t currFrameIndex,
13279 uint32_t algorithmFlags) :
13281 mutexLocked(false),
13282 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
13283 m_hAllocator(hAllocator),
13284 m_hCustomPool(hCustomPool),
13285 m_pBlockVector(pBlockVector),
13286 m_CurrFrameIndex(currFrameIndex),
13287 m_AlgorithmFlags(algorithmFlags),
13288 m_pAlgorithm(VMA_NULL),
13289 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
13290 m_AllAllocations(false)
// Destroys the algorithm instance created in Begin() (vma_delete is null-safe).
13294 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
13296 vma_delete(m_hAllocator, m_pAlgorithm);
// Queues one allocation (with its optional "changed" output flag) for the
// algorithm; actually forwarded to the algorithm in Begin().
13299 void VmaBlockVectorDefragmentationContext::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
13301 AllocInfo info = { hAlloc, pChanged };
13302 m_Allocations.push_back(info);
// Instantiates the defragmentation algorithm and feeds it the queued allocations.
// Chooses the Fast algorithm only when it is safe: no debug margin, and — per the
// trailing '&&' (conditions partially elided from this excerpt, presumably
// requiring all allocations to participate; TODO confirm upstream) — no possible
// buffer/image granularity conflicts. Falls back to the Generic algorithm otherwise.
13305 void VmaBlockVectorDefragmentationContext::Begin(
bool overlappingMoveSupported)
// "All allocations" also holds when every allocation in the vector was queued.
13307 const bool allAllocations = m_AllAllocations ||
13308 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
13320 if(VMA_DEBUG_MARGIN == 0 &&
13322 !m_pBlockVector->IsBufferImageGranularityConflictPossible())
13324 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
13325 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13329 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
13330 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
// Hand the registered allocations (or "all") to the chosen algorithm.
13335 m_pAlgorithm->AddAll();
13339 for(
size_t i = 0, count = m_Allocations.size(); i < count; ++i)
13341 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
// Top-level defragmentation context spanning all pools. Holds one context slot
// per default-pool memory type (m_DefaultPoolContexts, zero-initialized here)
// plus a growable list for custom pools.
13349 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
13351 uint32_t currFrameIndex,
13354 m_hAllocator(hAllocator),
13355 m_CurrFrameIndex(currFrameIndex),
13358 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
// All default-pool slots start empty; filled lazily in AddAllocations().
13360 memset(m_DefaultPoolContexts, 0,
sizeof(m_DefaultPoolContexts));
// Ends defragmentation on every per-vector context (custom pools first, then
// default pools), letting each block vector clean up temp buffers and unlock,
// and frees the contexts themselves.
13363 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
13365 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13367 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
13368 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13369 vma_delete(m_hAllocator, pBlockVectorCtx);
13371 for(
size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
// Default-pool slots are sparse: only filled when an allocation touched them.
13373 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
13374 if(pBlockVectorCtx)
13376 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13377 vma_delete(m_hAllocator, pBlockVectorCtx);
// Registers whole custom pools for defragmentation. Pools using a non-default
// algorithm (linear/buddy) are skipped. Reuses an existing per-pool context if
// one was already created, otherwise creates one; the context is then put in
// "all allocations" mode via AddAll().
13382 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount,
VmaPool* pPools)
13384 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13386 VmaPool pool = pPools[poolIndex];
// Only the default (generic) algorithm supports defragmentation.
13389 if(pool->m_BlockVector.GetAlgorithm() == 0)
13391 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
// Linear search over existing custom-pool contexts for a match.
13393 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13395 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
13397 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13402 if(!pBlockVectorDefragCtx)
13404 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13407 &pool->m_BlockVector,
13410 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
13413 pBlockVectorDefragCtx->AddAll();
// Registers individual allocations for defragmentation. For each allocation:
// skips non-block and lost allocations, finds (or lazily creates) the context
// for its owning block vector — a custom pool's vector or the default pool's
// per-memory-type vector — and queues the allocation with its optional
// per-allocation "changed" output slot in pAllocationsChanged.
13418 void VmaDefragmentationContext_T::AddAllocations(
13419 uint32_t allocationCount,
13421 VkBool32* pAllocationsChanged)
13424 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
13427 VMA_ASSERT(hAlloc);
// Only block-suballocated, non-lost allocations can be moved.
13429 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
13431 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
13433 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
13435 const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
// Allocation lives in a custom pool.
13437 if(hAllocPool != VK_NULL_HANDLE)
// Non-default pool algorithms don't support defragmentation — skip.
13440 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
13442 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13444 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
13446 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13450 if(!pBlockVectorDefragCtx)
13452 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13455 &hAllocPool->m_BlockVector,
13458 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
// Allocation lives in a default pool: one context slot per memory type.
13465 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
13466 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
13467 if(!pBlockVectorDefragCtx)
13469 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13472 m_hAllocator->m_pBlockVectors[memTypeIndex],
13475 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
13479 if(pBlockVectorDefragCtx)
// Each allocation's "changed" flag maps 1:1 to the caller's array slot.
13481 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
13482 &pAllocationsChanged[allocIndex] : VMA_NULL;
13483 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
// Runs defragmentation over all registered block-vector contexts: first the
// default per-memory-type contexts, then the custom-pool contexts, stopping
// early if any context reports a hard failure (res < VK_SUCCESS).
// NOTE(review): lines are elided in this excerpt (braces, some arguments,
// loop increments); comments describe only the visible code.
13489 VkResult VmaDefragmentationContext_T::Defragment(
13490 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
13491 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
// Without a command buffer no GPU-side copies can be recorded, so the GPU
// budget is forced to zero and only CPU (memcpy) moves are allowed.
13499 if(commandBuffer == VK_NULL_HANDLE)
13501 maxGpuBytesToMove = 0;
13502 maxGpuAllocationsToMove = 0;
13505 VkResult res = VK_SUCCESS;
// Pass 1: default block vectors, one per memory type.
13508 for(uint32_t memTypeIndex = 0;
13509 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
13512 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
13513 if(pBlockVectorCtx)
13515 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
13516 pBlockVectorCtx->GetBlockVector()->Defragment(
13519 maxCpuBytesToMove, maxCpuAllocationsToMove,
13520 maxGpuBytesToMove, maxGpuAllocationsToMove,
13522 if(pBlockVectorCtx->res != VK_SUCCESS)
13524 res = pBlockVectorCtx->res;
// Pass 2: custom-pool block vectors registered in this context.
13530 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
13531 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
13534 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
13535 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
13536 pBlockVectorCtx->GetBlockVector()->Defragment(
13539 maxCpuBytesToMove, maxCpuAllocationsToMove,
13540 maxGpuBytesToMove, maxGpuAllocationsToMove,
13542 if(pBlockVectorCtx->res != VK_SUCCESS)
13544 res = pBlockVectorCtx->res;
// VmaRecorder constructor plus (merged by elision in this excerpt) the body
// of its initialization routine: opens the CSV recording file given in the
// settings and writes the two-line header ("Vulkan Memory Allocator,Calls
// recording" and format version "1,5").
// NOTE(review): lines are elided here; the constructor initializer list and
// the init function's signature/braces are not fully visible.
13554 #if VMA_RECORDING_ENABLED 13556 VmaRecorder::VmaRecorder() :
13561 m_StartCounter(INT64_MAX)
13567 m_UseMutex = useMutex;
13568 m_Flags = settings.
flags;
// High-resolution timer baseline for the per-call timestamps.
13570 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
13571 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
13574 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
// Failure to open the file aborts allocator creation.
13577 return VK_ERROR_INITIALIZATION_FAILED;
13581 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
13582 fprintf(m_File,
"%s\n",
"1,5");
// Destructor: closes the recording file if it was successfully opened.
// NOTE(review): the closing statement is elided in this excerpt.
13587 VmaRecorder::~VmaRecorder()
13589 if(m_File != VMA_NULL)
// Appends a "vmaCreateAllocator" line (thread id, timestamp, frame index)
// to the recording file. File access is serialized by m_FileMutex when
// m_UseMutex is set.
13595 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
13597 CallParams callParams;
13598 GetBasicParams(callParams);
13600 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13601 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Appends a "vmaDestroyAllocator" line (thread id, timestamp, frame index)
// to the recording file, under the file mutex.
13605 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
13607 CallParams callParams;
13608 GetBasicParams(callParams);
13610 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13611 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Body of a Record* call logging "vmaCreatePool" with the pool create-info
// fields and the resulting pool handle.
// NOTE(review): the function signature and the fprintf argument lines are
// elided in this excerpt.
13617 CallParams callParams;
13618 GetBasicParams(callParams);
13620 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13621 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaDestroyPool" line with the destroyed pool's handle.
// NOTE(review): the trailing fprintf argument line is elided here.
13632 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
13634 CallParams callParams;
13635 GetBasicParams(callParams);
13637 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13638 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaAllocateMemory" line: memory requirements, allocation
// create-info fields, resulting allocation handle, and the (possibly
// stringified) user data.
// NOTE(review): several fprintf argument lines are elided in this excerpt.
13643 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
13644 const VkMemoryRequirements& vkMemReq,
13648 CallParams callParams;
13649 GetBasicParams(callParams);
13651 VmaMutexLock lock(m_FileMutex, m_UseMutex);
// UserDataString renders pUserData either as a string or as a pointer,
// depending on the allocation flags.
13652 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13653 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13655 vkMemReq.alignment,
13656 vkMemReq.memoryTypeBits,
13664 userDataStr.GetString());
// Appends a "vmaAllocateMemoryPages" line; the variable-length list of
// resulting allocation handles is written via PrintPointerList, followed by
// the user-data string.
// NOTE(review): some parameter and fprintf argument lines are elided here.
13668 void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
13669 const VkMemoryRequirements& vkMemReq,
13671 uint64_t allocationCount,
13674 CallParams callParams;
13675 GetBasicParams(callParams);
13677 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13678 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13679 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
13681 vkMemReq.alignment,
13682 vkMemReq.memoryTypeBits,
13689 PrintPointerList(allocationCount, pAllocations);
13690 fprintf(m_File,
",%s\n", userDataStr.GetString());
// Appends a "vmaAllocateMemoryForBuffer" line: memory requirements, the
// dedicated-allocation hints (as 0/1), create-info fields, the allocation
// handle, and the user-data string.
// NOTE(review): some fprintf argument lines are elided in this excerpt.
13694 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
13695 const VkMemoryRequirements& vkMemReq,
13696 bool requiresDedicatedAllocation,
13697 bool prefersDedicatedAllocation,
13701 CallParams callParams;
13702 GetBasicParams(callParams);
13704 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13705 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13706 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13708 vkMemReq.alignment,
13709 vkMemReq.memoryTypeBits,
13710 requiresDedicatedAllocation ? 1 : 0,
13711 prefersDedicatedAllocation ? 1 : 0,
13719 userDataStr.GetString());
// Appends a "vmaAllocateMemoryForImage" line; mirrors
// RecordAllocateMemoryForBuffer but for image allocations.
// NOTE(review): some fprintf argument lines are elided in this excerpt.
13723 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
13724 const VkMemoryRequirements& vkMemReq,
13725 bool requiresDedicatedAllocation,
13726 bool prefersDedicatedAllocation,
13730 CallParams callParams;
13731 GetBasicParams(callParams);
13733 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13734 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13735 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13737 vkMemReq.alignment,
13738 vkMemReq.memoryTypeBits,
13739 requiresDedicatedAllocation ? 1 : 0,
13740 prefersDedicatedAllocation ? 1 : 0,
13748 userDataStr.GetString());
// Appends a "vmaFreeMemory" line with the freed allocation's handle.
// NOTE(review): the allocation parameter and trailing fprintf argument
// lines are elided in this excerpt.
13752 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
13755 CallParams callParams;
13756 GetBasicParams(callParams);
13758 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13759 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaFreeMemoryPages" line followed by the space-separated list
// of freed allocation handles (via PrintPointerList).
// NOTE(review): the pAllocations parameter line is elided in this excerpt.
13764 void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
13765 uint64_t allocationCount,
13768 CallParams callParams;
13769 GetBasicParams(callParams);
13771 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13772 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
13773 PrintPointerList(allocationCount, pAllocations);
13774 fprintf(m_File,
"\n");
// Appends a "vmaResizeAllocation" line with the allocation handle and the
// requested new size.
// NOTE(review): the allocation parameter line is elided in this excerpt.
13778 void VmaRecorder::RecordResizeAllocation(
13779 uint32_t frameIndex,
13781 VkDeviceSize newSize)
13783 CallParams callParams;
13784 GetBasicParams(callParams);
13786 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13787 fprintf(m_File,
"%u,%.3f,%u,vmaResizeAllocation,%p,%llu\n", callParams.threadId, callParams.time, frameIndex,
13788 allocation, newSize);
// Appends a "vmaSetAllocationUserData" line with the allocation handle and
// the new user data rendered by UserDataString.
// NOTE(review): the allocation parameter and UserDataString constructor
// argument lines are elided in this excerpt.
13792 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
13794 const void* pUserData)
13796 CallParams callParams;
13797 GetBasicParams(callParams);
13799 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13800 UserDataString userDataStr(
13803 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13805 userDataStr.GetString());
// Appends a "vmaCreateLostAllocation" line with the new allocation handle.
// NOTE(review): the allocation parameter and trailing fprintf argument
// lines are elided in this excerpt.
13809 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
13812 CallParams callParams;
13813 GetBasicParams(callParams);
13815 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13816 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaMapMemory" line with the mapped allocation's handle.
// NOTE(review): the allocation parameter and trailing fprintf argument
// lines are elided in this excerpt.
13821 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
13824 CallParams callParams;
13825 GetBasicParams(callParams);
13827 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13828 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaUnmapMemory" line with the unmapped allocation's handle.
// NOTE(review): the allocation parameter and trailing fprintf argument
// lines are elided in this excerpt.
13833 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
13836 CallParams callParams;
13837 GetBasicParams(callParams);
13839 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13840 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaFlushAllocation" line with the allocation handle and the
// flushed offset/size range.
// NOTE(review): the trailing fprintf argument lines are elided here.
13845 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
13846 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13848 CallParams callParams;
13849 GetBasicParams(callParams);
13851 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13852 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaInvalidateAllocation" line with the allocation handle and
// the invalidated offset/size range.
// NOTE(review): the trailing fprintf argument lines are elided here.
13859 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
13860 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13862 CallParams callParams;
13863 GetBasicParams(callParams);
13865 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13866 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaCreateBuffer" line: buffer create-info fields, allocation
// create-info fields (flags, usage, pool, ...), resulting handles, and the
// user-data string.
// NOTE(review): some parameter and fprintf argument lines are elided here.
13873 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
13874 const VkBufferCreateInfo& bufCreateInfo,
13878 CallParams callParams;
13879 GetBasicParams(callParams);
13881 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13882 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13883 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13884 bufCreateInfo.flags,
13885 bufCreateInfo.size,
13886 bufCreateInfo.usage,
13887 bufCreateInfo.sharingMode,
13888 allocCreateInfo.
flags,
13889 allocCreateInfo.
usage,
13893 allocCreateInfo.
pool,
13895 userDataStr.GetString());
// Appends a "vmaCreateImage" line: the full image create-info (type, format,
// extent, mips, layers, samples, tiling, usage, sharing, layout), allocation
// create-info fields, resulting handles, and the user-data string.
// NOTE(review): some parameter and fprintf argument lines are elided here.
13899 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
13900 const VkImageCreateInfo& imageCreateInfo,
13904 CallParams callParams;
13905 GetBasicParams(callParams);
13907 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13908 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13909 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13910 imageCreateInfo.flags,
13911 imageCreateInfo.imageType,
13912 imageCreateInfo.format,
13913 imageCreateInfo.extent.width,
13914 imageCreateInfo.extent.height,
13915 imageCreateInfo.extent.depth,
13916 imageCreateInfo.mipLevels,
13917 imageCreateInfo.arrayLayers,
13918 imageCreateInfo.samples,
13919 imageCreateInfo.tiling,
13920 imageCreateInfo.usage,
13921 imageCreateInfo.sharingMode,
13922 imageCreateInfo.initialLayout,
13923 allocCreateInfo.
flags,
13924 allocCreateInfo.
usage,
13928 allocCreateInfo.
pool,
13930 userDataStr.GetString());
// Appends a "vmaDestroyBuffer" line with the destroyed allocation's handle.
// NOTE(review): the allocation parameter and trailing fprintf argument
// lines are elided in this excerpt.
13934 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
13937 CallParams callParams;
13938 GetBasicParams(callParams);
13940 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13941 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaDestroyImage" line with the destroyed allocation's handle.
// NOTE(review): the allocation parameter and trailing fprintf argument
// lines are elided in this excerpt.
13946 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
13949 CallParams callParams;
13950 GetBasicParams(callParams);
13952 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13953 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaTouchAllocation" line with the touched allocation's handle.
// NOTE(review): the allocation parameter and trailing fprintf argument
// lines are elided in this excerpt.
13958 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
13961 CallParams callParams;
13962 GetBasicParams(callParams);
13964 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13965 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaGetAllocationInfo" line with the queried allocation's handle.
// NOTE(review): the allocation parameter and trailing fprintf argument
// lines are elided in this excerpt.
13970 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
13973 CallParams callParams;
13974 GetBasicParams(callParams);
13976 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13977 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaMakePoolAllocationsLost" line with the pool's handle.
// NOTE(review): the pool parameter and trailing fprintf argument lines are
// elided in this excerpt.
13982 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
13985 CallParams callParams;
13986 GetBasicParams(callParams);
13988 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13989 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a "vmaDefragmentationBegin" line: defragmentation flags, the
// allocation list (written between the first and second fprintf), the
// CPU/GPU byte and allocation budgets, and context/command-buffer handles.
// NOTE(review): parameter lines, the pointer-list call, and several fprintf
// argument lines are elided in this excerpt.
13994 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
13998 CallParams callParams;
13999 GetBasicParams(callParams);
14001 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14002 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
14005 fprintf(m_File,
",");
14007 fprintf(m_File,
",%llu,%u,%llu,%u,%p,%p\n",
// Appends a "vmaDefragmentationEnd" line with the defragmentation context
// handle.
// NOTE(review): the context parameter and trailing fprintf argument lines
// are elided in this excerpt.
14017 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
14020 CallParams callParams;
14021 GetBasicParams(callParams);
14023 VmaMutexLock lock(m_FileMutex, m_UseMutex);
14024 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
// Fragment of the UserDataString constructor body: when user data is
// present it is either interpreted as a C string (m_Str) or formatted as a
// pointer value into m_PtrStr.
// NOTE(review): the constructor signature, the flag check selecting between
// the two branches, and braces are elided in this excerpt.
14031 if(pUserData != VMA_NULL)
14035 m_Str = (
const char*)pUserData;
14039 sprintf_s(m_PtrStr,
"%p", pUserData);
14049 void VmaRecorder::WriteConfiguration(
14050 const VkPhysicalDeviceProperties& devProps,
14051 const VkPhysicalDeviceMemoryProperties& memProps,
14052 bool dedicatedAllocationExtensionEnabled)
14054 fprintf(m_File,
"Config,Begin\n");
14056 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
14057 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
14058 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
14059 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
14060 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
14061 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
14063 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
14064 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
14065 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
14067 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
14068 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
14070 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
14071 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
14073 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
14074 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
14076 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
14077 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
14080 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
14082 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
14083 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
14084 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
14085 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
14086 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
14087 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
14088 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
14089 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
14090 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
14092 fprintf(m_File,
"Config,End\n");
14095 void VmaRecorder::GetBasicParams(CallParams& outParams)
14097 outParams.threadId = GetCurrentThreadId();
14099 LARGE_INTEGER counter;
14100 QueryPerformanceCounter(&counter);
14101 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
// Writes `count` allocation handles to the recording file as %p values,
// space-separated (first item without a leading space).
// NOTE(review): the guard around the empty-list case appears elided in this
// excerpt; pItems[0] is only safe when count > 0.
14104 void VmaRecorder::PrintPointerList(uint64_t count,
const VmaAllocation* pItems)
14108 fprintf(m_File,
"%p", pItems[0]);
14109 for(uint64_t i = 1; i < count; ++i)
14111 fprintf(m_File,
" %p", pItems[i]);
// Flushes the recording file stream.
// NOTE(review): the body is elided in this excerpt.
14116 void VmaRecorder::Flush()
// VmaAllocationObjectAllocator constructor (pool allocator sized 1024
// objects per block) merged, by elision, with the body of Allocate(), which
// returns a new VmaAllocation_T object under the mutex.
// NOTE(review): the Allocate() signature line is elided in this excerpt.
14124 #endif // #if VMA_RECORDING_ENABLED 14129 VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks) :
14130 m_Allocator(pAllocationCallbacks, 1024)
14136 VmaMutexLock mutexLock(m_Mutex);
14137 return m_Allocator.Alloc();
14140 void VmaAllocationObjectAllocator::Free(
VmaAllocation hAlloc)
14142 VmaMutexLock mutexLock(m_Mutex);
14143 m_Allocator.Free(hAlloc);
// VmaAllocator_T constructor (signature elided in this excerpt): records
// create-info settings, zero-initializes internal state, imports Vulkan
// function pointers, queries device/memory properties, applies optional
// per-heap size limits, creates one default VmaBlockVector and one
// dedicated-allocation list per memory type, and optionally starts the
// call recorder.
// NOTE(review): many lines (braces, some initializers/arguments) are elided;
// comments describe only the visible code.
14152 m_hDevice(pCreateInfo->device),
14153 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
14154 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
14155 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
14156 m_AllocationObjectAllocator(&m_AllocationCallbacks),
14157 m_PreferredLargeHeapBlockSize(0),
14158 m_PhysicalDevice(pCreateInfo->physicalDevice),
14159 m_CurrentFrameIndex(0),
14160 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
14163 ,m_pRecorder(VMA_NULL)
// Corruption detection writes uint32_t magic values into the margins, so
// the margin must be a multiple of 4 bytes.
14166 if(VMA_DEBUG_DETECT_CORRUPTION)
14169 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
14174 #if !(VMA_DEDICATED_ALLOCATION) 14177 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
// Zero-initialize all aggregate members before filling them in.
14181 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
14182 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
14183 memset(&m_MemProps, 0,
sizeof(m_MemProps));
14185 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
14186 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
14188 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
14190 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
14201 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
14202 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Alignment-related configuration and device limits must be powers of 2.
14204 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
14205 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
14206 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
14207 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
// Apply user-provided per-heap size limits, also clamping the reported
// heap sizes so block-size heuristics respect the limit.
14214 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
14216 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
14217 if(limit != VK_WHOLE_SIZE)
14219 m_HeapSizeLimit[heapIndex] = limit;
14220 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
14222 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default block vector and one dedicated-allocation list per type.
14228 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14230 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
14232 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
14236 preferredBlockSize,
14239 GetBufferImageGranularity(),
14246 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
14253 VkResult res = VK_SUCCESS;
// Optional call recording; only available when compiled in.
14258 #if VMA_RECORDING_ENABLED 14259 m_pRecorder = vma_new(
this, VmaRecorder)();
14261 if(res != VK_SUCCESS)
14265 m_pRecorder->WriteConfiguration(
14266 m_PhysicalDeviceProperties,
14268 m_UseKhrDedicatedAllocation);
14269 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
14271 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
14272 return VK_ERROR_FEATURE_NOT_PRESENT;
// Destructor: stops the recorder (if any), verifies that no custom pools or
// dedicated allocations are left alive, and destroys the per-memory-type
// dedicated-allocation lists and block vectors in reverse order.
// NOTE(review): braces are elided in this excerpt.
14279 VmaAllocator_T::~VmaAllocator_T()
14281 #if VMA_RECORDING_ENABLED 14282 if(m_pRecorder != VMA_NULL)
14284 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
14285 vma_delete(
this, m_pRecorder);
// All custom pools must have been destroyed by the user before the
// allocator itself.
14289 VMA_ASSERT(m_Pools.empty());
14291 for(
size_t i = GetMemoryTypeCount(); i--; )
14293 if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
14295 VMA_ASSERT(0 &&
"Unfreed dedicated allocations found.");
14298 vma_delete(
this, m_pDedicatedAllocations[i]);
14299 vma_delete(
this, m_pBlockVectors[i]);
// Fills m_VulkanFunctions: first from statically linked Vulkan entry points
// (when VMA_STATIC_VULKAN_FUNCTIONS == 1), then overrides individual
// pointers from the optional user-supplied table, and finally asserts that
// every required pointer is set.
14303 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
14305 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14306 m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
14307 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
14308 m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
14309 m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
14310 m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
14311 m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
14312 m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
14313 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
14314 m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
14315 m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
14316 m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
14317 m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
14318 m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
14319 m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
14320 m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
14321 m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
14322 m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
// The *2KHR extension entry points are device-level and must be fetched
// via vkGetDeviceProcAddr.
14323 #if VMA_DEDICATED_ALLOCATION 14324 if(m_UseKhrDedicatedAllocation)
14326 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
14327 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
14328 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
14329 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// User-supplied overrides take precedence over the static defaults.
14331 #endif // #if VMA_DEDICATED_ALLOCATION 14332 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14334 #define VMA_COPY_IF_NOT_NULL(funcName) \ 14335 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 14337 if(pVulkanFunctions != VMA_NULL)
14339 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
14340 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
14341 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
14342 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
14343 VMA_COPY_IF_NOT_NULL(vkMapMemory);
14344 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
14345 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
14346 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
14347 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
14348 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
14349 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
14350 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
14351 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
14352 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
14353 VMA_COPY_IF_NOT_NULL(vkCreateImage);
14354 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
14355 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
14356 #if VMA_DEDICATED_ALLOCATION 14357 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
14358 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Every required entry point must be resolved by this point, either
// statically or via the user-provided table.
14362 #undef VMA_COPY_IF_NOT_NULL 14366 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
14367 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
14368 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
14369 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
14370 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
14371 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
14372 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
14373 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
14374 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
14375 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
14376 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
14377 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
14378 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
14379 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
14380 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
14381 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
14382 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
14383 #if VMA_DEDICATED_ALLOCATION 14384 if(m_UseKhrDedicatedAllocation)
14386 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
14387 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
14392 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
14394 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14395 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
14396 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
14397 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates allocationCount allocations from a specific memory type: first
// decides whether dedicated memory is preferred (debug macro, explicit
// request, or size > half the preferred block size), otherwise tries the
// type's default block vector and falls back to dedicated memory on failure.
// NOTE(review): many lines are elided in this excerpt (braces, flag checks,
// several call arguments); comments describe only the visible code.
14400 VkResult VmaAllocator_T::AllocateMemoryOfType(
14402 VkDeviceSize alignment,
14403 bool dedicatedAllocation,
14404 VkBuffer dedicatedBuffer,
14405 VkImage dedicatedImage,
14407 uint32_t memTypeIndex,
14408 VmaSuballocationType suballocType,
14409 size_t allocationCount,
14412 VMA_ASSERT(pAllocations != VMA_NULL);
14413 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
// Visible condition: adjust behavior when the chosen type is not
// HOST_VISIBLE (the acted-upon lines are elided here).
14419 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
14424 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
14425 VMA_ASSERT(blockVector);
14427 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
// Heuristic for going straight to a dedicated VkDeviceMemory allocation.
14428 bool preferDedicatedMemory =
14429 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
14430 dedicatedAllocation ||
14432 size > preferredBlockSize / 2;
// Dedicated memory is only used for the default pools (no custom pool).
14434 if(preferDedicatedMemory &&
14436 finalCreateInfo.
pool == VK_NULL_HANDLE)
14445 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14449 return AllocateDedicatedMemory(
// Main path: sub-allocate from the type's default block vector.
14464 VkResult res = blockVector->Allocate(
14465 m_CurrentFrameIndex.load(),
14472 if(res == VK_SUCCESS)
14480 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: block allocation failed, try dedicated memory instead.
14484 res = AllocateDedicatedMemory(
14490 finalCreateInfo.pUserData,
14495 if(res == VK_SUCCESS)
14498 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
14504 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates allocationCount dedicated VkDeviceMemory blocks, one per
// allocation: builds VkMemoryAllocateInfo (chaining
// VkMemoryDedicatedAllocateInfoKHR when the extension is in use), allocates
// each page, registers successes in the per-type dedicated list, and on any
// failure rolls back every page allocated so far.
// NOTE(review): lines are elided in this excerpt (braces, some parameters
// and call arguments); comments describe only the visible code.
14511 VkResult VmaAllocator_T::AllocateDedicatedMemory(
14513 VmaSuballocationType suballocType,
14514 uint32_t memTypeIndex,
14516 bool isUserDataString,
14518 VkBuffer dedicatedBuffer,
14519 VkImage dedicatedImage,
14520 size_t allocationCount,
14523 VMA_ASSERT(allocationCount > 0 && pAllocations);
14525 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
14526 allocInfo.memoryTypeIndex = memTypeIndex;
14527 allocInfo.allocationSize = size;
// Chain the dedicated-allocation struct for exactly one of buffer/image.
14529 #if VMA_DEDICATED_ALLOCATION 14530 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
14531 if(m_UseKhrDedicatedAllocation)
14533 if(dedicatedBuffer != VK_NULL_HANDLE)
14535 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
14536 dedicatedAllocInfo.buffer = dedicatedBuffer;
14537 allocInfo.pNext = &dedicatedAllocInfo;
14539 else if(dedicatedImage != VK_NULL_HANDLE)
14541 dedicatedAllocInfo.image = dedicatedImage;
14542 allocInfo.pNext = &dedicatedAllocInfo;
// Allocate each page; stop at the first failure.
14545 #endif // #if VMA_DEDICATED_ALLOCATION 14548 VkResult res = VK_SUCCESS;
14549 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14551 res = AllocateDedicatedMemoryPage(
14559 pAllocations + allocIndex);
14560 if(res != VK_SUCCESS)
// Success: register all new allocations in the per-type dedicated list,
// sorted by pointer, under the write lock.
14566 if(res == VK_SUCCESS)
14570 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14571 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
14572 VMA_ASSERT(pDedicatedAllocations);
14573 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14575 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
14579 VMA_DEBUG_LOG(
" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
// Failure: free every page allocated before the failure, in reverse order.
14584 while(allocIndex--)
14587 VkDeviceMemory hMemory = currAlloc->GetMemory();
14599 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
14601 currAlloc->SetUserData(
this, VMA_NULL);
14603 m_AllocationObjectAllocator.Free(currAlloc);
// Leave the output array in a defined (all-null) state after failure.
14606 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates a single dedicated VkDeviceMemory block: calls
// AllocateVulkanMemory, optionally maps it persistently, then constructs a
// VmaAllocation_T initialized as a dedicated allocation. On map failure the
// freshly allocated memory is released before returning.
// NOTE(review): lines are elided in this excerpt (braces, some parameters
// and vkMapMemory arguments); comments describe only the visible code.
14612 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
14614 VmaSuballocationType suballocType,
14615 uint32_t memTypeIndex,
14616 const VkMemoryAllocateInfo& allocInfo,
14618 bool isUserDataString,
14622 VkDeviceMemory hMemory = VK_NULL_HANDLE;
14623 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
14626 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
14630 void* pMappedData = VMA_NULL;
14633 res = (*m_VulkanFunctions.vkMapMemory)(
// Mapping failed: release the just-allocated memory before bailing out.
14642 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
14643 FreeVulkanMemory(memTypeIndex, size, hMemory);
// Construct and initialize the allocation object.
14648 *pAllocation = m_AllocationObjectAllocator.Allocate();
14649 (*pAllocation)->Ctor(m_CurrentFrameIndex.load(), isUserDataString);
14650 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
14651 (*pAllocation)->SetUserData(
this, pUserData);
// Optionally fill new memory with a debug pattern.
14652 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14654 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Queries memory requirements for a buffer. When VK_KHR_dedicated_allocation
// is in use, chains VkMemoryDedicatedRequirementsKHR through the *2KHR query
// to also obtain the requires/prefers dedicated flags; otherwise falls back
// to the core query and reports both flags as false.
// NOTE(review): braces are elided in this excerpt.
14660 void VmaAllocator_T::GetBufferMemoryRequirements(
14662 VkMemoryRequirements& memReq,
14663 bool& requiresDedicatedAllocation,
14664 bool& prefersDedicatedAllocation)
const 14666 #if VMA_DEDICATED_ALLOCATION 14667 if(m_UseKhrDedicatedAllocation)
14669 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
14670 memReqInfo.buffer = hBuffer;
14672 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14674 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14675 memReq2.pNext = &memDedicatedReq;
14677 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14679 memReq = memReq2.memoryRequirements;
14680 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14681 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core function, no dedicated-allocation information available.
14684 #endif // #if VMA_DEDICATED_ALLOCATION 14686 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
14687 requiresDedicatedAllocation =
false;
14688 prefersDedicatedAllocation =
false;
// Queries memory requirements for an image; mirrors
// GetBufferMemoryRequirements, using the image variants of the *2KHR
// structures when VK_KHR_dedicated_allocation is in use.
// NOTE(review): braces are elided in this excerpt.
14692 void VmaAllocator_T::GetImageMemoryRequirements(
14694 VkMemoryRequirements& memReq,
14695 bool& requiresDedicatedAllocation,
14696 bool& prefersDedicatedAllocation)
const 14698 #if VMA_DEDICATED_ALLOCATION 14699 if(m_UseKhrDedicatedAllocation)
14701 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
14702 memReqInfo.image = hImage;
14704 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14706 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14707 memReq2.pNext = &memDedicatedReq;
14709 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14711 memReq = memReq2.memoryRequirements;
14712 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14713 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core function, no dedicated-allocation information available.
14716 #endif // #if VMA_DEDICATED_ALLOCATION 14718 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
14719 requiresDedicatedAllocation =
false;
14720 prefersDedicatedAllocation =
false;
// Central allocation entry point. Validates the request (size, flag
// combinations), then routes it either to a user-supplied custom pool
// (createInfo.pool) or to the default per-memory-type block vectors via
// AllocateMemoryOfType. On failure it masks out the failed memory type
// from memoryTypeBits and retries with the next best candidate.
// NOTE(review): many interior lines (flag checks, argument lists, loop
// structure) were dropped by the extraction; comments below describe only
// what is visible.
14724 VkResult VmaAllocator_T::AllocateMemory(
14725 const VkMemoryRequirements& vkMemReq,
14726 bool requiresDedicatedAllocation,
14727 bool prefersDedicatedAllocation,
14728 VkBuffer dedicatedBuffer,
14729 VkImage dedicatedImage,
14731 VmaSuballocationType suballocType,
14732 size_t allocationCount,
// Zero the output array up front so callers see null handles on failure.
14735 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
14737 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
14739 if(vkMemReq.size == 0)
14741 return VK_ERROR_VALIDATION_FAILED_EXT;
// Reject mutually exclusive / nonsensical flag combinations.
14746 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
14747 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14752 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
14753 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// A required dedicated allocation cannot use NEVER_ALLOCATE or a pool.
14755 if(requiresDedicatedAllocation)
14759 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
14760 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14762 if(createInfo.
pool != VK_NULL_HANDLE)
14764 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
14765 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14768 if((createInfo.
pool != VK_NULL_HANDLE) &&
14771 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
14772 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom-pool path: delegate to the pool's block vector, honoring the
// stricter of the request alignment and the memory type's min alignment.
14775 if(createInfo.
pool != VK_NULL_HANDLE)
14777 const VkDeviceSize alignmentForPool = VMA_MAX(
14778 vkMemReq.alignment,
14779 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
14780 return createInfo.
pool->m_BlockVector.Allocate(
14781 m_CurrentFrameIndex.load(),
// Default path: pick a memory type, allocate, and on failure remove that
// type from the candidate bits and try again.
14792 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
14793 uint32_t memTypeIndex = UINT32_MAX;
14795 if(res == VK_SUCCESS)
14797 VkDeviceSize alignmentForMemType = VMA_MAX(
14798 vkMemReq.alignment,
14799 GetMemoryTypeMinAlignment(memTypeIndex));
14801 res = AllocateMemoryOfType(
14803 alignmentForMemType,
14804 requiresDedicatedAllocation || prefersDedicatedAllocation,
14813 if(res == VK_SUCCESS)
// Exclude the memory type that just failed before retrying.
14823 memoryTypeBits &= ~(1u << memTypeIndex);
14826 if(res == VK_SUCCESS)
14828 alignmentForMemType = VMA_MAX(
14829 vkMemReq.alignment,
14830 GetMemoryTypeMinAlignment(memTypeIndex));
14832 res = AllocateMemoryOfType(
14834 alignmentForMemType,
14835 requiresDedicatedAllocation || prefersDedicatedAllocation,
14844 if(res == VK_SUCCESS)
// No memory type could satisfy the request.
14854 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an array of allocations. Iterates in reverse order; for each live
// allocation it (optionally) fills the memory with a "destroyed" debug
// pattern, returns the memory to its owning block vector (custom pool or
// default per-type vector) or frees the dedicated memory, then clears user
// data and destroys the VmaAllocation_T object.
14865 void VmaAllocator_T::FreeMemory(
14866 size_t allocationCount,
14869 VMA_ASSERT(pAllocations);
// Reverse iteration over the array.
14871 for(
size_t allocIndex = allocationCount; allocIndex--; )
14875 if(allocation != VK_NULL_HANDLE)
// TouchAllocation: only fill/free block memory if the allocation is not lost.
14877 if(TouchAllocation(allocation))
14879 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14881 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
14884 switch(allocation->GetType())
14886 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// Resolve the owning block vector: custom pool if the block has a
// parent pool, otherwise the default vector for its memory type.
14888 VmaBlockVector* pBlockVector = VMA_NULL;
14889 VmaPool hPool = allocation->GetBlock()->GetParentPool();
14890 if(hPool != VK_NULL_HANDLE)
14892 pBlockVector = &hPool->m_BlockVector;
14896 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
14897 pBlockVector = m_pBlockVectors[memTypeIndex];
14899 pBlockVector->Free(allocation);
14902 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14903 FreeDedicatedMemory(allocation);
// Destroy the allocation object itself regardless of its type.
14910 allocation->SetUserData(
this, VMA_NULL);
14911 allocation->Dtor();
14912 m_AllocationObjectAllocator.Free(allocation);
// Attempts to resize an existing allocation in place. Only block
// allocations can grow/shrink (via the block metadata); dedicated
// allocations return VK_ERROR_FEATURE_NOT_PRESENT. A zero newSize or a
// lost allocation is a validation error; an unchanged size is a no-op.
14917 VkResult VmaAllocator_T::ResizeAllocation(
14919 VkDeviceSize newSize)
14921 if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
14923 return VK_ERROR_VALIDATION_FAILED_EXT;
14925 if(newSize == alloc->GetSize())
14930 switch(alloc->GetType())
14932 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14933 return VK_ERROR_FEATURE_NOT_PRESENT;
14934 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// Ask the block metadata whether the suballocation can be resized;
// on success mirror the new size into the allocation object.
14935 if(alloc->GetBlock()->m_pMetadata->ResizeAllocation(alloc, newSize))
14937 alloc->ChangeSize(newSize);
14938 VMA_HEAVY_ASSERT(alloc->GetBlock()->m_pMetadata->Validate());
14943 return VK_ERROR_OUT_OF_POOL_MEMORY;
14947 return VK_ERROR_VALIDATION_FAILED_EXT;
// Aggregates allocator-wide statistics into pStats: initializes all stat
// infos, then accumulates (1) the default block vectors per memory type,
// (2) every custom pool's block vector, and (3) all dedicated allocations
// per memory type, finally postprocessing totals and per-type/per-heap
// entries (e.g. computing averages).
14951 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Reset all output statistics before accumulation.
14954 InitStatInfo(pStats->
total);
14955 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
14957 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// (1) Default block vectors.
14961 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14963 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
14964 VMA_ASSERT(pBlockVector);
14965 pBlockVector->AddStats(pStats);
// (2) Custom pools, under a shared (read) lock on the pool list.
14970 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
14971 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
14973 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
// (3) Dedicated allocations, per memory type, each under its own read lock.
14978 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14980 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14981 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14982 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
14983 VMA_ASSERT(pDedicatedAllocVector);
14984 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
14987 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
14988 VmaAddStatInfo(pStats->
total, allocationStatInfo);
14989 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
14990 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Postprocess: finalize derived fields on totals and per-type/heap entries.
14995 VmaPostprocessCalcStatInfo(pStats->
total);
14996 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
14997 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
14998 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
14999 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// PCI vendor ID of AMD (4098 == 0x1002), used to detect AMD GPUs.
15002 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Starts a defragmentation operation: creates a VmaDefragmentationContext_T,
// registers the allocations to move, and runs Defragment(). If the result is
// anything other than VK_NOT_READY the operation is already complete, so the
// context is destroyed immediately and *pContext is set to null.
15004 VkResult VmaAllocator_T::DefragmentationBegin(
15014 *pContext = vma_new(
this, VmaDefragmentationContext_T)(
15015 this, m_CurrentFrameIndex.load(), info.
flags, pStats);
15018 (*pContext)->AddAllocations(
15021 VkResult res = (*pContext)->Defragment(
// VK_NOT_READY means the caller must finish with DefragmentationEnd later;
// any other result finishes (or fails) synchronously.
15026 if(res != VK_NOT_READY)
15028 vma_delete(
this, *pContext);
15029 *pContext = VMA_NULL;
// Finishes a deferred defragmentation operation by destroying its context.
15035 VkResult VmaAllocator_T::DefragmentationEnd(
15038 vma_delete(
this, context);
// Body of VmaAllocator_T::GetAllocationInfo (the signature was lost in this
// extraction). Fills pAllocationInfo from the allocation. For allocations
// that can become lost it runs a compare-exchange loop on the last-use
// frame index: a lost allocation reports null memory/zero offset, an
// already-touched allocation reports its real parameters, and otherwise the
// loop CASes the frame index forward to the current frame and retries.
15044 if(hAllocation->CanBecomeLost())
15050 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15051 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost allocation: report placeholder values.
15054 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
15058 pAllocationInfo->
offset = 0;
15059 pAllocationInfo->
size = hAllocation->GetSize();
15061 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report real parameters.
15064 else if(localLastUseFrameIndex == localCurrFrameIndex)
15066 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
15067 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
15068 pAllocationInfo->
offset = hAllocation->GetOffset();
15069 pAllocationInfo->
size = hAllocation->GetSize();
15071 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise: try to advance the last-use frame index atomically; on CAS
// failure, retry with the freshly observed value.
15076 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15078 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost allocations: in stats builds, still bump the last-use frame
// index (assertion guards against a lost allocation reaching this path).
15085 #if VMA_STATS_STRING_ENABLED 15086 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15087 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15090 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
15091 if(localLastUseFrameIndex == localCurrFrameIndex)
15097 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15099 localLastUseFrameIndex = localCurrFrameIndex;
// Report the allocation's current parameters.
15105 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
15106 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
15107 pAllocationInfo->
offset = hAllocation->GetOffset();
15108 pAllocationInfo->
size = hAllocation->GetSize();
15109 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
15110 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks the allocation as used in the current frame. Returns (presumably)
// false for a lost allocation and true otherwise -- TODO confirm; the
// return statements were dropped by the extraction. Uses the same
// compare-exchange loop on the last-use frame index as GetAllocationInfo,
// but without filling any output struct.
15114 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
15117 if(hAllocation->CanBecomeLost())
15119 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15120 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15123 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
15127 else if(localLastUseFrameIndex == localCurrFrameIndex)
// CAS the last-use frame index forward; retry on contention.
15133 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15135 localLastUseFrameIndex = localCurrFrameIndex;
// Allocation cannot be lost: in stats builds still advance its last-use
// frame index for bookkeeping.
15142 #if VMA_STATS_STRING_ENABLED 15143 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15144 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15147 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
15148 if(localLastUseFrameIndex == localCurrFrameIndex)
15154 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15156 localLastUseFrameIndex = localCurrFrameIndex;
// Body of VmaAllocator_T::CreatePool (signature lost in this extraction).
// Validates the create info, computes the preferred block size for the
// pool's memory type, allocates a VmaPool_T, creates its minimum number of
// blocks, and on success registers the pool (with a new id) in m_Pools
// under a write lock.
15168 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
15178 return VK_ERROR_INITIALIZATION_FAILED;
15181 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
15183 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
15185 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
// Roll back the pool object if initial block creation failed.
15186 if(res != VK_SUCCESS)
15188 vma_delete(
this, *pPool);
// Register the new pool in the sorted pool list, under an exclusive lock.
15195 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15196 (*pPool)->SetId(m_NextPoolId++);
15197 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Destroys a custom pool: removes it from the sorted pool list under an
// exclusive lock (asserting it was actually registered), then deletes it.
15203 void VmaAllocator_T::DestroyPool(
VmaPool pool)
15207 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15208 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
15209 VMA_ASSERT(success &&
"Pool not found in Allocator.");
15212 vma_delete(
this, pool);
// Body of VmaAllocator_T::GetPoolStats (signature lost in this extraction):
// forwards to the pool's block vector.
15217 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Atomically stores the application's current frame index, used by the
// lost-allocation machinery.
15220 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
15222 m_CurrentFrameIndex.store(frameIndex);
// Marks eligible allocations in the given pool as lost, relative to the
// current frame index; the count of newly lost allocations is returned via
// pLostAllocationCount.
15225 void VmaAllocator_T::MakePoolAllocationsLost(
15227 size_t* pLostAllocationCount)
15229 hPool->m_BlockVector.MakePoolAllocationsLost(
15230 m_CurrentFrameIndex.load(),
15231 pLostAllocationCount);
// Runs corruption detection (margin validation) on a single custom pool.
15234 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
15236 return hPool->m_BlockVector.CheckCorruption();
// Runs corruption detection over all default block vectors and custom
// pools whose memory type is selected by memoryTypeBits. Starts from
// VK_ERROR_FEATURE_NOT_PRESENT and upgrades to VK_SUCCESS when at least one
// vector actually performed the check; other results are handled in switch
// cases that were partially dropped by the extraction.
15239 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
15241 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default block vectors, filtered by memory type bit.
15244 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15246 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
15248 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
15249 VMA_ASSERT(pBlockVector);
15250 VkResult localRes = pBlockVector->CheckCorruption();
15253 case VK_ERROR_FEATURE_NOT_PRESENT:
15256 finalRes = VK_SUCCESS;
// Custom pools, under a shared lock, filtered the same way.
15266 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15267 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
15269 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
15271 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
15274 case VK_ERROR_FEATURE_NOT_PRESENT:
15277 finalRes = VK_SUCCESS;
// Creates a dummy allocation object that is permanently in the "lost"
// state (frame index VMA_FRAME_INDEX_LOST, no user-data string).
15289 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
15291 *pAllocation = m_AllocationObjectAllocator.Allocate();
15292 (*pAllocation)->Ctor(VMA_FRAME_INDEX_LOST,
false);
15293 (*pAllocation)->InitLost();
// Wraps vkAllocateMemory with optional per-heap size-limit accounting.
// If the target heap has a user-configured limit (!= VK_WHOLE_SIZE), the
// remaining budget is checked and decremented under m_HeapSizeLimitMutex;
// exceeding the limit yields VK_ERROR_OUT_OF_DEVICE_MEMORY without calling
// the driver. On success the user's pfnAllocate device-memory callback is
// invoked.
15296 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
15298 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
// Budget-limited path.
15301 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15303 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15304 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
15306 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15307 if(res == VK_SUCCESS)
// Deduct from the heap budget only after a successful allocation.
15309 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
15314 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Unlimited path: call the driver directly.
15319 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
// Notify the user's allocation callback, if registered.
15322 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
15324 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
// Wraps vkFreeMemory: first invokes the user's pfnFree device-memory
// callback (if any), frees the memory, then returns the freed size to the
// per-heap budget if a heap size limit is active.
15330 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
15332 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
15334 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
15337 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
// Restore the heap budget under the same mutex used by AllocateVulkanMemory.
15339 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
15340 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15342 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15343 m_HeapSizeLimit[heapIndex] += size;
// Maps the allocation's memory and returns a pointer in *ppData.
// Lost-capable allocations cannot be mapped. Block allocations map the
// whole VkDeviceMemory block (ref-counted via VmaDeviceMemoryBlock::Map)
// and offset the returned pointer by the allocation's offset; dedicated
// allocations delegate to DedicatedAllocMap.
15347 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
15349 if(hAllocation->CanBecomeLost())
15351 return VK_ERROR_MEMORY_MAP_FAILED;
15354 switch(hAllocation->GetType())
15356 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15358 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15359 char *pBytes = VMA_NULL;
15360 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
15361 if(res == VK_SUCCESS)
// Adjust the block base pointer by this suballocation's offset and
// record the map on the allocation (for Unmap bookkeeping).
15363 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
15364 hAllocation->BlockAllocMap();
15368 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15369 return hAllocation->DedicatedAllocMap(
this, ppData);
15372 return VK_ERROR_MEMORY_MAP_FAILED;
// Body of VmaAllocator_T::Unmap (signature lost in this extraction).
// Reverses Map: for block allocations it decrements the allocation's map
// count and unmaps the owning block (ref-counted); dedicated allocations
// delegate to DedicatedAllocUnmap.
15378 switch(hAllocation->GetType())
15380 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15382 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15383 hAllocation->BlockAllocUnmap();
15384 pBlock->Unmap(
this, 1);
15387 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15388 hAllocation->DedicatedAllocUnmap(
this);
// Binds hBuffer to the allocation's memory. Dedicated allocations call
// vkBindBufferMemory directly on the whole VkDeviceMemory; block
// allocations go through VmaDeviceMemoryBlock::BindBufferMemory, which
// (presumably) serializes binds on the shared block -- TODO confirm, the
// callee is outside this view.
15395 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
15397 VkResult res = VK_SUCCESS;
15398 switch(hAllocation->GetType())
15400 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15401 res = GetVulkanFunctions().vkBindBufferMemory(
15404 hAllocation->GetMemory(),
15407 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15409 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15410 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
15411 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
// Image counterpart of BindBufferMemory: binds hImage to the allocation's
// memory, either directly (dedicated) or via the owning block (block
// allocations).
15420 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
15422 VkResult res = VK_SUCCESS;
15423 switch(hAllocation->GetType())
15425 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15426 res = GetVulkanFunctions().vkBindImageMemory(
15429 hAllocation->GetMemory(),
15432 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15434 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15435 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
15436 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
// Flushes or invalidates (per `op`) a sub-range of the allocation, but only
// when the memory type is non-coherent. Builds a VkMappedMemoryRange whose
// offset/size are aligned down/up to the device's nonCoherentAtomSize, as
// required by the Vulkan spec, and clamped to the allocation (dedicated) or
// to the owning block's size (block allocations, after adding the
// suballocation offset).
15445 void VmaAllocator_T::FlushOrInvalidateAllocation(
15447 VkDeviceSize offset, VkDeviceSize size,
15448 VMA_CACHE_OPERATION op)
15450 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
// Coherent memory needs no explicit flush/invalidate; size 0 is a no-op.
15451 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
15453 const VkDeviceSize allocationSize = hAllocation->GetSize();
15454 VMA_ASSERT(offset <= allocationSize);
15456 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
15458 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
15459 memRange.memory = hAllocation->GetMemory();
15461 switch(hAllocation->GetType())
15463 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
// Dedicated: range is relative to the whole VkDeviceMemory object.
15464 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15465 if(size == VK_WHOLE_SIZE)
15467 memRange.size = allocationSize - memRange.offset;
15471 VMA_ASSERT(offset + size <= allocationSize);
15472 memRange.size = VMA_MIN(
15473 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
15474 allocationSize - memRange.offset);
15478 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// Block: first compute the aligned range within the suballocation...
15481 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15482 if(size == VK_WHOLE_SIZE)
15484 size = allocationSize - offset;
15488 VMA_ASSERT(offset + size <= allocationSize);
15490 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
// ...then shift it by the suballocation's offset within the block
// (which is itself atom-aligned) and clamp to the block size.
15493 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
15494 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
15495 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
15496 memRange.offset += allocationOffset;
15497 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
// Dispatch the actual cache operation.
15508 case VMA_CACHE_FLUSH:
15509 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
15511 case VMA_CACHE_INVALIDATE:
15512 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
// Frees a dedicated allocation: removes it from the per-memory-type
// dedicated-allocation list (under a write lock), then releases the
// underlying VkDeviceMemory via FreeVulkanMemory.
15521 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
15523 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
15525 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
// Unregister from the sorted dedicated-allocation vector for this type.
15527 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15528 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
15529 VMA_ASSERT(pDedicatedAllocations);
15530 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
15531 VMA_ASSERT(success);
15534 VkDeviceMemory hMemory = allocation->GetMemory();
15546 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
15548 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Debug helper: fills the allocation's memory with `pattern` when
// VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, the allocation cannot become
// lost, and its memory type is host-visible. Maps, memsets, flushes the
// whole range, and unmaps; asserts if mapping fails while the feature is on.
15551 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
15553 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
15554 !hAllocation->CanBecomeLost() &&
15555 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15557 void* pData = VMA_NULL;
15558 VkResult res = Map(hAllocation, &pData);
15559 if(res == VK_SUCCESS)
15561 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
// Flush so the pattern is visible even on non-coherent memory.
15562 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
15563 Unmap(hAllocation);
15567 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
// Serializes the allocator's detailed state into JSON (stats-string builds
// only): a "DedicatedAllocations" object keyed by "Type <index>", a
// "DefaultPools" object for non-empty default block vectors, and a "Pools"
// object keyed by pool id for custom pools. The "...Started" flags lazily
// open each top-level JSON object only when there is content for it.
15572 #if VMA_STATS_STRING_ENABLED 15574 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
// Section 1: dedicated allocations, grouped by memory type.
15576 bool dedicatedAllocationsStarted =
false;
15577 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15579 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15580 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
15581 VMA_ASSERT(pDedicatedAllocVector);
15582 if(pDedicatedAllocVector->empty() ==
false)
15584 if(dedicatedAllocationsStarted ==
false)
15586 dedicatedAllocationsStarted =
true;
15587 json.WriteString(
"DedicatedAllocations");
15588 json.BeginObject();
15591 json.BeginString(
"Type ");
15592 json.ContinueString(memTypeIndex);
15597 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
15599 json.BeginObject(
true);
15601 hAlloc->PrintParameters(json);
// Close the "DedicatedAllocations" object if it was opened.
15608 if(dedicatedAllocationsStarted)
// Section 2: default per-memory-type block vectors.
15614 bool allocationsStarted =
false;
15615 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15617 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
15619 if(allocationsStarted ==
false)
15621 allocationsStarted =
true;
15622 json.WriteString(
"DefaultPools");
15623 json.BeginObject();
15626 json.BeginString(
"Type ");
15627 json.ContinueString(memTypeIndex);
15630 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
15633 if(allocationsStarted)
// Section 3: custom pools, keyed by pool id, under a shared lock.
15641 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15642 const size_t poolCount = m_Pools.size();
15645 json.WriteString(
"Pools");
15646 json.BeginObject();
15647 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
15649 json.BeginString();
15650 json.ContinueString(m_Pools[poolIndex]->GetId());
15653 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
15660 #endif // #if VMA_STATS_STRING_ENABLED 15669 VMA_ASSERT(pCreateInfo && pAllocator);
15670 VMA_DEBUG_LOG(
"vmaCreateAllocator");
15672 return (*pAllocator)->Init(pCreateInfo);
// Body of vmaDestroyAllocator (signature lost in this extraction). Copies
// the allocation callbacks out first, because the allocator object that
// owns them is about to be deleted with those very callbacks.
15678 if(allocator != VK_NULL_HANDLE)
15680 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
15681 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
15682 vma_delete(&allocationCallbacks, allocator);
// vmaGetPhysicalDeviceProperties: returns a pointer to the allocator's
// cached VkPhysicalDeviceProperties.
15688 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
15690 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
15691 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
// vmaGetMemoryProperties: returns a pointer to the allocator's cached
// VkPhysicalDeviceMemoryProperties.
15696 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
15698 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
15699 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
// vmaGetMemoryTypeProperties: looks up the property flags of one memory
// type, with a bounds assertion on the index.
15704 uint32_t memoryTypeIndex,
15705 VkMemoryPropertyFlags* pFlags)
15707 VMA_ASSERT(allocator && pFlags);
15708 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
15709 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
// vmaSetCurrentFrameIndex: forwards to the allocator after rejecting the
// reserved "lost" frame index.
15714 uint32_t frameIndex)
15716 VMA_ASSERT(allocator);
15717 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
15719 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15721 allocator->SetCurrentFrameIndex(frameIndex);
// vmaCalculateStats: thin public wrapper over
// VmaAllocator_T::CalculateStats.
15728 VMA_ASSERT(allocator && pStats);
15729 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15730 allocator->CalculateStats(pStats);
// vmaBuildStatsString (stats-string builds only): renders the allocator
// state as a JSON document into a newly allocated, NUL-terminated string
// returned via *ppStatsString. Emits a "Total" stat block, then per-heap
// objects (size, flags, stats, and each memory type that lives on that
// heap with its property flags and stats); when detailedMap is VK_TRUE it
// appends the full PrintDetailedMap dump. The caller must release the
// string with vmaFreeStatsString.
15733 #if VMA_STATS_STRING_ENABLED 15737 char** ppStatsString,
15738 VkBool32 detailedMap)
15740 VMA_ASSERT(allocator && ppStatsString);
15741 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15743 VmaStringBuilder sb(allocator);
15745 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
15746 json.BeginObject();
15749 allocator->CalculateStats(&stats);
15751 json.WriteString(
"Total");
15752 VmaPrintStatInfo(json, stats.
total);
// Per-heap section.
15754 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
15756 json.BeginString(
"Heap ");
15757 json.ContinueString(heapIndex);
15759 json.BeginObject();
15761 json.WriteString(
"Size");
15762 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
15764 json.WriteString(
"Flags");
15765 json.BeginArray(
true);
15766 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
15768 json.WriteString(
"DEVICE_LOCAL");
15774 json.WriteString(
"Stats");
15775 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
// Memory types belonging to this heap.
15778 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
15780 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
15782 json.BeginString(
"Type ");
15783 json.ContinueString(typeIndex);
15786 json.BeginObject();
// Property flags rendered as an array of symbolic names.
15788 json.WriteString(
"Flags");
15789 json.BeginArray(
true);
15790 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
15791 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
15793 json.WriteString(
"DEVICE_LOCAL");
15795 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15797 json.WriteString(
"HOST_VISIBLE");
15799 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
15801 json.WriteString(
"HOST_COHERENT");
15803 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
15805 json.WriteString(
"HOST_CACHED");
15807 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
15809 json.WriteString(
"LAZILY_ALLOCATED");
15815 json.WriteString(
"Stats");
15816 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
// Optional detailed block/allocation map.
15825 if(detailedMap == VK_TRUE)
15827 allocator->PrintDetailedMap(json);
// Copy the builder's buffer into a caller-owned, NUL-terminated string.
15833 const size_t len = sb.GetLength();
15834 char*
const pChars = vma_new_array(allocator,
char, len + 1);
15837 memcpy(pChars, sb.GetData(), len);
15839 pChars[len] =
'\0';
15840 *ppStatsString = pChars;
15845 char* pStatsString)
15847 if(pStatsString != VMA_NULL)
15849 VMA_ASSERT(allocator);
15850 size_t len = strlen(pStatsString);
15851 vma_delete_array(allocator, pStatsString, len + 1);
15855 #endif // #if VMA_STATS_STRING_ENABLED 15862 uint32_t memoryTypeBits,
15864 uint32_t* pMemoryTypeIndex)
15866 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15867 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15868 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15875 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
15876 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
15881 preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15885 switch(pAllocationCreateInfo->
usage)
15890 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15892 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15896 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
15899 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15900 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15902 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15906 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15907 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
15913 *pMemoryTypeIndex = UINT32_MAX;
15914 uint32_t minCost = UINT32_MAX;
15915 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
15916 memTypeIndex < allocator->GetMemoryTypeCount();
15917 ++memTypeIndex, memTypeBit <<= 1)
15920 if((memTypeBit & memoryTypeBits) != 0)
15922 const VkMemoryPropertyFlags currFlags =
15923 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
15925 if((requiredFlags & ~currFlags) == 0)
15928 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
15930 if(currCost < minCost)
15932 *pMemoryTypeIndex = memTypeIndex;
15937 minCost = currCost;
15942 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
15947 const VkBufferCreateInfo* pBufferCreateInfo,
15949 uint32_t* pMemoryTypeIndex)
15951 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15952 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
15953 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15954 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15956 const VkDevice hDev = allocator->m_hDevice;
15957 VkBuffer hBuffer = VK_NULL_HANDLE;
15958 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
15959 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
15960 if(res == VK_SUCCESS)
15962 VkMemoryRequirements memReq = {};
15963 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
15964 hDev, hBuffer, &memReq);
15968 memReq.memoryTypeBits,
15969 pAllocationCreateInfo,
15972 allocator->GetVulkanFunctions().vkDestroyBuffer(
15973 hDev, hBuffer, allocator->GetAllocationCallbacks());
15980 const VkImageCreateInfo* pImageCreateInfo,
15982 uint32_t* pMemoryTypeIndex)
15984 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15985 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
15986 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15987 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15989 const VkDevice hDev = allocator->m_hDevice;
15990 VkImage hImage = VK_NULL_HANDLE;
15991 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
15992 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
15993 if(res == VK_SUCCESS)
15995 VkMemoryRequirements memReq = {};
15996 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
15997 hDev, hImage, &memReq);
16001 memReq.memoryTypeBits,
16002 pAllocationCreateInfo,
16005 allocator->GetVulkanFunctions().vkDestroyImage(
16006 hDev, hImage, allocator->GetAllocationCallbacks());
16016 VMA_ASSERT(allocator && pCreateInfo && pPool);
16018 VMA_DEBUG_LOG(
"vmaCreatePool");
16020 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16022 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
16024 #if VMA_RECORDING_ENABLED 16025 if(allocator->GetRecorder() != VMA_NULL)
16027 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
16038 VMA_ASSERT(allocator);
16040 if(pool == VK_NULL_HANDLE)
16045 VMA_DEBUG_LOG(
"vmaDestroyPool");
16047 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16049 #if VMA_RECORDING_ENABLED 16050 if(allocator->GetRecorder() != VMA_NULL)
16052 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
16056 allocator->DestroyPool(pool);
16064 VMA_ASSERT(allocator && pool && pPoolStats);
16066 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16068 allocator->GetPoolStats(pool, pPoolStats);
16074 size_t* pLostAllocationCount)
16076 VMA_ASSERT(allocator && pool);
16078 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16080 #if VMA_RECORDING_ENABLED 16081 if(allocator->GetRecorder() != VMA_NULL)
16083 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
16087 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
16092 VMA_ASSERT(allocator && pool);
16094 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16096 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
16098 return allocator->CheckPoolCorruption(pool);
16103 const VkMemoryRequirements* pVkMemoryRequirements,
16108 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
16110 VMA_DEBUG_LOG(
"vmaAllocateMemory");
16112 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16114 VkResult result = allocator->AllocateMemory(
16115 *pVkMemoryRequirements,
16121 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16125 #if VMA_RECORDING_ENABLED 16126 if(allocator->GetRecorder() != VMA_NULL)
16128 allocator->GetRecorder()->RecordAllocateMemory(
16129 allocator->GetCurrentFrameIndex(),
16130 *pVkMemoryRequirements,
16136 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
16138 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16146 const VkMemoryRequirements* pVkMemoryRequirements,
16148 size_t allocationCount,
16152 if(allocationCount == 0)
16157 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
16159 VMA_DEBUG_LOG(
"vmaAllocateMemoryPages");
16161 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16163 VkResult result = allocator->AllocateMemory(
16164 *pVkMemoryRequirements,
16170 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16174 #if VMA_RECORDING_ENABLED 16175 if(allocator->GetRecorder() != VMA_NULL)
16177 allocator->GetRecorder()->RecordAllocateMemoryPages(
16178 allocator->GetCurrentFrameIndex(),
16179 *pVkMemoryRequirements,
16181 (uint64_t)allocationCount,
16186 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
16188 for(
size_t i = 0; i < allocationCount; ++i)
16190 allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
16204 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16206 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
16208 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16210 VkMemoryRequirements vkMemReq = {};
16211 bool requiresDedicatedAllocation =
false;
16212 bool prefersDedicatedAllocation =
false;
16213 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
16214 requiresDedicatedAllocation,
16215 prefersDedicatedAllocation);
16217 VkResult result = allocator->AllocateMemory(
16219 requiresDedicatedAllocation,
16220 prefersDedicatedAllocation,
16224 VMA_SUBALLOCATION_TYPE_BUFFER,
16228 #if VMA_RECORDING_ENABLED 16229 if(allocator->GetRecorder() != VMA_NULL)
16231 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
16232 allocator->GetCurrentFrameIndex(),
16234 requiresDedicatedAllocation,
16235 prefersDedicatedAllocation,
16241 if(pAllocationInfo && result == VK_SUCCESS)
16243 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16256 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16258 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
16260 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16262 VkMemoryRequirements vkMemReq = {};
16263 bool requiresDedicatedAllocation =
false;
16264 bool prefersDedicatedAllocation =
false;
16265 allocator->GetImageMemoryRequirements(image, vkMemReq,
16266 requiresDedicatedAllocation, prefersDedicatedAllocation);
16268 VkResult result = allocator->AllocateMemory(
16270 requiresDedicatedAllocation,
16271 prefersDedicatedAllocation,
16275 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
16279 #if VMA_RECORDING_ENABLED 16280 if(allocator->GetRecorder() != VMA_NULL)
16282 allocator->GetRecorder()->RecordAllocateMemoryForImage(
16283 allocator->GetCurrentFrameIndex(),
16285 requiresDedicatedAllocation,
16286 prefersDedicatedAllocation,
16292 if(pAllocationInfo && result == VK_SUCCESS)
16294 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16304 VMA_ASSERT(allocator);
16306 if(allocation == VK_NULL_HANDLE)
16311 VMA_DEBUG_LOG(
"vmaFreeMemory");
16313 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16315 #if VMA_RECORDING_ENABLED 16316 if(allocator->GetRecorder() != VMA_NULL)
16318 allocator->GetRecorder()->RecordFreeMemory(
16319 allocator->GetCurrentFrameIndex(),
16324 allocator->FreeMemory(
16331 size_t allocationCount,
16334 if(allocationCount == 0)
16339 VMA_ASSERT(allocator);
16341 VMA_DEBUG_LOG(
"vmaFreeMemoryPages");
16343 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16345 #if VMA_RECORDING_ENABLED 16346 if(allocator->GetRecorder() != VMA_NULL)
16348 allocator->GetRecorder()->RecordFreeMemoryPages(
16349 allocator->GetCurrentFrameIndex(),
16350 (uint64_t)allocationCount,
16355 allocator->FreeMemory(allocationCount, pAllocations);
16361 VkDeviceSize newSize)
16363 VMA_ASSERT(allocator && allocation);
16365 VMA_DEBUG_LOG(
"vmaResizeAllocation");
16367 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16369 #if VMA_RECORDING_ENABLED 16370 if(allocator->GetRecorder() != VMA_NULL)
16372 allocator->GetRecorder()->RecordResizeAllocation(
16373 allocator->GetCurrentFrameIndex(),
16379 return allocator->ResizeAllocation(allocation, newSize);
16387 VMA_ASSERT(allocator && allocation && pAllocationInfo);
16389 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16391 #if VMA_RECORDING_ENABLED 16392 if(allocator->GetRecorder() != VMA_NULL)
16394 allocator->GetRecorder()->RecordGetAllocationInfo(
16395 allocator->GetCurrentFrameIndex(),
16400 allocator->GetAllocationInfo(allocation, pAllocationInfo);
16407 VMA_ASSERT(allocator && allocation);
16409 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16411 #if VMA_RECORDING_ENABLED 16412 if(allocator->GetRecorder() != VMA_NULL)
16414 allocator->GetRecorder()->RecordTouchAllocation(
16415 allocator->GetCurrentFrameIndex(),
16420 return allocator->TouchAllocation(allocation);
16428 VMA_ASSERT(allocator && allocation);
16430 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16432 allocation->SetUserData(allocator, pUserData);
16434 #if VMA_RECORDING_ENABLED 16435 if(allocator->GetRecorder() != VMA_NULL)
16437 allocator->GetRecorder()->RecordSetAllocationUserData(
16438 allocator->GetCurrentFrameIndex(),
16449 VMA_ASSERT(allocator && pAllocation);
16451 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
16453 allocator->CreateLostAllocation(pAllocation);
16455 #if VMA_RECORDING_ENABLED 16456 if(allocator->GetRecorder() != VMA_NULL)
16458 allocator->GetRecorder()->RecordCreateLostAllocation(
16459 allocator->GetCurrentFrameIndex(),
16470 VMA_ASSERT(allocator && allocation && ppData);
16472 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16474 VkResult res = allocator->Map(allocation, ppData);
16476 #if VMA_RECORDING_ENABLED 16477 if(allocator->GetRecorder() != VMA_NULL)
16479 allocator->GetRecorder()->RecordMapMemory(
16480 allocator->GetCurrentFrameIndex(),
16492 VMA_ASSERT(allocator && allocation);
16494 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16496 #if VMA_RECORDING_ENABLED 16497 if(allocator->GetRecorder() != VMA_NULL)
16499 allocator->GetRecorder()->RecordUnmapMemory(
16500 allocator->GetCurrentFrameIndex(),
16505 allocator->Unmap(allocation);
16510 VMA_ASSERT(allocator && allocation);
16512 VMA_DEBUG_LOG(
"vmaFlushAllocation");
16514 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16516 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
16518 #if VMA_RECORDING_ENABLED 16519 if(allocator->GetRecorder() != VMA_NULL)
16521 allocator->GetRecorder()->RecordFlushAllocation(
16522 allocator->GetCurrentFrameIndex(),
16523 allocation, offset, size);
16530 VMA_ASSERT(allocator && allocation);
16532 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
16534 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16536 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
16538 #if VMA_RECORDING_ENABLED 16539 if(allocator->GetRecorder() != VMA_NULL)
16541 allocator->GetRecorder()->RecordInvalidateAllocation(
16542 allocator->GetCurrentFrameIndex(),
16543 allocation, offset, size);
16550 VMA_ASSERT(allocator);
16552 VMA_DEBUG_LOG(
"vmaCheckCorruption");
16554 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16556 return allocator->CheckCorruption(memoryTypeBits);
16562 size_t allocationCount,
16563 VkBool32* pAllocationsChanged,
16573 if(pDefragmentationInfo != VMA_NULL)
16587 if(res == VK_NOT_READY)
16600 VMA_ASSERT(allocator && pInfo && pContext);
16611 VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->
poolCount, pInfo->
pPools));
16613 VMA_DEBUG_LOG(
"vmaDefragmentationBegin");
16615 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16617 VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);
16619 #if VMA_RECORDING_ENABLED 16620 if(allocator->GetRecorder() != VMA_NULL)
16622 allocator->GetRecorder()->RecordDefragmentationBegin(
16623 allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
16634 VMA_ASSERT(allocator);
16636 VMA_DEBUG_LOG(
"vmaDefragmentationEnd");
16638 if(context != VK_NULL_HANDLE)
16640 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16642 #if VMA_RECORDING_ENABLED 16643 if(allocator->GetRecorder() != VMA_NULL)
16645 allocator->GetRecorder()->RecordDefragmentationEnd(
16646 allocator->GetCurrentFrameIndex(), context);
16650 return allocator->DefragmentationEnd(context);
16663 VMA_ASSERT(allocator && allocation && buffer);
16665 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
16667 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16669 return allocator->BindBufferMemory(allocation, buffer);
16677 VMA_ASSERT(allocator && allocation && image);
16679 VMA_DEBUG_LOG(
"vmaBindImageMemory");
16681 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16683 return allocator->BindImageMemory(allocation, image);
16688 const VkBufferCreateInfo* pBufferCreateInfo,
16694 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
16696 if(pBufferCreateInfo->size == 0)
16698 return VK_ERROR_VALIDATION_FAILED_EXT;
16701 VMA_DEBUG_LOG(
"vmaCreateBuffer");
16703 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16705 *pBuffer = VK_NULL_HANDLE;
16706 *pAllocation = VK_NULL_HANDLE;
16709 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
16710 allocator->m_hDevice,
16712 allocator->GetAllocationCallbacks(),
16717 VkMemoryRequirements vkMemReq = {};
16718 bool requiresDedicatedAllocation =
false;
16719 bool prefersDedicatedAllocation =
false;
16720 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
16721 requiresDedicatedAllocation, prefersDedicatedAllocation);
16725 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
16727 VMA_ASSERT(vkMemReq.alignment %
16728 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
16730 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
16732 VMA_ASSERT(vkMemReq.alignment %
16733 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
16735 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
16737 VMA_ASSERT(vkMemReq.alignment %
16738 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
16742 res = allocator->AllocateMemory(
16744 requiresDedicatedAllocation,
16745 prefersDedicatedAllocation,
16748 *pAllocationCreateInfo,
16749 VMA_SUBALLOCATION_TYPE_BUFFER,
16753 #if VMA_RECORDING_ENABLED 16754 if(allocator->GetRecorder() != VMA_NULL)
16756 allocator->GetRecorder()->RecordCreateBuffer(
16757 allocator->GetCurrentFrameIndex(),
16758 *pBufferCreateInfo,
16759 *pAllocationCreateInfo,
16769 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
16774 #if VMA_STATS_STRING_ENABLED 16775 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
16777 if(pAllocationInfo != VMA_NULL)
16779 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16784 allocator->FreeMemory(
16787 *pAllocation = VK_NULL_HANDLE;
16788 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16789 *pBuffer = VK_NULL_HANDLE;
16792 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16793 *pBuffer = VK_NULL_HANDLE;
16804 VMA_ASSERT(allocator);
16806 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
16811 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
16813 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16815 #if VMA_RECORDING_ENABLED 16816 if(allocator->GetRecorder() != VMA_NULL)
16818 allocator->GetRecorder()->RecordDestroyBuffer(
16819 allocator->GetCurrentFrameIndex(),
16824 if(buffer != VK_NULL_HANDLE)
16826 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
16829 if(allocation != VK_NULL_HANDLE)
16831 allocator->FreeMemory(
16839 const VkImageCreateInfo* pImageCreateInfo,
16845 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
16847 if(pImageCreateInfo->extent.width == 0 ||
16848 pImageCreateInfo->extent.height == 0 ||
16849 pImageCreateInfo->extent.depth == 0 ||
16850 pImageCreateInfo->mipLevels == 0 ||
16851 pImageCreateInfo->arrayLayers == 0)
16853 return VK_ERROR_VALIDATION_FAILED_EXT;
16856 VMA_DEBUG_LOG(
"vmaCreateImage");
16858 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16860 *pImage = VK_NULL_HANDLE;
16861 *pAllocation = VK_NULL_HANDLE;
16864 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
16865 allocator->m_hDevice,
16867 allocator->GetAllocationCallbacks(),
16871 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
16872 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
16873 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
16876 VkMemoryRequirements vkMemReq = {};
16877 bool requiresDedicatedAllocation =
false;
16878 bool prefersDedicatedAllocation =
false;
16879 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
16880 requiresDedicatedAllocation, prefersDedicatedAllocation);
16882 res = allocator->AllocateMemory(
16884 requiresDedicatedAllocation,
16885 prefersDedicatedAllocation,
16888 *pAllocationCreateInfo,
16893 #if VMA_RECORDING_ENABLED 16894 if(allocator->GetRecorder() != VMA_NULL)
16896 allocator->GetRecorder()->RecordCreateImage(
16897 allocator->GetCurrentFrameIndex(),
16899 *pAllocationCreateInfo,
16909 res = allocator->BindImageMemory(*pAllocation, *pImage);
16914 #if VMA_STATS_STRING_ENABLED 16915 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
16917 if(pAllocationInfo != VMA_NULL)
16919 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16924 allocator->FreeMemory(
16927 *pAllocation = VK_NULL_HANDLE;
16928 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
16929 *pImage = VK_NULL_HANDLE;
16932 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
16933 *pImage = VK_NULL_HANDLE;
16944 VMA_ASSERT(allocator);
16946 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
16951 VMA_DEBUG_LOG(
"vmaDestroyImage");
16953 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16955 #if VMA_RECORDING_ENABLED 16956 if(allocator->GetRecorder() != VMA_NULL)
16958 allocator->GetRecorder()->RecordDestroyImage(
16959 allocator->GetCurrentFrameIndex(),
16964 if(image != VK_NULL_HANDLE)
16966 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
16968 if(allocation != VK_NULL_HANDLE)
16970 allocator->FreeMemory(
16976 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1753
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:2053
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1811
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side,...
Definition: vk_mem_alloc.h:2864
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Deprecated. Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
Definition: vk_mem_alloc.h:1785
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2384
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1765
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:2015
Definition: vk_mem_alloc.h:2119
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:2817
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1757
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2484
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1808
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2900
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2273
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1652
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2365
Definition: vk_mem_alloc.h:2090
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:2820
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1746
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2172
Definition: vk_mem_alloc.h:2042
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1820
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2301
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1.
Definition: vk_mem_alloc.h:1874
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1805
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2046
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1946
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1762
VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:2854
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1945
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2904
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(),...
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1837
VmaStatInfo total
Definition: vk_mem_alloc.h:1955
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2912
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2156
Definition: vk_mem_alloc.h:2114
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2895
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1763
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1688
Represents main object of this library initialized.
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1814
void vmaFreeMemoryPages(VmaAllocator allocator, size_t allocationCount, VmaAllocation *pAllocations)
Frees memory and destroys multiple allocations.
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2315
Definition: vk_mem_alloc.h:2309
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:1769
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1881
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2494
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1758
VkResult vmaDefragmentationBegin(VmaAllocator allocator, const VmaDefragmentationInfo2 *pInfo, VmaDefragmentationStats *pStats, VmaDefragmentationContext *pContext)
Begins defragmentation process.
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1783
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2193
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2335
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost.
Definition: vk_mem_alloc.h:2371
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1744
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2318
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2869
VmaMemoryUsage
Definition: vk_mem_alloc.h:1993
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:2829
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2890
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects.
Definition: vk_mem_alloc.h:2908
Definition: vk_mem_alloc.h:2032
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2180
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1761
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
VkResult vmaDefragmentationEnd(VmaAllocator allocator, VmaDefragmentationContext context)
Ends defragmentation process.
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1951
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1694
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:2808
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
Definition: vk_mem_alloc.h:2806
Definition: vk_mem_alloc.h:2140
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:2835
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1715
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1787
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1720
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2910
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2167
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2381
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1754
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1934
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes....
Definition: vk_mem_alloc.h:2330
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1707
Definition: vk_mem_alloc.h:2305
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:2097
Opaque object that represents a started defragmentation process.
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1947
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1711
Definition: vk_mem_alloc.h:2130
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2321
Definition: vk_mem_alloc.h:2041
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1760
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2162
Definition: vk_mem_alloc.h:2153
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1937
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1756
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2343
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1823
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2374
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2151
VkDeviceSize maxCpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2859
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2186
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1862
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1953
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
Definition: vk_mem_alloc.h:2077
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1946
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1767
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1793
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use.
Definition: vk_mem_alloc.h:2805
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:2883
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1709
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1766
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2357
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1759
Definition: vk_mem_alloc.h:2108
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of an Allocator to be created.
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1801
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2508
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB....
Definition: vk_mem_alloc.h:1817
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1946
VkResult vmaAllocateMemoryPages(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, size_t allocationCount, VmaAllocation *pAllocations, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation for multiple allocation objects at once.
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1943
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2362
Parameters for defragmentation.
Definition: vk_mem_alloc.h:2814
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions.
Definition: vk_mem_alloc.h:2123
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory,...
Definition: vk_mem_alloc.h:2489
Definition: vk_mem_alloc.h:2137
Definition: vk_mem_alloc.h:2149
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2906
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1752
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1941
Definition: vk_mem_alloc.h:1998
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2311
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1790
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1939
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1764
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1768
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2064
Definition: vk_mem_alloc.h:2144
Definition: vk_mem_alloc.h:2025
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2503
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1742
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1755
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2290
VkResult vmaResizeAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize newSize)
Tries to resize an allocation in place, if there is enough free memory after it.
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2470
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:2134
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2255
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1947
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame.
Definition: vk_mem_alloc.h:2103
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1777
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1954
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2368
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1947
struct VmaDefragmentationInfo2 VmaDefragmentationInfo2
Parameters for defragmentation.
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side,...
Definition: vk_mem_alloc.h:2874
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2475
uint32_t poolCount
Number of pools in pPools array.
Definition: vk_mem_alloc.h:2838