23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1648 #ifndef VMA_RECORDING_ENABLED 1650 #define VMA_RECORDING_ENABLED 1 1652 #define VMA_RECORDING_ENABLED 0 1657 #define NOMINMAX // For windows.h 1661 #include <vulkan/vulkan.h> 1664 #if VMA_RECORDING_ENABLED 1665 #include <windows.h> 1668 #if !defined(VMA_DEDICATED_ALLOCATION) 1669 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1670 #define VMA_DEDICATED_ALLOCATION 1 1672 #define VMA_DEDICATED_ALLOCATION 0 1690 uint32_t memoryType,
1691 VkDeviceMemory memory,
1696 uint32_t memoryType,
1697 VkDeviceMemory memory,
1770 #if VMA_DEDICATED_ALLOCATION 1771 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1772 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1899 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1907 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1917 uint32_t memoryTypeIndex,
1918 VkMemoryPropertyFlags* pFlags);
1930 uint32_t frameIndex);
1963 #define VMA_STATS_STRING_ENABLED 1 1965 #if VMA_STATS_STRING_ENABLED 1972 char** ppStatsString,
1973 VkBool32 detailedMap);
1977 char* pStatsString);
1979 #endif // #if VMA_STATS_STRING_ENABLED 2206 uint32_t memoryTypeBits,
2208 uint32_t* pMemoryTypeIndex);
2224 const VkBufferCreateInfo* pBufferCreateInfo,
2226 uint32_t* pMemoryTypeIndex);
2242 const VkImageCreateInfo* pImageCreateInfo,
2244 uint32_t* pMemoryTypeIndex);
2416 size_t* pLostAllocationCount);
2515 const VkMemoryRequirements* pVkMemoryRequirements,
2541 const VkMemoryRequirements* pVkMemoryRequirements,
2543 size_t allocationCount,
2588 size_t allocationCount,
2614 VkDeviceSize newSize);
2983 size_t allocationCount,
2984 VkBool32* pAllocationsChanged,
3050 const VkBufferCreateInfo* pBufferCreateInfo,
3075 const VkImageCreateInfo* pImageCreateInfo,
3101 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 3104 #if defined(__cplusplus) && defined(__INTELLISENSE__) 3105 #define VMA_IMPLEMENTATION 3108 #ifdef VMA_IMPLEMENTATION 3109 #undef VMA_IMPLEMENTATION 3131 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 3132 #define VMA_STATIC_VULKAN_FUNCTIONS 1 3144 #if VMA_USE_STL_CONTAINERS 3145 #define VMA_USE_STL_VECTOR 1 3146 #define VMA_USE_STL_UNORDERED_MAP 1 3147 #define VMA_USE_STL_LIST 1 3150 #ifndef VMA_USE_STL_SHARED_MUTEX 3152 #if __cplusplus >= 201703L 3153 #define VMA_USE_STL_SHARED_MUTEX 1 3157 #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L 3158 #define VMA_USE_STL_SHARED_MUTEX 1 3160 #define VMA_USE_STL_SHARED_MUTEX 0 3164 #if VMA_USE_STL_VECTOR 3168 #if VMA_USE_STL_UNORDERED_MAP 3169 #include <unordered_map> 3172 #if VMA_USE_STL_LIST 3181 #include <algorithm> 3187 #define VMA_NULL nullptr 3190 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16) 3192 void *aligned_alloc(
size_t alignment,
size_t size)
3195 if(alignment <
sizeof(
void*))
3197 alignment =
sizeof(
void*);
3200 return memalign(alignment, size);
3202 #elif defined(__APPLE__) || defined(__ANDROID__) 3204 void *aligned_alloc(
size_t alignment,
size_t size)
3207 if(alignment <
sizeof(
void*))
3209 alignment =
sizeof(
void*);
3213 if(posix_memalign(&pointer, alignment, size) == 0)
3227 #define VMA_ASSERT(expr) assert(expr) 3229 #define VMA_ASSERT(expr) 3235 #ifndef VMA_HEAVY_ASSERT 3237 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 3239 #define VMA_HEAVY_ASSERT(expr) 3243 #ifndef VMA_ALIGN_OF 3244 #define VMA_ALIGN_OF(type) (__alignof(type)) 3247 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 3249 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 3251 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 3255 #ifndef VMA_SYSTEM_FREE 3257 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 3259 #define VMA_SYSTEM_FREE(ptr) free(ptr) 3264 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 3268 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 3272 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 3276 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 3279 #ifndef VMA_DEBUG_LOG 3280 #define VMA_DEBUG_LOG(format, ...) 3290 #if VMA_STATS_STRING_ENABLED 3291 static inline void VmaUint32ToStr(
// Formatting helpers used when building JSON stats strings (only compiled
// when VMA_STATS_STRING_ENABLED); each writes a textual representation into
// outStr, at most strLen bytes, NUL-terminated by snprintf.
char* outStr,
size_t strLen, uint32_t num)
3293 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// 64-bit variant; cast to unsigned long long so "%llu" is portable.
3295 static inline void VmaUint64ToStr(
char* outStr,
size_t strLen, uint64_t num)
3297 snprintf(outStr, strLen,
"%llu", static_cast<unsigned long long>(num));
// Pointer variant; "%p" output format is implementation-defined.
3299 static inline void VmaPtrToStr(
char* outStr,
size_t strLen,
const void* ptr)
3301 snprintf(outStr, strLen,
"%p", ptr);
// VmaMutex: thin wrapper over std::mutex so the mutex type can be swapped
// out by defining VMA_MUTEX before including this header.
3309 void Lock() { m_Mutex.lock(); }
3310 void Unlock() { m_Mutex.unlock(); }
// VmaRWMutex: read-write lock abstraction with three fallbacks:
// 1) std::shared_mutex when C++17 is available (VMA_USE_STL_SHARED_MUTEX),
3314 #define VMA_MUTEX VmaMutex 3318 #ifndef VMA_RW_MUTEX 3319 #if VMA_USE_STL_SHARED_MUTEX 3321 #include <shared_mutex> 3325 void LockRead() { m_Mutex.lock_shared(); }
3326 void UnlockRead() { m_Mutex.unlock_shared(); }
3327 void LockWrite() { m_Mutex.lock(); }
3328 void UnlockWrite() { m_Mutex.unlock(); }
3330 std::shared_mutex m_Mutex;
// 2) Win32 slim reader/writer (SRW) lock on Windows,
3332 #define VMA_RW_MUTEX VmaRWMutex 3333 #elif defined(_WIN32) 3338 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
3339 void LockRead() { AcquireSRWLockShared(&m_Lock); }
3340 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
3341 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
3342 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
// 3) otherwise degrade to an exclusive VMA_MUTEX (readers exclude each other too).
3346 #define VMA_RW_MUTEX VmaRWMutex 3352 void LockRead() { m_Mutex.Lock(); }
3353 void UnlockRead() { m_Mutex.Unlock(); }
3354 void LockWrite() { m_Mutex.Lock(); }
3355 void UnlockWrite() { m_Mutex.Unlock(); }
3359 #define VMA_RW_MUTEX VmaRWMutex 3360 #endif // #if VMA_USE_STL_SHARED_MUTEX 3361 #endif // #ifndef VMA_RW_MUTEX 3371 #ifndef VMA_ATOMIC_UINT32 3372 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 3375 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 3380 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 3383 #ifndef VMA_DEBUG_ALIGNMENT 3388 #define VMA_DEBUG_ALIGNMENT (1) 3391 #ifndef VMA_DEBUG_MARGIN 3396 #define VMA_DEBUG_MARGIN (0) 3399 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 3404 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 3407 #ifndef VMA_DEBUG_DETECT_CORRUPTION 3413 #define VMA_DEBUG_DETECT_CORRUPTION (0) 3416 #ifndef VMA_DEBUG_GLOBAL_MUTEX 3421 #define VMA_DEBUG_GLOBAL_MUTEX (0) 3424 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 3429 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 3432 #ifndef VMA_SMALL_HEAP_MAX_SIZE 3433 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 3437 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 3438 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 3442 #ifndef VMA_CLASS_NO_COPY 3443 #define VMA_CLASS_NO_COPY(className) \ 3445 className(const className&) = delete; \ 3446 className& operator=(const className&) = delete; 3449 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
// Magic number written into debug margins and checked later to detect heap corruption.
3452 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
// Byte patterns used to fill allocation memory on create/destroy when
// VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled.
3454 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
3455 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
// Bias added to internal allocation-strategy flag values to keep them outside
// the range of the public flags.
3461 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
// All-null VkAllocationCallbacks, used when the caller provided no CPU
// allocation callbacks.
3463 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
3464 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (population count), using the
// classic branch-free parallel bit-summing technique.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
// Aligns the given value up to the nearest multiple of align.
// E.g. VmaAlignUp(11, 8) == 16. align must be a non-zero power of 2 for
// correct results with the division trick used here.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}
// Aligns the given value down to the nearest multiple of align.
// E.g. VmaAlignDown(11, 8) == 8.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}
// Division of integers with mathematical rounding to nearest (half rounds up
// for positive values): adds y/2 before dividing.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
// Returns true if x is a power of 2 (note: also returns true for x == 0,
// matching the classic x & (x-1) test).
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}
// Returns the smallest power of 2 greater than or equal to v: smears the
// highest set bit into all lower positions, then adds 1.
static inline uint32_t VmaNextPow2(uint32_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v++;
    return v;
}
// 64-bit overload of VmaNextPow2: smallest power of 2 >= v.
static inline uint64_t VmaNextPow2(uint64_t v)
{
    v--;
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v++;
    return v;
}
// Returns the largest power of 2 less than or equal to v: smears the highest
// set bit downward, then isolates it with v ^ (v >> 1).
static inline uint32_t VmaPrevPow2(uint32_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v = v ^ (v >> 1);
    return v;
}
// 64-bit overload of VmaPrevPow2: largest power of 2 <= v.
static inline uint64_t VmaPrevPow2(uint64_t v)
{
    v |= v >> 1;
    v |= v >> 2;
    v |= v >> 4;
    v |= v >> 8;
    v |= v >> 16;
    v |= v >> 32;
    v = v ^ (v >> 1);
    return v;
}
3558 static inline bool VmaStrIsEmpty(
const char* pStr)
3560 return pStr == VMA_NULL || *pStr ==
'\0';
// Returns a human-readable name for a pool algorithm flag (used in JSON stats dumps).
// NOTE(review): body not visible in this excerpt — presumably a switch over
// VMA_POOL_CREATE_*_ALGORITHM_BIT values; confirm against full source.
3563 static const char* VmaAlgorithmToStr(uint32_t algorithm)
// Lomuto-style partition step for VmaQuickSort: uses the last element as the
// pivot, moves all elements for which cmp(elem, pivot) holds to the front,
// and returns the pivot's final position.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
3604 template<
typename Iterator,
typename Compare>
3605 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
3609 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
3610 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
3611 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
3615 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 3617 #endif // #ifndef VMA_SORT 3626 static inline bool VmaBlocksOnSamePage(
3627 VkDeviceSize resourceAOffset,
3628 VkDeviceSize resourceASize,
3629 VkDeviceSize resourceBOffset,
3630 VkDeviceSize pageSize)
3632 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3633 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3634 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3635 VkDeviceSize resourceBStart = resourceBOffset;
3636 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3637 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation; ordered so that
// VmaIsBufferImageGranularityConflict can compare types by value.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    // Force the underlying type to be at least 32-bit wide.
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
3657 static inline bool VmaIsBufferImageGranularityConflict(
3658 VmaSuballocationType suballocType1,
3659 VmaSuballocationType suballocType2)
3661 if(suballocType1 > suballocType2)
3663 VMA_SWAP(suballocType1, suballocType2);
3666 switch(suballocType1)
3668 case VMA_SUBALLOCATION_TYPE_FREE:
3670 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3672 case VMA_SUBALLOCATION_TYPE_BUFFER:
3674 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3675 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3676 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3678 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3679 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3680 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3681 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3683 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3684 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3692 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3694 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3695 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3696 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3698 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3702 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3704 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3705 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3706 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3708 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
// RAII lock guard for VMA_MUTEX. Locking is skipped entirely when useMutex
// is false (allocator configured as externally synchronized).
3719 VMA_CLASS_NO_COPY(VmaMutexLock)
3721 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
3722 m_pMutex(useMutex ? &mutex : VMA_NULL)
3723 {
if(m_pMutex) { m_pMutex->Lock(); } }
// Destructor unlocks only if the constructor actually locked (m_pMutex non-null).
3725 {
if(m_pMutex) { m_pMutex->Unlock(); } }
3727 VMA_MUTEX* m_pMutex;
3731 struct VmaMutexLockRead
3733 VMA_CLASS_NO_COPY(VmaMutexLockRead)
3735 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
3736 m_pMutex(useMutex ? &mutex : VMA_NULL)
3737 {
if(m_pMutex) { m_pMutex->LockRead(); } }
3738 ~VmaMutexLockRead() {
if(m_pMutex) { m_pMutex->UnlockRead(); } }
3740 VMA_RW_MUTEX* m_pMutex;
3744 struct VmaMutexLockWrite
3746 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
3748 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex) :
3749 m_pMutex(useMutex ? &mutex : VMA_NULL)
3750 {
if(m_pMutex) { m_pMutex->LockWrite(); } }
3751 ~VmaMutexLockWrite() {
if(m_pMutex) { m_pMutex->UnlockWrite(); } }
3753 VMA_RW_MUTEX* m_pMutex;
// Optional single global mutex guarding every public entry point
// (VMA_DEBUG_GLOBAL_MUTEX) — useful for debugging races; the lock macro
// expands to nothing in normal builds.
3756 #if VMA_DEBUG_GLOBAL_MUTEX 3757 static VMA_MUTEX gDebugGlobalMutex;
// Free suballocations smaller than this are not registered in the free list,
// keeping the list short.
3758 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3760 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3764 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over a sorted range [beg, end): returns an iterator to the
// first element for which cmp(element, key) is false (i.e. the first element
// not less than key), or end if no such element exists. Equivalent to
// std::lower_bound with a custom comparator.
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpLess cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
3799 template<
typename T>
3800 static bool VmaValidatePointerArray(uint32_t count,
const T* arr)
3802 for(uint32_t i = 0; i < count; ++i)
3804 const T iPtr = arr[i];
3805 if(iPtr == VMA_NULL)
3809 for(uint32_t j = i + 1; j < count; ++j)
3823 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3825 if((pAllocationCallbacks != VMA_NULL) &&
3826 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3828 return (*pAllocationCallbacks->pfnAllocation)(
3829 pAllocationCallbacks->pUserData,
3832 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3836 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3840 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3842 if((pAllocationCallbacks != VMA_NULL) &&
3843 (pAllocationCallbacks->pfnFree != VMA_NULL))
3845 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3849 VMA_SYSTEM_FREE(ptr);
3853 template<
typename T>
3854 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3856 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3859 template<
typename T>
3860 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3862 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
3865 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3867 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3869 template<
typename T>
3870 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
3873 VmaFree(pAllocationCallbacks, ptr);
3876 template<
typename T>
3877 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
3881 for(
size_t i = count; i--; )
3885 VmaFree(pAllocationCallbacks, ptr);
3890 template<
typename T>
3891 class VmaStlAllocator
3894 const VkAllocationCallbacks*
const m_pCallbacks;
3895 typedef T value_type;
3897 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
3898 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
3900 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3901 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
3903 template<
typename U>
3904 bool operator==(
const VmaStlAllocator<U>& rhs)
const 3906 return m_pCallbacks == rhs.m_pCallbacks;
3908 template<
typename U>
3909 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 3911 return m_pCallbacks != rhs.m_pCallbacks;
3914 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
3917 #if VMA_USE_STL_VECTOR 3919 #define VmaVector std::vector 3921 template<
typename T,
typename allocatorT>
3922 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
3924 vec.insert(vec.begin() + index, item);
// std::vector adapter: erase the element at the given index.
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
// VmaVector: minimal std::vector replacement used when VMA_USE_STL_VECTOR is
// off. T is assumed trivially copyable/movable — growth and element shifting
// use memcpy/memmove, and destructors of removed elements are NOT run.
3933 #else // #if VMA_USE_STL_VECTOR 3938 template<
typename T,
typename AllocatorT>
3942 typedef T value_type;
// Constructs an empty vector bound to the given callback-aware allocator.
3944 VmaVector(
const AllocatorT& allocator) :
3945 m_Allocator(allocator),
// Constructs with count default-uninitialized elements (storage only).
3952 VmaVector(
size_t count,
const AllocatorT& allocator) :
3953 m_Allocator(allocator),
3954 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
// Copy constructor: capacity is trimmed to exactly src.m_Count.
3960 VmaVector(
const VmaVector<T, AllocatorT>& src) :
3961 m_Allocator(src.m_Allocator),
3962 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
3963 m_Count(src.m_Count),
3964 m_Capacity(src.m_Count)
3968 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
3974 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
// Copy assignment: resizes then bitwise-copies the payload.
3977 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
3981 resize(rhs.m_Count);
3984 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
3990 bool empty()
const {
return m_Count == 0; }
3991 size_t size()
const {
return m_Count; }
3992 T* data() {
return m_pArray; }
3993 const T* data()
const {
return m_pArray; }
// Unchecked element access (bounds asserted only in heavy-assert builds).
3995 T& operator[](
size_t index)
3997 VMA_HEAVY_ASSERT(index < m_Count);
3998 return m_pArray[index];
4000 const T& operator[](
size_t index)
const 4002 VMA_HEAVY_ASSERT(index < m_Count);
4003 return m_pArray[index];
4008 VMA_HEAVY_ASSERT(m_Count > 0);
4011 const T& front()
const 4013 VMA_HEAVY_ASSERT(m_Count > 0);
4018 VMA_HEAVY_ASSERT(m_Count > 0);
4019 return m_pArray[m_Count - 1];
4021 const T& back()
const 4023 VMA_HEAVY_ASSERT(m_Count > 0);
4024 return m_pArray[m_Count - 1];
// reserve(): only shrinks capacity when freeMemory is true.
4027 void reserve(
size_t newCapacity,
bool freeMemory =
false)
4029 newCapacity = VMA_MAX(newCapacity, m_Count);
4031 if((newCapacity < m_Capacity) && !freeMemory)
4033 newCapacity = m_Capacity;
4036 if(newCapacity != m_Capacity)
4038 T*
// NOTE(review): passes m_Allocator where resize() below passes
// m_Allocator.m_pCallbacks — looks inconsistent/buggy; confirm against full source.
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
4041 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
4043 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4044 m_Capacity = newCapacity;
4045 m_pArray = newArray;
// resize(): grows capacity geometrically (x1.5, minimum 8) when needed;
// shrinks only when freeMemory is true.
4049 void resize(
size_t newCount,
bool freeMemory =
false)
4051 size_t newCapacity = m_Capacity;
4052 if(newCount > m_Capacity)
4054 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
4058 newCapacity = newCount;
4061 if(newCapacity != m_Capacity)
4063 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
4064 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
4065 if(elementsToCopy != 0)
4067 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
4069 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4070 m_Capacity = newCapacity;
4071 m_pArray = newArray;
4077 void clear(
bool freeMemory =
false)
4079 resize(0, freeMemory);
// insert(): shifts the tail right with memmove, then overwrites the slot.
4082 void insert(
size_t index,
const T& src)
4084 VMA_HEAVY_ASSERT(index <= m_Count);
4085 const size_t oldCount = size();
4086 resize(oldCount + 1);
4087 if(index < oldCount)
4089 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
4091 m_pArray[index] = src;
// remove(): shifts the tail left over the removed slot, then shrinks.
4094 void remove(
size_t index)
4096 VMA_HEAVY_ASSERT(index < m_Count);
4097 const size_t oldCount = size();
4098 if(index < oldCount - 1)
4100 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
4102 resize(oldCount - 1);
4105 void push_back(
const T& src)
4107 const size_t newIndex = size();
4108 resize(newIndex + 1);
4109 m_pArray[newIndex] = src;
4114 VMA_HEAVY_ASSERT(m_Count > 0);
// push_front/pop_front are O(n) — implemented via insert/remove at index 0.
4118 void push_front(
const T& src)
4125 VMA_HEAVY_ASSERT(m_Count > 0);
4129 typedef T* iterator;
4131 iterator begin() {
return m_pArray; }
4132 iterator end() {
return m_pArray + m_Count; }
4135 AllocatorT m_Allocator;
4141 template<
typename T,
typename allocatorT>
4142 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
4144 vec.insert(index, item);
4147 template<
typename T,
typename allocatorT>
4148 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
4153 #endif // #if VMA_USE_STL_VECTOR 4155 template<
typename CmpLess,
typename VectorT>
4156 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
4158 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4160 vector.data() + vector.size(),
4162 CmpLess()) - vector.data();
4163 VmaVectorInsert(vector, indexToInsert, value);
4164 return indexToInsert;
// Removes the first element equivalent to value (neither compares less than
// the other) from a sorted vector. Returns true if an element was removed.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
4185 template<
typename CmpLess,
typename IterT,
typename KeyT>
4186 IterT VmaVectorFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value)
4189 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
4190 beg, end, value, comparator);
4192 (!comparator(*it, value) && !comparator(value, *it)))
4207 template<
typename T>
4208 class VmaPoolAllocator
4210 VMA_CLASS_NO_COPY(VmaPoolAllocator)
4212 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
4213 ~VmaPoolAllocator();
4221 uint32_t NextFreeIndex;
4228 uint32_t FirstFreeIndex;
4231 const VkAllocationCallbacks* m_pAllocationCallbacks;
4232 size_t m_ItemsPerBlock;
4233 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
4235 ItemBlock& CreateNewBlock();
4238 template<
typename T>
4239 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
4240 m_pAllocationCallbacks(pAllocationCallbacks),
4241 m_ItemsPerBlock(itemsPerBlock),
4242 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
4244 VMA_ASSERT(itemsPerBlock > 0);
4247 template<
typename T>
4248 VmaPoolAllocator<T>::~VmaPoolAllocator()
4253 template<
typename T>
4254 void VmaPoolAllocator<T>::Clear()
4256 for(
size_t i = m_ItemBlocks.size(); i--; )
4257 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
4258 m_ItemBlocks.clear();
4261 template<
typename T>
4262 T* VmaPoolAllocator<T>::Alloc()
4264 for(
size_t i = m_ItemBlocks.size(); i--; )
4266 ItemBlock& block = m_ItemBlocks[i];
4268 if(block.FirstFreeIndex != UINT32_MAX)
4270 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
4271 block.FirstFreeIndex = pItem->NextFreeIndex;
4272 return &pItem->Value;
4277 ItemBlock& newBlock = CreateNewBlock();
4278 Item*
const pItem = &newBlock.pItems[0];
4279 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
4280 return &pItem->Value;
4283 template<
typename T>
4284 void VmaPoolAllocator<T>::Free(T* ptr)
4287 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
4289 ItemBlock& block = m_ItemBlocks[i];
4293 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
4296 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
4298 const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
4299 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
4300 block.FirstFreeIndex = index;
4304 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
4307 template<
typename T>
4308 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
4310 ItemBlock newBlock = {
4311 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
4313 m_ItemBlocks.push_back(newBlock);
4316 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
4317 newBlock.pItems[i].NextFreeIndex = i + 1;
4318 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
4319 return m_ItemBlocks.back();
// VmaRawList: doubly-linked list used when VMA_USE_STL_LIST is off. Nodes
// (VmaListItem) are recycled through a VmaPoolAllocator instead of being
// heap-allocated individually.
4325 #if VMA_USE_STL_LIST 4327 #define VmaList std::list 4329 #else // #if VMA_USE_STL_LIST 4331 template<
typename T>
4340 template<
typename T>
4343 VMA_CLASS_NO_COPY(VmaRawList)
4345 typedef VmaListItem<T> ItemType;
4347 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
// O(1) size/emptiness queries backed by a running element counter.
4351 size_t GetCount()
const {
return m_Count; }
4352 bool IsEmpty()
const {
return m_Count == 0; }
4354 ItemType* Front() {
return m_pFront; }
4355 const ItemType* Front()
const {
return m_pFront; }
4356 ItemType* Back() {
return m_pBack; }
4357 const ItemType* Back()
const {
return m_pBack; }
// Push overloads without a value leave Item::Value default/uninitialized;
// callers fill it afterwards.
4359 ItemType* PushBack();
4360 ItemType* PushFront();
4361 ItemType* PushBack(
const T& value);
4362 ItemType* PushFront(
const T& value);
// Insert relative to an existing node; a null pItem means push at the
// corresponding end of the list.
4367 ItemType* InsertBefore(ItemType* pItem);
4369 ItemType* InsertAfter(ItemType* pItem);
4371 ItemType* InsertBefore(ItemType* pItem,
const T& value);
4372 ItemType* InsertAfter(ItemType* pItem,
const T& value);
4374 void Remove(ItemType* pItem);
4377 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
// Node pool; 128 nodes per pool block.
4378 VmaPoolAllocator<ItemType> m_ItemAllocator;
4384 template<
typename T>
4385 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
4386 m_pAllocationCallbacks(pAllocationCallbacks),
4387 m_ItemAllocator(pAllocationCallbacks, 128),
4394 template<
typename T>
4395 VmaRawList<T>::~VmaRawList()
4401 template<
typename T>
4402 void VmaRawList<T>::Clear()
4404 if(IsEmpty() ==
false)
4406 ItemType* pItem = m_pBack;
4407 while(pItem != VMA_NULL)
4409 ItemType*
const pPrevItem = pItem->pPrev;
4410 m_ItemAllocator.Free(pItem);
4413 m_pFront = VMA_NULL;
4419 template<
typename T>
4420 VmaListItem<T>* VmaRawList<T>::PushBack()
4422 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4423 pNewItem->pNext = VMA_NULL;
4426 pNewItem->pPrev = VMA_NULL;
4427 m_pFront = pNewItem;
4433 pNewItem->pPrev = m_pBack;
4434 m_pBack->pNext = pNewItem;
4441 template<
typename T>
4442 VmaListItem<T>* VmaRawList<T>::PushFront()
4444 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4445 pNewItem->pPrev = VMA_NULL;
4448 pNewItem->pNext = VMA_NULL;
4449 m_pFront = pNewItem;
4455 pNewItem->pNext = m_pFront;
4456 m_pFront->pPrev = pNewItem;
4457 m_pFront = pNewItem;
4463 template<
typename T>
4464 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
4466 ItemType*
const pNewItem = PushBack();
4467 pNewItem->Value = value;
4471 template<
typename T>
4472 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
4474 ItemType*
const pNewItem = PushFront();
4475 pNewItem->Value = value;
4479 template<
typename T>
4480 void VmaRawList<T>::PopBack()
4482 VMA_HEAVY_ASSERT(m_Count > 0);
4483 ItemType*
const pBackItem = m_pBack;
4484 ItemType*
const pPrevItem = pBackItem->pPrev;
4485 if(pPrevItem != VMA_NULL)
4487 pPrevItem->pNext = VMA_NULL;
4489 m_pBack = pPrevItem;
4490 m_ItemAllocator.Free(pBackItem);
4494 template<
typename T>
4495 void VmaRawList<T>::PopFront()
4497 VMA_HEAVY_ASSERT(m_Count > 0);
4498 ItemType*
const pFrontItem = m_pFront;
4499 ItemType*
const pNextItem = pFrontItem->pNext;
4500 if(pNextItem != VMA_NULL)
4502 pNextItem->pPrev = VMA_NULL;
4504 m_pFront = pNextItem;
4505 m_ItemAllocator.Free(pFrontItem);
4509 template<
typename T>
4510 void VmaRawList<T>::Remove(ItemType* pItem)
4512 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4513 VMA_HEAVY_ASSERT(m_Count > 0);
4515 if(pItem->pPrev != VMA_NULL)
4517 pItem->pPrev->pNext = pItem->pNext;
4521 VMA_HEAVY_ASSERT(m_pFront == pItem);
4522 m_pFront = pItem->pNext;
4525 if(pItem->pNext != VMA_NULL)
4527 pItem->pNext->pPrev = pItem->pPrev;
4531 VMA_HEAVY_ASSERT(m_pBack == pItem);
4532 m_pBack = pItem->pPrev;
4535 m_ItemAllocator.Free(pItem);
4539 template<
typename T>
4540 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4542 if(pItem != VMA_NULL)
4544 ItemType*
const prevItem = pItem->pPrev;
4545 ItemType*
const newItem = m_ItemAllocator.Alloc();
4546 newItem->pPrev = prevItem;
4547 newItem->pNext = pItem;
4548 pItem->pPrev = newItem;
4549 if(prevItem != VMA_NULL)
4551 prevItem->pNext = newItem;
4555 VMA_HEAVY_ASSERT(m_pFront == pItem);
4565 template<
typename T>
4566 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4568 if(pItem != VMA_NULL)
4570 ItemType*
const nextItem = pItem->pNext;
4571 ItemType*
const newItem = m_ItemAllocator.Alloc();
4572 newItem->pNext = nextItem;
4573 newItem->pPrev = pItem;
4574 pItem->pNext = newItem;
4575 if(nextItem != VMA_NULL)
4577 nextItem->pPrev = newItem;
4581 VMA_HEAVY_ASSERT(m_pBack == pItem);
4591 template<
typename T>
4592 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4594 ItemType*
const newItem = InsertBefore(pItem);
4595 newItem->Value = value;
4599 template<
typename T>
4600 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4602 ItemType*
const newItem = InsertAfter(pItem);
4603 newItem->Value = value;
// VmaList: std::list-like facade over VmaRawList, providing mutable and
// const iterators. Iterators store both the owning list and the current node
// so that operator--() from end() can step to Back().
4607 template<
typename T,
typename AllocatorT>
4610 VMA_CLASS_NO_COPY(VmaList)
// iterator: dereference asserts the node is valid (not end()).
4621 T& operator*()
const 4623 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4624 return m_pItem->Value;
4626 T* operator->()
const 4628 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4629 return &m_pItem->Value;
4632 iterator& operator++()
4634 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4635 m_pItem = m_pItem->pNext;
// Decrement from end() (null node) steps to the last element.
4638 iterator& operator--()
4640 if(m_pItem != VMA_NULL)
4642 m_pItem = m_pItem->pPrev;
4646 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4647 m_pItem = m_pList->Back();
// Post-increment/decrement return a copy made before stepping.
4652 iterator operator++(
int)
4654 iterator result = *
this;
4658 iterator operator--(
int)
4660 iterator result = *
this;
// Comparisons are only meaningful between iterators of the same list.
4665 bool operator==(
const iterator& rhs)
const 4667 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4668 return m_pItem == rhs.m_pItem;
4670 bool operator!=(
const iterator& rhs)
const 4672 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4673 return m_pItem != rhs.m_pItem;
4677 VmaRawList<T>* m_pList;
4678 VmaListItem<T>* m_pItem;
// Private node constructor; only VmaList can mint iterators.
4680 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4686 friend class VmaList<T, AllocatorT>;
// const_iterator mirrors iterator, and converts implicitly from iterator.
4689 class const_iterator
4698 const_iterator(
const iterator& src) :
4699 m_pList(src.m_pList),
4700 m_pItem(src.m_pItem)
4704 const T& operator*()
const 4706 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4707 return m_pItem->Value;
4709 const T* operator->()
const 4711 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4712 return &m_pItem->Value;
4715 const_iterator& operator++()
4717 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4718 m_pItem = m_pItem->pNext;
4721 const_iterator& operator--()
4723 if(m_pItem != VMA_NULL)
4725 m_pItem = m_pItem->pPrev;
4729 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4730 m_pItem = m_pList->Back();
4735 const_iterator operator++(
int)
4737 const_iterator result = *
this;
4741 const_iterator operator--(
int)
4743 const_iterator result = *
this;
4748 bool operator==(
const const_iterator& rhs)
const 4750 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4751 return m_pItem == rhs.m_pItem;
4753 bool operator!=(
const const_iterator& rhs)
const 4755 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4756 return m_pItem != rhs.m_pItem;
4760 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4766 const VmaRawList<T>* m_pList;
4767 const VmaListItem<T>* m_pItem;
4769 friend class VmaList<T, AllocatorT>;
// VmaList public interface — thin forwarding wrappers over VmaRawList.
4772 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4774 bool empty()
const {
return m_RawList.IsEmpty(); }
4775 size_t size()
const {
return m_RawList.GetCount(); }
// end()/cend() are represented by a null node pointer.
4777 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4778 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4780 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4781 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4783 void clear() { m_RawList.Clear(); }
4784 void push_back(
const T& value) { m_RawList.PushBack(value); }
4785 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
// insert() places value before it, matching std::list semantics.
4786 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4789 VmaRawList<T> m_RawList;
4792 #endif // #if VMA_USE_STL_LIST 4800 #if VMA_USE_STL_UNORDERED_MAP 4802 #define VmaPair std::pair 4804 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4805 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4807 #else // #if VMA_USE_STL_UNORDERED_MAP 4809 template<
// Minimal std::pair replacement, used when VMA_USE_STL_UNORDERED_MAP is
// disabled (the STL path aliases VmaPair to std::pair instead).
template<typename T1, typename T2>
struct VmaPair
{
    T1 first;   // key component
    T2 second;  // value component

    // Value-initializes both components.
    VmaPair() : first(), second() { }
    // Copy-constructs both components from the given values.
    VmaPair(const T1& a, const T2& b) : first(a), second(b) { }
};
4822 template<
typename KeyT,
typename ValueT>
4826 typedef VmaPair<KeyT, ValueT> PairType;
4827 typedef PairType* iterator;
4829 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4831 iterator begin() {
return m_Vector.begin(); }
4832 iterator end() {
return m_Vector.end(); }
4834 void insert(
const PairType& pair);
4835 iterator find(
const KeyT& key);
4836 void erase(iterator it);
4839 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4842 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4844 template<
typename FirstT,
typename SecondT>
4845 struct VmaPairFirstLess
4847 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4849 return lhs.first < rhs.first;
4851 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4853 return lhs.first < rhsFirst;
4857 template<
typename KeyT,
typename ValueT>
4858 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4860 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4862 m_Vector.data() + m_Vector.size(),
4864 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4865 VmaVectorInsert(m_Vector, indexToInsert, pair);
4868 template<
typename KeyT,
typename ValueT>
4869 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4871 PairType* it = VmaBinaryFindFirstNotLess(
4873 m_Vector.data() + m_Vector.size(),
4875 VmaPairFirstLess<KeyT, ValueT>());
4876 if((it != m_Vector.end()) && (it->first == key))
4882 return m_Vector.end();
4886 template<
typename KeyT,
typename ValueT>
4887 void VmaMap<KeyT, ValueT>::erase(iterator it)
4889 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4892 #endif // #if VMA_USE_STL_UNORDERED_MAP 4898 class VmaDeviceMemoryBlock;
4900 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
4902 struct VmaAllocation_T
4904 VMA_CLASS_NO_COPY(VmaAllocation_T)
4906 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4910 FLAG_USER_DATA_STRING = 0x01,
4914 enum ALLOCATION_TYPE
4916 ALLOCATION_TYPE_NONE,
4917 ALLOCATION_TYPE_BLOCK,
4918 ALLOCATION_TYPE_DEDICATED,
4921 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
4924 m_pUserData(VMA_NULL),
4925 m_LastUseFrameIndex(currentFrameIndex),
4926 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
4927 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
4929 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
4931 #if VMA_STATS_STRING_ENABLED 4932 m_CreationFrameIndex = currentFrameIndex;
4933 m_BufferImageUsage = 0;
4939 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4942 VMA_ASSERT(m_pUserData == VMA_NULL);
4945 void InitBlockAllocation(
4947 VmaDeviceMemoryBlock* block,
4948 VkDeviceSize offset,
4949 VkDeviceSize alignment,
4951 VmaSuballocationType suballocationType,
4955 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4956 VMA_ASSERT(block != VMA_NULL);
4957 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4958 m_Alignment = alignment;
4960 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4961 m_SuballocationType = (uint8_t)suballocationType;
4962 m_BlockAllocation.m_hPool = hPool;
4963 m_BlockAllocation.m_Block = block;
4964 m_BlockAllocation.m_Offset = offset;
4965 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
4970 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4971 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
4972 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4973 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
4974 m_BlockAllocation.m_Block = VMA_NULL;
4975 m_BlockAllocation.m_Offset = 0;
4976 m_BlockAllocation.m_CanBecomeLost =
true;
4979 void ChangeBlockAllocation(
4981 VmaDeviceMemoryBlock* block,
4982 VkDeviceSize offset);
4984 void ChangeSize(VkDeviceSize newSize);
4985 void ChangeOffset(VkDeviceSize newOffset);
4988 void InitDedicatedAllocation(
4989 uint32_t memoryTypeIndex,
4990 VkDeviceMemory hMemory,
4991 VmaSuballocationType suballocationType,
4995 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4996 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
4997 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
5000 m_SuballocationType = (uint8_t)suballocationType;
5001 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
5002 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
5003 m_DedicatedAllocation.m_hMemory = hMemory;
5004 m_DedicatedAllocation.m_pMappedData = pMappedData;
5007 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
5008 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
5009 VkDeviceSize GetSize()
const {
return m_Size; }
5010 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
5011 void* GetUserData()
const {
return m_pUserData; }
5012 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
5013 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
5015 VmaDeviceMemoryBlock* GetBlock()
const 5017 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5018 return m_BlockAllocation.m_Block;
5020 VkDeviceSize GetOffset()
const;
5021 VkDeviceMemory GetMemory()
const;
5022 uint32_t GetMemoryTypeIndex()
const;
5023 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
5024 void* GetMappedData()
const;
5025 bool CanBecomeLost()
const;
5028 uint32_t GetLastUseFrameIndex()
const 5030 return m_LastUseFrameIndex.load();
5032 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
5034 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
5044 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5046 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
5048 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
5059 void BlockAllocMap();
5060 void BlockAllocUnmap();
5061 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
5064 #if VMA_STATS_STRING_ENABLED 5065 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
5066 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
5068 void InitBufferImageUsage(uint32_t bufferImageUsage)
5070 VMA_ASSERT(m_BufferImageUsage == 0);
5071 m_BufferImageUsage = bufferImageUsage;
5074 void PrintParameters(
class VmaJsonWriter& json)
const;
5078 VkDeviceSize m_Alignment;
5079 VkDeviceSize m_Size;
5081 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
5083 uint8_t m_SuballocationType;
5090 struct BlockAllocation
5093 VmaDeviceMemoryBlock* m_Block;
5094 VkDeviceSize m_Offset;
5095 bool m_CanBecomeLost;
5099 struct DedicatedAllocation
5101 uint32_t m_MemoryTypeIndex;
5102 VkDeviceMemory m_hMemory;
5103 void* m_pMappedData;
5109 BlockAllocation m_BlockAllocation;
5111 DedicatedAllocation m_DedicatedAllocation;
5114 #if VMA_STATS_STRING_ENABLED 5115 uint32_t m_CreationFrameIndex;
5116 uint32_t m_BufferImageUsage;
5126 struct VmaSuballocation
5128 VkDeviceSize offset;
5131 VmaSuballocationType type;
5135 struct VmaSuballocationOffsetLess
5137 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5139 return lhs.offset < rhs.offset;
5142 struct VmaSuballocationOffsetGreater
5144 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5146 return lhs.offset > rhs.offset;
5150 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
5153 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
5168 struct VmaAllocationRequest
5170 VkDeviceSize offset;
5171 VkDeviceSize sumFreeSize;
5172 VkDeviceSize sumItemSize;
5173 VmaSuballocationList::iterator item;
5174 size_t itemsToMakeLostCount;
5177 VkDeviceSize CalcCost()
const 5179 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
5187 class VmaBlockMetadata
5191 virtual ~VmaBlockMetadata() { }
5192 virtual void Init(VkDeviceSize size) { m_Size = size; }
5195 virtual bool Validate()
const = 0;
5196 VkDeviceSize GetSize()
const {
return m_Size; }
5197 virtual size_t GetAllocationCount()
const = 0;
5198 virtual VkDeviceSize GetSumFreeSize()
const = 0;
5199 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
5201 virtual bool IsEmpty()
const = 0;
5203 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
5205 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
5207 #if VMA_STATS_STRING_ENABLED 5208 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
5214 virtual bool CreateAllocationRequest(
5215 uint32_t currentFrameIndex,
5216 uint32_t frameInUseCount,
5217 VkDeviceSize bufferImageGranularity,
5218 VkDeviceSize allocSize,
5219 VkDeviceSize allocAlignment,
5221 VmaSuballocationType allocType,
5222 bool canMakeOtherLost,
5225 VmaAllocationRequest* pAllocationRequest) = 0;
5227 virtual bool MakeRequestedAllocationsLost(
5228 uint32_t currentFrameIndex,
5229 uint32_t frameInUseCount,
5230 VmaAllocationRequest* pAllocationRequest) = 0;
5232 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
5234 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
5238 const VmaAllocationRequest& request,
5239 VmaSuballocationType type,
5240 VkDeviceSize allocSize,
5246 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
5249 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize) {
return false; }
5252 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
5254 #if VMA_STATS_STRING_ENABLED 5255 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
5256 VkDeviceSize unusedBytes,
5257 size_t allocationCount,
5258 size_t unusedRangeCount)
const;
5259 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
5260 VkDeviceSize offset,
5262 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
5263 VkDeviceSize offset,
5264 VkDeviceSize size)
const;
5265 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
5269 VkDeviceSize m_Size;
5270 const VkAllocationCallbacks* m_pAllocationCallbacks;
5273 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 5274 VMA_ASSERT(0 && "Validation failed: " #cond); \ 5278 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
5280 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
5283 virtual ~VmaBlockMetadata_Generic();
5284 virtual void Init(VkDeviceSize size);
5286 virtual bool Validate()
const;
5287 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
5288 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5289 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5290 virtual bool IsEmpty()
const;
5292 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5293 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5295 #if VMA_STATS_STRING_ENABLED 5296 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5299 virtual bool CreateAllocationRequest(
5300 uint32_t currentFrameIndex,
5301 uint32_t frameInUseCount,
5302 VkDeviceSize bufferImageGranularity,
5303 VkDeviceSize allocSize,
5304 VkDeviceSize allocAlignment,
5306 VmaSuballocationType allocType,
5307 bool canMakeOtherLost,
5309 VmaAllocationRequest* pAllocationRequest);
5311 virtual bool MakeRequestedAllocationsLost(
5312 uint32_t currentFrameIndex,
5313 uint32_t frameInUseCount,
5314 VmaAllocationRequest* pAllocationRequest);
5316 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5318 virtual VkResult CheckCorruption(
const void* pBlockData);
5321 const VmaAllocationRequest& request,
5322 VmaSuballocationType type,
5323 VkDeviceSize allocSize,
5328 virtual void FreeAtOffset(VkDeviceSize offset);
5330 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize);
5335 bool IsBufferImageGranularityConflictPossible(
5336 VkDeviceSize bufferImageGranularity,
5337 VmaSuballocationType& inOutPrevSuballocType)
const;
5340 friend class VmaDefragmentationAlgorithm_Generic;
5341 friend class VmaDefragmentationAlgorithm_Fast;
5343 uint32_t m_FreeCount;
5344 VkDeviceSize m_SumFreeSize;
5345 VmaSuballocationList m_Suballocations;
5348 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
5350 bool ValidateFreeSuballocationList()
const;
5354 bool CheckAllocation(
5355 uint32_t currentFrameIndex,
5356 uint32_t frameInUseCount,
5357 VkDeviceSize bufferImageGranularity,
5358 VkDeviceSize allocSize,
5359 VkDeviceSize allocAlignment,
5360 VmaSuballocationType allocType,
5361 VmaSuballocationList::const_iterator suballocItem,
5362 bool canMakeOtherLost,
5363 VkDeviceSize* pOffset,
5364 size_t* itemsToMakeLostCount,
5365 VkDeviceSize* pSumFreeSize,
5366 VkDeviceSize* pSumItemSize)
const;
5368 void MergeFreeWithNext(VmaSuballocationList::iterator item);
5372 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
5375 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
5378 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
5459 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
5461 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
5464 virtual ~VmaBlockMetadata_Linear();
5465 virtual void Init(VkDeviceSize size);
5467 virtual bool Validate()
const;
5468 virtual size_t GetAllocationCount()
const;
5469 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5470 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5471 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
5473 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5474 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5476 #if VMA_STATS_STRING_ENABLED 5477 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5480 virtual bool CreateAllocationRequest(
5481 uint32_t currentFrameIndex,
5482 uint32_t frameInUseCount,
5483 VkDeviceSize bufferImageGranularity,
5484 VkDeviceSize allocSize,
5485 VkDeviceSize allocAlignment,
5487 VmaSuballocationType allocType,
5488 bool canMakeOtherLost,
5490 VmaAllocationRequest* pAllocationRequest);
5492 virtual bool MakeRequestedAllocationsLost(
5493 uint32_t currentFrameIndex,
5494 uint32_t frameInUseCount,
5495 VmaAllocationRequest* pAllocationRequest);
5497 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5499 virtual VkResult CheckCorruption(
const void* pBlockData);
5502 const VmaAllocationRequest& request,
5503 VmaSuballocationType type,
5504 VkDeviceSize allocSize,
5509 virtual void FreeAtOffset(VkDeviceSize offset);
5519 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
5521 enum SECOND_VECTOR_MODE
5523 SECOND_VECTOR_EMPTY,
5528 SECOND_VECTOR_RING_BUFFER,
5534 SECOND_VECTOR_DOUBLE_STACK,
5537 VkDeviceSize m_SumFreeSize;
5538 SuballocationVectorType m_Suballocations0, m_Suballocations1;
5539 uint32_t m_1stVectorIndex;
5540 SECOND_VECTOR_MODE m_2ndVectorMode;
5542 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5543 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5544 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5545 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5548 size_t m_1stNullItemsBeginCount;
5550 size_t m_1stNullItemsMiddleCount;
5552 size_t m_2ndNullItemsCount;
5554 bool ShouldCompact1st()
const;
5555 void CleanupAfterFree();
5569 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5571 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5574 virtual ~VmaBlockMetadata_Buddy();
5575 virtual void Init(VkDeviceSize size);
5577 virtual bool Validate()
const;
5578 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
5579 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5580 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5581 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5583 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5584 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5586 #if VMA_STATS_STRING_ENABLED 5587 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5590 virtual bool CreateAllocationRequest(
5591 uint32_t currentFrameIndex,
5592 uint32_t frameInUseCount,
5593 VkDeviceSize bufferImageGranularity,
5594 VkDeviceSize allocSize,
5595 VkDeviceSize allocAlignment,
5597 VmaSuballocationType allocType,
5598 bool canMakeOtherLost,
5600 VmaAllocationRequest* pAllocationRequest);
5602 virtual bool MakeRequestedAllocationsLost(
5603 uint32_t currentFrameIndex,
5604 uint32_t frameInUseCount,
5605 VmaAllocationRequest* pAllocationRequest);
5607 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5609 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
5612 const VmaAllocationRequest& request,
5613 VmaSuballocationType type,
5614 VkDeviceSize allocSize,
5618 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5619 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
5622 static const VkDeviceSize MIN_NODE_SIZE = 32;
5623 static const size_t MAX_LEVELS = 30;
5625 struct ValidationContext
5627 size_t calculatedAllocationCount;
5628 size_t calculatedFreeCount;
5629 VkDeviceSize calculatedSumFreeSize;
5631 ValidationContext() :
5632 calculatedAllocationCount(0),
5633 calculatedFreeCount(0),
5634 calculatedSumFreeSize(0) { }
5639 VkDeviceSize offset;
5669 VkDeviceSize m_UsableSize;
5670 uint32_t m_LevelCount;
5676 } m_FreeList[MAX_LEVELS];
5678 size_t m_AllocationCount;
5682 VkDeviceSize m_SumFreeSize;
5684 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5685 void DeleteNode(Node* node);
5686 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
5687 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
5688 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
5690 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5691 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
5695 void AddToFreeListFront(uint32_t level, Node* node);
5699 void RemoveFromFreeList(uint32_t level, Node* node);
5701 #if VMA_STATS_STRING_ENABLED 5702 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
5712 class VmaDeviceMemoryBlock
5714 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5716 VmaBlockMetadata* m_pMetadata;
5720 ~VmaDeviceMemoryBlock()
5722 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5723 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5729 uint32_t newMemoryTypeIndex,
5730 VkDeviceMemory newMemory,
5731 VkDeviceSize newSize,
5733 uint32_t algorithm);
5737 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5738 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5739 uint32_t GetId()
const {
return m_Id; }
5740 void* GetMappedData()
const {
return m_pMappedData; }
5743 bool Validate()
const;
5748 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
5751 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5752 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5754 VkResult BindBufferMemory(
5758 VkResult BindImageMemory(
5764 uint32_t m_MemoryTypeIndex;
5766 VkDeviceMemory m_hMemory;
5774 uint32_t m_MapCount;
5775 void* m_pMappedData;
5778 struct VmaPointerLess
5780 bool operator()(
const void* lhs,
const void* rhs)
const 5786 struct VmaDefragmentationMove
5788 size_t srcBlockIndex;
5789 size_t dstBlockIndex;
5790 VkDeviceSize srcOffset;
5791 VkDeviceSize dstOffset;
5795 class VmaDefragmentationAlgorithm;
5803 struct VmaBlockVector
5805 VMA_CLASS_NO_COPY(VmaBlockVector)
5809 uint32_t memoryTypeIndex,
5810 VkDeviceSize preferredBlockSize,
5811 size_t minBlockCount,
5812 size_t maxBlockCount,
5813 VkDeviceSize bufferImageGranularity,
5814 uint32_t frameInUseCount,
5816 bool explicitBlockSize,
5817 uint32_t algorithm);
5820 VkResult CreateMinBlocks();
5822 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5823 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5824 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5825 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5826 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5830 bool IsEmpty()
const {
return m_Blocks.empty(); }
5831 bool IsCorruptionDetectionEnabled()
const;
5835 uint32_t currentFrameIndex,
5837 VkDeviceSize alignment,
5839 VmaSuballocationType suballocType,
5840 size_t allocationCount,
5849 #if VMA_STATS_STRING_ENABLED 5850 void PrintDetailedMap(
class VmaJsonWriter& json);
5853 void MakePoolAllocationsLost(
5854 uint32_t currentFrameIndex,
5855 size_t* pLostAllocationCount);
5856 VkResult CheckCorruption();
5860 class VmaBlockVectorDefragmentationContext* pCtx,
5862 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
5863 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
5864 VkCommandBuffer commandBuffer);
5865 void DefragmentationEnd(
5866 class VmaBlockVectorDefragmentationContext* pCtx,
5872 size_t GetBlockCount()
const {
return m_Blocks.size(); }
5873 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
5874 size_t CalcAllocationCount()
const;
5875 bool IsBufferImageGranularityConflictPossible()
const;
5878 friend class VmaDefragmentationAlgorithm_Generic;
5881 const uint32_t m_MemoryTypeIndex;
5882 const VkDeviceSize m_PreferredBlockSize;
5883 const size_t m_MinBlockCount;
5884 const size_t m_MaxBlockCount;
5885 const VkDeviceSize m_BufferImageGranularity;
5886 const uint32_t m_FrameInUseCount;
5887 const bool m_IsCustomPool;
5888 const bool m_ExplicitBlockSize;
5889 const uint32_t m_Algorithm;
5893 bool m_HasEmptyBlock;
5894 VMA_RW_MUTEX m_Mutex;
5896 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
5897 uint32_t m_NextBlockId;
5899 VkDeviceSize CalcMaxBlockSize()
const;
5902 void Remove(VmaDeviceMemoryBlock* pBlock);
5906 void IncrementallySortBlocks();
5908 VkResult AllocatePage(
5910 uint32_t currentFrameIndex,
5912 VkDeviceSize alignment,
5914 VmaSuballocationType suballocType,
5918 VkResult AllocateFromBlock(
5919 VmaDeviceMemoryBlock* pBlock,
5921 uint32_t currentFrameIndex,
5923 VkDeviceSize alignment,
5926 VmaSuballocationType suballocType,
5930 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
5933 void ApplyDefragmentationMovesCpu(
5934 class VmaBlockVectorDefragmentationContext* pDefragCtx,
5935 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
5937 void ApplyDefragmentationMovesGpu(
5938 class VmaBlockVectorDefragmentationContext* pDefragCtx,
5939 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
5940 VkCommandBuffer commandBuffer);
5951 VMA_CLASS_NO_COPY(VmaPool_T)
5953 VmaBlockVector m_BlockVector;
5958 VkDeviceSize preferredBlockSize);
5961 uint32_t GetId()
const {
return m_Id; }
5962 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
5964 #if VMA_STATS_STRING_ENABLED 5979 class VmaDefragmentationAlgorithm
5981 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
5983 VmaDefragmentationAlgorithm(
5985 VmaBlockVector* pBlockVector,
5986 uint32_t currentFrameIndex) :
5987 m_hAllocator(hAllocator),
5988 m_pBlockVector(pBlockVector),
5989 m_CurrentFrameIndex(currentFrameIndex)
5992 virtual ~VmaDefragmentationAlgorithm()
5996 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) = 0;
5997 virtual void AddAll() = 0;
5999 virtual VkResult Defragment(
6000 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6001 VkDeviceSize maxBytesToMove,
6002 uint32_t maxAllocationsToMove) = 0;
6004 virtual VkDeviceSize GetBytesMoved()
const = 0;
6005 virtual uint32_t GetAllocationsMoved()
const = 0;
6009 VmaBlockVector*
const m_pBlockVector;
6010 const uint32_t m_CurrentFrameIndex;
6012 struct AllocationInfo
6015 VkBool32* m_pChanged;
6018 m_hAllocation(VK_NULL_HANDLE),
6019 m_pChanged(VMA_NULL)
6023 m_hAllocation(hAlloc),
6024 m_pChanged(pChanged)
6030 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
6032 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
6034 VmaDefragmentationAlgorithm_Generic(
6036 VmaBlockVector* pBlockVector,
6037 uint32_t currentFrameIndex,
6038 bool overlappingMoveSupported);
6039 virtual ~VmaDefragmentationAlgorithm_Generic();
6041 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6042 virtual void AddAll() { m_AllAllocations =
true; }
6044 virtual VkResult Defragment(
6045 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6046 VkDeviceSize maxBytesToMove,
6047 uint32_t maxAllocationsToMove);
6049 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6050 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6053 uint32_t m_AllocationCount;
6054 bool m_AllAllocations;
6056 VkDeviceSize m_BytesMoved;
6057 uint32_t m_AllocationsMoved;
6059 struct AllocationInfoSizeGreater
6061 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 6063 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
6067 struct AllocationInfoOffsetGreater
6069 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 6071 return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
6077 size_t m_OriginalBlockIndex;
6078 VmaDeviceMemoryBlock* m_pBlock;
6079 bool m_HasNonMovableAllocations;
6080 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
6082 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
6083 m_OriginalBlockIndex(SIZE_MAX),
6085 m_HasNonMovableAllocations(true),
6086 m_Allocations(pAllocationCallbacks)
6090 void CalcHasNonMovableAllocations()
6092 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
6093 const size_t defragmentAllocCount = m_Allocations.size();
6094 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
6097 void SortAllocationsBySizeDescending()
6099 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
6102 void SortAllocationsByOffsetDescending()
6104 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
6108 struct BlockPointerLess
6110 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 6112 return pLhsBlockInfo->m_pBlock < pRhsBlock;
6114 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6116 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
6122 struct BlockInfoCompareMoveDestination
6124 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6126 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
6130 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
6134 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
6142 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
6143 BlockInfoVector m_Blocks;
6145 VkResult DefragmentRound(
6146 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6147 VkDeviceSize maxBytesToMove,
6148 uint32_t maxAllocationsToMove);
6150 size_t CalcBlocksWithNonMovableCount()
const;
6152 static bool MoveMakesSense(
6153 size_t dstBlockIndex, VkDeviceSize dstOffset,
6154 size_t srcBlockIndex, VkDeviceSize srcOffset);
6157 class VmaDefragmentationAlgorithm_Fast :
public VmaDefragmentationAlgorithm
6159 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
6161 VmaDefragmentationAlgorithm_Fast(
6163 VmaBlockVector* pBlockVector,
6164 uint32_t currentFrameIndex,
6165 bool overlappingMoveSupported);
6166 virtual ~VmaDefragmentationAlgorithm_Fast();
6168 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
6169 virtual void AddAll() { m_AllAllocations =
true; }
6171 virtual VkResult Defragment(
6172 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6173 VkDeviceSize maxBytesToMove,
6174 uint32_t maxAllocationsToMove);
6176 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6177 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6182 size_t origBlockIndex;
6185 class FreeSpaceDatabase
6191 s.blockInfoIndex = SIZE_MAX;
6192 for(
size_t i = 0; i < MAX_COUNT; ++i)
6194 m_FreeSpaces[i] = s;
6198 void Register(
size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
6200 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6206 size_t bestIndex = SIZE_MAX;
6207 for(
size_t i = 0; i < MAX_COUNT; ++i)
6210 if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
6215 if(m_FreeSpaces[i].size < size &&
6216 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
6222 if(bestIndex != SIZE_MAX)
6224 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
6225 m_FreeSpaces[bestIndex].offset = offset;
6226 m_FreeSpaces[bestIndex].size = size;
6230 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
6231 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
6233 size_t bestIndex = SIZE_MAX;
6234 VkDeviceSize bestFreeSpaceAfter = 0;
6235 for(
size_t i = 0; i < MAX_COUNT; ++i)
6238 if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
6240 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
6242 if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
6244 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
6246 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
6249 bestFreeSpaceAfter = freeSpaceAfter;
6255 if(bestIndex != SIZE_MAX)
6257 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
6258 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
6260 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6263 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
6264 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
6265 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
6270 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
6280 static const size_t MAX_COUNT = 4;
6284 size_t blockInfoIndex;
6285 VkDeviceSize offset;
6287 } m_FreeSpaces[MAX_COUNT];
6290 const bool m_OverlappingMoveSupported;
6292 uint32_t m_AllocationCount;
6293 bool m_AllAllocations;
6295 VkDeviceSize m_BytesMoved;
6296 uint32_t m_AllocationsMoved;
6298 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
6300 void PreprocessMetadata();
6301 void PostprocessMetadata();
6302 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc);
// --- Defragmentation context types (declaration fragments) ---
// NOTE(review): interior lines of these declarations were lost in extraction;
// comments describe only what the visible tokens establish.
// Per-block bookkeeping used while defragmenting one device memory block.
6305 struct VmaBlockDefragmentationContext
6309 BLOCK_FLAG_USED = 0x00000001,
6314 VmaBlockDefragmentationContext() :
6316 hBuffer(VK_NULL_HANDLE)
// Defragmentation state for a single VmaBlockVector (default memory type
// vector or a custom pool's vector).
6321 class VmaBlockVectorDefragmentationContext
6323 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
6327 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
6329 VmaBlockVectorDefragmentationContext(
6332 VmaBlockVector* pBlockVector,
6333 uint32_t currFrameIndex,
6335 ~VmaBlockVectorDefragmentationContext();
// Accessors for the pool / block vector / algorithm this context targets.
6337 VmaPool GetCustomPool()
const {
return m_hCustomPool; }
6338 VmaBlockVector* GetBlockVector()
const {
return m_pBlockVector; }
6339 VmaDefragmentationAlgorithm* GetAlgorithm()
const {
return m_pAlgorithm; }
// Registers one allocation to move; pChanged reports whether it moved.
6341 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
// Marks all allocations of the vector as eligible for defragmentation.
6342 void AddAll() { m_AllAllocations =
true; }
6344 void Begin(
bool overlappingMoveSupported);
6351 VmaBlockVector*
const m_pBlockVector;
6352 const uint32_t m_CurrFrameIndex;
6353 const uint32_t m_AlgorithmFlags;
// Algorithm object; presumably created in Begin() — TODO confirm (creation site not visible here).
6355 VmaDefragmentationAlgorithm* m_pAlgorithm;
6363 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
6364 bool m_AllAllocations;
// Top-level defragmentation context handed back to the user by
// DefragmentationBegin/DefragmentationEnd; aggregates per-pool contexts.
6367 struct VmaDefragmentationContext_T
6370 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
6372 VmaDefragmentationContext_T(
6374 uint32_t currFrameIndex,
6377 ~VmaDefragmentationContext_T();
6379 void AddPools(uint32_t poolCount,
VmaPool* pPools);
6380 void AddAllocations(
6381 uint32_t allocationCount,
6383 VkBool32* pAllocationsChanged);
// Performs the moves, bounded by separate CPU and GPU byte / count budgets.
6391 VkResult Defragment(
6392 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
6393 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
6398 const uint32_t m_CurrFrameIndex;
6399 const uint32_t m_Flags;
// One context slot per default memory-type block vector...
6402 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
// ...plus a dynamic list for touched custom pools.
6404 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
// --- VmaRecorder (declaration fragment, compiled only when recording is on) ---
// Records every allocator API call to a file for later replay; the class
// header line itself was lost in extraction.
6407 #if VMA_RECORDING_ENABLED 6414 void WriteConfiguration(
6415 const VkPhysicalDeviceProperties& devProps,
6416 const VkPhysicalDeviceMemoryProperties& memProps,
6417 bool dedicatedAllocationExtensionEnabled);
// One Record* entry point per public allocator operation; each takes the
// current frame index so the replay can reproduce frame-dependent behavior.
6420 void RecordCreateAllocator(uint32_t frameIndex);
6421 void RecordDestroyAllocator(uint32_t frameIndex);
6422 void RecordCreatePool(uint32_t frameIndex,
6425 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
6426 void RecordAllocateMemory(uint32_t frameIndex,
6427 const VkMemoryRequirements& vkMemReq,
6430 void RecordAllocateMemoryPages(uint32_t frameIndex,
6431 const VkMemoryRequirements& vkMemReq,
6433 uint64_t allocationCount,
6435 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
6436 const VkMemoryRequirements& vkMemReq,
6437 bool requiresDedicatedAllocation,
6438 bool prefersDedicatedAllocation,
6441 void RecordAllocateMemoryForImage(uint32_t frameIndex,
6442 const VkMemoryRequirements& vkMemReq,
6443 bool requiresDedicatedAllocation,
6444 bool prefersDedicatedAllocation,
6447 void RecordFreeMemory(uint32_t frameIndex,
6449 void RecordFreeMemoryPages(uint32_t frameIndex,
6450 uint64_t allocationCount,
6452 void RecordResizeAllocation(
6453 uint32_t frameIndex,
6455 VkDeviceSize newSize);
6456 void RecordSetAllocationUserData(uint32_t frameIndex,
6458 const void* pUserData);
6459 void RecordCreateLostAllocation(uint32_t frameIndex,
6461 void RecordMapMemory(uint32_t frameIndex,
6463 void RecordUnmapMemory(uint32_t frameIndex,
6465 void RecordFlushAllocation(uint32_t frameIndex,
6466 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6467 void RecordInvalidateAllocation(uint32_t frameIndex,
6468 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6469 void RecordCreateBuffer(uint32_t frameIndex,
6470 const VkBufferCreateInfo& bufCreateInfo,
6473 void RecordCreateImage(uint32_t frameIndex,
6474 const VkImageCreateInfo& imageCreateInfo,
6477 void RecordDestroyBuffer(uint32_t frameIndex,
6479 void RecordDestroyImage(uint32_t frameIndex,
6481 void RecordTouchAllocation(uint32_t frameIndex,
6483 void RecordGetAllocationInfo(uint32_t frameIndex,
6485 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
6487 void RecordDefragmentationBegin(uint32_t frameIndex,
6490 void RecordDefragmentationEnd(uint32_t frameIndex,
// Helper that copies/escapes user-data strings for the log file.
6500 class UserDataString
6504 const char* GetString()
const {
return m_Str; }
// File access is serialized with a mutex; m_StartCounter is presumably the
// timestamp origin for relative timing — TODO confirm (usage not visible here).
6514 VMA_MUTEX m_FileMutex;
6516 int64_t m_StartCounter;
6518 void GetBasicParams(CallParams& outParams);
// Writes `count` pointers separated by single spaces, no trailing separator.
6521 template<
typename T>
6522 void PrintPointerList(uint64_t count,
const T* pItems)
6526 fprintf(m_File,
"%p", pItems[0]);
6527 for(uint64_t i = 1; i < count; ++i)
6529 fprintf(m_File,
" %p", pItems[i]);
6534 void PrintPointerList(uint64_t count,
const VmaAllocation* pItems);
// --- VmaAllocator_T (main allocator object, declaration fragment) ---
// Central state: per-memory-type block vectors, dedicated allocation lists,
// pools, and cached physical-device properties.
6538 #endif // #if VMA_RECORDING_ENABLED 6541 struct VmaAllocator_T
6543 VMA_CLASS_NO_COPY(VmaAllocator_T)
6546 bool m_UseKhrDedicatedAllocation;
// CPU allocation callbacks; m_AllocationCallbacksSpecified says whether the
// user supplied them (see GetAllocationCallbacks below).
6548 bool m_AllocationCallbacksSpecified;
6549 VkAllocationCallbacks m_AllocationCallbacks;
// Optional per-heap size budget, guarded by its own mutex.
6553 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
6554 VMA_MUTEX m_HeapSizeLimitMutex;
6556 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
6557 VkPhysicalDeviceMemoryProperties m_MemProps;
// One default block vector per Vulkan memory type.
6560 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
// Dedicated (one-VkDeviceMemory-per-allocation) lists, one per memory type,
// each protected by its own read-write mutex.
6563 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
6564 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
6565 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns user callbacks if specified, otherwise null (0) so Vulkan uses
// its defaults.
6571 const VkAllocationCallbacks* GetAllocationCallbacks()
const 6573 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
6577 return m_VulkanFunctions;
// Effective granularity: max of the debug override and the device limit
// (clamping logic partially elided).
6580 VkDeviceSize GetBufferImageGranularity()
const 6583 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
6584 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
6587 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
6588 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
6590 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 6592 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
6593 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// True when the type is HOST_VISIBLE but not HOST_COHERENT, i.e. it needs
// explicit flush/invalidate.
6596 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 6598 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
6599 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
// Non-coherent types must be aligned to nonCoherentAtomSize for mapped access.
6602 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 6604 return IsMemoryTypeNonCoherent(memTypeIndex) ?
6605 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
6606 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
6609 bool IsIntegratedGpu()
const 6611 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
6614 #if VMA_RECORDING_ENABLED 6615 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
6618 void GetBufferMemoryRequirements(
6620 VkMemoryRequirements& memReq,
6621 bool& requiresDedicatedAllocation,
6622 bool& prefersDedicatedAllocation)
const;
6623 void GetImageMemoryRequirements(
6625 VkMemoryRequirements& memReq,
6626 bool& requiresDedicatedAllocation,
6627 bool& prefersDedicatedAllocation)
const;
// Main entry point: allocates `allocationCount` allocations for the given
// requirements, choosing block vs dedicated placement.
6630 VkResult AllocateMemory(
6631 const VkMemoryRequirements& vkMemReq,
6632 bool requiresDedicatedAllocation,
6633 bool prefersDedicatedAllocation,
6634 VkBuffer dedicatedBuffer,
6635 VkImage dedicatedImage,
6637 VmaSuballocationType suballocType,
6638 size_t allocationCount,
6643 size_t allocationCount,
6646 VkResult ResizeAllocation(
6648 VkDeviceSize newSize);
6650 void CalculateStats(
VmaStats* pStats);
6652 #if VMA_STATS_STRING_ENABLED 6653 void PrintDetailedMap(
class VmaJsonWriter& json);
6656 VkResult DefragmentationBegin(
6660 VkResult DefragmentationEnd(
6667 void DestroyPool(
VmaPool pool);
6670 void SetCurrentFrameIndex(uint32_t frameIndex);
6671 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
6673 void MakePoolAllocationsLost(
6675 size_t* pLostAllocationCount);
6676 VkResult CheckPoolCorruption(
VmaPool hPool);
6677 VkResult CheckCorruption(uint32_t memoryTypeBits);
// Thin wrappers around vkAllocateMemory/vkFreeMemory (heap-limit accounting
// presumably lives here — TODO confirm, bodies not visible).
6681 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
6682 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
6687 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
6688 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
6690 void FlushOrInvalidateAllocation(
6692 VkDeviceSize offset, VkDeviceSize size,
6693 VMA_CACHE_OPERATION op);
// Fills allocation memory with a debug byte pattern.
6695 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
6698 VkDeviceSize m_PreferredLargeHeapBlockSize;
6700 VkPhysicalDevice m_PhysicalDevice;
6701 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
// Custom pools, guarded by a read-write mutex.
6703 VMA_RW_MUTEX m_PoolsMutex;
6705 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
6706 uint32_t m_NextPoolId;
6710 #if VMA_RECORDING_ENABLED 6711 VmaRecorder* m_pRecorder;
6716 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
// Internal allocation helpers for a single, already-chosen memory type.
6718 VkResult AllocateMemoryOfType(
6720 VkDeviceSize alignment,
6721 bool dedicatedAllocation,
6722 VkBuffer dedicatedBuffer,
6723 VkImage dedicatedImage,
6725 uint32_t memTypeIndex,
6726 VmaSuballocationType suballocType,
6727 size_t allocationCount,
6731 VkResult AllocateDedicatedMemoryPage(
6733 VmaSuballocationType suballocType,
6734 uint32_t memTypeIndex,
6735 const VkMemoryAllocateInfo& allocInfo,
6737 bool isUserDataString,
6742 VkResult AllocateDedicatedMemory(
6744 VmaSuballocationType suballocType,
6745 uint32_t memTypeIndex,
6747 bool isUserDataString,
6749 VkBuffer dedicatedBuffer,
6750 VkImage dedicatedImage,
6751 size_t allocationCount,
6761 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
6763 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
6766 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
6768 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Typed single-object raw-storage allocator: returns VmaMalloc'd storage for
// one T with T's alignment. NOTE(review): the signature line was lost in
// extraction; no constructor runs here — only raw storage is returned.
6771 template<
typename T>
6774 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
6777 template<
typename T>
6778 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
6780 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// Typed deallocation helpers. NOTE(review): the lines invoking the objects'
// destructors appear to have been lost in extraction — only the final
// VmaFree calls are visible; presumably ~T() runs before freeing. TODO confirm.
6783 template<
typename T>
6784 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
6789 VmaFree(hAllocator, ptr);
// Array variant: iterates elements in reverse order (i = count; i--;),
// then frees the storage.
6793 template<
typename T>
6794 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
6798 for(
size_t i = count; i--; )
6800 VmaFree(hAllocator, ptr);
// --- VmaStringBuilder (declaration fragment, stats-string builds only) ---
// Minimal append-only character buffer used to build the JSON stats string;
// backed by a VmaVector<char> using the allocator's CPU callbacks.
// Note: the buffer is NOT NUL-terminated — use GetLength() with GetData().
6807 #if VMA_STATS_STRING_ENABLED 6809 class VmaStringBuilder
6812 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
6813 size_t GetLength()
const {
return m_Data.size(); }
6814 const char* GetData()
const {
return m_Data.data(); }
// Single-character append.
6816 void Add(
char ch) { m_Data.push_back(ch); }
// C-string append (defined out of line below).
6817 void Add(
const char* pStr);
6818 void AddNewLine() { Add(
'\n'); }
// Numeric/pointer appends formatted via VmaUint32ToStr etc. (see definitions).
6819 void AddNumber(uint32_t num);
6820 void AddNumber(uint64_t num);
6821 void AddPointer(
const void* ptr);
6824 VmaVector< char, VmaStlAllocator<char> > m_Data;
6827 void VmaStringBuilder::Add(
const char* pStr)
6829 const size_t strLen = strlen(pStr);
6832 const size_t oldCount = m_Data.size();
6833 m_Data.resize(oldCount + strLen);
6834 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Number/pointer append definitions. NOTE(review): the local `buf`
// declarations and the final Add(buf) calls were lost in extraction; each
// function formats into a stack buffer via the Vma*ToStr helper shown.
6838 void VmaStringBuilder::AddNumber(uint32_t num)
6841 VmaUint32ToStr(buf,
sizeof(buf), num);
6845 void VmaStringBuilder::AddNumber(uint64_t num)
6848 VmaUint64ToStr(buf,
sizeof(buf), num);
6852 void VmaStringBuilder::AddPointer(
const void* ptr)
6855 VmaPtrToStr(buf,
sizeof(buf), ptr);
// --- VmaJsonWriter (declaration fragment) ---
// Streaming JSON emitter over a VmaStringBuilder. Nesting is tracked on an
// explicit stack; inside an object, values must alternate key (string) /
// value — BeginValue() asserts this (see its definition below).
6859 #endif // #if VMA_STATS_STRING_ENABLED 6864 #if VMA_STATS_STRING_ENABLED 6868 VMA_CLASS_NO_COPY(VmaJsonWriter)
6870 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
// singleLine=true suppresses newlines/indentation inside the collection.
6873 void BeginObject(
bool singleLine =
false);
6876 void BeginArray(
bool singleLine =
false);
// Writes a complete quoted string value in one call.
6879 void WriteString(
const char* pStr);
// Or build a string incrementally: BeginString / ContinueString* / EndString.
6880 void BeginString(
const char* pStr = VMA_NULL);
6881 void ContinueString(
const char* pStr);
6882 void ContinueString(uint32_t n);
6883 void ContinueString(uint64_t n);
6884 void ContinueString_Pointer(
const void* ptr);
6885 void EndString(
const char* pStr = VMA_NULL);
6887 void WriteNumber(uint32_t n);
6888 void WriteNumber(uint64_t n);
6889 void WriteBool(
bool b);
6893 static const char*
const INDENT;
// One stack item per currently-open object/array.
6895 enum COLLECTION_TYPE
6897 COLLECTION_TYPE_OBJECT,
6898 COLLECTION_TYPE_ARRAY,
6902 COLLECTION_TYPE type;
6903 uint32_t valueCount;
6904 bool singleLineMode;
6907 VmaStringBuilder& m_SB;
6908 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
6909 bool m_InsideString;
6911 void BeginValue(
bool isString);
6912 void WriteIndent(
bool oneLess =
false);
// Two-space indentation unit.
6915 const char*
const VmaJsonWriter::INDENT =
"  ";
// Constructor fragment. NOTE(review): the m_SB initializer line appears to
// have been lost in extraction; visible initializers set up the stack
// allocator and start outside any string.
6917 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
6919 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
6920 m_InsideString(false)
6924 VmaJsonWriter::~VmaJsonWriter()
6926 VMA_ASSERT(!m_InsideString);
6927 VMA_ASSERT(m_Stack.empty());
// Opens a JSON object and pushes it on the nesting stack.
// NOTE(review): the lines emitting the '{' and declaring the local StackItem
// were lost in extraction.
6930 void VmaJsonWriter::BeginObject(
bool singleLine)
6932 VMA_ASSERT(!m_InsideString);
6938 item.type = COLLECTION_TYPE_OBJECT;
6939 item.valueCount = 0;
6940 item.singleLineMode = singleLine;
6941 m_Stack.push_back(item);
// Closes the innermost collection, which must be an object.
6944 void VmaJsonWriter::EndObject()
6946 VMA_ASSERT(!m_InsideString);
6951 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
// Array counterparts of the two functions above.
6955 void VmaJsonWriter::BeginArray(
bool singleLine)
6957 VMA_ASSERT(!m_InsideString);
6963 item.type = COLLECTION_TYPE_ARRAY;
6964 item.valueCount = 0;
6965 item.singleLineMode = singleLine;
6966 m_Stack.push_back(item);
6969 void VmaJsonWriter::EndArray()
6971 VMA_ASSERT(!m_InsideString);
6976 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// Convenience: complete quoted string in one call (body elided).
6980 void VmaJsonWriter::WriteString(
const char* pStr)
// Starts a quoted string value; optional initial content.
6986 void VmaJsonWriter::BeginString(
const char* pStr)
6988 VMA_ASSERT(!m_InsideString);
6992 m_InsideString =
true;
6993 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6995 ContinueString(pStr);
// Appends raw characters to the open string, escaping per character; the
// per-character switch was lost in extraction — unsupported characters
// trigger the assert below.
6999 void VmaJsonWriter::ContinueString(
const char* pStr)
7001 VMA_ASSERT(m_InsideString);
7003 const size_t strLen = strlen(pStr);
7004 for(
size_t i = 0; i < strLen; ++i)
7037 VMA_ASSERT(0 &&
"Character not currently supported.");
// Numeric appends inside an open string (formatting lines elided).
7043 void VmaJsonWriter::ContinueString(uint32_t n)
7045 VMA_ASSERT(m_InsideString);
7049 void VmaJsonWriter::ContinueString(uint64_t n)
7051 VMA_ASSERT(m_InsideString);
7055 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
7057 VMA_ASSERT(m_InsideString);
7058 m_SB.AddPointer(ptr);
// Finishes the open string, optionally appending trailing content first.
// NOTE(review): the line emitting the closing quote was lost in extraction.
7061 void VmaJsonWriter::EndString(
const char* pStr)
7063 VMA_ASSERT(m_InsideString);
7064 if(pStr != VMA_NULL && pStr[0] !=
'\0')
7066 ContinueString(pStr);
7069 m_InsideString =
false;
// Scalar value writers (BeginValue/m_SB emission lines elided).
7072 void VmaJsonWriter::WriteNumber(uint32_t n)
7074 VMA_ASSERT(!m_InsideString);
7079 void VmaJsonWriter::WriteNumber(uint64_t n)
7081 VMA_ASSERT(!m_InsideString);
7086 void VmaJsonWriter::WriteBool(
bool b)
7088 VMA_ASSERT(!m_InsideString);
7090 m_SB.Add(b ?
"true" :
"false");
7093 void VmaJsonWriter::WriteNull()
7095 VMA_ASSERT(!m_InsideString);
// Emits separators/indentation before a value and enforces the key/value
// alternation inside objects: even positions must be keys (strings).
7100 void VmaJsonWriter::BeginValue(
bool isString)
7102 if(!m_Stack.empty())
7104 StackItem& currItem = m_Stack.back();
7105 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7106 currItem.valueCount % 2 == 0)
7108 VMA_ASSERT(isString);
7111 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7112 currItem.valueCount % 2 != 0)
7116 else if(currItem.valueCount > 0)
7125 ++currItem.valueCount;
// Writes a newline plus one INDENT per open collection (one less when
// closing); skipped entirely in single-line mode.
7129 void VmaJsonWriter::WriteIndent(
bool oneLess)
7131 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
7135 size_t count = m_Stack.size();
7136 if(count > 0 && oneLess)
7140 for(
size_t i = 0; i < count; ++i)
// Sets the allocation's pUserData. In string mode (IsUserDataString) the
// string is deep-copied into allocator-owned memory; otherwise the raw
// pointer is stored as-is (the final assignment below is the non-string
// path — the `else` line was lost in extraction).
7147 #endif // #if VMA_STATS_STRING_ENABLED 7151 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
7153 if(IsUserDataString())
// Caller must not pass the currently stored string back in — it is freed below.
7155 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
7157 FreeUserDataString(hAllocator);
7159 if(pUserData != VMA_NULL)
7161 const char*
const newStrSrc = (
char*)pUserData;
7162 const size_t newStrLen = strlen(newStrSrc);
// +1 copies the terminating NUL along with the characters.
7163 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
7164 memcpy(newStrDst, newStrSrc, newStrLen + 1);
7165 m_pUserData = newStrDst;
7170 m_pUserData = pUserData;
// Re-homes a block suballocation onto another VmaDeviceMemoryBlock (used by
// defragmentation). If the allocation is persistently mapped, the mapping is
// transferred: old block unmapped, new block mapped with the same ref count.
// NOTE(review): the hAllocator parameter line was lost in extraction.
7174 void VmaAllocation_T::ChangeBlockAllocation(
7176 VmaDeviceMemoryBlock* block,
7177 VkDeviceSize offset)
7179 VMA_ASSERT(block != VMA_NULL);
7180 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7183 if(block != m_BlockAllocation.m_Block)
7185 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
7186 if(IsPersistentMap())
7188 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
7189 block->Map(hAllocator, mapRefCount, VMA_NULL);
7192 m_BlockAllocation.m_Block = block;
7193 m_BlockAllocation.m_Offset = offset;
// Updates the recorded size after an in-place resize (the assignment line
// was lost in extraction; only the precondition is visible).
7196 void VmaAllocation_T::ChangeSize(VkDeviceSize newSize)
7198 VMA_ASSERT(newSize > 0);
7202 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
7204 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7205 m_BlockAllocation.m_Offset = newOffset;
// Accessors dispatching on allocation type (block suballocation vs dedicated
// VkDeviceMemory). NOTE(review): the switch(m_Type) headers and default
// branches were lost in extraction; only the per-case returns are visible.
7208 VkDeviceSize VmaAllocation_T::GetOffset()
const 7212 case ALLOCATION_TYPE_BLOCK:
7213 return m_BlockAllocation.m_Offset;
7214 case ALLOCATION_TYPE_DEDICATED:
7222 VkDeviceMemory VmaAllocation_T::GetMemory()
const 7226 case ALLOCATION_TYPE_BLOCK:
7227 return m_BlockAllocation.m_Block->GetDeviceMemory();
7228 case ALLOCATION_TYPE_DEDICATED:
7229 return m_DedicatedAllocation.m_hMemory;
7232 return VK_NULL_HANDLE;
7236 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 7240 case ALLOCATION_TYPE_BLOCK:
7241 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
7242 case ALLOCATION_TYPE_DEDICATED:
7243 return m_DedicatedAllocation.m_MemoryTypeIndex;
// For a block allocation the mapped pointer is the block's mapping plus this
// allocation's offset; for dedicated it is stored directly.
7250 void* VmaAllocation_T::GetMappedData()
const 7254 case ALLOCATION_TYPE_BLOCK:
7257 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
7258 VMA_ASSERT(pBlockData != VMA_NULL);
7259 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
7266 case ALLOCATION_TYPE_DEDICATED:
// Invariant: dedicated mapping pointer is non-null exactly when mapped.
7267 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
7268 return m_DedicatedAllocation.m_pMappedData;
// Dedicated allocations can never become lost.
7275 bool VmaAllocation_T::CanBecomeLost()
const 7279 case ALLOCATION_TYPE_BLOCK:
7280 return m_BlockAllocation.m_CanBecomeLost;
7281 case ALLOCATION_TYPE_DEDICATED:
7289 VmaPool VmaAllocation_T::GetPool()
const 7291 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7292 return m_BlockAllocation.m_hPool;
// Attempts to mark this lost-capable allocation as lost via a CAS on its
// last-use frame index. Fails if already lost, or if the allocation was used
// within the last `frameInUseCount` frames; retries on CAS contention
// (loop structure partially elided).
7295 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7297 VMA_ASSERT(CanBecomeLost());
7303 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
7306 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7311 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
7317 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Human-readable names indexed by VmaSuballocationType (initializers elided).
7327 #if VMA_STATS_STRING_ENABLED 7330 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
// Emits this allocation's properties as key/value pairs into an already-open
// JSON object (Type, Size, optional UserData, frame indices, optional Usage).
7339 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 7341 json.WriteString(
"Type");
7342 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
7344 json.WriteString(
"Size");
7345 json.WriteNumber(m_Size);
7347 if(m_pUserData != VMA_NULL)
7349 json.WriteString(
"UserData");
// String user data is written as a JSON string; raw pointers as a pointer value.
7350 if(IsUserDataString())
7352 json.WriteString((
const char*)m_pUserData);
7357 json.ContinueString_Pointer(m_pUserData);
7362 json.WriteString(
"CreationFrameIndex");
7363 json.WriteNumber(m_CreationFrameIndex);
7365 json.WriteString(
"LastUseFrameIndex");
7366 json.WriteNumber(GetLastUseFrameIndex());
7368 if(m_BufferImageUsage != 0)
7370 json.WriteString(
"Usage");
7371 json.WriteNumber(m_BufferImageUsage);
7377 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
7379 VMA_ASSERT(IsUserDataString());
7380 if(m_pUserData != VMA_NULL)
7382 char*
const oldStr = (
char*)m_pUserData;
7383 const size_t oldStrLen = strlen(oldStr);
7384 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
7385 m_pUserData = VMA_NULL;
// Map/unmap reference counting. The low 7 bits of m_MapCount hold the map
// count (capped at 0x7F); the top bit flags a persistent mapping.
// NOTE(review): the increment/decrement lines themselves were lost in
// extraction — only the guards and error branches are visible.
7389 void VmaAllocation_T::BlockAllocMap()
7391 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7393 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7399 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
7403 void VmaAllocation_T::BlockAllocUnmap()
7405 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7407 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7413 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// Dedicated variant: on first map calls vkMapMemory and caches the pointer;
// subsequent maps just return the cached pointer and bump the count.
7417 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
7419 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7423 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7425 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
7426 *ppData = m_DedicatedAllocation.m_pMappedData;
7432 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
7433 return VK_ERROR_MEMORY_MAP_FAILED;
// First map: whole-range vkMapMemory through the function-pointer table.
7438 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
7439 hAllocator->m_hDevice,
7440 m_DedicatedAllocation.m_hMemory,
7445 if(result == VK_SUCCESS)
7447 m_DedicatedAllocation.m_pMappedData = *ppData;
// Unmap: when the count drops to zero the cached pointer is cleared and
// vkUnmapMemory is called.
7454 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
7456 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7458 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7463 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
7464 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
7465 hAllocator->m_hDevice,
7466 m_DedicatedAllocation.m_hMemory);
7471 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// Serializes one VmaStatInfo as a JSON object: counters, byte totals, and
// Min/Avg/Max sub-objects for allocation and unused-range sizes
// (the WriteNumber value lines were lost in extraction).
7475 #if VMA_STATS_STRING_ENABLED 7477 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
7481 json.WriteString(
"Blocks");
7484 json.WriteString(
"Allocations");
7487 json.WriteString(
"UnusedRanges");
7490 json.WriteString(
"UsedBytes");
7493 json.WriteString(
"UnusedBytes");
7498 json.WriteString(
"AllocationSize");
7499 json.BeginObject(
true);
7500 json.WriteString(
"Min");
7502 json.WriteString(
"Avg");
7504 json.WriteString(
"Max");
7511 json.WriteString(
"UnusedRangeSize");
7512 json.BeginObject(
true);
7513 json.WriteString(
"Min");
7515 json.WriteString(
"Avg");
7517 json.WriteString(
"Max");
// Comparator ordering free-suballocation iterators by suballocation size;
// the second overload supports heterogeneous lookup against a plain size.
7525 #endif // #if VMA_STATS_STRING_ENABLED 7527 struct VmaSuballocationItemSizeLess
7530 const VmaSuballocationList::iterator lhs,
7531 const VmaSuballocationList::iterator rhs)
const 7533 return lhs->size < rhs->size;
7536 const VmaSuballocationList::iterator lhs,
7537 VkDeviceSize rhsSize)
const 7539 return lhs->size < rhsSize;
// Base metadata constructor: caches the allocator's CPU callbacks
// (other member initializers elided).
7547 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
7549 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
// JSON helpers shared by all metadata implementations: emit the block header
// (totals + "Suballocations" key), then one object per allocation or unused
// range, then the footer.
7553 #if VMA_STATS_STRING_ENABLED 7555 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
7556 VkDeviceSize unusedBytes,
7557 size_t allocationCount,
7558 size_t unusedRangeCount)
const 7562 json.WriteString(
"TotalBytes");
7563 json.WriteNumber(GetSize());
7565 json.WriteString(
"UnusedBytes");
7566 json.WriteNumber(unusedBytes);
7568 json.WriteString(
"Allocations");
7569 json.WriteNumber((uint64_t)allocationCount);
7571 json.WriteString(
"UnusedRanges");
7572 json.WriteNumber((uint64_t)unusedRangeCount);
7574 json.WriteString(
"Suballocations");
// One single-line JSON object per live allocation: its offset plus the
// allocation's own parameters (type, size, user data, ...).
7578 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
7579 VkDeviceSize offset,
7582 json.BeginObject(
true);
7584 json.WriteString(
"Offset");
7585 json.WriteNumber(offset);
7587 hAllocation->PrintParameters(json);
// One single-line JSON object per free range, typed as FREE.
7592 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
7593 VkDeviceSize offset,
7594 VkDeviceSize size)
const 7596 json.BeginObject(
true);
7598 json.WriteString(
"Offset");
7599 json.WriteNumber(offset);
7601 json.WriteString(
"Type");
7602 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
7604 json.WriteString(
"Size");
7605 json.WriteNumber(size);
// Footer counterpart of PrintDetailedMap_Begin (body elided).
7610 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
// Generic (free-list) metadata: a doubly-linked suballocation list plus a
// size-sorted vector of iterators to large-enough free suballocations.
const 7616 #endif // #if VMA_STATS_STRING_ENABLED 7621 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
7622 VmaBlockMetadata(hAllocator),
7625 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7626 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
7630 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
// Initializes the block as one single free suballocation covering the whole
// size, and registers it in the by-size list.
7634 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
7636 VmaBlockMetadata::Init(size);
7639 m_SumFreeSize = size;
7641 VmaSuballocation suballoc = {};
7642 suballoc.offset = 0;
7643 suballoc.size = size;
7644 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7645 suballoc.hAllocation = VK_NULL_HANDLE;
// Whole-block free range is always big enough to be registered by size.
7647 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
7648 m_Suballocations.push_back(suballoc);
7649 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
7651 m_FreeSuballocationsBySize.push_back(suballocItem);
// Full consistency check (debug builds): walks the suballocation list
// recomputing offsets, free count/bytes and registered free ranges, then
// cross-checks against the cached members and the sorted by-size vector.
7654 bool VmaBlockMetadata_Generic::Validate()
const 7656 VMA_VALIDATE(!m_Suballocations.empty());
7659 VkDeviceSize calculatedOffset = 0;
7661 uint32_t calculatedFreeCount = 0;
7663 VkDeviceSize calculatedSumFreeSize = 0;
7666 size_t freeSuballocationsToRegister = 0;
7668 bool prevFree =
false;
7670 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7671 suballocItem != m_Suballocations.cend();
7674 const VmaSuballocation& subAlloc = *suballocItem;
// Suballocations must be contiguous: each starts where the previous ended.
7677 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
7679 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Adjacent free ranges must have been merged.
7681 VMA_VALIDATE(!prevFree || !currFree);
7683 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
7687 calculatedSumFreeSize += subAlloc.size;
7688 ++calculatedFreeCount;
7689 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7691 ++freeSuballocationsToRegister;
// Every free range must hold at least the debug margin.
7695 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
7699 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
7700 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
// With a debug margin every used range must be preceded by a free one.
7703 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
7706 calculatedOffset += subAlloc.size;
7707 prevFree = currFree;
7712 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
// The by-size vector must contain only free items, in non-decreasing size order.
7714 VkDeviceSize lastSize = 0;
7715 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
7717 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
7720 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7722 VMA_VALIDATE(suballocItem->size >= lastSize);
7724 lastSize = suballocItem->size;
7728 VMA_VALIDATE(ValidateFreeSuballocationList());
7729 VMA_VALIDATE(calculatedOffset == GetSize());
7730 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
7731 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
// Largest free range is the last entry of the size-sorted vector
// (the empty-vector fallback return was lost in extraction).
7736 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 7738 if(!m_FreeSuballocationsBySize.empty())
7740 return m_FreeSuballocationsBySize.back()->size;
7748 bool VmaBlockMetadata_Generic::IsEmpty()
const 7750 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Fills a VmaStatInfo by scanning all suballocations (per-item accumulation
// lines were lost in extraction).
7753 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 7757 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7769 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7770 suballocItem != m_Suballocations.cend();
7773 const VmaSuballocation& suballoc = *suballocItem;
7774 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Accumulates this block's totals into a running VmaPoolStats.
7787 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 7789 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7791 inoutStats.
size += GetSize();
// Emits the detailed JSON map: header with counts, then one entry per
// suballocation (free ranges vs live allocations), then the footer.
7798 #if VMA_STATS_STRING_ENABLED 7800 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 7802 PrintDetailedMap_Begin(json,
7804 m_Suballocations.size() - (size_t)m_FreeCount,
7808 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7809 suballocItem != m_Suballocations.cend();
7810 ++suballocItem, ++i)
7812 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7814 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
7818 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
7822 PrintDetailedMap_End(json);
// Searches for a place to put a new allocation of allocSize/allocAlignment.
// Strategy branches: best-fit via binary search in the size-sorted free list,
// first-fit / min-offset via linear scan, or worst-fit scanning from the
// largest free range; with canMakeOtherLost it additionally considers evicting
// lost-capable allocations, choosing the candidate with the lowest CalcCost().
// NOTE(review): several parameter lines, CheckAllocation call headers and
// early-return lines were lost in extraction.
7825 #endif // #if VMA_STATS_STRING_ENABLED 7827 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
7828 uint32_t currentFrameIndex,
7829 uint32_t frameInUseCount,
7830 VkDeviceSize bufferImageGranularity,
7831 VkDeviceSize allocSize,
7832 VkDeviceSize allocAlignment,
7834 VmaSuballocationType allocType,
7835 bool canMakeOtherLost,
7837 VmaAllocationRequest* pAllocationRequest)
7839 VMA_ASSERT(allocSize > 0);
7840 VMA_ASSERT(!upperAddress);
7841 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7842 VMA_ASSERT(pAllocationRequest != VMA_NULL);
7843 VMA_HEAVY_ASSERT(Validate());
// Early out: without eviction, total free space must cover the request plus
// debug margins on both sides.
7846 if(canMakeOtherLost ==
false &&
7847 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
7853 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
7854 if(freeSuballocCount > 0)
// Best fit: binary-search the first free range that is large enough, then
// probe forward until CheckAllocation succeeds.
7859 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7860 m_FreeSuballocationsBySize.data(),
7861 m_FreeSuballocationsBySize.data() + freeSuballocCount,
7862 allocSize + 2 * VMA_DEBUG_MARGIN,
7863 VmaSuballocationItemSizeLess());
7864 size_t index = it - m_FreeSuballocationsBySize.data();
7865 for(; index < freeSuballocCount; ++index)
7870 bufferImageGranularity,
7874 m_FreeSuballocationsBySize[index],
7876 &pAllocationRequest->offset,
7877 &pAllocationRequest->itemsToMakeLostCount,
7878 &pAllocationRequest->sumFreeSize,
7879 &pAllocationRequest->sumItemSize))
7881 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Min-offset strategy: scan the suballocation list in address order and take
// the first free range that fits.
7886 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
7888 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7889 it != m_Suballocations.end();
7892 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
7895 bufferImageGranularity,
7901 &pAllocationRequest->offset,
7902 &pAllocationRequest->itemsToMakeLostCount,
7903 &pAllocationRequest->sumFreeSize,
7904 &pAllocationRequest->sumItemSize))
7906 pAllocationRequest->item = it;
// Worst fit: iterate the size-sorted vector from the largest range downward.
7914 for(
size_t index = freeSuballocCount; index--; )
7919 bufferImageGranularity,
7923 m_FreeSuballocationsBySize[index],
7925 &pAllocationRequest->offset,
7926 &pAllocationRequest->itemsToMakeLostCount,
7927 &pAllocationRequest->sumFreeSize,
7928 &pAllocationRequest->sumItemSize))
7930 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Eviction path: brute-force every candidate start (free ranges and
// lost-capable allocations), keeping the cheapest request found.
7937 if(canMakeOtherLost)
7941 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
7942 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
7944 VmaAllocationRequest tmpAllocRequest = {};
7945 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
7946 suballocIt != m_Suballocations.end();
7949 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
7950 suballocIt->hAllocation->CanBecomeLost())
7955 bufferImageGranularity,
7961 &tmpAllocRequest.offset,
7962 &tmpAllocRequest.itemsToMakeLostCount,
7963 &tmpAllocRequest.sumFreeSize,
7964 &tmpAllocRequest.sumItemSize))
7966 tmpAllocRequest.item = suballocIt;
7968 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost() ||
7971 *pAllocationRequest = tmpAllocRequest;
// VK_WHOLE_SIZE sentinel still present means no candidate was ever recorded.
7977 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Evicts the allocations a CreateAllocationRequest marked for loss: walks
// forward from the request item, skipping free ranges, making each
// lost-capable allocation lost and merging the freed range back in. Afterward
// the request item must denote a free suballocation.
7986 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
7987 uint32_t currentFrameIndex,
7988 uint32_t frameInUseCount,
7989 VmaAllocationRequest* pAllocationRequest)
7991 while(pAllocationRequest->itemsToMakeLostCount > 0)
7993 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
7995 ++pAllocationRequest->item;
7997 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7998 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
7999 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
8000 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation returns an iterator to the merged free range.
8002 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
8003 --pAllocationRequest->itemsToMakeLostCount;
8011 VMA_HEAVY_ASSERT(Validate());
8012 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
8013 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Makes every lost-capable allocation in this block lost; returns how many
// were actually evicted.
8018 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
8020 uint32_t lostAllocationCount = 0;
8021 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8022 it != m_Suballocations.end();
8025 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
8026 it->hAllocation->CanBecomeLost() &&
8027 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8029 it = FreeSuballocation(it);
8030 ++lostAllocationCount;
8033 return lostAllocationCount;
// Validates the magic values written in the debug margins before and after
// every live allocation; returns VK_ERROR_VALIDATION_FAILED_EXT on the first
// corrupted margin.
8036 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
8038 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8039 it != m_Suballocations.end();
8042 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
8044 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
8046 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
8047 return VK_ERROR_VALIDATION_FAILED_EXT;
8049 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
8051 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8052 return VK_ERROR_VALIDATION_FAILED_EXT;
8060 void VmaBlockMetadata_Generic::Alloc(
8061 const VmaAllocationRequest& request,
8062 VmaSuballocationType type,
8063 VkDeviceSize allocSize,
8067 VMA_ASSERT(!upperAddress);
8068 VMA_ASSERT(request.item != m_Suballocations.end());
8069 VmaSuballocation& suballoc = *request.item;
8071 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8073 VMA_ASSERT(request.offset >= suballoc.offset);
8074 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
8075 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
8076 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
8080 UnregisterFreeSuballocation(request.item);
8082 suballoc.offset = request.offset;
8083 suballoc.size = allocSize;
8084 suballoc.type = type;
8085 suballoc.hAllocation = hAllocation;
8090 VmaSuballocation paddingSuballoc = {};
8091 paddingSuballoc.offset = request.offset + allocSize;
8092 paddingSuballoc.size = paddingEnd;
8093 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8094 VmaSuballocationList::iterator next = request.item;
8096 const VmaSuballocationList::iterator paddingEndItem =
8097 m_Suballocations.insert(next, paddingSuballoc);
8098 RegisterFreeSuballocation(paddingEndItem);
8104 VmaSuballocation paddingSuballoc = {};
8105 paddingSuballoc.offset = request.offset - paddingBegin;
8106 paddingSuballoc.size = paddingBegin;
8107 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8108 const VmaSuballocationList::iterator paddingBeginItem =
8109 m_Suballocations.insert(request.item, paddingSuballoc);
8110 RegisterFreeSuballocation(paddingBeginItem);
8114 m_FreeCount = m_FreeCount - 1;
8115 if(paddingBegin > 0)
8123 m_SumFreeSize -= allocSize;
8126 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
8128 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8129 suballocItem != m_Suballocations.end();
8132 VmaSuballocation& suballoc = *suballocItem;
8133 if(suballoc.hAllocation == allocation)
8135 FreeSuballocation(suballocItem);
8136 VMA_HEAVY_ASSERT(Validate());
8140 VMA_ASSERT(0 &&
"Not found!");
8143 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
8145 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8146 suballocItem != m_Suballocations.end();
8149 VmaSuballocation& suballoc = *suballocItem;
8150 if(suballoc.offset == offset)
8152 FreeSuballocation(suballocItem);
8156 VMA_ASSERT(0 &&
"Not found!");
8159 bool VmaBlockMetadata_Generic::ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize)
8161 typedef VmaSuballocationList::iterator iter_type;
8162 for(iter_type suballocItem = m_Suballocations.begin();
8163 suballocItem != m_Suballocations.end();
8166 VmaSuballocation& suballoc = *suballocItem;
8167 if(suballoc.hAllocation == alloc)
8169 iter_type nextItem = suballocItem;
8173 VMA_ASSERT(newSize != alloc->GetSize() && newSize > 0);
8176 if(newSize < alloc->GetSize())
8178 const VkDeviceSize sizeDiff = suballoc.size - newSize;
8181 if(nextItem != m_Suballocations.end())
8184 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8187 UnregisterFreeSuballocation(nextItem);
8188 nextItem->offset -= sizeDiff;
8189 nextItem->size += sizeDiff;
8190 RegisterFreeSuballocation(nextItem);
8196 VmaSuballocation newFreeSuballoc;
8197 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
8198 newFreeSuballoc.offset = suballoc.offset + newSize;
8199 newFreeSuballoc.size = sizeDiff;
8200 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8201 iter_type newFreeSuballocIt = m_Suballocations.insert(nextItem, newFreeSuballoc);
8202 RegisterFreeSuballocation(newFreeSuballocIt);
8211 VmaSuballocation newFreeSuballoc;
8212 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
8213 newFreeSuballoc.offset = suballoc.offset + newSize;
8214 newFreeSuballoc.size = sizeDiff;
8215 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8216 m_Suballocations.push_back(newFreeSuballoc);
8218 iter_type newFreeSuballocIt = m_Suballocations.end();
8219 RegisterFreeSuballocation(--newFreeSuballocIt);
8224 suballoc.size = newSize;
8225 m_SumFreeSize += sizeDiff;
8230 const VkDeviceSize sizeDiff = newSize - suballoc.size;
8233 if(nextItem != m_Suballocations.end())
8236 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8239 if(nextItem->size < sizeDiff + VMA_DEBUG_MARGIN)
8245 if(nextItem->size > sizeDiff)
8248 UnregisterFreeSuballocation(nextItem);
8249 nextItem->offset += sizeDiff;
8250 nextItem->size -= sizeDiff;
8251 RegisterFreeSuballocation(nextItem);
8257 UnregisterFreeSuballocation(nextItem);
8258 m_Suballocations.erase(nextItem);
8274 suballoc.size = newSize;
8275 m_SumFreeSize -= sizeDiff;
8282 VMA_ASSERT(0 &&
"Not found!");
8286 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 8288 VkDeviceSize lastSize = 0;
8289 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
8291 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
8293 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
8294 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
8295 VMA_VALIDATE(it->size >= lastSize);
8296 lastSize = it->size;
8301 bool VmaBlockMetadata_Generic::CheckAllocation(
8302 uint32_t currentFrameIndex,
8303 uint32_t frameInUseCount,
8304 VkDeviceSize bufferImageGranularity,
8305 VkDeviceSize allocSize,
8306 VkDeviceSize allocAlignment,
8307 VmaSuballocationType allocType,
8308 VmaSuballocationList::const_iterator suballocItem,
8309 bool canMakeOtherLost,
8310 VkDeviceSize* pOffset,
8311 size_t* itemsToMakeLostCount,
8312 VkDeviceSize* pSumFreeSize,
8313 VkDeviceSize* pSumItemSize)
const 8315 VMA_ASSERT(allocSize > 0);
8316 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8317 VMA_ASSERT(suballocItem != m_Suballocations.cend());
8318 VMA_ASSERT(pOffset != VMA_NULL);
8320 *itemsToMakeLostCount = 0;
8324 if(canMakeOtherLost)
8326 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8328 *pSumFreeSize = suballocItem->size;
8332 if(suballocItem->hAllocation->CanBecomeLost() &&
8333 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8335 ++*itemsToMakeLostCount;
8336 *pSumItemSize = suballocItem->size;
8345 if(GetSize() - suballocItem->offset < allocSize)
8351 *pOffset = suballocItem->offset;
8354 if(VMA_DEBUG_MARGIN > 0)
8356 *pOffset += VMA_DEBUG_MARGIN;
8360 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
8364 if(bufferImageGranularity > 1)
8366 bool bufferImageGranularityConflict =
false;
8367 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8368 while(prevSuballocItem != m_Suballocations.cbegin())
8371 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8372 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8374 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8376 bufferImageGranularityConflict =
true;
8384 if(bufferImageGranularityConflict)
8386 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
8392 if(*pOffset >= suballocItem->offset + suballocItem->size)
8398 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
8401 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
8403 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
8405 if(suballocItem->offset + totalSize > GetSize())
8412 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
8413 if(totalSize > suballocItem->size)
8415 VkDeviceSize remainingSize = totalSize - suballocItem->size;
8416 while(remainingSize > 0)
8419 if(lastSuballocItem == m_Suballocations.cend())
8423 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8425 *pSumFreeSize += lastSuballocItem->size;
8429 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
8430 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
8431 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8433 ++*itemsToMakeLostCount;
8434 *pSumItemSize += lastSuballocItem->size;
8441 remainingSize = (lastSuballocItem->size < remainingSize) ?
8442 remainingSize - lastSuballocItem->size : 0;
8448 if(bufferImageGranularity > 1)
8450 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
8452 while(nextSuballocItem != m_Suballocations.cend())
8454 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8455 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8457 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8459 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
8460 if(nextSuballoc.hAllocation->CanBecomeLost() &&
8461 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8463 ++*itemsToMakeLostCount;
8482 const VmaSuballocation& suballoc = *suballocItem;
8483 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8485 *pSumFreeSize = suballoc.size;
8488 if(suballoc.size < allocSize)
8494 *pOffset = suballoc.offset;
8497 if(VMA_DEBUG_MARGIN > 0)
8499 *pOffset += VMA_DEBUG_MARGIN;
8503 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
8507 if(bufferImageGranularity > 1)
8509 bool bufferImageGranularityConflict =
false;
8510 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8511 while(prevSuballocItem != m_Suballocations.cbegin())
8514 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8515 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8517 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8519 bufferImageGranularityConflict =
true;
8527 if(bufferImageGranularityConflict)
8529 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
8534 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
8537 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
8540 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
8547 if(bufferImageGranularity > 1)
8549 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
8551 while(nextSuballocItem != m_Suballocations.cend())
8553 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8554 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8556 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8575 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
8577 VMA_ASSERT(item != m_Suballocations.end());
8578 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8580 VmaSuballocationList::iterator nextItem = item;
8582 VMA_ASSERT(nextItem != m_Suballocations.end());
8583 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
8585 item->size += nextItem->size;
8587 m_Suballocations.erase(nextItem);
8590 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
8593 VmaSuballocation& suballoc = *suballocItem;
8594 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8595 suballoc.hAllocation = VK_NULL_HANDLE;
8599 m_SumFreeSize += suballoc.size;
8602 bool mergeWithNext =
false;
8603 bool mergeWithPrev =
false;
8605 VmaSuballocationList::iterator nextItem = suballocItem;
8607 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
8609 mergeWithNext =
true;
8612 VmaSuballocationList::iterator prevItem = suballocItem;
8613 if(suballocItem != m_Suballocations.begin())
8616 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8618 mergeWithPrev =
true;
8624 UnregisterFreeSuballocation(nextItem);
8625 MergeFreeWithNext(suballocItem);
8630 UnregisterFreeSuballocation(prevItem);
8631 MergeFreeWithNext(prevItem);
8632 RegisterFreeSuballocation(prevItem);
8637 RegisterFreeSuballocation(suballocItem);
8638 return suballocItem;
8642 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
8644 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8645 VMA_ASSERT(item->size > 0);
8649 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8651 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8653 if(m_FreeSuballocationsBySize.empty())
8655 m_FreeSuballocationsBySize.push_back(item);
8659 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
8667 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
8669 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8670 VMA_ASSERT(item->size > 0);
8674 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8676 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8678 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
8679 m_FreeSuballocationsBySize.data(),
8680 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
8682 VmaSuballocationItemSizeLess());
8683 for(
size_t index = it - m_FreeSuballocationsBySize.data();
8684 index < m_FreeSuballocationsBySize.size();
8687 if(m_FreeSuballocationsBySize[index] == item)
8689 VmaVectorRemove(m_FreeSuballocationsBySize, index);
8692 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
8694 VMA_ASSERT(0 &&
"Not found.");
8700 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
8701 VkDeviceSize bufferImageGranularity,
8702 VmaSuballocationType& inOutPrevSuballocType)
const 8704 if(bufferImageGranularity == 1 || IsEmpty())
8709 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
8710 bool typeConflictFound =
false;
8711 for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
8712 it != m_Suballocations.cend();
8715 const VmaSuballocationType suballocType = it->type;
8716 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
8718 minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
8719 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
8721 typeConflictFound =
true;
8723 inOutPrevSuballocType = suballocType;
8727 return typeConflictFound || minAlignment >= bufferImageGranularity;
8733 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
8734 VmaBlockMetadata(hAllocator),
8736 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8737 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8738 m_1stVectorIndex(0),
8739 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
8740 m_1stNullItemsBeginCount(0),
8741 m_1stNullItemsMiddleCount(0),
8742 m_2ndNullItemsCount(0)
8746 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
8750 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
8752 VmaBlockMetadata::Init(size);
8753 m_SumFreeSize = size;
8756 bool VmaBlockMetadata_Linear::Validate()
const 8758 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8759 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8761 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
8762 VMA_VALIDATE(!suballocations1st.empty() ||
8763 suballocations2nd.empty() ||
8764 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
8766 if(!suballocations1st.empty())
8769 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
8771 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
8773 if(!suballocations2nd.empty())
8776 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
8779 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
8780 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
8782 VkDeviceSize sumUsedSize = 0;
8783 const size_t suballoc1stCount = suballocations1st.size();
8784 VkDeviceSize offset = VMA_DEBUG_MARGIN;
8786 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8788 const size_t suballoc2ndCount = suballocations2nd.size();
8789 size_t nullItem2ndCount = 0;
8790 for(
size_t i = 0; i < suballoc2ndCount; ++i)
8792 const VmaSuballocation& suballoc = suballocations2nd[i];
8793 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8795 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8796 VMA_VALIDATE(suballoc.offset >= offset);
8800 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8801 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8802 sumUsedSize += suballoc.size;
8809 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8812 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
8815 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
8817 const VmaSuballocation& suballoc = suballocations1st[i];
8818 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
8819 suballoc.hAllocation == VK_NULL_HANDLE);
8822 size_t nullItem1stCount = m_1stNullItemsBeginCount;
8824 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
8826 const VmaSuballocation& suballoc = suballocations1st[i];
8827 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8829 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8830 VMA_VALIDATE(suballoc.offset >= offset);
8831 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
8835 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8836 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8837 sumUsedSize += suballoc.size;
8844 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8846 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
8848 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8850 const size_t suballoc2ndCount = suballocations2nd.size();
8851 size_t nullItem2ndCount = 0;
8852 for(
size_t i = suballoc2ndCount; i--; )
8854 const VmaSuballocation& suballoc = suballocations2nd[i];
8855 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8857 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8858 VMA_VALIDATE(suballoc.offset >= offset);
8862 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8863 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8864 sumUsedSize += suballoc.size;
8871 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8874 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
8877 VMA_VALIDATE(offset <= GetSize());
8878 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
8883 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 8885 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
8886 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
8889 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 8891 const VkDeviceSize size = GetSize();
8903 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8905 switch(m_2ndVectorMode)
8907 case SECOND_VECTOR_EMPTY:
8913 const size_t suballocations1stCount = suballocations1st.size();
8914 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
8915 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
8916 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
8918 firstSuballoc.offset,
8919 size - (lastSuballoc.offset + lastSuballoc.size));
8923 case SECOND_VECTOR_RING_BUFFER:
8928 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8929 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
8930 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
8931 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
8935 case SECOND_VECTOR_DOUBLE_STACK:
8940 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8941 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
8942 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
8943 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Fills a VmaStatInfo for this linear block by walking the address space in
// ascending offset order. Three sweeps share the same pattern: advance to the
// next non-null allocation, account the free gap before it, account the
// allocation, advance lastOffset.
// Sweep 1: 2nd vector in ring-buffer mode (lowest offsets).
// Sweep 2: 1st vector, up to freeSpace1stTo2ndEnd.
// Sweep 3: 2nd vector in double-stack mode, scanned back-to-front (highest offsets).
8953 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 8955 const VkDeviceSize size = GetSize();
8956 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8957 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8958 const size_t suballoc1stCount = suballocations1st.size();
8959 const size_t suballoc2ndCount = suballocations2nd.size();
// lastOffset tracks the end of the last processed range while sweeping forward.
8970 VkDeviceSize lastOffset = 0;
8972 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
// In ring-buffer mode the 2nd vector occupies [0, start of 1st vector).
8974 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8975 size_t nextAlloc2ndIndex = 0;
8976 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null items (freed placeholders) to find the next real allocation.
8979 while(nextAlloc2ndIndex < suballoc2ndCount &&
8980 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8982 ++nextAlloc2ndIndex;
8986 if(nextAlloc2ndIndex < suballoc2ndCount)
8988 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Account the free gap before this allocation, then the allocation itself.
8991 if(lastOffset < suballoc.offset)
8994 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9008 lastOffset = suballoc.offset + suballoc.size;
9009 ++nextAlloc2ndIndex;
// No more allocations in 2nd vector: account trailing free space of this region.
9015 if(lastOffset < freeSpace2ndTo1stEnd)
9017 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9025 lastOffset = freeSpace2ndTo1stEnd;
// Sweep the 1st vector. In double-stack mode it ends where the 2nd stack begins.
9030 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9031 const VkDeviceSize freeSpace1stTo2ndEnd =
9032 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9033 while(lastOffset < freeSpace1stTo2ndEnd)
9036 while(nextAlloc1stIndex < suballoc1stCount &&
9037 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9039 ++nextAlloc1stIndex;
9043 if(nextAlloc1stIndex < suballoc1stCount)
9045 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9048 if(lastOffset < suballoc.offset)
9051 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9065 lastOffset = suballoc.offset + suballoc.size;
9066 ++nextAlloc1stIndex;
9072 if(lastOffset < freeSpace1stTo2ndEnd)
9074 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9082 lastOffset = freeSpace1stTo2ndEnd;
9086 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
// Double stack is stored with descending offsets; walk it backwards so offsets ascend.
9088 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9089 while(lastOffset < size)
9092 while(nextAlloc2ndIndex != SIZE_MAX &&
9093 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9095 --nextAlloc2ndIndex;
9099 if(nextAlloc2ndIndex != SIZE_MAX)
9101 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9104 if(lastOffset < suballoc.offset)
9107 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9121 lastOffset = suballoc.offset + suballoc.size;
9122 --nextAlloc2ndIndex;
// Trailing free space up to the end of the block.
9128 if(lastOffset < size)
9130 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Accumulates this linear block's statistics into a VmaPoolStats, walking the
// address space in ascending offset order with the same three sweeps as
// CalcAllocationStatInfo: 2nd vector (ring-buffer mode), then 1st vector, then
// 2nd vector (double-stack mode, back-to-front).
9146 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 9148 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9149 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9150 const VkDeviceSize size = GetSize();
9151 const size_t suballoc1stCount = suballocations1st.size();
9152 const size_t suballoc2ndCount = suballocations2nd.size();
// Whole block size counts toward the pool size.
9154 inoutStats.
size += size;
9156 VkDeviceSize lastOffset = 0;
9158 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
// In ring-buffer mode the 2nd vector occupies [0, start of 1st vector).
9160 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
// NOTE(review): nextAlloc2ndIndex starts at m_1stNullItemsBeginCount, which is a
// counter for the 1st vector, while this loop indexes the 2nd vector; the sibling
// CalcAllocationStatInfo starts the equivalent index at 0. Looks like a bug - verify
// against upstream before relying on these stats.
9161 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
9162 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null items (freed placeholders) to find the next real allocation.
9165 while(nextAlloc2ndIndex < suballoc2ndCount &&
9166 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9168 ++nextAlloc2ndIndex;
9172 if(nextAlloc2ndIndex < suballoc2ndCount)
9174 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Account the free gap before this allocation, then the allocation itself.
9177 if(lastOffset < suballoc.offset)
9180 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9191 lastOffset = suballoc.offset + suballoc.size;
9192 ++nextAlloc2ndIndex;
// No more allocations in 2nd vector: account trailing free space of this region.
9197 if(lastOffset < freeSpace2ndTo1stEnd)
9200 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9207 lastOffset = freeSpace2ndTo1stEnd;
// Sweep the 1st vector. In double-stack mode it ends where the 2nd stack begins.
9212 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9213 const VkDeviceSize freeSpace1stTo2ndEnd =
9214 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9215 while(lastOffset < freeSpace1stTo2ndEnd)
9218 while(nextAlloc1stIndex < suballoc1stCount &&
9219 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9221 ++nextAlloc1stIndex;
9225 if(nextAlloc1stIndex < suballoc1stCount)
9227 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9230 if(lastOffset < suballoc.offset)
9233 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9244 lastOffset = suballoc.offset + suballoc.size;
9245 ++nextAlloc1stIndex;
9250 if(lastOffset < freeSpace1stTo2ndEnd)
9253 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9260 lastOffset = freeSpace1stTo2ndEnd;
9264 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
// Double stack is stored with descending offsets; walk it backwards so offsets ascend.
9266 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9267 while(lastOffset < size)
9270 while(nextAlloc2ndIndex != SIZE_MAX &&
9271 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9273 --nextAlloc2ndIndex;
9277 if(nextAlloc2ndIndex != SIZE_MAX)
9279 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9282 if(lastOffset < suballoc.offset)
9285 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9296 lastOffset = suballoc.offset + suballoc.size;
9297 --nextAlloc2ndIndex;
// Trailing free space up to the end of the block.
9302 if(lastOffset < size)
9305 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Writes a detailed JSON map of this linear block's layout.
// Pass 1 counts allocations / unused ranges and sums used bytes (needed for
// the summary header); pass 2 re-walks the same layout and emits each
// allocation and unused range in address order.
// NOTE(review): this extraction is missing some original lines (braces and
// several counting statements) - compare against upstream VMA before editing.
9318 #if VMA_STATS_STRING_ENABLED 9319 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 9321 const VkDeviceSize size = GetSize();
9322 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9323 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9324 const size_t suballoc1stCount = suballocations1st.size();
9325 const size_t suballoc2ndCount = suballocations2nd.size();
// --- Pass 1: count allocations / unused ranges, accumulate used bytes. ---
9329 size_t unusedRangeCount = 0;
9330 VkDeviceSize usedBytes = 0;
9332 VkDeviceSize lastOffset = 0;
9334 size_t alloc2ndCount = 0;
// Ring-buffer mode: 2nd vector occupies the space before the 1st vector's
// first live item.
9335 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9337 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9338 size_t nextAlloc2ndIndex = 0;
9339 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip freed (null) items.
9342 while(nextAlloc2ndIndex < suballoc2ndCount &&
9343 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9345 ++nextAlloc2ndIndex;
9349 if(nextAlloc2ndIndex < suballoc2ndCount)
9351 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// A gap before this allocation counts as an unused range.
9354 if(lastOffset < suballoc.offset)
9363 usedBytes += suballoc.size;
9366 lastOffset = suballoc.offset + suballoc.size;
9367 ++nextAlloc2ndIndex;
// Trailing free space up to where the 1st vector begins.
9372 if(lastOffset < freeSpace2ndTo1stEnd)
9379 lastOffset = freeSpace2ndTo1stEnd;
// Count items of the 1st vector, up to the bottom of the 2nd (double-stack)
// vector or the end of the block.
9384 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9385 size_t alloc1stCount = 0;
9386 const VkDeviceSize freeSpace1stTo2ndEnd =
9387 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9388 while(lastOffset < freeSpace1stTo2ndEnd)
9391 while(nextAlloc1stIndex < suballoc1stCount &&
9392 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9394 ++nextAlloc1stIndex;
9398 if(nextAlloc1stIndex < suballoc1stCount)
9400 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9403 if(lastOffset < suballoc.offset)
9412 usedBytes += suballoc.size;
9415 lastOffset = suballoc.offset + suballoc.size;
9416 ++nextAlloc1stIndex;
9421 if(lastOffset < size)
9428 lastOffset = freeSpace1stTo2ndEnd;
// Double-stack mode: iterate the 2nd vector from the highest index (which
// holds the lowest offset, per freeSpace1stTo2ndEnd above) toward index 0,
// i.e. in increasing address order.
9432 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9434 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9435 while(lastOffset < size)
9438 while(nextAlloc2ndIndex != SIZE_MAX &&
9439 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9441 --nextAlloc2ndIndex;
9445 if(nextAlloc2ndIndex != SIZE_MAX)
9447 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9450 if(lastOffset < suballoc.offset)
9459 usedBytes += suballoc.size;
9462 lastOffset = suballoc.offset + suballoc.size;
9463 --nextAlloc2ndIndex;
9468 if(lastOffset < size)
// --- Emit summary header, then Pass 2: print every range in order. ---
9480 const VkDeviceSize unusedBytes = size - usedBytes;
9481 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
// Pass 2 mirrors the counting logic above, but prints instead of counting.
9486 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9488 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9489 size_t nextAlloc2ndIndex = 0;
9490 while(lastOffset < freeSpace2ndTo1stEnd)
9493 while(nextAlloc2ndIndex < suballoc2ndCount &&
9494 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9496 ++nextAlloc2ndIndex;
9500 if(nextAlloc2ndIndex < suballoc2ndCount)
9502 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9505 if(lastOffset < suballoc.offset)
9508 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9509 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9514 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9517 lastOffset = suballoc.offset + suballoc.size;
9518 ++nextAlloc2ndIndex;
9523 if(lastOffset < freeSpace2ndTo1stEnd)
9526 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9527 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9531 lastOffset = freeSpace2ndTo1stEnd;
9536 nextAlloc1stIndex = m_1stNullItemsBeginCount;
9537 while(lastOffset < freeSpace1stTo2ndEnd)
9540 while(nextAlloc1stIndex < suballoc1stCount &&
9541 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9543 ++nextAlloc1stIndex;
9547 if(nextAlloc1stIndex < suballoc1stCount)
9549 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9552 if(lastOffset < suballoc.offset)
9555 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9556 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9561 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9564 lastOffset = suballoc.offset + suballoc.size;
9565 ++nextAlloc1stIndex;
9570 if(lastOffset < freeSpace1stTo2ndEnd)
9573 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9574 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9578 lastOffset = freeSpace1stTo2ndEnd;
9582 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9584 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9585 while(lastOffset < size)
9588 while(nextAlloc2ndIndex != SIZE_MAX &&
9589 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9591 --nextAlloc2ndIndex;
9595 if(nextAlloc2ndIndex != SIZE_MAX)
9597 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9600 if(lastOffset < suballoc.offset)
9603 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9604 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9609 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9612 lastOffset = suballoc.offset + suballoc.size;
9613 --nextAlloc2ndIndex;
9618 if(lastOffset < size)
9621 const VkDeviceSize unusedRangeSize = size - lastOffset;
9622 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9631 PrintDetailedMap_End(json);
// Tries to find room for a new allocation of the given size/alignment in this
// linear block. Three paths are visible here: upper-address (double stack,
// growing down from the end), normal lower-address append after the 1st
// vector, and ring-buffer wrap-around which may require making old
// allocations "lost" to free space.
// NOTE(review): this extraction is missing some original lines (braces,
// return statements, and apparently the upperAddress parameter) - compare
// against upstream VMA before editing.
9633 #endif // #if VMA_STATS_STRING_ENABLED 9635 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
9636 uint32_t currentFrameIndex,
9637 uint32_t frameInUseCount,
9638 VkDeviceSize bufferImageGranularity,
9639 VkDeviceSize allocSize,
9640 VkDeviceSize allocAlignment,
9642 VmaSuballocationType allocType,
9643 bool canMakeOtherLost,
9645 VmaAllocationRequest* pAllocationRequest)
9647 VMA_ASSERT(allocSize > 0);
9648 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9649 VMA_ASSERT(pAllocationRequest != VMA_NULL);
9650 VMA_HEAVY_ASSERT(Validate());
9652 const VkDeviceSize size = GetSize();
9653 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9654 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// --- Upper-address path: allocate from the end of the block downward.
// Double-stack usage is incompatible with ring-buffer mode.
9658 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9660 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
9665 if(allocSize > size)
// Start just below the previous top-of-stack allocation (or the block end).
9669 VkDeviceSize resultBaseOffset = size - allocSize;
9670 if(!suballocations2nd.empty())
9672 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9673 resultBaseOffset = lastSuballoc.offset - allocSize;
9674 if(allocSize > lastSuballoc.offset)
9681 VkDeviceSize resultOffset = resultBaseOffset;
// Apply the debug margin, then align DOWN (growth is toward lower addresses).
9684 if(VMA_DEBUG_MARGIN > 0)
9686 if(resultOffset < VMA_DEBUG_MARGIN)
9690 resultOffset -= VMA_DEBUG_MARGIN;
9694 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Respect bufferImageGranularity against neighbors already in the 2nd vector.
9698 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9700 bool bufferImageGranularityConflict =
false;
9701 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9703 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9704 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9706 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
9708 bufferImageGranularityConflict =
true;
9716 if(bufferImageGranularityConflict)
9718 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// Fits only if it does not collide with the end of the 1st vector.
9723 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
9724 suballocations1st.back().offset + suballocations1st.back().size :
9726 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
// Check granularity conflicts against the top items of the 1st vector too.
9730 if(bufferImageGranularity > 1)
9732 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9734 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9735 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9737 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success: fill the request. Nothing needs to be made lost on this path.
9751 pAllocationRequest->offset = resultOffset;
9752 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
9753 pAllocationRequest->sumItemSize = 0;
9755 pAllocationRequest->itemsToMakeLostCount = 0;
// --- Lower-address path: append after the last item of the 1st vector.
9761 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9765 VkDeviceSize resultBaseOffset = 0;
9766 if(!suballocations1st.empty())
9768 const VmaSuballocation& lastSuballoc = suballocations1st.back();
9769 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9773 VkDeviceSize resultOffset = resultBaseOffset;
// Debug margin, then align UP.
9776 if(VMA_DEBUG_MARGIN > 0)
9778 resultOffset += VMA_DEBUG_MARGIN;
9782 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
9786 if(bufferImageGranularity > 1 && !suballocations1st.empty())
9788 bool bufferImageGranularityConflict =
false;
9789 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9791 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9792 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9794 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9796 bufferImageGranularityConflict =
true;
9804 if(bufferImageGranularityConflict)
9806 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the bottom of the 2nd (double) stack, or at block end.
9810 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
9811 suballocations2nd.back().offset : size;
9814 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
// Check granularity conflicts against items in the 2nd vector.
9818 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9820 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9822 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9823 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9825 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
9839 pAllocationRequest->offset = resultOffset;
9840 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
9841 pAllocationRequest->sumItemSize = 0;
9843 pAllocationRequest->itemsToMakeLostCount = 0;
// --- Wrap-around path: start/extend a ring buffer in the 2nd vector,
// possibly making old 1st-vector allocations lost to create room.
9850 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9852 VMA_ASSERT(!suballocations1st.empty());
9854 VkDeviceSize resultBaseOffset = 0;
9855 if(!suballocations2nd.empty())
9857 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9858 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9862 VkDeviceSize resultOffset = resultBaseOffset;
9865 if(VMA_DEBUG_MARGIN > 0)
9867 resultOffset += VMA_DEBUG_MARGIN;
9871 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
9875 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9877 bool bufferImageGranularityConflict =
false;
9878 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
9880 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
9881 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9883 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9885 bufferImageGranularityConflict =
true;
9893 if(bufferImageGranularityConflict)
9895 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
9899 pAllocationRequest->itemsToMakeLostCount = 0;
9900 pAllocationRequest->sumItemSize = 0;
9901 size_t index1st = m_1stNullItemsBeginCount;
// Walk 1st-vector items that overlap the requested range; each live item
// that is lost-capable and old enough is counted for later MakeLost.
9903 if(canMakeOtherLost)
9905 while(index1st < suballocations1st.size() &&
9906 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
9909 const VmaSuballocation& suballoc = suballocations1st[index1st];
9910 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
9916 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
// An allocation may be made lost only if lost-capable and not used within
// the last frameInUseCount frames.
9917 if(suballoc.hAllocation->CanBecomeLost() &&
9918 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9920 ++pAllocationRequest->itemsToMakeLostCount;
9921 pAllocationRequest->sumItemSize += suballoc.size;
// Also make lost the items that merely share a granularity page with us.
9933 if(bufferImageGranularity > 1)
9935 while(index1st < suballocations1st.size())
9937 const VmaSuballocation& suballoc = suballocations1st[index1st];
9938 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
9940 if(suballoc.hAllocation != VK_NULL_HANDLE)
9943 if(suballoc.hAllocation->CanBecomeLost() &&
9944 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9946 ++pAllocationRequest->itemsToMakeLostCount;
9947 pAllocationRequest->sumItemSize += suballoc.size;
// The request succeeds if the new range fits before the next surviving
// 1st-vector item (or before block end when no items remain).
9966 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN < size) ||
9967 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
9971 if(bufferImageGranularity > 1)
9973 for(
size_t nextSuballocIndex = index1st;
9974 nextSuballocIndex < suballocations1st.size();
9975 nextSuballocIndex++)
9977 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
9978 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9980 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
9994 pAllocationRequest->offset = resultOffset;
9995 pAllocationRequest->sumFreeSize =
9996 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
9998 - pAllocationRequest->sumItemSize;
// Actually marks as lost the allocations that CreateAllocationRequest counted
// in itemsToMakeLostCount (ring-buffer path). Walks the 1st vector starting
// at its first live item and frees matching live suballocations.
// NOTE(review): closing braces / return statements are missing from this
// extraction - compare against upstream VMA before editing.
10008 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
10009 uint32_t currentFrameIndex,
10010 uint32_t frameInUseCount,
10011 VmaAllocationRequest* pAllocationRequest)
10013 if(pAllocationRequest->itemsToMakeLostCount == 0)
// Making allocations lost is only meaningful in ring-buffer (or empty-2nd)
// layout - never in double-stack mode.
10018 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
10020 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10021 size_t index1st = m_1stNullItemsBeginCount;
10022 size_t madeLostCount = 0;
10023 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
10025 VMA_ASSERT(index1st < suballocations1st.size());
10026 VmaSuballocation& suballoc = suballocations1st[index1st];
10027 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10029 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
10030 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
10031 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Turn the suballocation into a free (null) middle item and update totals.
10033 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10034 suballoc.hAllocation = VK_NULL_HANDLE;
10035 m_SumFreeSize += suballoc.size;
10036 ++m_1stNullItemsMiddleCount;
// Compact / trim the vectors now that items were freed.
10047 CleanupAfterFree();
// Makes lost every allocation (in both suballocation vectors) that is
// lost-capable and has not been used within the last frameInUseCount frames.
// Returns how many allocations were made lost.
10053 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
10055 uint32_t lostAllocationCount = 0;
// Pass over live items of the 1st vector (skipping the leading null run).
10057 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10058 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10060 VmaSuballocation& suballoc = suballocations1st[i];
10061 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10062 suballoc.hAllocation->CanBecomeLost() &&
10063 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10065 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10066 suballoc.hAllocation = VK_NULL_HANDLE;
10067 ++m_1stNullItemsMiddleCount;
10068 m_SumFreeSize += suballoc.size;
10069 ++lostAllocationCount;
// Same pass over the 2nd vector; its freed items are tracked separately
// in m_2ndNullItemsCount.
10073 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10074 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10076 VmaSuballocation& suballoc = suballocations2nd[i];
10077 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10078 suballoc.hAllocation->CanBecomeLost() &&
10079 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10081 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10082 suballoc.hAllocation = VK_NULL_HANDLE;
10083 ++m_2ndNullItemsCount;
10084 ++lostAllocationCount;
// Only pay for cleanup if something actually changed.
10088 if(lostAllocationCount)
10090 CleanupAfterFree();
10093 return lostAllocationCount;
// Validates the magic-value margins written immediately before and after
// every live allocation in both suballocation vectors. Returns
// VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted margin found.
10096 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
10098 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10099 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10101 const VmaSuballocation& suballoc = suballocations1st[i];
10102 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Margin before the allocation (written at offset - VMA_DEBUG_MARGIN).
10104 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10106 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10107 return VK_ERROR_VALIDATION_FAILED_EXT;
// Margin after the allocation (written right past its last byte).
10109 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10111 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10112 return VK_ERROR_VALIDATION_FAILED_EXT;
// Same two checks for every live item of the 2nd vector.
10117 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10118 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10120 const VmaSuballocation& suballoc = suballocations2nd[i];
10121 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10123 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10125 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10126 return VK_ERROR_VALIDATION_FAILED_EXT;
10128 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10130 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10131 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request: records the new
// suballocation in the appropriate vector depending on where the offset
// falls (end of 1st vector, or wrapped around into the 2nd / ring buffer).
// NOTE(review): the upperAddress branch header and several braces are
// missing from this extraction - compare against upstream VMA before editing.
10139 void VmaBlockMetadata_Linear::Alloc(
10140 const VmaAllocationRequest& request,
10141 VmaSuballocationType type,
10142 VkDeviceSize allocSize,
10146 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
// Upper address: push onto the 2nd vector, switching to double-stack mode.
10150 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
10151 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
10152 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10153 suballocations2nd.push_back(newSuballoc);
10154 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
10158 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
// First allocation ever goes straight into the 1st vector.
10161 if(suballocations1st.empty())
10163 suballocations1st.push_back(newSuballoc);
// Offset lies past the current end of the 1st vector: plain append.
10168 if(request.offset >= suballocations1st.back().offset + suballocations1st.back().size)
10171 VMA_ASSERT(request.offset + allocSize <= GetSize());
10172 suballocations1st.push_back(newSuballoc);
// Offset fits before the 1st vector's first live item: wrapped around,
// so it goes into the 2nd vector operating as a ring buffer.
10175 else if(request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset)
10177 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10179 switch(m_2ndVectorMode)
10181 case SECOND_VECTOR_EMPTY:
// First wrap-around: enter ring-buffer mode.
10183 VMA_ASSERT(suballocations2nd.empty());
10184 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
10186 case SECOND_VECTOR_RING_BUFFER:
10188 VMA_ASSERT(!suballocations2nd.empty());
10190 case SECOND_VECTOR_DOUBLE_STACK:
10191 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
10197 suballocations2nd.push_back(newSuballoc);
10201 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
10206 m_SumFreeSize -= newSuballoc.size;
10209 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
10211 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation that starts exactly at `offset`. Fast paths handle
// the first live item of the 1st vector and the last item of either vector;
// the general case binary-searches the sorted vectors.
10214 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
10216 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10217 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10219 if(!suballocations1st.empty())
// Fast path: it is the first live item of the 1st vector.
10222 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10223 if(firstSuballoc.offset == offset)
10225 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10226 firstSuballoc.hAllocation = VK_NULL_HANDLE;
10227 m_SumFreeSize += firstSuballoc.size;
10228 ++m_1stNullItemsBeginCount;
10229 CleanupAfterFree();
// Fast path: it is the most recently pushed item of the 2nd vector.
10235 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
10236 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10238 VmaSuballocation& lastSuballoc = suballocations2nd.back();
10239 if(lastSuballoc.offset == offset)
10241 m_SumFreeSize += lastSuballoc.size;
10242 suballocations2nd.pop_back();
10243 CleanupAfterFree();
// Fast path: it is the last item of the 1st vector (no 2nd vector in use).
10248 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
10250 VmaSuballocation& lastSuballoc = suballocations1st.back();
10251 if(lastSuballoc.offset == offset)
10253 m_SumFreeSize += lastSuballoc.size;
10254 suballocations1st.pop_back();
10255 CleanupAfterFree();
// General case: binary search the 1st vector (sorted by increasing offset).
10262 VmaSuballocation refSuballoc;
10263 refSuballoc.offset = offset;
10265 SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
10266 suballocations1st.begin() + m_1stNullItemsBeginCount,
10267 suballocations1st.end(),
10269 if(it != suballocations1st.end())
10271 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10272 it->hAllocation = VK_NULL_HANDLE;
10273 ++m_1stNullItemsMiddleCount;
10274 m_SumFreeSize += it->size;
10275 CleanupAfterFree();
10280 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
// Search the 2nd vector: ascending offsets in ring-buffer mode, descending
// in double-stack mode - hence the two different comparators below.
10283 VmaSuballocation refSuballoc;
10284 refSuballoc.offset = offset;
10286 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
10287 VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
10288 VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
10289 if(it != suballocations2nd.end())
10291 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10292 it->hAllocation = VK_NULL_HANDLE;
10293 ++m_2ndNullItemsCount;
10294 m_SumFreeSize += it->size;
10295 CleanupAfterFree();
// Reaching this point means the offset matched nothing - a caller bug.
10300 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
10303 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 10305 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10306 const size_t suballocCount = AccessSuballocations1st().size();
10307 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Housekeeping performed after every free / make-lost: resets the block when
// it became empty, strips leading and trailing null items, optionally
// compacts the 1st vector, and promotes the ring-buffer 2nd vector to become
// the new 1st vector when the 1st runs out of live items.
// NOTE(review): the empty-block guard condition and several braces are
// missing from this extraction - compare against upstream VMA before editing.
10310 void VmaBlockMetadata_Linear::CleanupAfterFree()
10312 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10313 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Whole block empty: reset everything to the initial state.
10317 suballocations1st.clear();
10318 suballocations2nd.clear();
10319 m_1stNullItemsBeginCount = 0;
10320 m_1stNullItemsMiddleCount = 0;
10321 m_2ndNullItemsCount = 0;
10322 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10326 const size_t suballoc1stCount = suballocations1st.size();
10327 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10328 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Grow the leading-null region over any nulls directly following it.
10331 while(m_1stNullItemsBeginCount < suballoc1stCount &&
10332 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10334 ++m_1stNullItemsBeginCount;
10335 --m_1stNullItemsMiddleCount;
// Pop null items off the tail of the 1st vector.
10339 while(m_1stNullItemsMiddleCount > 0 &&
10340 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
10342 --m_1stNullItemsMiddleCount;
10343 suballocations1st.pop_back();
// Pop null items off the tail of the 2nd vector.
10347 while(m_2ndNullItemsCount > 0 &&
10348 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
10350 --m_2ndNullItemsCount;
10351 suballocations2nd.pop_back();
// Too many holes: move live items of the 1st vector to the front.
10354 if(ShouldCompact1st())
10356 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
10357 size_t srcIndex = m_1stNullItemsBeginCount;
10358 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
10360 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
10364 if(dstIndex != srcIndex)
10366 suballocations1st[dstIndex] = suballocations1st[srcIndex];
10370 suballocations1st.resize(nonNullItemCount);
10371 m_1stNullItemsBeginCount = 0;
10372 m_1stNullItemsMiddleCount = 0;
// 2nd vector emptied out: leave double-stack / ring-buffer mode.
10376 if(suballocations2nd.empty())
10378 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector has no live items left.
10382 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
10384 suballocations1st.clear();
10385 m_1stNullItemsBeginCount = 0;
10387 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
// Swap roles: the ring-buffer 2nd vector becomes the new 1st vector, and
// its null-item count is re-expressed as begin/middle counts.
10390 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10391 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
10392 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
10393 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10395 ++m_1stNullItemsBeginCount;
10396 --m_1stNullItemsMiddleCount;
10398 m_2ndNullItemsCount = 0;
// Flipping this index swaps which internal vector is considered "1st".
10399 m_1stVectorIndex ^= 1;
10404 VMA_HEAVY_ASSERT(Validate());
// Buddy-allocator metadata constructor: zeroes counters and the per-level
// free lists.
// NOTE(review): parts of the member-initializer list (e.g. the root pointer)
// are missing from this extraction - compare against upstream VMA.
10411 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
10412 VmaBlockMetadata(hAllocator),
10414 m_AllocationCount(0),
// All free lists start empty (front/back pointers null).
10418 memset(m_FreeList, 0,
sizeof(m_FreeList));
10421 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
10423 DeleteNode(m_Root);
// Initializes the buddy system for a block of `size` bytes: the usable size
// is rounded down to a power of two, the level count is derived, and a
// single free root node covering the whole usable region is created.
// NOTE(review): the loop body incrementing m_LevelCount and the assignment
// of the root node are missing from this extraction - see upstream VMA.
10426 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
10428 VmaBlockMetadata::Init(size);
// The buddy algorithm can only manage a power-of-two sized region;
// any remainder becomes "unusable" space.
10430 m_UsableSize = VmaPrevPow2(size);
10431 m_SumFreeSize = m_UsableSize;
// Determine the number of tree levels, bounded by MAX_LEVELS and the
// minimum node size.
10435 while(m_LevelCount < MAX_LEVELS &&
10436 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
// Create the root node spanning the entire usable region.
10441 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
10442 rootNode->offset = 0;
10443 rootNode->type = Node::TYPE_FREE;
10444 rootNode->parent = VMA_NULL;
10445 rootNode->buddy = VMA_NULL;
10448 AddToFreeListFront(0, rootNode);
// Consistency check for the whole buddy structure: validates the tree
// recursively, then checks counters and the doubly-linked free list on
// every level. Fails (via VMA_VALIDATE) on the first inconsistency.
10451 bool VmaBlockMetadata_Buddy::Validate()
const 10454 ValidationContext ctx;
10455 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
10457 VMA_VALIDATE(
false &&
"ValidateNode failed.");
// Aggregate counters must match what the tree walk computed.
10459 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
10460 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
// Validate the free list of every active level: proper head, only FREE
// nodes, consistent back pointer and prev/next linkage.
10463 for(uint32_t level = 0; level < m_LevelCount; ++level)
10465 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
10466 m_FreeList[level].front->free.prev == VMA_NULL)
10468 for(Node* node = m_FreeList[level].front;
10470 node = node->free.next)
10472 VMA_VALIDATE(node->type == Node::TYPE_FREE);
10474 if(node->free.next == VMA_NULL)
10476 VMA_VALIDATE(m_FreeList[level].back == node);
10480 VMA_VALIDATE(node->free.next->free.prev == node);
// Levels beyond m_LevelCount must remain completely empty.
10486 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
10488 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
// Largest free contiguous range = node size of the shallowest level that
// still has a free node (levels are scanned from largest node size down).
// NOTE(review): the trailing `return 0;` for a fully-allocated block is
// missing from this extraction - compare against upstream VMA.
10494 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const 10496 for(uint32_t level = 0; level < m_LevelCount; ++level)
10498 if(m_FreeList[level].front != VMA_NULL)
10500 return LevelToNodeSize(level);
// Aggregates allocation statistics by walking the buddy tree from the root;
// the tail beyond the power-of-two usable size is reported separately.
// NOTE(review): several statements (stat-info initialization, the
// unusable-size accounting) are missing from this extraction.
10506 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 10508 const VkDeviceSize unusableSize = GetUnusableSize();
10519 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
10521 if(unusableSize > 0)
// Adds this block's totals to pool-wide statistics; the region beyond the
// power-of-two usable size counts toward unused space.
// NOTE(review): statements inside the trailing if are missing from this
// extraction - compare against upstream VMA.
10530 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const 10532 const VkDeviceSize unusableSize = GetUnusableSize();
10534 inoutStats.
size += GetSize();
// Unusable tail is treated as unused space in the pool totals.
10535 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
10540 if(unusableSize > 0)
// Writes a JSON map of this buddy block: summary statistics first, then
// every tree node, then the unusable tail (present when the block size was
// not a power of two).
// NOTE(review): some argument lists are truncated in this extraction.
10547 #if VMA_STATS_STRING_ENABLED 10549 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const 10553 CalcAllocationStatInfo(stat);
10555 PrintDetailedMap_Begin(
10561 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
10563 const VkDeviceSize unusableSize = GetUnusableSize();
10564 if(unusableSize > 0)
// Report the non-power-of-two remainder as one unused range.
10566 PrintDetailedMap_UnusedRange(json,
10571 PrintDetailedMap_End(json);
// Finds a free buddy node large enough for the request: derives the target
// level from the (possibly granularity-padded) size, then scans the free
// lists from that level toward the root for a suitably aligned node.
// NOTE(review): the upperAddress parameter declaration, return statements
// and braces are missing from this extraction - see upstream VMA.
10574 #endif // #if VMA_STATS_STRING_ENABLED 10576 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
10577 uint32_t currentFrameIndex,
10578 uint32_t frameInUseCount,
10579 VkDeviceSize bufferImageGranularity,
10580 VkDeviceSize allocSize,
10581 VkDeviceSize allocAlignment,
10583 VmaSuballocationType allocType,
10584 bool canMakeOtherLost,
10586 VmaAllocationRequest* pAllocationRequest)
10588 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Pad size/alignment up to bufferImageGranularity for allocation types whose
// usage is unknown or image-optimal, to avoid aliasing conflicts in a node.
10592 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
10593 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
10594 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
10596 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
10597 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
10600 if(allocSize > m_UsableSize)
// Search from the deepest level that still fits toward the root (level 0).
10605 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10606 for(uint32_t level = targetLevel + 1; level--; )
10608 for(Node* freeNode = m_FreeList[level].front;
10609 freeNode != VMA_NULL;
10610 freeNode = freeNode->free.next)
// Buddy node offsets are power-of-two multiples; a simple modulo check
// suffices for alignment.
10612 if(freeNode->offset % allocAlignment == 0)
10614 pAllocationRequest->offset = freeNode->offset;
10615 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
10616 pAllocationRequest->sumItemSize = 0;
10617 pAllocationRequest->itemsToMakeLostCount = 0;
// The chosen level is passed to Alloc() through customData.
10618 pAllocationRequest->customData = (
void*)(uintptr_t)level;
// The buddy algorithm does not support lost allocations: the request can
// only succeed when nothing was asked to be made lost.
10627 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
10628 uint32_t currentFrameIndex,
10629 uint32_t frameInUseCount,
10630 VmaAllocationRequest* pAllocationRequest)
10636 return pAllocationRequest->itemsToMakeLostCount == 0;
// Lost allocations are unsupported by the buddy algorithm.
// NOTE(review): the body of this function is missing from this extraction.
10639 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// Commits an allocation request in the buddy block: locates the free node
// chosen by CreateAllocationRequest (its level travels in customData),
// splits oversized nodes down to the target level, then converts the final
// node into an allocation.
// NOTE(review): the hAllocation parameter line and several braces are
// missing from this extraction - compare against upstream VMA.
10648 void VmaBlockMetadata_Buddy::Alloc(
10649 const VmaAllocationRequest& request,
10650 VmaSuballocationType type,
10651 VkDeviceSize allocSize,
10655 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10656 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Find the free node at the requested offset on the chosen level.
10658 Node* currNode = m_FreeList[currLevel].front;
10659 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
10660 while(currNode->offset != request.offset)
10662 currNode = currNode->free.next;
10663 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split until the node size matches the target level.
10667 while(currLevel < targetLevel)
// The node being split leaves its free list...
10671 RemoveFromFreeList(currLevel, currNode);
10673 const uint32_t childrenLevel = currLevel + 1;
// ...and two half-size children are created in its place.
10676 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
10677 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
10679 leftChild->offset = currNode->offset;
10680 leftChild->type = Node::TYPE_FREE;
10681 leftChild->parent = currNode;
10682 leftChild->buddy = rightChild;
10684 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
10685 rightChild->type = Node::TYPE_FREE;
10686 rightChild->parent = currNode;
10687 rightChild->buddy = leftChild;
10690 currNode->type = Node::TYPE_SPLIT;
10691 currNode->split.leftChild = leftChild;
// Right pushed first so the left child ends up at the list front.
10694 AddToFreeListFront(childrenLevel, rightChild);
10695 AddToFreeListFront(childrenLevel, leftChild);
10700 currNode = m_FreeList[currLevel].front;
// We now hold a free node of exactly the right size at the target level.
10709 VMA_ASSERT(currLevel == targetLevel &&
10710 currNode != VMA_NULL &&
10711 currNode->type == Node::TYPE_FREE);
10712 RemoveFromFreeList(currLevel, currNode);
10715 currNode->type = Node::TYPE_ALLOCATION;
10716 currNode->allocation.alloc = hAllocation;
10718 ++m_AllocationCount;
10720 m_SumFreeSize -= allocSize;
10723 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
10725 if(node->type == Node::TYPE_SPLIT)
10727 DeleteNode(node->split.leftChild->buddy);
10728 DeleteNode(node->split.leftChild);
10731 vma_delete(GetAllocationCallbacks(), node);
// Recursively validates one buddy-tree node: checks parent/buddy linkage,
// then per-type invariants, accumulating counts into ctx for the caller
// (Validate) to compare against the block's counters.
// NOTE(review): the switch statement header and braces are missing from
// this extraction - compare against upstream VMA.
10734 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const 10736 VMA_VALIDATE(level < m_LevelCount);
10737 VMA_VALIDATE(curr->parent == parent);
// Only the root lacks a buddy; buddies must point at each other.
10738 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
10739 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
10742 case Node::TYPE_FREE:
// A free node contributes its full level size to the free total.
10744 ctx.calculatedSumFreeSize += levelNodeSize;
10745 ++ctx.calculatedFreeCount;
10747 case Node::TYPE_ALLOCATION:
10748 ++ctx.calculatedAllocationCount;
// Slack inside the node (node size minus allocation size) counts as free.
10749 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
10750 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
10752 case Node::TYPE_SPLIT:
10754 const uint32_t childrenLevel = level + 1;
10755 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
// Left child starts where the parent starts...
10756 const Node*
const leftChild = curr->split.leftChild;
10757 VMA_VALIDATE(leftChild != VMA_NULL);
10758 VMA_VALIDATE(leftChild->offset == curr->offset);
10759 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
10761 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
// ...and the right child (the left's buddy) starts half a node later.
10763 const Node*
const rightChild = leftChild->buddy;
10764 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
10765 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
10767 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
// Maps an allocation size to the deepest tree level whose node size still fits it.
// NOTE: the loop body's `++level;` and the final `return level;` are elided here.
10778 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const 10781 uint32_t level = 0;
10782 VkDeviceSize currLevelNodeSize = m_UsableSize;
10783 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
// Descend while the next (half-size) level can still hold the allocation.
10784 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
10787 currLevelNodeSize = nextLevelNodeSize;
10788 nextLevelNodeSize = currLevelNodeSize >> 1;
// Frees the allocation at `offset`: walks the tree down to the leaf, marks it
// free, then merges free buddies back upward as far as possible.
10793 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
10796 Node* node = m_Root;
10797 VkDeviceSize nodeOffset = 0;
10798 uint32_t level = 0;
10799 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
// Descend through split nodes, choosing the child containing `offset`.
// NOTE: `++level;` inside this loop is elided from this view.
10800 while(node->type == Node::TYPE_SPLIT)
10802 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
10803 if(offset < nodeOffset + nextLevelSize)
10805 node = node->split.leftChild;
10809 node = node->split.leftChild->buddy;
10810 nodeOffset += nextLevelSize;
10813 levelNodeSize = nextLevelSize;
10816 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
10817 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
10820 --m_AllocationCount;
10821 m_SumFreeSize += alloc->GetSize();
10823 node->type = Node::TYPE_FREE;
// Merge with the buddy while it is also free, collapsing the parent back
// into a single free node (presumably `--level;` and `node = parent;` are in
// the elided part of this loop — TODO confirm against full source).
10826 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
10828 RemoveFromFreeList(level, node->buddy);
10829 Node*
const parent = node->parent;
10831 vma_delete(GetAllocationCallbacks(), node->buddy);
10832 vma_delete(GetAllocationCallbacks(), node);
10833 parent->type = Node::TYPE_FREE;
10841 AddToFreeListFront(level, node);
// Recursively accumulates allocation statistics for one subtree into `outInfo`.
// NOTE: switch header and the stat-update statements for each case are elided.
10844 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const 10848 case Node::TYPE_FREE:
10854 case Node::TYPE_ALLOCATION:
10856 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Leftover space inside the node beyond the allocation is an unused range.
10862 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
10863 if(unusedRangeSize > 0)
10872 case Node::TYPE_SPLIT:
10874 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
10875 const Node*
const leftChild = node->split.leftChild;
10876 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
10877 const Node*
const rightChild = leftChild->buddy;
10878 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
// Pushes a free node at the front of the doubly-linked free list for `level`.
10886 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
10888 VMA_ASSERT(node->type == Node::TYPE_FREE);
10891 Node*
const frontNode = m_FreeList[level].front;
10892 if(frontNode == VMA_NULL)
// Empty list: node becomes both front and back.
10894 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
10895 node->free.prev = node->free.next = VMA_NULL;
10896 m_FreeList[level].front = m_FreeList[level].back = node;
// Non-empty: link before the current front.
10900 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
10901 node->free.prev = VMA_NULL;
10902 node->free.next = frontNode;
10903 frontNode->free.prev = node;
10904 m_FreeList[level].front = node;
// Unlinks `node` from the doubly-linked free list of `level`, updating the
// list's front/back pointers when the node is at either end.
10908 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
10910 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
// Fix the forward link (or the list front).
10913 if(node->free.prev == VMA_NULL)
10915 VMA_ASSERT(m_FreeList[level].front == node);
10916 m_FreeList[level].front = node->free.next;
10920 Node*
const prevFreeNode = node->free.prev;
10921 VMA_ASSERT(prevFreeNode->free.next == node);
10922 prevFreeNode->free.next = node->free.next;
// Fix the backward link (or the list back).
10926 if(node->free.next == VMA_NULL)
10928 VMA_ASSERT(m_FreeList[level].back == node);
10929 m_FreeList[level].back = node->free.prev;
10933 Node*
const nextFreeNode = node->free.next;
10934 VMA_ASSERT(nextFreeNode->free.prev == node);
10935 nextFreeNode->free.prev = node->free.prev;
// Recursively emits JSON for one subtree: free ranges, allocations, and any
// internal fragmentation after an allocation. Compiled only with stats strings.
// NOTE: the switch(node->type) header is elided from this view.
#if VMA_STATS_STRING_ENABLED 10940 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const 10944 case Node::TYPE_FREE:
10945 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
10947 case Node::TYPE_ALLOCATION:
10949 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
10950 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Report the tail of the node not covered by the allocation as unused.
10951 if(allocSize < levelNodeSize)
10953 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
10957 case Node::TYPE_SPLIT:
10959 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
10960 const Node*
const leftChild = node->split.leftChild;
10961 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
10962 const Node*
const rightChild = leftChild->buddy;
10963 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
// Constructor: leaves the block uninitialized; real setup happens in Init().
#endif // #if VMA_STATS_STRING_ENABLED 10976 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
10977 m_pMetadata(VMA_NULL),
10978 m_MemoryTypeIndex(UINT32_MAX),
10980 m_hMemory(VK_NULL_HANDLE),
10982 m_pMappedData(VMA_NULL)
// Binds this wrapper to an already-allocated VkDeviceMemory and creates the
// metadata object matching the requested sub-allocation algorithm.
// NOTE: the switch/if selecting the algorithm is elided; only the three
// metadata constructions are visible.
10986 void VmaDeviceMemoryBlock::Init(
10988 uint32_t newMemoryTypeIndex,
10989 VkDeviceMemory newMemory,
10990 VkDeviceSize newSize,
10992 uint32_t algorithm)
// Init must not be called twice on the same block.
10994 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
10996 m_MemoryTypeIndex = newMemoryTypeIndex;
10998 m_hMemory = newMemory;
11003 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
11006 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
// Default (no special algorithm): generic free-list metadata.
11012 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
11014 m_pMetadata->Init(newSize);
// Releases the VkDeviceMemory and metadata. The block must be empty:
// all sub-allocations freed beforehand.
11017 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
11021 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
11023 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
11024 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
11025 m_hMemory = VK_NULL_HANDLE;
11027 vma_delete(allocator, m_pMetadata);
11028 m_pMetadata = VMA_NULL;
// Debug validation: block must hold live memory of nonzero size, then defers
// to the metadata's own consistency check.
11031 bool VmaDeviceMemoryBlock::Validate()
const 11033 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
11034 (m_pMetadata->GetSize() != 0));
11036 return m_pMetadata->Validate();
// Temporarily maps the block and asks the metadata to verify the magic-value
// margins around allocations. Returns the mapping error if Map fails.
11039 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
11041 void* pData =
nullptr;
11042 VkResult res = Map(hAllocator, 1, &pData);
11043 if(res != VK_SUCCESS)
11048 res = m_pMetadata->CheckCorruption(pData);
// Balance the temporary Map above.
11050 Unmap(hAllocator, 1);
// Reference-counted map of the whole block. If already mapped, only bumps the
// count and returns the cached pointer; otherwise calls vkMapMemory.
// NOTE: the early `count == 0` check and some vkMapMemory arguments are elided.
11055 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
// m_Mutex guards m_MapCount/m_pMappedData across threads (if mutexes enabled).
11062 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11063 if(m_MapCount != 0)
11065 m_MapCount += count;
11066 VMA_ASSERT(m_pMappedData != VMA_NULL);
11067 if(ppData != VMA_NULL)
11069 *ppData = m_pMappedData;
11075 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
11076 hAllocator->m_hDevice,
11082 if(result == VK_SUCCESS)
11084 if(ppData != VMA_NULL)
11086 *ppData = m_pMappedData;
11088 m_MapCount = count;
// Reference-counted unmap: decrements the map count and only calls
// vkUnmapMemory when it reaches zero. Asserts on unbalanced unmap.
11094 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
11101 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11102 if(m_MapCount >= count)
11104 m_MapCount -= count;
11105 if(m_MapCount == 0)
11107 m_pMappedData = VMA_NULL;
11108 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
11113 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Corruption-detection support: maps the block and writes magic values into
// the debug margins immediately before and after the allocation.
11117 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
// Only meaningful when debug margins and corruption detection are enabled.
11119 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11120 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11123 VkResult res = Map(hAllocator, 1, &pData);
11124 if(res != VK_SUCCESS)
11129 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
11130 VmaWriteMagicValue(pData, allocOffset + allocSize);
11132 Unmap(hAllocator, 1);
// Counterpart of WriteMagicValueAroundAllocation: on free, re-checks the magic
// values in the margins and asserts loudly if either was overwritten.
11137 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11139 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11140 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11143 VkResult res = Map(hAllocator, 1, &pData);
11144 if(res != VK_SUCCESS)
// A write before the allocation clobbered the leading margin.
11149 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
11151 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
// A write past the allocation clobbered the trailing margin.
11153 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
11155 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
11158 Unmap(hAllocator, 1);
// Binds a VkBuffer to this block's memory at the allocation's offset.
// The mutex serializes against a concurrent vkMapMemory on the same memory.
11163 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
11168 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11169 hAllocation->GetBlock() ==
this);
11171 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11172 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
11173 hAllocator->m_hDevice,
11176 hAllocation->GetOffset());
// Image counterpart of BindBufferMemory: binds a VkImage at the allocation's
// offset under the same mutex.
11179 VkResult VmaDeviceMemoryBlock::BindImageMemory(
11184 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11185 hAllocation->GetBlock() ==
this);
11187 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11188 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
11189 hAllocator->m_hDevice,
11192 hAllocation->GetOffset());
11197 memset(&outInfo, 0,
sizeof(outInfo));
11216 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Pool constructor: forwards the create-info fields into the member
// VmaBlockVector. A zero blockSize means "use the allocator's preferred size"
// and marks the size as non-explicit (see the != 0 arguments below).
// NOTE: the constructor body and destructor body are elided from this view.
11224 VmaPool_T::VmaPool_T(
11227 VkDeviceSize preferredBlockSize) :
11230 createInfo.memoryTypeIndex,
11231 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
11232 createInfo.minBlockCount,
11233 createInfo.maxBlockCount,
11235 createInfo.frameInUseCount,
11237 createInfo.blockSize != 0,
11243 VmaPool_T::~VmaPool_T()
// VmaBlockVector constructor: stores configuration (memory type, block size
// limits, granularity, algorithm) and initializes the empty block list with
// the allocator's allocation callbacks.
#if VMA_STATS_STRING_ENABLED 11249 #endif // #if VMA_STATS_STRING_ENABLED 11251 VmaBlockVector::VmaBlockVector(
11253 uint32_t memoryTypeIndex,
11254 VkDeviceSize preferredBlockSize,
11255 size_t minBlockCount,
11256 size_t maxBlockCount,
11257 VkDeviceSize bufferImageGranularity,
11258 uint32_t frameInUseCount,
11260 bool explicitBlockSize,
11261 uint32_t algorithm) :
11262 m_hAllocator(hAllocator),
11263 m_MemoryTypeIndex(memoryTypeIndex),
11264 m_PreferredBlockSize(preferredBlockSize),
11265 m_MinBlockCount(minBlockCount),
11266 m_MaxBlockCount(maxBlockCount),
11267 m_BufferImageGranularity(bufferImageGranularity),
11268 m_FrameInUseCount(frameInUseCount),
11269 m_IsCustomPool(isCustomPool),
11270 m_ExplicitBlockSize(explicitBlockSize),
11271 m_Algorithm(algorithm),
11272 m_HasEmptyBlock(false),
11273 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks()))
// Destructor: destroys every remaining block (freeing its device memory) and
// deletes the wrapper objects, iterating backwards.
11278 VmaBlockVector::~VmaBlockVector()
11280 for(
size_t i = m_Blocks.size(); i--; )
11282 m_Blocks[i]->Destroy(m_hAllocator);
11283 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount blocks at the preferred size, stopping (and
// presumably returning the error — the return is elided) on first failure.
11287 VkResult VmaBlockVector::CreateMinBlocks()
11289 for(
size_t i = 0; i < m_MinBlockCount; ++i)
11291 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
11292 if(res != VK_SUCCESS)
// Aggregates per-block metadata statistics into *pStats under a read lock.
11300 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
11302 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
11304 const size_t blockCount = m_Blocks.size();
11313 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11315 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11316 VMA_ASSERT(pBlock);
11317 VMA_HEAVY_ASSERT(pBlock->Validate());
11318 pBlock->m_pMetadata->AddPoolStats(*pStats);
// Corruption detection requires the feature compiled in, a nonzero debug
// margin, and HOST_VISIBLE|HOST_COHERENT memory so margins can be read/written.
11322 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 11324 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
11325 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
11326 (VMA_DEBUG_MARGIN > 0) &&
11327 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
11330 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates `allocationCount` pages via AllocatePage under a single write
// lock. On failure, frees the pages allocated so far and zeroes the output
// array so callers don't see partial results.
11332 VkResult VmaBlockVector::Allocate(
11334 uint32_t currentFrameIndex,
11336 VkDeviceSize alignment,
11338 VmaSuballocationType suballocType,
11339 size_t allocationCount,
11343 VkResult res = VK_SUCCESS;
11346 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11347 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
11349 res = AllocatePage(
11356 pAllocations + allocIndex);
11357 if(res != VK_SUCCESS)
// Roll back already-successful pages on partial failure.
11364 if(res != VK_SUCCESS)
11367 while(allocIndex--)
11369 Free(pAllocations[allocIndex]);
11371 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Core single-allocation strategy. Order of attempts:
//   1. Existing blocks (last block first, then forward or backward scan
//      depending on strategy — much of the strategy-selection code is elided).
//   2. Creating a new block, shrinking its size up to 3 halvings if the
//      preferred size fails or isn't warranted.
//   3. If allowed, making other (lost-able) allocations lost, retrying up to
//      VMA_ALLOCATION_TRY_COUNT times.
11377 VkResult VmaBlockVector::AllocatePage(
11379 uint32_t currentFrameIndex,
11381 VkDeviceSize alignment,
11383 VmaSuballocationType suballocType,
11390 const bool canCreateNewBlock =
11392 (m_Blocks.size() < m_MaxBlockCount);
11399 canMakeOtherLost =
false;
// Upper-address allocation is only supported by some algorithms (conditions elided).
11403 if(isUpperAddress &&
11406 return VK_ERROR_FEATURE_NOT_PRESENT;
11420 return VK_ERROR_FEATURE_NOT_PRESENT;
// A request larger than a whole block (incl. both debug margins) can never fit.
11424 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
11426 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11434 if(!canMakeOtherLost || canCreateNewBlock)
// Attempt 1a: the most recently created block.
11443 if(!m_Blocks.empty())
11445 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
11446 VMA_ASSERT(pCurrBlock);
11447 VkResult res = AllocateFromBlock(
11458 if(res == VK_SUCCESS)
11460 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
// Attempt 1b: forward scan (smaller/older blocks first).
11470 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11472 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11473 VMA_ASSERT(pCurrBlock);
11474 VkResult res = AllocateFromBlock(
11485 if(res == VK_SUCCESS)
11487 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// Attempt 1c: backward scan (alternative strategy; selection code elided).
11495 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11497 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11498 VMA_ASSERT(pCurrBlock);
11499 VkResult res = AllocateFromBlock(
11510 if(res == VK_SUCCESS)
11512 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// Attempt 2: allocate a brand-new block.
11520 if(canCreateNewBlock)
11523 VkDeviceSize newBlockSize = m_PreferredBlockSize;
11524 uint32_t newBlockSizeShift = 0;
11525 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Heuristic: start smaller than the preferred size when existing blocks are
// small and the request would still fit with room to spare.
11527 if(!m_ExplicitBlockSize)
11530 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
11531 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
11533 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11534 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
11536 newBlockSize = smallerNewBlockSize;
11537 ++newBlockSizeShift;
11546 size_t newBlockIndex = 0;
11547 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On out-of-memory, retry with progressively halved block sizes.
11549 if(!m_ExplicitBlockSize)
11551 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
11553 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11554 if(smallerNewBlockSize >= size)
11556 newBlockSize = smallerNewBlockSize;
11557 ++newBlockSizeShift;
11558 res = CreateBlock(newBlockSize, &newBlockIndex);
11567 if(res == VK_SUCCESS)
11569 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
11570 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
11572 res = AllocateFromBlock(
11583 if(res == VK_SUCCESS)
11585 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
11591 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Attempt 3: make other allocations lost to create room.
11598 if(canMakeOtherLost)
11600 uint32_t tryIndex = 0;
11601 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
11603 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
11604 VmaAllocationRequest bestRequest = {};
11605 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Forward scan: pick the block whose request costs the fewest lost bytes.
11611 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11613 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11614 VMA_ASSERT(pCurrBlock);
11615 VmaAllocationRequest currRequest = {};
11616 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11619 m_BufferImageGranularity,
11628 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11629 if(pBestRequestBlock == VMA_NULL ||
11630 currRequestCost < bestRequestCost)
11632 pBestRequestBlock = pCurrBlock;
11633 bestRequest = currRequest;
11634 bestRequestCost = currRequestCost;
// Cost 0 means nothing needs to be lost — can't do better; stop searching.
11636 if(bestRequestCost == 0)
// Backward scan variant (strategy selection elided).
11647 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11649 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11650 VMA_ASSERT(pCurrBlock);
11651 VmaAllocationRequest currRequest = {};
11652 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11655 m_BufferImageGranularity,
11664 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11665 if(pBestRequestBlock == VMA_NULL ||
11666 currRequestCost < bestRequestCost ||
11669 pBestRequestBlock = pCurrBlock;
11670 bestRequest = currRequest;
11671 bestRequestCost = currRequestCost;
11673 if(bestRequestCost == 0 ||
11683 if(pBestRequestBlock != VMA_NULL)
// Persistently-mapped allocations need the block mapped up front (condition elided).
11687 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
11688 if(res != VK_SUCCESS)
// The request may have been invalidated concurrently; if making the required
// allocations lost succeeds, commit the allocation in this block.
11694 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
11700 if(pBestRequestBlock->m_pMetadata->IsEmpty())
11702 m_HasEmptyBlock =
false;
11705 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
11706 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, isUpperAddress, *pAllocation);
11707 (*pAllocation)->InitBlockAllocation(
11710 bestRequest.offset,
11716 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
11717 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
11718 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
11719 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11721 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
11723 if(IsCorruptionDetectionEnabled())
11725 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
11726 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Exhausted all retries: further attempts would likely livelock.
11741 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
11743 return VK_ERROR_TOO_MANY_OBJECTS;
11747 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Returns an allocation to its block. May destroy at most one empty block —
// and only while keeping one empty block cached (m_HasEmptyBlock) and
// respecting m_MinBlockCount. The actual Vulkan free happens outside the lock.
11750 void VmaBlockVector::Free(
11753 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope for the write lock; deletion of the block is deferred past it.
11757 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11759 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
11761 if(IsCorruptionDetectionEnabled())
11763 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
11764 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
// Balance the map done at allocation time for persistently-mapped allocations.
11767 if(hAllocation->IsPersistentMap())
11769 pBlock->Unmap(m_hAllocator, 1);
11772 pBlock->m_pMetadata->Free(hAllocation);
11773 VMA_HEAVY_ASSERT(pBlock->Validate());
11775 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// Block became empty: delete it only if another empty block is already cached.
11778 if(pBlock->m_pMetadata->IsEmpty())
11781 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
11783 pBlockToDelete = pBlock;
11789 m_HasEmptyBlock =
true;
// Otherwise, if an empty block exists at the end, it can now be released.
11794 else if(m_HasEmptyBlock)
11796 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
11797 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
11799 pBlockToDelete = pLastBlock;
11800 m_Blocks.pop_back();
11801 m_HasEmptyBlock =
false;
11805 IncrementallySortBlocks();
// Heavy Vulkan call performed outside the mutex.
11810 if(pBlockToDelete != VMA_NULL)
11812 VMA_DEBUG_LOG(
" Deleted empty allocation");
11813 pBlockToDelete->Destroy(m_hAllocator);
11814 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the largest existing block size, early-exiting once it reaches the
// preferred size (no larger value is relevant to the caller's heuristic).
11818 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 11820 VkDeviceSize result = 0;
11821 for(
size_t i = m_Blocks.size(); i--; )
11823 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
11824 if(result >= m_PreferredBlockSize)
// Removes `pBlock` from the block list (linear search by pointer identity).
11832 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
11834 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11836 if(m_Blocks[blockIndex] == pBlock)
11838 VmaVectorRemove(m_Blocks, blockIndex);
// Single bubble-sort pass ordering blocks by ascending free size, so fuller
// blocks are tried first by forward scans. One pass per call keeps it cheap.
11845 void VmaBlockVector::IncrementallySortBlocks()
11850 for(
size_t i = 1; i < m_Blocks.size(); ++i)
11852 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
11854 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Tries to carve the request out of a specific block: creates an allocation
// request (no lost allocations allowed here), maps if persistently mapped,
// commits the suballocation, and applies debug fill / magic margins.
11861 VkResult VmaBlockVector::AllocateFromBlock(
11862 VmaDeviceMemoryBlock* pBlock,
11864 uint32_t currentFrameIndex,
11866 VkDeviceSize alignment,
11869 VmaSuballocationType suballocType,
11878 VmaAllocationRequest currRequest = {};
11879 if(pBlock->m_pMetadata->CreateAllocationRequest(
11882 m_BufferImageGranularity,
// This path never makes other allocations lost.
11892 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Map up-front for persistently-mapped allocations (condition elided).
11896 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
11897 if(res != VK_SUCCESS)
11904 if(pBlock->m_pMetadata->IsEmpty())
11906 m_HasEmptyBlock =
false;
11909 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
11910 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, isUpperAddress, *pAllocation);
11911 (*pAllocation)->InitBlockAllocation(
11914 currRequest.offset,
11920 VMA_HEAVY_ASSERT(pBlock->Validate());
11921 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
// Optional debug fill pattern to catch use of uninitialized memory.
11922 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11924 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
11926 if(IsCorruptionDetectionEnabled())
11928 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
11929 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
11933 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates VkDeviceMemory of `blockSize`, wraps it in a new
// VmaDeviceMemoryBlock, appends it, and optionally reports its index.
11936 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
11938 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
11939 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
11940 allocInfo.allocationSize = blockSize;
11941 VkDeviceMemory mem = VK_NULL_HANDLE;
11942 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
11951 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
11956 allocInfo.allocationSize,
11960 m_Blocks.push_back(pBlock);
11961 if(pNewBlockIndex != VMA_NULL)
11963 *pNewBlockIndex = m_Blocks.size() - 1;
// Executes defragmentation moves with CPU memcpy: maps the involved blocks,
// invalidates source ranges / flushes destination ranges on non-coherent
// memory, copies the data, then unmaps blocks this function mapped itself.
11969 void VmaBlockVector::ApplyDefragmentationMovesCpu(
11970 class VmaBlockVectorDefragmentationContext* pDefragCtx,
11971 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
11973 const size_t blockCount = m_Blocks.size();
11974 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
11978 BLOCK_FLAG_USED = 0x00000001,
// Distinguishes blocks we mapped here (must unmap) from already-mapped ones.
11979 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
11987 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
11988 blockInfo(blockCount, VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
11989 memset(blockInfo.data(), 0, blockCount *
sizeof(BlockInfo));
// Pass 1: mark every block that participates in any move.
11992 const size_t moveCount = moves.size();
11993 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
11995 const VmaDefragmentationMove& move = moves[moveIndex];
11996 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
11997 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
12000 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Pass 2: ensure each used block is mapped, reusing an existing mapping.
12003 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12005 BlockInfo& currBlockInfo = blockInfo[blockIndex];
12006 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12007 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
12009 currBlockInfo.pMappedData = pBlock->GetMappedData();
12011 if(currBlockInfo.pMappedData == VMA_NULL)
12013 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
12014 if(pDefragCtx->res == VK_SUCCESS)
12016 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
// Pass 3: perform the copies, with cache maintenance on non-coherent memory.
12023 if(pDefragCtx->res == VK_SUCCESS)
12025 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
12026 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
12028 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12030 const VmaDefragmentationMove& move = moves[moveIndex];
12032 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
12033 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
12035 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
// Invalidate the source range, aligned to nonCoherentAtomSize as required.
12040 VmaDeviceMemoryBlock*
const pSrcBlock = m_Blocks[move.srcBlockIndex];
12041 memRange.memory = pSrcBlock->GetDeviceMemory();
12042 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
12043 memRange.size = VMA_MIN(
12044 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
12045 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
12046 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
12051 reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
12052 reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
12053 static_cast<size_t>(move.size));
// Re-establish the magic margins at the moved allocation's new position.
12055 if(IsCorruptionDetectionEnabled())
12057 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
12058 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
// Flush the destination range so the write is visible to the device.
12064 VmaDeviceMemoryBlock*
const pDstBlock = m_Blocks[move.dstBlockIndex];
12065 memRange.memory = pDstBlock->GetDeviceMemory();
12066 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
12067 memRange.size = VMA_MIN(
12068 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
12069 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
12070 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// Pass 4: unmap only the blocks this function mapped (reverse order).
12077 for(
size_t blockIndex = blockCount; blockIndex--; )
12079 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
12080 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
12082 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12083 pBlock->Unmap(m_hAllocator, 1);
// GPU variant of defragmentation: creates a transfer buffer bound over each
// involved block and records vkCmdCopyBuffer commands into `commandBuffer`.
// The work completes only when the caller submits and waits on that buffer,
// hence the VK_NOT_READY result.
12088 void VmaBlockVector::ApplyDefragmentationMovesGpu(
12089 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12090 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12091 VkCommandBuffer commandBuffer)
12093 const size_t blockCount = m_Blocks.size();
12095 pDefragCtx->blockContexts.resize(blockCount);
12096 memset(pDefragCtx->blockContexts.data(), 0, blockCount *
sizeof(VmaBlockDefragmentationContext));
// Pass 1: mark blocks that participate in any move.
12099 const size_t moveCount = moves.size();
12100 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12102 const VmaDefragmentationMove& move = moves[moveIndex];
12103 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12104 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12107 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Pass 2: create a full-block transfer buffer for each used block and bind
// it at offset 0 of the block's memory.
12111 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
12112 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
12113 VK_BUFFER_USAGE_TRANSFER_DST_BIT;
12115 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12117 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
12118 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12119 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
12121 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
12122 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
12123 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
12124 if(pDefragCtx->res == VK_SUCCESS)
12126 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
12127 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
// Pass 3: record one buffer-to-buffer copy per move.
12134 if(pDefragCtx->res == VK_SUCCESS)
12136 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
12137 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
12139 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12141 const VmaDefragmentationMove& move = moves[moveIndex];
12143 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
12144 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
12146 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
12148 VkBufferCopy region = {
12152 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
12153 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, &region);
// NOT_READY signals the caller must submit the command buffer and call
// DefragmentationEnd afterwards.
12158 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
12160 pDefragCtx->res = VK_NOT_READY;
// NOTE(review): this is the body of VmaBlockVector::FreeEmptyBlocks — its
// signature (around original line 12162) is elided from this view.
// Destroys empty blocks above m_MinBlockCount (reporting freed bytes to the
// stats), and records whether an empty block remains.
12166 m_HasEmptyBlock =
false;
12167 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
12169 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12170 if(pBlock->m_pMetadata->IsEmpty())
12172 if(m_Blocks.size() > m_MinBlockCount)
12174 if(pDefragmentationStats != VMA_NULL)
12177 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
12180 VmaVectorRemove(m_Blocks, blockIndex);
12181 pBlock->Destroy(m_hAllocator);
12182 vma_delete(m_hAllocator, pBlock);
// Kept due to m_MinBlockCount: remember that an empty block exists.
12186 m_HasEmptyBlock =
true;
// Writes this block vector as a JSON object. Two layouts exist (custom pool
// vs. default pool — the branch on m_IsCustomPool is elided here), followed
// by a "Blocks" object keyed by block id with each block's detailed map.
#if VMA_STATS_STRING_ENABLED 12194 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
12196 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12198 json.BeginObject();
// Custom-pool layout: explicit memory type, block size, and block counts.
12202 json.WriteString(
"MemoryTypeIndex");
12203 json.WriteNumber(m_MemoryTypeIndex);
12205 json.WriteString(
"BlockSize");
12206 json.WriteNumber(m_PreferredBlockSize);
12208 json.WriteString(
"BlockCount");
12209 json.BeginObject(
true);
12210 if(m_MinBlockCount > 0)
12212 json.WriteString(
"Min");
12213 json.WriteNumber((uint64_t)m_MinBlockCount);
12215 if(m_MaxBlockCount < SIZE_MAX)
12217 json.WriteString(
"Max");
12218 json.WriteNumber((uint64_t)m_MaxBlockCount);
12220 json.WriteString(
"Cur");
12221 json.WriteNumber((uint64_t)m_Blocks.size());
12224 if(m_FrameInUseCount > 0)
12226 json.WriteString(
"FrameInUseCount");
12227 json.WriteNumber(m_FrameInUseCount);
12230 if(m_Algorithm != 0)
12232 json.WriteString(
"Algorithm");
12233 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
// Default-pool layout: only the preferred block size.
12238 json.WriteString(
"PreferredBlockSize");
12239 json.WriteNumber(m_PreferredBlockSize);
12242 json.WriteString(
"Blocks");
12243 json.BeginObject();
12244 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12246 json.BeginString();
12247 json.ContinueString(m_Blocks[i]->GetId());
12250 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// Runs the defragmentation algorithm for this vector: decides CPU vs. GPU
// path, computes the moves within the caller's byte/allocation budgets,
// updates the budgets and stats, then applies the moves.
#endif // #if VMA_STATS_STRING_ENABLED 12259 void VmaBlockVector::Defragment(
12260 class VmaBlockVectorDefragmentationContext* pCtx,
12262 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
12263 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
12264 VkCommandBuffer commandBuffer)
12266 pCtx->res = VK_SUCCESS;
12268 const VkMemoryPropertyFlags memPropFlags =
12269 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
12270 const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
12271 const bool isHostCoherent = (memPropFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0;
12273 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
// GPU defrag would invalidate the host-written magic margins, so it is
// disabled when corruption detection applies to this memory type.
12275 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
12276 (VMA_DEBUG_DETECT_CORRUPTION == 0 || !(isHostVisible && isHostCoherent));
12279 if(canDefragmentOnCpu || canDefragmentOnGpu)
12281 bool defragmentOnGpu;
// Only one path available: take it.
12283 if(canDefragmentOnGpu != canDefragmentOnCpu)
12285 defragmentOnGpu = canDefragmentOnGpu;
// Both available: prefer GPU for device-local memory or integrated GPUs.
12290 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
12291 m_hAllocator->IsIntegratedGpu();
// CPU memmove-style copies tolerate overlap; GPU vkCmdCopyBuffer does not.
12294 bool overlappingMoveSupported = !defragmentOnGpu;
// Lock is held until DefragmentationEnd (hence mutexLocked bookkeeping).
12296 if(m_hAllocator->m_UseMutex)
12298 m_Mutex.LockWrite();
12299 pCtx->mutexLocked =
true;
12302 pCtx->Begin(overlappingMoveSupported);
12306 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
12307 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
12308 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves =
12309 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()));
12310 pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);
// Charge the moved bytes/allocations against the appropriate budget.
12313 if(pStats != VMA_NULL)
12315 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
12316 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
12317 pStats->bytesMoved += bytesMoved;
12318 pStats->allocationsMoved += allocationsMoved;
12319 VMA_ASSERT(bytesMoved <= maxBytesToMove);
12320 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
12321 if(defragmentOnGpu)
12323 maxGpuBytesToMove -= bytesMoved;
12324 maxGpuAllocationsToMove -= allocationsMoved;
12328 maxCpuBytesToMove -= bytesMoved;
12329 maxCpuAllocationsToMove -= allocationsMoved;
12333 if(pCtx->res >= VK_SUCCESS)
12335 if(defragmentOnGpu)
12337 ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
12341 ApplyDefragmentationMovesCpu(pCtx, moves);
// Finishes a defragmentation pass: destroys the temporary transfer buffers
// (reverse order), frees now-empty blocks on success, and releases the write
// lock taken in Defragment().
12347 void VmaBlockVector::DefragmentationEnd(
12348 class VmaBlockVectorDefragmentationContext* pCtx,
12352 for(
size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
12354 VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
12355 if(blockCtx.hBuffer)
12357 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
12358 m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
12362 if(pCtx->res >= VK_SUCCESS)
12364 FreeEmptyBlocks(pStats);
12367 if(pCtx->mutexLocked)
12369 VMA_ASSERT(m_hAllocator->m_UseMutex);
12370 m_Mutex.UnlockWrite();
// Sums the allocation counts of all blocks. (The declaration of `result` and
// its return are elided from this view.)
12374 size_t VmaBlockVector::CalcAllocationCount()
const 12377 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12379 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
// Conservatively checks whether any block could suffer a buffer/image
// granularity conflict. Granularity of 1 can never conflict (early out).
// Only valid for the generic (default) algorithm — asserted below.
12384 bool VmaBlockVector::IsBufferImageGranularityConflictPossible()
const 12386 if(m_BufferImageGranularity == 1)
// Threads the last suballocation type across consecutive blocks so adjacency
// between a buffer and an image page is detected by the metadata query.
12390 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
12391 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
12393 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[i];
12394 VMA_ASSERT(m_Algorithm == 0);
// Cast is safe given the assert above: algorithm 0 uses generic metadata.
12395 VmaBlockMetadata_Generic*
const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
12396 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
// Marks allocations in every block as lost relative to currentFrameIndex and
// the configured frame-in-use count; optionally reports how many were lost.
12404 void VmaBlockVector::MakePoolAllocationsLost(
12405 uint32_t currentFrameIndex,
12406 size_t* pLostAllocationCount)
// Write lock: MakeAllocationsLost mutates block metadata.
12408 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12409 size_t lostAllocationCount = 0;
12410 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12412 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12413 VMA_ASSERT(pBlock);
12414 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
// Out-parameter is optional.
12416 if(pLostAllocationCount != VMA_NULL)
12418 *pLostAllocationCount = lostAllocationCount;
// Validates corruption-detection margins in every block. Returns
// VK_ERROR_FEATURE_NOT_PRESENT when corruption detection is not enabled;
// otherwise propagates the first non-success result from a block check.
12422 VkResult VmaBlockVector::CheckCorruption()
12424 if(!IsCorruptionDetectionEnabled())
12426 return VK_ERROR_FEATURE_NOT_PRESENT;
// Read lock is sufficient: checking does not mutate metadata.
12429 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12430 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12432 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12433 VMA_ASSERT(pBlock);
12434 VkResult res = pBlock->CheckCorruption(m_hAllocator);
12435 if(res != VK_SUCCESS)
// Accumulates per-block allocation statistics into pStats, under the total,
// the owning memory type, and the owning memory heap buckets.
12443 void VmaBlockVector::AddStats(
VmaStats* pStats)
12445 const uint32_t memTypeIndex = m_MemoryTypeIndex;
12446 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
// Read lock: statistics gathering must not race with (de)allocation.
12448 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12450 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12452 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12453 VMA_ASSERT(pBlock);
12454 VMA_HEAVY_ASSERT(pBlock->Validate());
// allocationStatInfo (declared on a line missing from this extract) receives
// the per-block stats, then is folded into the three aggregate buckets.
12456 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
12457 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12458 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12459 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor of the generic (CPU-oriented) defragmentation algorithm.
// Snapshots every block of the target block vector into a BlockInfo entry,
// remembering each block's original index, then sorts the snapshot by block
// pointer so AddAllocation can binary-search it.
12466 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
12468 VmaBlockVector* pBlockVector,
12469 uint32_t currentFrameIndex,
12470 bool overlappingMoveSupported) :
12471 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12472 m_AllAllocations(false),
12473 m_AllocationCount(0),
12475 m_AllocationsMoved(0),
12476 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
12479 const size_t blockCount = m_pBlockVector->m_Blocks.size();
12480 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
// BlockInfo is heap-allocated via the allocator's callbacks and owned by
// m_Blocks; released in the destructor with vma_delete.
12482 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
12483 pBlockInfo->m_OriginalBlockIndex = blockIndex;
12484 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
12485 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer (BlockPointerLess) to enable binary search later.
12489 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Destructor: releases the BlockInfo objects allocated in the constructor,
// iterating in reverse.
12492 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
12494 for(
size_t i = m_Blocks.size(); i--; )
12496 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers a single allocation as a candidate for defragmentation.
// Lost allocations are skipped; otherwise the allocation is appended to the
// BlockInfo of its owning block, found by binary search (m_Blocks is sorted
// by block pointer in the constructor).
12500 void VmaDefragmentationAlgorithm_Generic::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
12503 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
12505 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
12506 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
12507 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
12509 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
12510 (*it)->m_Allocations.push_back(allocInfo);
// Counted even on the code path above; pChanged (optional) is stored so the
// caller can be told later whether this allocation actually moved.
12517 ++m_AllocationCount;
// One round of the generic defragmentation: repeatedly takes the candidate
// allocation from the back of the block list (highest block index / highest
// offset first) and tries to re-place it in an earlier block or at a lower
// offset, respecting maxBytesToMove / maxAllocationsToMove budgets.
// Appends a VmaDefragmentationMove record for every accepted move.
12521 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
12522 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12523 VkDeviceSize maxBytesToMove,
12524 uint32_t maxAllocationsToMove)
12526 if(m_Blocks.empty())
// Blocks before srcBlockMinIndex are never used as a move source.
12539 size_t srcBlockMinIndex = 0;
// Start from the last block and its last allocation; SIZE_MAX is a sentinel
// meaning "position at the end of the current block's allocation list".
12552 size_t srcBlockIndex = m_Blocks.size() - 1;
12553 size_t srcAllocIndex = SIZE_MAX;
// Skip backwards over empty blocks until a source allocation is found.
12559 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
12561 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
12564 if(srcBlockIndex == srcBlockMinIndex)
12571 srcAllocIndex = SIZE_MAX;
12576 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
12580 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
12581 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
12583 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
12584 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
12585 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
12586 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to (and including) the source
// block; a move is accepted only if MoveMakesSense approves it.
12589 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
12591 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
12592 VmaAllocationRequest dstAllocRequest;
12593 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
12594 m_CurrentFrameIndex,
12595 m_pBlockVector->GetFrameInUseCount(),
12596 m_pBlockVector->GetBufferImageGranularity(),
12603 &dstAllocRequest) &&
12605 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
12607 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Enforce the caller-provided budgets before committing the move.
12610 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
12611 (m_BytesMoved + size > maxBytesToMove))
12616 VmaDefragmentationMove move;
12617 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
12618 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
12619 move.srcOffset = srcOffset;
12620 move.dstOffset = dstAllocRequest.offset;
12622 moves.push_back(move);
// Commit: allocate at destination, free at source, retarget the allocation.
12624 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
12629 allocInfo.m_hAllocation);
12630 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
12632 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
12634 if(allocInfo.m_pChanged != VMA_NULL)
12636 *allocInfo.m_pChanged = VK_TRUE;
12639 ++m_AllocationsMoved;
12640 m_BytesMoved += size;
12642 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance the backwards cursor to the next candidate allocation/block.
12650 if(srcAllocIndex > 0)
12656 if(srcBlockIndex > 0)
12659 srcAllocIndex = SIZE_MAX;
// Counts blocks flagged as containing at least one non-movable allocation.
// NOTE(review): the accumulator and return lines are missing from this
// extract; only the flag test is visible.
12669 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount()
const 12672 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12674 if(m_Blocks[i]->m_HasNonMovableAllocations)
// Entry point of the generic algorithm: prepares per-block allocation lists
// (either the explicitly added ones or, in all-allocations mode, every
// suballocation found in the metadata), sorts, then runs up to two rounds of
// DefragmentRound within the given byte/allocation budgets.
12682 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
12683 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12684 VkDeviceSize maxBytesToMove,
12685 uint32_t maxAllocationsToMove)
// Nothing to do if no allocations were registered and all-mode is off.
12687 if(!m_AllAllocations && m_AllocationCount == 0)
12692 const size_t blockCount = m_Blocks.size();
12693 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12695 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
// In all-allocations mode harvest every used suballocation directly from
// the generic metadata instead of relying on AddAllocation calls.
12697 if(m_AllAllocations)
12699 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
12700 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
12701 it != pMetadata->m_Suballocations.end();
12704 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
12706 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
12707 pBlockInfo->m_Allocations.push_back(allocInfo);
12712 pBlockInfo->CalcHasNonMovableAllocations();
// Move high-offset allocations first within each block.
12716 pBlockInfo->SortAllocationsByOffsetDescending();
// Order blocks by desirability as a move destination.
12722 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
12725 const uint32_t roundCount = 2;
12728 VkResult result = VK_SUCCESS;
12729 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
12731 result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
12737 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
12738 size_t dstBlockIndex, VkDeviceSize dstOffset,
12739 size_t srcBlockIndex, VkDeviceSize srcOffset)
12741 if(dstBlockIndex < srcBlockIndex)
12745 if(dstBlockIndex > srcBlockIndex)
12749 if(dstOffset < srcOffset)
// Constructor of the fast (single-pass, compacting) defragmentation
// algorithm. Only records configuration; the per-block info vector is filled
// in Defragment(). Requires VMA_DEBUG_MARGIN == 0 because the compaction
// math assumes no debug margins between suballocations.
12759 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
12761 VmaBlockVector* pBlockVector,
12762 uint32_t currentFrameIndex,
12763 bool overlappingMoveSupported) :
12764 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12765 m_OverlappingMoveSupported(overlappingMoveSupported),
12766 m_AllocationCount(0),
12767 m_AllAllocations(false),
12769 m_AllocationsMoved(0),
12770 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
12772 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
// Destructor — no owned resources beyond the members, so the body is empty.
12776 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
// Fast single-pass compaction: blocks are sorted by ascending free space and
// allocations are slid toward the front of earlier blocks. Gaps left behind
// are registered in a small FreeSpaceDatabase so later allocations can fill
// them. Suballocation lists are edited directly (see Pre/PostprocessMetadata)
// and every relocation is appended to `moves`.
12780 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
12781 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12782 VkDeviceSize maxBytesToMove,
12783 uint32_t maxAllocationsToMove)
// This algorithm only works when it sees every allocation in the vector.
12785 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
12787 const size_t blockCount = m_pBlockVector->GetBlockCount();
12788 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
// Strips FREE entries from metadata so only used suballocations remain.
12793 PreprocessMetadata();
12797 m_BlockInfos.resize(blockCount);
12798 for(
size_t i = 0; i < blockCount; ++i)
12800 m_BlockInfos[i].origBlockIndex = i;
// Sort block order by ascending sum of free size: fullest blocks first.
12803 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
12804 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
12805 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
12810 FreeSpaceDatabase freeSpaceDb;
// Destination cursor: current block (in sorted order) and write offset.
12812 size_t dstBlockInfoIndex = 0;
12813 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
12814 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
12815 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
12816 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
12817 VkDeviceSize dstOffset = 0;
12820 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
12822 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
12823 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
12824 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
12825 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
12826 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
12828 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
12829 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
12830 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
// Stop the whole pass once either budget would be exceeded.
12831 if(m_AllocationsMoved == maxAllocationsToMove ||
12832 m_BytesMoved + srcAllocSize > maxBytesToMove)
12837 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
// First, try to place the allocation into a previously recorded gap.
12840 size_t freeSpaceInfoIndex;
12841 VkDeviceSize dstAllocOffset;
12842 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
12843 freeSpaceInfoIndex, dstAllocOffset))
12845 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
12846 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
12847 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
12848 VkDeviceSize freeSpaceBlockSize = pFreeSpaceMetadata->GetSize();
// Case 1a: the gap is inside the same block — move within the block.
12851 if(freeSpaceInfoIndex == srcBlockInfoIndex)
12853 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
12857 VmaSuballocation suballoc = *srcSuballocIt;
12858 suballoc.offset = dstAllocOffset;
12859 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
12860 m_BytesMoved += srcAllocSize;
12861 ++m_AllocationsMoved;
// Re-link the suballocation at its new position in the list.
12863 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
12865 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
12866 srcSuballocIt = nextSuballocIt;
12868 InsertSuballoc(pFreeSpaceMetadata, suballoc);
12870 VmaDefragmentationMove move = {
12871 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
12872 srcAllocOffset, dstAllocOffset,
12874 moves.push_back(move);
// Case 1b: the gap is in an earlier block — move across blocks.
12881 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
12883 VmaSuballocation suballoc = *srcSuballocIt;
12884 suballoc.offset = dstAllocOffset;
12885 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
12886 m_BytesMoved += srcAllocSize;
12887 ++m_AllocationsMoved;
12889 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
12891 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
12892 srcSuballocIt = nextSuballocIt;
12894 InsertSuballoc(pFreeSpaceMetadata, suballoc);
12896 VmaDefragmentationMove move = {
12897 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
12898 srcAllocOffset, dstAllocOffset,
12900 moves.push_back(move);
// Case 2: no recorded gap fits — append at the destination cursor.
12905 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
// Advance to later destination blocks while the allocation does not fit,
// registering each block's unused tail in the free-space database.
12908 while(dstBlockInfoIndex < srcBlockInfoIndex &&
12909 dstAllocOffset + srcAllocSize > dstBlockSize)
12912 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
12914 ++dstBlockInfoIndex;
12915 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
12916 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
12917 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
12918 dstBlockSize = pDstMetadata->GetSize();
12920 dstAllocOffset = 0;
// Case 2a: destination caught up with the source block.
12924 if(dstBlockInfoIndex == srcBlockInfoIndex)
12926 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
12928 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
12930 bool skipOver = overlap;
12931 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
// Heuristic: skip when the gain (offset delta) is tiny relative to the
// allocation size — an overlapping copy then isn't worth it.
12935 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
12940 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
12942 dstOffset = srcAllocOffset + srcAllocSize;
// Same-block move (possibly overlapping, already approved above).
12948 srcSuballocIt->offset = dstAllocOffset;
12949 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
12950 dstOffset = dstAllocOffset + srcAllocSize;
12951 m_BytesMoved += srcAllocSize;
12952 ++m_AllocationsMoved;
12954 VmaDefragmentationMove move = {
12955 srcOrigBlockIndex, dstOrigBlockIndex,
12956 srcAllocOffset, dstAllocOffset,
12958 moves.push_back(move);
// Case 2b: destination is an earlier block — move and append there.
12966 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
12967 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
12969 VmaSuballocation suballoc = *srcSuballocIt;
12970 suballoc.offset = dstAllocOffset;
12971 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
12972 dstOffset = dstAllocOffset + srcAllocSize;
12973 m_BytesMoved += srcAllocSize;
12974 ++m_AllocationsMoved;
12976 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
12978 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
12979 srcSuballocIt = nextSuballocIt;
12981 pDstMetadata->m_Suballocations.push_back(suballoc);
12983 VmaDefragmentationMove move = {
12984 srcOrigBlockIndex, dstOrigBlockIndex,
12985 srcAllocOffset, dstAllocOffset,
12987 moves.push_back(move);
12993 m_BlockInfos.clear();
// Rebuild FREE suballocations and free-lists invalidated by the pass.
12995 PostprocessMetadata();
// Prepares every block's generic metadata for the fast pass: removes all
// FREE suballocation entries and clears the by-size free list, resetting the
// free counters so only used suballocations remain in the lists.
// PostprocessMetadata() reconstructs the free entries afterwards.
13000 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
13002 const size_t blockCount = m_pBlockVector->GetBlockCount();
13003 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13005 VmaBlockMetadata_Generic*
const pMetadata =
13006 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
13007 pMetadata->m_FreeCount = 0;
// With no FREE entries tracked, the whole block size counts as free.
13008 pMetadata->m_SumFreeSize = pMetadata->GetSize();
13009 pMetadata->m_FreeSuballocationsBySize.clear();
13010 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13011 it != pMetadata->m_Suballocations.end(); )
13013 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
// Advance a copy of the iterator before erasing to keep iteration valid.
13015 VmaSuballocationList::iterator nextIt = it;
13017 pMetadata->m_Suballocations.erase(it);
// Rebuilds each block's FREE suballocation entries after the fast pass:
// inserts a FREE entry into every gap between used suballocations (and after
// the last one), updates free counters/sums, re-registers large-enough free
// ranges in m_FreeSuballocationsBySize, and re-sorts that list by size.
13028 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
13030 const size_t blockCount = m_pBlockVector->GetBlockCount();
13031 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13033 VmaBlockMetadata_Generic*
const pMetadata =
13034 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
13035 const VkDeviceSize blockSize = pMetadata->GetSize();
// Fully empty block: a single FREE suballocation spanning the whole block.
13038 if(pMetadata->m_Suballocations.empty())
13040 pMetadata->m_FreeCount = 1;
13042 VmaSuballocation suballoc = {
13046 VMA_SUBALLOCATION_TYPE_FREE };
13047 pMetadata->m_Suballocations.push_back(suballoc);
13048 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
// Non-empty block: walk used suballocations in offset order, inserting a
// FREE entry in front of each gap.
13053 VkDeviceSize offset = 0;
13054 VmaSuballocationList::iterator it;
13055 for(it = pMetadata->m_Suballocations.begin();
13056 it != pMetadata->m_Suballocations.end();
// After PreprocessMetadata the list must contain no FREE entries and be
// sorted by offset — both are asserted.
13059 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
13060 VMA_ASSERT(it->offset >= offset);
13063 if(it->offset > offset)
13065 ++pMetadata->m_FreeCount;
13066 const VkDeviceSize freeSize = it->offset - offset;
13067 VmaSuballocation suballoc = {
13071 VMA_SUBALLOCATION_TYPE_FREE };
13072 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
13073 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13075 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
13079 pMetadata->m_SumFreeSize -= it->size;
13080 offset = it->offset + it->size;
// Trailing gap after the last used suballocation.
13084 if(offset < blockSize)
13086 ++pMetadata->m_FreeCount;
13087 const VkDeviceSize freeSize = blockSize - offset;
13088 VmaSuballocation suballoc = {
13092 VMA_SUBALLOCATION_TYPE_FREE };
13093 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
13094 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
// NOTE(review): this uses `>` while the preceding-gap branch above uses
// `>=` against the same threshold — a trailing free range exactly equal to
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER is not registered. Looks like
// an inconsistency; confirm against RegisterFreeSuballocation/Validate.
13095 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13097 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
// Restore the by-size ordering invariant of the free list.
13102 pMetadata->m_FreeSuballocationsBySize.begin(),
13103 pMetadata->m_FreeSuballocationsBySize.end(),
13104 VmaSuballocationItemSizeLess());
13107 VMA_HEAVY_ASSERT(pMetadata->Validate());
13111 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc)
13114 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13115 while(it != pMetadata->m_Suballocations.end())
13117 if(it->offset < suballoc.offset)
13122 pMetadata->m_Suballocations.insert(it, suballoc);
// Per-block-vector defragmentation context. Stores the target vector, the
// custom pool it belongs to (if any), frame index and algorithm flags; the
// concrete algorithm object is created lazily in Begin().
13128 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
13131 VmaBlockVector* pBlockVector,
13132 uint32_t currFrameIndex,
13133 uint32_t algorithmFlags) :
13135 mutexLocked(false),
13136 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
13137 m_hAllocator(hAllocator),
13138 m_hCustomPool(hCustomPool),
13139 m_pBlockVector(pBlockVector),
13140 m_CurrFrameIndex(currFrameIndex),
13141 m_AlgorithmFlags(algorithmFlags),
13142 m_pAlgorithm(VMA_NULL),
13143 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
13144 m_AllAllocations(false)
// Destructor: releases the algorithm object created in Begin()
// (vma_delete on VMA_NULL is expected to be a no-op if Begin never ran).
13148 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
13150 vma_delete(m_hAllocator, m_pAlgorithm);
// Queues one allocation (with its optional "changed" output flag) to be
// handed to the algorithm when Begin() runs.
13153 void VmaBlockVectorDefragmentationContext::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
13155 AllocInfo info = { hAlloc, pChanged };
13156 m_Allocations.push_back(info);
// Instantiates the defragmentation algorithm and feeds it the registered
// allocations. The fast algorithm is chosen only when every allocation of
// the vector participates, no debug margin is configured, and no
// buffer/image granularity conflict is possible; otherwise the generic
// algorithm is used.
13159 void VmaBlockVectorDefragmentationContext::Begin(
bool overlappingMoveSupported)
13161 const bool allAllocations = m_AllAllocations ||
13162 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
13174 if(VMA_DEBUG_MARGIN == 0 &&
13176 !m_pBlockVector->IsBufferImageGranularityConflictPossible())
13178 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
13179 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13183 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
13184 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
// All-allocations mode vs. the explicit list collected via AddAllocation.
13189 m_pAlgorithm->AddAll();
13193 for(
size_t i = 0, count = m_Allocations.size(); i < count; ++i)
13195 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
// Top-level defragmentation context: holds one block-vector context per
// default-pool memory type (array zeroed here, filled lazily) plus a vector
// of contexts for custom pools.
13203 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
13205 uint32_t currFrameIndex,
13208 m_hAllocator(hAllocator),
13209 m_CurrFrameIndex(currFrameIndex),
13212 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
// Null out the fixed-size per-memory-type context array.
13214 memset(m_DefaultPoolContexts, 0,
sizeof(m_DefaultPoolContexts));
// Destructor: finalizes (DefragmentationEnd) and deletes every custom-pool
// context, then every non-null default-pool context, both in reverse order.
13217 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
13219 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13221 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
13222 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13223 vma_delete(m_hAllocator, pBlockVectorCtx);
13225 for(
size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
13227 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
// Default-pool slots are only populated on demand; skip empty ones.
13228 if(pBlockVectorCtx)
13230 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13231 vma_delete(m_hAllocator, pBlockVectorCtx);
// Registers whole custom pools for defragmentation in all-allocations mode.
// Pools using a non-default algorithm are skipped. A context is reused if
// the pool was already added, otherwise created and stored.
13236 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount,
VmaPool* pPools)
13238 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13240 VmaPool pool = pPools[poolIndex];
// Only the default (generic) block-vector algorithm supports this.
13243 if(pool->m_BlockVector.GetAlgorithm() == 0)
13245 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
// Linear search for an existing context for this pool.
13247 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13249 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
13251 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13256 if(!pBlockVectorDefragCtx)
13258 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13261 &pool->m_BlockVector,
13264 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
// Whole-pool registration: mark the context as covering all allocations.
13267 pBlockVectorDefragCtx->AddAll();
// Registers individual allocations for defragmentation. Dedicated and lost
// allocations are skipped. Each allocation is routed to the context of its
// custom pool (created on first use) or of the default pool for its memory
// type; the optional per-allocation "changed" flag is forwarded.
13272 void VmaDefragmentationContext_T::AddAllocations(
13273 uint32_t allocationCount,
13275 VkBool32* pAllocationsChanged)
13278 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
13281 VMA_ASSERT(hAlloc);
// Only block-based, non-lost allocations can be defragmented.
13283 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
13285 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
13287 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
13289 const VmaPool hAllocPool = hAlloc->GetPool();
// Allocation belongs to a custom pool.
13291 if(hAllocPool != VK_NULL_HANDLE)
// Pools with a non-default algorithm are not defragmentable.
13294 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
13296 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13298 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
13300 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13304 if(!pBlockVectorDefragCtx)
13306 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13309 &hAllocPool->m_BlockVector,
13312 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
// Allocation belongs to a default pool — indexed by memory type.
13319 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
13320 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
13321 if(!pBlockVectorDefragCtx)
13323 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13326 m_hAllocator->m_pBlockVectors[memTypeIndex],
13329 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
13333 if(pBlockVectorDefragCtx)
13335 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
13336 &pAllocationsChanged[allocIndex] : VMA_NULL;
13337 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
// Runs defragmentation over every registered block-vector context: first the
// default pools (one per memory type), then the custom pools, stopping early
// if any context reports failure. Without a command buffer the GPU path is
// disabled by zeroing the GPU budgets.
13343 VkResult VmaDefragmentationContext_T::Defragment(
13344 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
13345 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
13353 if(commandBuffer == VK_NULL_HANDLE)
13355 maxGpuBytesToMove = 0;
13356 maxGpuAllocationsToMove = 0;
13359 VkResult res = VK_SUCCESS;
// Default pools.
13362 for(uint32_t memTypeIndex = 0;
13363 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
13366 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
13367 if(pBlockVectorCtx)
13369 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
13370 pBlockVectorCtx->GetBlockVector()->Defragment(
13373 maxCpuBytesToMove, maxCpuAllocationsToMove,
13374 maxGpuBytesToMove, maxGpuAllocationsToMove,
13376 if(pBlockVectorCtx->res != VK_SUCCESS)
13378 res = pBlockVectorCtx->res;
// Custom pools.
13384 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
13385 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
13388 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
13389 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
13390 pBlockVectorCtx->GetBlockVector()->Defragment(
13393 maxCpuBytesToMove, maxCpuAllocationsToMove,
13394 maxGpuBytesToMove, maxGpuAllocationsToMove,
13396 if(pBlockVectorCtx->res != VK_SUCCESS)
13398 res = pBlockVectorCtx->res;
// Call recorder (Windows-only: compiled under VMA_RECORDING_ENABLED).
// The default constructor only initializes members; the Init fragment below
// captures the high-resolution timer baseline, opens the CSV output file
// with fopen_s, and writes the two-line header identifying the format
// ("Vulkan Memory Allocator,Calls recording" / version "1,5").
13408 #if VMA_RECORDING_ENABLED 13410 VmaRecorder::VmaRecorder() :
13415 m_StartCounter(INT64_MAX)
13421 m_UseMutex = useMutex;
13422 m_Flags = settings.
flags;
// QueryPerformanceFrequency/Counter establish the time base used by
// GetBasicParams to stamp each recorded call.
13424 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
13425 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
13428 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
// Failure to open the recording file aborts initialization.
13431 return VK_ERROR_INITIALIZATION_FAILED;
13435 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
13436 fprintf(m_File,
"%s\n",
"1,5");
// Destructor: closes the recording file if it was successfully opened.
13441 VmaRecorder::~VmaRecorder()
13443 if(m_File != VMA_NULL)
// Logs a vmaCreateAllocator call as one CSV line: threadId,time,frameIndex,name.
13449 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
13451 CallParams callParams;
13452 GetBasicParams(callParams);
// The file mutex serializes writers when the allocator runs multi-threaded.
13454 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13455 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Logs a vmaDestroyAllocator call as one CSV line.
13459 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
13461 CallParams callParams;
13462 GetBasicParams(callParams);
13464 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13465 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Logs a vmaCreatePool call with its create-info fields and pool handle.
// NOTE(review): the function signature line is missing from this extract.
13471 CallParams callParams;
13472 GetBasicParams(callParams);
13474 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13475 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaDestroyPool call with the pool handle.
13486 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
13488 CallParams callParams;
13489 GetBasicParams(callParams);
13491 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13492 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaAllocateMemory call: memory requirements, create-info fields and
// the user-data string (formatted via UserDataString).
13497 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
13498 const VkMemoryRequirements& vkMemReq,
13502 CallParams callParams;
13503 GetBasicParams(callParams);
13505 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13506 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13507 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13509 vkMemReq.alignment,
13510 vkMemReq.memoryTypeBits,
13518 userDataStr.GetString());
// Logs a vmaAllocateMemoryPages call; the list of resulting allocation
// handles is appended via PrintPointerList, then the user-data string.
13522 void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
13523 const VkMemoryRequirements& vkMemReq,
13525 uint64_t allocationCount,
13528 CallParams callParams;
13529 GetBasicParams(callParams);
13531 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13532 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13533 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
13535 vkMemReq.alignment,
13536 vkMemReq.memoryTypeBits,
13543 PrintPointerList(allocationCount, pAllocations);
13544 fprintf(m_File,
",%s\n", userDataStr.GetString());
// Logs a vmaAllocateMemoryForBuffer call, including the dedicated-allocation
// hints (required/preferred encoded as 1/0).
13548 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
13549 const VkMemoryRequirements& vkMemReq,
13550 bool requiresDedicatedAllocation,
13551 bool prefersDedicatedAllocation,
13555 CallParams callParams;
13556 GetBasicParams(callParams);
13558 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13559 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13560 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13562 vkMemReq.alignment,
13563 vkMemReq.memoryTypeBits,
13564 requiresDedicatedAllocation ? 1 : 0,
13565 prefersDedicatedAllocation ? 1 : 0,
13573 userDataStr.GetString());
// Logs a vmaAllocateMemoryForImage call; mirrors the buffer variant above.
13577 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
13578 const VkMemoryRequirements& vkMemReq,
13579 bool requiresDedicatedAllocation,
13580 bool prefersDedicatedAllocation,
13584 CallParams callParams;
13585 GetBasicParams(callParams);
13587 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13588 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13589 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13591 vkMemReq.alignment,
13592 vkMemReq.memoryTypeBits,
13593 requiresDedicatedAllocation ? 1 : 0,
13594 prefersDedicatedAllocation ? 1 : 0,
13602 userDataStr.GetString());
// Logs a vmaFreeMemory call with the allocation handle.
13606 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
13609 CallParams callParams;
13610 GetBasicParams(callParams);
13612 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13613 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaFreeMemoryPages call: the allocation handle list is printed via
// PrintPointerList between the prefix and the terminating newline.
13618 void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
13619 uint64_t allocationCount,
13622 CallParams callParams;
13623 GetBasicParams(callParams);
13625 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13626 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
13627 PrintPointerList(allocationCount, pAllocations);
13628 fprintf(m_File,
"\n");
// Logs a vmaResizeAllocation call with the allocation handle and new size.
13632 void VmaRecorder::RecordResizeAllocation(
13633 uint32_t frameIndex,
13635 VkDeviceSize newSize)
13637 CallParams callParams;
13638 GetBasicParams(callParams);
13640 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13641 fprintf(m_File,
"%u,%.3f,%u,vmaResizeAllocation,%p,%llu\n", callParams.threadId, callParams.time, frameIndex,
13642 allocation, newSize);
// Logs a vmaSetAllocationUserData call; the user data is rendered to text
// through UserDataString before being written.
13646 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
13648 const void* pUserData)
13650 CallParams callParams;
13651 GetBasicParams(callParams);
13653 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13654 UserDataString userDataStr(
13657 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13659 userDataStr.GetString());
// Logs a vmaCreateLostAllocation call with the resulting handle.
13663 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
13666 CallParams callParams;
13667 GetBasicParams(callParams);
13669 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13670 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaMapMemory call with the allocation handle.
13675 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
13678 CallParams callParams;
13679 GetBasicParams(callParams);
13681 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13682 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaUnmapMemory call with the allocation handle.
13687 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
13690 CallParams callParams;
13691 GetBasicParams(callParams);
13693 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13694 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaFlushAllocation call: allocation handle, offset and size
// (the fprintf argument lines for those values are elided in this extraction).
13699 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
13700 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13702 CallParams callParams;
13703 GetBasicParams(callParams);
13705 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13706 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaInvalidateAllocation call; mirrors RecordFlushAllocation.
13713 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
13714 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13716 CallParams callParams;
13717 GetBasicParams(callParams);
13719 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13720 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaCreateBuffer call: buffer create-info fields followed by the
// allocation create-info fields and the resulting allocation's user data.
// NOTE(review): several fprintf arguments (requiredFlags, preferredFlags,
// memoryTypeBits, allocation handle) are elided in this extraction — the format
// string has more conversion specifiers than arguments visible here.
13727 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
13728 const VkBufferCreateInfo& bufCreateInfo,
13732 CallParams callParams;
13733 GetBasicParams(callParams);
13735 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13736 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13737 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13738 bufCreateInfo.flags,
13739 bufCreateInfo.size,
13740 bufCreateInfo.usage,
13741 bufCreateInfo.sharingMode,
13742 allocCreateInfo.
flags,
13743 allocCreateInfo.
usage,
13747 allocCreateInfo.
pool,
13749 userDataStr.GetString());
// Records a vmaCreateImage call: all VkImageCreateInfo fields followed by the
// allocation create-info fields and user data.
// NOTE(review): as with RecordCreateBuffer, some fprintf arguments are elided
// in this extraction relative to the format string.
13753 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
13754 const VkImageCreateInfo& imageCreateInfo,
13758 CallParams callParams;
13759 GetBasicParams(callParams);
13761 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13762 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13763 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13764 imageCreateInfo.flags,
13765 imageCreateInfo.imageType,
13766 imageCreateInfo.format,
13767 imageCreateInfo.extent.width,
13768 imageCreateInfo.extent.height,
13769 imageCreateInfo.extent.depth,
13770 imageCreateInfo.mipLevels,
13771 imageCreateInfo.arrayLayers,
13772 imageCreateInfo.samples,
13773 imageCreateInfo.tiling,
13774 imageCreateInfo.usage,
13775 imageCreateInfo.sharingMode,
13776 imageCreateInfo.initialLayout,
13777 allocCreateInfo.
flags,
13778 allocCreateInfo.
usage,
13782 allocCreateInfo.
pool,
13784 userDataStr.GetString());
// Records a vmaDestroyBuffer call (allocation handle argument elided here).
13788 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
13791 CallParams callParams;
13792 GetBasicParams(callParams);
13794 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13795 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaDestroyImage call (allocation handle argument elided here).
13800 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
13803 CallParams callParams;
13804 GetBasicParams(callParams);
13806 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13807 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaTouchAllocation call (allocation handle argument elided here).
13812 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
13815 CallParams callParams;
13816 GetBasicParams(callParams);
13818 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13819 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaGetAllocationInfo call (allocation handle argument elided here).
13824 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
13827 CallParams callParams;
13828 GetBasicParams(callParams);
13830 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13831 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaMakePoolAllocationsLost call (pool handle argument elided here).
13836 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
13839 CallParams callParams;
13840 GetBasicParams(callParams);
13842 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13843 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaDefragmentationBegin call. The record is built in three fprintf
// segments: the fixed prefix, a pointer list of candidate allocations (elided
// here between the two separator fprintf calls), then the numeric limits and
// output handles.
13848 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
13852 CallParams callParams;
13853 GetBasicParams(callParams);
13855 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13856 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
13859 fprintf(m_File,
",");
13861 fprintf(m_File,
",%llu,%u,%llu,%u,%p,%p\n",
// Records a vmaDefragmentationEnd call (context handle argument elided here).
13871 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
13874 CallParams callParams;
13875 GetBasicParams(callParams);
13877 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13878 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
// Fragment of the UserDataString constructor (header elided in this extraction):
// when the allocation stores user data as a string, point m_Str at it directly;
// otherwise format the raw pointer value into the small local buffer.
13885 if(pUserData != VMA_NULL)
13889 m_Str = (
const char*)pUserData;
// sprintf_s is MSVC-specific; recording is Windows-only (see VMA_RECORDING_ENABLED).
13893 sprintf_s(m_PtrStr,
"%p", pUserData);
// Writes the "Config,Begin" ... "Config,End" header of the recording file:
// physical-device identity, relevant device limits, the full memory heap/type
// topology, enabled extensions, and the compile-time VMA_DEBUG_* macro values.
// The replay tool uses this to validate that playback happens on a compatible
// configuration.
13903 void VmaRecorder::WriteConfiguration(
13904 const VkPhysicalDeviceProperties& devProps,
13905 const VkPhysicalDeviceMemoryProperties& memProps,
13906 bool dedicatedAllocationExtensionEnabled)
13908 fprintf(m_File,
"Config,Begin\n");
13910 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
13911 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
13912 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
13913 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
13914 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
13915 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
13917 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
13918 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
13919 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// Memory heaps: size and flags per heap.
13921 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
13922 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
13924 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
13925 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
// Memory types: heap index and property flags per type.
13927 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
13928 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
13930 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
13931 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
13934 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
// Compile-time debug configuration, cast to fixed types for stable formatting.
13936 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
13937 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
13938 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
13939 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
13940 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
13941 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
13942 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
13943 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
13944 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
13946 fprintf(m_File,
"Config,End\n");
13949 void VmaRecorder::GetBasicParams(CallParams& outParams)
13951 outParams.threadId = GetCurrentThreadId();
13953 LARGE_INTEGER counter;
13954 QueryPerformanceCounter(&counter);
13955 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
// Prints `count` allocation handles as "%p" values separated by single spaces
// (no trailing separator). Used by RecordFreeMemoryPages and defragmentation
// records.
// NOTE(review): pItems[0] is printed unconditionally in the lines visible here;
// an `if(count)` guard appears to be elided in this extraction — confirm the
// empty-list case is handled in the full source.
13958 void VmaRecorder::PrintPointerList(uint64_t count,
const VmaAllocation* pItems)
13962 fprintf(m_File,
"%p", pItems[0]);
13963 for(uint64_t i = 1; i < count; ++i)
13965 fprintf(m_File,
" %p", pItems[i]);
// Flushes buffered trace output to disk (body elided in this extraction;
// presumably calls fflush(m_File) when flush-after-call is enabled — confirm).
13970 void VmaRecorder::Flush()
// VmaAllocator_T constructor (header and several initializers elided in this
// extraction). Copies configuration from VmaAllocatorCreateInfo, imports Vulkan
// function pointers, queries device properties, applies optional per-heap size
// limits, creates one default VmaBlockVector and dedicated-allocation list per
// memory type, and optionally starts the recorder.
13978 #endif // #if VMA_RECORDING_ENABLED 13986 m_hDevice(pCreateInfo->device),
13987 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
13988 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
13989 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
13990 m_PreferredLargeHeapBlockSize(0),
13991 m_PhysicalDevice(pCreateInfo->physicalDevice),
13992 m_CurrentFrameIndex(0),
13993 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
13996 ,m_pRecorder(VMA_NULL)
// Corruption detection stores a uint32_t magic value inside each margin, so the
// margin must be a multiple of 4 bytes.
13999 if(VMA_DEBUG_DETECT_CORRUPTION)
14002 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
14007 #if !(VMA_DEDICATED_ALLOCATION) 14010 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
14014 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
14015 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
14016 memset(&m_MemProps, 0,
sizeof(m_MemProps));
14018 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
14019 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// VK_WHOLE_SIZE acts as the "no limit" sentinel for per-heap budgets.
14021 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
14023 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
14034 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
14035 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
14037 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
14038 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
14039 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
14040 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
// Clamp the reported heap sizes to the user-provided limits so all subsequent
// block-size heuristics respect the budget.
14047 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
14049 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
14050 if(limit != VK_WHOLE_SIZE)
14052 m_HeapSizeLimit[heapIndex] = limit;
14053 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
14055 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default block vector and one dedicated-allocation list per memory type.
14061 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14063 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
14065 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
14068 preferredBlockSize,
14071 GetBufferImageGranularity(),
14078 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
14085 VkResult res = VK_SUCCESS;
// Recording is only available when compiled in; otherwise requesting it is a
// hard error reported to the caller.
14090 #if VMA_RECORDING_ENABLED 14091 m_pRecorder = vma_new(
this, VmaRecorder)();
14093 if(res != VK_SUCCESS)
14097 m_pRecorder->WriteConfiguration(
14098 m_PhysicalDeviceProperties,
14100 m_UseKhrDedicatedAllocation);
14101 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
14103 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
14104 return VK_ERROR_FEATURE_NOT_PRESENT;
// Destructor: records the destruction event, deletes the recorder, and frees
// the per-memory-type block vectors and dedicated-allocation lists in reverse
// order. All user pools must already be destroyed.
14111 VmaAllocator_T::~VmaAllocator_T()
14113 #if VMA_RECORDING_ENABLED 14114 if(m_pRecorder != VMA_NULL)
14116 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
14117 vma_delete(
this, m_pRecorder);
14121 VMA_ASSERT(m_Pools.empty());
14123 for(
size_t i = GetMemoryTypeCount(); i--; )
14125 vma_delete(
this, m_pDedicatedAllocations[i]);
14126 vma_delete(
this, m_pBlockVectors[i]);
// Resolves the table of Vulkan entry points the allocator uses. Three layers:
// 1) statically linked prototypes (when VMA_STATIC_VULKAN_FUNCTIONS == 1), with
//    the KHR dedicated-allocation pair fetched via vkGetDeviceProcAddr;
// 2) user-provided overrides from pVulkanFunctions (non-null entries win);
// 3) asserts that every required pointer ended up non-null.
14130 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
14132 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14133 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
14134 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
14135 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
14136 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
14137 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
14138 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
14139 m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
14140 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
14141 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
14142 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
14143 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
14144 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
14145 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
14146 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
14147 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
14148 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
14149 m_VulkanFunctions.vkCmdCopyBuffer = &vkCmdCopyBuffer;
// Extension entry points have no static prototypes guaranteed — query them
// from the device at runtime.
14150 #if VMA_DEDICATED_ALLOCATION 14151 if(m_UseKhrDedicatedAllocation)
14153 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
14154 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
14155 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
14156 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
14158 #endif // #if VMA_DEDICATED_ALLOCATION 14159 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14161 #define VMA_COPY_IF_NOT_NULL(funcName) \ 14162 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 14164 if(pVulkanFunctions != VMA_NULL)
14166 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
14167 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
14168 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
14169 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
14170 VMA_COPY_IF_NOT_NULL(vkMapMemory);
14171 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
14172 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
14173 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
14174 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
14175 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
14176 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
14177 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
14178 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
14179 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
14180 VMA_COPY_IF_NOT_NULL(vkCreateImage);
14181 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
14182 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
14183 #if VMA_DEDICATED_ALLOCATION 14184 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
14185 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Final validation: every pointer must be resolved one way or the other.
14189 #undef VMA_COPY_IF_NOT_NULL 14193 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
14194 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
14195 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
14196 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
14197 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
14198 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
14199 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
14200 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
14201 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
14202 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
14203 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
14204 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
14205 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
14206 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
14207 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
14208 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
14209 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
14210 #if VMA_DEDICATED_ALLOCATION 14211 if(m_UseKhrDedicatedAllocation)
14213 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
14214 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
14219 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
14221 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14222 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
14223 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
14224 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates memory from one specific memory type. Strategy: if the request
// prefers/requires dedicated memory (explicit flag, driver hint, or size larger
// than half the preferred block size) and no custom pool is used, go straight to
// a dedicated vkAllocateMemory; otherwise try the type's block vector first and
// fall back to dedicated memory on failure (unless NEVER_ALLOCATE forbids it).
// NOTE(review): several parameter and argument lines are elided in this
// extraction.
14227 VkResult VmaAllocator_T::AllocateMemoryOfType(
14229 VkDeviceSize alignment,
14230 bool dedicatedAllocation,
14231 VkBuffer dedicatedBuffer,
14232 VkImage dedicatedImage,
14234 uint32_t memTypeIndex,
14235 VmaSuballocationType suballocType,
14236 size_t allocationCount,
14239 VMA_ASSERT(pAllocations != VMA_NULL);
14240 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, vkMemReq.size);
// MAPPED makes no sense on non-HOST_VISIBLE memory; the flag is dropped here.
14246 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
14251 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
14252 VMA_ASSERT(blockVector);
14254 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
14255 bool preferDedicatedMemory =
14256 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
14257 dedicatedAllocation ||
// Heuristic: requests larger than half a block would waste block space.
14259 size > preferredBlockSize / 2;
14261 if(preferDedicatedMemory &&
14263 finalCreateInfo.
pool == VK_NULL_HANDLE)
// NEVER_ALLOCATE + dedicated preference cannot be satisfied.
14272 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14276 return AllocateDedicatedMemory(
14291 VkResult res = blockVector->Allocate(
14293 m_CurrentFrameIndex.load(),
14300 if(res == VK_SUCCESS)
// Block allocation failed; fall back to dedicated memory if allowed.
14308 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14312 res = AllocateDedicatedMemory(
14318 finalCreateInfo.pUserData,
14323 if(res == VK_SUCCESS)
14326 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
14332 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates `allocationCount` separate VkDeviceMemory objects (one per
// allocation). Attaches VkMemoryDedicatedAllocateInfoKHR when the KHR
// dedicated-allocation extension is in use and a buffer/image is supplied.
// On partial failure, already-created pages are freed in reverse order and
// pAllocations is zeroed.
14339 VkResult VmaAllocator_T::AllocateDedicatedMemory(
14341 VmaSuballocationType suballocType,
14342 uint32_t memTypeIndex,
14344 bool isUserDataString,
14346 VkBuffer dedicatedBuffer,
14347 VkImage dedicatedImage,
14348 size_t allocationCount,
14351 VMA_ASSERT(allocationCount > 0 && pAllocations);
14353 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
14354 allocInfo.memoryTypeIndex = memTypeIndex;
14355 allocInfo.allocationSize = size;
// dedicatedAllocInfo must outlive the vkAllocateMemory calls below because
// allocInfo.pNext points at it.
14357 #if VMA_DEDICATED_ALLOCATION 14358 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
14359 if(m_UseKhrDedicatedAllocation)
14361 if(dedicatedBuffer != VK_NULL_HANDLE)
14363 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
14364 dedicatedAllocInfo.buffer = dedicatedBuffer;
14365 allocInfo.pNext = &dedicatedAllocInfo;
14367 else if(dedicatedImage != VK_NULL_HANDLE)
14369 dedicatedAllocInfo.image = dedicatedImage;
14370 allocInfo.pNext = &dedicatedAllocInfo;
14373 #endif // #if VMA_DEDICATED_ALLOCATION 14377 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14379 res = AllocateDedicatedMemoryPage(
14387 pAllocations + allocIndex);
14388 if(res != VK_SUCCESS)
// Success: register all new allocations in the sorted per-type list.
14394 if(res == VK_SUCCESS)
14398 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14399 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
14400 VMA_ASSERT(pDedicatedAllocations);
14401 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14403 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
14407 VMA_DEBUG_LOG(
" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
// Failure: roll back pages created so far, newest first.
14412 while(allocIndex--)
14415 VkDeviceMemory hMemory = currAlloc->GetMemory();
14427 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
14429 currAlloc->SetUserData(
this, VMA_NULL);
14430 vma_delete(
this, currAlloc);
14433 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates a single dedicated VkDeviceMemory, optionally maps it persistently,
// and wraps it in a new VmaAllocation_T. On map failure the freshly allocated
// memory is released before returning the error.
14439 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
14441 VmaSuballocationType suballocType,
14442 uint32_t memTypeIndex,
14443 const VkMemoryAllocateInfo& allocInfo,
14445 bool isUserDataString,
14449 VkDeviceMemory hMemory = VK_NULL_HANDLE;
14450 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
14453 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
14457 void* pMappedData = VMA_NULL;
// Persistent mapping requested (VMA_ALLOCATION_CREATE_MAPPED_BIT) — condition
// line elided in this extraction.
14460 res = (*m_VulkanFunctions.vkMapMemory)(
14469 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
14470 FreeVulkanMemory(memTypeIndex, size, hMemory);
14475 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
14476 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
14477 (*pAllocation)->SetUserData(
this, pUserData);
// Fill new memory with a debug pattern when configured, to catch use of
// uninitialized allocations.
14478 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14480 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Queries memory requirements for a buffer. When VK_KHR_dedicated_allocation is
// enabled, uses vkGetBufferMemoryRequirements2KHR with a chained
// VkMemoryDedicatedRequirementsKHR to also learn whether the driver requires or
// prefers a dedicated allocation; otherwise falls back to the core entry point
// and reports both flags as false.
14486 void VmaAllocator_T::GetBufferMemoryRequirements(
14488 VkMemoryRequirements& memReq,
14489 bool& requiresDedicatedAllocation,
14490 bool& prefersDedicatedAllocation)
const 14492 #if VMA_DEDICATED_ALLOCATION 14493 if(m_UseKhrDedicatedAllocation)
14495 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
14496 memReqInfo.buffer = hBuffer;
14498 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14500 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14501 memReq2.pNext = &memDedicatedReq;
14503 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14505 memReq = memReq2.memoryRequirements;
14506 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14507 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
14510 #endif // #if VMA_DEDICATED_ALLOCATION 14512 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
14513 requiresDedicatedAllocation =
false;
14514 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR + VkMemoryDedicatedRequirementsKHR when the
// extension is enabled, else the core query with both dedication flags false.
14518 void VmaAllocator_T::GetImageMemoryRequirements(
14520 VkMemoryRequirements& memReq,
14521 bool& requiresDedicatedAllocation,
14522 bool& prefersDedicatedAllocation)
const 14524 #if VMA_DEDICATED_ALLOCATION 14525 if(m_UseKhrDedicatedAllocation)
14527 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
14528 memReqInfo.image = hImage;
14530 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14532 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14533 memReq2.pNext = &memDedicatedReq;
14535 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14537 memReq = memReq2.memoryRequirements;
14538 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14539 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
14542 #endif // #if VMA_DEDICATED_ALLOCATION 14544 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
14545 requiresDedicatedAllocation =
false;
14546 prefersDedicatedAllocation =
false;
// Top-level allocation entry point. Validates mutually exclusive create flags,
// then either routes the request to a custom pool's block vector or iterates
// candidate memory types (best first), retrying with the next-best type when a
// type fails, until success or the candidate mask is exhausted.
// NOTE(review): large portions (flag checks, memory-type search loop bodies) are
// elided in this extraction.
14550 VkResult VmaAllocator_T::AllocateMemory(
14551 const VkMemoryRequirements& vkMemReq,
14552 bool requiresDedicatedAllocation,
14553 bool prefersDedicatedAllocation,
14554 VkBuffer dedicatedBuffer,
14555 VkImage dedicatedImage,
14557 VmaSuballocationType suballocType,
14558 size_t allocationCount,
// Pre-zero outputs so a failed path never leaves garbage handles behind.
14561 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
14563 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
14565 if(vkMemReq.size == 0)
14567 return VK_ERROR_VALIDATION_FAILED_EXT;
// Reject contradictory flag combinations up front.
14572 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
14573 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14578 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
14579 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14581 if(requiresDedicatedAllocation)
14585 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
14586 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14588 if(createInfo.
pool != VK_NULL_HANDLE)
14590 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
14591 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14594 if((createInfo.
pool != VK_NULL_HANDLE) &&
14597 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
14598 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom pool path: honor the pool's memory type minimum alignment.
14601 if(createInfo.
pool != VK_NULL_HANDLE)
14603 const VkDeviceSize alignmentForPool = VMA_MAX(
14604 vkMemReq.alignment,
14605 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
14606 return createInfo.
pool->m_BlockVector.Allocate(
14608 m_CurrentFrameIndex.load(),
// Default path: search candidate memory types from vkMemReq.memoryTypeBits.
14619 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
14620 uint32_t memTypeIndex = UINT32_MAX;
14622 if(res == VK_SUCCESS)
14624 VkDeviceSize alignmentForMemType = VMA_MAX(
14625 vkMemReq.alignment,
14626 GetMemoryTypeMinAlignment(memTypeIndex));
14628 res = AllocateMemoryOfType(
14630 alignmentForMemType,
14631 requiresDedicatedAllocation || prefersDedicatedAllocation,
14640 if(res == VK_SUCCESS)
// This type failed — remove it from the candidate mask and try the next best.
14650 memoryTypeBits &= ~(1u << memTypeIndex);
14653 if(res == VK_SUCCESS)
14655 alignmentForMemType = VMA_MAX(
14656 vkMemReq.alignment,
14657 GetMemoryTypeMinAlignment(memTypeIndex));
14659 res = AllocateMemoryOfType(
14661 alignmentForMemType,
14662 requiresDedicatedAllocation || prefersDedicatedAllocation,
14671 if(res == VK_SUCCESS)
// No candidate memory type could satisfy the request.
14681 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees `allocationCount` allocations in reverse order. For each live (non-lost)
// allocation: optionally fill with the "destroyed" debug pattern, then return it
// to its owning block vector (custom pool or default per-type vector) or free
// the dedicated memory. Finally clears user data and deletes the handle object.
14692 void VmaAllocator_T::FreeMemory(
14693 size_t allocationCount,
14696 VMA_ASSERT(pAllocations);
14698 for(
size_t allocIndex = allocationCount; allocIndex--; )
14702 if(allocation != VK_NULL_HANDLE)
// TouchAllocation returns false for lost allocations — nothing to free then.
14704 if(TouchAllocation(allocation))
14706 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14708 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
14711 switch(allocation->GetType())
14713 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14715 VmaBlockVector* pBlockVector = VMA_NULL;
14716 VmaPool hPool = allocation->GetPool();
14717 if(hPool != VK_NULL_HANDLE)
14719 pBlockVector = &hPool->m_BlockVector;
14723 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
14724 pBlockVector = m_pBlockVectors[memTypeIndex];
14726 pBlockVector->Free(allocation);
14729 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14730 FreeDedicatedMemory(allocation);
14737 allocation->SetUserData(
this, VMA_NULL);
14738 vma_delete(
this, allocation);
// Attempts an in-place resize. Fails validation for size 0 or lost allocations;
// size-unchanged is a no-op success. Dedicated allocations cannot be resized;
// block allocations delegate to the block metadata's ResizeAllocation.
// NOTE(review): the success `return VK_SUCCESS;` lines appear elided in this
// extraction.
14743 VkResult VmaAllocator_T::ResizeAllocation(
14745 VkDeviceSize newSize)
14747 if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
14749 return VK_ERROR_VALIDATION_FAILED_EXT;
14751 if(newSize == alloc->GetSize())
14756 switch(alloc->GetType())
14758 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14759 return VK_ERROR_FEATURE_NOT_PRESENT;
14760 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14761 if(alloc->GetBlock()->m_pMetadata->ResizeAllocation(alloc, newSize))
14763 alloc->ChangeSize(newSize);
14764 VMA_HEAVY_ASSERT(alloc->GetBlock()->m_pMetadata->Validate());
14769 return VK_ERROR_OUT_OF_POOL_MEMORY;
14773 return VK_ERROR_VALIDATION_FAILED_EXT;
// Aggregates statistics across: default block vectors (per memory type), all
// custom pools, and dedicated allocations, then post-processes per-type,
// per-heap and total VmaStatInfo (averages etc.).
14777 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
14780 InitStatInfo(pStats->
total);
14781 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
14783 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default block vectors.
14787 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14789 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
14790 VMA_ASSERT(pBlockVector);
14791 pBlockVector->AddStats(pStats);
// Custom pools (shared-read lock over the pool list).
14796 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
14797 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
14799 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
// Dedicated allocations, folded into type, heap and total buckets.
14804 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14806 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14807 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14808 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
14809 VMA_ASSERT(pDedicatedAllocVector);
14810 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
14813 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
14814 VmaAddStatInfo(pStats->
total, allocationStatInfo);
14815 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
14816 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages and finalize each stat bucket.
14821 VmaPostprocessCalcStatInfo(pStats->
total);
14822 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
14823 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
14824 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
14825 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// 4098 == 0x1002, AMD's PCI vendor ID as reported in VkPhysicalDeviceProperties::vendorID.
14828 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Creates a defragmentation context, registers the candidate allocations, and
// runs the (possibly incremental) defragmentation pass. VK_NOT_READY means more
// passes are pending and the context is kept alive for DefragmentationEnd;
// any other result destroys the context immediately.
14830 VkResult VmaAllocator_T::DefragmentationBegin(
14840 *pContext = vma_new(
this, VmaDefragmentationContext_T)(
14841 this, m_CurrentFrameIndex.load(), info.
flags, pStats);
14844 (*pContext)->AddAllocations(
14847 VkResult res = (*pContext)->Defragment(
14852 if(res != VK_NOT_READY)
14854 vma_delete(
this, *pContext);
14855 *pContext = VMA_NULL;
// Destroys a defragmentation context previously kept alive by a VK_NOT_READY
// result from DefragmentationBegin.
14861 VkResult VmaAllocator_T::DefragmentationEnd(
14864 vma_delete(
this, context);
// Body of VmaAllocator_T::GetAllocationInfo (header elided in this extraction).
// For allocations that can become lost, it snapshots last-use vs. current frame
// index in a CAS loop: lost allocations report VK_NULL_HANDLE memory with size
// preserved, up-to-date ones report full info, otherwise the last-use frame is
// advanced and the check repeats. Non-lost allocations report info directly
// (with a heavy-assert sanity check of the frame index under stats builds).
14870 if(hAllocation->CanBecomeLost())
14876 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
14877 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
14880 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Lost: no backing memory, but size and user data are still reported.
14884 pAllocationInfo->
offset = 0;
14885 pAllocationInfo->
size = hAllocation->GetSize();
14887 pAllocationInfo->
pUserData = hAllocation->GetUserData();
14890 else if(localLastUseFrameIndex == localCurrFrameIndex)
14892 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
14893 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
14894 pAllocationInfo->
offset = hAllocation->GetOffset();
14895 pAllocationInfo->
size = hAllocation->GetSize();
14897 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Bump last-use to the current frame and re-evaluate on CAS failure.
14902 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
14904 localLastUseFrameIndex = localCurrFrameIndex;
14911 #if VMA_STATS_STRING_ENABLED 14912 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
14913 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
14916 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
14917 if(localLastUseFrameIndex == localCurrFrameIndex)
14923 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
14925 localLastUseFrameIndex = localCurrFrameIndex;
14931 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
14932 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
14933 pAllocationInfo->
offset = hAllocation->GetOffset();
14934 pAllocationInfo->
size = hAllocation->GetSize();
14935 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
14936 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Returns whether the allocation is still valid, and for can-become-lost
// allocations marks it as used in the current frame via the same CAS loop as
// GetAllocationInfo. Lost allocations return false; non-lost allocations
// return true (return statements elided in this extraction).
14940 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
14943 if(hAllocation->CanBecomeLost())
14945 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
14946 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
14949 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
14953 else if(localLastUseFrameIndex == localCurrFrameIndex)
14959 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
14961 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost allocation: sanity-check the frame index under stats builds.
14968 #if VMA_STATS_STRING_ENABLED 14969 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
14970 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
14973 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
14974 if(localLastUseFrameIndex == localCurrFrameIndex)
14980 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
14982 localLastUseFrameIndex = localCurrFrameIndex;
// Body of VmaAllocator_T::CreatePool (header and create-info validation partly
// elided in this extraction). Constructs a VmaPool_T with a computed preferred
// block size, pre-creates its minimum blocks, assigns a unique pool id, and
// inserts it into the sorted pool list under a write lock.
14994 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
15004 return VK_ERROR_INITIALIZATION_FAILED;
15007 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
15009 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
15011 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
15012 if(res != VK_SUCCESS)
15014 vma_delete(
this, *pPool);
15021 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15022 (*pPool)->SetId(m_NextPoolId++);
15023 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Removes the pool from the sorted pool list (under a write lock) and deletes
// it. The pool must have been created by this allocator.
15029 void VmaAllocator_T::DestroyPool(
VmaPool pool)
15033 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15034 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
15035 VMA_ASSERT(success &&
"Pool not found in Allocator.");
15038 vma_delete(
this, pool);
// Body of VmaAllocator_T::GetPoolStats (header elided): delegates to the pool's
// block vector.
15043 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Atomically publishes the application's current frame index, used by the
// lost-allocation logic (CanBecomeLost / TouchAllocation).
15046 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
15048 m_CurrentFrameIndex.store(frameIndex);
// Marks eligible allocations in the pool as lost relative to the current frame
// index; optionally reports how many were lost via pLostAllocationCount.
15051 void VmaAllocator_T::MakePoolAllocationsLost(
15053 size_t* pLostAllocationCount)
15055 hPool->m_BlockVector.MakePoolAllocationsLost(
15056 m_CurrentFrameIndex.load(),
15057 pLostAllocationCount);
// Delegates margin-corruption detection for a single custom pool to its block
// vector.
15060 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
15062 return hPool->m_BlockVector.CheckCorruption();
// Runs corruption checks over all default block vectors and all custom pools
// whose memory type is selected by memoryTypeBits. Starts from
// VK_ERROR_FEATURE_NOT_PRESENT (nothing checkable) and upgrades to VK_SUCCESS
// once any vector supports and passes the check; other errors (handling elided
// here) propagate.
15065 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
15067 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default block vectors, filtered by the memory-type mask.
15070 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15072 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
15074 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
15075 VMA_ASSERT(pBlockVector);
15076 VkResult localRes = pBlockVector->CheckCorruption();
15079 case VK_ERROR_FEATURE_NOT_PRESENT:
15082 finalRes = VK_SUCCESS;
// Custom pools (shared-read lock), same mask filtering.
15092 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15093 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
15095 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
15097 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
15100 case VK_ERROR_FEATURE_NOT_PRESENT:
15103 finalRes = VK_SUCCESS;
15115 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
15117 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
15118 (*pAllocation)->InitLost();
15121 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
15123 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
15126 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15128 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15129 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
15131 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15132 if(res == VK_SUCCESS)
15134 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
15139 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
15144 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15147 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
15149 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
15155 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
15157 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
15159 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
15162 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
15164 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
15165 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15167 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15168 m_HeapSizeLimit[heapIndex] += size;
15172 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
15174 if(hAllocation->CanBecomeLost())
15176 return VK_ERROR_MEMORY_MAP_FAILED;
15179 switch(hAllocation->GetType())
15181 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15183 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15184 char *pBytes = VMA_NULL;
15185 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
15186 if(res == VK_SUCCESS)
15188 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
15189 hAllocation->BlockAllocMap();
15193 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15194 return hAllocation->DedicatedAllocMap(
this, ppData);
15197 return VK_ERROR_MEMORY_MAP_FAILED;
15203 switch(hAllocation->GetType())
15205 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15207 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15208 hAllocation->BlockAllocUnmap();
15209 pBlock->Unmap(
this, 1);
15212 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15213 hAllocation->DedicatedAllocUnmap(
this);
15220 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
15222 VkResult res = VK_SUCCESS;
15223 switch(hAllocation->GetType())
15225 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15226 res = GetVulkanFunctions().vkBindBufferMemory(
15229 hAllocation->GetMemory(),
15232 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15234 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15235 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
15236 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
15245 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
15247 VkResult res = VK_SUCCESS;
15248 switch(hAllocation->GetType())
15250 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15251 res = GetVulkanFunctions().vkBindImageMemory(
15254 hAllocation->GetMemory(),
15257 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15259 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15260 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
15261 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
15270 void VmaAllocator_T::FlushOrInvalidateAllocation(
15272 VkDeviceSize offset, VkDeviceSize size,
15273 VMA_CACHE_OPERATION op)
15275 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
15276 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
15278 const VkDeviceSize allocationSize = hAllocation->GetSize();
15279 VMA_ASSERT(offset <= allocationSize);
15281 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
15283 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
15284 memRange.memory = hAllocation->GetMemory();
15286 switch(hAllocation->GetType())
15288 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15289 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15290 if(size == VK_WHOLE_SIZE)
15292 memRange.size = allocationSize - memRange.offset;
15296 VMA_ASSERT(offset + size <= allocationSize);
15297 memRange.size = VMA_MIN(
15298 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
15299 allocationSize - memRange.offset);
15303 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15306 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15307 if(size == VK_WHOLE_SIZE)
15309 size = allocationSize - offset;
15313 VMA_ASSERT(offset + size <= allocationSize);
15315 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
15318 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
15319 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
15320 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
15321 memRange.offset += allocationOffset;
15322 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
15333 case VMA_CACHE_FLUSH:
15334 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
15336 case VMA_CACHE_INVALIDATE:
15337 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
15346 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
15348 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
15350 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
15352 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15353 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
15354 VMA_ASSERT(pDedicatedAllocations);
15355 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
15356 VMA_ASSERT(success);
15359 VkDeviceMemory hMemory = allocation->GetMemory();
15371 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
15373 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
15376 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
15378 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
15379 !hAllocation->CanBecomeLost() &&
15380 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15382 void* pData = VMA_NULL;
15383 VkResult res = Map(hAllocation, &pData);
15384 if(res == VK_SUCCESS)
15386 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
15387 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
15388 Unmap(hAllocation);
15392 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
15397 #if VMA_STATS_STRING_ENABLED 15399 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
15401 bool dedicatedAllocationsStarted =
false;
15402 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15404 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15405 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
15406 VMA_ASSERT(pDedicatedAllocVector);
15407 if(pDedicatedAllocVector->empty() ==
false)
15409 if(dedicatedAllocationsStarted ==
false)
15411 dedicatedAllocationsStarted =
true;
15412 json.WriteString(
"DedicatedAllocations");
15413 json.BeginObject();
15416 json.BeginString(
"Type ");
15417 json.ContinueString(memTypeIndex);
15422 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
15424 json.BeginObject(
true);
15426 hAlloc->PrintParameters(json);
15433 if(dedicatedAllocationsStarted)
15439 bool allocationsStarted =
false;
15440 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15442 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
15444 if(allocationsStarted ==
false)
15446 allocationsStarted =
true;
15447 json.WriteString(
"DefaultPools");
15448 json.BeginObject();
15451 json.BeginString(
"Type ");
15452 json.ContinueString(memTypeIndex);
15455 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
15458 if(allocationsStarted)
15466 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15467 const size_t poolCount = m_Pools.size();
15470 json.WriteString(
"Pools");
15471 json.BeginObject();
15472 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
15474 json.BeginString();
15475 json.ContinueString(m_Pools[poolIndex]->GetId());
15478 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
15485 #endif // #if VMA_STATS_STRING_ENABLED 15494 VMA_ASSERT(pCreateInfo && pAllocator);
15495 VMA_DEBUG_LOG(
"vmaCreateAllocator");
15497 return (*pAllocator)->Init(pCreateInfo);
15503 if(allocator != VK_NULL_HANDLE)
15505 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
15506 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
15507 vma_delete(&allocationCallbacks, allocator);
15513 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
15515 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
15516 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
15521 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
15523 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
15524 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
15529 uint32_t memoryTypeIndex,
15530 VkMemoryPropertyFlags* pFlags)
15532 VMA_ASSERT(allocator && pFlags);
15533 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
15534 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
15539 uint32_t frameIndex)
15541 VMA_ASSERT(allocator);
15542 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
15544 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15546 allocator->SetCurrentFrameIndex(frameIndex);
15553 VMA_ASSERT(allocator && pStats);
15554 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15555 allocator->CalculateStats(pStats);
15558 #if VMA_STATS_STRING_ENABLED 15562 char** ppStatsString,
15563 VkBool32 detailedMap)
15565 VMA_ASSERT(allocator && ppStatsString);
15566 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15568 VmaStringBuilder sb(allocator);
15570 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
15571 json.BeginObject();
15574 allocator->CalculateStats(&stats);
15576 json.WriteString(
"Total");
15577 VmaPrintStatInfo(json, stats.
total);
15579 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
15581 json.BeginString(
"Heap ");
15582 json.ContinueString(heapIndex);
15584 json.BeginObject();
15586 json.WriteString(
"Size");
15587 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
15589 json.WriteString(
"Flags");
15590 json.BeginArray(
true);
15591 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
15593 json.WriteString(
"DEVICE_LOCAL");
15599 json.WriteString(
"Stats");
15600 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
15603 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
15605 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
15607 json.BeginString(
"Type ");
15608 json.ContinueString(typeIndex);
15611 json.BeginObject();
15613 json.WriteString(
"Flags");
15614 json.BeginArray(
true);
15615 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
15616 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
15618 json.WriteString(
"DEVICE_LOCAL");
15620 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15622 json.WriteString(
"HOST_VISIBLE");
15624 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
15626 json.WriteString(
"HOST_COHERENT");
15628 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
15630 json.WriteString(
"HOST_CACHED");
15632 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
15634 json.WriteString(
"LAZILY_ALLOCATED");
15640 json.WriteString(
"Stats");
15641 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
15650 if(detailedMap == VK_TRUE)
15652 allocator->PrintDetailedMap(json);
15658 const size_t len = sb.GetLength();
15659 char*
const pChars = vma_new_array(allocator,
char, len + 1);
15662 memcpy(pChars, sb.GetData(), len);
15664 pChars[len] =
'\0';
15665 *ppStatsString = pChars;
15670 char* pStatsString)
15672 if(pStatsString != VMA_NULL)
15674 VMA_ASSERT(allocator);
15675 size_t len = strlen(pStatsString);
15676 vma_delete_array(allocator, pStatsString, len + 1);
15680 #endif // #if VMA_STATS_STRING_ENABLED 15687 uint32_t memoryTypeBits,
15689 uint32_t* pMemoryTypeIndex)
15691 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15692 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15693 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15700 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
15701 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
15706 preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15710 switch(pAllocationCreateInfo->
usage)
15715 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15717 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15721 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
15724 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15725 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15727 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15731 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15732 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
15738 *pMemoryTypeIndex = UINT32_MAX;
15739 uint32_t minCost = UINT32_MAX;
15740 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
15741 memTypeIndex < allocator->GetMemoryTypeCount();
15742 ++memTypeIndex, memTypeBit <<= 1)
15745 if((memTypeBit & memoryTypeBits) != 0)
15747 const VkMemoryPropertyFlags currFlags =
15748 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
15750 if((requiredFlags & ~currFlags) == 0)
15753 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
15755 if(currCost < minCost)
15757 *pMemoryTypeIndex = memTypeIndex;
15762 minCost = currCost;
15767 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
15772 const VkBufferCreateInfo* pBufferCreateInfo,
15774 uint32_t* pMemoryTypeIndex)
15776 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15777 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
15778 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15779 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15781 const VkDevice hDev = allocator->m_hDevice;
15782 VkBuffer hBuffer = VK_NULL_HANDLE;
15783 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
15784 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
15785 if(res == VK_SUCCESS)
15787 VkMemoryRequirements memReq = {};
15788 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
15789 hDev, hBuffer, &memReq);
15793 memReq.memoryTypeBits,
15794 pAllocationCreateInfo,
15797 allocator->GetVulkanFunctions().vkDestroyBuffer(
15798 hDev, hBuffer, allocator->GetAllocationCallbacks());
15805 const VkImageCreateInfo* pImageCreateInfo,
15807 uint32_t* pMemoryTypeIndex)
15809 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15810 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
15811 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15812 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15814 const VkDevice hDev = allocator->m_hDevice;
15815 VkImage hImage = VK_NULL_HANDLE;
15816 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
15817 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
15818 if(res == VK_SUCCESS)
15820 VkMemoryRequirements memReq = {};
15821 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
15822 hDev, hImage, &memReq);
15826 memReq.memoryTypeBits,
15827 pAllocationCreateInfo,
15830 allocator->GetVulkanFunctions().vkDestroyImage(
15831 hDev, hImage, allocator->GetAllocationCallbacks());
15841 VMA_ASSERT(allocator && pCreateInfo && pPool);
15843 VMA_DEBUG_LOG(
"vmaCreatePool");
15845 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15847 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
15849 #if VMA_RECORDING_ENABLED 15850 if(allocator->GetRecorder() != VMA_NULL)
15852 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
15863 VMA_ASSERT(allocator);
15865 if(pool == VK_NULL_HANDLE)
15870 VMA_DEBUG_LOG(
"vmaDestroyPool");
15872 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15874 #if VMA_RECORDING_ENABLED 15875 if(allocator->GetRecorder() != VMA_NULL)
15877 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
15881 allocator->DestroyPool(pool);
15889 VMA_ASSERT(allocator && pool && pPoolStats);
15891 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15893 allocator->GetPoolStats(pool, pPoolStats);
15899 size_t* pLostAllocationCount)
15901 VMA_ASSERT(allocator && pool);
15903 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15905 #if VMA_RECORDING_ENABLED 15906 if(allocator->GetRecorder() != VMA_NULL)
15908 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
15912 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
15917 VMA_ASSERT(allocator && pool);
15919 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15921 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
15923 return allocator->CheckPoolCorruption(pool);
15928 const VkMemoryRequirements* pVkMemoryRequirements,
15933 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
15935 VMA_DEBUG_LOG(
"vmaAllocateMemory");
15937 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15939 VkResult result = allocator->AllocateMemory(
15940 *pVkMemoryRequirements,
15946 VMA_SUBALLOCATION_TYPE_UNKNOWN,
15950 #if VMA_RECORDING_ENABLED 15951 if(allocator->GetRecorder() != VMA_NULL)
15953 allocator->GetRecorder()->RecordAllocateMemory(
15954 allocator->GetCurrentFrameIndex(),
15955 *pVkMemoryRequirements,
15961 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
15963 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
15971 const VkMemoryRequirements* pVkMemoryRequirements,
15973 size_t allocationCount,
15977 if(allocationCount == 0)
15982 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
15984 VMA_DEBUG_LOG(
"vmaAllocateMemoryPages");
15986 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15988 VkResult result = allocator->AllocateMemory(
15989 *pVkMemoryRequirements,
15995 VMA_SUBALLOCATION_TYPE_UNKNOWN,
15999 #if VMA_RECORDING_ENABLED 16000 if(allocator->GetRecorder() != VMA_NULL)
16002 allocator->GetRecorder()->RecordAllocateMemoryPages(
16003 allocator->GetCurrentFrameIndex(),
16004 *pVkMemoryRequirements,
16006 (uint64_t)allocationCount,
16011 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
16013 for(
size_t i = 0; i < allocationCount; ++i)
16015 allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
16029 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16031 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
16033 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16035 VkMemoryRequirements vkMemReq = {};
16036 bool requiresDedicatedAllocation =
false;
16037 bool prefersDedicatedAllocation =
false;
16038 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
16039 requiresDedicatedAllocation,
16040 prefersDedicatedAllocation);
16042 VkResult result = allocator->AllocateMemory(
16044 requiresDedicatedAllocation,
16045 prefersDedicatedAllocation,
16049 VMA_SUBALLOCATION_TYPE_BUFFER,
16053 #if VMA_RECORDING_ENABLED 16054 if(allocator->GetRecorder() != VMA_NULL)
16056 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
16057 allocator->GetCurrentFrameIndex(),
16059 requiresDedicatedAllocation,
16060 prefersDedicatedAllocation,
16066 if(pAllocationInfo && result == VK_SUCCESS)
16068 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16081 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16083 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
16085 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16087 VkMemoryRequirements vkMemReq = {};
16088 bool requiresDedicatedAllocation =
false;
16089 bool prefersDedicatedAllocation =
false;
16090 allocator->GetImageMemoryRequirements(image, vkMemReq,
16091 requiresDedicatedAllocation, prefersDedicatedAllocation);
16093 VkResult result = allocator->AllocateMemory(
16095 requiresDedicatedAllocation,
16096 prefersDedicatedAllocation,
16100 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
16104 #if VMA_RECORDING_ENABLED 16105 if(allocator->GetRecorder() != VMA_NULL)
16107 allocator->GetRecorder()->RecordAllocateMemoryForImage(
16108 allocator->GetCurrentFrameIndex(),
16110 requiresDedicatedAllocation,
16111 prefersDedicatedAllocation,
16117 if(pAllocationInfo && result == VK_SUCCESS)
16119 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16129 VMA_ASSERT(allocator);
16131 if(allocation == VK_NULL_HANDLE)
16136 VMA_DEBUG_LOG(
"vmaFreeMemory");
16138 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16140 #if VMA_RECORDING_ENABLED 16141 if(allocator->GetRecorder() != VMA_NULL)
16143 allocator->GetRecorder()->RecordFreeMemory(
16144 allocator->GetCurrentFrameIndex(),
16149 allocator->FreeMemory(
16156 size_t allocationCount,
16159 if(allocationCount == 0)
16164 VMA_ASSERT(allocator);
16166 VMA_DEBUG_LOG(
"vmaFreeMemoryPages");
16168 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16170 #if VMA_RECORDING_ENABLED 16171 if(allocator->GetRecorder() != VMA_NULL)
16173 allocator->GetRecorder()->RecordFreeMemoryPages(
16174 allocator->GetCurrentFrameIndex(),
16175 (uint64_t)allocationCount,
16180 allocator->FreeMemory(allocationCount, pAllocations);
16186 VkDeviceSize newSize)
16188 VMA_ASSERT(allocator && allocation);
16190 VMA_DEBUG_LOG(
"vmaResizeAllocation");
16192 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16194 #if VMA_RECORDING_ENABLED 16195 if(allocator->GetRecorder() != VMA_NULL)
16197 allocator->GetRecorder()->RecordResizeAllocation(
16198 allocator->GetCurrentFrameIndex(),
16204 return allocator->ResizeAllocation(allocation, newSize);
16212 VMA_ASSERT(allocator && allocation && pAllocationInfo);
16214 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16216 #if VMA_RECORDING_ENABLED 16217 if(allocator->GetRecorder() != VMA_NULL)
16219 allocator->GetRecorder()->RecordGetAllocationInfo(
16220 allocator->GetCurrentFrameIndex(),
16225 allocator->GetAllocationInfo(allocation, pAllocationInfo);
16232 VMA_ASSERT(allocator && allocation);
16234 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16236 #if VMA_RECORDING_ENABLED 16237 if(allocator->GetRecorder() != VMA_NULL)
16239 allocator->GetRecorder()->RecordTouchAllocation(
16240 allocator->GetCurrentFrameIndex(),
16245 return allocator->TouchAllocation(allocation);
16253 VMA_ASSERT(allocator && allocation);
16255 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16257 allocation->SetUserData(allocator, pUserData);
16259 #if VMA_RECORDING_ENABLED 16260 if(allocator->GetRecorder() != VMA_NULL)
16262 allocator->GetRecorder()->RecordSetAllocationUserData(
16263 allocator->GetCurrentFrameIndex(),
16274 VMA_ASSERT(allocator && pAllocation);
16276 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
16278 allocator->CreateLostAllocation(pAllocation);
16280 #if VMA_RECORDING_ENABLED 16281 if(allocator->GetRecorder() != VMA_NULL)
16283 allocator->GetRecorder()->RecordCreateLostAllocation(
16284 allocator->GetCurrentFrameIndex(),
16295 VMA_ASSERT(allocator && allocation && ppData);
16297 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16299 VkResult res = allocator->Map(allocation, ppData);
16301 #if VMA_RECORDING_ENABLED 16302 if(allocator->GetRecorder() != VMA_NULL)
16304 allocator->GetRecorder()->RecordMapMemory(
16305 allocator->GetCurrentFrameIndex(),
16317 VMA_ASSERT(allocator && allocation);
16319 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16321 #if VMA_RECORDING_ENABLED 16322 if(allocator->GetRecorder() != VMA_NULL)
16324 allocator->GetRecorder()->RecordUnmapMemory(
16325 allocator->GetCurrentFrameIndex(),
16330 allocator->Unmap(allocation);
16335 VMA_ASSERT(allocator && allocation);
16337 VMA_DEBUG_LOG(
"vmaFlushAllocation");
16339 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16341 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
16343 #if VMA_RECORDING_ENABLED 16344 if(allocator->GetRecorder() != VMA_NULL)
16346 allocator->GetRecorder()->RecordFlushAllocation(
16347 allocator->GetCurrentFrameIndex(),
16348 allocation, offset, size);
16355 VMA_ASSERT(allocator && allocation);
16357 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
16359 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16361 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
16363 #if VMA_RECORDING_ENABLED 16364 if(allocator->GetRecorder() != VMA_NULL)
16366 allocator->GetRecorder()->RecordInvalidateAllocation(
16367 allocator->GetCurrentFrameIndex(),
16368 allocation, offset, size);
16375 VMA_ASSERT(allocator);
16377 VMA_DEBUG_LOG(
"vmaCheckCorruption");
16379 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16381 return allocator->CheckCorruption(memoryTypeBits);
16387 size_t allocationCount,
16388 VkBool32* pAllocationsChanged,
16398 if(pDefragmentationInfo != VMA_NULL)
16412 if(res == VK_NOT_READY)
16425 VMA_ASSERT(allocator && pInfo && pContext);
16436 VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->
poolCount, pInfo->
pPools));
16438 VMA_DEBUG_LOG(
"vmaDefragmentationBegin");
16440 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16442 VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);
16444 #if VMA_RECORDING_ENABLED 16445 if(allocator->GetRecorder() != VMA_NULL)
16447 allocator->GetRecorder()->RecordDefragmentationBegin(
16448 allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
16459 VMA_ASSERT(allocator);
16461 VMA_DEBUG_LOG(
"vmaDefragmentationEnd");
16463 if(context != VK_NULL_HANDLE)
16465 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16467 #if VMA_RECORDING_ENABLED 16468 if(allocator->GetRecorder() != VMA_NULL)
16470 allocator->GetRecorder()->RecordDefragmentationEnd(
16471 allocator->GetCurrentFrameIndex(), context);
16475 return allocator->DefragmentationEnd(context);
16488 VMA_ASSERT(allocator && allocation && buffer);
16490 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
16492 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16494 return allocator->BindBufferMemory(allocation, buffer);
16502 VMA_ASSERT(allocator && allocation && image);
16504 VMA_DEBUG_LOG(
"vmaBindImageMemory");
16506 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16508 return allocator->BindImageMemory(allocation, image);
16513 const VkBufferCreateInfo* pBufferCreateInfo,
16519 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
16521 if(pBufferCreateInfo->size == 0)
16523 return VK_ERROR_VALIDATION_FAILED_EXT;
16526 VMA_DEBUG_LOG(
"vmaCreateBuffer");
16528 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16530 *pBuffer = VK_NULL_HANDLE;
16531 *pAllocation = VK_NULL_HANDLE;
16534 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
16535 allocator->m_hDevice,
16537 allocator->GetAllocationCallbacks(),
16542 VkMemoryRequirements vkMemReq = {};
16543 bool requiresDedicatedAllocation =
false;
16544 bool prefersDedicatedAllocation =
false;
16545 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
16546 requiresDedicatedAllocation, prefersDedicatedAllocation);
16550 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
16552 VMA_ASSERT(vkMemReq.alignment %
16553 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
16555 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
16557 VMA_ASSERT(vkMemReq.alignment %
16558 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
16560 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
16562 VMA_ASSERT(vkMemReq.alignment %
16563 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
16567 res = allocator->AllocateMemory(
16569 requiresDedicatedAllocation,
16570 prefersDedicatedAllocation,
16573 *pAllocationCreateInfo,
16574 VMA_SUBALLOCATION_TYPE_BUFFER,
16578 #if VMA_RECORDING_ENABLED 16579 if(allocator->GetRecorder() != VMA_NULL)
16581 allocator->GetRecorder()->RecordCreateBuffer(
16582 allocator->GetCurrentFrameIndex(),
16583 *pBufferCreateInfo,
16584 *pAllocationCreateInfo,
16592 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
16596 #if VMA_STATS_STRING_ENABLED 16597 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
16599 if(pAllocationInfo != VMA_NULL)
16601 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16606 allocator->FreeMemory(
16609 *pAllocation = VK_NULL_HANDLE;
16610 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16611 *pBuffer = VK_NULL_HANDLE;
16614 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16615 *pBuffer = VK_NULL_HANDLE;
16626 VMA_ASSERT(allocator);
16628 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
16633 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
16635 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16637 #if VMA_RECORDING_ENABLED 16638 if(allocator->GetRecorder() != VMA_NULL)
16640 allocator->GetRecorder()->RecordDestroyBuffer(
16641 allocator->GetCurrentFrameIndex(),
16646 if(buffer != VK_NULL_HANDLE)
16648 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
16651 if(allocation != VK_NULL_HANDLE)
16653 allocator->FreeMemory(
16661 const VkImageCreateInfo* pImageCreateInfo,
16667 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
16669 if(pImageCreateInfo->extent.width == 0 ||
16670 pImageCreateInfo->extent.height == 0 ||
16671 pImageCreateInfo->extent.depth == 0 ||
16672 pImageCreateInfo->mipLevels == 0 ||
16673 pImageCreateInfo->arrayLayers == 0)
16675 return VK_ERROR_VALIDATION_FAILED_EXT;
16678 VMA_DEBUG_LOG(
"vmaCreateImage");
16680 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16682 *pImage = VK_NULL_HANDLE;
16683 *pAllocation = VK_NULL_HANDLE;
16686 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
16687 allocator->m_hDevice,
16689 allocator->GetAllocationCallbacks(),
16693 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
16694 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
16695 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
16698 VkMemoryRequirements vkMemReq = {};
16699 bool requiresDedicatedAllocation =
false;
16700 bool prefersDedicatedAllocation =
false;
16701 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
16702 requiresDedicatedAllocation, prefersDedicatedAllocation);
16704 res = allocator->AllocateMemory(
16706 requiresDedicatedAllocation,
16707 prefersDedicatedAllocation,
16710 *pAllocationCreateInfo,
16715 #if VMA_RECORDING_ENABLED 16716 if(allocator->GetRecorder() != VMA_NULL)
16718 allocator->GetRecorder()->RecordCreateImage(
16719 allocator->GetCurrentFrameIndex(),
16721 *pAllocationCreateInfo,
16729 res = allocator->BindImageMemory(*pAllocation, *pImage);
16733 #if VMA_STATS_STRING_ENABLED 16734 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
16736 if(pAllocationInfo != VMA_NULL)
16738 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16743 allocator->FreeMemory(
16746 *pAllocation = VK_NULL_HANDLE;
16747 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
16748 *pImage = VK_NULL_HANDLE;
16751 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
16752 *pImage = VK_NULL_HANDLE;
16763 VMA_ASSERT(allocator);
16765 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
16770 VMA_DEBUG_LOG(
"vmaDestroyImage");
16772 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16774 #if VMA_RECORDING_ENABLED 16775 if(allocator->GetRecorder() != VMA_NULL)
16777 allocator->GetRecorder()->RecordDestroyImage(
16778 allocator->GetCurrentFrameIndex(),
16783 if(image != VK_NULL_HANDLE)
16785 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
16787 if(allocation != VK_NULL_HANDLE)
16789 allocator->FreeMemory(
16795 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1753
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:2051
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1811
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side,...
Definition: vk_mem_alloc.h:2848
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Deprecated. Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
Definition: vk_mem_alloc.h:1785
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2376
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1765
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:2013
Definition: vk_mem_alloc.h:2111
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:2801
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1757
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2476
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1808
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2884
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2265
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1652
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2357
Definition: vk_mem_alloc.h:2088
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:2804
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1746
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2164
Definition: vk_mem_alloc.h:2040
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1820
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2293
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1.
Definition: vk_mem_alloc.h:1874
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1805
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2044
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1946
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1762
VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:2838
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1945
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2888
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(),...
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1837
VmaStatInfo total
Definition: vk_mem_alloc.h:1955
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2896
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2148
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2879
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1763
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1688
Represents main object of this library initialized.
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1814
void vmaFreeMemoryPages(VmaAllocator allocator, size_t allocationCount, VmaAllocation *pAllocations)
Frees memory and destroys multiple allocations.
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2307
Definition: vk_mem_alloc.h:2301
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:1769
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1881
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2486
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1758
VkResult vmaDefragmentationBegin(VmaAllocator allocator, const VmaDefragmentationInfo2 *pInfo, VmaDefragmentationStats *pStats, VmaDefragmentationContext *pContext)
Begins defragmentation process.
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1783
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2185
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2327
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost.
Definition: vk_mem_alloc.h:2363
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1744
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2310
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2853
VmaMemoryUsage
Definition: vk_mem_alloc.h:1991
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:2813
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2874
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects.
Definition: vk_mem_alloc.h:2892
Definition: vk_mem_alloc.h:2030
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2172
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1761
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
VkResult vmaDefragmentationEnd(VmaAllocator allocator, VmaDefragmentationContext context)
Ends defragmentation process.
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1951
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1694
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:2792
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
Definition: vk_mem_alloc.h:2790
Definition: vk_mem_alloc.h:2132
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:2819
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1715
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1787
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1720
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2894
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2159
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2373
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1754
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1934
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes....
Definition: vk_mem_alloc.h:2322
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1707
Definition: vk_mem_alloc.h:2297
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:2095
Represents Opaque object that represents started defragmentation process.
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1947
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1711
Definition: vk_mem_alloc.h:2122
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2313
Definition: vk_mem_alloc.h:2039
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1760
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2154
Definition: vk_mem_alloc.h:2145
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1937
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1756
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2335
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1823
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2366
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2143
VkDeviceSize maxCpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2843
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2178
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1862
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1953
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
Definition: vk_mem_alloc.h:2075
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1946
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1767
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1793
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use.
Definition: vk_mem_alloc.h:2789
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:2867
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1709
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1766
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2349
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1759
Definition: vk_mem_alloc.h:2106
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1801
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2500
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB....
Definition: vk_mem_alloc.h:1817
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1946
VkResult vmaAllocateMemoryPages(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, size_t allocationCount, VmaAllocation *pAllocations, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation for multiple allocation objects at once.
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1943
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2354
Parameters for defragmentation.
Definition: vk_mem_alloc.h:2798
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions.
Definition: vk_mem_alloc.h:2115
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory,...
Definition: vk_mem_alloc.h:2481
Definition: vk_mem_alloc.h:2129
Definition: vk_mem_alloc.h:2141
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2890
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1752
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1941
Definition: vk_mem_alloc.h:1996
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2303
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1790
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1939
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1764
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1768
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2062
Definition: vk_mem_alloc.h:2136
Definition: vk_mem_alloc.h:2023
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2495
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1742
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1755
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2282
VkResult vmaResizeAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize newSize)
Tries to resize an allocation in place, if there is enough free memory after it.
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2462
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:2126
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2247
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1947
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame.
Definition: vk_mem_alloc.h:2101
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1777
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1954
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2360
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1947
struct VmaDefragmentationInfo2 VmaDefragmentationInfo2
Parameters for defragmentation.
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side,...
Definition: vk_mem_alloc.h:2858
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2467
uint32_t poolCount
Numer of pools in pPools array.
Definition: vk_mem_alloc.h:2822