23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1639 #ifndef VMA_RECORDING_ENABLED 1641 #define VMA_RECORDING_ENABLED 1 1643 #define VMA_RECORDING_ENABLED 0 1648 #define NOMINMAX // For windows.h 1652 #include <vulkan/vulkan.h> 1655 #if VMA_RECORDING_ENABLED 1656 #include <windows.h> 1659 #if !defined(VMA_DEDICATED_ALLOCATION) 1660 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1661 #define VMA_DEDICATED_ALLOCATION 1 1663 #define VMA_DEDICATED_ALLOCATION 0 1681 uint32_t memoryType,
1682 VkDeviceMemory memory,
1687 uint32_t memoryType,
1688 VkDeviceMemory memory,
1761 #if VMA_DEDICATED_ALLOCATION 1762 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1763 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1890 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1898 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1908 uint32_t memoryTypeIndex,
1909 VkMemoryPropertyFlags* pFlags);
1921 uint32_t frameIndex);
1954 #define VMA_STATS_STRING_ENABLED 1 1956 #if VMA_STATS_STRING_ENABLED 1963 char** ppStatsString,
1964 VkBool32 detailedMap);
1968 char* pStatsString);
1970 #endif // #if VMA_STATS_STRING_ENABLED 2197 uint32_t memoryTypeBits,
2199 uint32_t* pMemoryTypeIndex);
2215 const VkBufferCreateInfo* pBufferCreateInfo,
2217 uint32_t* pMemoryTypeIndex);
2233 const VkImageCreateInfo* pImageCreateInfo,
2235 uint32_t* pMemoryTypeIndex);
2407 size_t* pLostAllocationCount);
2506 const VkMemoryRequirements* pVkMemoryRequirements,
2532 const VkMemoryRequirements* pVkMemoryRequirements,
2534 size_t allocationCount,
2579 size_t allocationCount,
2605 VkDeviceSize newSize);
2974 size_t allocationCount,
2975 VkBool32* pAllocationsChanged,
3041 const VkBufferCreateInfo* pBufferCreateInfo,
3066 const VkImageCreateInfo* pImageCreateInfo,
3092 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 3095 #if defined(__cplusplus) && defined(__INTELLISENSE__) 3096 #define VMA_IMPLEMENTATION 3099 #ifdef VMA_IMPLEMENTATION 3100 #undef VMA_IMPLEMENTATION 3122 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 3123 #define VMA_STATIC_VULKAN_FUNCTIONS 1 3135 #if VMA_USE_STL_CONTAINERS 3136 #define VMA_USE_STL_VECTOR 1 3137 #define VMA_USE_STL_UNORDERED_MAP 1 3138 #define VMA_USE_STL_LIST 1 3141 #ifndef VMA_USE_STL_SHARED_MUTEX 3143 #if defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 3144 #define VMA_USE_STL_SHARED_MUTEX 1 3148 #if VMA_USE_STL_VECTOR 3152 #if VMA_USE_STL_UNORDERED_MAP 3153 #include <unordered_map> 3156 #if VMA_USE_STL_LIST 3165 #include <algorithm> 3171 #define VMA_NULL nullptr 3174 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16) 3176 void *aligned_alloc(
size_t alignment,
size_t size)
3179 if(alignment <
sizeof(
void*))
3181 alignment =
sizeof(
void*);
3184 return memalign(alignment, size);
3186 #elif defined(__APPLE__) || defined(__ANDROID__) 3188 void *aligned_alloc(
size_t alignment,
size_t size)
3191 if(alignment <
sizeof(
void*))
3193 alignment =
sizeof(
void*);
3197 if(posix_memalign(&pointer, alignment, size) == 0)
3211 #define VMA_ASSERT(expr) assert(expr) 3213 #define VMA_ASSERT(expr) 3219 #ifndef VMA_HEAVY_ASSERT 3221 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 3223 #define VMA_HEAVY_ASSERT(expr) 3227 #ifndef VMA_ALIGN_OF 3228 #define VMA_ALIGN_OF(type) (__alignof(type)) 3231 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 3233 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 3235 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 3239 #ifndef VMA_SYSTEM_FREE 3241 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 3243 #define VMA_SYSTEM_FREE(ptr) free(ptr) 3248 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 3252 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 3256 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 3260 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 3263 #ifndef VMA_DEBUG_LOG 3264 #define VMA_DEBUG_LOG(format, ...) 3274 #if VMA_STATS_STRING_ENABLED 3275 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
3277 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Formats `num` as decimal text into outStr (buffer capacity strLen,
// including the terminating null). Used by the JSON stats writer.
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}
// Formats pointer `ptr` into outStr using the implementation-defined "%p"
// representation (buffer capacity strLen, including terminating null).
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
3293 void Lock() { m_Mutex.lock(); }
3294 void Unlock() { m_Mutex.unlock(); }
3298 #define VMA_MUTEX VmaMutex 3302 #ifndef VMA_RW_MUTEX 3303 #if VMA_USE_STL_SHARED_MUTEX 3305 #include <shared_mutex> 3309 void LockRead() { m_Mutex.lock_shared(); }
3310 void UnlockRead() { m_Mutex.unlock_shared(); }
3311 void LockWrite() { m_Mutex.lock(); }
3312 void UnlockWrite() { m_Mutex.unlock(); }
3314 std::shared_mutex m_Mutex;
3316 #define VMA_RW_MUTEX VmaRWMutex 3317 #elif defined(_WIN32) 3322 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
3323 void LockRead() { AcquireSRWLockShared(&m_Lock); }
3324 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
3325 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
3326 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
3330 #define VMA_RW_MUTEX VmaRWMutex 3336 void LockRead() { m_Mutex.Lock(); }
3337 void UnlockRead() { m_Mutex.Unlock(); }
3338 void LockWrite() { m_Mutex.Lock(); }
3339 void UnlockWrite() { m_Mutex.Unlock(); }
3343 #define VMA_RW_MUTEX VmaRWMutex 3344 #endif // #if VMA_USE_STL_SHARED_MUTEX 3345 #endif // #ifndef VMA_RW_MUTEX 3355 #ifndef VMA_ATOMIC_UINT32 3356 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 3359 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 3364 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 3367 #ifndef VMA_DEBUG_ALIGNMENT 3372 #define VMA_DEBUG_ALIGNMENT (1) 3375 #ifndef VMA_DEBUG_MARGIN 3380 #define VMA_DEBUG_MARGIN (0) 3383 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 3388 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 3391 #ifndef VMA_DEBUG_DETECT_CORRUPTION 3397 #define VMA_DEBUG_DETECT_CORRUPTION (0) 3400 #ifndef VMA_DEBUG_GLOBAL_MUTEX 3405 #define VMA_DEBUG_GLOBAL_MUTEX (0) 3408 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 3413 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 3416 #ifndef VMA_SMALL_HEAP_MAX_SIZE 3417 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 3421 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 3422 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 3426 #ifndef VMA_CLASS_NO_COPY 3427 #define VMA_CLASS_NO_COPY(className) \ 3429 className(const className&) = delete; \ 3430 className& operator=(const className&) = delete; 3433 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
3436 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
3438 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
3439 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
3445 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
3447 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
3448 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in `v` (population count),
// using the classic branch-free SWAR reduction.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
// Aligns `val` up to the nearest multiple of `align`.
// Works for any non-zero align (not only powers of two) on unsigned types.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}
// Aligns `val` down to the nearest multiple of `align`.
// Works for any non-zero align (not only powers of two) on unsigned types.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}
// Division with rounding to nearest: (x + y/2) / y.
// Intended for unsigned integer types (no handling of negative values).
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
// Returns true if `x` is a power of two.
// NOTE: also returns true for x == 0 — callers must not rely on 0 being
// rejected.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x-1)) == 0;
}
3495 static inline uint32_t VmaNextPow2(uint32_t v)
3506 static inline uint64_t VmaNextPow2(uint64_t v)
3520 static inline uint32_t VmaPrevPow2(uint32_t v)
3530 static inline uint64_t VmaPrevPow2(uint64_t v)
3542 static inline bool VmaStrIsEmpty(
const char* pStr)
3544 return pStr == VMA_NULL || *pStr ==
'\0';
3547 static const char* VmaAlgorithmToStr(uint32_t algorithm)
// Lomuto-style partition step for VmaQuickSort: partitions [beg, end) around
// the last element as pivot and returns an iterator to the pivot's final
// position. `cmp` is a "less" comparator.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
3588 template<
typename Iterator,
typename Compare>
3589 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
3593 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
3594 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
3595 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
3599 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 3601 #endif // #ifndef VMA_SORT 3610 static inline bool VmaBlocksOnSamePage(
3611 VkDeviceSize resourceAOffset,
3612 VkDeviceSize resourceASize,
3613 VkDeviceSize resourceBOffset,
3614 VkDeviceSize pageSize)
3616 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3617 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3618 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3619 VkDeviceSize resourceBStart = resourceBOffset;
3620 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3621 return resourceAEndPage == resourceBStartPage;
// Type of a suballocation within a memory block. The numeric order matters:
// VmaIsBufferImageGranularityConflict() normalizes pairs so the smaller
// value comes first before dispatching.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,          // unused region
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,       // resource type not known
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // image, tiling unknown
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
3641 static inline bool VmaIsBufferImageGranularityConflict(
3642 VmaSuballocationType suballocType1,
3643 VmaSuballocationType suballocType2)
3645 if(suballocType1 > suballocType2)
3647 VMA_SWAP(suballocType1, suballocType2);
3650 switch(suballocType1)
3652 case VMA_SUBALLOCATION_TYPE_FREE:
3654 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3656 case VMA_SUBALLOCATION_TYPE_BUFFER:
3658 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3659 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3660 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3662 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3663 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3664 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3665 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3667 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3668 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3676 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3678 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3679 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3680 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3682 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3686 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3688 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3689 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3690 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3692 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3703 VMA_CLASS_NO_COPY(VmaMutexLock)
3705 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
3706 m_pMutex(useMutex ? &mutex : VMA_NULL)
3707 {
if(m_pMutex) { m_pMutex->Lock(); } }
3709 {
if(m_pMutex) { m_pMutex->Unlock(); } }
3711 VMA_MUTEX* m_pMutex;
3715 struct VmaMutexLockRead
3717 VMA_CLASS_NO_COPY(VmaMutexLockRead)
3719 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
3720 m_pMutex(useMutex ? &mutex : VMA_NULL)
3721 {
if(m_pMutex) { m_pMutex->LockRead(); } }
3722 ~VmaMutexLockRead() {
if(m_pMutex) { m_pMutex->UnlockRead(); } }
3724 VMA_RW_MUTEX* m_pMutex;
3728 struct VmaMutexLockWrite
3730 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
3732 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex) :
3733 m_pMutex(useMutex ? &mutex : VMA_NULL)
3734 {
if(m_pMutex) { m_pMutex->LockWrite(); } }
3735 ~VmaMutexLockWrite() {
if(m_pMutex) { m_pMutex->UnlockWrite(); } }
3737 VMA_RW_MUTEX* m_pMutex;
3740 #if VMA_DEBUG_GLOBAL_MUTEX 3741 static VMA_MUTEX gDebugGlobalMutex;
3742 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3744 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3748 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Binary search over the sorted range [beg, end): returns an iterator to the
first element that is NOT less than `key` (std::lower_bound semantics).
`cmp` is a "less" comparator; the range must be sorted by it. Returns `end`
when all elements are less than `key`.
*/
template <typename CmpLess, typename IterT, typename KeyT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpLess cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
3783 template<
typename T>
3784 static bool VmaValidatePointerArray(uint32_t count,
const T* arr)
3786 for(uint32_t i = 0; i < count; ++i)
3788 const T iPtr = arr[i];
3789 if(iPtr == VMA_NULL)
3793 for(uint32_t j = i + 1; j < count; ++j)
3807 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3809 if((pAllocationCallbacks != VMA_NULL) &&
3810 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3812 return (*pAllocationCallbacks->pfnAllocation)(
3813 pAllocationCallbacks->pUserData,
3816 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3820 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3824 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3826 if((pAllocationCallbacks != VMA_NULL) &&
3827 (pAllocationCallbacks->pfnFree != VMA_NULL))
3829 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3833 VMA_SYSTEM_FREE(ptr);
3837 template<
typename T>
3838 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3840 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3843 template<
typename T>
3844 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3846 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
3849 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3851 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3853 template<
typename T>
3854 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
3857 VmaFree(pAllocationCallbacks, ptr);
3860 template<
typename T>
3861 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
3865 for(
size_t i = count; i--; )
3869 VmaFree(pAllocationCallbacks, ptr);
3874 template<
typename T>
3875 class VmaStlAllocator
3878 const VkAllocationCallbacks*
const m_pCallbacks;
3879 typedef T value_type;
3881 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
3882 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
3884 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3885 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
3887 template<
typename U>
3888 bool operator==(
const VmaStlAllocator<U>& rhs)
const 3890 return m_pCallbacks == rhs.m_pCallbacks;
3892 template<
typename U>
3893 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 3895 return m_pCallbacks != rhs.m_pCallbacks;
3898 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
3901 #if VMA_USE_STL_VECTOR 3903 #define VmaVector std::vector 3905 template<
typename T,
typename allocatorT>
3906 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
3908 vec.insert(vec.begin() + index, item);
// Removes the element at position `index` from a std::vector
// (STL-backed build).
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
3917 #else // #if VMA_USE_STL_VECTOR 3922 template<
typename T,
typename AllocatorT>
3926 typedef T value_type;
3928 VmaVector(
const AllocatorT& allocator) :
3929 m_Allocator(allocator),
3936 VmaVector(
size_t count,
const AllocatorT& allocator) :
3937 m_Allocator(allocator),
3938 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
3944 VmaVector(
const VmaVector<T, AllocatorT>& src) :
3945 m_Allocator(src.m_Allocator),
3946 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
3947 m_Count(src.m_Count),
3948 m_Capacity(src.m_Count)
3952 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
3958 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3961 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
3965 resize(rhs.m_Count);
3968 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
3974 bool empty()
const {
return m_Count == 0; }
3975 size_t size()
const {
return m_Count; }
3976 T* data() {
return m_pArray; }
3977 const T* data()
const {
return m_pArray; }
3979 T& operator[](
size_t index)
3981 VMA_HEAVY_ASSERT(index < m_Count);
3982 return m_pArray[index];
3984 const T& operator[](
size_t index)
const 3986 VMA_HEAVY_ASSERT(index < m_Count);
3987 return m_pArray[index];
3992 VMA_HEAVY_ASSERT(m_Count > 0);
3995 const T& front()
const 3997 VMA_HEAVY_ASSERT(m_Count > 0);
4002 VMA_HEAVY_ASSERT(m_Count > 0);
4003 return m_pArray[m_Count - 1];
4005 const T& back()
const 4007 VMA_HEAVY_ASSERT(m_Count > 0);
4008 return m_pArray[m_Count - 1];
4011 void reserve(
size_t newCapacity,
bool freeMemory =
false)
4013 newCapacity = VMA_MAX(newCapacity, m_Count);
4015 if((newCapacity < m_Capacity) && !freeMemory)
4017 newCapacity = m_Capacity;
4020 if(newCapacity != m_Capacity)
4022 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
4025 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
4027 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4028 m_Capacity = newCapacity;
4029 m_pArray = newArray;
4033 void resize(
size_t newCount,
bool freeMemory =
false)
4035 size_t newCapacity = m_Capacity;
4036 if(newCount > m_Capacity)
4038 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
4042 newCapacity = newCount;
4045 if(newCapacity != m_Capacity)
4047 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
4048 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
4049 if(elementsToCopy != 0)
4051 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
4053 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4054 m_Capacity = newCapacity;
4055 m_pArray = newArray;
4061 void clear(
bool freeMemory =
false)
4063 resize(0, freeMemory);
4066 void insert(
size_t index,
const T& src)
4068 VMA_HEAVY_ASSERT(index <= m_Count);
4069 const size_t oldCount = size();
4070 resize(oldCount + 1);
4071 if(index < oldCount)
4073 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
4075 m_pArray[index] = src;
4078 void remove(
size_t index)
4080 VMA_HEAVY_ASSERT(index < m_Count);
4081 const size_t oldCount = size();
4082 if(index < oldCount - 1)
4084 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
4086 resize(oldCount - 1);
4089 void push_back(
const T& src)
4091 const size_t newIndex = size();
4092 resize(newIndex + 1);
4093 m_pArray[newIndex] = src;
4098 VMA_HEAVY_ASSERT(m_Count > 0);
4102 void push_front(
const T& src)
4109 VMA_HEAVY_ASSERT(m_Count > 0);
4113 typedef T* iterator;
4115 iterator begin() {
return m_pArray; }
4116 iterator end() {
return m_pArray + m_Count; }
4119 AllocatorT m_Allocator;
4125 template<
typename T,
typename allocatorT>
4126 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
4128 vec.insert(index, item);
4131 template<
typename T,
typename allocatorT>
4132 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
4137 #endif // #if VMA_USE_STL_VECTOR 4139 template<
typename CmpLess,
typename VectorT>
4140 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
4142 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4144 vector.data() + vector.size(),
4146 CmpLess()) - vector.data();
4147 VmaVectorInsert(vector, indexToInsert, value);
4148 return indexToInsert;
// Finds `value` in a vector sorted by CmpLess and removes the first element
// equivalent to it. Returns true if an element was removed.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    // Equivalence check: neither element is "less" than the other.
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
4169 template<
typename CmpLess,
typename IterT,
typename KeyT>
4170 IterT VmaVectorFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value)
4173 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
4174 beg, end, value, comparator);
4176 (!comparator(*it, value) && !comparator(value, *it)))
4191 template<
typename T>
4192 class VmaPoolAllocator
4194 VMA_CLASS_NO_COPY(VmaPoolAllocator)
4196 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
4197 ~VmaPoolAllocator();
4205 uint32_t NextFreeIndex;
4212 uint32_t FirstFreeIndex;
4215 const VkAllocationCallbacks* m_pAllocationCallbacks;
4216 size_t m_ItemsPerBlock;
4217 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
4219 ItemBlock& CreateNewBlock();
4222 template<
typename T>
4223 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
4224 m_pAllocationCallbacks(pAllocationCallbacks),
4225 m_ItemsPerBlock(itemsPerBlock),
4226 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
4228 VMA_ASSERT(itemsPerBlock > 0);
4231 template<
typename T>
4232 VmaPoolAllocator<T>::~VmaPoolAllocator()
4237 template<
typename T>
4238 void VmaPoolAllocator<T>::Clear()
4240 for(
size_t i = m_ItemBlocks.size(); i--; )
4241 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
4242 m_ItemBlocks.clear();
4245 template<
typename T>
4246 T* VmaPoolAllocator<T>::Alloc()
4248 for(
size_t i = m_ItemBlocks.size(); i--; )
4250 ItemBlock& block = m_ItemBlocks[i];
4252 if(block.FirstFreeIndex != UINT32_MAX)
4254 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
4255 block.FirstFreeIndex = pItem->NextFreeIndex;
4256 return &pItem->Value;
4261 ItemBlock& newBlock = CreateNewBlock();
4262 Item*
const pItem = &newBlock.pItems[0];
4263 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
4264 return &pItem->Value;
4267 template<
typename T>
4268 void VmaPoolAllocator<T>::Free(T* ptr)
4271 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
4273 ItemBlock& block = m_ItemBlocks[i];
4277 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
4280 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
4282 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
4283 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
4284 block.FirstFreeIndex = index;
4288 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
4291 template<
typename T>
4292 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
4294 ItemBlock newBlock = {
4295 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
4297 m_ItemBlocks.push_back(newBlock);
4300 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
4301 newBlock.pItems[i].NextFreeIndex = i + 1;
4302 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
4303 return m_ItemBlocks.back();
4309 #if VMA_USE_STL_LIST 4311 #define VmaList std::list 4313 #else // #if VMA_USE_STL_LIST 4315 template<
typename T>
4324 template<
typename T>
4327 VMA_CLASS_NO_COPY(VmaRawList)
4329 typedef VmaListItem<T> ItemType;
4331 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
4335 size_t GetCount()
const {
return m_Count; }
4336 bool IsEmpty()
const {
return m_Count == 0; }
4338 ItemType* Front() {
return m_pFront; }
4339 const ItemType* Front()
const {
return m_pFront; }
4340 ItemType* Back() {
return m_pBack; }
4341 const ItemType* Back()
const {
return m_pBack; }
4343 ItemType* PushBack();
4344 ItemType* PushFront();
4345 ItemType* PushBack(
const T& value);
4346 ItemType* PushFront(
const T& value);
4351 ItemType* InsertBefore(ItemType* pItem);
4353 ItemType* InsertAfter(ItemType* pItem);
4355 ItemType* InsertBefore(ItemType* pItem,
const T& value);
4356 ItemType* InsertAfter(ItemType* pItem,
const T& value);
4358 void Remove(ItemType* pItem);
4361 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
4362 VmaPoolAllocator<ItemType> m_ItemAllocator;
4368 template<
typename T>
4369 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
4370 m_pAllocationCallbacks(pAllocationCallbacks),
4371 m_ItemAllocator(pAllocationCallbacks, 128),
4378 template<
typename T>
4379 VmaRawList<T>::~VmaRawList()
4385 template<
typename T>
4386 void VmaRawList<T>::Clear()
4388 if(IsEmpty() ==
false)
4390 ItemType* pItem = m_pBack;
4391 while(pItem != VMA_NULL)
4393 ItemType*
const pPrevItem = pItem->pPrev;
4394 m_ItemAllocator.Free(pItem);
4397 m_pFront = VMA_NULL;
4403 template<
typename T>
4404 VmaListItem<T>* VmaRawList<T>::PushBack()
4406 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4407 pNewItem->pNext = VMA_NULL;
4410 pNewItem->pPrev = VMA_NULL;
4411 m_pFront = pNewItem;
4417 pNewItem->pPrev = m_pBack;
4418 m_pBack->pNext = pNewItem;
4425 template<
typename T>
4426 VmaListItem<T>* VmaRawList<T>::PushFront()
4428 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4429 pNewItem->pPrev = VMA_NULL;
4432 pNewItem->pNext = VMA_NULL;
4433 m_pFront = pNewItem;
4439 pNewItem->pNext = m_pFront;
4440 m_pFront->pPrev = pNewItem;
4441 m_pFront = pNewItem;
4447 template<
typename T>
4448 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
4450 ItemType*
const pNewItem = PushBack();
4451 pNewItem->Value = value;
4455 template<
typename T>
4456 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
4458 ItemType*
const pNewItem = PushFront();
4459 pNewItem->Value = value;
4463 template<
typename T>
4464 void VmaRawList<T>::PopBack()
4466 VMA_HEAVY_ASSERT(m_Count > 0);
4467 ItemType*
const pBackItem = m_pBack;
4468 ItemType*
const pPrevItem = pBackItem->pPrev;
4469 if(pPrevItem != VMA_NULL)
4471 pPrevItem->pNext = VMA_NULL;
4473 m_pBack = pPrevItem;
4474 m_ItemAllocator.Free(pBackItem);
4478 template<
typename T>
4479 void VmaRawList<T>::PopFront()
4481 VMA_HEAVY_ASSERT(m_Count > 0);
4482 ItemType*
const pFrontItem = m_pFront;
4483 ItemType*
const pNextItem = pFrontItem->pNext;
4484 if(pNextItem != VMA_NULL)
4486 pNextItem->pPrev = VMA_NULL;
4488 m_pFront = pNextItem;
4489 m_ItemAllocator.Free(pFrontItem);
4493 template<
typename T>
4494 void VmaRawList<T>::Remove(ItemType* pItem)
4496 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4497 VMA_HEAVY_ASSERT(m_Count > 0);
4499 if(pItem->pPrev != VMA_NULL)
4501 pItem->pPrev->pNext = pItem->pNext;
4505 VMA_HEAVY_ASSERT(m_pFront == pItem);
4506 m_pFront = pItem->pNext;
4509 if(pItem->pNext != VMA_NULL)
4511 pItem->pNext->pPrev = pItem->pPrev;
4515 VMA_HEAVY_ASSERT(m_pBack == pItem);
4516 m_pBack = pItem->pPrev;
4519 m_ItemAllocator.Free(pItem);
4523 template<
typename T>
4524 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4526 if(pItem != VMA_NULL)
4528 ItemType*
const prevItem = pItem->pPrev;
4529 ItemType*
const newItem = m_ItemAllocator.Alloc();
4530 newItem->pPrev = prevItem;
4531 newItem->pNext = pItem;
4532 pItem->pPrev = newItem;
4533 if(prevItem != VMA_NULL)
4535 prevItem->pNext = newItem;
4539 VMA_HEAVY_ASSERT(m_pFront == pItem);
4549 template<
typename T>
4550 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4552 if(pItem != VMA_NULL)
4554 ItemType*
const nextItem = pItem->pNext;
4555 ItemType*
const newItem = m_ItemAllocator.Alloc();
4556 newItem->pNext = nextItem;
4557 newItem->pPrev = pItem;
4558 pItem->pNext = newItem;
4559 if(nextItem != VMA_NULL)
4561 nextItem->pPrev = newItem;
4565 VMA_HEAVY_ASSERT(m_pBack == pItem);
4575 template<
typename T>
4576 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4578 ItemType*
const newItem = InsertBefore(pItem);
4579 newItem->Value = value;
4583 template<
typename T>
4584 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4586 ItemType*
const newItem = InsertAfter(pItem);
4587 newItem->Value = value;
4591 template<
typename T,
typename AllocatorT>
4594 VMA_CLASS_NO_COPY(VmaList)
4605 T& operator*()
const 4607 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4608 return m_pItem->Value;
4610 T* operator->()
const 4612 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4613 return &m_pItem->Value;
4616 iterator& operator++()
4618 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4619 m_pItem = m_pItem->pNext;
4622 iterator& operator--()
4624 if(m_pItem != VMA_NULL)
4626 m_pItem = m_pItem->pPrev;
4630 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4631 m_pItem = m_pList->Back();
4636 iterator operator++(
int)
4638 iterator result = *
this;
4642 iterator operator--(
int)
4644 iterator result = *
this;
4649 bool operator==(
const iterator& rhs)
const 4651 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4652 return m_pItem == rhs.m_pItem;
4654 bool operator!=(
const iterator& rhs)
const 4656 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4657 return m_pItem != rhs.m_pItem;
4661 VmaRawList<T>* m_pList;
4662 VmaListItem<T>* m_pItem;
4664 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4670 friend class VmaList<T, AllocatorT>;
4673 class const_iterator
4682 const_iterator(
const iterator& src) :
4683 m_pList(src.m_pList),
4684 m_pItem(src.m_pItem)
4688 const T& operator*()
const 4690 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4691 return m_pItem->Value;
4693 const T* operator->()
const 4695 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4696 return &m_pItem->Value;
4699 const_iterator& operator++()
4701 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4702 m_pItem = m_pItem->pNext;
4705 const_iterator& operator--()
4707 if(m_pItem != VMA_NULL)
4709 m_pItem = m_pItem->pPrev;
4713 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4714 m_pItem = m_pList->Back();
4719 const_iterator operator++(
int)
4721 const_iterator result = *
this;
4725 const_iterator operator--(
int)
4727 const_iterator result = *
this;
4732 bool operator==(
const const_iterator& rhs)
const 4734 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4735 return m_pItem == rhs.m_pItem;
4737 bool operator!=(
const const_iterator& rhs)
const 4739 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4740 return m_pItem != rhs.m_pItem;
4744 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4750 const VmaRawList<T>* m_pList;
4751 const VmaListItem<T>* m_pItem;
4753 friend class VmaList<T, AllocatorT>;
// STL-like facade of VmaList<T, AllocatorT>: every member is a thin forwarder
// to the underlying intrusive VmaRawList<T> (m_RawList), giving the container
// a std::list-style interface without STL dependency.
// NOTE(review): garbled extraction; tokens kept byte-identical.
4756 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4758 bool empty()
const {
return m_RawList.IsEmpty(); }
4759 size_t size()
const {
return m_RawList.GetCount(); }
// begin()/end(): end() is represented by a VMA_NULL item pointer.
4761 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4762 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4764 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4765 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4767 void clear() { m_RawList.Clear(); }
4768 void push_back(
const T& value) { m_RawList.PushBack(value); }
4769 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
// insert: places `value` before `it` (std::list::insert semantics).
4770 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4773 VmaRawList<T> m_RawList;
4776 #endif // #if VMA_USE_STL_LIST 4784 #if VMA_USE_STL_UNORDERED_MAP 4786 #define VmaPair std::pair 4788 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4789 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4791 #else // #if VMA_USE_STL_UNORDERED_MAP 4793 template<
// Minimal std::pair substitute used when VMA_USE_STL_UNORDERED_MAP is off.
// NOTE(review): the `struct VmaPair` header and the `first`/`second` member
// declarations were dropped by the extraction — the initializer lists below
// reference them; confirm against the original header.
typename T1,
typename T2>
4799 VmaPair() : first(), second() { }
4800 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
// Sorted-vector based associative map (std::unordered_map substitute).
// Elements are VmaPair<KeyT, ValueT> kept ordered by key inside a VmaVector;
// lookup/insert use binary search (see the out-of-class definitions below).
// NOTE(review): the `class VmaMap` header line was dropped by the extraction.
4806 template<
typename KeyT,
typename ValueT>
4810 typedef VmaPair<KeyT, ValueT> PairType;
// iterator is a raw pointer into the contiguous vector storage.
4811 typedef PairType* iterator;
4813 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4815 iterator begin() {
return m_Vector.begin(); }
4816 iterator end() {
return m_Vector.end(); }
4818 void insert(
const PairType& pair);
4819 iterator find(
const KeyT& key);
4820 void erase(iterator it);
4823 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
// Comparator ordering VmaPair elements by `first` (the key). The second
// overload allows comparing a pair directly against a bare key, which lets
// the binary search in VmaMap::find avoid constructing a temporary pair.
// NOTE(review): garbled extraction; struct braces missing in the text.
4826 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4828 template<
typename FirstT,
typename SecondT>
4829 struct VmaPairFirstLess
4831 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4833 return lhs.first < rhs.first;
4835 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4837 return lhs.first < rhsFirst;
// Inserts `pair` keeping m_Vector sorted by key: binary-searches for the
// first element not less than `pair`, then inserts at that index.
// NOTE(review): the extraction dropped two arguments of the
// VmaBinaryFindFirstNotLess call (the begin pointer and the searched value) —
// verify against the original header.
4841 template<
typename KeyT,
typename ValueT>
4842 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4844 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4846 m_Vector.data() + m_Vector.size(),
4848 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4849 VmaVectorInsert(m_Vector, indexToInsert, pair);
// Binary-searches the sorted vector for `key`; returns a pointer to the
// matching pair, or end() when the lower-bound position does not hold an
// element with an equal key.
// NOTE(review): the extraction dropped two arguments of the
// VmaBinaryFindFirstNotLess call (begin pointer and the key) and the
// `return it;` branch of the if — verify against the original header.
4852 template<
typename KeyT,
typename ValueT>
4853 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4855 PairType* it = VmaBinaryFindFirstNotLess(
4857 m_Vector.data() + m_Vector.size(),
4859 VmaPairFirstLess<KeyT, ValueT>());
4860 if((it != m_Vector.end()) && (it->first == key))
4866 return m_Vector.end();
4870 template<
typename KeyT,
typename ValueT>
4871 void VmaMap<KeyT, ValueT>::erase(iterator it)
4873 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4876 #endif // #if VMA_USE_STL_UNORDERED_MAP 4882 class VmaDeviceMemoryBlock;
4884 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
// Internal representation of a single allocation (the object behind the
// public VmaAllocation handle). An allocation is exactly one of:
//   ALLOCATION_TYPE_BLOCK     — a suballocation inside a VmaDeviceMemoryBlock
//                               (state in m_BlockAllocation), or
//   ALLOCATION_TYPE_DEDICATED — its own VkDeviceMemory object
//                               (state in m_DedicatedAllocation).
// m_MapCount packs a persistent-map flag (high bit) with a map ref-count;
// m_LastUseFrameIndex is atomic and drives the lost-allocation mechanism.
// NOTE(review): garbled extraction — original line numbers are embedded in
// the text and several lines (braces, some members such as m_Size/m_MapCount
// initializers, the destructor header, union keywords) are missing. Tokens
// are kept byte-identical.
4886 struct VmaAllocation_T
4888 VMA_CLASS_NO_COPY(VmaAllocation_T)
// High bit of m_MapCount: allocation was created persistently mapped.
4890 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4894 FLAG_USER_DATA_STRING = 0x01,
4898 enum ALLOCATION_TYPE
4900 ALLOCATION_TYPE_NONE,
4901 ALLOCATION_TYPE_BLOCK,
4902 ALLOCATION_TYPE_DEDICATED,
// Starts in the NONE state; one of the Init* methods below finalizes it.
4905 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
4908 m_pUserData(VMA_NULL),
4909 m_LastUseFrameIndex(currentFrameIndex),
4910 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
4911 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
4913 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
4915 #if VMA_STATS_STRING_ENABLED 4916 m_CreationFrameIndex = currentFrameIndex;
4917 m_BufferImageUsage = 0;
// Destructor checks (header line lost in extraction): the allocation must be
// fully unmapped and its user data already cleared before destruction.
4923 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4926 VMA_ASSERT(m_pUserData == VMA_NULL);
// Transition NONE -> BLOCK: record the owning block, offset, alignment,
// pool handle and lost-ability. Only legal once (asserted).
4929 void InitBlockAllocation(
4931 VmaDeviceMemoryBlock* block,
4932 VkDeviceSize offset,
4933 VkDeviceSize alignment,
4935 VmaSuballocationType suballocationType,
4939 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4940 VMA_ASSERT(block != VMA_NULL);
4941 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4942 m_Alignment = alignment;
4944 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4945 m_SuballocationType = (uint8_t)suballocationType;
4946 m_BlockAllocation.m_hPool = hPool;
4947 m_BlockAllocation.m_Block = block;
4948 m_BlockAllocation.m_Offset = offset;
4949 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
// Init as an already-lost block allocation (used for "lost allocation"
// placeholders): frame index must equal VMA_FRAME_INDEX_LOST.
4954 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4955 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
4956 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4957 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
4958 m_BlockAllocation.m_Block = VMA_NULL;
4959 m_BlockAllocation.m_Offset = 0;
4960 m_BlockAllocation.m_CanBecomeLost =
true;
// Used by defragmentation to retarget an allocation to a new block/offset.
4963 void ChangeBlockAllocation(
4965 VmaDeviceMemoryBlock* block,
4966 VkDeviceSize offset);
4968 void ChangeSize(VkDeviceSize newSize);
4969 void ChangeOffset(VkDeviceSize newOffset);
// Transition NONE -> DEDICATED: allocation owns its VkDeviceMemory directly;
// pMappedData != null marks it as persistently mapped.
4972 void InitDedicatedAllocation(
4973 uint32_t memoryTypeIndex,
4974 VkDeviceMemory hMemory,
4975 VmaSuballocationType suballocationType,
4979 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4980 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
4981 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
4984 m_SuballocationType = (uint8_t)suballocationType;
4985 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4986 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
4987 m_DedicatedAllocation.m_hMemory = hMemory;
4988 m_DedicatedAllocation.m_pMappedData = pMappedData;
// Simple accessors.
4991 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
4992 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
4993 VkDeviceSize GetSize()
const {
return m_Size; }
4994 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
4995 void* GetUserData()
const {
return m_pUserData; }
4996 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
4997 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
// Only valid for BLOCK allocations (asserted).
4999 VmaDeviceMemoryBlock* GetBlock()
const 5001 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5002 return m_BlockAllocation.m_Block;
5004 VkDeviceSize GetOffset()
const;
5005 VkDeviceMemory GetMemory()
const;
5006 uint32_t GetMemoryTypeIndex()
const;
5007 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
5008 void* GetMappedData()
const;
5009 bool CanBecomeLost()
const;
// Lost-allocation support: frame index is read/CAS'd atomically so multiple
// threads can touch an allocation while the allocator scans for lost ones.
5012 uint32_t GetLastUseFrameIndex()
const 5014 return m_LastUseFrameIndex.load();
5016 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
5018 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
5028 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5030 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
5032 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
// Map/unmap helpers, dispatched by allocation type.
5043 void BlockAllocMap();
5044 void BlockAllocUnmap();
5045 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
// Statistics-only bookkeeping (compiled in when stats strings are enabled).
5048 #if VMA_STATS_STRING_ENABLED 5049 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
5050 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
5052 void InitBufferImageUsage(uint32_t bufferImageUsage)
5054 VMA_ASSERT(m_BufferImageUsage == 0);
5055 m_BufferImageUsage = bufferImageUsage;
5058 void PrintParameters(
class VmaJsonWriter& json)
const;
// Data members (some declarations lost in extraction, e.g. m_Size partner
// fields m_MapCount/m_Flags/m_pUserData — verify against original).
5062 VkDeviceSize m_Alignment;
5063 VkDeviceSize m_Size;
5065 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
5067 uint8_t m_SuballocationType;
// Per-type state; in the original these live in an anonymous union.
5074 struct BlockAllocation
5077 VmaDeviceMemoryBlock* m_Block;
5078 VkDeviceSize m_Offset;
5079 bool m_CanBecomeLost;
5083 struct DedicatedAllocation
5085 uint32_t m_MemoryTypeIndex;
5086 VkDeviceMemory m_hMemory;
5087 void* m_pMappedData;
5093 BlockAllocation m_BlockAllocation;
5095 DedicatedAllocation m_DedicatedAllocation;
5098 #if VMA_STATS_STRING_ENABLED 5099 uint32_t m_CreationFrameIndex;
5100 uint32_t m_BufferImageUsage;
// One region inside a device memory block: its byte offset and its kind
// (free, buffer, image, ...) per VmaSuballocationType.
// NOTE(review): extraction dropped member lines between `offset` and `type`
// (the original also carries a size and an allocation handle) — verify
// against the original header before relying on this layout.
5110 struct VmaSuballocation
5112 VkDeviceSize offset;
5115 VmaSuballocationType type;
5119 struct VmaSuballocationOffsetLess
5121 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5123 return lhs.offset < rhs.offset;
5126 struct VmaSuballocationOffsetGreater
5128 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5130 return lhs.offset > rhs.offset;
// Suballocations are kept in a VmaList for O(1) insert/remove at a known
// position during block allocate/free.
5134 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost (in "bytes") charged per allocation that would have to be made lost;
// biases CreateAllocationRequest toward requests that evict fewer victims.
5137 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// Result of searching a block for space: where to place the allocation and
// what it would cost (bytes of existing allocations to make lost).
// NOTE(review): garbled extraction; struct braces missing in the text.
5152 struct VmaAllocationRequest
5154 VkDeviceSize offset;
5155 VkDeviceSize sumFreeSize;
5156 VkDeviceSize sumItemSize;
5157 VmaSuballocationList::iterator item;
5158 size_t itemsToMakeLostCount;
// Lower cost = better request: bytes lost plus a fixed penalty per victim.
5161 VkDeviceSize CalcCost()
const 5163 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// Abstract strategy interface for managing the suballocations of one
// VkDeviceMemory block. Concrete subclasses below implement different
// placement algorithms (Generic free-list, Linear, Buddy).
// NOTE(review): garbled extraction — class braces, access specifiers and a
// few declarations (e.g. the constructor, the pure virtual Alloc() header at
// the 5222-5224 fragment) were dropped; tokens kept byte-identical.
5171 class VmaBlockMetadata
5175 virtual ~VmaBlockMetadata() { }
5176 virtual void Init(VkDeviceSize size) { m_Size = size; }
// Validation for debug builds; must return true on consistent metadata.
5179 virtual bool Validate()
const = 0;
5180 VkDeviceSize GetSize()
const {
return m_Size; }
5181 virtual size_t GetAllocationCount()
const = 0;
5182 virtual VkDeviceSize GetSumFreeSize()
const = 0;
5183 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
5185 virtual bool IsEmpty()
const = 0;
// Statistics aggregation hooks.
5187 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
5189 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
5191 #if VMA_STATS_STRING_ENABLED 5192 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
// Core placement query: try to find space for (allocSize, allocAlignment),
// possibly by making other allocations lost; fills *pAllocationRequest and
// returns whether a placement exists.
5198 virtual bool CreateAllocationRequest(
5199 uint32_t currentFrameIndex,
5200 uint32_t frameInUseCount,
5201 VkDeviceSize bufferImageGranularity,
5202 VkDeviceSize allocSize,
5203 VkDeviceSize allocAlignment,
5205 VmaSuballocationType allocType,
5206 bool canMakeOtherLost,
5209 VmaAllocationRequest* pAllocationRequest) = 0;
5211 virtual bool MakeRequestedAllocationsLost(
5212 uint32_t currentFrameIndex,
5213 uint32_t frameInUseCount,
5214 VmaAllocationRequest* pAllocationRequest) = 0;
5216 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
// Scans margins for corruption-detection magic values.
5218 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
// Fragment of the pure virtual Alloc(request, type, allocSize, ...) —
// its first and last lines were dropped by the extraction.
5222 const VmaAllocationRequest& request,
5223 VmaSuballocationType type,
5224 VkDeviceSize allocSize,
5230 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
// Optional in-place resize; default says "not supported".
5233 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize) {
return false; }
5236 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
// Shared JSON-dump helpers for subclasses' PrintDetailedMap implementations.
5238 #if VMA_STATS_STRING_ENABLED 5239 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
5240 VkDeviceSize unusedBytes,
5241 size_t allocationCount,
5242 size_t unusedRangeCount)
const;
5243 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
5244 VkDeviceSize offset,
5246 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
5247 VkDeviceSize offset,
5248 VkDeviceSize size)
const;
5249 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
5253 VkDeviceSize m_Size;
5254 const VkAllocationCallbacks* m_pAllocationCallbacks;
// VMA_VALIDATE: assert-and-fail helper used by the Validate() implementations
// (the `return false; } } while(false)` tail was dropped by the extraction).
// VmaBlockMetadata_Generic: default strategy — a sorted suballocation list
// plus a by-size index of free ranges (best-fit style searches).
// NOTE(review): garbled extraction; braces/access specifiers missing.
5257 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 5258 VMA_ASSERT(0 && "Validation failed: " #cond); \ 5262 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
5264 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
5267 virtual ~VmaBlockMetadata_Generic();
5268 virtual void Init(VkDeviceSize size);
5270 virtual bool Validate()
const;
// Every list node is either an allocation or a free range, so the count of
// allocations is total nodes minus free nodes.
5271 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
5272 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5273 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5274 virtual bool IsEmpty()
const;
5276 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5277 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5279 #if VMA_STATS_STRING_ENABLED 5280 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// See VmaBlockMetadata::CreateAllocationRequest for the contract.
5283 virtual bool CreateAllocationRequest(
5284 uint32_t currentFrameIndex,
5285 uint32_t frameInUseCount,
5286 VkDeviceSize bufferImageGranularity,
5287 VkDeviceSize allocSize,
5288 VkDeviceSize allocAlignment,
5290 VmaSuballocationType allocType,
5291 bool canMakeOtherLost,
5293 VmaAllocationRequest* pAllocationRequest);
5295 virtual bool MakeRequestedAllocationsLost(
5296 uint32_t currentFrameIndex,
5297 uint32_t frameInUseCount,
5298 VmaAllocationRequest* pAllocationRequest);
5300 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5302 virtual VkResult CheckCorruption(
const void* pBlockData);
// Fragment of the Alloc(...) override header (first/last lines dropped).
5305 const VmaAllocationRequest& request,
5306 VmaSuballocationType type,
5307 VkDeviceSize allocSize,
5312 virtual void FreeAtOffset(VkDeviceSize offset);
// Supported here: grow/shrink an allocation in place when neighbors permit.
5314 virtual bool ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize);
5319 bool IsBufferImageGranularityConflictPossible(
5320 VkDeviceSize bufferImageGranularity,
5321 VmaSuballocationType& inOutPrevSuballocType)
const;
// Defragmentation algorithms poke at internals directly.
5324 friend class VmaDefragmentationAlgorithm_Generic;
5325 friend class VmaDefragmentationAlgorithm_Fast;
5327 uint32_t m_FreeCount;
5328 VkDeviceSize m_SumFreeSize;
5329 VmaSuballocationList m_Suballocations;
// Free ranges sorted by size — lets allocation do a binary search for the
// smallest free range that fits.
5332 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
5334 bool ValidateFreeSuballocationList()
const;
// Tests whether `suballocItem` (plus possibly later items made lost) can
// satisfy the request; outputs offset, victim count and size sums.
5338 bool CheckAllocation(
5339 uint32_t currentFrameIndex,
5340 uint32_t frameInUseCount,
5341 VkDeviceSize bufferImageGranularity,
5342 VkDeviceSize allocSize,
5343 VkDeviceSize allocAlignment,
5344 VmaSuballocationType allocType,
5345 VmaSuballocationList::const_iterator suballocItem,
5346 bool canMakeOtherLost,
5347 VkDeviceSize* pOffset,
5348 size_t* itemsToMakeLostCount,
5349 VkDeviceSize* pSumFreeSize,
5350 VkDeviceSize* pSumItemSize)
const;
// Free-list maintenance helpers.
5352 void MergeFreeWithNext(VmaSuballocationList::iterator item);
5356 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
5359 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
5362 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// Linear allocator strategy: two suballocation vectors used as a ring buffer
// or double stack (see SECOND_VECTOR_MODE). Backs pools created with the
// linear-algorithm flag.
// NOTE(review): garbled extraction; braces/access specifiers missing.
5443 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
5445 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
5448 virtual ~VmaBlockMetadata_Linear();
5449 virtual void Init(VkDeviceSize size);
5451 virtual bool Validate()
const;
5452 virtual size_t GetAllocationCount()
const;
5453 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5454 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5455 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
5457 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5458 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5460 #if VMA_STATS_STRING_ENABLED 5461 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// See VmaBlockMetadata::CreateAllocationRequest for the contract.
5464 virtual bool CreateAllocationRequest(
5465 uint32_t currentFrameIndex,
5466 uint32_t frameInUseCount,
5467 VkDeviceSize bufferImageGranularity,
5468 VkDeviceSize allocSize,
5469 VkDeviceSize allocAlignment,
5471 VmaSuballocationType allocType,
5472 bool canMakeOtherLost,
5474 VmaAllocationRequest* pAllocationRequest);
5476 virtual bool MakeRequestedAllocationsLost(
5477 uint32_t currentFrameIndex,
5478 uint32_t frameInUseCount,
5479 VmaAllocationRequest* pAllocationRequest);
5481 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5483 virtual VkResult CheckCorruption(
const void* pBlockData);
// Fragment of the Alloc(...) override header (first/last lines dropped).
5486 const VmaAllocationRequest& request,
5487 VmaSuballocationType type,
5488 VkDeviceSize allocSize,
5493 virtual void FreeAtOffset(VkDeviceSize offset);
5503 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
// How the "2nd" vector is being used relative to the 1st.
5505 enum SECOND_VECTOR_MODE
5507 SECOND_VECTOR_EMPTY,
5512 SECOND_VECTOR_RING_BUFFER,
5518 SECOND_VECTOR_DOUBLE_STACK,
5521 VkDeviceSize m_SumFreeSize;
// The two vectors swap roles; m_1stVectorIndex says which is currently 1st.
5522 SuballocationVectorType m_Suballocations0, m_Suballocations1;
5523 uint32_t m_1stVectorIndex;
5524 SECOND_VECTOR_MODE m_2ndVectorMode;
5526 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5527 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5528 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5529 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
// Counts of null (freed) items kept in place to avoid vector churn;
// compaction happens lazily (see ShouldCompact1st/CleanupAfterFree).
5532 size_t m_1stNullItemsBeginCount;
5534 size_t m_1stNullItemsMiddleCount;
5536 size_t m_2ndNullItemsCount;
5538 bool ShouldCompact1st()
const;
5539 void CleanupAfterFree();
// Buddy allocator strategy: the block is a binary tree of power-of-two nodes
// (root = largest power of two <= block size; the remainder is "unusable").
// Backs pools created with the buddy-algorithm flag.
// NOTE(review): garbled extraction; braces, the Node struct body and the
// m_FreeList element type were partially dropped; tokens kept byte-identical.
5553 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5555 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5558 virtual ~VmaBlockMetadata_Buddy();
5559 virtual void Init(VkDeviceSize size);
5561 virtual bool Validate()
const;
5562 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
// Reports the rounded-off remainder as free as well.
5563 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5564 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5565 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5567 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5568 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5570 #if VMA_STATS_STRING_ENABLED 5571 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// See VmaBlockMetadata::CreateAllocationRequest for the contract.
5574 virtual bool CreateAllocationRequest(
5575 uint32_t currentFrameIndex,
5576 uint32_t frameInUseCount,
5577 VkDeviceSize bufferImageGranularity,
5578 VkDeviceSize allocSize,
5579 VkDeviceSize allocAlignment,
5581 VmaSuballocationType allocType,
5582 bool canMakeOtherLost,
5584 VmaAllocationRequest* pAllocationRequest);
5586 virtual bool MakeRequestedAllocationsLost(
5587 uint32_t currentFrameIndex,
5588 uint32_t frameInUseCount,
5589 VmaAllocationRequest* pAllocationRequest);
5591 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Corruption detection not implemented for the buddy strategy.
5593 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
// Fragment of the Alloc(...) override header (first/last lines dropped).
5596 const VmaAllocationRequest& request,
5597 VmaSuballocationType type,
5598 VkDeviceSize allocSize,
// Both Free overloads funnel into the private FreeAtOffset(alloc, offset).
5602 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5603 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
5606 static const VkDeviceSize MIN_NODE_SIZE = 32;
5607 static const size_t MAX_LEVELS = 30;
// Accumulators filled by ValidateNode and checked by Validate().
5609 struct ValidationContext
5611 size_t calculatedAllocationCount;
5612 size_t calculatedFreeCount;
5613 VkDeviceSize calculatedSumFreeSize;
5615 ValidationContext() :
5616 calculatedAllocationCount(0),
5617 calculatedFreeCount(0),
5618 calculatedSumFreeSize(0) { }
// Fragment of the Node struct (most of its body was dropped).
5623 VkDeviceSize offset;
5653 VkDeviceSize m_UsableSize;
5654 uint32_t m_LevelCount;
// Per-level free lists (element struct's declaration was dropped).
5660 } m_FreeList[MAX_LEVELS];
5662 size_t m_AllocationCount;
5666 VkDeviceSize m_SumFreeSize;
// Bytes beyond the largest power-of-two that fits in the block.
5668 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5669 void DeleteNode(Node* node);
5670 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
5671 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
// Node size halves at each level down from the root.
5672 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
5674 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5675 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
5679 void AddToFreeListFront(uint32_t level, Node* node);
5683 void RemoveFromFreeList(uint32_t level, Node* node);
5685 #if VMA_STATS_STRING_ENABLED 5686 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
// One VkDeviceMemory object plus the metadata strategy (m_pMetadata) that
// parcels it into suballocations. Mapping is reference-counted (m_MapCount)
// under m_Mutex (member declaration lost in extraction).
// NOTE(review): garbled extraction; braces/access specifiers and some
// declarations (ctor, Init's trailing params, Bind* params, Unmap) missing.
5696 class VmaDeviceMemoryBlock
5698 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5700 VmaBlockMetadata* m_pMetadata;
// The block must be fully unmapped and its memory already released
// (via Destroy elsewhere) before the destructor runs.
5704 ~VmaDeviceMemoryBlock()
5706 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5707 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// Init: adopt newMemory and pick the metadata class per `algorithm`.
5713 uint32_t newMemoryTypeIndex,
5714 VkDeviceMemory newMemory,
5715 VkDeviceSize newSize,
5717 uint32_t algorithm);
5721 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5722 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5723 uint32_t GetId()
const {
return m_Id; }
5724 void* GetMappedData()
const {
return m_pMappedData; }
5727 bool Validate()
const;
// Map increases the map ref-count by `count`; ppData receives the pointer.
5732 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
// Corruption-detection margins written/checked around one allocation.
5735 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5736 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
// vkBindBufferMemory / vkBindImageMemory wrappers (param lists dropped).
5738 VkResult BindBufferMemory(
5742 VkResult BindImageMemory(
5748 uint32_t m_MemoryTypeIndex;
5750 VkDeviceMemory m_hMemory;
// Map ref-count and cached mapped pointer (guarded by a mutex in original).
5758 uint32_t m_MapCount;
5759 void* m_pMappedData;
// VmaPointerLess: orders raw pointers (used for sorted pointer vectors).
// NOTE(review): its operator() body was dropped by the extraction.
5762 struct VmaPointerLess
5764 bool operator()(
const void* lhs,
const void* rhs)
// VmaDefragmentationMove: one planned copy — from (srcBlockIndex, srcOffset)
// to (dstBlockIndex, dstOffset). A size member from the original appears to
// have been dropped by the extraction — verify against the original header.
const 5770 struct VmaDefragmentationMove
5772 size_t srcBlockIndex;
5773 size_t dstBlockIndex;
5774 VkDeviceSize srcOffset;
5775 VkDeviceSize dstOffset;
5779 class VmaDefragmentationAlgorithm;
// Sequence of VmaDeviceMemoryBlock of one memory type — the backing store of
// a custom pool or one of the allocator's default pools. Guarded by the
// read-write mutex m_Mutex.
// NOTE(review): garbled extraction; braces and several declarations
// (ctor first params, Allocate's full parameter list, GetPoolStats, Free,
// etc.) were dropped; tokens kept byte-identical.
5787 struct VmaBlockVector
5789 VMA_CLASS_NO_COPY(VmaBlockVector)
// Constructor parameters (header line dropped): configuration is immutable
// after construction — see the const members below.
5793 uint32_t memoryTypeIndex,
5794 VkDeviceSize preferredBlockSize,
5795 size_t minBlockCount,
5796 size_t maxBlockCount,
5797 VkDeviceSize bufferImageGranularity,
5798 uint32_t frameInUseCount,
5800 bool explicitBlockSize,
5801 uint32_t algorithm);
// Pre-creates m_MinBlockCount empty blocks.
5804 VkResult CreateMinBlocks();
5806 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5807 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5808 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5809 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5810 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5814 bool IsEmpty()
const {
return m_Blocks.empty(); }
5815 bool IsCorruptionDetectionEnabled()
const;
// Fragment of Allocate(...): allocates `allocationCount` allocations of the
// given size/alignment (several parameter lines dropped).
5819 uint32_t currentFrameIndex,
5821 VkDeviceSize alignment,
5823 VmaSuballocationType suballocType,
5824 size_t allocationCount,
5833 #if VMA_STATS_STRING_ENABLED 5834 void PrintDetailedMap(
class VmaJsonWriter& json);
5837 void MakePoolAllocationsLost(
5838 uint32_t currentFrameIndex,
5839 size_t* pLostAllocationCount);
5840 VkResult CheckCorruption();
// Defragmentation entry points; budget args are in-out (consumed amounts).
5844 class VmaBlockVectorDefragmentationContext* pCtx,
5846 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
5847 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
5848 VkCommandBuffer commandBuffer);
5849 void DefragmentationEnd(
5850 class VmaBlockVectorDefragmentationContext* pCtx,
5856 size_t GetBlockCount()
const {
return m_Blocks.size(); }
5857 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
5858 size_t CalcAllocationCount()
const;
5859 bool IsBufferImageGranularityConflictPossible()
const;
5862 friend class VmaDefragmentationAlgorithm_Generic;
// Immutable configuration captured at construction.
5865 const uint32_t m_MemoryTypeIndex;
5866 const VkDeviceSize m_PreferredBlockSize;
5867 const size_t m_MinBlockCount;
5868 const size_t m_MaxBlockCount;
5869 const VkDeviceSize m_BufferImageGranularity;
5870 const uint32_t m_FrameInUseCount;
5871 const bool m_IsCustomPool;
5872 const bool m_ExplicitBlockSize;
5873 const uint32_t m_Algorithm;
// True while at least one block is fully empty (kept as a reuse cache).
5877 bool m_HasEmptyBlock;
5878 VMA_RW_MUTEX m_Mutex;
5880 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
5881 uint32_t m_NextBlockId;
5883 VkDeviceSize CalcMaxBlockSize()
const;
5886 void Remove(VmaDeviceMemoryBlock* pBlock);
// Keeps m_Blocks roughly sorted (one bubble step per call) so emptier
// blocks migrate toward the end.
5890 void IncrementallySortBlocks();
// Single-allocation path used by Allocate (parameter lines dropped).
5892 VkResult AllocatePage(
5894 uint32_t currentFrameIndex,
5896 VkDeviceSize alignment,
5898 VmaSuballocationType suballocType,
5902 VkResult AllocateFromBlock(
5903 VmaDeviceMemoryBlock* pBlock,
5905 uint32_t currentFrameIndex,
5907 VkDeviceSize alignment,
5910 VmaSuballocationType suballocType,
5914 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// Executes a planned move list either by host memcpy (Cpu) or by recording
// vkCmdCopyBuffer-style copies into `commandBuffer` (Gpu).
5917 void ApplyDefragmentationMovesCpu(
5918 class VmaBlockVectorDefragmentationContext* pDefragCtx,
5919 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
5921 void ApplyDefragmentationMovesGpu(
5922 class VmaBlockVectorDefragmentationContext* pDefragCtx,
5923 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
5924 VkCommandBuffer commandBuffer);
// Interior of VmaPool_T (the `class VmaPool_T` header line was dropped by
// the extraction): a custom pool is essentially a VmaBlockVector plus an id.
5935 VMA_CLASS_NO_COPY(VmaPool_T)
5937 VmaBlockVector m_BlockVector;
// Tail of the constructor's parameter list (leading lines dropped).
5942 VkDeviceSize preferredBlockSize);
5945 uint32_t GetId()
const {
return m_Id; }
// Id may be assigned exactly once (asserted), by the owning allocator.
5946 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
// Abstract base for defragmentation strategies over one VmaBlockVector.
// Subclasses collect candidate allocations (AddAllocation/AddAll), then
// Defragment() emits a list of planned moves within the given budget.
// NOTE(review): garbled extraction; braces, the ctor's first parameter,
// m_hAllocation member and AllocationInfo ctor headers were dropped.
5948 #if VMA_STATS_STRING_ENABLED 5963 class VmaDefragmentationAlgorithm
5965 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
5967 VmaDefragmentationAlgorithm(
5969 VmaBlockVector* pBlockVector,
5970 uint32_t currentFrameIndex) :
5971 m_hAllocator(hAllocator),
5972 m_pBlockVector(pBlockVector),
5973 m_CurrentFrameIndex(currentFrameIndex)
5976 virtual ~VmaDefragmentationAlgorithm()
// Registers one allocation as movable; *pChanged reports if it was moved.
5980 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) = 0;
5981 virtual void AddAll() = 0;
// Produces planned moves into `moves`, bounded by bytes/count budgets.
5983 virtual VkResult Defragment(
5984 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
5985 VkDeviceSize maxBytesToMove,
5986 uint32_t maxAllocationsToMove) = 0;
5988 virtual VkDeviceSize GetBytesMoved()
const = 0;
5989 virtual uint32_t GetAllocationsMoved()
const = 0;
5993 VmaBlockVector*
const m_pBlockVector;
5994 const uint32_t m_CurrentFrameIndex;
// (allocation handle, out-flag) pair tracked per candidate.
5996 struct AllocationInfo
5999 VkBool32* m_pChanged;
6002 m_hAllocation(VK_NULL_HANDLE),
6003 m_pChanged(VMA_NULL)
6007 m_hAllocation(hAlloc),
6008 m_pChanged(pChanged)
// General-purpose defragmentation: groups candidate allocations per block,
// sorts blocks/allocations, and repeatedly moves allocations from sparse
// blocks into denser ones (DefragmentRound) within the move budget.
// NOTE(review): garbled extraction; braces and several lines (return
// statements of comparators, BlockInfo struct header, m_pBlock initializer)
// were dropped; tokens kept byte-identical.
6014 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
6016 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
6018 VmaDefragmentationAlgorithm_Generic(
6020 VmaBlockVector* pBlockVector,
6021 uint32_t currentFrameIndex,
6022 bool overlappingMoveSupported);
6023 virtual ~VmaDefragmentationAlgorithm_Generic();
6025 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6026 virtual void AddAll() { m_AllAllocations =
true; }
6028 virtual VkResult Defragment(
6029 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6030 VkDeviceSize maxBytesToMove,
6031 uint32_t maxAllocationsToMove);
6033 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6034 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6037 uint32_t m_AllocationCount;
6038 bool m_AllAllocations;
6040 VkDeviceSize m_BytesMoved;
6041 uint32_t m_AllocationsMoved;
// Comparators used to order candidate allocations within a block.
6043 struct AllocationInfoSizeGreater
6045 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 6047 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
6051 struct AllocationInfoOffsetGreater
6053 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 6055 return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
// Per-block bookkeeping (the `struct BlockInfo` header line was dropped).
6061 size_t m_OriginalBlockIndex;
6062 VmaDeviceMemoryBlock* m_pBlock;
6063 bool m_HasNonMovableAllocations;
6064 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
6066 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
6067 m_OriginalBlockIndex(SIZE_MAX),
6069 m_HasNonMovableAllocations(true),
6070 m_Allocations(pAllocationCallbacks)
// A block has non-movable allocations when not all of its allocations were
// registered as defragmentation candidates.
6074 void CalcHasNonMovableAllocations()
6076 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
6077 const size_t defragmentAllocCount = m_Allocations.size();
6078 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
6081 void SortAllocationsBySizeDescending()
6083 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
6086 void SortAllocationsByOffsetDescending()
6088 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
// Orders BlockInfo pointers by underlying block pointer (for binary search).
6092 struct BlockPointerLess
6094 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 6096 return pLhsBlockInfo->m_pBlock < pRhsBlock;
6098 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6100 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Preferred move destinations first: blocks with non-movable allocations,
// then blocks with less free space (the return lines were dropped).
6106 struct BlockInfoCompareMoveDestination
6108 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6110 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
6114 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
6118 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
6126 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
6127 BlockInfoVector m_Blocks;
// One pass of moving allocations; called repeatedly by Defragment().
6129 VkResult DefragmentRound(
6130 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6131 VkDeviceSize maxBytesToMove,
6132 uint32_t maxAllocationsToMove);
6134 size_t CalcBlocksWithNonMovableCount()
const;
// A move only helps if it goes to an earlier block, or backward in the
// same block (heuristic; exact rule lives in the definition).
6136 static bool MoveMakesSense(
6137 size_t dstBlockIndex, VkDeviceSize dstOffset,
6138 size_t srcBlockIndex, VkDeviceSize srcOffset);
// Fast defragmentation: single sweep that repacks suballocations toward the
// front of earlier blocks, using a tiny fixed-size cache of known free
// gaps (FreeSpaceDatabase) instead of per-round searching.
// NOTE(review): garbled extraction; braces and several lines (BlockInfo
// struct header, some statements, trailing members) were dropped.
6141 class VmaDefragmentationAlgorithm_Fast :
public VmaDefragmentationAlgorithm
6143 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
6145 VmaDefragmentationAlgorithm_Fast(
6147 VmaBlockVector* pBlockVector,
6148 uint32_t currentFrameIndex,
6149 bool overlappingMoveSupported);
6150 virtual ~VmaDefragmentationAlgorithm_Fast();
// Individual registration only counts; this algorithm moves everything.
6152 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
6153 virtual void AddAll() { m_AllAllocations =
true; }
6155 virtual VkResult Defragment(
6156 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6157 VkDeviceSize maxBytesToMove,
6158 uint32_t maxAllocationsToMove);
6160 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6161 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
// Fragment of a per-block info struct (header lines dropped).
6166 size_t origBlockIndex;
// Fixed-capacity (MAX_COUNT) registry of free gaps left behind by moves.
6169 class FreeSpaceDatabase
// Constructor body: mark all slots unused (blockInfoIndex == SIZE_MAX).
6175 s.blockInfoIndex = SIZE_MAX;
6176 for(
size_t i = 0; i < MAX_COUNT; ++i)
6178 m_FreeSpaces[i] = s;
// Record a free gap; tiny gaps are ignored, and when full the smallest
// tracked gap is evicted in favor of a larger one.
6182 void Register(
size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
6184 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6190 size_t bestIndex = SIZE_MAX;
6191 for(
size_t i = 0; i < MAX_COUNT; ++i)
// Empty slot — use it immediately (break dropped by extraction).
6194 if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
6199 if(m_FreeSpaces[i].size < size &&
6200 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
6206 if(bestIndex != SIZE_MAX)
6208 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
6209 m_FreeSpaces[bestIndex].offset = offset;
6210 m_FreeSpaces[bestIndex].size = size;
// Find a tracked gap that fits (alignment-adjusted); on success returns the
// destination and shrinks or retires the slot.
6214 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
6215 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
6217 size_t bestIndex = SIZE_MAX;
6218 VkDeviceSize bestFreeSpaceAfter = 0;
6219 for(
size_t i = 0; i < MAX_COUNT; ++i)
6222 if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
6224 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
6226 if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
// Prefer the slot that leaves the most space after the placement.
6228 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
6230 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
6233 bestFreeSpaceAfter = freeSpaceAfter;
6239 if(bestIndex != SIZE_MAX)
6241 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
6242 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
// Remaining space still worth tracking: shrink the slot in place...
6244 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6247 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
6248 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
6249 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
// ...otherwise retire the slot entirely.
6254 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
6264 static const size_t MAX_COUNT = 4;
// Slot struct (header dropped): SIZE_MAX blockInfoIndex marks "unused".
6268 size_t blockInfoIndex;
6269 VkDeviceSize offset;
6271 } m_FreeSpaces[MAX_COUNT];
6274 const bool m_OverlappingMoveSupported;
6276 uint32_t m_AllocationCount;
6277 bool m_AllAllocations;
6279 VkDeviceSize m_BytesMoved;
6280 uint32_t m_AllocationsMoved;
6282 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
// Metadata is rewritten around the sweep rather than updated per move.
6284 void PreprocessMetadata();
6285 void PostprocessMetadata();
6286 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc);
// Per-block bookkeeping used while defragmenting a block vector.
// NOTE(review): this declaration is heavily elided in this extract - member
// declarations between the visible fragments are not shown here.
6289 struct VmaBlockDefragmentationContext
// Flag bit recorded on a block that the defragmentation pass has used.
6293 BLOCK_FLAG_USED = 0x00000001,
// Default constructor - starts with a null temporary buffer handle.
6298 VmaBlockDefragmentationContext() :
6300 hBuffer(VK_NULL_HANDLE)
// Context for defragmenting one VmaBlockVector (either a default per-memory-type
// vector or the vector of a custom pool). Collects allocations via
// AddAllocation()/AddAll(), then Begin() creates the algorithm object.
// NOTE(review): this declaration is heavily elided in this extract - some
// member declarations between the visible fragments are not shown here.
6305 class VmaBlockVectorDefragmentationContext
6307 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
// One entry per block of the vector, filled during GPU defragmentation.
6311 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
6313 VmaBlockVectorDefragmentationContext(
6316 VmaBlockVector* pBlockVector,
6317 uint32_t currFrameIndex,
6319 ~VmaBlockVectorDefragmentationContext();
// Null when this context belongs to a default (non-custom-pool) block vector.
6321 VmaPool GetCustomPool()
const {
return m_hCustomPool; }
6322 VmaBlockVector* GetBlockVector()
const {
return m_pBlockVector; }
// The algorithm object is created lazily by Begin().
6323 VmaDefragmentationAlgorithm* GetAlgorithm()
const {
return m_pAlgorithm; }
6325 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
// Marks every allocation in the vector as a defragmentation candidate.
6326 void AddAll() { m_AllAllocations =
true; }
6328 void Begin(
bool overlappingMoveSupported);
6335 VmaBlockVector*
const m_pBlockVector;
6336 const uint32_t m_CurrFrameIndex;
6337 const uint32_t m_AlgorithmFlags;
// Owned by this object; created in Begin(), destroyed in the destructor.
6339 VmaDefragmentationAlgorithm* m_pAlgorithm;
// Allocations registered between AddAllocation() and Begin().
6347 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
6348 bool m_AllAllocations;
// Top-level defragmentation context returned to the user as
// VmaDefragmentationContext. Aggregates one block-vector context per default
// memory type plus one per participating custom pool.
// NOTE(review): this declaration is heavily elided in this extract - some
// member declarations between the visible fragments are not shown here.
6351 struct VmaDefragmentationContext_T
6354 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
6356 VmaDefragmentationContext_T(
6358 uint32_t currFrameIndex,
6361 ~VmaDefragmentationContext_T();
6363 void AddPools(uint32_t poolCount,
VmaPool* pPools);
6364 void AddAllocations(
6365 uint32_t allocationCount,
6367 VkBool32* pAllocationsChanged);
// Performs the actual pass, bounded separately for CPU-side and GPU-side work.
6375 VkResult Defragment(
6376 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
6377 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
6382 const uint32_t m_CurrFrameIndex;
6383 const uint32_t m_Flags;
// One context per memory type; entry is null until that type participates.
6386 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
// Contexts for custom pools added via AddPools(), created on demand.
6388 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
6391 #if VMA_RECORDING_ENABLED 6398 void WriteConfiguration(
6399 const VkPhysicalDeviceProperties& devProps,
6400 const VkPhysicalDeviceMemoryProperties& memProps,
6401 bool dedicatedAllocationExtensionEnabled);
6404 void RecordCreateAllocator(uint32_t frameIndex);
6405 void RecordDestroyAllocator(uint32_t frameIndex);
6406 void RecordCreatePool(uint32_t frameIndex,
6409 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
6410 void RecordAllocateMemory(uint32_t frameIndex,
6411 const VkMemoryRequirements& vkMemReq,
6414 void RecordAllocateMemoryPages(uint32_t frameIndex,
6415 const VkMemoryRequirements& vkMemReq,
6417 uint64_t allocationCount,
6419 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
6420 const VkMemoryRequirements& vkMemReq,
6421 bool requiresDedicatedAllocation,
6422 bool prefersDedicatedAllocation,
6425 void RecordAllocateMemoryForImage(uint32_t frameIndex,
6426 const VkMemoryRequirements& vkMemReq,
6427 bool requiresDedicatedAllocation,
6428 bool prefersDedicatedAllocation,
6431 void RecordFreeMemory(uint32_t frameIndex,
6433 void RecordFreeMemoryPages(uint32_t frameIndex,
6434 uint64_t allocationCount,
6436 void RecordResizeAllocation(
6437 uint32_t frameIndex,
6439 VkDeviceSize newSize);
6440 void RecordSetAllocationUserData(uint32_t frameIndex,
6442 const void* pUserData);
6443 void RecordCreateLostAllocation(uint32_t frameIndex,
6445 void RecordMapMemory(uint32_t frameIndex,
6447 void RecordUnmapMemory(uint32_t frameIndex,
6449 void RecordFlushAllocation(uint32_t frameIndex,
6450 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6451 void RecordInvalidateAllocation(uint32_t frameIndex,
6452 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6453 void RecordCreateBuffer(uint32_t frameIndex,
6454 const VkBufferCreateInfo& bufCreateInfo,
6457 void RecordCreateImage(uint32_t frameIndex,
6458 const VkImageCreateInfo& imageCreateInfo,
6461 void RecordDestroyBuffer(uint32_t frameIndex,
6463 void RecordDestroyImage(uint32_t frameIndex,
6465 void RecordTouchAllocation(uint32_t frameIndex,
6467 void RecordGetAllocationInfo(uint32_t frameIndex,
6469 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
6471 void RecordDefragmentationBegin(uint32_t frameIndex,
6474 void RecordDefragmentationEnd(uint32_t frameIndex,
6484 class UserDataString
6488 const char* GetString()
const {
return m_Str; }
6498 VMA_MUTEX m_FileMutex;
6500 int64_t m_StartCounter;
6502 void GetBasicParams(CallParams& outParams);
6505 template<
typename T>
6506 void PrintPointerList(uint64_t count,
const T* pItems)
6510 fprintf(m_File,
"%p", pItems[0]);
6511 for(uint64_t i = 1; i < count; ++i)
6513 fprintf(m_File,
" %p", pItems[i]);
6518 void PrintPointerList(uint64_t count,
const VmaAllocation* pItems);
6522 #endif // #if VMA_RECORDING_ENABLED 6525 struct VmaAllocator_T
6527 VMA_CLASS_NO_COPY(VmaAllocator_T)
6530 bool m_UseKhrDedicatedAllocation;
6532 bool m_AllocationCallbacksSpecified;
6533 VkAllocationCallbacks m_AllocationCallbacks;
6537 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
6538 VMA_MUTEX m_HeapSizeLimitMutex;
6540 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
6541 VkPhysicalDeviceMemoryProperties m_MemProps;
6544 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
6547 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
6548 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
6549 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
6555 const VkAllocationCallbacks* GetAllocationCallbacks()
const 6557 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
6561 return m_VulkanFunctions;
6564 VkDeviceSize GetBufferImageGranularity()
const 6567 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
6568 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
6571 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
6572 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
6574 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 6576 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
6577 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
6580 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 6582 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
6583 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
6586 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 6588 return IsMemoryTypeNonCoherent(memTypeIndex) ?
6589 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
6590 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
6593 bool IsIntegratedGpu()
const 6595 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
6598 #if VMA_RECORDING_ENABLED 6599 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
6602 void GetBufferMemoryRequirements(
6604 VkMemoryRequirements& memReq,
6605 bool& requiresDedicatedAllocation,
6606 bool& prefersDedicatedAllocation)
const;
6607 void GetImageMemoryRequirements(
6609 VkMemoryRequirements& memReq,
6610 bool& requiresDedicatedAllocation,
6611 bool& prefersDedicatedAllocation)
const;
6614 VkResult AllocateMemory(
6615 const VkMemoryRequirements& vkMemReq,
6616 bool requiresDedicatedAllocation,
6617 bool prefersDedicatedAllocation,
6618 VkBuffer dedicatedBuffer,
6619 VkImage dedicatedImage,
6621 VmaSuballocationType suballocType,
6622 size_t allocationCount,
6627 size_t allocationCount,
6630 VkResult ResizeAllocation(
6632 VkDeviceSize newSize);
6634 void CalculateStats(
VmaStats* pStats);
6636 #if VMA_STATS_STRING_ENABLED 6637 void PrintDetailedMap(
class VmaJsonWriter& json);
6640 VkResult DefragmentationBegin(
6644 VkResult DefragmentationEnd(
6651 void DestroyPool(
VmaPool pool);
6654 void SetCurrentFrameIndex(uint32_t frameIndex);
6655 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
6657 void MakePoolAllocationsLost(
6659 size_t* pLostAllocationCount);
6660 VkResult CheckPoolCorruption(
VmaPool hPool);
6661 VkResult CheckCorruption(uint32_t memoryTypeBits);
6665 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
6666 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
6671 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
6672 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
6674 void FlushOrInvalidateAllocation(
6676 VkDeviceSize offset, VkDeviceSize size,
6677 VMA_CACHE_OPERATION op);
6679 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
6682 VkDeviceSize m_PreferredLargeHeapBlockSize;
6684 VkPhysicalDevice m_PhysicalDevice;
6685 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
6687 VMA_RW_MUTEX m_PoolsMutex;
6689 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
6690 uint32_t m_NextPoolId;
6694 #if VMA_RECORDING_ENABLED 6695 VmaRecorder* m_pRecorder;
6700 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
6702 VkResult AllocateMemoryOfType(
6704 VkDeviceSize alignment,
6705 bool dedicatedAllocation,
6706 VkBuffer dedicatedBuffer,
6707 VkImage dedicatedImage,
6709 uint32_t memTypeIndex,
6710 VmaSuballocationType suballocType,
6711 size_t allocationCount,
6715 VkResult AllocateDedicatedMemoryPage(
6717 VmaSuballocationType suballocType,
6718 uint32_t memTypeIndex,
6719 const VkMemoryAllocateInfo& allocInfo,
6721 bool isUserDataString,
6726 VkResult AllocateDedicatedMemory(
6728 VmaSuballocationType suballocType,
6729 uint32_t memTypeIndex,
6731 bool isUserDataString,
6733 VkBuffer dedicatedBuffer,
6734 VkImage dedicatedImage,
6735 size_t allocationCount,
6745 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
6747 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
6750 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
6752 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
6755 template<
typename T>
6758 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
6761 template<
typename T>
6762 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
6764 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
6767 template<
typename T>
6768 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
6773 VmaFree(hAllocator, ptr);
6777 template<
typename T>
6778 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
6782 for(
size_t i = count; i--; )
6784 VmaFree(hAllocator, ptr);
6791 #if VMA_STATS_STRING_ENABLED 6793 class VmaStringBuilder
6796 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
6797 size_t GetLength()
const {
return m_Data.size(); }
6798 const char* GetData()
const {
return m_Data.data(); }
6800 void Add(
char ch) { m_Data.push_back(ch); }
6801 void Add(
const char* pStr);
6802 void AddNewLine() { Add(
'\n'); }
6803 void AddNumber(uint32_t num);
6804 void AddNumber(uint64_t num);
6805 void AddPointer(
const void* ptr);
6808 VmaVector< char, VmaStlAllocator<char> > m_Data;
6811 void VmaStringBuilder::Add(
const char* pStr)
6813 const size_t strLen = strlen(pStr);
6816 const size_t oldCount = m_Data.size();
6817 m_Data.resize(oldCount + strLen);
6818 memcpy(m_Data.data() + oldCount, pStr, strLen);
6822 void VmaStringBuilder::AddNumber(uint32_t num)
6825 VmaUint32ToStr(buf,
sizeof(buf), num);
6829 void VmaStringBuilder::AddNumber(uint64_t num)
6832 VmaUint64ToStr(buf,
sizeof(buf), num);
6836 void VmaStringBuilder::AddPointer(
const void* ptr)
6839 VmaPtrToStr(buf,
sizeof(buf), ptr);
6843 #endif // #if VMA_STATS_STRING_ENABLED 6848 #if VMA_STATS_STRING_ENABLED 6852 VMA_CLASS_NO_COPY(VmaJsonWriter)
6854 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
6857 void BeginObject(
bool singleLine =
false);
6860 void BeginArray(
bool singleLine =
false);
6863 void WriteString(
const char* pStr);
6864 void BeginString(
const char* pStr = VMA_NULL);
6865 void ContinueString(
const char* pStr);
6866 void ContinueString(uint32_t n);
6867 void ContinueString(uint64_t n);
6868 void ContinueString_Pointer(
const void* ptr);
6869 void EndString(
const char* pStr = VMA_NULL);
6871 void WriteNumber(uint32_t n);
6872 void WriteNumber(uint64_t n);
6873 void WriteBool(
bool b);
6877 static const char*
const INDENT;
6879 enum COLLECTION_TYPE
6881 COLLECTION_TYPE_OBJECT,
6882 COLLECTION_TYPE_ARRAY,
6886 COLLECTION_TYPE type;
6887 uint32_t valueCount;
6888 bool singleLineMode;
6891 VmaStringBuilder& m_SB;
6892 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
6893 bool m_InsideString;
6895 void BeginValue(
bool isString);
6896 void WriteIndent(
bool oneLess =
false);
6899 const char*
const VmaJsonWriter::INDENT =
" ";
6901 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
6903 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
6904 m_InsideString(false)
6908 VmaJsonWriter::~VmaJsonWriter()
6910 VMA_ASSERT(!m_InsideString);
6911 VMA_ASSERT(m_Stack.empty());
6914 void VmaJsonWriter::BeginObject(
bool singleLine)
6916 VMA_ASSERT(!m_InsideString);
6922 item.type = COLLECTION_TYPE_OBJECT;
6923 item.valueCount = 0;
6924 item.singleLineMode = singleLine;
6925 m_Stack.push_back(item);
6928 void VmaJsonWriter::EndObject()
6930 VMA_ASSERT(!m_InsideString);
6935 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
6939 void VmaJsonWriter::BeginArray(
bool singleLine)
6941 VMA_ASSERT(!m_InsideString);
6947 item.type = COLLECTION_TYPE_ARRAY;
6948 item.valueCount = 0;
6949 item.singleLineMode = singleLine;
6950 m_Stack.push_back(item);
6953 void VmaJsonWriter::EndArray()
6955 VMA_ASSERT(!m_InsideString);
6960 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
6964 void VmaJsonWriter::WriteString(
const char* pStr)
6970 void VmaJsonWriter::BeginString(
const char* pStr)
6972 VMA_ASSERT(!m_InsideString);
6976 m_InsideString =
true;
6977 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6979 ContinueString(pStr);
6983 void VmaJsonWriter::ContinueString(
const char* pStr)
6985 VMA_ASSERT(m_InsideString);
6987 const size_t strLen = strlen(pStr);
6988 for(
size_t i = 0; i < strLen; ++i)
7021 VMA_ASSERT(0 &&
"Character not currently supported.");
7027 void VmaJsonWriter::ContinueString(uint32_t n)
7029 VMA_ASSERT(m_InsideString);
7033 void VmaJsonWriter::ContinueString(uint64_t n)
7035 VMA_ASSERT(m_InsideString);
7039 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
7041 VMA_ASSERT(m_InsideString);
7042 m_SB.AddPointer(ptr);
7045 void VmaJsonWriter::EndString(
const char* pStr)
7047 VMA_ASSERT(m_InsideString);
7048 if(pStr != VMA_NULL && pStr[0] !=
'\0')
7050 ContinueString(pStr);
7053 m_InsideString =
false;
7056 void VmaJsonWriter::WriteNumber(uint32_t n)
7058 VMA_ASSERT(!m_InsideString);
7063 void VmaJsonWriter::WriteNumber(uint64_t n)
7065 VMA_ASSERT(!m_InsideString);
7070 void VmaJsonWriter::WriteBool(
bool b)
7072 VMA_ASSERT(!m_InsideString);
7074 m_SB.Add(b ?
"true" :
"false");
7077 void VmaJsonWriter::WriteNull()
7079 VMA_ASSERT(!m_InsideString);
7084 void VmaJsonWriter::BeginValue(
bool isString)
7086 if(!m_Stack.empty())
7088 StackItem& currItem = m_Stack.back();
7089 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7090 currItem.valueCount % 2 == 0)
7092 VMA_ASSERT(isString);
7095 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7096 currItem.valueCount % 2 != 0)
7100 else if(currItem.valueCount > 0)
7109 ++currItem.valueCount;
7113 void VmaJsonWriter::WriteIndent(
bool oneLess)
7115 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
7119 size_t count = m_Stack.size();
7120 if(count > 0 && oneLess)
7124 for(
size_t i = 0; i < count; ++i)
7131 #endif // #if VMA_STATS_STRING_ENABLED 7135 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
7137 if(IsUserDataString())
7139 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
7141 FreeUserDataString(hAllocator);
7143 if(pUserData != VMA_NULL)
7145 const char*
const newStrSrc = (
char*)pUserData;
7146 const size_t newStrLen = strlen(newStrSrc);
7147 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
7148 memcpy(newStrDst, newStrSrc, newStrLen + 1);
7149 m_pUserData = newStrDst;
7154 m_pUserData = pUserData;
7158 void VmaAllocation_T::ChangeBlockAllocation(
7160 VmaDeviceMemoryBlock* block,
7161 VkDeviceSize offset)
7163 VMA_ASSERT(block != VMA_NULL);
7164 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7167 if(block != m_BlockAllocation.m_Block)
7169 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
7170 if(IsPersistentMap())
7172 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
7173 block->Map(hAllocator, mapRefCount, VMA_NULL);
7176 m_BlockAllocation.m_Block = block;
7177 m_BlockAllocation.m_Offset = offset;
7180 void VmaAllocation_T::ChangeSize(VkDeviceSize newSize)
7182 VMA_ASSERT(newSize > 0);
7186 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
7188 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7189 m_BlockAllocation.m_Offset = newOffset;
7192 VkDeviceSize VmaAllocation_T::GetOffset()
const 7196 case ALLOCATION_TYPE_BLOCK:
7197 return m_BlockAllocation.m_Offset;
7198 case ALLOCATION_TYPE_DEDICATED:
7206 VkDeviceMemory VmaAllocation_T::GetMemory()
const 7210 case ALLOCATION_TYPE_BLOCK:
7211 return m_BlockAllocation.m_Block->GetDeviceMemory();
7212 case ALLOCATION_TYPE_DEDICATED:
7213 return m_DedicatedAllocation.m_hMemory;
7216 return VK_NULL_HANDLE;
7220 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 7224 case ALLOCATION_TYPE_BLOCK:
7225 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
7226 case ALLOCATION_TYPE_DEDICATED:
7227 return m_DedicatedAllocation.m_MemoryTypeIndex;
7234 void* VmaAllocation_T::GetMappedData()
const 7238 case ALLOCATION_TYPE_BLOCK:
7241 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
7242 VMA_ASSERT(pBlockData != VMA_NULL);
7243 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
7250 case ALLOCATION_TYPE_DEDICATED:
7251 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
7252 return m_DedicatedAllocation.m_pMappedData;
7259 bool VmaAllocation_T::CanBecomeLost()
const 7263 case ALLOCATION_TYPE_BLOCK:
7264 return m_BlockAllocation.m_CanBecomeLost;
7265 case ALLOCATION_TYPE_DEDICATED:
7273 VmaPool VmaAllocation_T::GetPool()
const 7275 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7276 return m_BlockAllocation.m_hPool;
7279 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7281 VMA_ASSERT(CanBecomeLost());
7287 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
7290 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7295 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
7301 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
7311 #if VMA_STATS_STRING_ENABLED 7314 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
7323 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 7325 json.WriteString(
"Type");
7326 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
7328 json.WriteString(
"Size");
7329 json.WriteNumber(m_Size);
7331 if(m_pUserData != VMA_NULL)
7333 json.WriteString(
"UserData");
7334 if(IsUserDataString())
7336 json.WriteString((
const char*)m_pUserData);
7341 json.ContinueString_Pointer(m_pUserData);
7346 json.WriteString(
"CreationFrameIndex");
7347 json.WriteNumber(m_CreationFrameIndex);
7349 json.WriteString(
"LastUseFrameIndex");
7350 json.WriteNumber(GetLastUseFrameIndex());
7352 if(m_BufferImageUsage != 0)
7354 json.WriteString(
"Usage");
7355 json.WriteNumber(m_BufferImageUsage);
7361 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
7363 VMA_ASSERT(IsUserDataString());
7364 if(m_pUserData != VMA_NULL)
7366 char*
const oldStr = (
char*)m_pUserData;
7367 const size_t oldStrLen = strlen(oldStr);
7368 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
7369 m_pUserData = VMA_NULL;
7373 void VmaAllocation_T::BlockAllocMap()
7375 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7377 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7383 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
7387 void VmaAllocation_T::BlockAllocUnmap()
7389 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7391 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7397 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
7401 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
7403 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7407 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7409 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
7410 *ppData = m_DedicatedAllocation.m_pMappedData;
7416 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
7417 return VK_ERROR_MEMORY_MAP_FAILED;
7422 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
7423 hAllocator->m_hDevice,
7424 m_DedicatedAllocation.m_hMemory,
7429 if(result == VK_SUCCESS)
7431 m_DedicatedAllocation.m_pMappedData = *ppData;
7438 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
7440 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7442 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7447 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
7448 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
7449 hAllocator->m_hDevice,
7450 m_DedicatedAllocation.m_hMemory);
7455 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
7459 #if VMA_STATS_STRING_ENABLED 7461 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
7465 json.WriteString(
"Blocks");
7468 json.WriteString(
"Allocations");
7471 json.WriteString(
"UnusedRanges");
7474 json.WriteString(
"UsedBytes");
7477 json.WriteString(
"UnusedBytes");
7482 json.WriteString(
"AllocationSize");
7483 json.BeginObject(
true);
7484 json.WriteString(
"Min");
7486 json.WriteString(
"Avg");
7488 json.WriteString(
"Max");
7495 json.WriteString(
"UnusedRangeSize");
7496 json.BeginObject(
true);
7497 json.WriteString(
"Min");
7499 json.WriteString(
"Avg");
7501 json.WriteString(
"Max");
7509 #endif // #if VMA_STATS_STRING_ENABLED 7511 struct VmaSuballocationItemSizeLess
7514 const VmaSuballocationList::iterator lhs,
7515 const VmaSuballocationList::iterator rhs)
const 7517 return lhs->size < rhs->size;
7520 const VmaSuballocationList::iterator lhs,
7521 VkDeviceSize rhsSize)
const 7523 return lhs->size < rhsSize;
7531 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
7533 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
7537 #if VMA_STATS_STRING_ENABLED 7539 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
7540 VkDeviceSize unusedBytes,
7541 size_t allocationCount,
7542 size_t unusedRangeCount)
const 7546 json.WriteString(
"TotalBytes");
7547 json.WriteNumber(GetSize());
7549 json.WriteString(
"UnusedBytes");
7550 json.WriteNumber(unusedBytes);
7552 json.WriteString(
"Allocations");
7553 json.WriteNumber((uint64_t)allocationCount);
7555 json.WriteString(
"UnusedRanges");
7556 json.WriteNumber((uint64_t)unusedRangeCount);
7558 json.WriteString(
"Suballocations");
7562 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
7563 VkDeviceSize offset,
7566 json.BeginObject(
true);
7568 json.WriteString(
"Offset");
7569 json.WriteNumber(offset);
7571 hAllocation->PrintParameters(json);
7576 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
7577 VkDeviceSize offset,
7578 VkDeviceSize size)
const 7580 json.BeginObject(
true);
7582 json.WriteString(
"Offset");
7583 json.WriteNumber(offset);
7585 json.WriteString(
"Type");
7586 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
7588 json.WriteString(
"Size");
7589 json.WriteNumber(size);
7594 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const 7600 #endif // #if VMA_STATS_STRING_ENABLED 7605 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
7606 VmaBlockMetadata(hAllocator),
7609 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7610 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
7614 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
7618 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
7620 VmaBlockMetadata::Init(size);
7623 m_SumFreeSize = size;
7625 VmaSuballocation suballoc = {};
7626 suballoc.offset = 0;
7627 suballoc.size = size;
7628 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7629 suballoc.hAllocation = VK_NULL_HANDLE;
7631 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
7632 m_Suballocations.push_back(suballoc);
7633 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
7635 m_FreeSuballocationsBySize.push_back(suballocItem);
7638 bool VmaBlockMetadata_Generic::Validate()
const 7640 VMA_VALIDATE(!m_Suballocations.empty());
7643 VkDeviceSize calculatedOffset = 0;
7645 uint32_t calculatedFreeCount = 0;
7647 VkDeviceSize calculatedSumFreeSize = 0;
7650 size_t freeSuballocationsToRegister = 0;
7652 bool prevFree =
false;
7654 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7655 suballocItem != m_Suballocations.cend();
7658 const VmaSuballocation& subAlloc = *suballocItem;
7661 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
7663 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
7665 VMA_VALIDATE(!prevFree || !currFree);
7667 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
7671 calculatedSumFreeSize += subAlloc.size;
7672 ++calculatedFreeCount;
7673 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7675 ++freeSuballocationsToRegister;
7679 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
7683 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
7684 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
7687 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
7690 calculatedOffset += subAlloc.size;
7691 prevFree = currFree;
7696 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
7698 VkDeviceSize lastSize = 0;
7699 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
7701 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
7704 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7706 VMA_VALIDATE(suballocItem->size >= lastSize);
7708 lastSize = suballocItem->size;
7712 VMA_VALIDATE(ValidateFreeSuballocationList());
7713 VMA_VALIDATE(calculatedOffset == GetSize());
7714 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
7715 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
7720 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 7722 if(!m_FreeSuballocationsBySize.empty())
7724 return m_FreeSuballocationsBySize.back()->size;
7732 bool VmaBlockMetadata_Generic::IsEmpty()
const 7734 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
7737 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 7741 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7753 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7754 suballocItem != m_Suballocations.cend();
7757 const VmaSuballocation& suballoc = *suballocItem;
7758 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
7771 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 7773 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7775 inoutStats.
size += GetSize();
7782 #if VMA_STATS_STRING_ENABLED 7784 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 7786 PrintDetailedMap_Begin(json,
7788 m_Suballocations.size() - (size_t)m_FreeCount,
7792 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7793 suballocItem != m_Suballocations.cend();
7794 ++suballocItem, ++i)
7796 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7798 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
7802 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
7806 PrintDetailedMap_End(json);
7809 #endif // #if VMA_STATS_STRING_ENABLED 7811 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
7812 uint32_t currentFrameIndex,
7813 uint32_t frameInUseCount,
7814 VkDeviceSize bufferImageGranularity,
7815 VkDeviceSize allocSize,
7816 VkDeviceSize allocAlignment,
7818 VmaSuballocationType allocType,
7819 bool canMakeOtherLost,
7821 VmaAllocationRequest* pAllocationRequest)
7823 VMA_ASSERT(allocSize > 0);
7824 VMA_ASSERT(!upperAddress);
7825 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7826 VMA_ASSERT(pAllocationRequest != VMA_NULL);
7827 VMA_HEAVY_ASSERT(Validate());
7830 if(canMakeOtherLost ==
false &&
7831 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
7837 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
7838 if(freeSuballocCount > 0)
7843 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7844 m_FreeSuballocationsBySize.data(),
7845 m_FreeSuballocationsBySize.data() + freeSuballocCount,
7846 allocSize + 2 * VMA_DEBUG_MARGIN,
7847 VmaSuballocationItemSizeLess());
7848 size_t index = it - m_FreeSuballocationsBySize.data();
7849 for(; index < freeSuballocCount; ++index)
7854 bufferImageGranularity,
7858 m_FreeSuballocationsBySize[index],
7860 &pAllocationRequest->offset,
7861 &pAllocationRequest->itemsToMakeLostCount,
7862 &pAllocationRequest->sumFreeSize,
7863 &pAllocationRequest->sumItemSize))
7865 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
7870 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
7872 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7873 it != m_Suballocations.end();
7876 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
7879 bufferImageGranularity,
7885 &pAllocationRequest->offset,
7886 &pAllocationRequest->itemsToMakeLostCount,
7887 &pAllocationRequest->sumFreeSize,
7888 &pAllocationRequest->sumItemSize))
7890 pAllocationRequest->item = it;
7898 for(
size_t index = freeSuballocCount; index--; )
7903 bufferImageGranularity,
7907 m_FreeSuballocationsBySize[index],
7909 &pAllocationRequest->offset,
7910 &pAllocationRequest->itemsToMakeLostCount,
7911 &pAllocationRequest->sumFreeSize,
7912 &pAllocationRequest->sumItemSize))
7914 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
7921 if(canMakeOtherLost)
7925 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
7926 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
7928 VmaAllocationRequest tmpAllocRequest = {};
7929 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
7930 suballocIt != m_Suballocations.end();
7933 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
7934 suballocIt->hAllocation->CanBecomeLost())
7939 bufferImageGranularity,
7945 &tmpAllocRequest.offset,
7946 &tmpAllocRequest.itemsToMakeLostCount,
7947 &tmpAllocRequest.sumFreeSize,
7948 &tmpAllocRequest.sumItemSize))
7950 tmpAllocRequest.item = suballocIt;
7952 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost() ||
7955 *pAllocationRequest = tmpAllocRequest;
7961 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
7970 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
7971 uint32_t currentFrameIndex,
7972 uint32_t frameInUseCount,
7973 VmaAllocationRequest* pAllocationRequest)
7975 while(pAllocationRequest->itemsToMakeLostCount > 0)
7977 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
7979 ++pAllocationRequest->item;
7981 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7982 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
7983 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
7984 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
7986 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
7987 --pAllocationRequest->itemsToMakeLostCount;
7995 VMA_HEAVY_ASSERT(Validate());
7996 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7997 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
8002 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
8004 uint32_t lostAllocationCount = 0;
8005 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8006 it != m_Suballocations.end();
8009 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
8010 it->hAllocation->CanBecomeLost() &&
8011 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8013 it = FreeSuballocation(it);
8014 ++lostAllocationCount;
8017 return lostAllocationCount;
8020 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
8022 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8023 it != m_Suballocations.end();
8026 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
8028 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
8030 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
8031 return VK_ERROR_VALIDATION_FAILED_EXT;
8033 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
8035 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8036 return VK_ERROR_VALIDATION_FAILED_EXT;
8044 void VmaBlockMetadata_Generic::Alloc(
8045 const VmaAllocationRequest& request,
8046 VmaSuballocationType type,
8047 VkDeviceSize allocSize,
8051 VMA_ASSERT(!upperAddress);
8052 VMA_ASSERT(request.item != m_Suballocations.end());
8053 VmaSuballocation& suballoc = *request.item;
8055 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8057 VMA_ASSERT(request.offset >= suballoc.offset);
8058 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
8059 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
8060 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
8064 UnregisterFreeSuballocation(request.item);
8066 suballoc.offset = request.offset;
8067 suballoc.size = allocSize;
8068 suballoc.type = type;
8069 suballoc.hAllocation = hAllocation;
8074 VmaSuballocation paddingSuballoc = {};
8075 paddingSuballoc.offset = request.offset + allocSize;
8076 paddingSuballoc.size = paddingEnd;
8077 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8078 VmaSuballocationList::iterator next = request.item;
8080 const VmaSuballocationList::iterator paddingEndItem =
8081 m_Suballocations.insert(next, paddingSuballoc);
8082 RegisterFreeSuballocation(paddingEndItem);
8088 VmaSuballocation paddingSuballoc = {};
8089 paddingSuballoc.offset = request.offset - paddingBegin;
8090 paddingSuballoc.size = paddingBegin;
8091 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8092 const VmaSuballocationList::iterator paddingBeginItem =
8093 m_Suballocations.insert(request.item, paddingSuballoc);
8094 RegisterFreeSuballocation(paddingBeginItem);
8098 m_FreeCount = m_FreeCount - 1;
8099 if(paddingBegin > 0)
8107 m_SumFreeSize -= allocSize;
8110 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
8112 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8113 suballocItem != m_Suballocations.end();
8116 VmaSuballocation& suballoc = *suballocItem;
8117 if(suballoc.hAllocation == allocation)
8119 FreeSuballocation(suballocItem);
8120 VMA_HEAVY_ASSERT(Validate());
8124 VMA_ASSERT(0 &&
"Not found!");
8127 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
8129 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8130 suballocItem != m_Suballocations.end();
8133 VmaSuballocation& suballoc = *suballocItem;
8134 if(suballoc.offset == offset)
8136 FreeSuballocation(suballocItem);
8140 VMA_ASSERT(0 &&
"Not found!");
8143 bool VmaBlockMetadata_Generic::ResizeAllocation(
const VmaAllocation alloc, VkDeviceSize newSize)
8145 typedef VmaSuballocationList::iterator iter_type;
8146 for(iter_type suballocItem = m_Suballocations.begin();
8147 suballocItem != m_Suballocations.end();
8150 VmaSuballocation& suballoc = *suballocItem;
8151 if(suballoc.hAllocation == alloc)
8153 iter_type nextItem = suballocItem;
8157 VMA_ASSERT(newSize != alloc->GetSize() && newSize > 0);
8160 if(newSize < alloc->GetSize())
8162 const VkDeviceSize sizeDiff = suballoc.size - newSize;
8165 if(nextItem != m_Suballocations.end())
8168 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8171 UnregisterFreeSuballocation(nextItem);
8172 nextItem->offset -= sizeDiff;
8173 nextItem->size += sizeDiff;
8174 RegisterFreeSuballocation(nextItem);
8180 VmaSuballocation newFreeSuballoc;
8181 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
8182 newFreeSuballoc.offset = suballoc.offset + newSize;
8183 newFreeSuballoc.size = sizeDiff;
8184 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8185 iter_type newFreeSuballocIt = m_Suballocations.insert(nextItem, newFreeSuballoc);
8186 RegisterFreeSuballocation(newFreeSuballocIt);
8195 VmaSuballocation newFreeSuballoc;
8196 newFreeSuballoc.hAllocation = VK_NULL_HANDLE;
8197 newFreeSuballoc.offset = suballoc.offset + newSize;
8198 newFreeSuballoc.size = sizeDiff;
8199 newFreeSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8200 m_Suballocations.push_back(newFreeSuballoc);
8202 iter_type newFreeSuballocIt = m_Suballocations.end();
8203 RegisterFreeSuballocation(--newFreeSuballocIt);
8208 suballoc.size = newSize;
8209 m_SumFreeSize += sizeDiff;
8214 const VkDeviceSize sizeDiff = newSize - suballoc.size;
8217 if(nextItem != m_Suballocations.end())
8220 if(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8223 if(nextItem->size < sizeDiff + VMA_DEBUG_MARGIN)
8229 if(nextItem->size > sizeDiff)
8232 UnregisterFreeSuballocation(nextItem);
8233 nextItem->offset += sizeDiff;
8234 nextItem->size -= sizeDiff;
8235 RegisterFreeSuballocation(nextItem);
8241 UnregisterFreeSuballocation(nextItem);
8242 m_Suballocations.erase(nextItem);
8258 suballoc.size = newSize;
8259 m_SumFreeSize -= sizeDiff;
8266 VMA_ASSERT(0 &&
"Not found!");
8270 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 8272 VkDeviceSize lastSize = 0;
8273 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
8275 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
8277 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
8278 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
8279 VMA_VALIDATE(it->size >= lastSize);
8280 lastSize = it->size;
// Checks whether an allocation of allocSize/allocAlignment/allocType can be
// placed starting inside *suballocItem. On success fills *pOffset with the
// aligned start offset and, when canMakeOtherLost is true, also reports how
// many following allocations would have to be made lost
// (*itemsToMakeLostCount) plus the free/used byte sums used for cost ranking.
// NOTE(review): this chunk is an elided extraction - numerous original lines
// (braces, else branches, early returns) are missing between the numbered
// statements below; comments describe the visible statements only.
8285 bool VmaBlockMetadata_Generic::CheckAllocation(
8286 uint32_t currentFrameIndex,
8287 uint32_t frameInUseCount,
8288 VkDeviceSize bufferImageGranularity,
8289 VkDeviceSize allocSize,
8290 VkDeviceSize allocAlignment,
8291 VmaSuballocationType allocType,
8292 VmaSuballocationList::const_iterator suballocItem,
8293 bool canMakeOtherLost,
8294 VkDeviceSize* pOffset,
8295 size_t* itemsToMakeLostCount,
8296 VkDeviceSize* pSumFreeSize,
8297 VkDeviceSize* pSumItemSize)
// Preconditions: positive size, non-free type, valid starting item & output ptr.
const 8299 VMA_ASSERT(allocSize > 0);
8300 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8301 VMA_ASSERT(suballocItem != m_Suballocations.cend());
8302 VMA_ASSERT(pOffset != VMA_NULL);
8304 *itemsToMakeLostCount = 0;
// --- Branch 1: the request may consume used-but-losable suballocations. ---
8308 if(canMakeOtherLost)
8310 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8312 *pSumFreeSize = suballocItem->size;
// Starting item is used: it only qualifies if it can become lost and is old
// enough relative to currentFrameIndex / frameInUseCount.
8316 if(suballocItem->hAllocation->CanBecomeLost() &&
8317 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8319 ++*itemsToMakeLostCount;
8320 *pSumItemSize = suballocItem->size;
// Remaining size of the whole block must be able to hold the allocation.
8329 if(GetSize() - suballocItem->offset < allocSize)
// Candidate offset starts at the item, shifted by the debug margin and
// rounded up to the requested alignment.
8335 *pOffset = suballocItem->offset;
8338 if(VMA_DEBUG_MARGIN > 0)
8340 *pOffset += VMA_DEBUG_MARGIN;
8344 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Respect bufferImageGranularity against previous suballocations: if a
// conflicting neighbor shares the same "page", bump alignment to granularity.
8348 if(bufferImageGranularity > 1)
8350 bool bufferImageGranularityConflict =
false;
8351 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8352 while(prevSuballocItem != m_Suballocations.cbegin())
8355 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8356 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8358 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8360 bufferImageGranularityConflict =
true;
8368 if(bufferImageGranularityConflict)
8370 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
// If alignment pushed the offset past the end of the starting item, fail.
8376 if(*pOffset >= suballocItem->offset + suballocItem->size)
8382 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
8385 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
8387 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
8389 if(suballocItem->offset + totalSize > GetSize())
// Walk forward accumulating free/losable items until totalSize is covered.
8396 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
8397 if(totalSize > suballocItem->size)
8399 VkDeviceSize remainingSize = totalSize - suballocItem->size;
8400 while(remainingSize > 0)
8403 if(lastSuballocItem == m_Suballocations.cend())
8407 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8409 *pSumFreeSize += lastSuballocItem->size;
8413 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
8414 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
8415 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8417 ++*itemsToMakeLostCount;
8418 *pSumItemSize += lastSuballocItem->size;
8425 remainingSize = (lastSuballocItem->size < remainingSize) ?
8426 remainingSize - lastSuballocItem->size : 0;
// Granularity check against following suballocations: conflicting neighbors
// on the same page must themselves be losable, and are counted as such.
8432 if(bufferImageGranularity > 1)
8434 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
8436 while(nextSuballocItem != m_Suballocations.cend())
8438 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8439 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8441 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8443 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
8444 if(nextSuballoc.hAllocation->CanBecomeLost() &&
8445 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8447 ++*itemsToMakeLostCount;
// --- Branch 2: plain placement inside a single free suballocation. ---
8466 const VmaSuballocation& suballoc = *suballocItem;
8467 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8469 *pSumFreeSize = suballoc.size;
// The free item alone must be large enough.
8472 if(suballoc.size < allocSize)
8478 *pOffset = suballoc.offset;
8481 if(VMA_DEBUG_MARGIN > 0)
8483 *pOffset += VMA_DEBUG_MARGIN;
8487 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Same backward granularity scan as in branch 1.
8491 if(bufferImageGranularity > 1)
8493 bool bufferImageGranularityConflict =
false;
8494 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8495 while(prevSuballocItem != m_Suballocations.cbegin())
8498 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8499 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8501 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8503 bufferImageGranularityConflict =
true;
8511 if(bufferImageGranularityConflict)
8513 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Padding introduced by margin/alignment plus the end margin must still fit.
8518 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
8521 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
8524 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward granularity scan: a conflicting following neighbor on the same page
// makes this placement unusable in this branch.
8531 if(bufferImageGranularity > 1)
8533 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
8535 while(nextSuballocItem != m_Suballocations.cend())
8537 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8538 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8540 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8559 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
8561 VMA_ASSERT(item != m_Suballocations.end());
8562 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8564 VmaSuballocationList::iterator nextItem = item;
8566 VMA_ASSERT(nextItem != m_Suballocations.end());
8567 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
8569 item->size += nextItem->size;
8571 m_Suballocations.erase(nextItem);
8574 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
8577 VmaSuballocation& suballoc = *suballocItem;
8578 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8579 suballoc.hAllocation = VK_NULL_HANDLE;
8583 m_SumFreeSize += suballoc.size;
8586 bool mergeWithNext =
false;
8587 bool mergeWithPrev =
false;
8589 VmaSuballocationList::iterator nextItem = suballocItem;
8591 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
8593 mergeWithNext =
true;
8596 VmaSuballocationList::iterator prevItem = suballocItem;
8597 if(suballocItem != m_Suballocations.begin())
8600 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8602 mergeWithPrev =
true;
8608 UnregisterFreeSuballocation(nextItem);
8609 MergeFreeWithNext(suballocItem);
8614 UnregisterFreeSuballocation(prevItem);
8615 MergeFreeWithNext(prevItem);
8616 RegisterFreeSuballocation(prevItem);
8621 RegisterFreeSuballocation(suballocItem);
8622 return suballocItem;
8626 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
8628 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8629 VMA_ASSERT(item->size > 0);
8633 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8635 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8637 if(m_FreeSuballocationsBySize.empty())
8639 m_FreeSuballocationsBySize.push_back(item);
8643 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
8651 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
8653 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8654 VMA_ASSERT(item->size > 0);
8658 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8660 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8662 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
8663 m_FreeSuballocationsBySize.data(),
8664 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
8666 VmaSuballocationItemSizeLess());
8667 for(
size_t index = it - m_FreeSuballocationsBySize.data();
8668 index < m_FreeSuballocationsBySize.size();
8671 if(m_FreeSuballocationsBySize[index] == item)
8673 VmaVectorRemove(m_FreeSuballocationsBySize, index);
8676 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
8678 VMA_ASSERT(0 &&
"Not found.");
8684 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
8685 VkDeviceSize bufferImageGranularity,
8686 VmaSuballocationType& inOutPrevSuballocType)
const 8688 if(bufferImageGranularity == 1 || IsEmpty())
8693 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
8694 bool typeConflictFound =
false;
8695 for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
8696 it != m_Suballocations.cend();
8699 const VmaSuballocationType suballocType = it->type;
8700 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
8702 minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
8703 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
8705 typeConflictFound =
true;
8707 inOutPrevSuballocType = suballocType;
8711 return typeConflictFound || minAlignment >= bufferImageGranularity;
8717 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
8718 VmaBlockMetadata(hAllocator),
8720 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8721 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8722 m_1stVectorIndex(0),
8723 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
8724 m_1stNullItemsBeginCount(0),
8725 m_1stNullItemsMiddleCount(0),
8726 m_2ndNullItemsCount(0)
8730 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
8734 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
8736 VmaBlockMetadata::Init(size);
8737 m_SumFreeSize = size;
// Validates all internal invariants of the linear (ring-buffer / double-stack)
// metadata: vector emptiness vs m_2ndVectorMode, null-item counters, per-item
// offset/size consistency, monotonically increasing offsets with debug
// margins, and that m_SumFreeSize matches block size minus used bytes.
// NOTE(review): this chunk is an elided extraction - lines incrementing the
// nullItem counters and several braces are missing between the numbered
// statements below; comments describe the visible statements only.
8740 bool VmaBlockMetadata_Linear::Validate()
const 8742 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8743 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// 2nd vector must be empty exactly when mode says SECOND_VECTOR_EMPTY, and a
// ring buffer cannot have an empty 1st vector with a non-empty 2nd.
8745 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
8746 VMA_VALIDATE(!suballocations1st.empty() ||
8747 suballocations2nd.empty() ||
8748 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
8750 if(!suballocations1st.empty())
// First real (non-null) item and last item of 1st vector must be used.
8753 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
8755 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
8757 if(!suballocations2nd.empty())
8760 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
// Null-item counters may never exceed the vectors they describe.
8763 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
8764 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
8766 VkDeviceSize sumUsedSize = 0;
8767 const size_t suballoc1stCount = suballocations1st.size();
8768 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Phase 1 (ring buffer only): walk 2nd vector forward from offset 0.
8770 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8772 const size_t suballoc2ndCount = suballocations2nd.size();
8773 size_t nullItem2ndCount = 0;
8774 for(
size_t i = 0; i < suballoc2ndCount; ++i)
8776 const VmaSuballocation& suballoc = suballocations2nd[i];
8777 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Free flag and null handle must agree; offsets must keep increasing.
8779 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8780 VMA_VALIDATE(suballoc.offset >= offset);
8784 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8785 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8786 sumUsedSize += suballoc.size;
8793 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8796 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Phase 2a: leading null items of the 1st vector must really be null.
8799 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
8801 const VmaSuballocation& suballoc = suballocations1st[i];
8802 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
8803 suballoc.hAllocation == VK_NULL_HANDLE);
8806 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// Phase 2b: remaining 1st-vector items, same per-item checks as phase 1.
8808 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
8810 const VmaSuballocation& suballoc = suballocations1st[i];
8811 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8813 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8814 VMA_VALIDATE(suballoc.offset >= offset);
8815 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
8819 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8820 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8821 sumUsedSize += suballoc.size;
8828 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8830 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// Phase 3 (double stack only): walk 2nd vector backward (top of the upper
// stack has the lowest index from the end).
8832 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8834 const size_t suballoc2ndCount = suballocations2nd.size();
8835 size_t nullItem2ndCount = 0;
8836 for(
size_t i = suballoc2ndCount; i--; )
8838 const VmaSuballocation& suballoc = suballocations2nd[i];
8839 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8841 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8842 VMA_VALIDATE(suballoc.offset >= offset);
8846 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8847 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8848 sumUsedSize += suballoc.size;
8855 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8858 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Global invariants: content fits in the block and free-size bookkeeping agrees.
8861 VMA_VALIDATE(offset <= GetSize());
8862 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
8867 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 8869 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
8870 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
8873 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 8875 const VkDeviceSize size = GetSize();
8887 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8889 switch(m_2ndVectorMode)
8891 case SECOND_VECTOR_EMPTY:
8897 const size_t suballocations1stCount = suballocations1st.size();
8898 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
8899 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
8900 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
8902 firstSuballoc.offset,
8903 size - (lastSuballoc.offset + lastSuballoc.size));
8907 case SECOND_VECTOR_RING_BUFFER:
8912 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8913 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
8914 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
8915 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
8919 case SECOND_VECTOR_DOUBLE_STACK:
8924 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8925 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
8926 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
8927 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Fills outInfo with detailed statistics for this linear block by sweeping the
// block from offset 0 to its size in address order: first the 2nd vector when
// in ring-buffer mode, then the 1st vector, then the 2nd vector (backward)
// when in double-stack mode. Each sweep alternates between registering the
// free gap before the next live allocation and the allocation itself.
// NOTE(review): this chunk is an elided extraction - the lines that actually
// accumulate into outInfo (counts, sizes, min/max ranges) are missing between
// the numbered statements below; comments describe the visible statements only.
8937 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 8939 const VkDeviceSize size = GetSize();
8940 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8941 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8942 const size_t suballoc1stCount = suballocations1st.size();
8943 const size_t suballoc2ndCount = suballocations2nd.size();
// lastOffset tracks how far the address-ordered sweep has progressed.
8954 VkDeviceSize lastOffset = 0;
// Sweep 1 (ring buffer): 2nd vector occupies [0, start of 1st vector).
8956 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8958 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8959 size_t nextAlloc2ndIndex = 0;
8960 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed) items to find the next live allocation.
8963 while(nextAlloc2ndIndex < suballoc2ndCount &&
8964 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8966 ++nextAlloc2ndIndex;
8970 if(nextAlloc2ndIndex < suballoc2ndCount)
8972 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Free gap before this allocation.
8975 if(lastOffset < suballoc.offset)
8978 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8992 lastOffset = suballoc.offset + suballoc.size;
8993 ++nextAlloc2ndIndex;
// No further live allocation: trailing free gap up to the 1st vector.
8999 if(lastOffset < freeSpace2ndTo1stEnd)
9001 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9009 lastOffset = freeSpace2ndTo1stEnd;
// Sweep 2: 1st vector, up to the bottom of the 2nd stack (double-stack mode)
// or to the end of the block otherwise.
9014 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9015 const VkDeviceSize freeSpace1stTo2ndEnd =
9016 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9017 while(lastOffset < freeSpace1stTo2ndEnd)
9020 while(nextAlloc1stIndex < suballoc1stCount &&
9021 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9023 ++nextAlloc1stIndex;
9027 if(nextAlloc1stIndex < suballoc1stCount)
9029 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9032 if(lastOffset < suballoc.offset)
9035 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9049 lastOffset = suballoc.offset + suballoc.size;
9050 ++nextAlloc1stIndex;
9056 if(lastOffset < freeSpace1stTo2ndEnd)
9058 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9066 lastOffset = freeSpace1stTo2ndEnd;
// Sweep 3 (double stack): walk the 2nd vector backward, since its items are
// stored top-down; indices decrement until SIZE_MAX marks exhaustion.
9070 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9072 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9073 while(lastOffset < size)
9076 while(nextAlloc2ndIndex != SIZE_MAX &&
9077 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9079 --nextAlloc2ndIndex;
9083 if(nextAlloc2ndIndex != SIZE_MAX)
9085 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9088 if(lastOffset < suballoc.offset)
9091 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9105 lastOffset = suballoc.offset + suballoc.size;
9106 --nextAlloc2ndIndex;
9112 if(lastOffset < size)
9114 const VkDeviceSize unusedRangeSize = size - lastOffset;
9130 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 9132 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9133 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9134 const VkDeviceSize size = GetSize();
9135 const size_t suballoc1stCount = suballocations1st.size();
9136 const size_t suballoc2ndCount = suballocations2nd.size();
9138 inoutStats.
size += size;
9140 VkDeviceSize lastOffset = 0;
9142 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9144 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9145 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
9146 while(lastOffset < freeSpace2ndTo1stEnd)
9149 while(nextAlloc2ndIndex < suballoc2ndCount &&
9150 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9152 ++nextAlloc2ndIndex;
9156 if(nextAlloc2ndIndex < suballoc2ndCount)
9158 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9161 if(lastOffset < suballoc.offset)
9164 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9175 lastOffset = suballoc.offset + suballoc.size;
9176 ++nextAlloc2ndIndex;
9181 if(lastOffset < freeSpace2ndTo1stEnd)
9184 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9191 lastOffset = freeSpace2ndTo1stEnd;
9196 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9197 const VkDeviceSize freeSpace1stTo2ndEnd =
9198 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9199 while(lastOffset < freeSpace1stTo2ndEnd)
9202 while(nextAlloc1stIndex < suballoc1stCount &&
9203 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9205 ++nextAlloc1stIndex;
9209 if(nextAlloc1stIndex < suballoc1stCount)
9211 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9214 if(lastOffset < suballoc.offset)
9217 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9228 lastOffset = suballoc.offset + suballoc.size;
9229 ++nextAlloc1stIndex;
9234 if(lastOffset < freeSpace1stTo2ndEnd)
9237 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9244 lastOffset = freeSpace1stTo2ndEnd;
9248 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9250 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9251 while(lastOffset < size)
9254 while(nextAlloc2ndIndex != SIZE_MAX &&
9255 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9257 --nextAlloc2ndIndex;
9261 if(nextAlloc2ndIndex != SIZE_MAX)
9263 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9266 if(lastOffset < suballoc.offset)
9269 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9280 lastOffset = suballoc.offset + suballoc.size;
9281 --nextAlloc2ndIndex;
9286 if(lastOffset < size)
9289 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Writes a detailed JSON description of this linear block to 'json'.
// Two passes over the same data:
//   Pass 1 (counting): walks 2nd vector (ring-buffer part), then 1st vector,
//   then 2nd vector again (double-stack part), accumulating allocation
//   counts, used bytes and unused-range count for the summary header.
//   Pass 2 (printing): repeats the identical walk, emitting each allocation
//   and unused range via PrintDetailedMap_Allocation / _UnusedRange.
// NOTE(review): this excerpt is missing many brace/else lines (sampled out);
// the code below is kept byte-identical to the original.
#if VMA_STATS_STRING_ENABLED 9303 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 9305 const VkDeviceSize size = GetSize();
9306 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9307 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9308 const size_t suballoc1stCount = suballocations1st.size();
9309 const size_t suballoc2ndCount = suballocations2nd.size();
// --- Pass 1: count allocations / unused ranges / used bytes ---
9313 size_t unusedRangeCount = 0;
9314 VkDeviceSize usedBytes = 0;
9316 VkDeviceSize lastOffset = 0;
9318 size_t alloc2ndCount = 0;
// Ring-buffer mode: 2nd vector occupies [0, offset of first live 1st item).
9319 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9321 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9322 size_t nextAlloc2ndIndex = 0;
9323 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip null (freed) items.
9326 while(nextAlloc2ndIndex < suballoc2ndCount &&
9327 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9329 ++nextAlloc2ndIndex;
9333 if(nextAlloc2ndIndex < suballoc2ndCount)
9335 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
9338 if(lastOffset < suballoc.offset)
9347 usedBytes += suballoc.size;
9350 lastOffset = suballoc.offset + suballoc.size;
9351 ++nextAlloc2ndIndex;
// Trailing free space before the 1st vector begins.
9356 if(lastOffset < freeSpace2ndTo1stEnd)
9363 lastOffset = freeSpace2ndTo1stEnd;
// Walk the 1st vector (starting past its leading null items).
9368 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9369 size_t alloc1stCount = 0;
9370 const VkDeviceSize freeSpace1stTo2ndEnd =
9371 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9372 while(lastOffset < freeSpace1stTo2ndEnd)
9375 while(nextAlloc1stIndex < suballoc1stCount &&
9376 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9378 ++nextAlloc1stIndex;
9382 if(nextAlloc1stIndex < suballoc1stCount)
9384 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9387 if(lastOffset < suballoc.offset)
9396 usedBytes += suballoc.size;
9399 lastOffset = suballoc.offset + suballoc.size;
9400 ++nextAlloc1stIndex;
9405 if(lastOffset < size)
9412 lastOffset = freeSpace1stTo2ndEnd;
// Double-stack mode: 2nd vector grows downward from the end of the block,
// so it is traversed back-to-front.
9416 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9418 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9419 while(lastOffset < size)
9422 while(nextAlloc2ndIndex != SIZE_MAX &&
9423 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9425 --nextAlloc2ndIndex;
9429 if(nextAlloc2ndIndex != SIZE_MAX)
9431 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9434 if(lastOffset < suballoc.offset)
9443 usedBytes += suballoc.size;
9446 lastOffset = suballoc.offset + suballoc.size;
9447 --nextAlloc2ndIndex;
9452 if(lastOffset < size)
// --- Emit summary header, then Pass 2: print the same walk ---
9464 const VkDeviceSize unusedBytes = size - usedBytes;
9465 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
9470 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9472 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9473 size_t nextAlloc2ndIndex = 0;
9474 while(lastOffset < freeSpace2ndTo1stEnd)
9477 while(nextAlloc2ndIndex < suballoc2ndCount &&
9478 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9480 ++nextAlloc2ndIndex;
9484 if(nextAlloc2ndIndex < suballoc2ndCount)
9486 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9489 if(lastOffset < suballoc.offset)
9492 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9493 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9498 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9501 lastOffset = suballoc.offset + suballoc.size;
9502 ++nextAlloc2ndIndex;
9507 if(lastOffset < freeSpace2ndTo1stEnd)
9510 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9511 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9515 lastOffset = freeSpace2ndTo1stEnd;
9520 nextAlloc1stIndex = m_1stNullItemsBeginCount;
9521 while(lastOffset < freeSpace1stTo2ndEnd)
9524 while(nextAlloc1stIndex < suballoc1stCount &&
9525 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9527 ++nextAlloc1stIndex;
9531 if(nextAlloc1stIndex < suballoc1stCount)
9533 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9536 if(lastOffset < suballoc.offset)
9539 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9540 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9545 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9548 lastOffset = suballoc.offset + suballoc.size;
9549 ++nextAlloc1stIndex;
9554 if(lastOffset < freeSpace1stTo2ndEnd)
9557 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9558 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9562 lastOffset = freeSpace1stTo2ndEnd;
9566 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9568 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9569 while(lastOffset < size)
9572 while(nextAlloc2ndIndex != SIZE_MAX &&
9573 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9575 --nextAlloc2ndIndex;
9579 if(nextAlloc2ndIndex != SIZE_MAX)
9581 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9584 if(lastOffset < suballoc.offset)
9587 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9588 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9593 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9596 lastOffset = suballoc.offset + suballoc.size;
9597 --nextAlloc2ndIndex;
9602 if(lastOffset < size)
9605 const VkDeviceSize unusedRangeSize = size - lastOffset;
9606 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9615 PrintDetailedMap_End(json);
// Tries to find a place for a new allocation of allocSize/allocAlignment in
// this linear block, filling *pAllocationRequest on success.
// Three placement strategies, selected by 'upperAddress' (parameter line
// sampled out of this excerpt) and the current mode of the 2nd vector:
//   1. Upper address (double stack): place below the lowest 2nd-vector item,
//      aligning DOWN.
//   2. Lower address, after the end of the 1st vector, aligning UP.
//   3. Ring buffer: wrap around and place before the 1st vector's first live
//      item, optionally making old allocations "lost" (canMakeOtherLost).
// bufferImageGranularity conflicts (linear vs optimal-tiling resources on
// the same page) force extra alignment; VMA_DEBUG_MARGIN reserves a gap
// around each allocation. Returns false (sampled-out lines) when no spot fits.
9617 #endif // #if VMA_STATS_STRING_ENABLED 9619 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
9620 uint32_t currentFrameIndex,
9621 uint32_t frameInUseCount,
9622 VkDeviceSize bufferImageGranularity,
9623 VkDeviceSize allocSize,
9624 VkDeviceSize allocAlignment,
9626 VmaSuballocationType allocType,
9627 bool canMakeOtherLost,
9629 VmaAllocationRequest* pAllocationRequest)
9631 VMA_ASSERT(allocSize > 0);
9632 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9633 VMA_ASSERT(pAllocationRequest != VMA_NULL);
9634 VMA_HEAVY_ASSERT(Validate());
9636 const VkDeviceSize size = GetSize();
9637 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9638 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// --- Strategy 1: upper address (double stack) ---
9642 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9644 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
9649 if(allocSize > size)
// Start from the end of the block, or just below the last 2nd-vector item.
9653 VkDeviceSize resultBaseOffset = size - allocSize;
9654 if(!suballocations2nd.empty())
9656 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9657 resultBaseOffset = lastSuballoc.offset - allocSize;
9658 if(allocSize > lastSuballoc.offset)
9665 VkDeviceSize resultOffset = resultBaseOffset;
9668 if(VMA_DEBUG_MARGIN > 0)
9670 if(resultOffset < VMA_DEBUG_MARGIN)
9674 resultOffset -= VMA_DEBUG_MARGIN;
// Growing downward, so alignment rounds DOWN.
9678 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
9682 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9684 bool bufferImageGranularityConflict =
false;
9685 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9687 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9688 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9690 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
9692 bufferImageGranularityConflict =
true;
9700 if(bufferImageGranularityConflict)
9702 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// Must not collide with the top of the 1st (bottom) stack.
9707 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
9708 suballocations1st.back().offset + suballocations1st.back().size :
9710 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
9714 if(bufferImageGranularity > 1)
9716 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9718 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9719 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9721 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
9735 pAllocationRequest->offset = resultOffset;
9736 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
9737 pAllocationRequest->sumItemSize = 0;
9739 pAllocationRequest->itemsToMakeLostCount = 0;
// --- Strategy 2: lower address, append after the 1st vector ---
9745 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9749 VkDeviceSize resultBaseOffset = 0;
9750 if(!suballocations1st.empty())
9752 const VmaSuballocation& lastSuballoc = suballocations1st.back();
9753 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9757 VkDeviceSize resultOffset = resultBaseOffset;
9760 if(VMA_DEBUG_MARGIN > 0)
9762 resultOffset += VMA_DEBUG_MARGIN;
9766 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
9770 if(bufferImageGranularity > 1 && !suballocations1st.empty())
9772 bool bufferImageGranularityConflict =
false;
9773 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9775 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9776 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9778 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9780 bufferImageGranularityConflict =
true;
9788 if(bufferImageGranularityConflict)
9790 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the bottom of the 2nd (top) stack, or at block end.
9794 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
9795 suballocations2nd.back().offset : size;
9798 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
9802 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9804 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9806 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9807 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9809 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
9823 pAllocationRequest->offset = resultOffset;
9824 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
9825 pAllocationRequest->sumItemSize = 0;
9827 pAllocationRequest->itemsToMakeLostCount = 0;
// --- Strategy 3: wrap around as ring buffer ---
9834 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9836 VMA_ASSERT(!suballocations1st.empty());
9838 VkDeviceSize resultBaseOffset = 0;
9839 if(!suballocations2nd.empty())
9841 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9842 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9846 VkDeviceSize resultOffset = resultBaseOffset;
9849 if(VMA_DEBUG_MARGIN > 0)
9851 resultOffset += VMA_DEBUG_MARGIN;
9855 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
9859 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9861 bool bufferImageGranularityConflict =
false;
9862 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
9864 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
9865 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9867 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9869 bufferImageGranularityConflict =
true;
9877 if(bufferImageGranularityConflict)
9879 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
9883 pAllocationRequest->itemsToMakeLostCount = 0;
9884 pAllocationRequest->sumItemSize = 0;
9885 size_t index1st = m_1stNullItemsBeginCount;
// Optionally count how many live 1st-vector items overlapping the candidate
// range could be made lost (only those old enough per frameInUseCount).
9887 if(canMakeOtherLost)
9889 while(index1st < suballocations1st.size() &&
9890 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
9893 const VmaSuballocation& suballoc = suballocations1st[index1st];
9894 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
9900 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
9901 if(suballoc.hAllocation->CanBecomeLost() &&
9902 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9904 ++pAllocationRequest->itemsToMakeLostCount;
9905 pAllocationRequest->sumItemSize += suballoc.size;
// Items on the same page as the end of the candidate range must also be lost.
9917 if(bufferImageGranularity > 1)
9919 while(index1st < suballocations1st.size())
9921 const VmaSuballocation& suballoc = suballocations1st[index1st];
9922 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
9924 if(suballoc.hAllocation != VK_NULL_HANDLE)
9927 if(suballoc.hAllocation->CanBecomeLost() &&
9928 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9930 ++pAllocationRequest->itemsToMakeLostCount;
9931 pAllocationRequest->sumItemSize += suballoc.size;
// Success if the candidate range fits before block end / next 1st item.
9950 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN < size) ||
9951 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
9955 if(bufferImageGranularity > 1)
9957 for(
size_t nextSuballocIndex = index1st;
9958 nextSuballocIndex < suballocations1st.size();
9959 nextSuballocIndex++)
9961 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
9962 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9964 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
9978 pAllocationRequest->offset = resultOffset;
9979 pAllocationRequest->sumFreeSize =
9980 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
9982 - pAllocationRequest->sumItemSize;
// Executes the "make lost" plan counted by CreateAllocationRequest: walks the
// 1st vector from its first live item and calls MakeLost() on allocations
// until pAllocationRequest->itemsToMakeLostCount of them have been freed.
// Freed slots become null items (type FREE, null handle) and their size is
// returned to m_SumFreeSize; CleanupAfterFree() then compacts the vectors.
// Only valid when the 2nd vector is empty or in ring-buffer mode (asserted).
9992 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
9993 uint32_t currentFrameIndex,
9994 uint32_t frameInUseCount,
9995 VmaAllocationRequest* pAllocationRequest)
// Nothing to do: request did not need to evict anything.
9997 if(pAllocationRequest->itemsToMakeLostCount == 0)
10002 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
10004 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10005 size_t index1st = m_1stNullItemsBeginCount;
10006 size_t madeLostCount = 0;
10007 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
10009 VMA_ASSERT(index1st < suballocations1st.size());
10010 VmaSuballocation& suballoc = suballocations1st[index1st];
10011 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10013 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
10014 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
10015 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Convert the slot into a null item; size goes back to the free pool.
10017 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10018 suballoc.hAllocation = VK_NULL_HANDLE;
10019 m_SumFreeSize += suballoc.size;
10020 ++m_1stNullItemsMiddleCount;
10031 CleanupAfterFree();
// Makes lost every allocation in this block that can become lost and is old
// enough (per currentFrameIndex/frameInUseCount, checked inside MakeLost).
// Scans both suballocation vectors, turning freed items into null items and
// updating the corresponding null-item counters / m_SumFreeSize.
// Returns the number of allocations made lost; compacts via CleanupAfterFree
// only if anything was freed.
10037 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
10039 uint32_t lostAllocationCount = 0;
10041 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10042 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10044 VmaSuballocation& suballoc = suballocations1st[i];
10045 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10046 suballoc.hAllocation->CanBecomeLost() &&
10047 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10049 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10050 suballoc.hAllocation = VK_NULL_HANDLE;
10051 ++m_1stNullItemsMiddleCount;
10052 m_SumFreeSize += suballoc.size;
10053 ++lostAllocationCount;
10057 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10058 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10060 VmaSuballocation& suballoc = suballocations2nd[i];
10061 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10062 suballoc.hAllocation->CanBecomeLost() &&
10063 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10065 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10066 suballoc.hAllocation = VK_NULL_HANDLE;
10067 ++m_2ndNullItemsCount;
10068 ++lostAllocationCount;
// NOTE(review): m_SumFreeSize is not incremented in the 2nd-vector loop in
// this excerpt — a line may have been sampled out; verify against upstream.
10072 if(lostAllocationCount)
10074 CleanupAfterFree();
10077 return lostAllocationCount;
// Validates the magic-value guard bytes written VMA_DEBUG_MARGIN before and
// immediately after every live allocation in both suballocation vectors.
// pBlockData: pointer to the mapped start of the block's memory.
// Returns VK_ERROR_VALIDATION_FAILED_EXT (after asserting) on the first
// corrupted guard found; otherwise falls through to success (sampled out).
10080 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
10082 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10083 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10085 const VmaSuballocation& suballoc = suballocations1st[i];
10086 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Guard before the allocation.
10088 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10090 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10091 return VK_ERROR_VALIDATION_FAILED_EXT;
// Guard after the allocation.
10093 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10095 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10096 return VK_ERROR_VALIDATION_FAILED_EXT;
10101 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10102 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10104 const VmaSuballocation& suballoc = suballocations2nd[i];
10105 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10107 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10109 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10110 return VK_ERROR_VALIDATION_FAILED_EXT;
10112 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10114 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10115 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits an allocation request produced by CreateAllocationRequest.
// Upper-address requests (branch condition sampled out of this excerpt) are
// pushed onto the 2nd vector, switching it into double-stack mode.
// Lower-address requests either append to the 1st vector (empty vector, or
// offset past its current end) or — when the offset wraps below the first
// 1st-vector item — push onto the 2nd vector in ring-buffer mode.
// Finally the allocated size is subtracted from m_SumFreeSize.
10123 void VmaBlockMetadata_Linear::Alloc(
10124 const VmaAllocationRequest& request,
10125 VmaSuballocationType type,
10126 VkDeviceSize allocSize,
10130 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
// Upper address: grow the 2nd vector downward (double stack).
10134 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
10135 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
10136 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10137 suballocations2nd.push_back(newSuballoc);
10138 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
10142 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10145 if(suballocations1st.empty())
10147 suballocations1st.push_back(newSuballoc);
// New allocation at the end of the 1st vector.
10152 if(request.offset >= suballocations1st.back().offset + suballocations1st.back().size)
10155 VMA_ASSERT(request.offset + allocSize <= GetSize());
10156 suballocations1st.push_back(newSuballoc);
// New allocation wrapped to the start of the block: ring-buffer mode.
10159 else if(request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset)
10161 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10163 switch(m_2ndVectorMode)
10165 case SECOND_VECTOR_EMPTY:
// First wrapped allocation turns the 2nd vector into a ring buffer.
10167 VMA_ASSERT(suballocations2nd.empty());
10168 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
10170 case SECOND_VECTOR_RING_BUFFER:
10172 VMA_ASSERT(!suballocations2nd.empty());
10174 case SECOND_VECTOR_DOUBLE_STACK:
10175 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
10181 suballocations2nd.push_back(newSuballoc);
10185 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
10190 m_SumFreeSize -= newSuballoc.size;
// Frees the given allocation by delegating to FreeAtOffset with the
// allocation's own offset inside this block.
10193 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
10195 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation that starts at 'offset'. Fast paths first:
//   - first live item of the 1st vector (becomes a leading null item),
//   - last item of the 2nd vector (pop_back),
//   - last item of the 1st vector when the 2nd is empty (pop_back).
// Otherwise falls back to binary search: in the 1st vector (sorted by
// ascending offset), then in the 2nd vector (ascending in ring-buffer mode,
// descending in double-stack mode). Found items become null items; every
// path returns the size to m_SumFreeSize and runs CleanupAfterFree().
// Asserts if the offset matches no allocation.
10198 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
10200 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10201 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10203 if(!suballocations1st.empty())
// Fast path: freeing the first live item of the 1st vector.
10206 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10207 if(firstSuballoc.offset == offset)
10209 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10210 firstSuballoc.hAllocation = VK_NULL_HANDLE;
10211 m_SumFreeSize += firstSuballoc.size;
10212 ++m_1stNullItemsBeginCount;
10213 CleanupAfterFree();
// Fast path: freeing the last item of the 2nd vector.
10219 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
10220 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10222 VmaSuballocation& lastSuballoc = suballocations2nd.back();
10223 if(lastSuballoc.offset == offset)
10225 m_SumFreeSize += lastSuballoc.size;
10226 suballocations2nd.pop_back();
10227 CleanupAfterFree();
// Fast path: freeing the last item of the 1st vector (no 2nd vector).
10232 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
10234 VmaSuballocation& lastSuballoc = suballocations1st.back();
10235 if(lastSuballoc.offset == offset)
10237 m_SumFreeSize += lastSuballoc.size;
10238 suballocations1st.pop_back();
10239 CleanupAfterFree();
// Slow path: binary search in the 1st vector by offset.
10246 VmaSuballocation refSuballoc;
10247 refSuballoc.offset = offset;
10249 SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
10250 suballocations1st.begin() + m_1stNullItemsBeginCount,
10251 suballocations1st.end(),
10253 if(it != suballocations1st.end())
10255 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10256 it->hAllocation = VK_NULL_HANDLE;
10257 ++m_1stNullItemsMiddleCount;
10258 m_SumFreeSize += it->size;
10259 CleanupAfterFree();
10264 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
// Slow path: binary search in the 2nd vector; sort order depends on mode.
10267 VmaSuballocation refSuballoc;
10268 refSuballoc.offset = offset;
10270 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
10271 VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
10272 VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
10273 if(it != suballocations2nd.end())
10275 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10276 it->hAllocation = VK_NULL_HANDLE;
10277 ++m_2ndNullItemsCount;
10278 m_SumFreeSize += it->size;
10279 CleanupAfterFree();
10284 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
// Heuristic: compact the 1st vector when it has more than 32 items and null
// (freed) items make up at least 3/2 of the live items — i.e. the vector is
// mostly holes and a compaction pass is worth its cost.
10287 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 10289 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10290 const size_t suballocCount = AccessSuballocations1st().size();
10291 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Housekeeping run after every free / make-lost: trims null items, optionally
// compacts the 1st vector, and when the 1st vector is exhausted promotes the
// 2nd vector to become the new 1st (by flipping m_1stVectorIndex).
// Invariants maintained: null-item counters match vector contents, and the
// vectors never end with null items.
10294 void VmaBlockMetadata_Linear::CleanupAfterFree()
10296 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10297 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Block became completely empty (condition line sampled out): reset all state.
10301 suballocations1st.clear();
10302 suballocations2nd.clear();
10303 m_1stNullItemsBeginCount = 0;
10304 m_1stNullItemsMiddleCount = 0;
10305 m_2ndNullItemsCount = 0;
10306 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10310 const size_t suballoc1stCount = suballocations1st.size();
10311 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10312 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Migrate null items at the front of the 1st vector from "middle" to "begin".
10315 while(m_1stNullItemsBeginCount < suballoc1stCount &&
10316 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10318 ++m_1stNullItemsBeginCount;
10319 --m_1stNullItemsMiddleCount;
// Pop null items off the back of the 1st vector.
10323 while(m_1stNullItemsMiddleCount > 0 &&
10324 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
10326 --m_1stNullItemsMiddleCount;
10327 suballocations1st.pop_back();
// Pop null items off the back of the 2nd vector.
10331 while(m_2ndNullItemsCount > 0 &&
10332 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
10334 --m_2ndNullItemsCount;
10335 suballocations2nd.pop_back();
// Full compaction of the 1st vector when the hole ratio is too high.
10338 if(ShouldCompact1st())
10340 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
10341 size_t srcIndex = m_1stNullItemsBeginCount;
10342 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
10344 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
10348 if(dstIndex != srcIndex)
10350 suballocations1st[dstIndex] = suballocations1st[srcIndex];
10354 suballocations1st.resize(nonNullItemCount);
10355 m_1stNullItemsBeginCount = 0;
10356 m_1stNullItemsMiddleCount = 0;
10360 if(suballocations2nd.empty())
10362 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector has no live items left: swap roles with the 2nd vector.
10366 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
10368 suballocations1st.clear();
10369 m_1stNullItemsBeginCount = 0;
10371 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10374 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10375 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
10376 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
10377 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10379 ++m_1stNullItemsBeginCount;
10380 --m_1stNullItemsMiddleCount;
10382 m_2ndNullItemsCount = 0;
// Flip which internal vector is considered "1st".
10383 m_1stVectorIndex ^= 1;
10388 VMA_HEAVY_ASSERT(Validate());
// Constructor: zero-initializes counters and the per-level free lists.
// Real setup (root node, level count) happens later in Init().
10395 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
10396 VmaBlockMetadata(hAllocator),
10398 m_AllocationCount(0),
10402 memset(m_FreeList, 0,
sizeof(m_FreeList));
// Destructor: recursively frees the whole buddy tree starting at the root.
10405 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
10407 DeleteNode(m_Root);
// Initializes the buddy allocator for a block of 'size' bytes.
// Usable size is rounded down to a power of two (the remainder is unusable
// by this algorithm); the number of levels is derived from MIN_NODE_SIZE and
// capped at MAX_LEVELS. A single free root node covering the whole usable
// range is created and put on level 0's free list.
10410 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
10412 VmaBlockMetadata::Init(size);
// Buddy allocation requires a power-of-two arena.
10414 m_UsableSize = VmaPrevPow2(size);
10415 m_SumFreeSize = m_UsableSize;
10419 while(m_LevelCount < MAX_LEVELS &&
10420 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
10425 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
10426 rootNode->offset = 0;
10427 rootNode->type = Node::TYPE_FREE;
10428 rootNode->parent = VMA_NULL;
10429 rootNode->buddy = VMA_NULL;
10432 AddToFreeListFront(0, rootNode);
// Consistency check for the buddy structure: recursively validates the node
// tree (via ValidateNode), then verifies allocation count and free-size
// totals match, every free-list entry is a FREE node with consistent
// prev/next links and correct back pointer, and all levels beyond
// m_LevelCount have empty free lists.
10435 bool VmaBlockMetadata_Buddy::Validate()
const 10438 ValidationContext ctx;
10439 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
10441 VMA_VALIDATE(
false &&
"ValidateNode failed.");
10443 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
10444 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
// Validate doubly-linked free list of each level.
10447 for(uint32_t level = 0; level < m_LevelCount; ++level)
10449 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
10450 m_FreeList[level].front->free.prev == VMA_NULL);
10452 for(Node* node = m_FreeList[level].front;
10454 node = node->free.next)
10456 VMA_VALIDATE(node->type == Node::TYPE_FREE);
10458 if(node->free.next == VMA_NULL)
10460 VMA_VALIDATE(m_FreeList[level].back == node);
10464 VMA_VALIDATE(node->free.next->free.prev == node);
// Levels past m_LevelCount must stay unused.
10470 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
10472 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
// Returns the largest free node size: the node size of the shallowest level
// whose free list is non-empty (levels are scanned from largest node size).
10478 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const 10480 for(uint32_t level = 0; level < m_LevelCount; ++level)
10482 if(m_FreeList[level].front != VMA_NULL)
10484 return LevelToNodeSize(level);
// Fills outInfo with statistics for this block by recursing over the node
// tree; the tail of the power-of-two rounding (unusableSize) is accounted
// for separately when present.
10490 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 10492 const VkDeviceSize unusableSize = GetUnusableSize();
10503 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
10505 if(unusableSize > 0)
// Accumulates this block's totals into pool-wide stats. The unusable tail
// (block size minus power-of-two usable size) is reported as unused space.
10514 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const 10516 const VkDeviceSize unusableSize = GetUnusableSize();
10518 inoutStats.
size += GetSize();
10519 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
10524 if(unusableSize > 0)
// JSON dump of the buddy block: computes stats first (for the header), then
// recursively prints the node tree, and finally reports the unusable tail
// (if any) as one trailing unused range.
#if VMA_STATS_STRING_ENABLED 10533 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const 10537 CalcAllocationStatInfo(stat);
10539 PrintDetailedMap_Begin(
10545 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
10547 const VkDeviceSize unusableSize = GetUnusableSize();
10548 if(unusableSize > 0)
10550 PrintDetailedMap_UnusedRange(json,
10555 PrintDetailedMap_End(json);
// Finds a free buddy node for the requested size/alignment.
// Upper-address requests are rejected (linear-algorithm-only feature).
// For allocation types with unknown or optimal image content, both size and
// alignment are inflated to bufferImageGranularity so granularity conflicts
// cannot occur within a node. The search walks levels from the target level
// (smallest fitting node) toward larger nodes, taking the first free node
// whose offset satisfies the alignment; the chosen level is smuggled to
// Alloc() through pAllocationRequest->customData.
10558 #endif // #if VMA_STATS_STRING_ENABLED 10560 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
10561 uint32_t currentFrameIndex,
10562 uint32_t frameInUseCount,
10563 VkDeviceSize bufferImageGranularity,
10564 VkDeviceSize allocSize,
10565 VkDeviceSize allocAlignment,
10567 VmaSuballocationType allocType,
10568 bool canMakeOtherLost,
10570 VmaAllocationRequest* pAllocationRequest)
10572 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Conservatively pad to bufferImageGranularity for ambiguous content types.
10576 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
10577 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
10578 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
10580 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
10581 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
10584 if(allocSize > m_UsableSize)
// Search from the smallest fitting level up toward the root.
10589 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10590 for(uint32_t level = targetLevel + 1; level--; )
10592 for(Node* freeNode = m_FreeList[level].front;
10593 freeNode != VMA_NULL;
10594 freeNode = freeNode->free.next)
10596 if(freeNode->offset % allocAlignment == 0)
10598 pAllocationRequest->offset = freeNode->offset;
10599 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
10600 pAllocationRequest->sumItemSize = 0;
10601 pAllocationRequest->itemsToMakeLostCount = 0;
// Pass the found level to Alloc() via customData.
10602 pAllocationRequest->customData = (
void*)(uintptr_t)level;
// The buddy algorithm does not support lost allocations, so this succeeds
// only when the request required no evictions (itemsToMakeLostCount == 0).
10611 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
10612 uint32_t currentFrameIndex,
10613 uint32_t frameInUseCount,
10614 VmaAllocationRequest* pAllocationRequest)
10620 return pAllocationRequest->itemsToMakeLostCount == 0;
// NOTE(review): body lines are missing from this excerpt. Presumably returns
// 0 since the buddy algorithm does not support lost allocations (see
// MakeRequestedAllocationsLost above) — confirm against upstream.
10623 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// Commits a buddy allocation request: locates the free node at the level
// recorded in request.customData whose offset matches the request, then
// repeatedly splits it (creating left/right buddy children one level down)
// until the node size matches the target level for allocSize. The final node
// becomes TYPE_ALLOCATION holding hAllocation.
10632 void VmaBlockMetadata_Buddy::Alloc(
10633 const VmaAllocationRequest& request,
10634 VmaSuballocationType type,
10635 VkDeviceSize allocSize,
10639 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10640 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Find the exact free node chosen by CreateAllocationRequest.
10642 Node* currNode = m_FreeList[currLevel].front;
10643 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
10644 while(currNode->offset != request.offset)
10646 currNode = currNode->free.next;
10647 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split down to the smallest node that still fits allocSize.
10651 while(currLevel < targetLevel)
10655 RemoveFromFreeList(currLevel, currNode);
10657 const uint32_t childrenLevel = currLevel + 1;
10660 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
10661 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
10663 leftChild->offset = currNode->offset;
10664 leftChild->type = Node::TYPE_FREE;
10665 leftChild->parent = currNode;
10666 leftChild->buddy = rightChild;
10668 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
10669 rightChild->type = Node::TYPE_FREE;
10670 rightChild->parent = currNode;
10671 rightChild->buddy = leftChild;
10674 currNode->type = Node::TYPE_SPLIT;
10675 currNode->split.leftChild = leftChild;
// Left child is pushed last so it ends up at the front (allocated first).
10678 AddToFreeListFront(childrenLevel, rightChild);
10679 AddToFreeListFront(childrenLevel, leftChild);
10684 currNode = m_FreeList[currLevel].front;
10693 VMA_ASSERT(currLevel == targetLevel &&
10694 currNode != VMA_NULL &&
10695 currNode->type == Node::TYPE_FREE);
10696 RemoveFromFreeList(currLevel, currNode);
10699 currNode->type = Node::TYPE_ALLOCATION;
10700 currNode->allocation.alloc = hAllocation;
10702 ++m_AllocationCount;
10704 m_SumFreeSize -= allocSize;
// Recursively deletes a node and, for split nodes, both of its children
// (the right child is reached as the left child's buddy).
10707 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
10709 if(node->type == Node::TYPE_SPLIT)
10711 DeleteNode(node->split.leftChild->buddy);
10712 DeleteNode(node->split.leftChild);
10715 vma_delete(GetAllocationCallbacks(), node);
// Recursive validation of one buddy-tree node: checks parent/buddy links
// (only the root may lack a buddy), then per node type accumulates into ctx:
//   FREE       -> whole node size counts as free;
//   ALLOCATION -> counts one allocation; slack (node size minus allocation
//                 size) counts as free;
//   SPLIT      -> recurses into both children at half the node size,
//                 checking their offsets.
10718 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const 10720 VMA_VALIDATE(level < m_LevelCount);
10721 VMA_VALIDATE(curr->parent == parent);
10722 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
10723 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
10726 case Node::TYPE_FREE:
10728 ctx.calculatedSumFreeSize += levelNodeSize;
10729 ++ctx.calculatedFreeCount;
10731 case Node::TYPE_ALLOCATION:
10732 ++ctx.calculatedAllocationCount;
// Internal fragmentation inside the node still counts as free space.
10733 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
10734 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
10736 case Node::TYPE_SPLIT:
10738 const uint32_t childrenLevel = level + 1;
10739 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
10740 const Node*
const leftChild = curr->split.leftChild;
10741 VMA_VALIDATE(leftChild != VMA_NULL);
10742 VMA_VALIDATE(leftChild->offset == curr->offset);
10743 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
10745 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
10747 const Node*
const rightChild = leftChild->buddy;
10748 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
10749 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
10751 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
// Maps an allocation size to the deepest (smallest-node) tree level whose
// node size still fits `allocSize`. Level 0 corresponds to the whole usable
// block size.
// NOTE(review): the loop body's level increment and the final return are
// elided from this extract.
10762 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const 10765 uint32_t level = 0;
10766 VkDeviceSize currLevelNodeSize = m_UsableSize;
10767 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
// Descend while the next (half-size) level would still fit the request.
10768 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
10771 currLevelNodeSize = nextLevelNodeSize;
10772 nextLevelNodeSize = currLevelNodeSize >> 1;
// Frees the allocation living at `offset`: walks from the root down the
// split chain to the owning leaf, marks it free, then repeatedly merges it
// with its buddy while the buddy is also free, moving up one level per
// merge and finally pushing the surviving free node onto its level's list.
10777 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
10780 Node* node = m_Root;
10781 VkDeviceSize nodeOffset = 0;
10782 uint32_t level = 0;
10783 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
// Descend: at each split, pick the left or right child by comparing the
// target offset with the midpoint of the current node.
// NOTE(review): braces and the `++level;` statement are elided here.
10784 while(node->type == Node::TYPE_SPLIT)
10786 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
10787 if(offset < nodeOffset + nextLevelSize)
10789 node = node->split.leftChild;
10793 node = node->split.leftChild->buddy;
10794 nodeOffset += nextLevelSize;
10797 levelNodeSize = nextLevelSize;
10800 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
10801 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
10804 --m_AllocationCount;
10805 m_SumFreeSize += alloc->GetSize();
10807 node->type = Node::TYPE_FREE;
// Merge with the buddy while it is also free: remove the buddy from its
// free list, delete both children, and turn the parent back into a free
// node. NOTE(review): the loop's tail (decrementing `level`, continuing
// from `parent`) is elided from this extract.
10810 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
10812 RemoveFromFreeList(level, node->buddy);
10813 Node*
const parent = node->parent;
10815 vma_delete(GetAllocationCallbacks(), node->buddy);
10816 vma_delete(GetAllocationCallbacks(), node);
10817 parent->type = Node::TYPE_FREE;
10825 AddToFreeListFront(level, node);
// Recursively accumulates statistics for one node of the buddy tree into
// `outInfo`.
// NOTE(review): the switch header and the statements that actually update
// outInfo counters/sizes for FREE and ALLOCATION nodes are elided from this
// extract; only the control skeleton is visible.
10828 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const 10832 case Node::TYPE_FREE:
10838 case Node::TYPE_ALLOCATION:
10840 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Remaining slack of this node beyond the allocation counts as unused.
10846 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
10847 if(unusedRangeSize > 0)
// Split node: recurse into both half-size children.
10856 case Node::TYPE_SPLIT:
10858 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
10859 const Node*
const leftChild = node->split.leftChild;
10860 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
10861 const Node*
const rightChild = leftChild->buddy;
10862 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
10870 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
10872 VMA_ASSERT(node->type == Node::TYPE_FREE);
10875 Node*
const frontNode = m_FreeList[level].front;
10876 if(frontNode == VMA_NULL)
10878 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
10879 node->free.prev = node->free.next = VMA_NULL;
10880 m_FreeList[level].front = m_FreeList[level].back = node;
10884 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
10885 node->free.prev = VMA_NULL;
10886 node->free.next = frontNode;
10887 frontNode->free.prev = node;
10888 m_FreeList[level].front = node;
10892 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
10894 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
10897 if(node->free.prev == VMA_NULL)
10899 VMA_ASSERT(m_FreeList[level].front == node);
10900 m_FreeList[level].front = node->free.next;
10904 Node*
const prevFreeNode = node->free.prev;
10905 VMA_ASSERT(prevFreeNode->free.next == node);
10906 prevFreeNode->free.next = node->free.next;
10910 if(node->free.next == VMA_NULL)
10912 VMA_ASSERT(m_FreeList[level].back == node);
10913 m_FreeList[level].back = node->free.prev;
10917 Node*
const nextFreeNode = node->free.next;
10918 VMA_ASSERT(nextFreeNode->free.prev == node);
10919 nextFreeNode->free.prev = node->free.prev;
// Recursively emits JSON entries for one node of the buddy tree: free
// ranges, allocations (plus their trailing slack), and split children.
// NOTE(review): the switch header on node->type, break statements and
// closing braces are elided from this extract.
10923 #if VMA_STATS_STRING_ENABLED 10924 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const 10928 case Node::TYPE_FREE:
10929 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
10931 case Node::TYPE_ALLOCATION:
10933 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
10934 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Report the unused tail when the allocation is smaller than the node.
10935 if(allocSize < levelNodeSize)
10937 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
10941 case Node::TYPE_SPLIT:
10943 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
10944 const Node*
const leftChild = node->split.leftChild;
10945 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
10946 const Node*
const rightChild = leftChild->buddy;
10947 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
// Constructor: puts the block into a "not yet initialized" state; actual
// resources are assigned later in Init().
// NOTE(review): some member initializers between the ones shown are elided
// from this extract (gaps at original lines 10963 and 10965).
10954 #endif // #if VMA_STATS_STRING_ENABLED 10960 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
10961 m_pMetadata(VMA_NULL),
10962 m_MemoryTypeIndex(UINT32_MAX),
10964 m_hMemory(VK_NULL_HANDLE),
10966 m_pMappedData(VMA_NULL)
// Binds a freshly allocated VkDeviceMemory to this block and creates the
// metadata object that manages suballocations inside it.
// NOTE(review): the switch labels selecting between Linear/Buddy/Generic
// metadata based on `algorithm` are elided from this extract; only the
// three vma_new branches are visible.
10970 void VmaDeviceMemoryBlock::Init(
10972 uint32_t newMemoryTypeIndex,
10973 VkDeviceMemory newMemory,
10974 VkDeviceSize newSize,
10976 uint32_t algorithm)
// Must not already own a memory handle.
10978 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
10980 m_MemoryTypeIndex = newMemoryTypeIndex;
10982 m_hMemory = newMemory;
10987 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
10990 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
10996 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
10998 m_pMetadata->Init(newSize);
11001 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
11005 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
11007 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
11008 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
11009 m_hMemory = VK_NULL_HANDLE;
11011 vma_delete(allocator, m_pMetadata);
11012 m_pMetadata = VMA_NULL;
11015 bool VmaDeviceMemoryBlock::Validate()
const 11017 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
11018 (m_pMetadata->GetSize() != 0));
11020 return m_pMetadata->Validate();
11023 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
11025 void* pData =
nullptr;
11026 VkResult res = Map(hAllocator, 1, &pData);
11027 if(res != VK_SUCCESS)
11032 res = m_pMetadata->CheckCorruption(pData);
11034 Unmap(hAllocator, 1);
// Reference-counted mapping of the whole block. If already mapped, just
// bumps m_MapCount and hands back the cached pointer; otherwise performs
// the actual vkMapMemory.
// NOTE(review): the early-return for count == 0, the vkMapMemory
// offset/size/flags/&m_pMappedData arguments and the return statements are
// elided from this extract.
11039 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
11046 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
// Already mapped: increment the count and return the cached pointer.
11047 if(m_MapCount != 0)
11049 m_MapCount += count;
11050 VMA_ASSERT(m_pMappedData != VMA_NULL);
11051 if(ppData != VMA_NULL)
11053 *ppData = m_pMappedData;
// First mapping: call into the Vulkan dispatch table.
11059 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
11060 hAllocator->m_hDevice,
11066 if(result == VK_SUCCESS)
11068 if(ppData != VMA_NULL)
11070 *ppData = m_pMappedData;
11072 m_MapCount = count;
11078 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
11085 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11086 if(m_MapCount >= count)
11088 m_MapCount -= count;
11089 if(m_MapCount == 0)
11091 m_pMappedData = VMA_NULL;
11092 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
11097 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Writes corruption-detection magic values into the debug margins directly
// before and after an allocation. Requires VMA_DEBUG_MARGIN > 0 and
// corruption detection enabled.
// NOTE(review): the pData declaration, the failure return and the final
// return are elided from this extract.
11101 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11103 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11104 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11107 VkResult res = Map(hAllocator, 1, &pData);
11108 if(res != VK_SUCCESS)
// One magic value in the margin before, one right after the allocation.
11113 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
11114 VmaWriteMagicValue(pData, allocOffset + allocSize);
11116 Unmap(hAllocator, 1);
// Checks the magic values written by WriteMagicValueAroundAllocation; an
// overwritten value means a buffer overrun into the debug margins and
// triggers a corruption assertion.
// NOTE(review): the pData declaration, the failure return and the final
// return are elided from this extract.
11121 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11123 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11124 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11127 VkResult res = Map(hAllocator, 1, &pData);
11128 if(res != VK_SUCCESS)
11133 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
11135 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
11137 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
11139 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
11142 Unmap(hAllocator, 1);
// Binds a VkBuffer to this block's memory at the allocation's offset,
// serialized by the block mutex.
// NOTE(review): the parameter list (hAllocator, hAllocation, hBuffer) and
// the m_hMemory argument are elided from this extract.
11147 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
11152 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11153 hAllocation->GetBlock() ==
this);
// Multithreading: binding into one VkDeviceMemory must be externally
// synchronized, hence the lock.
11155 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11156 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
11157 hAllocator->m_hDevice,
11160 hAllocation->GetOffset());
// Binds a VkImage to this block's memory at the allocation's offset,
// serialized by the block mutex (mirrors BindBufferMemory).
// NOTE(review): the parameter list and the m_hMemory argument are elided
// from this extract.
11163 VkResult VmaDeviceMemoryBlock::BindImageMemory(
11168 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11169 hAllocation->GetBlock() ==
this);
11171 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11172 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
11173 hAllocator->m_hDevice,
11176 hAllocation->GetOffset());
// Fragment: zero-initialization from a stat-info helper whose signature is
// elided from this extract (presumably InitStatInfo(VmaStatInfo&) — confirm
// against the full file; min/max field initializers also appear elided).
11181 memset(&outInfo, 0,
sizeof(outInfo));
// Declaration of a post-accumulation pass over VmaStatInfo; body elided.
// NOTE(review): presumably computes derived averages — confirm upstream.
11200 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Pool constructor: forwards the create-info to an embedded block-vector
// member (the argument list matches VmaBlockVector's constructor below).
// NOTE(review): several constructor parameters, member names and trailing
// arguments are elided from this extract.
11208 VmaPool_T::VmaPool_T(
11211 VkDeviceSize preferredBlockSize) :
11214 createInfo.memoryTypeIndex,
// A user-specified blockSize of 0 means "use the allocator's preferred
// block size for this memory type".
11215 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
11216 createInfo.minBlockCount,
11217 createInfo.maxBlockCount,
11219 createInfo.frameInUseCount,
// explicitBlockSize: true only when the user fixed the block size.
11221 createInfo.blockSize != 0,
11227 VmaPool_T::~VmaPool_T()
// Constructor of the per-memory-type (or per-pool) vector of device memory
// blocks. Only stores configuration; blocks are created on demand.
// NOTE(review): some parameters (e.g. the one feeding m_IsCustomPool) and
// trailing member initializers are elided from this extract.
11231 #if VMA_STATS_STRING_ENABLED 11233 #endif // #if VMA_STATS_STRING_ENABLED 11235 VmaBlockVector::VmaBlockVector(
11237 uint32_t memoryTypeIndex,
11238 VkDeviceSize preferredBlockSize,
11239 size_t minBlockCount,
11240 size_t maxBlockCount,
11241 VkDeviceSize bufferImageGranularity,
11242 uint32_t frameInUseCount,
11244 bool explicitBlockSize,
11245 uint32_t algorithm) :
11246 m_hAllocator(hAllocator),
11247 m_MemoryTypeIndex(memoryTypeIndex),
11248 m_PreferredBlockSize(preferredBlockSize),
11249 m_MinBlockCount(minBlockCount),
11250 m_MaxBlockCount(maxBlockCount),
11251 m_BufferImageGranularity(bufferImageGranularity),
11252 m_FrameInUseCount(frameInUseCount),
11253 m_IsCustomPool(isCustomPool),
11254 m_ExplicitBlockSize(explicitBlockSize),
11255 m_Algorithm(algorithm),
11256 m_HasEmptyBlock(false),
// The blocks vector allocates through the user-provided callbacks.
11257 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
11262 VmaBlockVector::~VmaBlockVector()
11264 for(
size_t i = m_Blocks.size(); i--; )
11266 m_Blocks[i]->Destroy(m_hAllocator);
11267 vma_delete(m_hAllocator, m_Blocks[i]);
11271 VkResult VmaBlockVector::CreateMinBlocks()
11273 for(
size_t i = 0; i < m_MinBlockCount; ++i)
11275 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
11276 if(res != VK_SUCCESS)
// Accumulates pool statistics over all blocks under a shared (read) lock.
// NOTE(review): initialization of the pStats fields before the loop is
// elided from this extract.
11284 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
11286 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
11288 const size_t blockCount = m_Blocks.size();
11297 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11299 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11300 VMA_ASSERT(pBlock);
11301 VMA_HEAVY_ASSERT(pBlock->Validate());
// Each block's metadata adds its own numbers into *pStats.
11302 pBlock->m_pMetadata->AddPoolStats(*pStats);
11306 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 11308 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
11309 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
11310 (VMA_DEBUG_MARGIN > 0) &&
11311 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
11314 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates `allocationCount` pages under one exclusive lock by calling
// AllocatePage repeatedly. On any failure, already-created pages are freed
// and the output array is zeroed so the caller never observes a partial
// result.
// NOTE(review): several parameters and AllocatePage arguments are elided
// from this extract.
11316 VkResult VmaBlockVector::Allocate(
11318 uint32_t currentFrameIndex,
11320 VkDeviceSize alignment,
11322 VmaSuballocationType suballocType,
11323 size_t allocationCount,
11327 VkResult res = VK_SUCCESS;
11330 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11331 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
11333 res = AllocatePage(
11340 pAllocations + allocIndex);
11341 if(res != VK_SUCCESS)
// Roll back on failure: free pages allocated so far, newest first.
11348 if(res != VK_SUCCESS)
11351 while(allocIndex--)
11353 Free(pAllocations[allocIndex]);
11355 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates a single page (one VmaAllocation) from this block vector.
// Strategy, in order: (1) try the last block, then all existing blocks in a
// strategy-dependent order; (2) create a new block (shrinking its size up
// to NEW_BLOCK_SIZE_SHIFT_MAX times if device memory is exhausted); (3) if
// allowed, retry up to VMA_ALLOCATION_TRY_COUNT times while making other
// (lost-capable) allocations lost to reclaim space.
// NOTE(review): many original lines are elided from this extract —
// parameters, condition tails, argument lists and braces; comments below
// describe only what is visible.
11361 VkResult VmaBlockVector::AllocatePage(
11363 uint32_t currentFrameIndex,
11365 VkDeviceSize alignment,
11367 VmaSuballocationType suballocType,
// A new block may only be created below the configured maximum count
// (the other half of this condition is elided here).
11374 const bool canCreateNewBlock =
11376 (m_Blocks.size() < m_MaxBlockCount);
11383 canMakeOtherLost =
false;
// Upper-address allocation is only supported by some configurations.
11387 if(isUpperAddress &&
11390 return VK_ERROR_FEATURE_NOT_PRESENT;
11404 return VK_ERROR_FEATURE_NOT_PRESENT;
// A request (plus both debug margins) larger than a whole block can
// never succeed in this vector.
11408 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
11410 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11418 if(!canMakeOtherLost || canCreateNewBlock)
// 1a. Fast path: try the most recently used (last) block first.
11427 if(!m_Blocks.empty())
11429 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
11430 VMA_ASSERT(pCurrBlock);
11431 VkResult res = AllocateFromBlock(
11442 if(res == VK_SUCCESS)
11444 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
// 1b. Forward scan over existing blocks.
11454 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11456 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11457 VMA_ASSERT(pCurrBlock);
11458 VkResult res = AllocateFromBlock(
11469 if(res == VK_SUCCESS)
11471 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 1c. Backward scan variant (strategy-dependent; selector elided).
11479 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11481 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11482 VMA_ASSERT(pCurrBlock);
11483 VkResult res = AllocateFromBlock(
11494 if(res == VK_SUCCESS)
11496 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 2. Create a brand new block.
11504 if(canCreateNewBlock)
11507 VkDeviceSize newBlockSize = m_PreferredBlockSize;
11508 uint32_t newBlockSizeShift = 0;
11509 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Heuristic: when the block size is not user-fixed, start smaller if
// no existing block is that large yet and the request still fits twice.
11511 if(!m_ExplicitBlockSize)
11514 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
11515 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
11517 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11518 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
11520 newBlockSize = smallerNewBlockSize;
11521 ++newBlockSizeShift;
11530 size_t newBlockIndex = 0;
11531 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On allocation failure, retry with progressively halved block sizes
// as long as the request still fits.
11533 if(!m_ExplicitBlockSize)
11535 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
11537 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11538 if(smallerNewBlockSize >= size)
11540 newBlockSize = smallerNewBlockSize;
11541 ++newBlockSizeShift;
11542 res = CreateBlock(newBlockSize, &newBlockIndex);
11551 if(res == VK_SUCCESS)
11553 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
11554 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
11556 res = AllocateFromBlock(
11567 if(res == VK_SUCCESS)
11569 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
11575 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// 3. Last resort: make other allocations lost to free up space.
11582 if(canMakeOtherLost)
11584 uint32_t tryIndex = 0;
11585 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
11587 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
11588 VmaAllocationRequest bestRequest = {};
11589 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Forward scan: pick the candidate with the lowest cost (fewest
// bytes of other allocations to sacrifice).
11595 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11597 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11598 VMA_ASSERT(pCurrBlock);
11599 VmaAllocationRequest currRequest = {};
11600 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11603 m_BufferImageGranularity,
11612 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11613 if(pBestRequestBlock == VMA_NULL ||
11614 currRequestCost < bestRequestCost)
11616 pBestRequestBlock = pCurrBlock;
11617 bestRequest = currRequest;
11618 bestRequestCost = currRequestCost;
// Cost 0 means nothing has to be lost — stop searching.
11620 if(bestRequestCost == 0)
// Backward scan variant (strategy-dependent; selector elided).
11631 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11633 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11634 VMA_ASSERT(pCurrBlock);
11635 VmaAllocationRequest currRequest = {};
11636 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11639 m_BufferImageGranularity,
11648 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11649 if(pBestRequestBlock == VMA_NULL ||
11650 currRequestCost < bestRequestCost ||
11653 pBestRequestBlock = pCurrBlock;
11654 bestRequest = currRequest;
11655 bestRequestCost = currRequestCost;
11657 if(bestRequestCost == 0 ||
11667 if(pBestRequestBlock != VMA_NULL)
// Persistently-mapped allocations keep the block mapped.
11671 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
11672 if(res != VK_SUCCESS)
// The request may have been invalidated concurrently — if making
// the requested allocations lost fails, retry the whole search.
11678 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
11684 if(pBestRequestBlock->m_pMetadata->IsEmpty())
11686 m_HasEmptyBlock =
false;
11689 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
11690 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, isUpperAddress, *pAllocation);
11691 (*pAllocation)->InitBlockAllocation(
11694 bestRequest.offset,
11700 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
11701 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
11702 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
11703 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11705 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
11707 if(IsCorruptionDetectionEnabled())
11709 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
11710 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// All retries exhausted without finding space.
11725 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
11727 return VK_ERROR_TOO_MANY_OBJECTS;
11731 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Returns one allocation to its block. Keeps at most one fully empty block
// alive as a cache; a second empty block is scheduled for deletion, which
// happens after the mutex is released (pBlockToDelete pattern).
// NOTE(review): the hAllocation parameter line, braces and the lock-scope
// boundaries are elided from this extract.
11734 void VmaBlockVector::Free(
11737 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
11741 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11743 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
// With corruption detection on, verify the margins before freeing.
11745 if(IsCorruptionDetectionEnabled())
11747 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
11748 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
// Persistently mapped allocations hold one map reference on the block.
11751 if(hAllocation->IsPersistentMap())
11753 pBlock->Unmap(m_hAllocator, 1);
11756 pBlock->m_pMetadata->Free(hAllocation);
11757 VMA_HEAVY_ASSERT(pBlock->Validate());
11759 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// This block just became empty: delete it only if another empty block
// already exists and we are above the minimum block count.
11762 if(pBlock->m_pMetadata->IsEmpty())
11765 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
11767 pBlockToDelete = pBlock;
11773 m_HasEmptyBlock =
true;
// Block not empty, but an empty block exists elsewhere: if it is the
// last one, it can be dropped now.
11778 else if(m_HasEmptyBlock)
11780 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
11781 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
11783 pBlockToDelete = pLastBlock;
11784 m_Blocks.pop_back();
11785 m_HasEmptyBlock =
false;
11789 IncrementallySortBlocks();
// Destruction of the VkDeviceMemory happens here, after the lock scope
// above has ended (deliberate, judging by the pBlockToDelete pattern).
11794 if(pBlockToDelete != VMA_NULL)
11796 VMA_DEBUG_LOG(
" Deleted empty allocation");
11797 pBlockToDelete->Destroy(m_hAllocator);
11798 vma_delete(m_hAllocator, pBlockToDelete);
11802 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 11804 VkDeviceSize result = 0;
11805 for(
size_t i = m_Blocks.size(); i--; )
11807 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
11808 if(result >= m_PreferredBlockSize)
11816 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
11818 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11820 if(m_Blocks[blockIndex] == pBlock)
11822 VmaVectorRemove(m_Blocks, blockIndex);
11829 void VmaBlockVector::IncrementallySortBlocks()
11834 for(
size_t i = 1; i < m_Blocks.size(); ++i)
11836 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
11838 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Attempts to carve the requested allocation out of one specific block:
// builds an allocation request from the block's metadata and, if it fits
// without making anything lost, commits it, initializes the VmaAllocation
// and applies the debug features (fill pattern, corruption margins).
// NOTE(review): several parameters, CreateAllocationRequest arguments and
// braces are elided from this extract.
11845 VkResult VmaBlockVector::AllocateFromBlock(
11846 VmaDeviceMemoryBlock* pBlock,
11848 uint32_t currentFrameIndex,
11850 VkDeviceSize alignment,
11853 VmaSuballocationType suballocType,
11862 VmaAllocationRequest currRequest = {};
11863 if(pBlock->m_pMetadata->CreateAllocationRequest(
11866 m_BufferImageGranularity,
// This path never sacrifices other allocations.
11876 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistently mapped allocations hold a map reference on the block.
11880 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
11881 if(res != VK_SUCCESS)
11888 if(pBlock->m_pMetadata->IsEmpty())
11890 m_HasEmptyBlock =
false;
11893 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
11894 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, isUpperAddress, *pAllocation);
11895 (*pAllocation)->InitBlockAllocation(
11898 currRequest.offset,
11904 VMA_HEAVY_ASSERT(pBlock->Validate());
11905 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
11906 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11908 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
11910 if(IsCorruptionDetectionEnabled())
11912 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
11913 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Request did not fit in this block.
11917 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates a new VkDeviceMemory of `blockSize`, wraps it in a
// VmaDeviceMemoryBlock appended to m_Blocks, and optionally reports its
// index via pNewBlockIndex.
// NOTE(review): the failure check after AllocateVulkanMemory, the Init()
// call header/arguments and the final return are elided from this extract.
11920 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
11922 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
11923 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
11924 allocInfo.allocationSize = blockSize;
11925 VkDeviceMemory mem = VK_NULL_HANDLE;
11926 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
11935 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
11940 allocInfo.allocationSize,
11944 m_Blocks.push_back(pBlock);
11945 if(pNewBlockIndex != VMA_NULL)
11947 *pNewBlockIndex = m_Blocks.size() - 1;
// Executes defragmentation moves on the CPU with memcpy through persistent
// mappings: maps every block involved in a move, invalidates the source
// range and flushes the destination range when the memory type is
// non-coherent, copies the bytes, then unmaps blocks mapped here.
// NOTE(review): the memcpy call header and some braces are elided from this
// extract.
11953 void VmaBlockVector::ApplyDefragmentationMovesCpu(
11954 class VmaBlockVectorDefragmentationContext* pDefragCtx,
11955 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
11957 const size_t blockCount = m_Blocks.size();
11958 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
11962 BLOCK_FLAG_USED = 0x00000001,
11963 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
// Per-block scratch state: flags + mapped pointer.
11971 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
11972 blockInfo(blockCount, VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
11973 memset(blockInfo.data(), 0, blockCount *
sizeof(BlockInfo));
// Pass 1: mark every block that participates in at least one move.
11976 const size_t moveCount = moves.size();
11977 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
11979 const VmaDefragmentationMove& move = moves[moveIndex];
11980 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
11981 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
11984 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Pass 2: ensure every used block is mapped, remembering which mappings
// this function created so it can undo them at the end.
11987 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
11989 BlockInfo& currBlockInfo = blockInfo[blockIndex];
11990 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
11991 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
11993 currBlockInfo.pMappedData = pBlock->GetMappedData();
11995 if(currBlockInfo.pMappedData == VMA_NULL)
11997 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
11998 if(pDefragCtx->res == VK_SUCCESS)
12000 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
// Pass 3: perform the actual copies, with cache maintenance on
// non-coherent memory (ranges aligned to nonCoherentAtomSize).
12007 if(pDefragCtx->res == VK_SUCCESS)
12009 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
12010 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
12012 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12014 const VmaDefragmentationMove& move = moves[moveIndex];
12016 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
12017 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
12019 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
// Invalidate the source range before reading it through the map.
12024 VmaDeviceMemoryBlock*
const pSrcBlock = m_Blocks[move.srcBlockIndex];
12025 memRange.memory = pSrcBlock->GetDeviceMemory();
12026 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
12027 memRange.size = VMA_MIN(
12028 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
12029 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
12030 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// The copy itself (memcpy header elided in this extract).
12035 reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
12036 reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
12037 static_cast<size_t>(move.size));
// Re-establish corruption margins around the moved allocation.
12039 if(IsCorruptionDetectionEnabled())
12041 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
12042 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
// Flush the destination range after writing through the map.
12048 VmaDeviceMemoryBlock*
const pDstBlock = m_Blocks[move.dstBlockIndex];
12049 memRange.memory = pDstBlock->GetDeviceMemory();
12050 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
12051 memRange.size = VMA_MIN(
12052 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
12053 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
12054 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
// Pass 4: unmap only what this function mapped, in reverse order.
12061 for(
size_t blockIndex = blockCount; blockIndex--; )
12063 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
12064 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
12066 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12067 pBlock->Unmap(m_hAllocator, 1);
// Records defragmentation moves into a command buffer as buffer-to-buffer
// copies: a temporary transfer buffer is created and bound over each block
// involved in a move, then vkCmdCopyBuffer is recorded per move. The
// temporary buffers are destroyed later in DefragmentationEnd.
// NOTE(review): the region initializer fields and some braces are elided
// from this extract.
12072 void VmaBlockVector::ApplyDefragmentationMovesGpu(
12073 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12074 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12075 VkCommandBuffer commandBuffer)
12077 const size_t blockCount = m_Blocks.size();
12079 pDefragCtx->blockContexts.resize(blockCount);
12080 memset(pDefragCtx->blockContexts.data(), 0, blockCount *
sizeof(VmaBlockDefragmentationContext));
// Pass 1: mark blocks that participate in at least one move.
12083 const size_t moveCount = moves.size();
12084 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12086 const VmaDefragmentationMove& move = moves[moveIndex];
12087 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12088 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12091 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
// Pass 2: create a whole-block transfer buffer for each used block and
// bind it at offset 0 of the block's memory.
12095 VkBufferCreateInfo bufCreateInfo = { VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO };
12096 bufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT |
12097 VK_BUFFER_USAGE_TRANSFER_DST_BIT;
12099 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12101 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
12102 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12103 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
12105 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
12106 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
12107 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
12108 if(pDefragCtx->res == VK_SUCCESS)
12110 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
12111 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
// Pass 3: record one copy region per move.
12118 if(pDefragCtx->res == VK_SUCCESS)
12120 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
12121 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
12123 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12125 const VmaDefragmentationMove& move = moves[moveIndex];
12127 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
12128 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
12130 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
12132 VkBufferCopy region = {
// NOTE(review): "®ion" below looks like mojibake for "&region" —
// confirm against the upstream file.
12136 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
12137 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, ®ion);
// Copies were only recorded, not executed: signal the caller that the
// command buffer still has to be submitted and waited on.
12142 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
12144 pDefragCtx->res = VK_NOT_READY;
// Fragment: body of a routine that destroys empty blocks down to
// m_MinBlockCount and recomputes m_HasEmptyBlock; its signature
// (presumably FreeEmptyBlocks(VmaDefragmentationStats*) given the
// pDefragmentationStats use below — confirm upstream) is elided from this
// extract.
12150 m_HasEmptyBlock =
false;
// Scan newest-first so VmaVectorRemove indices stay valid.
12151 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
12153 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12154 if(pBlock->m_pMetadata->IsEmpty())
12156 if(m_Blocks.size() > m_MinBlockCount)
12158 if(pDefragmentationStats != VMA_NULL)
12161 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
12164 VmaVectorRemove(m_Blocks, blockIndex);
12165 pBlock->Destroy(m_hAllocator);
12166 vma_delete(m_hAllocator, pBlock);
// An empty block survived because of the minimum-count floor.
12170 m_HasEmptyBlock =
true;
// Serializes this block vector as a JSON object under a shared (read)
// lock: configuration fields first (custom-pool vs default-pool layout
// appears to differ; the selector is elided), then one entry per block
// keyed by block id.
// NOTE(review): the if/else selecting between the two field layouts, some
// EndString/EndObject calls and braces are elided from this extract.
12176 #if VMA_STATS_STRING_ENABLED 12178 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
12180 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12182 json.BeginObject();
12186 json.WriteString(
"MemoryTypeIndex");
12187 json.WriteNumber(m_MemoryTypeIndex);
12189 json.WriteString(
"BlockSize");
12190 json.WriteNumber(m_PreferredBlockSize);
12192 json.WriteString(
"BlockCount");
12193 json.BeginObject(
true);
12194 if(m_MinBlockCount > 0)
12196 json.WriteString(
"Min");
12197 json.WriteNumber((uint64_t)m_MinBlockCount);
12199 if(m_MaxBlockCount < SIZE_MAX)
12201 json.WriteString(
"Max");
12202 json.WriteNumber((uint64_t)m_MaxBlockCount);
12204 json.WriteString(
"Cur");
12205 json.WriteNumber((uint64_t)m_Blocks.size());
12208 if(m_FrameInUseCount > 0)
12210 json.WriteString(
"FrameInUseCount");
12211 json.WriteNumber(m_FrameInUseCount);
12214 if(m_Algorithm != 0)
12216 json.WriteString(
"Algorithm");
12217 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
// Alternate (non-custom-pool) layout writes only the preferred size.
12222 json.WriteString(
"PreferredBlockSize");
12223 json.WriteNumber(m_PreferredBlockSize);
// One JSON entry per block, keyed by its id.
12226 json.WriteString(
"Blocks");
12227 json.BeginObject();
12228 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12230 json.BeginString();
12231 json.ContinueString(m_Blocks[i]->GetId());
12234 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// Runs one defragmentation pass over this block vector. Decides between a
// CPU (memcpy through mappings) and GPU (vkCmdCopyBuffer) strategy based
// on memory properties and the caller's byte/allocation budgets, computes
// the moves via the defragmentation algorithm, updates the caller's
// budgets and statistics, and applies the moves.
// NOTE(review): the pStats parameter line, the condition tail of
// canDefragmentOnCpu, and several braces are elided from this extract.
12241 #endif // #if VMA_STATS_STRING_ENABLED 12243 void VmaBlockVector::Defragment(
12244 class VmaBlockVectorDefragmentationContext* pCtx,
12246 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
12247 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
12248 VkCommandBuffer commandBuffer)
12250 pCtx->res = VK_SUCCESS;
12252 const VkMemoryPropertyFlags memPropFlags =
12253 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
12254 const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
12255 const bool isHostCoherent = (memPropFlags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0;
// CPU path needs budget (its extra condition is elided); GPU path needs
// budget and is disabled for host-visible+coherent memory when
// corruption detection is active.
12257 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
12259 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
12260 (VMA_DEBUG_DETECT_CORRUPTION == 0 || !(isHostVisible && isHostCoherent));
12263 if(canDefragmentOnCpu || canDefragmentOnGpu)
12265 bool defragmentOnGpu;
// Only one strategy available: take it.
12267 if(canDefragmentOnGpu != canDefragmentOnCpu)
12269 defragmentOnGpu = canDefragmentOnGpu;
// Both available: prefer GPU for device-local memory or integrated GPUs.
12274 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
12275 m_hAllocator->IsIntegratedGpu();
// The GPU path cannot handle overlapping src/dst regions.
12278 bool overlappingMoveSupported = !defragmentOnGpu;
12280 if(m_hAllocator->m_UseMutex)
12282 m_Mutex.LockWrite();
12283 pCtx->mutexLocked =
true;
12286 pCtx->Begin(overlappingMoveSupported);
// Pick the budgets matching the chosen strategy and compute the moves.
12290 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
12291 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
12292 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves =
12293 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()));
12294 pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);
// Accumulate statistics and charge the consumed budget back to the
// caller's in/out limits.
12297 if(pStats != VMA_NULL)
12299 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
12300 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
12301 pStats->bytesMoved += bytesMoved;
12302 pStats->allocationsMoved += allocationsMoved;
12303 VMA_ASSERT(bytesMoved <= maxBytesToMove);
12304 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
12305 if(defragmentOnGpu)
12307 maxGpuBytesToMove -= bytesMoved;
12308 maxGpuAllocationsToMove -= allocationsMoved;
12312 maxCpuBytesToMove -= bytesMoved;
12313 maxCpuAllocationsToMove -= allocationsMoved;
12317 if(pCtx->res >= VK_SUCCESS)
12319 if(defragmentOnGpu)
12321 ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
12325 ApplyDefragmentationMovesCpu(pCtx, moves);
// Finalizes a defragmentation pass: destroys the temporary transfer
// buffers created by the GPU path, frees now-empty blocks on success, and
// releases the write lock taken in Defragment().
// NOTE(review): the pStats parameter line and braces are elided from this
// extract.
12331 void VmaBlockVector::DefragmentationEnd(
12332 class VmaBlockVectorDefragmentationContext* pCtx,
// Destroy per-block temporary buffers, newest first.
12336 for(
size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
12338 VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
12339 if(blockCtx.hBuffer)
12341 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
12342 m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
12346 if(pCtx->res >= VK_SUCCESS)
12348 FreeEmptyBlocks(pStats);
// Unlock only if Defragment() actually took the lock.
12351 if(pCtx->mutexLocked)
12353 VMA_ASSERT(m_hAllocator->m_UseMutex);
12354 m_Mutex.UnlockWrite();
12358 size_t VmaBlockVector::CalcAllocationCount()
const 12361 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12363 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
12368 bool VmaBlockVector::IsBufferImageGranularityConflictPossible()
const 12370 if(m_BufferImageGranularity == 1)
12374 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
12375 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
12377 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[i];
12378 VMA_ASSERT(m_Algorithm == 0);
12379 VmaBlockMetadata_Generic*
const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
12380 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
12388 void VmaBlockVector::MakePoolAllocationsLost(
12389 uint32_t currentFrameIndex,
12390 size_t* pLostAllocationCount)
12392 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12393 size_t lostAllocationCount = 0;
12394 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12396 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12397 VMA_ASSERT(pBlock);
12398 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
12400 if(pLostAllocationCount != VMA_NULL)
12402 *pLostAllocationCount = lostAllocationCount;
12406 VkResult VmaBlockVector::CheckCorruption()
12408 if(!IsCorruptionDetectionEnabled())
12410 return VK_ERROR_FEATURE_NOT_PRESENT;
12413 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12414 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12416 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12417 VMA_ASSERT(pBlock);
12418 VkResult res = pBlock->CheckCorruption(m_hAllocator);
12419 if(res != VK_SUCCESS)
12427 void VmaBlockVector::AddStats(
VmaStats* pStats)
12429 const uint32_t memTypeIndex = m_MemoryTypeIndex;
12430 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
12432 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12434 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12436 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12437 VMA_ASSERT(pBlock);
12438 VMA_HEAVY_ASSERT(pBlock->Validate());
12440 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
12441 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12442 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12443 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
12450 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
12452 VmaBlockVector* pBlockVector,
12453 uint32_t currentFrameIndex,
12454 bool overlappingMoveSupported) :
12455 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12456 m_AllAllocations(false),
12457 m_AllocationCount(0),
12459 m_AllocationsMoved(0),
12460 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
12463 const size_t blockCount = m_pBlockVector->m_Blocks.size();
12464 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12466 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
12467 pBlockInfo->m_OriginalBlockIndex = blockIndex;
12468 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
12469 m_Blocks.push_back(pBlockInfo);
12473 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
12476 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
12478 for(
size_t i = m_Blocks.size(); i--; )
12480 vma_delete(m_hAllocator, m_Blocks[i]);
12484 void VmaDefragmentationAlgorithm_Generic::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
12487 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
12489 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
12490 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
12491 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
12493 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
12494 (*it)->m_Allocations.push_back(allocInfo);
12501 ++m_AllocationCount;
12505 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
12506 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12507 VkDeviceSize maxBytesToMove,
12508 uint32_t maxAllocationsToMove)
12510 if(m_Blocks.empty())
12523 size_t srcBlockMinIndex = 0;
12536 size_t srcBlockIndex = m_Blocks.size() - 1;
12537 size_t srcAllocIndex = SIZE_MAX;
12543 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
12545 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
12548 if(srcBlockIndex == srcBlockMinIndex)
12555 srcAllocIndex = SIZE_MAX;
12560 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
12564 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
12565 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
12567 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
12568 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
12569 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
12570 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
12573 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
12575 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
12576 VmaAllocationRequest dstAllocRequest;
12577 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
12578 m_CurrentFrameIndex,
12579 m_pBlockVector->GetFrameInUseCount(),
12580 m_pBlockVector->GetBufferImageGranularity(),
12587 &dstAllocRequest) &&
12589 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
12591 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
12594 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
12595 (m_BytesMoved + size > maxBytesToMove))
12600 VmaDefragmentationMove move;
12601 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
12602 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
12603 move.srcOffset = srcOffset;
12604 move.dstOffset = dstAllocRequest.offset;
12606 moves.push_back(move);
12608 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
12613 allocInfo.m_hAllocation);
12614 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
12616 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
12618 if(allocInfo.m_pChanged != VMA_NULL)
12620 *allocInfo.m_pChanged = VK_TRUE;
12623 ++m_AllocationsMoved;
12624 m_BytesMoved += size;
12626 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
12634 if(srcAllocIndex > 0)
12640 if(srcBlockIndex > 0)
12643 srcAllocIndex = SIZE_MAX;
12653 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount()
const 12656 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12658 if(m_Blocks[i]->m_HasNonMovableAllocations)
12666 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
12667 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12668 VkDeviceSize maxBytesToMove,
12669 uint32_t maxAllocationsToMove)
12671 if(!m_AllAllocations && m_AllocationCount == 0)
12676 const size_t blockCount = m_Blocks.size();
12677 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12679 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
12681 if(m_AllAllocations)
12683 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
12684 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
12685 it != pMetadata->m_Suballocations.end();
12688 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
12690 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
12691 pBlockInfo->m_Allocations.push_back(allocInfo);
12696 pBlockInfo->CalcHasNonMovableAllocations();
12700 pBlockInfo->SortAllocationsByOffsetDescending();
12706 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
12709 const uint32_t roundCount = 2;
12712 VkResult result = VK_SUCCESS;
12713 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
12715 result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
12721 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
12722 size_t dstBlockIndex, VkDeviceSize dstOffset,
12723 size_t srcBlockIndex, VkDeviceSize srcOffset)
12725 if(dstBlockIndex < srcBlockIndex)
12729 if(dstBlockIndex > srcBlockIndex)
12733 if(dstOffset < srcOffset)
12743 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
12745 VmaBlockVector* pBlockVector,
12746 uint32_t currentFrameIndex,
12747 bool overlappingMoveSupported) :
12748 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12749 m_OverlappingMoveSupported(overlappingMoveSupported),
12750 m_AllocationCount(0),
12751 m_AllAllocations(false),
12753 m_AllocationsMoved(0),
12754 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
12756 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
12760 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
12764 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
12765 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12766 VkDeviceSize maxBytesToMove,
12767 uint32_t maxAllocationsToMove)
12769 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
12771 const size_t blockCount = m_pBlockVector->GetBlockCount();
12772 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
12777 PreprocessMetadata();
12781 m_BlockInfos.resize(blockCount);
12782 for(
size_t i = 0; i < blockCount; ++i)
12784 m_BlockInfos[i].origBlockIndex = i;
12787 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
12788 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
12789 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
12794 FreeSpaceDatabase freeSpaceDb;
12796 size_t dstBlockInfoIndex = 0;
12797 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
12798 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
12799 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
12800 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
12801 VkDeviceSize dstOffset = 0;
12804 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
12806 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
12807 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
12808 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
12809 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
12810 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
12812 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
12813 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
12814 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
12815 if(m_AllocationsMoved == maxAllocationsToMove ||
12816 m_BytesMoved + srcAllocSize > maxBytesToMove)
12821 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
12824 size_t freeSpaceInfoIndex;
12825 VkDeviceSize dstAllocOffset;
12826 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
12827 freeSpaceInfoIndex, dstAllocOffset))
12829 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
12830 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
12831 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
12832 VkDeviceSize freeSpaceBlockSize = pFreeSpaceMetadata->GetSize();
12835 if(freeSpaceInfoIndex == srcBlockInfoIndex)
12837 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
12841 VmaSuballocation suballoc = *srcSuballocIt;
12842 suballoc.offset = dstAllocOffset;
12843 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
12844 m_BytesMoved += srcAllocSize;
12845 ++m_AllocationsMoved;
12847 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
12849 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
12850 srcSuballocIt = nextSuballocIt;
12852 InsertSuballoc(pFreeSpaceMetadata, suballoc);
12854 VmaDefragmentationMove move = {
12855 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
12856 srcAllocOffset, dstAllocOffset,
12858 moves.push_back(move);
12865 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
12867 VmaSuballocation suballoc = *srcSuballocIt;
12868 suballoc.offset = dstAllocOffset;
12869 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
12870 m_BytesMoved += srcAllocSize;
12871 ++m_AllocationsMoved;
12873 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
12875 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
12876 srcSuballocIt = nextSuballocIt;
12878 InsertSuballoc(pFreeSpaceMetadata, suballoc);
12880 VmaDefragmentationMove move = {
12881 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
12882 srcAllocOffset, dstAllocOffset,
12884 moves.push_back(move);
12889 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
12892 while(dstBlockInfoIndex < srcBlockInfoIndex &&
12893 dstAllocOffset + srcAllocSize > dstBlockSize)
12896 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
12898 ++dstBlockInfoIndex;
12899 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
12900 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
12901 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
12902 dstBlockSize = pDstMetadata->GetSize();
12904 dstAllocOffset = 0;
12908 if(dstBlockInfoIndex == srcBlockInfoIndex)
12910 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
12912 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
12914 bool skipOver = overlap;
12915 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
12919 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
12924 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
12926 dstOffset = srcAllocOffset + srcAllocSize;
12932 srcSuballocIt->offset = dstAllocOffset;
12933 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
12934 dstOffset = dstAllocOffset + srcAllocSize;
12935 m_BytesMoved += srcAllocSize;
12936 ++m_AllocationsMoved;
12938 VmaDefragmentationMove move = {
12939 srcOrigBlockIndex, dstOrigBlockIndex,
12940 srcAllocOffset, dstAllocOffset,
12942 moves.push_back(move);
12950 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
12951 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
12953 VmaSuballocation suballoc = *srcSuballocIt;
12954 suballoc.offset = dstAllocOffset;
12955 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
12956 dstOffset = dstAllocOffset + srcAllocSize;
12957 m_BytesMoved += srcAllocSize;
12958 ++m_AllocationsMoved;
12960 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
12962 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
12963 srcSuballocIt = nextSuballocIt;
12965 pDstMetadata->m_Suballocations.push_back(suballoc);
12967 VmaDefragmentationMove move = {
12968 srcOrigBlockIndex, dstOrigBlockIndex,
12969 srcAllocOffset, dstAllocOffset,
12971 moves.push_back(move);
12977 m_BlockInfos.clear();
12979 PostprocessMetadata();
12984 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
12986 const size_t blockCount = m_pBlockVector->GetBlockCount();
12987 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12989 VmaBlockMetadata_Generic*
const pMetadata =
12990 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
12991 pMetadata->m_FreeCount = 0;
12992 pMetadata->m_SumFreeSize = pMetadata->GetSize();
12993 pMetadata->m_FreeSuballocationsBySize.clear();
12994 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
12995 it != pMetadata->m_Suballocations.end(); )
12997 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
12999 VmaSuballocationList::iterator nextIt = it;
13001 pMetadata->m_Suballocations.erase(it);
13012 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
13014 const size_t blockCount = m_pBlockVector->GetBlockCount();
13015 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13017 VmaBlockMetadata_Generic*
const pMetadata =
13018 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
13019 const VkDeviceSize blockSize = pMetadata->GetSize();
13022 if(pMetadata->m_Suballocations.empty())
13024 pMetadata->m_FreeCount = 1;
13026 VmaSuballocation suballoc = {
13030 VMA_SUBALLOCATION_TYPE_FREE };
13031 pMetadata->m_Suballocations.push_back(suballoc);
13032 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
13037 VkDeviceSize offset = 0;
13038 VmaSuballocationList::iterator it;
13039 for(it = pMetadata->m_Suballocations.begin();
13040 it != pMetadata->m_Suballocations.end();
13043 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
13044 VMA_ASSERT(it->offset >= offset);
13047 if(it->offset > offset)
13049 ++pMetadata->m_FreeCount;
13050 const VkDeviceSize freeSize = it->offset - offset;
13051 VmaSuballocation suballoc = {
13055 VMA_SUBALLOCATION_TYPE_FREE };
13056 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
13057 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13059 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
13063 pMetadata->m_SumFreeSize -= it->size;
13064 offset = it->offset + it->size;
13068 if(offset < blockSize)
13070 ++pMetadata->m_FreeCount;
13071 const VkDeviceSize freeSize = blockSize - offset;
13072 VmaSuballocation suballoc = {
13076 VMA_SUBALLOCATION_TYPE_FREE };
13077 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
13078 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
13079 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13081 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
13086 pMetadata->m_FreeSuballocationsBySize.begin(),
13087 pMetadata->m_FreeSuballocationsBySize.end(),
13088 VmaSuballocationItemSizeLess());
13091 VMA_HEAVY_ASSERT(pMetadata->Validate());
13095 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc)
13098 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13099 while(it != pMetadata->m_Suballocations.end())
13101 if(it->offset < suballoc.offset)
13106 pMetadata->m_Suballocations.insert(it, suballoc);
13112 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
13115 VmaBlockVector* pBlockVector,
13116 uint32_t currFrameIndex,
13117 uint32_t algorithmFlags) :
13119 mutexLocked(false),
13120 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
13121 m_hAllocator(hAllocator),
13122 m_hCustomPool(hCustomPool),
13123 m_pBlockVector(pBlockVector),
13124 m_CurrFrameIndex(currFrameIndex),
13125 m_AlgorithmFlags(algorithmFlags),
13126 m_pAlgorithm(VMA_NULL),
13127 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
13128 m_AllAllocations(false)
13132 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
13134 vma_delete(m_hAllocator, m_pAlgorithm);
13137 void VmaBlockVectorDefragmentationContext::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
13139 AllocInfo info = { hAlloc, pChanged };
13140 m_Allocations.push_back(info);
13143 void VmaBlockVectorDefragmentationContext::Begin(
bool overlappingMoveSupported)
13145 const bool allAllocations = m_AllAllocations ||
13146 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
13158 if(VMA_DEBUG_MARGIN == 0 &&
13160 !m_pBlockVector->IsBufferImageGranularityConflictPossible())
13162 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
13163 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13167 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
13168 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13173 m_pAlgorithm->AddAll();
13177 for(
size_t i = 0, count = m_Allocations.size(); i < count; ++i)
13179 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
13187 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
13189 uint32_t currFrameIndex,
13192 m_hAllocator(hAllocator),
13193 m_CurrFrameIndex(currFrameIndex),
13196 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
13198 memset(m_DefaultPoolContexts, 0,
sizeof(m_DefaultPoolContexts));
13201 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
13203 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13205 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
13206 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13207 vma_delete(m_hAllocator, pBlockVectorCtx);
13209 for(
size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
13211 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
13212 if(pBlockVectorCtx)
13214 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13215 vma_delete(m_hAllocator, pBlockVectorCtx);
13220 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount,
VmaPool* pPools)
13222 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13224 VmaPool pool = pPools[poolIndex];
13227 if(pool->m_BlockVector.GetAlgorithm() == 0)
13229 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
13231 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13233 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
13235 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13240 if(!pBlockVectorDefragCtx)
13242 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13245 &pool->m_BlockVector,
13248 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
13251 pBlockVectorDefragCtx->AddAll();
13256 void VmaDefragmentationContext_T::AddAllocations(
13257 uint32_t allocationCount,
13259 VkBool32* pAllocationsChanged)
13262 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
13265 VMA_ASSERT(hAlloc);
13267 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
13269 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
13271 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
13273 const VmaPool hAllocPool = hAlloc->GetPool();
13275 if(hAllocPool != VK_NULL_HANDLE)
13278 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
13280 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13282 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
13284 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13288 if(!pBlockVectorDefragCtx)
13290 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13293 &hAllocPool->m_BlockVector,
13296 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
13303 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
13304 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
13305 if(!pBlockVectorDefragCtx)
13307 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13310 m_hAllocator->m_pBlockVectors[memTypeIndex],
13313 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
13317 if(pBlockVectorDefragCtx)
13319 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
13320 &pAllocationsChanged[allocIndex] : VMA_NULL;
13321 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
13327 VkResult VmaDefragmentationContext_T::Defragment(
13328 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
13329 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
13337 if(commandBuffer == VK_NULL_HANDLE)
13339 maxGpuBytesToMove = 0;
13340 maxGpuAllocationsToMove = 0;
13343 VkResult res = VK_SUCCESS;
13346 for(uint32_t memTypeIndex = 0;
13347 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
13350 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
13351 if(pBlockVectorCtx)
13353 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
13354 pBlockVectorCtx->GetBlockVector()->Defragment(
13357 maxCpuBytesToMove, maxCpuAllocationsToMove,
13358 maxGpuBytesToMove, maxGpuAllocationsToMove,
13360 if(pBlockVectorCtx->res != VK_SUCCESS)
13362 res = pBlockVectorCtx->res;
13368 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
13369 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
13372 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
13373 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
13374 pBlockVectorCtx->GetBlockVector()->Defragment(
13377 maxCpuBytesToMove, maxCpuAllocationsToMove,
13378 maxGpuBytesToMove, maxGpuAllocationsToMove,
13380 if(pBlockVectorCtx->res != VK_SUCCESS)
13382 res = pBlockVectorCtx->res;
13392 #if VMA_RECORDING_ENABLED 13394 VmaRecorder::VmaRecorder() :
13399 m_StartCounter(INT64_MAX)
13405 m_UseMutex = useMutex;
13406 m_Flags = settings.
flags;
13408 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
13409 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
13412 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
13415 return VK_ERROR_INITIALIZATION_FAILED;
13419 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
13420 fprintf(m_File,
"%s\n",
"1,5");
13425 VmaRecorder::~VmaRecorder()
13427 if(m_File != VMA_NULL)
13433 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
13435 CallParams callParams;
13436 GetBasicParams(callParams);
13438 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13439 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
13443 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
13445 CallParams callParams;
13446 GetBasicParams(callParams);
13448 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13449 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
13455 CallParams callParams;
13456 GetBasicParams(callParams);
13458 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13459 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
13470 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
13472 CallParams callParams;
13473 GetBasicParams(callParams);
13475 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13476 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
13481 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
13482 const VkMemoryRequirements& vkMemReq,
13486 CallParams callParams;
13487 GetBasicParams(callParams);
13489 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13490 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13491 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13493 vkMemReq.alignment,
13494 vkMemReq.memoryTypeBits,
13502 userDataStr.GetString());
13506 void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
13507 const VkMemoryRequirements& vkMemReq,
13509 uint64_t allocationCount,
13512 CallParams callParams;
13513 GetBasicParams(callParams);
13515 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13516 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13517 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
13519 vkMemReq.alignment,
13520 vkMemReq.memoryTypeBits,
13527 PrintPointerList(allocationCount, pAllocations);
13528 fprintf(m_File,
",%s\n", userDataStr.GetString());
13532 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
13533 const VkMemoryRequirements& vkMemReq,
13534 bool requiresDedicatedAllocation,
13535 bool prefersDedicatedAllocation,
13539 CallParams callParams;
13540 GetBasicParams(callParams);
13542 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13543 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13544 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13546 vkMemReq.alignment,
13547 vkMemReq.memoryTypeBits,
13548 requiresDedicatedAllocation ? 1 : 0,
13549 prefersDedicatedAllocation ? 1 : 0,
13557 userDataStr.GetString());
13561 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
13562 const VkMemoryRequirements& vkMemReq,
13563 bool requiresDedicatedAllocation,
13564 bool prefersDedicatedAllocation,
13568 CallParams callParams;
13569 GetBasicParams(callParams);
13571 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13572 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13573 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13575 vkMemReq.alignment,
13576 vkMemReq.memoryTypeBits,
13577 requiresDedicatedAllocation ? 1 : 0,
13578 prefersDedicatedAllocation ? 1 : 0,
13586 userDataStr.GetString());
13590 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
13593 CallParams callParams;
13594 GetBasicParams(callParams);
13596 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13597 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
13602 void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
13603 uint64_t allocationCount,
13606 CallParams callParams;
13607 GetBasicParams(callParams);
13609 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13610 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
13611 PrintPointerList(allocationCount, pAllocations);
13612 fprintf(m_File,
"\n");
13616 void VmaRecorder::RecordResizeAllocation(
13617 uint32_t frameIndex,
13619 VkDeviceSize newSize)
13621 CallParams callParams;
13622 GetBasicParams(callParams);
13624 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13625 fprintf(m_File,
"%u,%.3f,%u,vmaResizeAllocation,%p,%llu\n", callParams.threadId, callParams.time, frameIndex,
13626 allocation, newSize);
13630 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
13632 const void* pUserData)
13634 CallParams callParams;
13635 GetBasicParams(callParams);
13637 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13638 UserDataString userDataStr(
13641 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13643 userDataStr.GetString());
13647 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
13650 CallParams callParams;
13651 GetBasicParams(callParams);
13653 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13654 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
13659 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
13662 CallParams callParams;
13663 GetBasicParams(callParams);
13665 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13666 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
13671 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
13674 CallParams callParams;
13675 GetBasicParams(callParams);
13677 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13678 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaFlushAllocation call: allocation handle, offset and size.
// The argument lines for offset/size after the format string are missing
// from this extraction (numbering jumps 13690 -> 13697).
13683 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
13684 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13686 CallParams callParams;
13687 GetBasicParams(callParams);
13689 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13690 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaInvalidateAllocation call: allocation handle, offset and size.
// Same format as RecordFlushAllocation; trailing argument lines are missing
// from this extraction.
13697 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
13698 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13700 CallParams callParams;
13701 GetBasicParams(callParams);
13703 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13704 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaCreateBuffer call: buffer-create parameters followed by the
// allocation-create parameters and serialized user data.
// NOTE(review): several argument lines (13728-13732 range) are missing from
// this extraction, so the visible arguments do not cover all format
// specifiers — verify against the upstream file.
13711 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
13712 const VkBufferCreateInfo& bufCreateInfo,
13716 CallParams callParams;
13717 GetBasicParams(callParams);
13719 VmaMutexLock lock(m_FileMutex, m_UseMutex);
// Serialize user data (string or pointer form) for the CSV field.
13720 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13721 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13722 bufCreateInfo.flags,
13723 bufCreateInfo.size,
13724 bufCreateInfo.usage,
13725 bufCreateInfo.sharingMode,
13726 allocCreateInfo.
flags,
13727 allocCreateInfo.
usage,
13731 allocCreateInfo.
pool,
13733 userDataStr.GetString());
// Records a vmaCreateImage call: all image-create parameters followed by the
// allocation-create parameters and serialized user data.
// NOTE(review): argument lines 13763-13765/13767 are missing from this
// extraction — verify against the upstream file.
13737 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
13738 const VkImageCreateInfo& imageCreateInfo,
13742 CallParams callParams;
13743 GetBasicParams(callParams);
13745 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13746 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13747 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13748 imageCreateInfo.flags,
13749 imageCreateInfo.imageType,
13750 imageCreateInfo.format,
13751 imageCreateInfo.extent.width,
13752 imageCreateInfo.extent.height,
13753 imageCreateInfo.extent.depth,
13754 imageCreateInfo.mipLevels,
13755 imageCreateInfo.arrayLayers,
13756 imageCreateInfo.samples,
13757 imageCreateInfo.tiling,
13758 imageCreateInfo.usage,
13759 imageCreateInfo.sharingMode,
13760 imageCreateInfo.initialLayout,
13761 allocCreateInfo.
flags,
13762 allocCreateInfo.
usage,
13766 allocCreateInfo.
pool,
13768 userDataStr.GetString());
// Records a vmaDestroyBuffer call (allocation handle printed as %p; the
// trailing argument line is missing from this extraction).
13772 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
13775 CallParams callParams;
13776 GetBasicParams(callParams);
13778 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13779 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaDestroyImage call (same shape as RecordDestroyBuffer).
13784 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
13787 CallParams callParams;
13788 GetBasicParams(callParams);
13790 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13791 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaTouchAllocation call (allocation handle printed as %p).
13796 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
13799 CallParams callParams;
13800 GetBasicParams(callParams);
13802 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13803 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaGetAllocationInfo call (allocation handle printed as %p).
13808 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
13811 CallParams callParams;
13812 GetBasicParams(callParams);
13814 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13815 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaMakePoolAllocationsLost call (pool handle printed as %p; the
// trailing argument line is missing from this extraction).
13820 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
13823 CallParams callParams;
13824 GetBasicParams(callParams);
13826 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13827 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// Records a vmaDefragmentationBegin call: prefix, two pointer lists
// (presumably allocations and pools — the PrintPointerList calls between the
// fprintf statements are missing from this extraction), then the numeric
// defragmentation limits. NOTE(review): most argument lines (13841-13854)
// were dropped — verify against the upstream file.
13832 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
13836 CallParams callParams;
13837 GetBasicParams(callParams);
13839 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13840 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
13843 fprintf(m_File,
",");
13845 fprintf(m_File,
",%llu,%u,%llu,%u,%p,%p\n",
// Records a vmaDefragmentationEnd call (context handle printed as %p; the
// trailing argument line is missing from this extraction).
13855 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
13858 CallParams callParams;
13859 GetBasicParams(callParams);
13861 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13862 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
// Fragment of VmaRecorder::UserDataString's constructor (its signature is
// missing from this extraction). If user data is present it is either used
// directly as a C string (presumably when the "user data is string" flag is
// set — the branch condition is not visible here) or formatted as a pointer
// into the fixed m_PtrStr buffer.
13869 if(pUserData != VMA_NULL)
13873 m_Str = (
const char*)pUserData;
// MSVC-specific bounded sprintf; m_PtrStr is a member buffer.
13877 sprintf_s(m_PtrStr,
"%p", pUserData);
// Writes the "Config,Begin" ... "Config,End" header of the recording file:
// physical-device identity and limits, the memory heap/type tables, whether
// VK_KHR_dedicated_allocation is enabled, and the values of the VMA_DEBUG_*
// compile-time macros. This lets the replay tool reproduce the environment.
13887 void VmaRecorder::WriteConfiguration(
13888 const VkPhysicalDeviceProperties& devProps,
13889 const VkPhysicalDeviceMemoryProperties& memProps,
13890 bool dedicatedAllocationExtensionEnabled)
13892 fprintf(m_File,
"Config,Begin\n");
// Device identity.
13894 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
13895 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
13896 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
13897 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
13898 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
13899 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
// Limits relevant to allocation behavior.
13901 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
13902 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
13903 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// Memory heap table: size and flags per heap.
13905 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
13906 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
13908 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
13909 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
// Memory type table: heap index and property flags per type.
13911 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
13912 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
13914 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
13915 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
13918 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
// Compile-time macro values that influence allocator behavior; the casts to
// VkDeviceSize keep the value matched to the %llu format specifier.
13920 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
13921 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
13922 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
13923 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
13924 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
13925 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
13926 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
13927 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
13928 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
13930 fprintf(m_File,
"Config,End\n");
// Fills the per-call CSV prefix: Windows thread id plus seconds elapsed since
// recorder start, derived from QueryPerformanceCounter against the cached
// start counter (m_StartCounter) and frequency (m_Freq).
13933 void VmaRecorder::GetBasicParams(CallParams& outParams)
13935 outParams.threadId = GetCurrentThreadId();
13937 LARGE_INTEGER counter;
13938 QueryPerformanceCounter(&counter);
// Convert ticks to seconds; m_Freq is the QPC frequency (ticks/second).
13939 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
// Prints `count` allocation handles to the recording file, space-separated
// with no leading/trailing separator. NOTE(review): the first fprintf is not
// visibly guarded by `count > 0` in this extraction — if the guard was
// dropped, pItems[0] would be read even for an empty list; verify upstream.
13942 void VmaRecorder::PrintPointerList(uint64_t count,
const VmaAllocation* pItems)
13946 fprintf(m_File,
"%p", pItems[0]);
// Remaining items are prefixed with a single space.
13947 for(uint64_t i = 1; i < count; ++i)
13949 fprintf(m_File,
" %p", pItems[i]);
// Flushes the recording file; the body (lines 13955-13961) is missing from
// this extraction.
13954 void VmaRecorder::Flush()
// VmaAllocator_T constructor (its signature line and several member
// initializers are missing from this extraction — numbering starts mid
// init-list at 13970). Initializes member state, validates debug-macro
// preconditions, applies per-heap size limits, creates one default
// VmaBlockVector and dedicated-allocation vector per memory type, and
// optionally starts the call recorder.
13962 #endif // #if VMA_RECORDING_ENABLED 13970 m_hDevice(pCreateInfo->device),
13971 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
// Fall back to an empty callbacks struct when the user supplied none.
13972 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
13973 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
13974 m_PreferredLargeHeapBlockSize(0),
13975 m_PhysicalDevice(pCreateInfo->physicalDevice),
13976 m_CurrentFrameIndex(0),
13977 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
13980 ,m_pRecorder(VMA_NULL)
// Corruption detection writes uint32_t magic values into the margins, so the
// margin must be a multiple of 4 bytes.
13983 if(VMA_DEBUG_DETECT_CORRUPTION)
13986 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
// Dedicated-allocation flag requested while the extension support was
// compiled out — fail loudly in debug builds.
13991 #if !(VMA_DEDICATED_ALLOCATION) 13994 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
// Zero-initialize POD member state before filling it in below.
13998 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
13999 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
14000 memset(&m_MemProps, 0,
sizeof(m_MemProps));
14002 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
14003 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// Default: no per-heap size limit.
14005 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
14007 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
// Query device properties through the (possibly user-supplied) function
// pointers imported earlier.
14018 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
14019 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Sanity checks: alignments/granularities must be powers of two.
14021 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
14022 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
14023 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
14024 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
// Apply optional user-imposed per-heap size limits, also clamping the
// reported heap size so block-size heuristics respect the limit.
// NOTE(review): the null-check of pCreateInfo->pHeapSizeLimit that should
// guard this loop is missing from this extraction.
14031 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
14033 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
14034 if(limit != VK_WHOLE_SIZE)
14036 m_HeapSizeLimit[heapIndex] = limit;
14037 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
14039 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default block vector and one dedicated-allocation list per memory type.
14045 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14047 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
14049 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
14052 preferredBlockSize,
14055 GetBufferImageGranularity(),
14062 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
14069 VkResult res = VK_SUCCESS;
// Optional recording: construct the recorder, write the configuration
// header, and log the allocator-creation event. If recording was requested
// but compiled out, report VK_ERROR_FEATURE_NOT_PRESENT.
14074 #if VMA_RECORDING_ENABLED 14075 m_pRecorder = vma_new(
this, VmaRecorder)();
14077 if(res != VK_SUCCESS)
14081 m_pRecorder->WriteConfiguration(
14082 m_PhysicalDeviceProperties,
14084 m_UseKhrDedicatedAllocation);
14085 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
14087 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
14088 return VK_ERROR_FEATURE_NOT_PRESENT;
// Destructor: logs allocator destruction to the recorder (if any), asserts
// that all user-created pools were destroyed, then frees the per-memory-type
// dedicated-allocation lists and block vectors in reverse order.
14095 VmaAllocator_T::~VmaAllocator_T()
14097 #if VMA_RECORDING_ENABLED 14098 if(m_pRecorder != VMA_NULL)
14100 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
14101 vma_delete(
this, m_pRecorder);
// Leak check: destroying the allocator with live pools is a usage error.
14105 VMA_ASSERT(m_Pools.empty());
14107 for(
size_t i = GetMemoryTypeCount(); i--; )
14109 vma_delete(
this, m_pDedicatedAllocations[i]);
14110 vma_delete(
this, m_pBlockVectors[i]);
// Populates m_VulkanFunctions in three stages:
//   1. When statically linked (VMA_STATIC_VULKAN_FUNCTIONS == 1), take the
//      addresses of the global Vulkan entry points; the two KHR dedicated-
//      allocation functions are fetched via vkGetDeviceProcAddr.
//   2. Overwrite with any non-null pointers the user supplied.
//   3. Assert every required pointer ended up non-null.
14114 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
14116 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14117 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
14118 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
14119 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
14120 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
14121 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
14122 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
14123 m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
14124 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
14125 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
14126 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
14127 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
14128 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
14129 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
14130 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
14131 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
14132 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
14133 m_VulkanFunctions.vkCmdCopyBuffer = &vkCmdCopyBuffer;
// Extension entry points are not exported globally; query them per-device.
14134 #if VMA_DEDICATED_ALLOCATION 14135 if(m_UseKhrDedicatedAllocation)
14137 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
14138 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
14139 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
14140 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// User-supplied pointers take precedence over statically-linked ones.
14142 #endif // #if VMA_DEDICATED_ALLOCATION 14143 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14145 #define VMA_COPY_IF_NOT_NULL(funcName) \ 14146 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 14148 if(pVulkanFunctions != VMA_NULL)
14150 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
14151 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
14152 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
14153 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
14154 VMA_COPY_IF_NOT_NULL(vkMapMemory);
14155 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
14156 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
14157 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
14158 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
14159 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
14160 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
14161 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
14162 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
14163 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
14164 VMA_COPY_IF_NOT_NULL(vkCreateImage);
14165 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
14166 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
14167 #if VMA_DEDICATED_ALLOCATION 14168 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
14169 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Final validation: whatever combination of static linking and user input
// was used, all core function pointers must now be set.
14173 #undef VMA_COPY_IF_NOT_NULL 14177 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
14178 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
14179 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
14180 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
14181 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
14182 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
14183 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
14184 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
14185 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
14186 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
14187 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
14188 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
14189 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
14190 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
14191 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
14192 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
14193 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
// The KHR pointers are only required when the extension is actually in use.
14194 #if VMA_DEDICATED_ALLOCATION 14195 if(m_UseKhrDedicatedAllocation)
14197 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
14198 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
// Chooses the preferred VkDeviceMemory block size for a memory type:
// for "small" heaps (<= VMA_SMALL_HEAP_MAX_SIZE) use 1/8 of the heap so a
// few blocks don't exhaust it; otherwise use the configured large-heap size.
14203 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
14205 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14206 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
14207 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
14208 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates from one specific memory type: decides between dedicated
// VkDeviceMemory and suballocation from this type's default block vector,
// preferring dedicated memory when forced by debug macro, requested by the
// caller, or when the size exceeds half the preferred block size. Falls back
// to dedicated memory if the block vector fails.
// NOTE(review): many lines are missing from this extraction (e.g. the
// finalCreateInfo setup around 14226-14233 and most call-argument lists), so
// the visible flow is incomplete — verify against the upstream file.
14211 VkResult VmaAllocator_T::AllocateMemoryOfType(
14213 VkDeviceSize alignment,
14214 bool dedicatedAllocation,
14215 VkBuffer dedicatedBuffer,
14216 VkImage dedicatedImage,
14218 uint32_t memTypeIndex,
14219 VmaSuballocationType suballocType,
14220 size_t allocationCount,
14223 VMA_ASSERT(pAllocations != VMA_NULL);
14224 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, vkMemReq.size);
// Mapping is only meaningful on HOST_VISIBLE memory; this condition guards a
// (partially elided) adjustment of the create flags.
14230 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
14235 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
14236 VMA_ASSERT(blockVector);
14238 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
// Heuristic: dedicate when forced, explicitly requested, or "large"
// (more than half a block).
14239 bool preferDedicatedMemory =
14240 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
14241 dedicatedAllocation ||
14243 size > preferredBlockSize / 2;
// Dedicated path is only taken for the default pool (pool == VK_NULL_HANDLE).
14245 if(preferDedicatedMemory &&
14247 finalCreateInfo.
pool == VK_NULL_HANDLE)
14256 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14260 return AllocateDedicatedMemory(
// Otherwise suballocate from the per-type block vector.
14275 VkResult res = blockVector->Allocate(
14277 m_CurrentFrameIndex.load(),
14284 if(res == VK_SUCCESS)
// NEVER_ALLOCATE-style failure path (condition elided in this extraction).
14292 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: block allocation failed, try dedicated memory.
14296 res = AllocateDedicatedMemory(
14302 finalCreateInfo.pUserData,
14307 if(res == VK_SUCCESS)
14310 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
14316 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates `allocationCount` separate VkDeviceMemory objects (one per
// allocation). On full success, registers each allocation in the sorted
// per-memory-type dedicated-allocation list; on partial failure, rolls back
// every page already created (free memory, clear user data, delete object)
// and zeroes the output array.
14323 VkResult VmaAllocator_T::AllocateDedicatedMemory(
14325 VmaSuballocationType suballocType,
14326 uint32_t memTypeIndex,
14328 bool isUserDataString,
14330 VkBuffer dedicatedBuffer,
14331 VkImage dedicatedImage,
14332 size_t allocationCount,
14335 VMA_ASSERT(allocationCount > 0 && pAllocations);
14337 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
14338 allocInfo.memoryTypeIndex = memTypeIndex;
14339 allocInfo.allocationSize = size;
// Chain VkMemoryDedicatedAllocateInfoKHR when the KHR extension is in use
// and a buffer or image handle was provided (they are mutually exclusive).
14341 #if VMA_DEDICATED_ALLOCATION 14342 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
14343 if(m_UseKhrDedicatedAllocation)
14345 if(dedicatedBuffer != VK_NULL_HANDLE)
14347 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
14348 dedicatedAllocInfo.buffer = dedicatedBuffer;
14349 allocInfo.pNext = &dedicatedAllocInfo;
14351 else if(dedicatedImage != VK_NULL_HANDLE)
14353 dedicatedAllocInfo.image = dedicatedImage;
14354 allocInfo.pNext = &dedicatedAllocInfo;
// Allocate each page; stop at the first failure.
14357 #endif // #if VMA_DEDICATED_ALLOCATION 14361 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14363 res = AllocateDedicatedMemoryPage(
14371 pAllocations + allocIndex);
14372 if(res != VK_SUCCESS)
// Success: record all new allocations in the sorted dedicated list, under
// the per-memory-type write lock.
14378 if(res == VK_SUCCESS)
14382 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14383 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
14384 VMA_ASSERT(pDedicatedAllocations);
14385 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14387 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
14391 VMA_DEBUG_LOG(
" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
// Failure: unwind the pages that did succeed, in reverse order.
14396 while(allocIndex--)
14399 VkDeviceMemory hMemory = currAlloc->GetMemory();
14411 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
14413 currAlloc->SetUserData(
this, VMA_NULL);
14414 vma_delete(
this, currAlloc);
// Leave the output array in a well-defined (all-null) state on failure.
14417 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
// Allocates a single dedicated VkDeviceMemory, optionally maps it
// persistently (the flag check around line 14442 is elided in this
// extraction), then wraps it in a new VmaAllocation_T. On map failure the
// device memory is freed before returning.
14423 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
14425 VmaSuballocationType suballocType,
14426 uint32_t memTypeIndex,
14427 const VkMemoryAllocateInfo& allocInfo,
14429 bool isUserDataString,
14433 VkDeviceMemory hMemory = VK_NULL_HANDLE;
14434 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
14437 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
14441 void* pMappedData = VMA_NULL;
// Persistent mapping requested: map the whole range now.
14444 res = (*m_VulkanFunctions.vkMapMemory)(
14453 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
// Don't leak the freshly-allocated device memory on map failure.
14454 FreeVulkanMemory(memTypeIndex, size, hMemory);
// Wrap the raw memory in an allocation object and attach user data.
14459 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
14460 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
14461 (*pAllocation)->SetUserData(
this, pUserData);
// Optionally fill new memory with a debug pattern to catch use-before-init.
14462 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14464 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Queries memory requirements for a buffer. When VK_KHR_dedicated_allocation
// is in use, the KHR2 query also reports whether a dedicated allocation is
// required/preferred; otherwise the plain query is used and both flags are
// reported as false.
14470 void VmaAllocator_T::GetBufferMemoryRequirements(
14472 VkMemoryRequirements& memReq,
14473 bool& requiresDedicatedAllocation,
14474 bool& prefersDedicatedAllocation)
const 14476 #if VMA_DEDICATED_ALLOCATION 14477 if(m_UseKhrDedicatedAllocation)
14479 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
14480 memReqInfo.buffer = hBuffer;
// Chain VkMemoryDedicatedRequirementsKHR to receive the dedication hints.
14482 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14484 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14485 memReq2.pNext = &memDedicatedReq;
14487 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14489 memReq = memReq2.memoryRequirements;
// Normalize VkBool32 to bool.
14490 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14491 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core Vulkan query, no dedication information available.
14494 #endif // #if VMA_DEDICATED_ALLOCATION 14496 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
14497 requiresDedicatedAllocation =
false;
14498 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses the KHR2 query with
// a chained VkMemoryDedicatedRequirementsKHR when the extension is in use,
// otherwise the core query with both dedication flags reported false.
14502 void VmaAllocator_T::GetImageMemoryRequirements(
14504 VkMemoryRequirements& memReq,
14505 bool& requiresDedicatedAllocation,
14506 bool& prefersDedicatedAllocation)
const 14508 #if VMA_DEDICATED_ALLOCATION 14509 if(m_UseKhrDedicatedAllocation)
14511 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
14512 memReqInfo.image = hImage;
14514 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14516 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14517 memReq2.pNext = &memDedicatedReq;
14519 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14521 memReq = memReq2.memoryRequirements;
14522 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14523 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
14526 #endif // #if VMA_DEDICATED_ALLOCATION 14528 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
14529 requiresDedicatedAllocation =
false;
14530 prefersDedicatedAllocation =
false;
// Top-level allocation entry: validates flag combinations, routes pool
// allocations to the pool's block vector, and otherwise iterates candidate
// memory types (best first), retrying with the failed type masked out of
// memoryTypeBits until success or exhaustion.
// NOTE(review): several condition lines and call-argument lists are elided in
// this extraction (e.g. the flag tests guarding the VMA_ASSERTs and the
// vmaFindMemoryTypeIndex calls that set `res`/`memTypeIndex`).
14534 VkResult VmaAllocator_T::AllocateMemory(
14535 const VkMemoryRequirements& vkMemReq,
14536 bool requiresDedicatedAllocation,
14537 bool prefersDedicatedAllocation,
14538 VkBuffer dedicatedBuffer,
14539 VkImage dedicatedImage,
14541 VmaSuballocationType suballocType,
14542 size_t allocationCount,
// Start from a well-defined all-null output array.
14545 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
14547 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
// Zero-size allocations are a caller error.
14549 if(vkMemReq.size == 0)
14551 return VK_ERROR_VALIDATION_FAILED_EXT;
// Invalid flag combinations: each asserts in debug and fails in release.
14556 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
14557 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14562 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
14563 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// A dedicated allocation cannot be refused (NEVER_ALLOCATE) nor come from a
// custom pool.
14565 if(requiresDedicatedAllocation)
14569 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
14570 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14572 if(createInfo.
pool != VK_NULL_HANDLE)
14574 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
14575 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14578 if((createInfo.
pool != VK_NULL_HANDLE) &&
14581 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
14582 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom pool path: delegate directly to the pool's block vector, honoring
// the memory type's minimum alignment.
14585 if(createInfo.
pool != VK_NULL_HANDLE)
14587 const VkDeviceSize alignmentForPool = VMA_MAX(
14588 vkMemReq.alignment,
14589 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
14590 return createInfo.
pool->m_BlockVector.Allocate(
14592 m_CurrentFrameIndex.load(),
// Default-pool path: try memory types in preference order.
14603 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
14604 uint32_t memTypeIndex = UINT32_MAX;
14606 if(res == VK_SUCCESS)
14608 VkDeviceSize alignmentForMemType = VMA_MAX(
14609 vkMemReq.alignment,
14610 GetMemoryTypeMinAlignment(memTypeIndex));
14612 res = AllocateMemoryOfType(
14614 alignmentForMemType,
14615 requiresDedicatedAllocation || prefersDedicatedAllocation,
14624 if(res == VK_SUCCESS)
// This type failed — mask it out and look for the next candidate.
14634 memoryTypeBits &= ~(1u << memTypeIndex);
14637 if(res == VK_SUCCESS)
14639 alignmentForMemType = VMA_MAX(
14640 vkMemReq.alignment,
14641 GetMemoryTypeMinAlignment(memTypeIndex));
14643 res = AllocateMemoryOfType(
14645 alignmentForMemType,
14646 requiresDedicatedAllocation || prefersDedicatedAllocation,
14655 if(res == VK_SUCCESS)
// All candidate memory types exhausted.
14665 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an array of allocations in reverse order. Each live (non-lost)
// allocation is optionally overwritten with the "destroyed" debug pattern,
// then returned to its owning block vector (custom pool or default per-type
// vector) or, for dedicated allocations, freed directly. Finally the
// allocation object itself is cleared and deleted.
14676 void VmaAllocator_T::FreeMemory(
14677 size_t allocationCount,
14680 VMA_ASSERT(pAllocations);
// Reverse order mirrors the allocation order used elsewhere in the file.
14682 for(
size_t allocIndex = allocationCount; allocIndex--; )
14686 if(allocation != VK_NULL_HANDLE)
// TouchAllocation returns whether the allocation is still live; lost
// allocations have no backing memory to return.
14688 if(TouchAllocation(allocation))
14690 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14692 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED)
14695 switch(allocation->GetType())
14697 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14699 VmaBlockVector* pBlockVector = VMA_NULL;
14700 VmaPool hPool = allocation->GetPool();
// Custom-pool allocations go back to the pool's vector; others to the
// default vector for their memory type.
14701 if(hPool != VK_NULL_HANDLE)
14703 pBlockVector = &hPool->m_BlockVector;
14707 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
14708 pBlockVector = m_pBlockVectors[memTypeIndex];
14710 pBlockVector->Free(allocation);
14713 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14714 FreeDedicatedMemory(allocation);
// Destroy the allocation object itself regardless of its previous state.
14721 allocation->SetUserData(
this, VMA_NULL);
14722 vma_delete(
this, allocation);
// Attempts in-place resize of a block suballocation via the block's metadata.
// Zero size or a lost allocation is a validation error; an unchanged size is
// a no-op; dedicated allocations cannot be resized.
14727 VkResult VmaAllocator_T::ResizeAllocation(
14729 VkDeviceSize newSize)
14731 if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
14733 return VK_ERROR_VALIDATION_FAILED_EXT;
14735 if(newSize == alloc->GetSize())
14740 switch(alloc->GetType())
14742 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14743 return VK_ERROR_FEATURE_NOT_PRESENT;
14744 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// Metadata decides whether the neighboring space allows the resize.
14745 if(alloc->GetBlock()->m_pMetadata->ResizeAllocation(alloc, newSize))
14747 alloc->ChangeSize(newSize);
14748 VMA_HEAVY_ASSERT(alloc->GetBlock()->m_pMetadata->Validate());
14753 return VK_ERROR_OUT_OF_POOL_MEMORY;
14757 return VK_ERROR_VALIDATION_FAILED_EXT;
// Aggregates allocator-wide statistics into pStats: initializes all
// StatInfo entries, sums stats from the default block vectors, all custom
// pools, and every dedicated allocation, then postprocesses (averages etc.)
// the totals and the per-type/per-heap entries.
14761 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Reset totals and per-type/per-heap entries before accumulation.
14764 InitStatInfo(pStats->
total);
14765 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
14767 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default pools: one block vector per memory type.
14771 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14773 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
14774 VMA_ASSERT(pBlockVector);
14775 pBlockVector->AddStats(pStats);
// Custom pools, under the pools read lock.
14780 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
14781 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
14783 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
// Dedicated allocations: per-allocation StatInfo added to total, its memory
// type, and its heap.
14788 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14790 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14791 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14792 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
14793 VMA_ASSERT(pDedicatedAllocVector);
14794 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
14797 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
14798 VmaAddStatInfo(pStats->
total, allocationStatInfo);
14799 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
14800 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages/min/max finalization for every aggregate.
14805 VmaPostprocessCalcStatInfo(pStats->
total);
14806 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
14807 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
14808 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
14809 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// PCI vendor id used elsewhere to special-case AMD hardware.
14812 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Creates a defragmentation context, registers the caller's allocations, and
// runs Defragment(). If it finished synchronously (anything but
// VK_NOT_READY) the context is destroyed immediately and *pContext nulled;
// VK_NOT_READY means the context stays alive until DefragmentationEnd.
14814 VkResult VmaAllocator_T::DefragmentationBegin(
14824 *pContext = vma_new(
this, VmaDefragmentationContext_T)(
14825 this, m_CurrentFrameIndex.load(), info.
flags, pStats);
14828 (*pContext)->AddAllocations(
14831 VkResult res = (*pContext)->Defragment(
14836 if(res != VK_NOT_READY)
14838 vma_delete(
this, *pContext);
14839 *pContext = VMA_NULL;
// Destroys a defragmentation context previously returned as VK_NOT_READY by
// DefragmentationBegin.
14845 VkResult VmaAllocator_T::DefragmentationEnd(
14848 vma_delete(
this, context);
// Body of VmaAllocator_T::GetAllocationInfo (its signature line is missing
// from this extraction). Fills pAllocationInfo from the allocation. For
// can-become-lost allocations it loops (loop header elided) on a
// compare-exchange of the last-use frame index: a lost allocation reports
// null memory/offset 0, a current one reports real values, otherwise the
// frame index is advanced and the state re-read.
14854 if(hAllocation->CanBecomeLost())
14860 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
14861 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost: report size and user data but no backing memory.
14864 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
14868 pAllocationInfo->
offset = 0;
14869 pAllocationInfo->
size = hAllocation->GetSize();
14871 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report the live values.
14874 else if(localLastUseFrameIndex == localCurrFrameIndex)
14876 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
14877 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
14878 pAllocationInfo->
offset = hAllocation->GetOffset();
14879 pAllocationInfo->
size = hAllocation->GetSize();
14881 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise try to claim the current frame; on failure another thread
// changed the index and the loop retries with the new value.
14886 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
14888 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost path: with stats enabled, still advance the last-use frame index
// (so statistics reflect usage), then report live values unconditionally.
14895 #if VMA_STATS_STRING_ENABLED 14896 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
14897 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
14900 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
14901 if(localLastUseFrameIndex == localCurrFrameIndex)
14907 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
14909 localLastUseFrameIndex = localCurrFrameIndex;
14915 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
14916 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
14917 pAllocationInfo->
offset = hAllocation->GetOffset();
14918 pAllocationInfo->
size = hAllocation->GetSize();
14919 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
14920 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks the allocation as used in the current frame and reports liveness.
// Mirrors GetAllocationInfo's compare-exchange loop (loop header and return
// statements elided in this extraction): lost allocations and those already
// touched this frame exit early; otherwise the last-use frame index is
// advanced via CAS and the loop retries on contention.
14924 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
14927 if(hAllocation->CanBecomeLost())
14929 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
14930 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
14933 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
14937 else if(localLastUseFrameIndex == localCurrFrameIndex)
14943 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
14945 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost path: with stats enabled, still advance the frame index for
// bookkeeping before returning.
14952 #if VMA_STATS_STRING_ENABLED 14953 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
14954 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
14957 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
14958 if(localLastUseFrameIndex == localCurrFrameIndex)
14964 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
14966 localLastUseFrameIndex = localCurrFrameIndex;
// Body of VmaAllocator_T::CreatePool (signature and parameter-validation
// lines are missing from this extraction). Computes the preferred block size
// for the pool's memory type, constructs the VmaPool_T, pre-creates its
// minimum block count, and on success registers the pool (sorted, with a
// fresh id) under the pools write lock.
14978 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
// Invalid create info (condition elided above in the extraction).
14988 return VK_ERROR_INITIALIZATION_FAILED;
14991 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
14993 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
// Eagerly allocate minBlockCount blocks; destroy the pool if that fails.
14995 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
14996 if(res != VK_SUCCESS)
14998 vma_delete(
this, *pPool);
15005 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15006 (*pPool)->SetId(m_NextPoolId++);
15007 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Unregisters the pool from the sorted pools list (under the write lock) and
// destroys it. Destroying a pool not created by this allocator asserts.
15013 void VmaAllocator_T::DestroyPool(
VmaPool pool)
15017 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15018 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
15019 VMA_ASSERT(success &&
"Pool not found in Allocator.");
15022 vma_delete(
this, pool);
// Body line of VmaAllocator_T::GetPoolStats (signature missing from this
// extraction): delegates to the pool's block vector.
15027 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Atomically publishes the application's current frame index, used by the
// lost-allocation machinery.
15030 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
15032 m_CurrentFrameIndex.store(frameIndex);
// Marks eligible allocations in the pool as lost as of the current frame;
// the number of allocations lost is returned through pLostAllocationCount.
15035 void VmaAllocator_T::MakePoolAllocationsLost(
15037 size_t* pLostAllocationCount)
15039 hPool->m_BlockVector.MakePoolAllocationsLost(
15040 m_CurrentFrameIndex.load(),
15041 pLostAllocationCount);
// Validates margin magic values for every block of one custom pool.
15044 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
15046 return hPool->m_BlockVector.CheckCorruption();
// Runs corruption checks across the default block vectors of all memory
// types selected by memoryTypeBits, then across all custom pools. Starts
// from VK_ERROR_FEATURE_NOT_PRESENT (nothing checkable) and upgrades to
// VK_SUCCESS once any vector could be checked; other error codes come from
// the (elided) switch arms.
15049 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
15051 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default block vectors, filtered by the requested memory-type mask.
15054 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15056 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
15058 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
15059 VMA_ASSERT(pBlockVector);
15060 VkResult localRes = pBlockVector->CheckCorruption();
15063 case VK_ERROR_FEATURE_NOT_PRESENT:
15066 finalRes = VK_SUCCESS;
// Custom pools, under the pools read lock, same mask filter.
15076 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15077 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
15079 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
15081 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
15084 case VK_ERROR_FEATURE_NOT_PRESENT:
15087 finalRes = VK_SUCCESS;
15099 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
15101 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
15102 (*pAllocation)->InitLost();
15105 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
15107 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
15110 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15112 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15113 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
15115 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15116 if(res == VK_SUCCESS)
15118 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
15123 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
15128 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15131 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
15133 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
15139 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
15141 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
15143 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
15146 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
15148 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
15149 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15151 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15152 m_HeapSizeLimit[heapIndex] += size;
15156 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
15158 if(hAllocation->CanBecomeLost())
15160 return VK_ERROR_MEMORY_MAP_FAILED;
15163 switch(hAllocation->GetType())
15165 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15167 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15168 char *pBytes = VMA_NULL;
15169 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
15170 if(res == VK_SUCCESS)
15172 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
15173 hAllocation->BlockAllocMap();
15177 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15178 return hAllocation->DedicatedAllocMap(
this, ppData);
15181 return VK_ERROR_MEMORY_MAP_FAILED;
15187 switch(hAllocation->GetType())
15189 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15191 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15192 hAllocation->BlockAllocUnmap();
15193 pBlock->Unmap(
this, 1);
15196 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15197 hAllocation->DedicatedAllocUnmap(
this);
15204 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
15206 VkResult res = VK_SUCCESS;
15207 switch(hAllocation->GetType())
15209 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15210 res = GetVulkanFunctions().vkBindBufferMemory(
15213 hAllocation->GetMemory(),
15216 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15218 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15219 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
15220 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
15229 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
15231 VkResult res = VK_SUCCESS;
15232 switch(hAllocation->GetType())
15234 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15235 res = GetVulkanFunctions().vkBindImageMemory(
15238 hAllocation->GetMemory(),
15241 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15243 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15244 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
15245 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
15254 void VmaAllocator_T::FlushOrInvalidateAllocation(
15256 VkDeviceSize offset, VkDeviceSize size,
15257 VMA_CACHE_OPERATION op)
15259 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
15260 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
15262 const VkDeviceSize allocationSize = hAllocation->GetSize();
15263 VMA_ASSERT(offset <= allocationSize);
15265 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
15267 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
15268 memRange.memory = hAllocation->GetMemory();
15270 switch(hAllocation->GetType())
15272 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15273 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15274 if(size == VK_WHOLE_SIZE)
15276 memRange.size = allocationSize - memRange.offset;
15280 VMA_ASSERT(offset + size <= allocationSize);
15281 memRange.size = VMA_MIN(
15282 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
15283 allocationSize - memRange.offset);
15287 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15290 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15291 if(size == VK_WHOLE_SIZE)
15293 size = allocationSize - offset;
15297 VMA_ASSERT(offset + size <= allocationSize);
15299 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
15302 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
15303 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
15304 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
15305 memRange.offset += allocationOffset;
15306 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
15317 case VMA_CACHE_FLUSH:
15318 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
15320 case VMA_CACHE_INVALIDATE:
15321 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
15330 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
15332 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
15334 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
15336 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15337 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
15338 VMA_ASSERT(pDedicatedAllocations);
15339 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
15340 VMA_ASSERT(success);
15343 VkDeviceMemory hMemory = allocation->GetMemory();
15355 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
15357 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
15360 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
15362 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
15363 !hAllocation->CanBecomeLost() &&
15364 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15366 void* pData = VMA_NULL;
15367 VkResult res = Map(hAllocation, &pData);
15368 if(res == VK_SUCCESS)
15370 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
15371 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
15372 Unmap(hAllocation);
15376 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
15381 #if VMA_STATS_STRING_ENABLED 15383 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
15385 bool dedicatedAllocationsStarted =
false;
15386 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15388 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15389 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
15390 VMA_ASSERT(pDedicatedAllocVector);
15391 if(pDedicatedAllocVector->empty() ==
false)
15393 if(dedicatedAllocationsStarted ==
false)
15395 dedicatedAllocationsStarted =
true;
15396 json.WriteString(
"DedicatedAllocations");
15397 json.BeginObject();
15400 json.BeginString(
"Type ");
15401 json.ContinueString(memTypeIndex);
15406 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
15408 json.BeginObject(
true);
15410 hAlloc->PrintParameters(json);
15417 if(dedicatedAllocationsStarted)
15423 bool allocationsStarted =
false;
15424 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15426 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
15428 if(allocationsStarted ==
false)
15430 allocationsStarted =
true;
15431 json.WriteString(
"DefaultPools");
15432 json.BeginObject();
15435 json.BeginString(
"Type ");
15436 json.ContinueString(memTypeIndex);
15439 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
15442 if(allocationsStarted)
15450 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15451 const size_t poolCount = m_Pools.size();
15454 json.WriteString(
"Pools");
15455 json.BeginObject();
15456 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
15458 json.BeginString();
15459 json.ContinueString(m_Pools[poolIndex]->GetId());
15462 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
15469 #endif // #if VMA_STATS_STRING_ENABLED 15478 VMA_ASSERT(pCreateInfo && pAllocator);
15479 VMA_DEBUG_LOG(
"vmaCreateAllocator");
15481 return (*pAllocator)->Init(pCreateInfo);
15487 if(allocator != VK_NULL_HANDLE)
15489 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
15490 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
15491 vma_delete(&allocationCallbacks, allocator);
15497 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
15499 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
15500 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
15505 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
15507 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
15508 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
15513 uint32_t memoryTypeIndex,
15514 VkMemoryPropertyFlags* pFlags)
15516 VMA_ASSERT(allocator && pFlags);
15517 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
15518 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
15523 uint32_t frameIndex)
15525 VMA_ASSERT(allocator);
15526 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
15528 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15530 allocator->SetCurrentFrameIndex(frameIndex);
15537 VMA_ASSERT(allocator && pStats);
15538 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15539 allocator->CalculateStats(pStats);
15542 #if VMA_STATS_STRING_ENABLED 15546 char** ppStatsString,
15547 VkBool32 detailedMap)
15549 VMA_ASSERT(allocator && ppStatsString);
15550 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15552 VmaStringBuilder sb(allocator);
15554 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
15555 json.BeginObject();
15558 allocator->CalculateStats(&stats);
15560 json.WriteString(
"Total");
15561 VmaPrintStatInfo(json, stats.
total);
15563 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
15565 json.BeginString(
"Heap ");
15566 json.ContinueString(heapIndex);
15568 json.BeginObject();
15570 json.WriteString(
"Size");
15571 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
15573 json.WriteString(
"Flags");
15574 json.BeginArray(
true);
15575 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
15577 json.WriteString(
"DEVICE_LOCAL");
15583 json.WriteString(
"Stats");
15584 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
15587 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
15589 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
15591 json.BeginString(
"Type ");
15592 json.ContinueString(typeIndex);
15595 json.BeginObject();
15597 json.WriteString(
"Flags");
15598 json.BeginArray(
true);
15599 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
15600 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
15602 json.WriteString(
"DEVICE_LOCAL");
15604 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15606 json.WriteString(
"HOST_VISIBLE");
15608 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
15610 json.WriteString(
"HOST_COHERENT");
15612 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
15614 json.WriteString(
"HOST_CACHED");
15616 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
15618 json.WriteString(
"LAZILY_ALLOCATED");
15624 json.WriteString(
"Stats");
15625 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
15634 if(detailedMap == VK_TRUE)
15636 allocator->PrintDetailedMap(json);
15642 const size_t len = sb.GetLength();
15643 char*
const pChars = vma_new_array(allocator,
char, len + 1);
15646 memcpy(pChars, sb.GetData(), len);
15648 pChars[len] =
'\0';
15649 *ppStatsString = pChars;
15654 char* pStatsString)
15656 if(pStatsString != VMA_NULL)
15658 VMA_ASSERT(allocator);
15659 size_t len = strlen(pStatsString);
15660 vma_delete_array(allocator, pStatsString, len + 1);
15664 #endif // #if VMA_STATS_STRING_ENABLED 15671 uint32_t memoryTypeBits,
15673 uint32_t* pMemoryTypeIndex)
15675 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15676 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15677 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15684 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
15685 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
15690 preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15694 switch(pAllocationCreateInfo->
usage)
15699 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15701 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15705 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
15708 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15709 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15711 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15715 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15716 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
15722 *pMemoryTypeIndex = UINT32_MAX;
15723 uint32_t minCost = UINT32_MAX;
15724 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
15725 memTypeIndex < allocator->GetMemoryTypeCount();
15726 ++memTypeIndex, memTypeBit <<= 1)
15729 if((memTypeBit & memoryTypeBits) != 0)
15731 const VkMemoryPropertyFlags currFlags =
15732 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
15734 if((requiredFlags & ~currFlags) == 0)
15737 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
15739 if(currCost < minCost)
15741 *pMemoryTypeIndex = memTypeIndex;
15746 minCost = currCost;
15751 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
15756 const VkBufferCreateInfo* pBufferCreateInfo,
15758 uint32_t* pMemoryTypeIndex)
15760 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15761 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
15762 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15763 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15765 const VkDevice hDev = allocator->m_hDevice;
15766 VkBuffer hBuffer = VK_NULL_HANDLE;
15767 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
15768 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
15769 if(res == VK_SUCCESS)
15771 VkMemoryRequirements memReq = {};
15772 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
15773 hDev, hBuffer, &memReq);
15777 memReq.memoryTypeBits,
15778 pAllocationCreateInfo,
15781 allocator->GetVulkanFunctions().vkDestroyBuffer(
15782 hDev, hBuffer, allocator->GetAllocationCallbacks());
15789 const VkImageCreateInfo* pImageCreateInfo,
15791 uint32_t* pMemoryTypeIndex)
15793 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15794 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
15795 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15796 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15798 const VkDevice hDev = allocator->m_hDevice;
15799 VkImage hImage = VK_NULL_HANDLE;
15800 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
15801 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
15802 if(res == VK_SUCCESS)
15804 VkMemoryRequirements memReq = {};
15805 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
15806 hDev, hImage, &memReq);
15810 memReq.memoryTypeBits,
15811 pAllocationCreateInfo,
15814 allocator->GetVulkanFunctions().vkDestroyImage(
15815 hDev, hImage, allocator->GetAllocationCallbacks());
15825 VMA_ASSERT(allocator && pCreateInfo && pPool);
15827 VMA_DEBUG_LOG(
"vmaCreatePool");
15829 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15831 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
15833 #if VMA_RECORDING_ENABLED 15834 if(allocator->GetRecorder() != VMA_NULL)
15836 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
15847 VMA_ASSERT(allocator);
15849 if(pool == VK_NULL_HANDLE)
15854 VMA_DEBUG_LOG(
"vmaDestroyPool");
15856 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15858 #if VMA_RECORDING_ENABLED 15859 if(allocator->GetRecorder() != VMA_NULL)
15861 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
15865 allocator->DestroyPool(pool);
15873 VMA_ASSERT(allocator && pool && pPoolStats);
15875 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15877 allocator->GetPoolStats(pool, pPoolStats);
15883 size_t* pLostAllocationCount)
15885 VMA_ASSERT(allocator && pool);
15887 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15889 #if VMA_RECORDING_ENABLED 15890 if(allocator->GetRecorder() != VMA_NULL)
15892 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
15896 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
15901 VMA_ASSERT(allocator && pool);
15903 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15905 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
15907 return allocator->CheckPoolCorruption(pool);
15912 const VkMemoryRequirements* pVkMemoryRequirements,
15917 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
15919 VMA_DEBUG_LOG(
"vmaAllocateMemory");
15921 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15923 VkResult result = allocator->AllocateMemory(
15924 *pVkMemoryRequirements,
15930 VMA_SUBALLOCATION_TYPE_UNKNOWN,
15934 #if VMA_RECORDING_ENABLED 15935 if(allocator->GetRecorder() != VMA_NULL)
15937 allocator->GetRecorder()->RecordAllocateMemory(
15938 allocator->GetCurrentFrameIndex(),
15939 *pVkMemoryRequirements,
15945 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
15947 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
15955 const VkMemoryRequirements* pVkMemoryRequirements,
15957 size_t allocationCount,
15961 if(allocationCount == 0)
15966 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
15968 VMA_DEBUG_LOG(
"vmaAllocateMemoryPages");
15970 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15972 VkResult result = allocator->AllocateMemory(
15973 *pVkMemoryRequirements,
15979 VMA_SUBALLOCATION_TYPE_UNKNOWN,
15983 #if VMA_RECORDING_ENABLED 15984 if(allocator->GetRecorder() != VMA_NULL)
15986 allocator->GetRecorder()->RecordAllocateMemoryPages(
15987 allocator->GetCurrentFrameIndex(),
15988 *pVkMemoryRequirements,
15990 (uint64_t)allocationCount,
15995 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
15997 for(
size_t i = 0; i < allocationCount; ++i)
15999 allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
16013 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16015 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
16017 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16019 VkMemoryRequirements vkMemReq = {};
16020 bool requiresDedicatedAllocation =
false;
16021 bool prefersDedicatedAllocation =
false;
16022 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
16023 requiresDedicatedAllocation,
16024 prefersDedicatedAllocation);
16026 VkResult result = allocator->AllocateMemory(
16028 requiresDedicatedAllocation,
16029 prefersDedicatedAllocation,
16033 VMA_SUBALLOCATION_TYPE_BUFFER,
16037 #if VMA_RECORDING_ENABLED 16038 if(allocator->GetRecorder() != VMA_NULL)
16040 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
16041 allocator->GetCurrentFrameIndex(),
16043 requiresDedicatedAllocation,
16044 prefersDedicatedAllocation,
16050 if(pAllocationInfo && result == VK_SUCCESS)
16052 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16065 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16067 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
16069 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16071 VkMemoryRequirements vkMemReq = {};
16072 bool requiresDedicatedAllocation =
false;
16073 bool prefersDedicatedAllocation =
false;
16074 allocator->GetImageMemoryRequirements(image, vkMemReq,
16075 requiresDedicatedAllocation, prefersDedicatedAllocation);
16077 VkResult result = allocator->AllocateMemory(
16079 requiresDedicatedAllocation,
16080 prefersDedicatedAllocation,
16084 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
16088 #if VMA_RECORDING_ENABLED 16089 if(allocator->GetRecorder() != VMA_NULL)
16091 allocator->GetRecorder()->RecordAllocateMemoryForImage(
16092 allocator->GetCurrentFrameIndex(),
16094 requiresDedicatedAllocation,
16095 prefersDedicatedAllocation,
16101 if(pAllocationInfo && result == VK_SUCCESS)
16103 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16113 VMA_ASSERT(allocator);
16115 if(allocation == VK_NULL_HANDLE)
16120 VMA_DEBUG_LOG(
"vmaFreeMemory");
16122 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16124 #if VMA_RECORDING_ENABLED 16125 if(allocator->GetRecorder() != VMA_NULL)
16127 allocator->GetRecorder()->RecordFreeMemory(
16128 allocator->GetCurrentFrameIndex(),
16133 allocator->FreeMemory(
16140 size_t allocationCount,
16143 if(allocationCount == 0)
16148 VMA_ASSERT(allocator);
16150 VMA_DEBUG_LOG(
"vmaFreeMemoryPages");
16152 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16154 #if VMA_RECORDING_ENABLED 16155 if(allocator->GetRecorder() != VMA_NULL)
16157 allocator->GetRecorder()->RecordFreeMemoryPages(
16158 allocator->GetCurrentFrameIndex(),
16159 (uint64_t)allocationCount,
16164 allocator->FreeMemory(allocationCount, pAllocations);
16170 VkDeviceSize newSize)
16172 VMA_ASSERT(allocator && allocation);
16174 VMA_DEBUG_LOG(
"vmaResizeAllocation");
16176 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16178 #if VMA_RECORDING_ENABLED 16179 if(allocator->GetRecorder() != VMA_NULL)
16181 allocator->GetRecorder()->RecordResizeAllocation(
16182 allocator->GetCurrentFrameIndex(),
16188 return allocator->ResizeAllocation(allocation, newSize);
16196 VMA_ASSERT(allocator && allocation && pAllocationInfo);
16198 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16200 #if VMA_RECORDING_ENABLED 16201 if(allocator->GetRecorder() != VMA_NULL)
16203 allocator->GetRecorder()->RecordGetAllocationInfo(
16204 allocator->GetCurrentFrameIndex(),
16209 allocator->GetAllocationInfo(allocation, pAllocationInfo);
16216 VMA_ASSERT(allocator && allocation);
16218 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16220 #if VMA_RECORDING_ENABLED 16221 if(allocator->GetRecorder() != VMA_NULL)
16223 allocator->GetRecorder()->RecordTouchAllocation(
16224 allocator->GetCurrentFrameIndex(),
16229 return allocator->TouchAllocation(allocation);
16237 VMA_ASSERT(allocator && allocation);
16239 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16241 allocation->SetUserData(allocator, pUserData);
16243 #if VMA_RECORDING_ENABLED 16244 if(allocator->GetRecorder() != VMA_NULL)
16246 allocator->GetRecorder()->RecordSetAllocationUserData(
16247 allocator->GetCurrentFrameIndex(),
16258 VMA_ASSERT(allocator && pAllocation);
16260 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
16262 allocator->CreateLostAllocation(pAllocation);
16264 #if VMA_RECORDING_ENABLED 16265 if(allocator->GetRecorder() != VMA_NULL)
16267 allocator->GetRecorder()->RecordCreateLostAllocation(
16268 allocator->GetCurrentFrameIndex(),
16279 VMA_ASSERT(allocator && allocation && ppData);
16281 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16283 VkResult res = allocator->Map(allocation, ppData);
16285 #if VMA_RECORDING_ENABLED 16286 if(allocator->GetRecorder() != VMA_NULL)
16288 allocator->GetRecorder()->RecordMapMemory(
16289 allocator->GetCurrentFrameIndex(),
16301 VMA_ASSERT(allocator && allocation);
16303 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16305 #if VMA_RECORDING_ENABLED 16306 if(allocator->GetRecorder() != VMA_NULL)
16308 allocator->GetRecorder()->RecordUnmapMemory(
16309 allocator->GetCurrentFrameIndex(),
16314 allocator->Unmap(allocation);
16319 VMA_ASSERT(allocator && allocation);
16321 VMA_DEBUG_LOG(
"vmaFlushAllocation");
16323 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16325 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
16327 #if VMA_RECORDING_ENABLED 16328 if(allocator->GetRecorder() != VMA_NULL)
16330 allocator->GetRecorder()->RecordFlushAllocation(
16331 allocator->GetCurrentFrameIndex(),
16332 allocation, offset, size);
16339 VMA_ASSERT(allocator && allocation);
16341 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
16343 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16345 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
16347 #if VMA_RECORDING_ENABLED 16348 if(allocator->GetRecorder() != VMA_NULL)
16350 allocator->GetRecorder()->RecordInvalidateAllocation(
16351 allocator->GetCurrentFrameIndex(),
16352 allocation, offset, size);
16359 VMA_ASSERT(allocator);
16361 VMA_DEBUG_LOG(
"vmaCheckCorruption");
16363 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16365 return allocator->CheckCorruption(memoryTypeBits);
16371 size_t allocationCount,
16372 VkBool32* pAllocationsChanged,
16382 if(pDefragmentationInfo != VMA_NULL)
16396 if(res == VK_NOT_READY)
16409 VMA_ASSERT(allocator && pInfo && pContext);
16420 VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->
poolCount, pInfo->
pPools));
16422 VMA_DEBUG_LOG(
"vmaDefragmentationBegin");
16424 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16426 VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);
16428 #if VMA_RECORDING_ENABLED 16429 if(allocator->GetRecorder() != VMA_NULL)
16431 allocator->GetRecorder()->RecordDefragmentationBegin(
16432 allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
16443 VMA_ASSERT(allocator);
16445 VMA_DEBUG_LOG(
"vmaDefragmentationEnd");
16447 if(context != VK_NULL_HANDLE)
16449 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16451 #if VMA_RECORDING_ENABLED 16452 if(allocator->GetRecorder() != VMA_NULL)
16454 allocator->GetRecorder()->RecordDefragmentationEnd(
16455 allocator->GetCurrentFrameIndex(), context);
16459 return allocator->DefragmentationEnd(context);
16472 VMA_ASSERT(allocator && allocation && buffer);
16474 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
16476 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16478 return allocator->BindBufferMemory(allocation, buffer);
16486 VMA_ASSERT(allocator && allocation && image);
16488 VMA_DEBUG_LOG(
"vmaBindImageMemory");
16490 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16492 return allocator->BindImageMemory(allocation, image);
16497 const VkBufferCreateInfo* pBufferCreateInfo,
16503 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
16505 if(pBufferCreateInfo->size == 0)
16507 return VK_ERROR_VALIDATION_FAILED_EXT;
16510 VMA_DEBUG_LOG(
"vmaCreateBuffer");
16512 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16514 *pBuffer = VK_NULL_HANDLE;
16515 *pAllocation = VK_NULL_HANDLE;
16518 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
16519 allocator->m_hDevice,
16521 allocator->GetAllocationCallbacks(),
16526 VkMemoryRequirements vkMemReq = {};
16527 bool requiresDedicatedAllocation =
false;
16528 bool prefersDedicatedAllocation =
false;
16529 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
16530 requiresDedicatedAllocation, prefersDedicatedAllocation);
16534 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
16536 VMA_ASSERT(vkMemReq.alignment %
16537 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
16539 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
16541 VMA_ASSERT(vkMemReq.alignment %
16542 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
16544 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
16546 VMA_ASSERT(vkMemReq.alignment %
16547 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
16551 res = allocator->AllocateMemory(
16553 requiresDedicatedAllocation,
16554 prefersDedicatedAllocation,
16557 *pAllocationCreateInfo,
16558 VMA_SUBALLOCATION_TYPE_BUFFER,
16562 #if VMA_RECORDING_ENABLED 16563 if(allocator->GetRecorder() != VMA_NULL)
16565 allocator->GetRecorder()->RecordCreateBuffer(
16566 allocator->GetCurrentFrameIndex(),
16567 *pBufferCreateInfo,
16568 *pAllocationCreateInfo,
16576 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
16580 #if VMA_STATS_STRING_ENABLED 16581 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
16583 if(pAllocationInfo != VMA_NULL)
16585 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16590 allocator->FreeMemory(
16593 *pAllocation = VK_NULL_HANDLE;
16594 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16595 *pBuffer = VK_NULL_HANDLE;
16598 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16599 *pBuffer = VK_NULL_HANDLE;
16610 VMA_ASSERT(allocator);
16612 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
16617 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
16619 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16621 #if VMA_RECORDING_ENABLED 16622 if(allocator->GetRecorder() != VMA_NULL)
16624 allocator->GetRecorder()->RecordDestroyBuffer(
16625 allocator->GetCurrentFrameIndex(),
16630 if(buffer != VK_NULL_HANDLE)
16632 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
16635 if(allocation != VK_NULL_HANDLE)
16637 allocator->FreeMemory(
16645 const VkImageCreateInfo* pImageCreateInfo,
16651 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
16653 if(pImageCreateInfo->extent.width == 0 ||
16654 pImageCreateInfo->extent.height == 0 ||
16655 pImageCreateInfo->extent.depth == 0 ||
16656 pImageCreateInfo->mipLevels == 0 ||
16657 pImageCreateInfo->arrayLayers == 0)
16659 return VK_ERROR_VALIDATION_FAILED_EXT;
16662 VMA_DEBUG_LOG(
"vmaCreateImage");
16664 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16666 *pImage = VK_NULL_HANDLE;
16667 *pAllocation = VK_NULL_HANDLE;
16670 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
16671 allocator->m_hDevice,
16673 allocator->GetAllocationCallbacks(),
16677 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
16678 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
16679 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
16682 VkMemoryRequirements vkMemReq = {};
16683 bool requiresDedicatedAllocation =
false;
16684 bool prefersDedicatedAllocation =
false;
16685 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
16686 requiresDedicatedAllocation, prefersDedicatedAllocation);
16688 res = allocator->AllocateMemory(
16690 requiresDedicatedAllocation,
16691 prefersDedicatedAllocation,
16694 *pAllocationCreateInfo,
16699 #if VMA_RECORDING_ENABLED 16700 if(allocator->GetRecorder() != VMA_NULL)
16702 allocator->GetRecorder()->RecordCreateImage(
16703 allocator->GetCurrentFrameIndex(),
16705 *pAllocationCreateInfo,
16713 res = allocator->BindImageMemory(*pAllocation, *pImage);
16717 #if VMA_STATS_STRING_ENABLED 16718 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
16720 if(pAllocationInfo != VMA_NULL)
16722 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16727 allocator->FreeMemory(
16730 *pAllocation = VK_NULL_HANDLE;
16731 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
16732 *pImage = VK_NULL_HANDLE;
16735 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
16736 *pImage = VK_NULL_HANDLE;
16747 VMA_ASSERT(allocator);
16749 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
16754 VMA_DEBUG_LOG(
"vmaDestroyImage");
16756 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16758 #if VMA_RECORDING_ENABLED 16759 if(allocator->GetRecorder() != VMA_NULL)
16761 allocator->GetRecorder()->RecordDestroyImage(
16762 allocator->GetCurrentFrameIndex(),
16767 if(image != VK_NULL_HANDLE)
16769 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
16771 if(allocation != VK_NULL_HANDLE)
16773 allocator->FreeMemory(
16779 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1744
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:2042
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1802
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side...
Definition: vk_mem_alloc.h:2839
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Deprecated. Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
Definition: vk_mem_alloc.h:1776
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2367
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1756
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:2004
Definition: vk_mem_alloc.h:2102
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:2792
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1748
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2467
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1799
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2875
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2256
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1643
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2348
Definition: vk_mem_alloc.h:2079
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:2795
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1737
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2155
Definition: vk_mem_alloc.h:2031
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1811
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2284
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1865
Description of an Allocator to be created.
Definition: vk_mem_alloc.h:1796
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2035
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1937
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1753
VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:2829
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1936
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2879
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1828
VmaStatInfo total
Definition: vk_mem_alloc.h:1946
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2887
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2139
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2870
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1754
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1679
Represents main object of this library initialized.
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1805
void vmaFreeMemoryPages(VmaAllocator allocator, size_t allocationCount, VmaAllocation *pAllocations)
Frees memory and destroys multiple allocations.
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2298
Definition: vk_mem_alloc.h:2292
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:1760
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1872
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2477
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1749
VkResult vmaDefragmentationBegin(VmaAllocator allocator, const VmaDefragmentationInfo2 *pInfo, VmaDefragmentationStats *pStats, VmaDefragmentationContext *pContext)
Begins defragmentation process.
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1774
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2176
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2318
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:2354
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1735
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2301
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2844
VmaMemoryUsage
Definition: vk_mem_alloc.h:1982
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:2804
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2865
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2883
Definition: vk_mem_alloc.h:2021
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2163
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1752
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
VkResult vmaDefragmentationEnd(VmaAllocator allocator, VmaDefragmentationContext context)
Ends defragmentation process.
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1942
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1685
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:2783
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
Definition: vk_mem_alloc.h:2781
Definition: vk_mem_alloc.h:2123
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:2810
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1706
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1778
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1711
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2885
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2150
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2364
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1745
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1925
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes. Optional.
Definition: vk_mem_alloc.h:2313
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1698
Definition: vk_mem_alloc.h:2288
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:2086
Represents an opaque handle to a started defragmentation process.
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1938
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1702
Definition: vk_mem_alloc.h:2113
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2304
Definition: vk_mem_alloc.h:2030
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1751
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2145
Definition: vk_mem_alloc.h:2136
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1928
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1747
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2326
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1814
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2357
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2134
VkDeviceSize maxCpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2834
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2169
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1853
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1944
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:2066
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1937
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1758
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1784
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use...
Definition: vk_mem_alloc.h:2780
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:2858
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1700
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1757
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2340
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1750
Definition: vk_mem_alloc.h:2097
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of an Allocator to be created.
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1792
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2491
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1808
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1937
VkResult vmaAllocateMemoryPages(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, size_t allocationCount, VmaAllocation *pAllocations, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation for multiple allocation objects at once.
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1934
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2345
Parameters for defragmentation.
Definition: vk_mem_alloc.h:2789
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions...
Definition: vk_mem_alloc.h:2106
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:2472
Definition: vk_mem_alloc.h:2120
Definition: vk_mem_alloc.h:2132
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2881
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1743
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1932
Definition: vk_mem_alloc.h:1987
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2294
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1781
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1930
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1755
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1759
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2053
Definition: vk_mem_alloc.h:2127
Definition: vk_mem_alloc.h:2014
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2486
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1733
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1746
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2273
VkResult vmaResizeAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize newSize)
Tries to resize an allocation in place, if there is enough free memory after it.
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2453
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:2117
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2238
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1938
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
Definition: vk_mem_alloc.h:2092
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1768
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1945
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2351
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1938
struct VmaDefragmentationInfo2 VmaDefragmentationInfo2
Parameters for defragmentation.
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side...
Definition: vk_mem_alloc.h:2849
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2458
uint32_t poolCount
Number of pools in pPools array.
Definition: vk_mem_alloc.h:2813