23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1681 #ifndef VMA_RECORDING_ENABLED 1683 #define VMA_RECORDING_ENABLED 1 1685 #define VMA_RECORDING_ENABLED 0 1690 #define NOMINMAX // For windows.h 1694 #include <vulkan/vulkan.h> 1697 #if VMA_RECORDING_ENABLED 1698 #include <windows.h> 1701 #if !defined(VMA_DEDICATED_ALLOCATION) 1702 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1703 #define VMA_DEDICATED_ALLOCATION 1 1705 #define VMA_DEDICATED_ALLOCATION 0 1723 uint32_t memoryType,
1724 VkDeviceMemory memory,
1729 uint32_t memoryType,
1730 VkDeviceMemory memory,
1803 #if VMA_DEDICATED_ALLOCATION 1804 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1805 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1932 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1940 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1950 uint32_t memoryTypeIndex,
1951 VkMemoryPropertyFlags* pFlags);
1963 uint32_t frameIndex);
1996 #ifndef VMA_STATS_STRING_ENABLED 1997 #define VMA_STATS_STRING_ENABLED 1 2000 #if VMA_STATS_STRING_ENABLED 2007 char** ppStatsString,
2008 VkBool32 detailedMap);
2012 char* pStatsString);
2014 #endif // #if VMA_STATS_STRING_ENABLED 2247 uint32_t memoryTypeBits,
2249 uint32_t* pMemoryTypeIndex);
2265 const VkBufferCreateInfo* pBufferCreateInfo,
2267 uint32_t* pMemoryTypeIndex);
2283 const VkImageCreateInfo* pImageCreateInfo,
2285 uint32_t* pMemoryTypeIndex);
2457 size_t* pLostAllocationCount);
2556 const VkMemoryRequirements* pVkMemoryRequirements,
2582 const VkMemoryRequirements* pVkMemoryRequirements,
2584 size_t allocationCount,
2629 size_t allocationCount,
2641 VkDeviceSize newSize);
3021 size_t allocationCount,
3022 VkBool32* pAllocationsChanged,
3088 const VkBufferCreateInfo* pBufferCreateInfo,
3113 const VkImageCreateInfo* pImageCreateInfo,
3139 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 3142 #if defined(__cplusplus) && defined(__INTELLISENSE__) 3143 #define VMA_IMPLEMENTATION 3146 #ifdef VMA_IMPLEMENTATION 3147 #undef VMA_IMPLEMENTATION 3169 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 3170 #define VMA_STATIC_VULKAN_FUNCTIONS 1 3182 #if VMA_USE_STL_CONTAINERS 3183 #define VMA_USE_STL_VECTOR 1 3184 #define VMA_USE_STL_UNORDERED_MAP 1 3185 #define VMA_USE_STL_LIST 1 3188 #ifndef VMA_USE_STL_SHARED_MUTEX 3190 #if __cplusplus >= 201703L 3191 #define VMA_USE_STL_SHARED_MUTEX 1 3195 #elif defined(_MSC_FULL_VER) && _MSC_FULL_VER >= 190023918 && __cplusplus == 199711L && _MSVC_LANG >= 201703L 3196 #define VMA_USE_STL_SHARED_MUTEX 1 3198 #define VMA_USE_STL_SHARED_MUTEX 0 3206 #if VMA_USE_STL_VECTOR 3210 #if VMA_USE_STL_UNORDERED_MAP 3211 #include <unordered_map> 3214 #if VMA_USE_STL_LIST 3223 #include <algorithm> 3228 #define VMA_NULL nullptr 3231 #if defined(__ANDROID_API__) && (__ANDROID_API__ < 16) 3233 void *aligned_alloc(
size_t alignment,
size_t size)
3236 if(alignment <
sizeof(
void*))
3238 alignment =
sizeof(
void*);
3241 return memalign(alignment, size);
3243 #elif defined(__APPLE__) || defined(__ANDROID__) 3245 void *aligned_alloc(
size_t alignment,
size_t size)
3248 if(alignment <
sizeof(
void*))
3250 alignment =
sizeof(
void*);
3254 if(posix_memalign(&pointer, alignment, size) == 0)
3268 #define VMA_ASSERT(expr) assert(expr) 3270 #define VMA_ASSERT(expr) 3276 #ifndef VMA_HEAVY_ASSERT 3278 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 3280 #define VMA_HEAVY_ASSERT(expr) 3284 #ifndef VMA_ALIGN_OF 3285 #define VMA_ALIGN_OF(type) (__alignof(type)) 3288 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 3290 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 3292 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 3296 #ifndef VMA_SYSTEM_FREE 3298 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 3300 #define VMA_SYSTEM_FREE(ptr) free(ptr) 3305 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 3309 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 3313 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 3317 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 3320 #ifndef VMA_DEBUG_LOG 3321 #define VMA_DEBUG_LOG(format, ...) 3331 #if VMA_STATS_STRING_ENABLED 3332 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
3334 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
3336 static inline void VmaUint64ToStr(
char* outStr,
size_t strLen, uint64_t num)
3338 snprintf(outStr, strLen,
"%llu", static_cast<unsigned long long>(num));
3340 static inline void VmaPtrToStr(
char* outStr,
size_t strLen,
const void* ptr)
3342 snprintf(outStr, strLen,
"%p", ptr);
3350 void Lock() { m_Mutex.lock(); }
3351 void Unlock() { m_Mutex.unlock(); }
3355 #define VMA_MUTEX VmaMutex 3359 #ifndef VMA_RW_MUTEX 3360 #if VMA_USE_STL_SHARED_MUTEX 3362 #include <shared_mutex> 3366 void LockRead() { m_Mutex.lock_shared(); }
3367 void UnlockRead() { m_Mutex.unlock_shared(); }
3368 void LockWrite() { m_Mutex.lock(); }
3369 void UnlockWrite() { m_Mutex.unlock(); }
3371 std::shared_mutex m_Mutex;
3373 #define VMA_RW_MUTEX VmaRWMutex 3374 #elif defined(_WIN32) && defined(WINVER) && WINVER >= 0x0600 3380 VmaRWMutex() { InitializeSRWLock(&m_Lock); }
3381 void LockRead() { AcquireSRWLockShared(&m_Lock); }
3382 void UnlockRead() { ReleaseSRWLockShared(&m_Lock); }
3383 void LockWrite() { AcquireSRWLockExclusive(&m_Lock); }
3384 void UnlockWrite() { ReleaseSRWLockExclusive(&m_Lock); }
3388 #define VMA_RW_MUTEX VmaRWMutex 3394 void LockRead() { m_Mutex.Lock(); }
3395 void UnlockRead() { m_Mutex.Unlock(); }
3396 void LockWrite() { m_Mutex.Lock(); }
3397 void UnlockWrite() { m_Mutex.Unlock(); }
3401 #define VMA_RW_MUTEX VmaRWMutex 3402 #endif // #if VMA_USE_STL_SHARED_MUTEX 3403 #endif // #ifndef VMA_RW_MUTEX 3413 #ifndef VMA_ATOMIC_UINT32 3415 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 3418 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 3423 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 3426 #ifndef VMA_DEBUG_ALIGNMENT 3431 #define VMA_DEBUG_ALIGNMENT (1) 3434 #ifndef VMA_DEBUG_MARGIN 3439 #define VMA_DEBUG_MARGIN (0) 3442 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 3447 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 3450 #ifndef VMA_DEBUG_DETECT_CORRUPTION 3456 #define VMA_DEBUG_DETECT_CORRUPTION (0) 3459 #ifndef VMA_DEBUG_GLOBAL_MUTEX 3464 #define VMA_DEBUG_GLOBAL_MUTEX (0) 3467 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 3472 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 3475 #ifndef VMA_SMALL_HEAP_MAX_SIZE 3476 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 3480 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 3481 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 3485 #ifndef VMA_CLASS_NO_COPY 3486 #define VMA_CLASS_NO_COPY(className) \ 3488 className(const className&) = delete; \ 3489 className& operator=(const className&) = delete; 3492 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
3495 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
3497 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
3498 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
3504 static const uint32_t VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET = 0x10000000u;
3506 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
3507 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
3510 static inline uint32_t VmaCountBitsSet(uint32_t v)
3512 uint32_t c = v - ((v >> 1) & 0x55555555);
3513 c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
3514 c = ((c >> 4) + c) & 0x0F0F0F0F;
3515 c = ((c >> 8) + c) & 0x00FF00FF;
3516 c = ((c >> 16) + c) & 0x0000FFFF;
3522 template <
typename T>
3523 static inline T VmaAlignUp(T val, T align)
3525 return (val + align - 1) / align * align;
3529 template <
typename T>
3530 static inline T VmaAlignDown(T val, T align)
3532 return val / align * align;
3536 template <
typename T>
3537 static inline T VmaRoundDiv(T x, T y)
3539 return (x + (y / (T)2)) / y;
3547 template <
typename T>
3548 inline bool VmaIsPow2(T x)
3550 return (x & (x-1)) == 0;
3554 static inline uint32_t VmaNextPow2(uint32_t v)
3565 static inline uint64_t VmaNextPow2(uint64_t v)
3579 static inline uint32_t VmaPrevPow2(uint32_t v)
3589 static inline uint64_t VmaPrevPow2(uint64_t v)
3601 static inline bool VmaStrIsEmpty(
const char* pStr)
3603 return pStr == VMA_NULL || *pStr ==
'\0';
3606 #if VMA_STATS_STRING_ENABLED 3608 static const char* VmaAlgorithmToStr(uint32_t algorithm)
3624 #endif // #if VMA_STATS_STRING_ENABLED 3628 template<
typename Iterator,
typename Compare>
3629 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
3631 Iterator centerValue = end; --centerValue;
3632 Iterator insertIndex = beg;
3633 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
3635 if(cmp(*memTypeIndex, *centerValue))
3637 if(insertIndex != memTypeIndex)
3639 VMA_SWAP(*memTypeIndex, *insertIndex);
3644 if(insertIndex != centerValue)
3646 VMA_SWAP(*insertIndex, *centerValue);
3651 template<
typename Iterator,
typename Compare>
3652 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
3656 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
3657 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
3658 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
3662 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 3664 #endif // #ifndef VMA_SORT 3673 static inline bool VmaBlocksOnSamePage(
3674 VkDeviceSize resourceAOffset,
3675 VkDeviceSize resourceASize,
3676 VkDeviceSize resourceBOffset,
3677 VkDeviceSize pageSize)
3679 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3680 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3681 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3682 VkDeviceSize resourceBStart = resourceBOffset;
3683 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3684 return resourceAEndPage == resourceBStartPage;
3687 enum VmaSuballocationType
3689 VMA_SUBALLOCATION_TYPE_FREE = 0,
3690 VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
3691 VMA_SUBALLOCATION_TYPE_BUFFER = 2,
3692 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
3693 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
3694 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
3695 VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
3704 static inline bool VmaIsBufferImageGranularityConflict(
3705 VmaSuballocationType suballocType1,
3706 VmaSuballocationType suballocType2)
3708 if(suballocType1 > suballocType2)
3710 VMA_SWAP(suballocType1, suballocType2);
3713 switch(suballocType1)
3715 case VMA_SUBALLOCATION_TYPE_FREE:
3717 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3719 case VMA_SUBALLOCATION_TYPE_BUFFER:
3721 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3722 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3723 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3725 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3726 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3727 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3728 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3730 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3731 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3739 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3741 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION 3742 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3743 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3744 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3746 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3753 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3755 #if VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_DETECT_CORRUPTION 3756 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3757 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3758 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3760 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3773 static void VmaFillGpuDefragmentationBufferCreateInfo(VkBufferCreateInfo& outBufCreateInfo)
3775 memset(&outBufCreateInfo, 0,
sizeof(outBufCreateInfo));
3776 outBufCreateInfo.sType = VK_STRUCTURE_TYPE_BUFFER_CREATE_INFO;
3777 outBufCreateInfo.usage = VK_BUFFER_USAGE_TRANSFER_SRC_BIT | VK_BUFFER_USAGE_TRANSFER_DST_BIT;
3778 outBufCreateInfo.size = (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE;
3784 VMA_CLASS_NO_COPY(VmaMutexLock)
3786 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex =
true) :
3787 m_pMutex(useMutex ? &mutex : VMA_NULL)
3788 {
if(m_pMutex) { m_pMutex->Lock(); } }
3790 {
if(m_pMutex) { m_pMutex->Unlock(); } }
3792 VMA_MUTEX* m_pMutex;
3796 struct VmaMutexLockRead
3798 VMA_CLASS_NO_COPY(VmaMutexLockRead)
3800 VmaMutexLockRead(VMA_RW_MUTEX& mutex,
bool useMutex) :
3801 m_pMutex(useMutex ? &mutex : VMA_NULL)
3802 {
if(m_pMutex) { m_pMutex->LockRead(); } }
3803 ~VmaMutexLockRead() {
if(m_pMutex) { m_pMutex->UnlockRead(); } }
3805 VMA_RW_MUTEX* m_pMutex;
3809 struct VmaMutexLockWrite
3811 VMA_CLASS_NO_COPY(VmaMutexLockWrite)
3813 VmaMutexLockWrite(VMA_RW_MUTEX& mutex,
bool useMutex) :
3814 m_pMutex(useMutex ? &mutex : VMA_NULL)
3815 {
if(m_pMutex) { m_pMutex->LockWrite(); } }
3816 ~VmaMutexLockWrite() {
if(m_pMutex) { m_pMutex->UnlockWrite(); } }
3818 VMA_RW_MUTEX* m_pMutex;
3821 #if VMA_DEBUG_GLOBAL_MUTEX 3822 static VMA_MUTEX gDebugGlobalMutex;
3823 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3825 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3829 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
3840 template <
typename CmpLess,
typename IterT,
typename KeyT>
3841 static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end,
const KeyT &key,
const CmpLess& cmp)
3843 size_t down = 0, up = (end - beg);
3846 const size_t mid = (down + up) / 2;
3847 if(cmp(*(beg+mid), key))
3859 template<
typename CmpLess,
typename IterT,
typename KeyT>
3860 IterT VmaBinaryFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value,
const CmpLess& cmp)
3862 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
3863 beg, end, value, cmp);
3865 (!cmp(*it, value) && !cmp(value, *it)))
3877 template<
typename T>
3878 static bool VmaValidatePointerArray(uint32_t count,
const T* arr)
3880 for(uint32_t i = 0; i < count; ++i)
3882 const T iPtr = arr[i];
3883 if(iPtr == VMA_NULL)
3887 for(uint32_t j = i + 1; j < count; ++j)
3901 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3903 if((pAllocationCallbacks != VMA_NULL) &&
3904 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3906 return (*pAllocationCallbacks->pfnAllocation)(
3907 pAllocationCallbacks->pUserData,
3910 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3914 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3918 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3920 if((pAllocationCallbacks != VMA_NULL) &&
3921 (pAllocationCallbacks->pfnFree != VMA_NULL))
3923 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3927 VMA_SYSTEM_FREE(ptr);
3931 template<
typename T>
3932 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3934 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3937 template<
typename T>
3938 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3940 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
3943 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3945 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3947 template<
typename T>
3948 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
3951 VmaFree(pAllocationCallbacks, ptr);
3954 template<
typename T>
3955 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
3959 for(
size_t i = count; i--; )
3963 VmaFree(pAllocationCallbacks, ptr);
3968 template<
typename T>
3969 class VmaStlAllocator
3972 const VkAllocationCallbacks*
const m_pCallbacks;
3973 typedef T value_type;
3975 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
3976 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
3978 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3979 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
3981 template<
typename U>
3982 bool operator==(
const VmaStlAllocator<U>& rhs)
const 3984 return m_pCallbacks == rhs.m_pCallbacks;
3986 template<
typename U>
3987 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 3989 return m_pCallbacks != rhs.m_pCallbacks;
3992 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
3995 #if VMA_USE_STL_VECTOR 3997 #define VmaVector std::vector 3999 template<
typename T,
typename allocatorT>
4000 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
4002 vec.insert(vec.begin() + index, item);
4005 template<
typename T,
typename allocatorT>
4006 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
4008 vec.erase(vec.begin() + index);
4011 #else // #if VMA_USE_STL_VECTOR 4016 template<
typename T,
typename AllocatorT>
4020 typedef T value_type;
4022 VmaVector(
const AllocatorT& allocator) :
4023 m_Allocator(allocator),
4030 VmaVector(
size_t count,
const AllocatorT& allocator) :
4031 m_Allocator(allocator),
4032 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
4038 VmaVector(
const VmaVector<T, AllocatorT>& src) :
4039 m_Allocator(src.m_Allocator),
4040 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
4041 m_Count(src.m_Count),
4042 m_Capacity(src.m_Count)
4046 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
4052 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4055 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
4059 resize(rhs.m_Count);
4062 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
4068 bool empty()
const {
return m_Count == 0; }
4069 size_t size()
const {
return m_Count; }
4070 T* data() {
return m_pArray; }
4071 const T* data()
const {
return m_pArray; }
4073 T& operator[](
size_t index)
4075 VMA_HEAVY_ASSERT(index < m_Count);
4076 return m_pArray[index];
4078 const T& operator[](
size_t index)
const 4080 VMA_HEAVY_ASSERT(index < m_Count);
4081 return m_pArray[index];
4086 VMA_HEAVY_ASSERT(m_Count > 0);
4089 const T& front()
const 4091 VMA_HEAVY_ASSERT(m_Count > 0);
4096 VMA_HEAVY_ASSERT(m_Count > 0);
4097 return m_pArray[m_Count - 1];
4099 const T& back()
const 4101 VMA_HEAVY_ASSERT(m_Count > 0);
4102 return m_pArray[m_Count - 1];
4105 void reserve(
size_t newCapacity,
bool freeMemory =
false)
4107 newCapacity = VMA_MAX(newCapacity, m_Count);
4109 if((newCapacity < m_Capacity) && !freeMemory)
4111 newCapacity = m_Capacity;
4114 if(newCapacity != m_Capacity)
4116 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
4119 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
4121 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4122 m_Capacity = newCapacity;
4123 m_pArray = newArray;
4127 void resize(
size_t newCount,
bool freeMemory =
false)
4129 size_t newCapacity = m_Capacity;
4130 if(newCount > m_Capacity)
4132 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
4136 newCapacity = newCount;
4139 if(newCapacity != m_Capacity)
4141 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
4142 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
4143 if(elementsToCopy != 0)
4145 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
4147 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
4148 m_Capacity = newCapacity;
4149 m_pArray = newArray;
4155 void clear(
bool freeMemory =
false)
4157 resize(0, freeMemory);
4160 void insert(
size_t index,
const T& src)
4162 VMA_HEAVY_ASSERT(index <= m_Count);
4163 const size_t oldCount = size();
4164 resize(oldCount + 1);
4165 if(index < oldCount)
4167 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
4169 m_pArray[index] = src;
4172 void remove(
size_t index)
4174 VMA_HEAVY_ASSERT(index < m_Count);
4175 const size_t oldCount = size();
4176 if(index < oldCount - 1)
4178 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
4180 resize(oldCount - 1);
4183 void push_back(
const T& src)
4185 const size_t newIndex = size();
4186 resize(newIndex + 1);
4187 m_pArray[newIndex] = src;
4192 VMA_HEAVY_ASSERT(m_Count > 0);
4196 void push_front(
const T& src)
4203 VMA_HEAVY_ASSERT(m_Count > 0);
4207 typedef T* iterator;
4209 iterator begin() {
return m_pArray; }
4210 iterator end() {
return m_pArray + m_Count; }
4213 AllocatorT m_Allocator;
4219 template<
typename T,
typename allocatorT>
4220 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
4222 vec.insert(index, item);
4225 template<
typename T,
typename allocatorT>
4226 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
4231 #endif // #if VMA_USE_STL_VECTOR 4233 template<
typename CmpLess,
typename VectorT>
4234 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
4236 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4238 vector.data() + vector.size(),
4240 CmpLess()) - vector.data();
4241 VmaVectorInsert(vector, indexToInsert, value);
4242 return indexToInsert;
4245 template<
typename CmpLess,
typename VectorT>
4246 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
4249 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
4254 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
4256 size_t indexToRemove = it - vector.begin();
4257 VmaVectorRemove(vector, indexToRemove);
4271 template<
typename T>
4272 class VmaPoolAllocator
4274 VMA_CLASS_NO_COPY(VmaPoolAllocator)
4276 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity);
4277 ~VmaPoolAllocator();
4285 uint32_t NextFreeIndex;
4293 uint32_t FirstFreeIndex;
4296 const VkAllocationCallbacks* m_pAllocationCallbacks;
4297 const uint32_t m_FirstBlockCapacity;
4298 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
4300 ItemBlock& CreateNewBlock();
4303 template<
typename T>
4304 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks, uint32_t firstBlockCapacity) :
4305 m_pAllocationCallbacks(pAllocationCallbacks),
4306 m_FirstBlockCapacity(firstBlockCapacity),
4307 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
4309 VMA_ASSERT(m_FirstBlockCapacity > 1);
4312 template<
typename T>
4313 VmaPoolAllocator<T>::~VmaPoolAllocator()
4318 template<
typename T>
4319 void VmaPoolAllocator<T>::Clear()
4321 for(
size_t i = m_ItemBlocks.size(); i--; )
4322 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemBlocks[i].Capacity);
4323 m_ItemBlocks.clear();
4326 template<
typename T>
4327 T* VmaPoolAllocator<T>::Alloc()
4329 for(
size_t i = m_ItemBlocks.size(); i--; )
4331 ItemBlock& block = m_ItemBlocks[i];
4333 if(block.FirstFreeIndex != UINT32_MAX)
4335 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
4336 block.FirstFreeIndex = pItem->NextFreeIndex;
4337 return &pItem->Value;
4342 ItemBlock& newBlock = CreateNewBlock();
4343 Item*
const pItem = &newBlock.pItems[0];
4344 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
4345 return &pItem->Value;
4348 template<
typename T>
4349 void VmaPoolAllocator<T>::Free(T* ptr)
4352 for(
size_t i = m_ItemBlocks.size(); i--; )
4354 ItemBlock& block = m_ItemBlocks[i];
4358 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
4361 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + block.Capacity))
4363 const uint32_t index = static_cast<uint32_t>(pItemPtr - block.pItems);
4364 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
4365 block.FirstFreeIndex = index;
4369 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
4372 template<
typename T>
4373 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
4375 const uint32_t newBlockCapacity = m_ItemBlocks.empty() ?
4376 m_FirstBlockCapacity : m_ItemBlocks.back().Capacity * 3 / 2;
4378 const ItemBlock newBlock = {
4379 vma_new_array(m_pAllocationCallbacks, Item, newBlockCapacity),
4383 m_ItemBlocks.push_back(newBlock);
4386 for(uint32_t i = 0; i < newBlockCapacity - 1; ++i)
4387 newBlock.pItems[i].NextFreeIndex = i + 1;
4388 newBlock.pItems[newBlockCapacity - 1].NextFreeIndex = UINT32_MAX;
4389 return m_ItemBlocks.back();
4395 #if VMA_USE_STL_LIST 4397 #define VmaList std::list 4399 #else // #if VMA_USE_STL_LIST 4401 template<
typename T>
4410 template<
typename T>
4413 VMA_CLASS_NO_COPY(VmaRawList)
4415 typedef VmaListItem<T> ItemType;
4417 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
4421 size_t GetCount()
const {
return m_Count; }
4422 bool IsEmpty()
const {
return m_Count == 0; }
4424 ItemType* Front() {
return m_pFront; }
4425 const ItemType* Front()
const {
return m_pFront; }
4426 ItemType* Back() {
return m_pBack; }
4427 const ItemType* Back()
const {
return m_pBack; }
4429 ItemType* PushBack();
4430 ItemType* PushFront();
4431 ItemType* PushBack(
const T& value);
4432 ItemType* PushFront(
const T& value);
4437 ItemType* InsertBefore(ItemType* pItem);
4439 ItemType* InsertAfter(ItemType* pItem);
4441 ItemType* InsertBefore(ItemType* pItem,
const T& value);
4442 ItemType* InsertAfter(ItemType* pItem,
const T& value);
4444 void Remove(ItemType* pItem);
4447 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
4448 VmaPoolAllocator<ItemType> m_ItemAllocator;
4454 template<
typename T>
4455 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
4456 m_pAllocationCallbacks(pAllocationCallbacks),
4457 m_ItemAllocator(pAllocationCallbacks, 128),
4464 template<
typename T>
4465 VmaRawList<T>::~VmaRawList()
4471 template<
typename T>
4472 void VmaRawList<T>::Clear()
4474 if(IsEmpty() ==
false)
4476 ItemType* pItem = m_pBack;
4477 while(pItem != VMA_NULL)
4479 ItemType*
const pPrevItem = pItem->pPrev;
4480 m_ItemAllocator.Free(pItem);
4483 m_pFront = VMA_NULL;
4489 template<
typename T>
4490 VmaListItem<T>* VmaRawList<T>::PushBack()
4492 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4493 pNewItem->pNext = VMA_NULL;
4496 pNewItem->pPrev = VMA_NULL;
4497 m_pFront = pNewItem;
4503 pNewItem->pPrev = m_pBack;
4504 m_pBack->pNext = pNewItem;
4511 template<
typename T>
4512 VmaListItem<T>* VmaRawList<T>::PushFront()
4514 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4515 pNewItem->pPrev = VMA_NULL;
4518 pNewItem->pNext = VMA_NULL;
4519 m_pFront = pNewItem;
4525 pNewItem->pNext = m_pFront;
4526 m_pFront->pPrev = pNewItem;
4527 m_pFront = pNewItem;
4533 template<
typename T>
4534 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
4536 ItemType*
const pNewItem = PushBack();
4537 pNewItem->Value = value;
4541 template<
typename T>
4542 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
4544 ItemType*
const pNewItem = PushFront();
4545 pNewItem->Value = value;
4549 template<
typename T>
4550 void VmaRawList<T>::PopBack()
4552 VMA_HEAVY_ASSERT(m_Count > 0);
4553 ItemType*
const pBackItem = m_pBack;
4554 ItemType*
const pPrevItem = pBackItem->pPrev;
4555 if(pPrevItem != VMA_NULL)
4557 pPrevItem->pNext = VMA_NULL;
4559 m_pBack = pPrevItem;
4560 m_ItemAllocator.Free(pBackItem);
4564 template<
typename T>
4565 void VmaRawList<T>::PopFront()
4567 VMA_HEAVY_ASSERT(m_Count > 0);
4568 ItemType*
const pFrontItem = m_pFront;
4569 ItemType*
const pNextItem = pFrontItem->pNext;
4570 if(pNextItem != VMA_NULL)
4572 pNextItem->pPrev = VMA_NULL;
4574 m_pFront = pNextItem;
4575 m_ItemAllocator.Free(pFrontItem);
4579 template<
typename T>
4580 void VmaRawList<T>::Remove(ItemType* pItem)
4582 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4583 VMA_HEAVY_ASSERT(m_Count > 0);
4585 if(pItem->pPrev != VMA_NULL)
4587 pItem->pPrev->pNext = pItem->pNext;
4591 VMA_HEAVY_ASSERT(m_pFront == pItem);
4592 m_pFront = pItem->pNext;
4595 if(pItem->pNext != VMA_NULL)
4597 pItem->pNext->pPrev = pItem->pPrev;
4601 VMA_HEAVY_ASSERT(m_pBack == pItem);
4602 m_pBack = pItem->pPrev;
4605 m_ItemAllocator.Free(pItem);
4609 template<
typename T>
4610 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4612 if(pItem != VMA_NULL)
4614 ItemType*
const prevItem = pItem->pPrev;
4615 ItemType*
const newItem = m_ItemAllocator.Alloc();
4616 newItem->pPrev = prevItem;
4617 newItem->pNext = pItem;
4618 pItem->pPrev = newItem;
4619 if(prevItem != VMA_NULL)
4621 prevItem->pNext = newItem;
4625 VMA_HEAVY_ASSERT(m_pFront == pItem);
4635 template<
typename T>
4636 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4638 if(pItem != VMA_NULL)
4640 ItemType*
const nextItem = pItem->pNext;
4641 ItemType*
const newItem = m_ItemAllocator.Alloc();
4642 newItem->pNext = nextItem;
4643 newItem->pPrev = pItem;
4644 pItem->pNext = newItem;
4645 if(nextItem != VMA_NULL)
4647 nextItem->pPrev = newItem;
4651 VMA_HEAVY_ASSERT(m_pBack == pItem);
4661 template<
typename T>
4662 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4664 ItemType*
const newItem = InsertBefore(pItem);
4665 newItem->Value = value;
4669 template<
typename T>
4670 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4672 ItemType*
const newItem = InsertAfter(pItem);
4673 newItem->Value = value;
4677 template<
typename T,
typename AllocatorT>
4680 VMA_CLASS_NO_COPY(VmaList)
4691 T& operator*()
const 4693 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4694 return m_pItem->Value;
4696 T* operator->()
const 4698 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4699 return &m_pItem->Value;
4702 iterator& operator++()
4704 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4705 m_pItem = m_pItem->pNext;
4708 iterator& operator--()
4710 if(m_pItem != VMA_NULL)
4712 m_pItem = m_pItem->pPrev;
4716 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4717 m_pItem = m_pList->Back();
4722 iterator operator++(
int)
4724 iterator result = *
this;
4728 iterator operator--(
int)
4730 iterator result = *
this;
4735 bool operator==(
const iterator& rhs)
const 4737 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4738 return m_pItem == rhs.m_pItem;
4740 bool operator!=(
const iterator& rhs)
const 4742 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4743 return m_pItem != rhs.m_pItem;
4747 VmaRawList<T>* m_pList;
4748 VmaListItem<T>* m_pItem;
4750 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4756 friend class VmaList<T, AllocatorT>;
4759 class const_iterator
4768 const_iterator(
const iterator& src) :
4769 m_pList(src.m_pList),
4770 m_pItem(src.m_pItem)
4774 const T& operator*()
const 4776 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4777 return m_pItem->Value;
4779 const T* operator->()
const 4781 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4782 return &m_pItem->Value;
4785 const_iterator& operator++()
4787 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4788 m_pItem = m_pItem->pNext;
4791 const_iterator& operator--()
4793 if(m_pItem != VMA_NULL)
4795 m_pItem = m_pItem->pPrev;
4799 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4800 m_pItem = m_pList->Back();
4805 const_iterator operator++(
int)
4807 const_iterator result = *
this;
4811 const_iterator operator--(
int)
4813 const_iterator result = *
this;
4818 bool operator==(
const const_iterator& rhs)
const 4820 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4821 return m_pItem == rhs.m_pItem;
4823 bool operator!=(
const const_iterator& rhs)
const 4825 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4826 return m_pItem != rhs.m_pItem;
4830 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4836 const VmaRawList<T>* m_pList;
4837 const VmaListItem<T>* m_pItem;
4839 friend class VmaList<T, AllocatorT>;
4842 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4844 bool empty()
const {
return m_RawList.IsEmpty(); }
4845 size_t size()
const {
return m_RawList.GetCount(); }
4847 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4848 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4850 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4851 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4853 void clear() { m_RawList.Clear(); }
4854 void push_back(
const T& value) { m_RawList.PushBack(value); }
4855 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4856 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4859 VmaRawList<T> m_RawList;
4862 #endif // #if VMA_USE_STL_LIST 4870 #if VMA_USE_STL_UNORDERED_MAP 4872 #define VmaPair std::pair 4874 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4875 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4877 #else // #if VMA_USE_STL_UNORDERED_MAP 4879 template<
typename T1,
typename T2>
4885 VmaPair() : first(), second() { }
4886 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
4892 template<
typename KeyT,
typename ValueT>
4896 typedef VmaPair<KeyT, ValueT> PairType;
4897 typedef PairType* iterator;
4899 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4901 iterator begin() {
return m_Vector.begin(); }
4902 iterator end() {
return m_Vector.end(); }
4904 void insert(
const PairType& pair);
4905 iterator find(
const KeyT& key);
4906 void erase(iterator it);
4909 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4912 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4914 template<
typename FirstT,
typename SecondT>
4915 struct VmaPairFirstLess
4917 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4919 return lhs.first < rhs.first;
4921 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4923 return lhs.first < rhsFirst;
4927 template<
typename KeyT,
typename ValueT>
4928 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4930 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4932 m_Vector.data() + m_Vector.size(),
4934 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4935 VmaVectorInsert(m_Vector, indexToInsert, pair);
4938 template<
typename KeyT,
typename ValueT>
4939 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4941 PairType* it = VmaBinaryFindFirstNotLess(
4943 m_Vector.data() + m_Vector.size(),
4945 VmaPairFirstLess<KeyT, ValueT>());
4946 if((it != m_Vector.end()) && (it->first == key))
4952 return m_Vector.end();
4956 template<
typename KeyT,
typename ValueT>
4957 void VmaMap<KeyT, ValueT>::erase(iterator it)
4959 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4962 #endif // #if VMA_USE_STL_UNORDERED_MAP 4968 class VmaDeviceMemoryBlock;
4970 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
4972 struct VmaAllocation_T
4975 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4979 FLAG_USER_DATA_STRING = 0x01,
4983 enum ALLOCATION_TYPE
4985 ALLOCATION_TYPE_NONE,
4986 ALLOCATION_TYPE_BLOCK,
4987 ALLOCATION_TYPE_DEDICATED,
4995 void Ctor(uint32_t currentFrameIndex,
bool userDataString)
4999 m_pUserData = VMA_NULL;
5000 m_LastUseFrameIndex = currentFrameIndex;
5001 m_Type = (uint8_t)ALLOCATION_TYPE_NONE;
5002 m_SuballocationType = (uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN;
5004 m_Flags = userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0;
5006 #if VMA_STATS_STRING_ENABLED 5007 m_CreationFrameIndex = currentFrameIndex;
5008 m_BufferImageUsage = 0;
5014 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
5017 VMA_ASSERT(m_pUserData == VMA_NULL);
5020 void InitBlockAllocation(
5021 VmaDeviceMemoryBlock* block,
5022 VkDeviceSize offset,
5023 VkDeviceSize alignment,
5025 VmaSuballocationType suballocationType,
5029 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5030 VMA_ASSERT(block != VMA_NULL);
5031 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
5032 m_Alignment = alignment;
5034 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
5035 m_SuballocationType = (uint8_t)suballocationType;
5036 m_BlockAllocation.m_Block = block;
5037 m_BlockAllocation.m_Offset = offset;
5038 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
5043 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5044 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
5045 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
5046 m_BlockAllocation.m_Block = VMA_NULL;
5047 m_BlockAllocation.m_Offset = 0;
5048 m_BlockAllocation.m_CanBecomeLost =
true;
5051 void ChangeBlockAllocation(
5053 VmaDeviceMemoryBlock* block,
5054 VkDeviceSize offset);
5056 void ChangeOffset(VkDeviceSize newOffset);
5059 void InitDedicatedAllocation(
5060 uint32_t memoryTypeIndex,
5061 VkDeviceMemory hMemory,
5062 VmaSuballocationType suballocationType,
5066 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
5067 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
5068 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
5071 m_SuballocationType = (uint8_t)suballocationType;
5072 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
5073 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
5074 m_DedicatedAllocation.m_hMemory = hMemory;
5075 m_DedicatedAllocation.m_pMappedData = pMappedData;
5078 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
5079 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
5080 VkDeviceSize GetSize()
const {
return m_Size; }
5081 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
5082 void* GetUserData()
const {
return m_pUserData; }
5083 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
5084 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
5086 VmaDeviceMemoryBlock* GetBlock()
const 5088 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5089 return m_BlockAllocation.m_Block;
5091 VkDeviceSize GetOffset()
const;
5092 VkDeviceMemory GetMemory()
const;
5093 uint32_t GetMemoryTypeIndex()
const;
5094 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
5095 void* GetMappedData()
const;
5096 bool CanBecomeLost()
const;
5098 uint32_t GetLastUseFrameIndex()
const 5100 return m_LastUseFrameIndex.load();
5102 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
5104 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
5114 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5116 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
5118 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
5129 void BlockAllocMap();
5130 void BlockAllocUnmap();
5131 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
5134 #if VMA_STATS_STRING_ENABLED 5135 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
5136 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
5138 void InitBufferImageUsage(uint32_t bufferImageUsage)
5140 VMA_ASSERT(m_BufferImageUsage == 0);
5141 m_BufferImageUsage = bufferImageUsage;
5144 void PrintParameters(
class VmaJsonWriter& json)
const;
5148 VkDeviceSize m_Alignment;
5149 VkDeviceSize m_Size;
5151 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
5153 uint8_t m_SuballocationType;
5160 struct BlockAllocation
5162 VmaDeviceMemoryBlock* m_Block;
5163 VkDeviceSize m_Offset;
5164 bool m_CanBecomeLost;
5168 struct DedicatedAllocation
5170 uint32_t m_MemoryTypeIndex;
5171 VkDeviceMemory m_hMemory;
5172 void* m_pMappedData;
5178 BlockAllocation m_BlockAllocation;
5180 DedicatedAllocation m_DedicatedAllocation;
5183 #if VMA_STATS_STRING_ENABLED 5184 uint32_t m_CreationFrameIndex;
5185 uint32_t m_BufferImageUsage;
5195 struct VmaSuballocation
5197 VkDeviceSize offset;
5200 VmaSuballocationType type;
5204 struct VmaSuballocationOffsetLess
5206 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5208 return lhs.offset < rhs.offset;
5211 struct VmaSuballocationOffsetGreater
5213 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 5215 return lhs.offset > rhs.offset;
5219 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
5222 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
5224 enum class VmaAllocationRequestType
5246 struct VmaAllocationRequest
5248 VkDeviceSize offset;
5249 VkDeviceSize sumFreeSize;
5250 VkDeviceSize sumItemSize;
5251 VmaSuballocationList::iterator item;
5252 size_t itemsToMakeLostCount;
5254 VmaAllocationRequestType type;
5256 VkDeviceSize CalcCost()
const 5258 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
5266 class VmaBlockMetadata
5270 virtual ~VmaBlockMetadata() { }
5271 virtual void Init(VkDeviceSize size) { m_Size = size; }
5274 virtual bool Validate()
const = 0;
5275 VkDeviceSize GetSize()
const {
return m_Size; }
5276 virtual size_t GetAllocationCount()
const = 0;
5277 virtual VkDeviceSize GetSumFreeSize()
const = 0;
5278 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
5280 virtual bool IsEmpty()
const = 0;
5282 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
5284 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
5286 #if VMA_STATS_STRING_ENABLED 5287 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
5293 virtual bool CreateAllocationRequest(
5294 uint32_t currentFrameIndex,
5295 uint32_t frameInUseCount,
5296 VkDeviceSize bufferImageGranularity,
5297 VkDeviceSize allocSize,
5298 VkDeviceSize allocAlignment,
5300 VmaSuballocationType allocType,
5301 bool canMakeOtherLost,
5304 VmaAllocationRequest* pAllocationRequest) = 0;
5306 virtual bool MakeRequestedAllocationsLost(
5307 uint32_t currentFrameIndex,
5308 uint32_t frameInUseCount,
5309 VmaAllocationRequest* pAllocationRequest) = 0;
5311 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
5313 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
5317 const VmaAllocationRequest& request,
5318 VmaSuballocationType type,
5319 VkDeviceSize allocSize,
5324 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
5327 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
5329 #if VMA_STATS_STRING_ENABLED 5330 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
5331 VkDeviceSize unusedBytes,
5332 size_t allocationCount,
5333 size_t unusedRangeCount)
const;
5334 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
5335 VkDeviceSize offset,
5337 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
5338 VkDeviceSize offset,
5339 VkDeviceSize size)
const;
5340 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
5344 VkDeviceSize m_Size;
5345 const VkAllocationCallbacks* m_pAllocationCallbacks;
5348 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 5349 VMA_ASSERT(0 && "Validation failed: " #cond); \ 5353 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
5355 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
5358 virtual ~VmaBlockMetadata_Generic();
5359 virtual void Init(VkDeviceSize size);
5361 virtual bool Validate()
const;
5362 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
5363 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5364 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5365 virtual bool IsEmpty()
const;
5367 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5368 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5370 #if VMA_STATS_STRING_ENABLED 5371 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5374 virtual bool CreateAllocationRequest(
5375 uint32_t currentFrameIndex,
5376 uint32_t frameInUseCount,
5377 VkDeviceSize bufferImageGranularity,
5378 VkDeviceSize allocSize,
5379 VkDeviceSize allocAlignment,
5381 VmaSuballocationType allocType,
5382 bool canMakeOtherLost,
5384 VmaAllocationRequest* pAllocationRequest);
5386 virtual bool MakeRequestedAllocationsLost(
5387 uint32_t currentFrameIndex,
5388 uint32_t frameInUseCount,
5389 VmaAllocationRequest* pAllocationRequest);
5391 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5393 virtual VkResult CheckCorruption(
const void* pBlockData);
5396 const VmaAllocationRequest& request,
5397 VmaSuballocationType type,
5398 VkDeviceSize allocSize,
5402 virtual void FreeAtOffset(VkDeviceSize offset);
5407 bool IsBufferImageGranularityConflictPossible(
5408 VkDeviceSize bufferImageGranularity,
5409 VmaSuballocationType& inOutPrevSuballocType)
const;
5412 friend class VmaDefragmentationAlgorithm_Generic;
5413 friend class VmaDefragmentationAlgorithm_Fast;
5415 uint32_t m_FreeCount;
5416 VkDeviceSize m_SumFreeSize;
5417 VmaSuballocationList m_Suballocations;
5420 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
5422 bool ValidateFreeSuballocationList()
const;
5426 bool CheckAllocation(
5427 uint32_t currentFrameIndex,
5428 uint32_t frameInUseCount,
5429 VkDeviceSize bufferImageGranularity,
5430 VkDeviceSize allocSize,
5431 VkDeviceSize allocAlignment,
5432 VmaSuballocationType allocType,
5433 VmaSuballocationList::const_iterator suballocItem,
5434 bool canMakeOtherLost,
5435 VkDeviceSize* pOffset,
5436 size_t* itemsToMakeLostCount,
5437 VkDeviceSize* pSumFreeSize,
5438 VkDeviceSize* pSumItemSize)
const;
5440 void MergeFreeWithNext(VmaSuballocationList::iterator item);
5444 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
5447 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
5450 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
5531 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
5533 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
5536 virtual ~VmaBlockMetadata_Linear();
5537 virtual void Init(VkDeviceSize size);
5539 virtual bool Validate()
const;
5540 virtual size_t GetAllocationCount()
const;
5541 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5542 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5543 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
5545 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5546 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5548 #if VMA_STATS_STRING_ENABLED 5549 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5552 virtual bool CreateAllocationRequest(
5553 uint32_t currentFrameIndex,
5554 uint32_t frameInUseCount,
5555 VkDeviceSize bufferImageGranularity,
5556 VkDeviceSize allocSize,
5557 VkDeviceSize allocAlignment,
5559 VmaSuballocationType allocType,
5560 bool canMakeOtherLost,
5562 VmaAllocationRequest* pAllocationRequest);
5564 virtual bool MakeRequestedAllocationsLost(
5565 uint32_t currentFrameIndex,
5566 uint32_t frameInUseCount,
5567 VmaAllocationRequest* pAllocationRequest);
5569 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5571 virtual VkResult CheckCorruption(
const void* pBlockData);
5574 const VmaAllocationRequest& request,
5575 VmaSuballocationType type,
5576 VkDeviceSize allocSize,
5580 virtual void FreeAtOffset(VkDeviceSize offset);
5590 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
5592 enum SECOND_VECTOR_MODE
5594 SECOND_VECTOR_EMPTY,
5599 SECOND_VECTOR_RING_BUFFER,
5605 SECOND_VECTOR_DOUBLE_STACK,
5608 VkDeviceSize m_SumFreeSize;
5609 SuballocationVectorType m_Suballocations0, m_Suballocations1;
5610 uint32_t m_1stVectorIndex;
5611 SECOND_VECTOR_MODE m_2ndVectorMode;
5613 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5614 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5615 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5616 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5619 size_t m_1stNullItemsBeginCount;
5621 size_t m_1stNullItemsMiddleCount;
5623 size_t m_2ndNullItemsCount;
5625 bool ShouldCompact1st()
const;
5626 void CleanupAfterFree();
5628 bool CreateAllocationRequest_LowerAddress(
5629 uint32_t currentFrameIndex,
5630 uint32_t frameInUseCount,
5631 VkDeviceSize bufferImageGranularity,
5632 VkDeviceSize allocSize,
5633 VkDeviceSize allocAlignment,
5634 VmaSuballocationType allocType,
5635 bool canMakeOtherLost,
5637 VmaAllocationRequest* pAllocationRequest);
5638 bool CreateAllocationRequest_UpperAddress(
5639 uint32_t currentFrameIndex,
5640 uint32_t frameInUseCount,
5641 VkDeviceSize bufferImageGranularity,
5642 VkDeviceSize allocSize,
5643 VkDeviceSize allocAlignment,
5644 VmaSuballocationType allocType,
5645 bool canMakeOtherLost,
5647 VmaAllocationRequest* pAllocationRequest);
5661 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5663 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5666 virtual ~VmaBlockMetadata_Buddy();
5667 virtual void Init(VkDeviceSize size);
5669 virtual bool Validate()
const;
5670 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
5671 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5672 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5673 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5675 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5676 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5678 #if VMA_STATS_STRING_ENABLED 5679 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5682 virtual bool CreateAllocationRequest(
5683 uint32_t currentFrameIndex,
5684 uint32_t frameInUseCount,
5685 VkDeviceSize bufferImageGranularity,
5686 VkDeviceSize allocSize,
5687 VkDeviceSize allocAlignment,
5689 VmaSuballocationType allocType,
5690 bool canMakeOtherLost,
5692 VmaAllocationRequest* pAllocationRequest);
5694 virtual bool MakeRequestedAllocationsLost(
5695 uint32_t currentFrameIndex,
5696 uint32_t frameInUseCount,
5697 VmaAllocationRequest* pAllocationRequest);
5699 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5701 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
5704 const VmaAllocationRequest& request,
5705 VmaSuballocationType type,
5706 VkDeviceSize allocSize,
5709 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5710 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
5713 static const VkDeviceSize MIN_NODE_SIZE = 32;
5714 static const size_t MAX_LEVELS = 30;
5716 struct ValidationContext
5718 size_t calculatedAllocationCount;
5719 size_t calculatedFreeCount;
5720 VkDeviceSize calculatedSumFreeSize;
5722 ValidationContext() :
5723 calculatedAllocationCount(0),
5724 calculatedFreeCount(0),
5725 calculatedSumFreeSize(0) { }
5730 VkDeviceSize offset;
5760 VkDeviceSize m_UsableSize;
5761 uint32_t m_LevelCount;
5767 } m_FreeList[MAX_LEVELS];
5769 size_t m_AllocationCount;
5773 VkDeviceSize m_SumFreeSize;
5775 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5776 void DeleteNode(Node* node);
5777 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
5778 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
5779 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
5781 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5782 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
5786 void AddToFreeListFront(uint32_t level, Node* node);
5790 void RemoveFromFreeList(uint32_t level, Node* node);
5792 #if VMA_STATS_STRING_ENABLED 5793 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
5803 class VmaDeviceMemoryBlock
5805 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5807 VmaBlockMetadata* m_pMetadata;
5811 ~VmaDeviceMemoryBlock()
5813 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5814 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5821 uint32_t newMemoryTypeIndex,
5822 VkDeviceMemory newMemory,
5823 VkDeviceSize newSize,
5825 uint32_t algorithm);
5829 VmaPool GetParentPool()
const {
return m_hParentPool; }
5830 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5831 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5832 uint32_t GetId()
const {
return m_Id; }
5833 void* GetMappedData()
const {
return m_pMappedData; }
5836 bool Validate()
const;
5841 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
5844 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5845 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5847 VkResult BindBufferMemory(
5851 VkResult BindImageMemory(
5858 uint32_t m_MemoryTypeIndex;
5860 VkDeviceMemory m_hMemory;
5868 uint32_t m_MapCount;
5869 void* m_pMappedData;
5872 struct VmaPointerLess
5874 bool operator()(
const void* lhs,
const void* rhs)
const 5880 struct VmaDefragmentationMove
5882 size_t srcBlockIndex;
5883 size_t dstBlockIndex;
5884 VkDeviceSize srcOffset;
5885 VkDeviceSize dstOffset;
5889 class VmaDefragmentationAlgorithm;
5897 struct VmaBlockVector
5899 VMA_CLASS_NO_COPY(VmaBlockVector)
5904 uint32_t memoryTypeIndex,
5905 VkDeviceSize preferredBlockSize,
5906 size_t minBlockCount,
5907 size_t maxBlockCount,
5908 VkDeviceSize bufferImageGranularity,
5909 uint32_t frameInUseCount,
5911 bool explicitBlockSize,
5912 uint32_t algorithm);
5915 VkResult CreateMinBlocks();
5917 VmaPool GetParentPool()
const {
return m_hParentPool; }
5918 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5919 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5920 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5921 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5922 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5926 bool IsEmpty()
const {
return m_Blocks.empty(); }
5927 bool IsCorruptionDetectionEnabled()
const;
5930 uint32_t currentFrameIndex,
5932 VkDeviceSize alignment,
5934 VmaSuballocationType suballocType,
5935 size_t allocationCount,
5944 #if VMA_STATS_STRING_ENABLED 5945 void PrintDetailedMap(
class VmaJsonWriter& json);
5948 void MakePoolAllocationsLost(
5949 uint32_t currentFrameIndex,
5950 size_t* pLostAllocationCount);
5951 VkResult CheckCorruption();
5955 class VmaBlockVectorDefragmentationContext* pCtx,
5957 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
5958 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
5959 VkCommandBuffer commandBuffer);
5960 void DefragmentationEnd(
5961 class VmaBlockVectorDefragmentationContext* pCtx,
5967 size_t GetBlockCount()
const {
return m_Blocks.size(); }
5968 VmaDeviceMemoryBlock* GetBlock(
size_t index)
const {
return m_Blocks[index]; }
5969 size_t CalcAllocationCount()
const;
5970 bool IsBufferImageGranularityConflictPossible()
const;
5973 friend class VmaDefragmentationAlgorithm_Generic;
5977 const uint32_t m_MemoryTypeIndex;
5978 const VkDeviceSize m_PreferredBlockSize;
5979 const size_t m_MinBlockCount;
5980 const size_t m_MaxBlockCount;
5981 const VkDeviceSize m_BufferImageGranularity;
5982 const uint32_t m_FrameInUseCount;
5983 const bool m_IsCustomPool;
5984 const bool m_ExplicitBlockSize;
5985 const uint32_t m_Algorithm;
5989 bool m_HasEmptyBlock;
5990 VMA_RW_MUTEX m_Mutex;
5992 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
5993 uint32_t m_NextBlockId;
5995 VkDeviceSize CalcMaxBlockSize()
const;
5998 void Remove(VmaDeviceMemoryBlock* pBlock);
6002 void IncrementallySortBlocks();
6004 VkResult AllocatePage(
6005 uint32_t currentFrameIndex,
6007 VkDeviceSize alignment,
6009 VmaSuballocationType suballocType,
6013 VkResult AllocateFromBlock(
6014 VmaDeviceMemoryBlock* pBlock,
6015 uint32_t currentFrameIndex,
6017 VkDeviceSize alignment,
6020 VmaSuballocationType suballocType,
6024 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
6027 void ApplyDefragmentationMovesCpu(
6028 class VmaBlockVectorDefragmentationContext* pDefragCtx,
6029 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves);
6031 void ApplyDefragmentationMovesGpu(
6032 class VmaBlockVectorDefragmentationContext* pDefragCtx,
6033 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6034 VkCommandBuffer commandBuffer);
6045 VMA_CLASS_NO_COPY(VmaPool_T)
6047 VmaBlockVector m_BlockVector;
6052 VkDeviceSize preferredBlockSize);
6055 uint32_t GetId()
const {
return m_Id; }
6056 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
6058 #if VMA_STATS_STRING_ENABLED 6073 class VmaDefragmentationAlgorithm
6075 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm)
6077 VmaDefragmentationAlgorithm(
6079 VmaBlockVector* pBlockVector,
6080 uint32_t currentFrameIndex) :
6081 m_hAllocator(hAllocator),
6082 m_pBlockVector(pBlockVector),
6083 m_CurrentFrameIndex(currentFrameIndex)
6086 virtual ~VmaDefragmentationAlgorithm()
6090 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) = 0;
6091 virtual void AddAll() = 0;
6093 virtual VkResult Defragment(
6094 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6095 VkDeviceSize maxBytesToMove,
6096 uint32_t maxAllocationsToMove) = 0;
6098 virtual VkDeviceSize GetBytesMoved()
const = 0;
6099 virtual uint32_t GetAllocationsMoved()
const = 0;
6103 VmaBlockVector*
const m_pBlockVector;
6104 const uint32_t m_CurrentFrameIndex;
6106 struct AllocationInfo
6109 VkBool32* m_pChanged;
6112 m_hAllocation(VK_NULL_HANDLE),
6113 m_pChanged(VMA_NULL)
6117 m_hAllocation(hAlloc),
6118 m_pChanged(pChanged)
6124 class VmaDefragmentationAlgorithm_Generic :
public VmaDefragmentationAlgorithm
6126 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Generic)
6128 VmaDefragmentationAlgorithm_Generic(
6130 VmaBlockVector* pBlockVector,
6131 uint32_t currentFrameIndex,
6132 bool overlappingMoveSupported);
6133 virtual ~VmaDefragmentationAlgorithm_Generic();
6135 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6136 virtual void AddAll() { m_AllAllocations =
true; }
6138 virtual VkResult Defragment(
6139 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6140 VkDeviceSize maxBytesToMove,
6141 uint32_t maxAllocationsToMove);
6143 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6144 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6147 uint32_t m_AllocationCount;
6148 bool m_AllAllocations;
6150 VkDeviceSize m_BytesMoved;
6151 uint32_t m_AllocationsMoved;
6153 struct AllocationInfoSizeGreater
6155 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 6157 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
6161 struct AllocationInfoOffsetGreater
6163 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 6165 return lhs.m_hAllocation->GetOffset() > rhs.m_hAllocation->GetOffset();
6171 size_t m_OriginalBlockIndex;
6172 VmaDeviceMemoryBlock* m_pBlock;
6173 bool m_HasNonMovableAllocations;
6174 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
6176 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
6177 m_OriginalBlockIndex(SIZE_MAX),
6179 m_HasNonMovableAllocations(true),
6180 m_Allocations(pAllocationCallbacks)
6184 void CalcHasNonMovableAllocations()
6186 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
6187 const size_t defragmentAllocCount = m_Allocations.size();
6188 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
6191 void SortAllocationsBySizeDescending()
6193 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
6196 void SortAllocationsByOffsetDescending()
6198 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoOffsetGreater());
6202 struct BlockPointerLess
6204 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 6206 return pLhsBlockInfo->m_pBlock < pRhsBlock;
6208 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6210 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
6216 struct BlockInfoCompareMoveDestination
6218 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 6220 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
6224 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
6228 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
6236 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
6237 BlockInfoVector m_Blocks;
6239 VkResult DefragmentRound(
6240 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6241 VkDeviceSize maxBytesToMove,
6242 uint32_t maxAllocationsToMove);
6244 size_t CalcBlocksWithNonMovableCount()
const;
6246 static bool MoveMakesSense(
6247 size_t dstBlockIndex, VkDeviceSize dstOffset,
6248 size_t srcBlockIndex, VkDeviceSize srcOffset);
6251 class VmaDefragmentationAlgorithm_Fast :
public VmaDefragmentationAlgorithm
6253 VMA_CLASS_NO_COPY(VmaDefragmentationAlgorithm_Fast)
6255 VmaDefragmentationAlgorithm_Fast(
6257 VmaBlockVector* pBlockVector,
6258 uint32_t currentFrameIndex,
6259 bool overlappingMoveSupported);
6260 virtual ~VmaDefragmentationAlgorithm_Fast();
6262 virtual void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged) { ++m_AllocationCount; }
6263 virtual void AddAll() { m_AllAllocations =
true; }
6265 virtual VkResult Defragment(
6266 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
6267 VkDeviceSize maxBytesToMove,
6268 uint32_t maxAllocationsToMove);
6270 virtual VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
6271 virtual uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
6276 size_t origBlockIndex;
6279 class FreeSpaceDatabase
6285 s.blockInfoIndex = SIZE_MAX;
6286 for(
size_t i = 0; i < MAX_COUNT; ++i)
6288 m_FreeSpaces[i] = s;
6292 void Register(
size_t blockInfoIndex, VkDeviceSize offset, VkDeviceSize size)
6294 if(size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6300 size_t bestIndex = SIZE_MAX;
6301 for(
size_t i = 0; i < MAX_COUNT; ++i)
6304 if(m_FreeSpaces[i].blockInfoIndex == SIZE_MAX)
6309 if(m_FreeSpaces[i].size < size &&
6310 (bestIndex == SIZE_MAX || m_FreeSpaces[bestIndex].size > m_FreeSpaces[i].size))
6316 if(bestIndex != SIZE_MAX)
6318 m_FreeSpaces[bestIndex].blockInfoIndex = blockInfoIndex;
6319 m_FreeSpaces[bestIndex].offset = offset;
6320 m_FreeSpaces[bestIndex].size = size;
6324 bool Fetch(VkDeviceSize alignment, VkDeviceSize size,
6325 size_t& outBlockInfoIndex, VkDeviceSize& outDstOffset)
6327 size_t bestIndex = SIZE_MAX;
6328 VkDeviceSize bestFreeSpaceAfter = 0;
6329 for(
size_t i = 0; i < MAX_COUNT; ++i)
6332 if(m_FreeSpaces[i].blockInfoIndex != SIZE_MAX)
6334 const VkDeviceSize dstOffset = VmaAlignUp(m_FreeSpaces[i].offset, alignment);
6336 if(dstOffset + size <= m_FreeSpaces[i].offset + m_FreeSpaces[i].size)
6338 const VkDeviceSize freeSpaceAfter = (m_FreeSpaces[i].offset + m_FreeSpaces[i].size) -
6340 if(bestIndex == SIZE_MAX || freeSpaceAfter > bestFreeSpaceAfter)
6343 bestFreeSpaceAfter = freeSpaceAfter;
6349 if(bestIndex != SIZE_MAX)
6351 outBlockInfoIndex = m_FreeSpaces[bestIndex].blockInfoIndex;
6352 outDstOffset = VmaAlignUp(m_FreeSpaces[bestIndex].offset, alignment);
6354 if(bestFreeSpaceAfter >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6357 const VkDeviceSize alignmentPlusSize = (outDstOffset - m_FreeSpaces[bestIndex].offset) + size;
6358 m_FreeSpaces[bestIndex].offset += alignmentPlusSize;
6359 m_FreeSpaces[bestIndex].size -= alignmentPlusSize;
6364 m_FreeSpaces[bestIndex].blockInfoIndex = SIZE_MAX;
6374 static const size_t MAX_COUNT = 4;
6378 size_t blockInfoIndex;
6379 VkDeviceSize offset;
6381 } m_FreeSpaces[MAX_COUNT];
6384 const bool m_OverlappingMoveSupported;
6386 uint32_t m_AllocationCount;
6387 bool m_AllAllocations;
6389 VkDeviceSize m_BytesMoved;
6390 uint32_t m_AllocationsMoved;
6392 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> > m_BlockInfos;
6394 void PreprocessMetadata();
6395 void PostprocessMetadata();
6396 void InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc);
6399 struct VmaBlockDefragmentationContext
6403 BLOCK_FLAG_USED = 0x00000001,
6409 class VmaBlockVectorDefragmentationContext
6411 VMA_CLASS_NO_COPY(VmaBlockVectorDefragmentationContext)
6415 VmaVector< VmaBlockDefragmentationContext, VmaStlAllocator<VmaBlockDefragmentationContext> > blockContexts;
6417 VmaBlockVectorDefragmentationContext(
6420 VmaBlockVector* pBlockVector,
6421 uint32_t currFrameIndex);
6422 ~VmaBlockVectorDefragmentationContext();
6424 VmaPool GetCustomPool()
const {
return m_hCustomPool; }
6425 VmaBlockVector* GetBlockVector()
const {
return m_pBlockVector; }
6426 VmaDefragmentationAlgorithm* GetAlgorithm()
const {
return m_pAlgorithm; }
6428 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
6429 void AddAll() { m_AllAllocations =
true; }
6431 void Begin(
bool overlappingMoveSupported);
6438 VmaBlockVector*
const m_pBlockVector;
6439 const uint32_t m_CurrFrameIndex;
6441 VmaDefragmentationAlgorithm* m_pAlgorithm;
6449 VmaVector< AllocInfo, VmaStlAllocator<AllocInfo> > m_Allocations;
6450 bool m_AllAllocations;
6453 struct VmaDefragmentationContext_T
6456 VMA_CLASS_NO_COPY(VmaDefragmentationContext_T)
6458 VmaDefragmentationContext_T(
6460 uint32_t currFrameIndex,
6463 ~VmaDefragmentationContext_T();
6465 void AddPools(uint32_t poolCount,
VmaPool* pPools);
6466 void AddAllocations(
6467 uint32_t allocationCount,
6469 VkBool32* pAllocationsChanged);
6477 VkResult Defragment(
6478 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
6479 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
6484 const uint32_t m_CurrFrameIndex;
6485 const uint32_t m_Flags;
6488 VmaBlockVectorDefragmentationContext* m_DefaultPoolContexts[VK_MAX_MEMORY_TYPES];
6490 VmaVector< VmaBlockVectorDefragmentationContext*, VmaStlAllocator<VmaBlockVectorDefragmentationContext*> > m_CustomPoolContexts;
6493 #if VMA_RECORDING_ENABLED 6500 void WriteConfiguration(
6501 const VkPhysicalDeviceProperties& devProps,
6502 const VkPhysicalDeviceMemoryProperties& memProps,
6503 bool dedicatedAllocationExtensionEnabled);
6506 void RecordCreateAllocator(uint32_t frameIndex);
6507 void RecordDestroyAllocator(uint32_t frameIndex);
6508 void RecordCreatePool(uint32_t frameIndex,
6511 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
6512 void RecordAllocateMemory(uint32_t frameIndex,
6513 const VkMemoryRequirements& vkMemReq,
6516 void RecordAllocateMemoryPages(uint32_t frameIndex,
6517 const VkMemoryRequirements& vkMemReq,
6519 uint64_t allocationCount,
6521 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
6522 const VkMemoryRequirements& vkMemReq,
6523 bool requiresDedicatedAllocation,
6524 bool prefersDedicatedAllocation,
6527 void RecordAllocateMemoryForImage(uint32_t frameIndex,
6528 const VkMemoryRequirements& vkMemReq,
6529 bool requiresDedicatedAllocation,
6530 bool prefersDedicatedAllocation,
6533 void RecordFreeMemory(uint32_t frameIndex,
6535 void RecordFreeMemoryPages(uint32_t frameIndex,
6536 uint64_t allocationCount,
6538 void RecordSetAllocationUserData(uint32_t frameIndex,
6540 const void* pUserData);
6541 void RecordCreateLostAllocation(uint32_t frameIndex,
6543 void RecordMapMemory(uint32_t frameIndex,
6545 void RecordUnmapMemory(uint32_t frameIndex,
6547 void RecordFlushAllocation(uint32_t frameIndex,
6548 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6549 void RecordInvalidateAllocation(uint32_t frameIndex,
6550 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
6551 void RecordCreateBuffer(uint32_t frameIndex,
6552 const VkBufferCreateInfo& bufCreateInfo,
6555 void RecordCreateImage(uint32_t frameIndex,
6556 const VkImageCreateInfo& imageCreateInfo,
6559 void RecordDestroyBuffer(uint32_t frameIndex,
6561 void RecordDestroyImage(uint32_t frameIndex,
6563 void RecordTouchAllocation(uint32_t frameIndex,
6565 void RecordGetAllocationInfo(uint32_t frameIndex,
6567 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
6569 void RecordDefragmentationBegin(uint32_t frameIndex,
6572 void RecordDefragmentationEnd(uint32_t frameIndex,
6582 class UserDataString
6586 const char* GetString()
const {
return m_Str; }
6596 VMA_MUTEX m_FileMutex;
6598 int64_t m_StartCounter;
6600 void GetBasicParams(CallParams& outParams);
6603 template<
typename T>
6604 void PrintPointerList(uint64_t count,
const T* pItems)
6608 fprintf(m_File,
"%p", pItems[0]);
6609 for(uint64_t i = 1; i < count; ++i)
6611 fprintf(m_File,
" %p", pItems[i]);
6616 void PrintPointerList(uint64_t count,
const VmaAllocation* pItems);
6620 #endif // #if VMA_RECORDING_ENABLED 6625 class VmaAllocationObjectAllocator
6627 VMA_CLASS_NO_COPY(VmaAllocationObjectAllocator)
6629 VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks);
6636 VmaPoolAllocator<VmaAllocation_T> m_Allocator;
6640 struct VmaAllocator_T
6642 VMA_CLASS_NO_COPY(VmaAllocator_T)
6645 bool m_UseKhrDedicatedAllocation;
6647 bool m_AllocationCallbacksSpecified;
6648 VkAllocationCallbacks m_AllocationCallbacks;
6650 VmaAllocationObjectAllocator m_AllocationObjectAllocator;
6653 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
6654 VMA_MUTEX m_HeapSizeLimitMutex;
6656 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
6657 VkPhysicalDeviceMemoryProperties m_MemProps;
6660 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
6663 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
6664 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
6665 VMA_RW_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
6671 const VkAllocationCallbacks* GetAllocationCallbacks()
const 6673 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
6677 return m_VulkanFunctions;
6680 VkDeviceSize GetBufferImageGranularity()
const 6683 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
6684 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
6687 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
6688 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
6690 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 6692 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
6693 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
6696 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 6698 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
6699 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
6702 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 6704 return IsMemoryTypeNonCoherent(memTypeIndex) ?
6705 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
6706 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
6709 bool IsIntegratedGpu()
const 6711 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
6714 #if VMA_RECORDING_ENABLED 6715 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
6718 void GetBufferMemoryRequirements(
6720 VkMemoryRequirements& memReq,
6721 bool& requiresDedicatedAllocation,
6722 bool& prefersDedicatedAllocation)
const;
6723 void GetImageMemoryRequirements(
6725 VkMemoryRequirements& memReq,
6726 bool& requiresDedicatedAllocation,
6727 bool& prefersDedicatedAllocation)
const;
6730 VkResult AllocateMemory(
6731 const VkMemoryRequirements& vkMemReq,
6732 bool requiresDedicatedAllocation,
6733 bool prefersDedicatedAllocation,
6734 VkBuffer dedicatedBuffer,
6735 VkImage dedicatedImage,
6737 VmaSuballocationType suballocType,
6738 size_t allocationCount,
6743 size_t allocationCount,
6746 VkResult ResizeAllocation(
6748 VkDeviceSize newSize);
6750 void CalculateStats(
VmaStats* pStats);
6752 #if VMA_STATS_STRING_ENABLED 6753 void PrintDetailedMap(
class VmaJsonWriter& json);
6756 VkResult DefragmentationBegin(
6760 VkResult DefragmentationEnd(
6767 void DestroyPool(
VmaPool pool);
6770 void SetCurrentFrameIndex(uint32_t frameIndex);
6771 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
6773 void MakePoolAllocationsLost(
6775 size_t* pLostAllocationCount);
6776 VkResult CheckPoolCorruption(
VmaPool hPool);
6777 VkResult CheckCorruption(uint32_t memoryTypeBits);
6781 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
6782 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
6787 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
6788 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
6790 void FlushOrInvalidateAllocation(
6792 VkDeviceSize offset, VkDeviceSize size,
6793 VMA_CACHE_OPERATION op);
6795 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
6801 uint32_t GetGpuDefragmentationMemoryTypeBits();
6804 VkDeviceSize m_PreferredLargeHeapBlockSize;
6806 VkPhysicalDevice m_PhysicalDevice;
6807 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
6808 VMA_ATOMIC_UINT32 m_GpuDefragmentationMemoryTypeBits;
6810 VMA_RW_MUTEX m_PoolsMutex;
6812 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
6813 uint32_t m_NextPoolId;
6817 #if VMA_RECORDING_ENABLED 6818 VmaRecorder* m_pRecorder;
6823 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
6825 VkResult AllocateMemoryOfType(
6827 VkDeviceSize alignment,
6828 bool dedicatedAllocation,
6829 VkBuffer dedicatedBuffer,
6830 VkImage dedicatedImage,
6832 uint32_t memTypeIndex,
6833 VmaSuballocationType suballocType,
6834 size_t allocationCount,
6838 VkResult AllocateDedicatedMemoryPage(
6840 VmaSuballocationType suballocType,
6841 uint32_t memTypeIndex,
6842 const VkMemoryAllocateInfo& allocInfo,
6844 bool isUserDataString,
6849 VkResult AllocateDedicatedMemory(
6851 VmaSuballocationType suballocType,
6852 uint32_t memTypeIndex,
6854 bool isUserDataString,
6856 VkBuffer dedicatedBuffer,
6857 VkImage dedicatedImage,
6858 size_t allocationCount,
6867 uint32_t CalculateGpuDefragmentationMemoryTypeBits()
const;
6873 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
6875 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
6878 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
6880 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
6883 template<
typename T>
6886 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
6889 template<
typename T>
6890 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
6892 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
6895 template<
typename T>
6896 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
6901 VmaFree(hAllocator, ptr);
6905 template<
typename T>
6906 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
6910 for(
size_t i = count; i--; )
6912 VmaFree(hAllocator, ptr);
6919 #if VMA_STATS_STRING_ENABLED 6921 class VmaStringBuilder
6924 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
6925 size_t GetLength()
const {
return m_Data.size(); }
6926 const char* GetData()
const {
return m_Data.data(); }
6928 void Add(
char ch) { m_Data.push_back(ch); }
6929 void Add(
const char* pStr);
6930 void AddNewLine() { Add(
'\n'); }
6931 void AddNumber(uint32_t num);
6932 void AddNumber(uint64_t num);
6933 void AddPointer(
const void* ptr);
6936 VmaVector< char, VmaStlAllocator<char> > m_Data;
6939 void VmaStringBuilder::Add(
const char* pStr)
6941 const size_t strLen = strlen(pStr);
6944 const size_t oldCount = m_Data.size();
6945 m_Data.resize(oldCount + strLen);
6946 memcpy(m_Data.data() + oldCount, pStr, strLen);
6950 void VmaStringBuilder::AddNumber(uint32_t num)
6953 VmaUint32ToStr(buf,
sizeof(buf), num);
6957 void VmaStringBuilder::AddNumber(uint64_t num)
6960 VmaUint64ToStr(buf,
sizeof(buf), num);
6964 void VmaStringBuilder::AddPointer(
const void* ptr)
6967 VmaPtrToStr(buf,
sizeof(buf), ptr);
6971 #endif // #if VMA_STATS_STRING_ENABLED 6976 #if VMA_STATS_STRING_ENABLED 6980 VMA_CLASS_NO_COPY(VmaJsonWriter)
6982 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
6985 void BeginObject(
bool singleLine =
false);
6988 void BeginArray(
bool singleLine =
false);
6991 void WriteString(
const char* pStr);
6992 void BeginString(
const char* pStr = VMA_NULL);
6993 void ContinueString(
const char* pStr);
6994 void ContinueString(uint32_t n);
6995 void ContinueString(uint64_t n);
6996 void ContinueString_Pointer(
const void* ptr);
6997 void EndString(
const char* pStr = VMA_NULL);
6999 void WriteNumber(uint32_t n);
7000 void WriteNumber(uint64_t n);
7001 void WriteBool(
bool b);
7005 static const char*
const INDENT;
7007 enum COLLECTION_TYPE
7009 COLLECTION_TYPE_OBJECT,
7010 COLLECTION_TYPE_ARRAY,
7014 COLLECTION_TYPE type;
7015 uint32_t valueCount;
7016 bool singleLineMode;
7019 VmaStringBuilder& m_SB;
7020 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
7021 bool m_InsideString;
7023 void BeginValue(
bool isString);
7024 void WriteIndent(
bool oneLess =
false);
7027 const char*
const VmaJsonWriter::INDENT =
" ";
7029 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
7031 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
7032 m_InsideString(false)
7036 VmaJsonWriter::~VmaJsonWriter()
7038 VMA_ASSERT(!m_InsideString);
7039 VMA_ASSERT(m_Stack.empty());
7042 void VmaJsonWriter::BeginObject(
bool singleLine)
7044 VMA_ASSERT(!m_InsideString);
7050 item.type = COLLECTION_TYPE_OBJECT;
7051 item.valueCount = 0;
7052 item.singleLineMode = singleLine;
7053 m_Stack.push_back(item);
7056 void VmaJsonWriter::EndObject()
7058 VMA_ASSERT(!m_InsideString);
7063 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
7067 void VmaJsonWriter::BeginArray(
bool singleLine)
7069 VMA_ASSERT(!m_InsideString);
7075 item.type = COLLECTION_TYPE_ARRAY;
7076 item.valueCount = 0;
7077 item.singleLineMode = singleLine;
7078 m_Stack.push_back(item);
7081 void VmaJsonWriter::EndArray()
7083 VMA_ASSERT(!m_InsideString);
7088 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
7092 void VmaJsonWriter::WriteString(
const char* pStr)
7098 void VmaJsonWriter::BeginString(
const char* pStr)
7100 VMA_ASSERT(!m_InsideString);
7104 m_InsideString =
true;
7105 if(pStr != VMA_NULL && pStr[0] !=
'\0')
7107 ContinueString(pStr);
7111 void VmaJsonWriter::ContinueString(
const char* pStr)
7113 VMA_ASSERT(m_InsideString);
7115 const size_t strLen = strlen(pStr);
7116 for(
size_t i = 0; i < strLen; ++i)
7149 VMA_ASSERT(0 &&
"Character not currently supported.");
7155 void VmaJsonWriter::ContinueString(uint32_t n)
7157 VMA_ASSERT(m_InsideString);
7161 void VmaJsonWriter::ContinueString(uint64_t n)
7163 VMA_ASSERT(m_InsideString);
7167 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
7169 VMA_ASSERT(m_InsideString);
7170 m_SB.AddPointer(ptr);
7173 void VmaJsonWriter::EndString(
const char* pStr)
7175 VMA_ASSERT(m_InsideString);
7176 if(pStr != VMA_NULL && pStr[0] !=
'\0')
7178 ContinueString(pStr);
7181 m_InsideString =
false;
7184 void VmaJsonWriter::WriteNumber(uint32_t n)
7186 VMA_ASSERT(!m_InsideString);
7191 void VmaJsonWriter::WriteNumber(uint64_t n)
7193 VMA_ASSERT(!m_InsideString);
7198 void VmaJsonWriter::WriteBool(
bool b)
7200 VMA_ASSERT(!m_InsideString);
7202 m_SB.Add(b ?
"true" :
"false");
7205 void VmaJsonWriter::WriteNull()
7207 VMA_ASSERT(!m_InsideString);
7212 void VmaJsonWriter::BeginValue(
bool isString)
7214 if(!m_Stack.empty())
7216 StackItem& currItem = m_Stack.back();
7217 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7218 currItem.valueCount % 2 == 0)
7220 VMA_ASSERT(isString);
7223 if(currItem.type == COLLECTION_TYPE_OBJECT &&
7224 currItem.valueCount % 2 != 0)
7228 else if(currItem.valueCount > 0)
7237 ++currItem.valueCount;
7241 void VmaJsonWriter::WriteIndent(
bool oneLess)
7243 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
7247 size_t count = m_Stack.size();
7248 if(count > 0 && oneLess)
7252 for(
size_t i = 0; i < count; ++i)
7259 #endif // #if VMA_STATS_STRING_ENABLED 7263 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
7265 if(IsUserDataString())
7267 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
7269 FreeUserDataString(hAllocator);
7271 if(pUserData != VMA_NULL)
7273 const char*
const newStrSrc = (
char*)pUserData;
7274 const size_t newStrLen = strlen(newStrSrc);
7275 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
7276 memcpy(newStrDst, newStrSrc, newStrLen + 1);
7277 m_pUserData = newStrDst;
7282 m_pUserData = pUserData;
7286 void VmaAllocation_T::ChangeBlockAllocation(
7288 VmaDeviceMemoryBlock* block,
7289 VkDeviceSize offset)
7291 VMA_ASSERT(block != VMA_NULL);
7292 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7295 if(block != m_BlockAllocation.m_Block)
7297 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
7298 if(IsPersistentMap())
7300 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
7301 block->Map(hAllocator, mapRefCount, VMA_NULL);
7304 m_BlockAllocation.m_Block = block;
7305 m_BlockAllocation.m_Offset = offset;
7308 void VmaAllocation_T::ChangeOffset(VkDeviceSize newOffset)
7310 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
7311 m_BlockAllocation.m_Offset = newOffset;
7314 VkDeviceSize VmaAllocation_T::GetOffset()
const 7318 case ALLOCATION_TYPE_BLOCK:
7319 return m_BlockAllocation.m_Offset;
7320 case ALLOCATION_TYPE_DEDICATED:
7328 VkDeviceMemory VmaAllocation_T::GetMemory()
const 7332 case ALLOCATION_TYPE_BLOCK:
7333 return m_BlockAllocation.m_Block->GetDeviceMemory();
7334 case ALLOCATION_TYPE_DEDICATED:
7335 return m_DedicatedAllocation.m_hMemory;
7338 return VK_NULL_HANDLE;
7342 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 7346 case ALLOCATION_TYPE_BLOCK:
7347 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
7348 case ALLOCATION_TYPE_DEDICATED:
7349 return m_DedicatedAllocation.m_MemoryTypeIndex;
7356 void* VmaAllocation_T::GetMappedData()
const 7360 case ALLOCATION_TYPE_BLOCK:
7363 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
7364 VMA_ASSERT(pBlockData != VMA_NULL);
7365 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
7372 case ALLOCATION_TYPE_DEDICATED:
7373 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
7374 return m_DedicatedAllocation.m_pMappedData;
7381 bool VmaAllocation_T::CanBecomeLost()
const 7385 case ALLOCATION_TYPE_BLOCK:
7386 return m_BlockAllocation.m_CanBecomeLost;
7387 case ALLOCATION_TYPE_DEDICATED:
7395 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7397 VMA_ASSERT(CanBecomeLost());
7403 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
7406 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7411 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
7417 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
7427 #if VMA_STATS_STRING_ENABLED 7430 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
7439 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 7441 json.WriteString(
"Type");
7442 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
7444 json.WriteString(
"Size");
7445 json.WriteNumber(m_Size);
7447 if(m_pUserData != VMA_NULL)
7449 json.WriteString(
"UserData");
7450 if(IsUserDataString())
7452 json.WriteString((
const char*)m_pUserData);
7457 json.ContinueString_Pointer(m_pUserData);
7462 json.WriteString(
"CreationFrameIndex");
7463 json.WriteNumber(m_CreationFrameIndex);
7465 json.WriteString(
"LastUseFrameIndex");
7466 json.WriteNumber(GetLastUseFrameIndex());
7468 if(m_BufferImageUsage != 0)
7470 json.WriteString(
"Usage");
7471 json.WriteNumber(m_BufferImageUsage);
7477 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
7479 VMA_ASSERT(IsUserDataString());
7480 if(m_pUserData != VMA_NULL)
7482 char*
const oldStr = (
char*)m_pUserData;
7483 const size_t oldStrLen = strlen(oldStr);
7484 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
7485 m_pUserData = VMA_NULL;
7489 void VmaAllocation_T::BlockAllocMap()
7491 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7493 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7499 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
7503 void VmaAllocation_T::BlockAllocUnmap()
7505 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
7507 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7513 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
7517 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
7519 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7523 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
7525 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
7526 *ppData = m_DedicatedAllocation.m_pMappedData;
7532 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
7533 return VK_ERROR_MEMORY_MAP_FAILED;
7538 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
7539 hAllocator->m_hDevice,
7540 m_DedicatedAllocation.m_hMemory,
7545 if(result == VK_SUCCESS)
7547 m_DedicatedAllocation.m_pMappedData = *ppData;
7554 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
7556 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
7558 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
7563 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
7564 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
7565 hAllocator->m_hDevice,
7566 m_DedicatedAllocation.m_hMemory);
7571 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
7575 #if VMA_STATS_STRING_ENABLED 7577 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
7581 json.WriteString(
"Blocks");
7584 json.WriteString(
"Allocations");
7587 json.WriteString(
"UnusedRanges");
7590 json.WriteString(
"UsedBytes");
7593 json.WriteString(
"UnusedBytes");
7598 json.WriteString(
"AllocationSize");
7599 json.BeginObject(
true);
7600 json.WriteString(
"Min");
7602 json.WriteString(
"Avg");
7604 json.WriteString(
"Max");
7611 json.WriteString(
"UnusedRangeSize");
7612 json.BeginObject(
true);
7613 json.WriteString(
"Min");
7615 json.WriteString(
"Avg");
7617 json.WriteString(
"Max");
7625 #endif // #if VMA_STATS_STRING_ENABLED 7627 struct VmaSuballocationItemSizeLess
7630 const VmaSuballocationList::iterator lhs,
7631 const VmaSuballocationList::iterator rhs)
const 7633 return lhs->size < rhs->size;
7636 const VmaSuballocationList::iterator lhs,
7637 VkDeviceSize rhsSize)
const 7639 return lhs->size < rhsSize;
7647 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
7649 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
7653 #if VMA_STATS_STRING_ENABLED 7655 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
7656 VkDeviceSize unusedBytes,
7657 size_t allocationCount,
7658 size_t unusedRangeCount)
const 7662 json.WriteString(
"TotalBytes");
7663 json.WriteNumber(GetSize());
7665 json.WriteString(
"UnusedBytes");
7666 json.WriteNumber(unusedBytes);
7668 json.WriteString(
"Allocations");
7669 json.WriteNumber((uint64_t)allocationCount);
7671 json.WriteString(
"UnusedRanges");
7672 json.WriteNumber((uint64_t)unusedRangeCount);
7674 json.WriteString(
"Suballocations");
7678 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
7679 VkDeviceSize offset,
7682 json.BeginObject(
true);
7684 json.WriteString(
"Offset");
7685 json.WriteNumber(offset);
7687 hAllocation->PrintParameters(json);
7692 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
7693 VkDeviceSize offset,
7694 VkDeviceSize size)
const 7696 json.BeginObject(
true);
7698 json.WriteString(
"Offset");
7699 json.WriteNumber(offset);
7701 json.WriteString(
"Type");
7702 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
7704 json.WriteString(
"Size");
7705 json.WriteNumber(size);
7710 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const 7716 #endif // #if VMA_STATS_STRING_ENABLED 7721 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
7722 VmaBlockMetadata(hAllocator),
7725 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7726 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
7730 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
7734 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
7736 VmaBlockMetadata::Init(size);
7739 m_SumFreeSize = size;
7741 VmaSuballocation suballoc = {};
7742 suballoc.offset = 0;
7743 suballoc.size = size;
7744 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7745 suballoc.hAllocation = VK_NULL_HANDLE;
7747 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
7748 m_Suballocations.push_back(suballoc);
7749 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
7751 m_FreeSuballocationsBySize.push_back(suballocItem);
7754 bool VmaBlockMetadata_Generic::Validate()
const 7756 VMA_VALIDATE(!m_Suballocations.empty());
7759 VkDeviceSize calculatedOffset = 0;
7761 uint32_t calculatedFreeCount = 0;
7763 VkDeviceSize calculatedSumFreeSize = 0;
7766 size_t freeSuballocationsToRegister = 0;
7768 bool prevFree =
false;
7770 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7771 suballocItem != m_Suballocations.cend();
7774 const VmaSuballocation& subAlloc = *suballocItem;
7777 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
7779 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
7781 VMA_VALIDATE(!prevFree || !currFree);
7783 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
7787 calculatedSumFreeSize += subAlloc.size;
7788 ++calculatedFreeCount;
7789 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7791 ++freeSuballocationsToRegister;
7795 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
7799 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
7800 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
7803 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
7806 calculatedOffset += subAlloc.size;
7807 prevFree = currFree;
7812 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
7814 VkDeviceSize lastSize = 0;
7815 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
7817 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
7820 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7822 VMA_VALIDATE(suballocItem->size >= lastSize);
7824 lastSize = suballocItem->size;
7828 VMA_VALIDATE(ValidateFreeSuballocationList());
7829 VMA_VALIDATE(calculatedOffset == GetSize());
7830 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
7831 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
7836 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 7838 if(!m_FreeSuballocationsBySize.empty())
7840 return m_FreeSuballocationsBySize.back()->size;
7848 bool VmaBlockMetadata_Generic::IsEmpty()
const 7850 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
7853 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 7857 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7869 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7870 suballocItem != m_Suballocations.cend();
7873 const VmaSuballocation& suballoc = *suballocItem;
7874 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
7887 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 7889 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
7891 inoutStats.
size += GetSize();
7898 #if VMA_STATS_STRING_ENABLED 7900 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 7902 PrintDetailedMap_Begin(json,
7904 m_Suballocations.size() - (size_t)m_FreeCount,
7908 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7909 suballocItem != m_Suballocations.cend();
7910 ++suballocItem, ++i)
7912 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7914 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
7918 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
7922 PrintDetailedMap_End(json);
7925 #endif // #if VMA_STATS_STRING_ENABLED 7927 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
7928 uint32_t currentFrameIndex,
7929 uint32_t frameInUseCount,
7930 VkDeviceSize bufferImageGranularity,
7931 VkDeviceSize allocSize,
7932 VkDeviceSize allocAlignment,
7934 VmaSuballocationType allocType,
7935 bool canMakeOtherLost,
7937 VmaAllocationRequest* pAllocationRequest)
7939 VMA_ASSERT(allocSize > 0);
7940 VMA_ASSERT(!upperAddress);
7941 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7942 VMA_ASSERT(pAllocationRequest != VMA_NULL);
7943 VMA_HEAVY_ASSERT(Validate());
7945 pAllocationRequest->type = VmaAllocationRequestType::Normal;
7948 if(canMakeOtherLost ==
false &&
7949 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
7955 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
7956 if(freeSuballocCount > 0)
7961 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7962 m_FreeSuballocationsBySize.data(),
7963 m_FreeSuballocationsBySize.data() + freeSuballocCount,
7964 allocSize + 2 * VMA_DEBUG_MARGIN,
7965 VmaSuballocationItemSizeLess());
7966 size_t index = it - m_FreeSuballocationsBySize.data();
7967 for(; index < freeSuballocCount; ++index)
7972 bufferImageGranularity,
7976 m_FreeSuballocationsBySize[index],
7978 &pAllocationRequest->offset,
7979 &pAllocationRequest->itemsToMakeLostCount,
7980 &pAllocationRequest->sumFreeSize,
7981 &pAllocationRequest->sumItemSize))
7983 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
7988 else if(strategy == VMA_ALLOCATION_INTERNAL_STRATEGY_MIN_OFFSET)
7990 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7991 it != m_Suballocations.end();
7994 if(it->type == VMA_SUBALLOCATION_TYPE_FREE && CheckAllocation(
7997 bufferImageGranularity,
8003 &pAllocationRequest->offset,
8004 &pAllocationRequest->itemsToMakeLostCount,
8005 &pAllocationRequest->sumFreeSize,
8006 &pAllocationRequest->sumItemSize))
8008 pAllocationRequest->item = it;
8016 for(
size_t index = freeSuballocCount; index--; )
8021 bufferImageGranularity,
8025 m_FreeSuballocationsBySize[index],
8027 &pAllocationRequest->offset,
8028 &pAllocationRequest->itemsToMakeLostCount,
8029 &pAllocationRequest->sumFreeSize,
8030 &pAllocationRequest->sumItemSize))
8032 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
8039 if(canMakeOtherLost)
8044 VmaAllocationRequest tmpAllocRequest = {};
8045 tmpAllocRequest.type = VmaAllocationRequestType::Normal;
8046 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
8047 suballocIt != m_Suballocations.end();
8050 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
8051 suballocIt->hAllocation->CanBecomeLost())
8056 bufferImageGranularity,
8062 &tmpAllocRequest.offset,
8063 &tmpAllocRequest.itemsToMakeLostCount,
8064 &tmpAllocRequest.sumFreeSize,
8065 &tmpAllocRequest.sumItemSize))
8069 *pAllocationRequest = tmpAllocRequest;
8070 pAllocationRequest->item = suballocIt;
8073 if(!found || tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
8075 *pAllocationRequest = tmpAllocRequest;
8076 pAllocationRequest->item = suballocIt;
8089 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
8090 uint32_t currentFrameIndex,
8091 uint32_t frameInUseCount,
8092 VmaAllocationRequest* pAllocationRequest)
8094 VMA_ASSERT(pAllocationRequest && pAllocationRequest->type == VmaAllocationRequestType::Normal);
8096 while(pAllocationRequest->itemsToMakeLostCount > 0)
8098 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
8100 ++pAllocationRequest->item;
8102 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
8103 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
8104 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
8105 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8107 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
8108 --pAllocationRequest->itemsToMakeLostCount;
8116 VMA_HEAVY_ASSERT(Validate());
8117 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
8118 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
8123 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
8125 uint32_t lostAllocationCount = 0;
8126 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8127 it != m_Suballocations.end();
8130 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
8131 it->hAllocation->CanBecomeLost() &&
8132 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8134 it = FreeSuballocation(it);
8135 ++lostAllocationCount;
8138 return lostAllocationCount;
8141 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
8143 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
8144 it != m_Suballocations.end();
8147 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
8149 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
8151 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
8152 return VK_ERROR_VALIDATION_FAILED_EXT;
8154 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
8156 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8157 return VK_ERROR_VALIDATION_FAILED_EXT;
8165 void VmaBlockMetadata_Generic::Alloc(
8166 const VmaAllocationRequest& request,
8167 VmaSuballocationType type,
8168 VkDeviceSize allocSize,
8171 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
8172 VMA_ASSERT(request.item != m_Suballocations.end());
8173 VmaSuballocation& suballoc = *request.item;
8175 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8177 VMA_ASSERT(request.offset >= suballoc.offset);
8178 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
8179 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
8180 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
8184 UnregisterFreeSuballocation(request.item);
8186 suballoc.offset = request.offset;
8187 suballoc.size = allocSize;
8188 suballoc.type = type;
8189 suballoc.hAllocation = hAllocation;
8194 VmaSuballocation paddingSuballoc = {};
8195 paddingSuballoc.offset = request.offset + allocSize;
8196 paddingSuballoc.size = paddingEnd;
8197 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8198 VmaSuballocationList::iterator next = request.item;
8200 const VmaSuballocationList::iterator paddingEndItem =
8201 m_Suballocations.insert(next, paddingSuballoc);
8202 RegisterFreeSuballocation(paddingEndItem);
8208 VmaSuballocation paddingSuballoc = {};
8209 paddingSuballoc.offset = request.offset - paddingBegin;
8210 paddingSuballoc.size = paddingBegin;
8211 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8212 const VmaSuballocationList::iterator paddingBeginItem =
8213 m_Suballocations.insert(request.item, paddingSuballoc);
8214 RegisterFreeSuballocation(paddingBeginItem);
8218 m_FreeCount = m_FreeCount - 1;
8219 if(paddingBegin > 0)
8227 m_SumFreeSize -= allocSize;
8230 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
8232 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8233 suballocItem != m_Suballocations.end();
8236 VmaSuballocation& suballoc = *suballocItem;
8237 if(suballoc.hAllocation == allocation)
8239 FreeSuballocation(suballocItem);
8240 VMA_HEAVY_ASSERT(Validate());
8244 VMA_ASSERT(0 &&
"Not found!");
8247 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
8249 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
8250 suballocItem != m_Suballocations.end();
8253 VmaSuballocation& suballoc = *suballocItem;
8254 if(suballoc.offset == offset)
8256 FreeSuballocation(suballocItem);
8260 VMA_ASSERT(0 &&
"Not found!");
8263 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 8265 VkDeviceSize lastSize = 0;
8266 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
8268 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
8270 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
8271 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
8272 VMA_VALIDATE(it->size >= lastSize);
8273 lastSize = it->size;
8278 bool VmaBlockMetadata_Generic::CheckAllocation(
8279 uint32_t currentFrameIndex,
8280 uint32_t frameInUseCount,
8281 VkDeviceSize bufferImageGranularity,
8282 VkDeviceSize allocSize,
8283 VkDeviceSize allocAlignment,
8284 VmaSuballocationType allocType,
8285 VmaSuballocationList::const_iterator suballocItem,
8286 bool canMakeOtherLost,
8287 VkDeviceSize* pOffset,
8288 size_t* itemsToMakeLostCount,
8289 VkDeviceSize* pSumFreeSize,
8290 VkDeviceSize* pSumItemSize)
const 8292 VMA_ASSERT(allocSize > 0);
8293 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8294 VMA_ASSERT(suballocItem != m_Suballocations.cend());
8295 VMA_ASSERT(pOffset != VMA_NULL);
8297 *itemsToMakeLostCount = 0;
8301 if(canMakeOtherLost)
8303 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8305 *pSumFreeSize = suballocItem->size;
8309 if(suballocItem->hAllocation->CanBecomeLost() &&
8310 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8312 ++*itemsToMakeLostCount;
8313 *pSumItemSize = suballocItem->size;
8322 if(GetSize() - suballocItem->offset < allocSize)
8328 *pOffset = suballocItem->offset;
8331 if(VMA_DEBUG_MARGIN > 0)
8333 *pOffset += VMA_DEBUG_MARGIN;
8337 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
8341 if(bufferImageGranularity > 1)
8343 bool bufferImageGranularityConflict =
false;
8344 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8345 while(prevSuballocItem != m_Suballocations.cbegin())
8348 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8349 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8351 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8353 bufferImageGranularityConflict =
true;
8361 if(bufferImageGranularityConflict)
8363 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
8369 if(*pOffset >= suballocItem->offset + suballocItem->size)
8375 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
8378 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
8380 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
8382 if(suballocItem->offset + totalSize > GetSize())
8389 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
8390 if(totalSize > suballocItem->size)
8392 VkDeviceSize remainingSize = totalSize - suballocItem->size;
8393 while(remainingSize > 0)
8396 if(lastSuballocItem == m_Suballocations.cend())
8400 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8402 *pSumFreeSize += lastSuballocItem->size;
8406 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
8407 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
8408 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8410 ++*itemsToMakeLostCount;
8411 *pSumItemSize += lastSuballocItem->size;
8418 remainingSize = (lastSuballocItem->size < remainingSize) ?
8419 remainingSize - lastSuballocItem->size : 0;
8425 if(bufferImageGranularity > 1)
8427 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
8429 while(nextSuballocItem != m_Suballocations.cend())
8431 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8432 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8434 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8436 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
8437 if(nextSuballoc.hAllocation->CanBecomeLost() &&
8438 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8440 ++*itemsToMakeLostCount;
8459 const VmaSuballocation& suballoc = *suballocItem;
8460 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8462 *pSumFreeSize = suballoc.size;
8465 if(suballoc.size < allocSize)
8471 *pOffset = suballoc.offset;
8474 if(VMA_DEBUG_MARGIN > 0)
8476 *pOffset += VMA_DEBUG_MARGIN;
8480 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
8484 if(bufferImageGranularity > 1)
8486 bool bufferImageGranularityConflict =
false;
8487 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
8488 while(prevSuballocItem != m_Suballocations.cbegin())
8491 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
8492 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
8494 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8496 bufferImageGranularityConflict =
true;
8504 if(bufferImageGranularityConflict)
8506 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
8511 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
8514 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
8517 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
8524 if(bufferImageGranularity > 1)
8526 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
8528 while(nextSuballocItem != m_Suballocations.cend())
8530 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
8531 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8533 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
8552 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
8554 VMA_ASSERT(item != m_Suballocations.end());
8555 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8557 VmaSuballocationList::iterator nextItem = item;
8559 VMA_ASSERT(nextItem != m_Suballocations.end());
8560 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
8562 item->size += nextItem->size;
8564 m_Suballocations.erase(nextItem);
8567 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
8570 VmaSuballocation& suballoc = *suballocItem;
8571 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8572 suballoc.hAllocation = VK_NULL_HANDLE;
8576 m_SumFreeSize += suballoc.size;
8579 bool mergeWithNext =
false;
8580 bool mergeWithPrev =
false;
8582 VmaSuballocationList::iterator nextItem = suballocItem;
8584 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
8586 mergeWithNext =
true;
8589 VmaSuballocationList::iterator prevItem = suballocItem;
8590 if(suballocItem != m_Suballocations.begin())
8593 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
8595 mergeWithPrev =
true;
8601 UnregisterFreeSuballocation(nextItem);
8602 MergeFreeWithNext(suballocItem);
8607 UnregisterFreeSuballocation(prevItem);
8608 MergeFreeWithNext(prevItem);
8609 RegisterFreeSuballocation(prevItem);
8614 RegisterFreeSuballocation(suballocItem);
8615 return suballocItem;
8619 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
8621 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8622 VMA_ASSERT(item->size > 0);
8626 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8628 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8630 if(m_FreeSuballocationsBySize.empty())
8632 m_FreeSuballocationsBySize.push_back(item);
8636 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
8644 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
8646 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
8647 VMA_ASSERT(item->size > 0);
8651 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
8653 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
8655 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
8656 m_FreeSuballocationsBySize.data(),
8657 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
8659 VmaSuballocationItemSizeLess());
8660 for(
size_t index = it - m_FreeSuballocationsBySize.data();
8661 index < m_FreeSuballocationsBySize.size();
8664 if(m_FreeSuballocationsBySize[index] == item)
8666 VmaVectorRemove(m_FreeSuballocationsBySize, index);
8669 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
8671 VMA_ASSERT(0 &&
"Not found.");
8677 bool VmaBlockMetadata_Generic::IsBufferImageGranularityConflictPossible(
8678 VkDeviceSize bufferImageGranularity,
8679 VmaSuballocationType& inOutPrevSuballocType)
const 8681 if(bufferImageGranularity == 1 || IsEmpty())
8686 VkDeviceSize minAlignment = VK_WHOLE_SIZE;
8687 bool typeConflictFound =
false;
8688 for(VmaSuballocationList::const_iterator it = m_Suballocations.cbegin();
8689 it != m_Suballocations.cend();
8692 const VmaSuballocationType suballocType = it->type;
8693 if(suballocType != VMA_SUBALLOCATION_TYPE_FREE)
8695 minAlignment = VMA_MIN(minAlignment, it->hAllocation->GetAlignment());
8696 if(VmaIsBufferImageGranularityConflict(inOutPrevSuballocType, suballocType))
8698 typeConflictFound =
true;
8700 inOutPrevSuballocType = suballocType;
8704 return typeConflictFound || minAlignment >= bufferImageGranularity;
8710 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
8711 VmaBlockMetadata(hAllocator),
8713 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8714 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
8715 m_1stVectorIndex(0),
8716 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
8717 m_1stNullItemsBeginCount(0),
8718 m_1stNullItemsMiddleCount(0),
8719 m_2ndNullItemsCount(0)
8723 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
8727 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
8729 VmaBlockMetadata::Init(size);
8730 m_SumFreeSize = size;
8733 bool VmaBlockMetadata_Linear::Validate()
const 8735 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8736 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8738 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
8739 VMA_VALIDATE(!suballocations1st.empty() ||
8740 suballocations2nd.empty() ||
8741 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
8743 if(!suballocations1st.empty())
8746 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
8748 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
8750 if(!suballocations2nd.empty())
8753 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
8756 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
8757 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
8759 VkDeviceSize sumUsedSize = 0;
8760 const size_t suballoc1stCount = suballocations1st.size();
8761 VkDeviceSize offset = VMA_DEBUG_MARGIN;
8763 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8765 const size_t suballoc2ndCount = suballocations2nd.size();
8766 size_t nullItem2ndCount = 0;
8767 for(
size_t i = 0; i < suballoc2ndCount; ++i)
8769 const VmaSuballocation& suballoc = suballocations2nd[i];
8770 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8772 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8773 VMA_VALIDATE(suballoc.offset >= offset);
8777 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8778 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8779 sumUsedSize += suballoc.size;
8786 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8789 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
8792 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
8794 const VmaSuballocation& suballoc = suballocations1st[i];
8795 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
8796 suballoc.hAllocation == VK_NULL_HANDLE);
8799 size_t nullItem1stCount = m_1stNullItemsBeginCount;
8801 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
8803 const VmaSuballocation& suballoc = suballocations1st[i];
8804 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8806 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8807 VMA_VALIDATE(suballoc.offset >= offset);
8808 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
8812 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8813 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8814 sumUsedSize += suballoc.size;
8821 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8823 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
8825 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8827 const size_t suballoc2ndCount = suballocations2nd.size();
8828 size_t nullItem2ndCount = 0;
8829 for(
size_t i = suballoc2ndCount; i--; )
8831 const VmaSuballocation& suballoc = suballocations2nd[i];
8832 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
8834 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
8835 VMA_VALIDATE(suballoc.offset >= offset);
8839 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
8840 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
8841 sumUsedSize += suballoc.size;
8848 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
8851 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
8854 VMA_VALIDATE(offset <= GetSize());
8855 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
8860 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 8862 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
8863 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
8866 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 8868 const VkDeviceSize size = GetSize();
8880 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8882 switch(m_2ndVectorMode)
8884 case SECOND_VECTOR_EMPTY:
8890 const size_t suballocations1stCount = suballocations1st.size();
8891 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
8892 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
8893 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
8895 firstSuballoc.offset,
8896 size - (lastSuballoc.offset + lastSuballoc.size));
8900 case SECOND_VECTOR_RING_BUFFER:
8905 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8906 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
8907 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
8908 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
8912 case SECOND_VECTOR_DOUBLE_STACK:
8917 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8918 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
8919 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
8920 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
8930 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 8932 const VkDeviceSize size = GetSize();
8933 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8934 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8935 const size_t suballoc1stCount = suballocations1st.size();
8936 const size_t suballoc2ndCount = suballocations2nd.size();
8947 VkDeviceSize lastOffset = 0;
8949 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8951 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8952 size_t nextAlloc2ndIndex = 0;
8953 while(lastOffset < freeSpace2ndTo1stEnd)
8956 while(nextAlloc2ndIndex < suballoc2ndCount &&
8957 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8959 ++nextAlloc2ndIndex;
8963 if(nextAlloc2ndIndex < suballoc2ndCount)
8965 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8968 if(lastOffset < suballoc.offset)
8971 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8985 lastOffset = suballoc.offset + suballoc.size;
8986 ++nextAlloc2ndIndex;
8992 if(lastOffset < freeSpace2ndTo1stEnd)
8994 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9002 lastOffset = freeSpace2ndTo1stEnd;
9007 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9008 const VkDeviceSize freeSpace1stTo2ndEnd =
9009 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9010 while(lastOffset < freeSpace1stTo2ndEnd)
9013 while(nextAlloc1stIndex < suballoc1stCount &&
9014 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9016 ++nextAlloc1stIndex;
9020 if(nextAlloc1stIndex < suballoc1stCount)
9022 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9025 if(lastOffset < suballoc.offset)
9028 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9042 lastOffset = suballoc.offset + suballoc.size;
9043 ++nextAlloc1stIndex;
9049 if(lastOffset < freeSpace1stTo2ndEnd)
9051 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9059 lastOffset = freeSpace1stTo2ndEnd;
9063 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9065 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9066 while(lastOffset < size)
9069 while(nextAlloc2ndIndex != SIZE_MAX &&
9070 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9072 --nextAlloc2ndIndex;
9076 if(nextAlloc2ndIndex != SIZE_MAX)
9078 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9081 if(lastOffset < suballoc.offset)
9084 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9098 lastOffset = suballoc.offset + suballoc.size;
9099 --nextAlloc2ndIndex;
9105 if(lastOffset < size)
9107 const VkDeviceSize unusedRangeSize = size - lastOffset;
9123 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 9125 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9126 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9127 const VkDeviceSize size = GetSize();
9128 const size_t suballoc1stCount = suballocations1st.size();
9129 const size_t suballoc2ndCount = suballocations2nd.size();
9131 inoutStats.
size += size;
9133 VkDeviceSize lastOffset = 0;
9135 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9137 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9138 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
9139 while(lastOffset < freeSpace2ndTo1stEnd)
9142 while(nextAlloc2ndIndex < suballoc2ndCount &&
9143 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9145 ++nextAlloc2ndIndex;
9149 if(nextAlloc2ndIndex < suballoc2ndCount)
9151 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9154 if(lastOffset < suballoc.offset)
9157 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9168 lastOffset = suballoc.offset + suballoc.size;
9169 ++nextAlloc2ndIndex;
9174 if(lastOffset < freeSpace2ndTo1stEnd)
9177 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9184 lastOffset = freeSpace2ndTo1stEnd;
9189 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9190 const VkDeviceSize freeSpace1stTo2ndEnd =
9191 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9192 while(lastOffset < freeSpace1stTo2ndEnd)
9195 while(nextAlloc1stIndex < suballoc1stCount &&
9196 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9198 ++nextAlloc1stIndex;
9202 if(nextAlloc1stIndex < suballoc1stCount)
9204 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9207 if(lastOffset < suballoc.offset)
9210 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9221 lastOffset = suballoc.offset + suballoc.size;
9222 ++nextAlloc1stIndex;
9227 if(lastOffset < freeSpace1stTo2ndEnd)
9230 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9237 lastOffset = freeSpace1stTo2ndEnd;
9241 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9243 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9244 while(lastOffset < size)
9247 while(nextAlloc2ndIndex != SIZE_MAX &&
9248 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9250 --nextAlloc2ndIndex;
9254 if(nextAlloc2ndIndex != SIZE_MAX)
9256 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9259 if(lastOffset < suballoc.offset)
9262 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9273 lastOffset = suballoc.offset + suballoc.size;
9274 --nextAlloc2ndIndex;
9279 if(lastOffset < size)
9282 const VkDeviceSize unusedRangeSize = size - lastOffset;
9295 #if VMA_STATS_STRING_ENABLED 9296 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 9298 const VkDeviceSize size = GetSize();
9299 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9300 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9301 const size_t suballoc1stCount = suballocations1st.size();
9302 const size_t suballoc2ndCount = suballocations2nd.size();
9306 size_t unusedRangeCount = 0;
9307 VkDeviceSize usedBytes = 0;
9309 VkDeviceSize lastOffset = 0;
9311 size_t alloc2ndCount = 0;
9312 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9314 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9315 size_t nextAlloc2ndIndex = 0;
9316 while(lastOffset < freeSpace2ndTo1stEnd)
9319 while(nextAlloc2ndIndex < suballoc2ndCount &&
9320 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9322 ++nextAlloc2ndIndex;
9326 if(nextAlloc2ndIndex < suballoc2ndCount)
9328 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9331 if(lastOffset < suballoc.offset)
9340 usedBytes += suballoc.size;
9343 lastOffset = suballoc.offset + suballoc.size;
9344 ++nextAlloc2ndIndex;
9349 if(lastOffset < freeSpace2ndTo1stEnd)
9356 lastOffset = freeSpace2ndTo1stEnd;
9361 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
9362 size_t alloc1stCount = 0;
9363 const VkDeviceSize freeSpace1stTo2ndEnd =
9364 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
9365 while(lastOffset < freeSpace1stTo2ndEnd)
9368 while(nextAlloc1stIndex < suballoc1stCount &&
9369 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9371 ++nextAlloc1stIndex;
9375 if(nextAlloc1stIndex < suballoc1stCount)
9377 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9380 if(lastOffset < suballoc.offset)
9389 usedBytes += suballoc.size;
9392 lastOffset = suballoc.offset + suballoc.size;
9393 ++nextAlloc1stIndex;
9398 if(lastOffset < size)
9405 lastOffset = freeSpace1stTo2ndEnd;
9409 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9411 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9412 while(lastOffset < size)
9415 while(nextAlloc2ndIndex != SIZE_MAX &&
9416 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9418 --nextAlloc2ndIndex;
9422 if(nextAlloc2ndIndex != SIZE_MAX)
9424 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9427 if(lastOffset < suballoc.offset)
9436 usedBytes += suballoc.size;
9439 lastOffset = suballoc.offset + suballoc.size;
9440 --nextAlloc2ndIndex;
9445 if(lastOffset < size)
9457 const VkDeviceSize unusedBytes = size - usedBytes;
9458 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
9463 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9465 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
9466 size_t nextAlloc2ndIndex = 0;
9467 while(lastOffset < freeSpace2ndTo1stEnd)
9470 while(nextAlloc2ndIndex < suballoc2ndCount &&
9471 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9473 ++nextAlloc2ndIndex;
9477 if(nextAlloc2ndIndex < suballoc2ndCount)
9479 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9482 if(lastOffset < suballoc.offset)
9485 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9486 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9491 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9494 lastOffset = suballoc.offset + suballoc.size;
9495 ++nextAlloc2ndIndex;
9500 if(lastOffset < freeSpace2ndTo1stEnd)
9503 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
9504 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9508 lastOffset = freeSpace2ndTo1stEnd;
9513 nextAlloc1stIndex = m_1stNullItemsBeginCount;
9514 while(lastOffset < freeSpace1stTo2ndEnd)
9517 while(nextAlloc1stIndex < suballoc1stCount &&
9518 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
9520 ++nextAlloc1stIndex;
9524 if(nextAlloc1stIndex < suballoc1stCount)
9526 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
9529 if(lastOffset < suballoc.offset)
9532 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9533 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9538 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9541 lastOffset = suballoc.offset + suballoc.size;
9542 ++nextAlloc1stIndex;
9547 if(lastOffset < freeSpace1stTo2ndEnd)
9550 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
9551 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9555 lastOffset = freeSpace1stTo2ndEnd;
9559 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9561 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
9562 while(lastOffset < size)
9565 while(nextAlloc2ndIndex != SIZE_MAX &&
9566 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
9568 --nextAlloc2ndIndex;
9572 if(nextAlloc2ndIndex != SIZE_MAX)
9574 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
9577 if(lastOffset < suballoc.offset)
9580 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
9581 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9586 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
9589 lastOffset = suballoc.offset + suballoc.size;
9590 --nextAlloc2ndIndex;
9595 if(lastOffset < size)
9598 const VkDeviceSize unusedRangeSize = size - lastOffset;
9599 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
9608 PrintDetailedMap_End(json);
9610 #endif // #if VMA_STATS_STRING_ENABLED 9612 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
9613 uint32_t currentFrameIndex,
9614 uint32_t frameInUseCount,
9615 VkDeviceSize bufferImageGranularity,
9616 VkDeviceSize allocSize,
9617 VkDeviceSize allocAlignment,
9619 VmaSuballocationType allocType,
9620 bool canMakeOtherLost,
9622 VmaAllocationRequest* pAllocationRequest)
9624 VMA_ASSERT(allocSize > 0);
9625 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
9626 VMA_ASSERT(pAllocationRequest != VMA_NULL);
9627 VMA_HEAVY_ASSERT(Validate());
9628 return upperAddress ?
9629 CreateAllocationRequest_UpperAddress(
9630 currentFrameIndex, frameInUseCount, bufferImageGranularity,
9631 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest) :
9632 CreateAllocationRequest_LowerAddress(
9633 currentFrameIndex, frameInUseCount, bufferImageGranularity,
9634 allocSize, allocAlignment, allocType, canMakeOtherLost, strategy, pAllocationRequest);
9637 bool VmaBlockMetadata_Linear::CreateAllocationRequest_UpperAddress(
9638 uint32_t currentFrameIndex,
9639 uint32_t frameInUseCount,
9640 VkDeviceSize bufferImageGranularity,
9641 VkDeviceSize allocSize,
9642 VkDeviceSize allocAlignment,
9643 VmaSuballocationType allocType,
9644 bool canMakeOtherLost,
9646 VmaAllocationRequest* pAllocationRequest)
9648 const VkDeviceSize size = GetSize();
9649 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9650 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9652 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9654 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
9659 if(allocSize > size)
9663 VkDeviceSize resultBaseOffset = size - allocSize;
9664 if(!suballocations2nd.empty())
9666 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9667 resultBaseOffset = lastSuballoc.offset - allocSize;
9668 if(allocSize > lastSuballoc.offset)
9675 VkDeviceSize resultOffset = resultBaseOffset;
9678 if(VMA_DEBUG_MARGIN > 0)
9680 if(resultOffset < VMA_DEBUG_MARGIN)
9684 resultOffset -= VMA_DEBUG_MARGIN;
9688 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
9692 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9694 bool bufferImageGranularityConflict =
false;
9695 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9697 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9698 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9700 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
9702 bufferImageGranularityConflict =
true;
9710 if(bufferImageGranularityConflict)
9712 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
9717 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
9718 suballocations1st.back().offset + suballocations1st.back().size :
9720 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
9724 if(bufferImageGranularity > 1)
9726 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9728 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9729 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9731 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
9745 pAllocationRequest->offset = resultOffset;
9746 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
9747 pAllocationRequest->sumItemSize = 0;
9749 pAllocationRequest->itemsToMakeLostCount = 0;
9750 pAllocationRequest->type = VmaAllocationRequestType::UpperAddress;
9757 bool VmaBlockMetadata_Linear::CreateAllocationRequest_LowerAddress(
9758 uint32_t currentFrameIndex,
9759 uint32_t frameInUseCount,
9760 VkDeviceSize bufferImageGranularity,
9761 VkDeviceSize allocSize,
9762 VkDeviceSize allocAlignment,
9763 VmaSuballocationType allocType,
9764 bool canMakeOtherLost,
9766 VmaAllocationRequest* pAllocationRequest)
9768 const VkDeviceSize size = GetSize();
9769 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9770 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9772 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9776 VkDeviceSize resultBaseOffset = 0;
9777 if(!suballocations1st.empty())
9779 const VmaSuballocation& lastSuballoc = suballocations1st.back();
9780 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9784 VkDeviceSize resultOffset = resultBaseOffset;
9787 if(VMA_DEBUG_MARGIN > 0)
9789 resultOffset += VMA_DEBUG_MARGIN;
9793 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
9797 if(bufferImageGranularity > 1 && !suballocations1st.empty())
9799 bool bufferImageGranularityConflict =
false;
9800 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
9802 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
9803 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9805 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9807 bufferImageGranularityConflict =
true;
9815 if(bufferImageGranularityConflict)
9817 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
9821 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
9822 suballocations2nd.back().offset : size;
9825 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
9829 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9831 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
9833 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
9834 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
9836 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
9850 pAllocationRequest->offset = resultOffset;
9851 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
9852 pAllocationRequest->sumItemSize = 0;
9854 pAllocationRequest->type = VmaAllocationRequestType::EndOf1st;
9855 pAllocationRequest->itemsToMakeLostCount = 0;
9862 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9864 VMA_ASSERT(!suballocations1st.empty());
9866 VkDeviceSize resultBaseOffset = 0;
9867 if(!suballocations2nd.empty())
9869 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
9870 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
9874 VkDeviceSize resultOffset = resultBaseOffset;
9877 if(VMA_DEBUG_MARGIN > 0)
9879 resultOffset += VMA_DEBUG_MARGIN;
9883 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
9887 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
9889 bool bufferImageGranularityConflict =
false;
9890 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
9892 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
9893 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
9895 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
9897 bufferImageGranularityConflict =
true;
9905 if(bufferImageGranularityConflict)
9907 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
9911 pAllocationRequest->itemsToMakeLostCount = 0;
9912 pAllocationRequest->sumItemSize = 0;
9913 size_t index1st = m_1stNullItemsBeginCount;
9915 if(canMakeOtherLost)
9917 while(index1st < suballocations1st.size() &&
9918 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
9921 const VmaSuballocation& suballoc = suballocations1st[index1st];
9922 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
9928 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
9929 if(suballoc.hAllocation->CanBecomeLost() &&
9930 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9932 ++pAllocationRequest->itemsToMakeLostCount;
9933 pAllocationRequest->sumItemSize += suballoc.size;
9945 if(bufferImageGranularity > 1)
9947 while(index1st < suballocations1st.size())
9949 const VmaSuballocation& suballoc = suballocations1st[index1st];
9950 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
9952 if(suballoc.hAllocation != VK_NULL_HANDLE)
9955 if(suballoc.hAllocation->CanBecomeLost() &&
9956 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
9958 ++pAllocationRequest->itemsToMakeLostCount;
9959 pAllocationRequest->sumItemSize += suballoc.size;
9977 if(index1st == suballocations1st.size() &&
9978 resultOffset + allocSize + VMA_DEBUG_MARGIN > size)
9981 VMA_DEBUG_LOG(
"Unsupported special case in custom pool with linear allocation algorithm used as ring buffer with allocations that can be lost.");
9986 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= size) ||
9987 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
9991 if(bufferImageGranularity > 1)
9993 for(
size_t nextSuballocIndex = index1st;
9994 nextSuballocIndex < suballocations1st.size();
9995 nextSuballocIndex++)
9997 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
9998 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
10000 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
10014 pAllocationRequest->offset = resultOffset;
10015 pAllocationRequest->sumFreeSize =
10016 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
10018 - pAllocationRequest->sumItemSize;
10019 pAllocationRequest->type = VmaAllocationRequestType::EndOf2nd;
10028 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
10029 uint32_t currentFrameIndex,
10030 uint32_t frameInUseCount,
10031 VmaAllocationRequest* pAllocationRequest)
10033 if(pAllocationRequest->itemsToMakeLostCount == 0)
10038 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
10041 SuballocationVectorType* suballocations = &AccessSuballocations1st();
10042 size_t index = m_1stNullItemsBeginCount;
10043 size_t madeLostCount = 0;
10044 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
10046 if(index == suballocations->size())
10050 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10052 suballocations = &AccessSuballocations2nd();
10056 VMA_ASSERT(!suballocations->empty());
10058 VmaSuballocation& suballoc = (*suballocations)[index];
10059 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10061 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
10062 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
10063 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10065 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10066 suballoc.hAllocation = VK_NULL_HANDLE;
10067 m_SumFreeSize += suballoc.size;
10068 if(suballocations == &AccessSuballocations1st())
10070 ++m_1stNullItemsMiddleCount;
10074 ++m_2ndNullItemsCount;
10086 CleanupAfterFree();
10092 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
10094 uint32_t lostAllocationCount = 0;
10096 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10097 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10099 VmaSuballocation& suballoc = suballocations1st[i];
10100 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10101 suballoc.hAllocation->CanBecomeLost() &&
10102 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10104 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10105 suballoc.hAllocation = VK_NULL_HANDLE;
10106 ++m_1stNullItemsMiddleCount;
10107 m_SumFreeSize += suballoc.size;
10108 ++lostAllocationCount;
10112 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10113 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10115 VmaSuballocation& suballoc = suballocations2nd[i];
10116 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
10117 suballoc.hAllocation->CanBecomeLost() &&
10118 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
10120 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10121 suballoc.hAllocation = VK_NULL_HANDLE;
10122 ++m_2ndNullItemsCount;
10123 m_SumFreeSize += suballoc.size;
10124 ++lostAllocationCount;
10128 if(lostAllocationCount)
10130 CleanupAfterFree();
10133 return lostAllocationCount;
10136 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
10138 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10139 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
10141 const VmaSuballocation& suballoc = suballocations1st[i];
10142 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10144 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10146 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10147 return VK_ERROR_VALIDATION_FAILED_EXT;
10149 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10151 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10152 return VK_ERROR_VALIDATION_FAILED_EXT;
10157 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10158 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
10160 const VmaSuballocation& suballoc = suballocations2nd[i];
10161 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
10163 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
10165 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
10166 return VK_ERROR_VALIDATION_FAILED_EXT;
10168 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
10170 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
10171 return VK_ERROR_VALIDATION_FAILED_EXT;
10179 void VmaBlockMetadata_Linear::Alloc(
10180 const VmaAllocationRequest& request,
10181 VmaSuballocationType type,
10182 VkDeviceSize allocSize,
10185 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
10187 switch(request.type)
10189 case VmaAllocationRequestType::UpperAddress:
10191 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
10192 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
10193 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10194 suballocations2nd.push_back(newSuballoc);
10195 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
10198 case VmaAllocationRequestType::EndOf1st:
10200 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10202 VMA_ASSERT(suballocations1st.empty() ||
10203 request.offset >= suballocations1st.back().offset + suballocations1st.back().size);
10205 VMA_ASSERT(request.offset + allocSize <= GetSize());
10207 suballocations1st.push_back(newSuballoc);
10210 case VmaAllocationRequestType::EndOf2nd:
10212 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10214 VMA_ASSERT(!suballocations1st.empty() &&
10215 request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset);
10216 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10218 switch(m_2ndVectorMode)
10220 case SECOND_VECTOR_EMPTY:
10222 VMA_ASSERT(suballocations2nd.empty());
10223 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
10225 case SECOND_VECTOR_RING_BUFFER:
10227 VMA_ASSERT(!suballocations2nd.empty());
10229 case SECOND_VECTOR_DOUBLE_STACK:
10230 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
10236 suballocations2nd.push_back(newSuballoc);
10240 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
10243 m_SumFreeSize -= newSuballoc.size;
10246 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
10248 FreeAtOffset(allocation->GetOffset());
10251 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
10253 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10254 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10256 if(!suballocations1st.empty())
10259 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
10260 if(firstSuballoc.offset == offset)
10262 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
10263 firstSuballoc.hAllocation = VK_NULL_HANDLE;
10264 m_SumFreeSize += firstSuballoc.size;
10265 ++m_1stNullItemsBeginCount;
10266 CleanupAfterFree();
10272 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
10273 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
10275 VmaSuballocation& lastSuballoc = suballocations2nd.back();
10276 if(lastSuballoc.offset == offset)
10278 m_SumFreeSize += lastSuballoc.size;
10279 suballocations2nd.pop_back();
10280 CleanupAfterFree();
10285 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
10287 VmaSuballocation& lastSuballoc = suballocations1st.back();
10288 if(lastSuballoc.offset == offset)
10290 m_SumFreeSize += lastSuballoc.size;
10291 suballocations1st.pop_back();
10292 CleanupAfterFree();
10299 VmaSuballocation refSuballoc;
10300 refSuballoc.offset = offset;
10302 SuballocationVectorType::iterator it = VmaBinaryFindSorted(
10303 suballocations1st.begin() + m_1stNullItemsBeginCount,
10304 suballocations1st.end(),
10306 VmaSuballocationOffsetLess());
10307 if(it != suballocations1st.end())
10309 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10310 it->hAllocation = VK_NULL_HANDLE;
10311 ++m_1stNullItemsMiddleCount;
10312 m_SumFreeSize += it->size;
10313 CleanupAfterFree();
10318 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
10321 VmaSuballocation refSuballoc;
10322 refSuballoc.offset = offset;
10324 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
10325 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetLess()) :
10326 VmaBinaryFindSorted(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc, VmaSuballocationOffsetGreater());
10327 if(it != suballocations2nd.end())
10329 it->type = VMA_SUBALLOCATION_TYPE_FREE;
10330 it->hAllocation = VK_NULL_HANDLE;
10331 ++m_2ndNullItemsCount;
10332 m_SumFreeSize += it->size;
10333 CleanupAfterFree();
10338 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
10341 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 10343 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10344 const size_t suballocCount = AccessSuballocations1st().size();
10345 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
10348 void VmaBlockMetadata_Linear::CleanupAfterFree()
10350 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
10351 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
10355 suballocations1st.clear();
10356 suballocations2nd.clear();
10357 m_1stNullItemsBeginCount = 0;
10358 m_1stNullItemsMiddleCount = 0;
10359 m_2ndNullItemsCount = 0;
10360 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10364 const size_t suballoc1stCount = suballocations1st.size();
10365 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
10366 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
10369 while(m_1stNullItemsBeginCount < suballoc1stCount &&
10370 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10372 ++m_1stNullItemsBeginCount;
10373 --m_1stNullItemsMiddleCount;
10377 while(m_1stNullItemsMiddleCount > 0 &&
10378 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
10380 --m_1stNullItemsMiddleCount;
10381 suballocations1st.pop_back();
10385 while(m_2ndNullItemsCount > 0 &&
10386 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
10388 --m_2ndNullItemsCount;
10389 suballocations2nd.pop_back();
10393 while(m_2ndNullItemsCount > 0 &&
10394 suballocations2nd[0].hAllocation == VK_NULL_HANDLE)
10396 --m_2ndNullItemsCount;
10397 VmaVectorRemove(suballocations2nd, 0);
10400 if(ShouldCompact1st())
10402 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
10403 size_t srcIndex = m_1stNullItemsBeginCount;
10404 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
10406 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
10410 if(dstIndex != srcIndex)
10412 suballocations1st[dstIndex] = suballocations1st[srcIndex];
10416 suballocations1st.resize(nonNullItemCount);
10417 m_1stNullItemsBeginCount = 0;
10418 m_1stNullItemsMiddleCount = 0;
10422 if(suballocations2nd.empty())
10424 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10428 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
10430 suballocations1st.clear();
10431 m_1stNullItemsBeginCount = 0;
10433 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
10436 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
10437 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
10438 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
10439 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
10441 ++m_1stNullItemsBeginCount;
10442 --m_1stNullItemsMiddleCount;
10444 m_2ndNullItemsCount = 0;
10445 m_1stVectorIndex ^= 1;
10450 VMA_HEAVY_ASSERT(Validate());
10457 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
10458 VmaBlockMetadata(hAllocator),
10460 m_AllocationCount(0),
10464 memset(m_FreeList, 0,
sizeof(m_FreeList));
10467 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
10469 DeleteNode(m_Root);
10472 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
10474 VmaBlockMetadata::Init(size);
10476 m_UsableSize = VmaPrevPow2(size);
10477 m_SumFreeSize = m_UsableSize;
10481 while(m_LevelCount < MAX_LEVELS &&
10482 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
10487 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
10488 rootNode->offset = 0;
10489 rootNode->type = Node::TYPE_FREE;
10490 rootNode->parent = VMA_NULL;
10491 rootNode->buddy = VMA_NULL;
10494 AddToFreeListFront(0, rootNode);
10497 bool VmaBlockMetadata_Buddy::Validate()
const 10500 ValidationContext ctx;
10501 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
10503 VMA_VALIDATE(
false &&
"ValidateNode failed.");
10505 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
10506 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
10509 for(uint32_t level = 0; level < m_LevelCount; ++level)
10511 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
10512 m_FreeList[level].front->free.prev == VMA_NULL);
10514 for(Node* node = m_FreeList[level].front;
10516 node = node->free.next)
10518 VMA_VALIDATE(node->type == Node::TYPE_FREE);
10520 if(node->free.next == VMA_NULL)
10522 VMA_VALIDATE(m_FreeList[level].back == node);
10526 VMA_VALIDATE(node->free.next->free.prev == node);
10532 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
10534 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
10540 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const 10542 for(uint32_t level = 0; level < m_LevelCount; ++level)
10544 if(m_FreeList[level].front != VMA_NULL)
10546 return LevelToNodeSize(level);
10552 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 10554 const VkDeviceSize unusableSize = GetUnusableSize();
10565 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
10567 if(unusableSize > 0)
10576 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const 10578 const VkDeviceSize unusableSize = GetUnusableSize();
10580 inoutStats.
size += GetSize();
10581 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
10586 if(unusableSize > 0)
10593 #if VMA_STATS_STRING_ENABLED 10595 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const 10599 CalcAllocationStatInfo(stat);
10601 PrintDetailedMap_Begin(
10607 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
10609 const VkDeviceSize unusableSize = GetUnusableSize();
10610 if(unusableSize > 0)
10612 PrintDetailedMap_UnusedRange(json,
10617 PrintDetailedMap_End(json);
10620 #endif // #if VMA_STATS_STRING_ENABLED 10622 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
10623 uint32_t currentFrameIndex,
10624 uint32_t frameInUseCount,
10625 VkDeviceSize bufferImageGranularity,
10626 VkDeviceSize allocSize,
10627 VkDeviceSize allocAlignment,
10629 VmaSuballocationType allocType,
10630 bool canMakeOtherLost,
10632 VmaAllocationRequest* pAllocationRequest)
10634 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
10638 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
10639 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
10640 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
10642 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
10643 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
10646 if(allocSize > m_UsableSize)
10651 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10652 for(uint32_t level = targetLevel + 1; level--; )
10654 for(Node* freeNode = m_FreeList[level].front;
10655 freeNode != VMA_NULL;
10656 freeNode = freeNode->free.next)
10658 if(freeNode->offset % allocAlignment == 0)
10660 pAllocationRequest->type = VmaAllocationRequestType::Normal;
10661 pAllocationRequest->offset = freeNode->offset;
10662 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
10663 pAllocationRequest->sumItemSize = 0;
10664 pAllocationRequest->itemsToMakeLostCount = 0;
10665 pAllocationRequest->customData = (
void*)(uintptr_t)level;
10674 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
10675 uint32_t currentFrameIndex,
10676 uint32_t frameInUseCount,
10677 VmaAllocationRequest* pAllocationRequest)
10683 return pAllocationRequest->itemsToMakeLostCount == 0;
10686 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
10695 void VmaBlockMetadata_Buddy::Alloc(
10696 const VmaAllocationRequest& request,
10697 VmaSuballocationType type,
10698 VkDeviceSize allocSize,
10701 VMA_ASSERT(request.type == VmaAllocationRequestType::Normal);
10703 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
10704 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
10706 Node* currNode = m_FreeList[currLevel].front;
10707 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
10708 while(currNode->offset != request.offset)
10710 currNode = currNode->free.next;
10711 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
10715 while(currLevel < targetLevel)
10719 RemoveFromFreeList(currLevel, currNode);
10721 const uint32_t childrenLevel = currLevel + 1;
10724 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
10725 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
10727 leftChild->offset = currNode->offset;
10728 leftChild->type = Node::TYPE_FREE;
10729 leftChild->parent = currNode;
10730 leftChild->buddy = rightChild;
10732 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
10733 rightChild->type = Node::TYPE_FREE;
10734 rightChild->parent = currNode;
10735 rightChild->buddy = leftChild;
10738 currNode->type = Node::TYPE_SPLIT;
10739 currNode->split.leftChild = leftChild;
10742 AddToFreeListFront(childrenLevel, rightChild);
10743 AddToFreeListFront(childrenLevel, leftChild);
10748 currNode = m_FreeList[currLevel].front;
10757 VMA_ASSERT(currLevel == targetLevel &&
10758 currNode != VMA_NULL &&
10759 currNode->type == Node::TYPE_FREE);
10760 RemoveFromFreeList(currLevel, currNode);
10763 currNode->type = Node::TYPE_ALLOCATION;
10764 currNode->allocation.alloc = hAllocation;
10766 ++m_AllocationCount;
10768 m_SumFreeSize -= allocSize;
10771 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
10773 if(node->type == Node::TYPE_SPLIT)
10775 DeleteNode(node->split.leftChild->buddy);
10776 DeleteNode(node->split.leftChild);
10779 vma_delete(GetAllocationCallbacks(), node);
10782 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const 10784 VMA_VALIDATE(level < m_LevelCount);
10785 VMA_VALIDATE(curr->parent == parent);
10786 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
10787 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
10790 case Node::TYPE_FREE:
10792 ctx.calculatedSumFreeSize += levelNodeSize;
10793 ++ctx.calculatedFreeCount;
10795 case Node::TYPE_ALLOCATION:
10796 ++ctx.calculatedAllocationCount;
10797 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
10798 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
10800 case Node::TYPE_SPLIT:
10802 const uint32_t childrenLevel = level + 1;
10803 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
10804 const Node*
const leftChild = curr->split.leftChild;
10805 VMA_VALIDATE(leftChild != VMA_NULL);
10806 VMA_VALIDATE(leftChild->offset == curr->offset);
10807 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
10809 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
10811 const Node*
const rightChild = leftChild->buddy;
10812 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
10813 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
10815 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
10826 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const 10829 uint32_t level = 0;
10830 VkDeviceSize currLevelNodeSize = m_UsableSize;
10831 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
10832 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
10835 currLevelNodeSize = nextLevelNodeSize;
10836 nextLevelNodeSize = currLevelNodeSize >> 1;
10841 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
10844 Node* node = m_Root;
10845 VkDeviceSize nodeOffset = 0;
10846 uint32_t level = 0;
10847 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
10848 while(node->type == Node::TYPE_SPLIT)
10850 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
10851 if(offset < nodeOffset + nextLevelSize)
10853 node = node->split.leftChild;
10857 node = node->split.leftChild->buddy;
10858 nodeOffset += nextLevelSize;
10861 levelNodeSize = nextLevelSize;
10864 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
10865 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
10868 --m_AllocationCount;
10869 m_SumFreeSize += alloc->GetSize();
10871 node->type = Node::TYPE_FREE;
10874 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
10876 RemoveFromFreeList(level, node->buddy);
10877 Node*
const parent = node->parent;
10879 vma_delete(GetAllocationCallbacks(), node->buddy);
10880 vma_delete(GetAllocationCallbacks(), node);
10881 parent->type = Node::TYPE_FREE;
10889 AddToFreeListFront(level, node);
10892 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const 10896 case Node::TYPE_FREE:
10902 case Node::TYPE_ALLOCATION:
10904 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
10910 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
10911 if(unusedRangeSize > 0)
10920 case Node::TYPE_SPLIT:
10922 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
10923 const Node*
const leftChild = node->split.leftChild;
10924 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
10925 const Node*
const rightChild = leftChild->buddy;
10926 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
10934 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
10936 VMA_ASSERT(node->type == Node::TYPE_FREE);
10939 Node*
const frontNode = m_FreeList[level].front;
10940 if(frontNode == VMA_NULL)
10942 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
10943 node->free.prev = node->free.next = VMA_NULL;
10944 m_FreeList[level].front = m_FreeList[level].back = node;
10948 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
10949 node->free.prev = VMA_NULL;
10950 node->free.next = frontNode;
10951 frontNode->free.prev = node;
10952 m_FreeList[level].front = node;
10956 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
10958 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
10961 if(node->free.prev == VMA_NULL)
10963 VMA_ASSERT(m_FreeList[level].front == node);
10964 m_FreeList[level].front = node->free.next;
10968 Node*
const prevFreeNode = node->free.prev;
10969 VMA_ASSERT(prevFreeNode->free.next == node);
10970 prevFreeNode->free.next = node->free.next;
10974 if(node->free.next == VMA_NULL)
10976 VMA_ASSERT(m_FreeList[level].back == node);
10977 m_FreeList[level].back = node->free.prev;
10981 Node*
const nextFreeNode = node->free.next;
10982 VMA_ASSERT(nextFreeNode->free.prev == node);
10983 nextFreeNode->free.prev = node->free.prev;
10987 #if VMA_STATS_STRING_ENABLED 10988 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const 10992 case Node::TYPE_FREE:
10993 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
10995 case Node::TYPE_ALLOCATION:
10997 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
10998 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
10999 if(allocSize < levelNodeSize)
11001 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
11005 case Node::TYPE_SPLIT:
11007 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
11008 const Node*
const leftChild = node->split.leftChild;
11009 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
11010 const Node*
const rightChild = leftChild->buddy;
11011 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
11018 #endif // #if VMA_STATS_STRING_ENABLED 11024 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
11025 m_pMetadata(VMA_NULL),
11026 m_MemoryTypeIndex(UINT32_MAX),
11028 m_hMemory(VK_NULL_HANDLE),
11030 m_pMappedData(VMA_NULL)
11034 void VmaDeviceMemoryBlock::Init(
11037 uint32_t newMemoryTypeIndex,
11038 VkDeviceMemory newMemory,
11039 VkDeviceSize newSize,
11041 uint32_t algorithm)
11043 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
11045 m_hParentPool = hParentPool;
11046 m_MemoryTypeIndex = newMemoryTypeIndex;
11048 m_hMemory = newMemory;
11053 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
11056 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
11062 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
11064 m_pMetadata->Init(newSize);
11067 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
11071 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
11073 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
11074 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
11075 m_hMemory = VK_NULL_HANDLE;
11077 vma_delete(allocator, m_pMetadata);
11078 m_pMetadata = VMA_NULL;
11081 bool VmaDeviceMemoryBlock::Validate()
const 11083 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
11084 (m_pMetadata->GetSize() != 0));
11086 return m_pMetadata->Validate();
11089 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
11091 void* pData =
nullptr;
11092 VkResult res = Map(hAllocator, 1, &pData);
11093 if(res != VK_SUCCESS)
11098 res = m_pMetadata->CheckCorruption(pData);
11100 Unmap(hAllocator, 1);
11105 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
11112 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11113 if(m_MapCount != 0)
11115 m_MapCount += count;
11116 VMA_ASSERT(m_pMappedData != VMA_NULL);
11117 if(ppData != VMA_NULL)
11119 *ppData = m_pMappedData;
11125 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
11126 hAllocator->m_hDevice,
11132 if(result == VK_SUCCESS)
11134 if(ppData != VMA_NULL)
11136 *ppData = m_pMappedData;
11138 m_MapCount = count;
11144 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
11151 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11152 if(m_MapCount >= count)
11154 m_MapCount -= count;
11155 if(m_MapCount == 0)
11157 m_pMappedData = VMA_NULL;
11158 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
11163 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
11167 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11169 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11170 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11173 VkResult res = Map(hAllocator, 1, &pData);
11174 if(res != VK_SUCCESS)
11179 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
11180 VmaWriteMagicValue(pData, allocOffset + allocSize);
11182 Unmap(hAllocator, 1);
11187 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
11189 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
11190 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
11193 VkResult res = Map(hAllocator, 1, &pData);
11194 if(res != VK_SUCCESS)
11199 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
11201 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
11203 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
11205 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
11208 Unmap(hAllocator, 1);
11213 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
11218 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11219 hAllocation->GetBlock() ==
this);
11221 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11222 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
11223 hAllocator->m_hDevice,
11226 hAllocation->GetOffset());
11229 VkResult VmaDeviceMemoryBlock::BindImageMemory(
11234 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
11235 hAllocation->GetBlock() ==
this);
11237 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
11238 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
11239 hAllocator->m_hDevice,
11242 hAllocation->GetOffset());
11247 memset(&outInfo, 0,
sizeof(outInfo));
11266 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
11274 VmaPool_T::VmaPool_T(
11277 VkDeviceSize preferredBlockSize) :
11281 createInfo.memoryTypeIndex,
11282 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
11283 createInfo.minBlockCount,
11284 createInfo.maxBlockCount,
11286 createInfo.frameInUseCount,
11288 createInfo.blockSize != 0,
11294 VmaPool_T::~VmaPool_T()
11298 #if VMA_STATS_STRING_ENABLED 11300 #endif // #if VMA_STATS_STRING_ENABLED 11302 VmaBlockVector::VmaBlockVector(
11305 uint32_t memoryTypeIndex,
11306 VkDeviceSize preferredBlockSize,
11307 size_t minBlockCount,
11308 size_t maxBlockCount,
11309 VkDeviceSize bufferImageGranularity,
11310 uint32_t frameInUseCount,
11312 bool explicitBlockSize,
11313 uint32_t algorithm) :
11314 m_hAllocator(hAllocator),
11315 m_hParentPool(hParentPool),
11316 m_MemoryTypeIndex(memoryTypeIndex),
11317 m_PreferredBlockSize(preferredBlockSize),
11318 m_MinBlockCount(minBlockCount),
11319 m_MaxBlockCount(maxBlockCount),
11320 m_BufferImageGranularity(bufferImageGranularity),
11321 m_FrameInUseCount(frameInUseCount),
11322 m_IsCustomPool(isCustomPool),
11323 m_ExplicitBlockSize(explicitBlockSize),
11324 m_Algorithm(algorithm),
11325 m_HasEmptyBlock(false),
11326 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
11331 VmaBlockVector::~VmaBlockVector()
11333 for(
size_t i = m_Blocks.size(); i--; )
11335 m_Blocks[i]->Destroy(m_hAllocator);
11336 vma_delete(m_hAllocator, m_Blocks[i]);
11340 VkResult VmaBlockVector::CreateMinBlocks()
11342 for(
size_t i = 0; i < m_MinBlockCount; ++i)
11344 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
11345 if(res != VK_SUCCESS)
11353 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
11355 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
11357 const size_t blockCount = m_Blocks.size();
11366 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11368 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11369 VMA_ASSERT(pBlock);
11370 VMA_HEAVY_ASSERT(pBlock->Validate());
11371 pBlock->m_pMetadata->AddPoolStats(*pStats);
11375 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 11377 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
11378 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
11379 (VMA_DEBUG_MARGIN > 0) &&
11381 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
11384 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
11386 VkResult VmaBlockVector::Allocate(
11387 uint32_t currentFrameIndex,
11389 VkDeviceSize alignment,
11391 VmaSuballocationType suballocType,
11392 size_t allocationCount,
11396 VkResult res = VK_SUCCESS;
11398 if(IsCorruptionDetectionEnabled())
11400 size = VmaAlignUp<VkDeviceSize>(size,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
11401 alignment = VmaAlignUp<VkDeviceSize>(alignment,
sizeof(VMA_CORRUPTION_DETECTION_MAGIC_VALUE));
11405 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11406 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
11408 res = AllocatePage(
11414 pAllocations + allocIndex);
11415 if(res != VK_SUCCESS)
11422 if(res != VK_SUCCESS)
11425 while(allocIndex--)
11427 Free(pAllocations[allocIndex]);
11429 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
11435 VkResult VmaBlockVector::AllocatePage(
11436 uint32_t currentFrameIndex,
11438 VkDeviceSize alignment,
11440 VmaSuballocationType suballocType,
11447 const bool canCreateNewBlock =
11449 (m_Blocks.size() < m_MaxBlockCount);
11456 canMakeOtherLost =
false;
11460 if(isUpperAddress &&
11463 return VK_ERROR_FEATURE_NOT_PRESENT;
11477 return VK_ERROR_FEATURE_NOT_PRESENT;
11481 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
11483 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11491 if(!canMakeOtherLost || canCreateNewBlock)
11500 if(!m_Blocks.empty())
11502 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
11503 VMA_ASSERT(pCurrBlock);
11504 VkResult res = AllocateFromBlock(
11514 if(res == VK_SUCCESS)
11516 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
11526 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11528 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11529 VMA_ASSERT(pCurrBlock);
11530 VkResult res = AllocateFromBlock(
11540 if(res == VK_SUCCESS)
11542 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
11550 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11552 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11553 VMA_ASSERT(pCurrBlock);
11554 VkResult res = AllocateFromBlock(
11564 if(res == VK_SUCCESS)
11566 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
11574 if(canCreateNewBlock)
11577 VkDeviceSize newBlockSize = m_PreferredBlockSize;
11578 uint32_t newBlockSizeShift = 0;
11579 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
11581 if(!m_ExplicitBlockSize)
11584 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
11585 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
11587 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11588 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
11590 newBlockSize = smallerNewBlockSize;
11591 ++newBlockSizeShift;
11600 size_t newBlockIndex = 0;
11601 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
11603 if(!m_ExplicitBlockSize)
11605 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
11607 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
11608 if(smallerNewBlockSize >= size)
11610 newBlockSize = smallerNewBlockSize;
11611 ++newBlockSizeShift;
11612 res = CreateBlock(newBlockSize, &newBlockIndex);
11621 if(res == VK_SUCCESS)
11623 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
11624 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
11626 res = AllocateFromBlock(
11636 if(res == VK_SUCCESS)
11638 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
11644 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11651 if(canMakeOtherLost)
11653 uint32_t tryIndex = 0;
11654 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
11656 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
11657 VmaAllocationRequest bestRequest = {};
11658 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
11664 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
11666 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11667 VMA_ASSERT(pCurrBlock);
11668 VmaAllocationRequest currRequest = {};
11669 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11672 m_BufferImageGranularity,
11681 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11682 if(pBestRequestBlock == VMA_NULL ||
11683 currRequestCost < bestRequestCost)
11685 pBestRequestBlock = pCurrBlock;
11686 bestRequest = currRequest;
11687 bestRequestCost = currRequestCost;
11689 if(bestRequestCost == 0)
11700 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11702 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
11703 VMA_ASSERT(pCurrBlock);
11704 VmaAllocationRequest currRequest = {};
11705 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
11708 m_BufferImageGranularity,
11717 const VkDeviceSize currRequestCost = currRequest.CalcCost();
11718 if(pBestRequestBlock == VMA_NULL ||
11719 currRequestCost < bestRequestCost ||
11722 pBestRequestBlock = pCurrBlock;
11723 bestRequest = currRequest;
11724 bestRequestCost = currRequestCost;
11726 if(bestRequestCost == 0 ||
11736 if(pBestRequestBlock != VMA_NULL)
11740 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
11741 if(res != VK_SUCCESS)
11747 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
11753 if(pBestRequestBlock->m_pMetadata->IsEmpty())
11755 m_HasEmptyBlock =
false;
11758 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
11759 (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
11760 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, *pAllocation);
11761 (*pAllocation)->InitBlockAllocation(
11763 bestRequest.offset,
11769 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
11770 VMA_DEBUG_LOG(
" Returned from existing block");
11771 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
11772 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11774 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
11776 if(IsCorruptionDetectionEnabled())
11778 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
11779 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
11794 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
11796 return VK_ERROR_TOO_MANY_OBJECTS;
11800 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11803 void VmaBlockVector::Free(
11806 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
11810 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
11812 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
11814 if(IsCorruptionDetectionEnabled())
11816 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
11817 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
11820 if(hAllocation->IsPersistentMap())
11822 pBlock->Unmap(m_hAllocator, 1);
11825 pBlock->m_pMetadata->Free(hAllocation);
11826 VMA_HEAVY_ASSERT(pBlock->Validate());
11828 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", m_MemoryTypeIndex);
11831 if(pBlock->m_pMetadata->IsEmpty())
11834 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
11836 pBlockToDelete = pBlock;
11842 m_HasEmptyBlock =
true;
11847 else if(m_HasEmptyBlock)
11849 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
11850 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
11852 pBlockToDelete = pLastBlock;
11853 m_Blocks.pop_back();
11854 m_HasEmptyBlock =
false;
11858 IncrementallySortBlocks();
11863 if(pBlockToDelete != VMA_NULL)
11865 VMA_DEBUG_LOG(
" Deleted empty allocation");
11866 pBlockToDelete->Destroy(m_hAllocator);
11867 vma_delete(m_hAllocator, pBlockToDelete);
11871 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 11873 VkDeviceSize result = 0;
11874 for(
size_t i = m_Blocks.size(); i--; )
11876 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
11877 if(result >= m_PreferredBlockSize)
11885 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
11887 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11889 if(m_Blocks[blockIndex] == pBlock)
11891 VmaVectorRemove(m_Blocks, blockIndex);
11898 void VmaBlockVector::IncrementallySortBlocks()
11903 for(
size_t i = 1; i < m_Blocks.size(); ++i)
11905 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
11907 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
11914 VkResult VmaBlockVector::AllocateFromBlock(
11915 VmaDeviceMemoryBlock* pBlock,
11916 uint32_t currentFrameIndex,
11918 VkDeviceSize alignment,
11921 VmaSuballocationType suballocType,
11930 VmaAllocationRequest currRequest = {};
11931 if(pBlock->m_pMetadata->CreateAllocationRequest(
11934 m_BufferImageGranularity,
11944 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
11948 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
11949 if(res != VK_SUCCESS)
11956 if(pBlock->m_pMetadata->IsEmpty())
11958 m_HasEmptyBlock =
false;
11961 *pAllocation = m_hAllocator->m_AllocationObjectAllocator.Allocate();
11962 (*pAllocation)->Ctor(currentFrameIndex, isUserDataString);
11963 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, *pAllocation);
11964 (*pAllocation)->InitBlockAllocation(
11966 currRequest.offset,
11972 VMA_HEAVY_ASSERT(pBlock->Validate());
11973 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
11974 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
11976 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
11978 if(IsCorruptionDetectionEnabled())
11980 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
11981 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
11985 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
11988 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
11990 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
11991 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
11992 allocInfo.allocationSize = blockSize;
11993 VkDeviceMemory mem = VK_NULL_HANDLE;
11994 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
12003 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
12009 allocInfo.allocationSize,
12013 m_Blocks.push_back(pBlock);
12014 if(pNewBlockIndex != VMA_NULL)
12016 *pNewBlockIndex = m_Blocks.size() - 1;
12022 void VmaBlockVector::ApplyDefragmentationMovesCpu(
12023 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12024 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves)
12026 const size_t blockCount = m_Blocks.size();
12027 const bool isNonCoherent = m_hAllocator->IsMemoryTypeNonCoherent(m_MemoryTypeIndex);
12031 BLOCK_FLAG_USED = 0x00000001,
12032 BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION = 0x00000002,
12040 VmaVector< BlockInfo, VmaStlAllocator<BlockInfo> >
12041 blockInfo(blockCount, VmaStlAllocator<BlockInfo>(m_hAllocator->GetAllocationCallbacks()));
12042 memset(blockInfo.data(), 0, blockCount *
sizeof(BlockInfo));
12045 const size_t moveCount = moves.size();
12046 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12048 const VmaDefragmentationMove& move = moves[moveIndex];
12049 blockInfo[move.srcBlockIndex].flags |= BLOCK_FLAG_USED;
12050 blockInfo[move.dstBlockIndex].flags |= BLOCK_FLAG_USED;
12053 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
12056 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12058 BlockInfo& currBlockInfo = blockInfo[blockIndex];
12059 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12060 if((currBlockInfo.flags & BLOCK_FLAG_USED) != 0)
12062 currBlockInfo.pMappedData = pBlock->GetMappedData();
12064 if(currBlockInfo.pMappedData == VMA_NULL)
12066 pDefragCtx->res = pBlock->Map(m_hAllocator, 1, &currBlockInfo.pMappedData);
12067 if(pDefragCtx->res == VK_SUCCESS)
12069 currBlockInfo.flags |= BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION;
12076 if(pDefragCtx->res == VK_SUCCESS)
12078 const VkDeviceSize nonCoherentAtomSize = m_hAllocator->m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
12079 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
12081 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12083 const VmaDefragmentationMove& move = moves[moveIndex];
12085 const BlockInfo& srcBlockInfo = blockInfo[move.srcBlockIndex];
12086 const BlockInfo& dstBlockInfo = blockInfo[move.dstBlockIndex];
12088 VMA_ASSERT(srcBlockInfo.pMappedData && dstBlockInfo.pMappedData);
12093 VmaDeviceMemoryBlock*
const pSrcBlock = m_Blocks[move.srcBlockIndex];
12094 memRange.memory = pSrcBlock->GetDeviceMemory();
12095 memRange.offset = VmaAlignDown(move.srcOffset, nonCoherentAtomSize);
12096 memRange.size = VMA_MIN(
12097 VmaAlignUp(move.size + (move.srcOffset - memRange.offset), nonCoherentAtomSize),
12098 pSrcBlock->m_pMetadata->GetSize() - memRange.offset);
12099 (*m_hAllocator->GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
12104 reinterpret_cast<char*>(dstBlockInfo.pMappedData) + move.dstOffset,
12105 reinterpret_cast<char*>(srcBlockInfo.pMappedData) + move.srcOffset,
12106 static_cast<size_t>(move.size));
12108 if(IsCorruptionDetectionEnabled())
12110 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset - VMA_DEBUG_MARGIN);
12111 VmaWriteMagicValue(dstBlockInfo.pMappedData, move.dstOffset + move.size);
12117 VmaDeviceMemoryBlock*
const pDstBlock = m_Blocks[move.dstBlockIndex];
12118 memRange.memory = pDstBlock->GetDeviceMemory();
12119 memRange.offset = VmaAlignDown(move.dstOffset, nonCoherentAtomSize);
12120 memRange.size = VMA_MIN(
12121 VmaAlignUp(move.size + (move.dstOffset - memRange.offset), nonCoherentAtomSize),
12122 pDstBlock->m_pMetadata->GetSize() - memRange.offset);
12123 (*m_hAllocator->GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hAllocator->m_hDevice, 1, &memRange);
12130 for(
size_t blockIndex = blockCount; blockIndex--; )
12132 const BlockInfo& currBlockInfo = blockInfo[blockIndex];
12133 if((currBlockInfo.flags & BLOCK_FLAG_MAPPED_FOR_DEFRAGMENTATION) != 0)
12135 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12136 pBlock->Unmap(m_hAllocator, 1);
12141 void VmaBlockVector::ApplyDefragmentationMovesGpu(
12142 class VmaBlockVectorDefragmentationContext* pDefragCtx,
12143 const VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12144 VkCommandBuffer commandBuffer)
12146 const size_t blockCount = m_Blocks.size();
12148 pDefragCtx->blockContexts.resize(blockCount);
12149 memset(pDefragCtx->blockContexts.data(), 0, blockCount *
sizeof(VmaBlockDefragmentationContext));
12152 const size_t moveCount = moves.size();
12153 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12155 const VmaDefragmentationMove& move = moves[moveIndex];
12156 pDefragCtx->blockContexts[move.srcBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12157 pDefragCtx->blockContexts[move.dstBlockIndex].flags |= VmaBlockDefragmentationContext::BLOCK_FLAG_USED;
12160 VMA_ASSERT(pDefragCtx->res == VK_SUCCESS);
12164 VkBufferCreateInfo bufCreateInfo;
12165 VmaFillGpuDefragmentationBufferCreateInfo(bufCreateInfo);
12167 for(
size_t blockIndex = 0; pDefragCtx->res == VK_SUCCESS && blockIndex < blockCount; ++blockIndex)
12169 VmaBlockDefragmentationContext& currBlockCtx = pDefragCtx->blockContexts[blockIndex];
12170 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12171 if((currBlockCtx.flags & VmaBlockDefragmentationContext::BLOCK_FLAG_USED) != 0)
12173 bufCreateInfo.size = pBlock->m_pMetadata->GetSize();
12174 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkCreateBuffer)(
12175 m_hAllocator->m_hDevice, &bufCreateInfo, m_hAllocator->GetAllocationCallbacks(), &currBlockCtx.hBuffer);
12176 if(pDefragCtx->res == VK_SUCCESS)
12178 pDefragCtx->res = (*m_hAllocator->GetVulkanFunctions().vkBindBufferMemory)(
12179 m_hAllocator->m_hDevice, currBlockCtx.hBuffer, pBlock->GetDeviceMemory(), 0);
12186 if(pDefragCtx->res == VK_SUCCESS)
12188 for(
size_t moveIndex = 0; moveIndex < moveCount; ++moveIndex)
12190 const VmaDefragmentationMove& move = moves[moveIndex];
12192 const VmaBlockDefragmentationContext& srcBlockCtx = pDefragCtx->blockContexts[move.srcBlockIndex];
12193 const VmaBlockDefragmentationContext& dstBlockCtx = pDefragCtx->blockContexts[move.dstBlockIndex];
12195 VMA_ASSERT(srcBlockCtx.hBuffer && dstBlockCtx.hBuffer);
12197 VkBufferCopy region = {
12201 (*m_hAllocator->GetVulkanFunctions().vkCmdCopyBuffer)(
12202 commandBuffer, srcBlockCtx.hBuffer, dstBlockCtx.hBuffer, 1, ®ion);
12207 if(pDefragCtx->res == VK_SUCCESS && moveCount > 0)
12209 pDefragCtx->res = VK_NOT_READY;
12215 m_HasEmptyBlock =
false;
12216 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
12218 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
12219 if(pBlock->m_pMetadata->IsEmpty())
12221 if(m_Blocks.size() > m_MinBlockCount)
12223 if(pDefragmentationStats != VMA_NULL)
12226 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
12229 VmaVectorRemove(m_Blocks, blockIndex);
12230 pBlock->Destroy(m_hAllocator);
12231 vma_delete(m_hAllocator, pBlock);
12235 m_HasEmptyBlock =
true;
12241 #if VMA_STATS_STRING_ENABLED 12243 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
12245 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12247 json.BeginObject();
12251 json.WriteString(
"MemoryTypeIndex");
12252 json.WriteNumber(m_MemoryTypeIndex);
12254 json.WriteString(
"BlockSize");
12255 json.WriteNumber(m_PreferredBlockSize);
12257 json.WriteString(
"BlockCount");
12258 json.BeginObject(
true);
12259 if(m_MinBlockCount > 0)
12261 json.WriteString(
"Min");
12262 json.WriteNumber((uint64_t)m_MinBlockCount);
12264 if(m_MaxBlockCount < SIZE_MAX)
12266 json.WriteString(
"Max");
12267 json.WriteNumber((uint64_t)m_MaxBlockCount);
12269 json.WriteString(
"Cur");
12270 json.WriteNumber((uint64_t)m_Blocks.size());
12273 if(m_FrameInUseCount > 0)
12275 json.WriteString(
"FrameInUseCount");
12276 json.WriteNumber(m_FrameInUseCount);
12279 if(m_Algorithm != 0)
12281 json.WriteString(
"Algorithm");
12282 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
12287 json.WriteString(
"PreferredBlockSize");
12288 json.WriteNumber(m_PreferredBlockSize);
12291 json.WriteString(
"Blocks");
12292 json.BeginObject();
12293 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12295 json.BeginString();
12296 json.ContinueString(m_Blocks[i]->GetId());
12299 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
12306 #endif // #if VMA_STATS_STRING_ENABLED 12308 void VmaBlockVector::Defragment(
12309 class VmaBlockVectorDefragmentationContext* pCtx,
12311 VkDeviceSize& maxCpuBytesToMove, uint32_t& maxCpuAllocationsToMove,
12312 VkDeviceSize& maxGpuBytesToMove, uint32_t& maxGpuAllocationsToMove,
12313 VkCommandBuffer commandBuffer)
12315 pCtx->res = VK_SUCCESS;
12317 const VkMemoryPropertyFlags memPropFlags =
12318 m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags;
12319 const bool isHostVisible = (memPropFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0;
12321 const bool canDefragmentOnCpu = maxCpuBytesToMove > 0 && maxCpuAllocationsToMove > 0 &&
12323 const bool canDefragmentOnGpu = maxGpuBytesToMove > 0 && maxGpuAllocationsToMove > 0 &&
12324 !IsCorruptionDetectionEnabled() &&
12325 ((1u << m_MemoryTypeIndex) & m_hAllocator->GetGpuDefragmentationMemoryTypeBits()) != 0;
12328 if(canDefragmentOnCpu || canDefragmentOnGpu)
12330 bool defragmentOnGpu;
12332 if(canDefragmentOnGpu != canDefragmentOnCpu)
12334 defragmentOnGpu = canDefragmentOnGpu;
12339 defragmentOnGpu = (memPropFlags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0 ||
12340 m_hAllocator->IsIntegratedGpu();
12343 bool overlappingMoveSupported = !defragmentOnGpu;
12345 if(m_hAllocator->m_UseMutex)
12347 m_Mutex.LockWrite();
12348 pCtx->mutexLocked =
true;
12351 pCtx->Begin(overlappingMoveSupported);
12355 const VkDeviceSize maxBytesToMove = defragmentOnGpu ? maxGpuBytesToMove : maxCpuBytesToMove;
12356 const uint32_t maxAllocationsToMove = defragmentOnGpu ? maxGpuAllocationsToMove : maxCpuAllocationsToMove;
12357 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> > moves =
12358 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >(VmaStlAllocator<VmaDefragmentationMove>(m_hAllocator->GetAllocationCallbacks()));
12359 pCtx->res = pCtx->GetAlgorithm()->Defragment(moves, maxBytesToMove, maxAllocationsToMove);
12362 if(pStats != VMA_NULL)
12364 const VkDeviceSize bytesMoved = pCtx->GetAlgorithm()->GetBytesMoved();
12365 const uint32_t allocationsMoved = pCtx->GetAlgorithm()->GetAllocationsMoved();
12368 VMA_ASSERT(bytesMoved <= maxBytesToMove);
12369 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
12370 if(defragmentOnGpu)
12372 maxGpuBytesToMove -= bytesMoved;
12373 maxGpuAllocationsToMove -= allocationsMoved;
12377 maxCpuBytesToMove -= bytesMoved;
12378 maxCpuAllocationsToMove -= allocationsMoved;
12382 if(pCtx->res >= VK_SUCCESS)
12384 if(defragmentOnGpu)
12386 ApplyDefragmentationMovesGpu(pCtx, moves, commandBuffer);
12390 ApplyDefragmentationMovesCpu(pCtx, moves);
12396 void VmaBlockVector::DefragmentationEnd(
12397 class VmaBlockVectorDefragmentationContext* pCtx,
12401 for(
size_t blockIndex = pCtx->blockContexts.size(); blockIndex--; )
12403 VmaBlockDefragmentationContext& blockCtx = pCtx->blockContexts[blockIndex];
12404 if(blockCtx.hBuffer)
12406 (*m_hAllocator->GetVulkanFunctions().vkDestroyBuffer)(
12407 m_hAllocator->m_hDevice, blockCtx.hBuffer, m_hAllocator->GetAllocationCallbacks());
12411 if(pCtx->res >= VK_SUCCESS)
12413 FreeEmptyBlocks(pStats);
12416 if(pCtx->mutexLocked)
12418 VMA_ASSERT(m_hAllocator->m_UseMutex);
12419 m_Mutex.UnlockWrite();
12423 size_t VmaBlockVector::CalcAllocationCount()
const 12426 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12428 result += m_Blocks[i]->m_pMetadata->GetAllocationCount();
12433 bool VmaBlockVector::IsBufferImageGranularityConflictPossible()
const 12435 if(m_BufferImageGranularity == 1)
12439 VmaSuballocationType lastSuballocType = VMA_SUBALLOCATION_TYPE_FREE;
12440 for(
size_t i = 0, count = m_Blocks.size(); i < count; ++i)
12442 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[i];
12443 VMA_ASSERT(m_Algorithm == 0);
12444 VmaBlockMetadata_Generic*
const pMetadata = (VmaBlockMetadata_Generic*)pBlock->m_pMetadata;
12445 if(pMetadata->IsBufferImageGranularityConflictPossible(m_BufferImageGranularity, lastSuballocType))
12453 void VmaBlockVector::MakePoolAllocationsLost(
12454 uint32_t currentFrameIndex,
12455 size_t* pLostAllocationCount)
12457 VmaMutexLockWrite lock(m_Mutex, m_hAllocator->m_UseMutex);
12458 size_t lostAllocationCount = 0;
12459 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12461 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12462 VMA_ASSERT(pBlock);
12463 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
12465 if(pLostAllocationCount != VMA_NULL)
12467 *pLostAllocationCount = lostAllocationCount;
12471 VkResult VmaBlockVector::CheckCorruption()
12473 if(!IsCorruptionDetectionEnabled())
12475 return VK_ERROR_FEATURE_NOT_PRESENT;
12478 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12479 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12481 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12482 VMA_ASSERT(pBlock);
12483 VkResult res = pBlock->CheckCorruption(m_hAllocator);
12484 if(res != VK_SUCCESS)
12492 void VmaBlockVector::AddStats(
VmaStats* pStats)
12494 const uint32_t memTypeIndex = m_MemoryTypeIndex;
12495 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
12497 VmaMutexLockRead lock(m_Mutex, m_hAllocator->m_UseMutex);
12499 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
12501 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
12502 VMA_ASSERT(pBlock);
12503 VMA_HEAVY_ASSERT(pBlock->Validate());
12505 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
12506 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12507 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12508 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
12515 VmaDefragmentationAlgorithm_Generic::VmaDefragmentationAlgorithm_Generic(
12517 VmaBlockVector* pBlockVector,
12518 uint32_t currentFrameIndex,
12519 bool overlappingMoveSupported) :
12520 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12521 m_AllocationCount(0),
12522 m_AllAllocations(false),
12524 m_AllocationsMoved(0),
12525 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
12528 const size_t blockCount = m_pBlockVector->m_Blocks.size();
12529 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12531 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
12532 pBlockInfo->m_OriginalBlockIndex = blockIndex;
12533 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
12534 m_Blocks.push_back(pBlockInfo);
12538 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
12541 VmaDefragmentationAlgorithm_Generic::~VmaDefragmentationAlgorithm_Generic()
12543 for(
size_t i = m_Blocks.size(); i--; )
12545 vma_delete(m_hAllocator, m_Blocks[i]);
12549 void VmaDefragmentationAlgorithm_Generic::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
12552 if(hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
12554 VmaDeviceMemoryBlock* pBlock = hAlloc->GetBlock();
12555 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
12556 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
12558 AllocationInfo allocInfo = AllocationInfo(hAlloc, pChanged);
12559 (*it)->m_Allocations.push_back(allocInfo);
12566 ++m_AllocationCount;
12570 VkResult VmaDefragmentationAlgorithm_Generic::DefragmentRound(
12571 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12572 VkDeviceSize maxBytesToMove,
12573 uint32_t maxAllocationsToMove)
12575 if(m_Blocks.empty())
12588 size_t srcBlockMinIndex = 0;
12601 size_t srcBlockIndex = m_Blocks.size() - 1;
12602 size_t srcAllocIndex = SIZE_MAX;
12608 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
12610 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
12613 if(srcBlockIndex == srcBlockMinIndex)
12620 srcAllocIndex = SIZE_MAX;
12625 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
12629 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
12630 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
12632 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
12633 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
12634 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
12635 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
12638 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
12640 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
12641 VmaAllocationRequest dstAllocRequest;
12642 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
12643 m_CurrentFrameIndex,
12644 m_pBlockVector->GetFrameInUseCount(),
12645 m_pBlockVector->GetBufferImageGranularity(),
12652 &dstAllocRequest) &&
12654 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
12656 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
12659 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
12660 (m_BytesMoved + size > maxBytesToMove))
12665 VmaDefragmentationMove move;
12666 move.srcBlockIndex = pSrcBlockInfo->m_OriginalBlockIndex;
12667 move.dstBlockIndex = pDstBlockInfo->m_OriginalBlockIndex;
12668 move.srcOffset = srcOffset;
12669 move.dstOffset = dstAllocRequest.offset;
12671 moves.push_back(move);
12673 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
12677 allocInfo.m_hAllocation);
12678 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
12680 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
12682 if(allocInfo.m_pChanged != VMA_NULL)
12684 *allocInfo.m_pChanged = VK_TRUE;
12687 ++m_AllocationsMoved;
12688 m_BytesMoved += size;
12690 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
12698 if(srcAllocIndex > 0)
12704 if(srcBlockIndex > 0)
12707 srcAllocIndex = SIZE_MAX;
12717 size_t VmaDefragmentationAlgorithm_Generic::CalcBlocksWithNonMovableCount()
const 12720 for(
size_t i = 0; i < m_Blocks.size(); ++i)
12722 if(m_Blocks[i]->m_HasNonMovableAllocations)
12730 VkResult VmaDefragmentationAlgorithm_Generic::Defragment(
12731 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12732 VkDeviceSize maxBytesToMove,
12733 uint32_t maxAllocationsToMove)
12735 if(!m_AllAllocations && m_AllocationCount == 0)
12740 const size_t blockCount = m_Blocks.size();
12741 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
12743 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
12745 if(m_AllAllocations)
12747 VmaBlockMetadata_Generic* pMetadata = (VmaBlockMetadata_Generic*)pBlockInfo->m_pBlock->m_pMetadata;
12748 for(VmaSuballocationList::const_iterator it = pMetadata->m_Suballocations.begin();
12749 it != pMetadata->m_Suballocations.end();
12752 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
12754 AllocationInfo allocInfo = AllocationInfo(it->hAllocation, VMA_NULL);
12755 pBlockInfo->m_Allocations.push_back(allocInfo);
12760 pBlockInfo->CalcHasNonMovableAllocations();
12764 pBlockInfo->SortAllocationsByOffsetDescending();
12770 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
12773 const uint32_t roundCount = 2;
12776 VkResult result = VK_SUCCESS;
12777 for(uint32_t round = 0; (round < roundCount) && (result == VK_SUCCESS); ++round)
12779 result = DefragmentRound(moves, maxBytesToMove, maxAllocationsToMove);
12785 bool VmaDefragmentationAlgorithm_Generic::MoveMakesSense(
12786 size_t dstBlockIndex, VkDeviceSize dstOffset,
12787 size_t srcBlockIndex, VkDeviceSize srcOffset)
12789 if(dstBlockIndex < srcBlockIndex)
12793 if(dstBlockIndex > srcBlockIndex)
12797 if(dstOffset < srcOffset)
12807 VmaDefragmentationAlgorithm_Fast::VmaDefragmentationAlgorithm_Fast(
12809 VmaBlockVector* pBlockVector,
12810 uint32_t currentFrameIndex,
12811 bool overlappingMoveSupported) :
12812 VmaDefragmentationAlgorithm(hAllocator, pBlockVector, currentFrameIndex),
12813 m_OverlappingMoveSupported(overlappingMoveSupported),
12814 m_AllocationCount(0),
12815 m_AllAllocations(false),
12817 m_AllocationsMoved(0),
12818 m_BlockInfos(VmaStlAllocator<BlockInfo>(hAllocator->GetAllocationCallbacks()))
12820 VMA_ASSERT(VMA_DEBUG_MARGIN == 0);
12824 VmaDefragmentationAlgorithm_Fast::~VmaDefragmentationAlgorithm_Fast()
12828 VkResult VmaDefragmentationAlgorithm_Fast::Defragment(
12829 VmaVector< VmaDefragmentationMove, VmaStlAllocator<VmaDefragmentationMove> >& moves,
12830 VkDeviceSize maxBytesToMove,
12831 uint32_t maxAllocationsToMove)
12833 VMA_ASSERT(m_AllAllocations || m_pBlockVector->CalcAllocationCount() == m_AllocationCount);
12835 const size_t blockCount = m_pBlockVector->GetBlockCount();
12836 if(blockCount == 0 || maxBytesToMove == 0 || maxAllocationsToMove == 0)
12841 PreprocessMetadata();
12845 m_BlockInfos.resize(blockCount);
12846 for(
size_t i = 0; i < blockCount; ++i)
12848 m_BlockInfos[i].origBlockIndex = i;
12851 VMA_SORT(m_BlockInfos.begin(), m_BlockInfos.end(), [
this](
const BlockInfo& lhs,
const BlockInfo& rhs) ->
bool {
12852 return m_pBlockVector->GetBlock(lhs.origBlockIndex)->m_pMetadata->GetSumFreeSize() <
12853 m_pBlockVector->GetBlock(rhs.origBlockIndex)->m_pMetadata->GetSumFreeSize();
12858 FreeSpaceDatabase freeSpaceDb;
12860 size_t dstBlockInfoIndex = 0;
12861 size_t dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
12862 VmaDeviceMemoryBlock* pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
12863 VmaBlockMetadata_Generic* pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
12864 VkDeviceSize dstBlockSize = pDstMetadata->GetSize();
12865 VkDeviceSize dstOffset = 0;
12868 for(
size_t srcBlockInfoIndex = 0; !end && srcBlockInfoIndex < blockCount; ++srcBlockInfoIndex)
12870 const size_t srcOrigBlockIndex = m_BlockInfos[srcBlockInfoIndex].origBlockIndex;
12871 VmaDeviceMemoryBlock*
const pSrcBlock = m_pBlockVector->GetBlock(srcOrigBlockIndex);
12872 VmaBlockMetadata_Generic*
const pSrcMetadata = (VmaBlockMetadata_Generic*)pSrcBlock->m_pMetadata;
12873 for(VmaSuballocationList::iterator srcSuballocIt = pSrcMetadata->m_Suballocations.begin();
12874 !end && srcSuballocIt != pSrcMetadata->m_Suballocations.end(); )
12876 VmaAllocation_T*
const pAlloc = srcSuballocIt->hAllocation;
12877 const VkDeviceSize srcAllocAlignment = pAlloc->GetAlignment();
12878 const VkDeviceSize srcAllocSize = srcSuballocIt->size;
12879 if(m_AllocationsMoved == maxAllocationsToMove ||
12880 m_BytesMoved + srcAllocSize > maxBytesToMove)
12885 const VkDeviceSize srcAllocOffset = srcSuballocIt->offset;
12888 size_t freeSpaceInfoIndex;
12889 VkDeviceSize dstAllocOffset;
12890 if(freeSpaceDb.Fetch(srcAllocAlignment, srcAllocSize,
12891 freeSpaceInfoIndex, dstAllocOffset))
12893 size_t freeSpaceOrigBlockIndex = m_BlockInfos[freeSpaceInfoIndex].origBlockIndex;
12894 VmaDeviceMemoryBlock* pFreeSpaceBlock = m_pBlockVector->GetBlock(freeSpaceOrigBlockIndex);
12895 VmaBlockMetadata_Generic* pFreeSpaceMetadata = (VmaBlockMetadata_Generic*)pFreeSpaceBlock->m_pMetadata;
12898 if(freeSpaceInfoIndex == srcBlockInfoIndex)
12900 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
12904 VmaSuballocation suballoc = *srcSuballocIt;
12905 suballoc.offset = dstAllocOffset;
12906 suballoc.hAllocation->ChangeOffset(dstAllocOffset);
12907 m_BytesMoved += srcAllocSize;
12908 ++m_AllocationsMoved;
12910 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
12912 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
12913 srcSuballocIt = nextSuballocIt;
12915 InsertSuballoc(pFreeSpaceMetadata, suballoc);
12917 VmaDefragmentationMove move = {
12918 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
12919 srcAllocOffset, dstAllocOffset,
12921 moves.push_back(move);
12928 VMA_ASSERT(freeSpaceInfoIndex < srcBlockInfoIndex);
12930 VmaSuballocation suballoc = *srcSuballocIt;
12931 suballoc.offset = dstAllocOffset;
12932 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pFreeSpaceBlock, dstAllocOffset);
12933 m_BytesMoved += srcAllocSize;
12934 ++m_AllocationsMoved;
12936 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
12938 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
12939 srcSuballocIt = nextSuballocIt;
12941 InsertSuballoc(pFreeSpaceMetadata, suballoc);
12943 VmaDefragmentationMove move = {
12944 srcOrigBlockIndex, freeSpaceOrigBlockIndex,
12945 srcAllocOffset, dstAllocOffset,
12947 moves.push_back(move);
12952 dstAllocOffset = VmaAlignUp(dstOffset, srcAllocAlignment);
12955 while(dstBlockInfoIndex < srcBlockInfoIndex &&
12956 dstAllocOffset + srcAllocSize > dstBlockSize)
12959 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, dstBlockSize - dstOffset);
12961 ++dstBlockInfoIndex;
12962 dstOrigBlockIndex = m_BlockInfos[dstBlockInfoIndex].origBlockIndex;
12963 pDstBlock = m_pBlockVector->GetBlock(dstOrigBlockIndex);
12964 pDstMetadata = (VmaBlockMetadata_Generic*)pDstBlock->m_pMetadata;
12965 dstBlockSize = pDstMetadata->GetSize();
12967 dstAllocOffset = 0;
12971 if(dstBlockInfoIndex == srcBlockInfoIndex)
12973 VMA_ASSERT(dstAllocOffset <= srcAllocOffset);
12975 const bool overlap = dstAllocOffset + srcAllocSize > srcAllocOffset;
12977 bool skipOver = overlap;
12978 if(overlap && m_OverlappingMoveSupported && dstAllocOffset < srcAllocOffset)
12982 skipOver = (srcAllocOffset - dstAllocOffset) * 64 < srcAllocSize;
12987 freeSpaceDb.Register(dstBlockInfoIndex, dstOffset, srcAllocOffset - dstOffset);
12989 dstOffset = srcAllocOffset + srcAllocSize;
12995 srcSuballocIt->offset = dstAllocOffset;
12996 srcSuballocIt->hAllocation->ChangeOffset(dstAllocOffset);
12997 dstOffset = dstAllocOffset + srcAllocSize;
12998 m_BytesMoved += srcAllocSize;
12999 ++m_AllocationsMoved;
13001 VmaDefragmentationMove move = {
13002 srcOrigBlockIndex, dstOrigBlockIndex,
13003 srcAllocOffset, dstAllocOffset,
13005 moves.push_back(move);
13013 VMA_ASSERT(dstBlockInfoIndex < srcBlockInfoIndex);
13014 VMA_ASSERT(dstAllocOffset + srcAllocSize <= dstBlockSize);
13016 VmaSuballocation suballoc = *srcSuballocIt;
13017 suballoc.offset = dstAllocOffset;
13018 suballoc.hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlock, dstAllocOffset);
13019 dstOffset = dstAllocOffset + srcAllocSize;
13020 m_BytesMoved += srcAllocSize;
13021 ++m_AllocationsMoved;
13023 VmaSuballocationList::iterator nextSuballocIt = srcSuballocIt;
13025 pSrcMetadata->m_Suballocations.erase(srcSuballocIt);
13026 srcSuballocIt = nextSuballocIt;
13028 pDstMetadata->m_Suballocations.push_back(suballoc);
13030 VmaDefragmentationMove move = {
13031 srcOrigBlockIndex, dstOrigBlockIndex,
13032 srcAllocOffset, dstAllocOffset,
13034 moves.push_back(move);
13040 m_BlockInfos.clear();
13042 PostprocessMetadata();
13047 void VmaDefragmentationAlgorithm_Fast::PreprocessMetadata()
13049 const size_t blockCount = m_pBlockVector->GetBlockCount();
13050 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13052 VmaBlockMetadata_Generic*
const pMetadata =
13053 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
13054 pMetadata->m_FreeCount = 0;
13055 pMetadata->m_SumFreeSize = pMetadata->GetSize();
13056 pMetadata->m_FreeSuballocationsBySize.clear();
13057 for(VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13058 it != pMetadata->m_Suballocations.end(); )
13060 if(it->type == VMA_SUBALLOCATION_TYPE_FREE)
13062 VmaSuballocationList::iterator nextIt = it;
13064 pMetadata->m_Suballocations.erase(it);
13075 void VmaDefragmentationAlgorithm_Fast::PostprocessMetadata()
13077 const size_t blockCount = m_pBlockVector->GetBlockCount();
13078 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
13080 VmaBlockMetadata_Generic*
const pMetadata =
13081 (VmaBlockMetadata_Generic*)m_pBlockVector->GetBlock(blockIndex)->m_pMetadata;
13082 const VkDeviceSize blockSize = pMetadata->GetSize();
13085 if(pMetadata->m_Suballocations.empty())
13087 pMetadata->m_FreeCount = 1;
13089 VmaSuballocation suballoc = {
13093 VMA_SUBALLOCATION_TYPE_FREE };
13094 pMetadata->m_Suballocations.push_back(suballoc);
13095 pMetadata->RegisterFreeSuballocation(pMetadata->m_Suballocations.begin());
13100 VkDeviceSize offset = 0;
13101 VmaSuballocationList::iterator it;
13102 for(it = pMetadata->m_Suballocations.begin();
13103 it != pMetadata->m_Suballocations.end();
13106 VMA_ASSERT(it->type != VMA_SUBALLOCATION_TYPE_FREE);
13107 VMA_ASSERT(it->offset >= offset);
13110 if(it->offset > offset)
13112 ++pMetadata->m_FreeCount;
13113 const VkDeviceSize freeSize = it->offset - offset;
13114 VmaSuballocation suballoc = {
13118 VMA_SUBALLOCATION_TYPE_FREE };
13119 VmaSuballocationList::iterator precedingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
13120 if(freeSize >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13122 pMetadata->m_FreeSuballocationsBySize.push_back(precedingFreeIt);
13126 pMetadata->m_SumFreeSize -= it->size;
13127 offset = it->offset + it->size;
13131 if(offset < blockSize)
13133 ++pMetadata->m_FreeCount;
13134 const VkDeviceSize freeSize = blockSize - offset;
13135 VmaSuballocation suballoc = {
13139 VMA_SUBALLOCATION_TYPE_FREE };
13140 VMA_ASSERT(it == pMetadata->m_Suballocations.end());
13141 VmaSuballocationList::iterator trailingFreeIt = pMetadata->m_Suballocations.insert(it, suballoc);
13142 if(freeSize > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
13144 pMetadata->m_FreeSuballocationsBySize.push_back(trailingFreeIt);
13149 pMetadata->m_FreeSuballocationsBySize.begin(),
13150 pMetadata->m_FreeSuballocationsBySize.end(),
13151 VmaSuballocationItemSizeLess());
13154 VMA_HEAVY_ASSERT(pMetadata->Validate());
13158 void VmaDefragmentationAlgorithm_Fast::InsertSuballoc(VmaBlockMetadata_Generic* pMetadata,
const VmaSuballocation& suballoc)
13161 VmaSuballocationList::iterator it = pMetadata->m_Suballocations.begin();
13162 while(it != pMetadata->m_Suballocations.end())
13164 if(it->offset < suballoc.offset)
13169 pMetadata->m_Suballocations.insert(it, suballoc);
13175 VmaBlockVectorDefragmentationContext::VmaBlockVectorDefragmentationContext(
13178 VmaBlockVector* pBlockVector,
13179 uint32_t currFrameIndex) :
13181 mutexLocked(false),
13182 blockContexts(VmaStlAllocator<VmaBlockDefragmentationContext>(hAllocator->GetAllocationCallbacks())),
13183 m_hAllocator(hAllocator),
13184 m_hCustomPool(hCustomPool),
13185 m_pBlockVector(pBlockVector),
13186 m_CurrFrameIndex(currFrameIndex),
13187 m_pAlgorithm(VMA_NULL),
13188 m_Allocations(VmaStlAllocator<AllocInfo>(hAllocator->GetAllocationCallbacks())),
13189 m_AllAllocations(false)
13193 VmaBlockVectorDefragmentationContext::~VmaBlockVectorDefragmentationContext()
13195 vma_delete(m_hAllocator, m_pAlgorithm);
13198 void VmaBlockVectorDefragmentationContext::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
13200 AllocInfo info = { hAlloc, pChanged };
13201 m_Allocations.push_back(info);
13204 void VmaBlockVectorDefragmentationContext::Begin(
bool overlappingMoveSupported)
13206 const bool allAllocations = m_AllAllocations ||
13207 m_Allocations.size() == m_pBlockVector->CalcAllocationCount();
13219 if(VMA_DEBUG_MARGIN == 0 &&
13221 !m_pBlockVector->IsBufferImageGranularityConflictPossible())
13223 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Fast)(
13224 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13228 m_pAlgorithm = vma_new(m_hAllocator, VmaDefragmentationAlgorithm_Generic)(
13229 m_hAllocator, m_pBlockVector, m_CurrFrameIndex, overlappingMoveSupported);
13234 m_pAlgorithm->AddAll();
13238 for(
size_t i = 0, count = m_Allocations.size(); i < count; ++i)
13240 m_pAlgorithm->AddAllocation(m_Allocations[i].hAlloc, m_Allocations[i].pChanged);
13248 VmaDefragmentationContext_T::VmaDefragmentationContext_T(
13250 uint32_t currFrameIndex,
13253 m_hAllocator(hAllocator),
13254 m_CurrFrameIndex(currFrameIndex),
13257 m_CustomPoolContexts(VmaStlAllocator<VmaBlockVectorDefragmentationContext*>(hAllocator->GetAllocationCallbacks()))
13259 memset(m_DefaultPoolContexts, 0,
sizeof(m_DefaultPoolContexts));
13262 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
13264 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13266 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[i];
13267 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13268 vma_delete(m_hAllocator, pBlockVectorCtx);
13270 for(
size_t i = m_hAllocator->m_MemProps.memoryTypeCount; i--; )
13272 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[i];
13273 if(pBlockVectorCtx)
13275 pBlockVectorCtx->GetBlockVector()->DefragmentationEnd(pBlockVectorCtx, m_pStats);
13276 vma_delete(m_hAllocator, pBlockVectorCtx);
13281 void VmaDefragmentationContext_T::AddPools(uint32_t poolCount,
VmaPool* pPools)
13283 for(uint32_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13285 VmaPool pool = pPools[poolIndex];
13288 if(pool->m_BlockVector.GetAlgorithm() == 0)
13290 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
13292 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13294 if(m_CustomPoolContexts[i]->GetCustomPool() == pool)
13296 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13301 if(!pBlockVectorDefragCtx)
13303 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13306 &pool->m_BlockVector,
13308 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
13311 pBlockVectorDefragCtx->AddAll();
13316 void VmaDefragmentationContext_T::AddAllocations(
13317 uint32_t allocationCount,
13319 VkBool32* pAllocationsChanged)
13322 for(uint32_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
13325 VMA_ASSERT(hAlloc);
13327 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
13329 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
13331 VmaBlockVectorDefragmentationContext* pBlockVectorDefragCtx = VMA_NULL;
13333 const VmaPool hAllocPool = hAlloc->GetBlock()->GetParentPool();
13335 if(hAllocPool != VK_NULL_HANDLE)
13338 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
13340 for(
size_t i = m_CustomPoolContexts.size(); i--; )
13342 if(m_CustomPoolContexts[i]->GetCustomPool() == hAllocPool)
13344 pBlockVectorDefragCtx = m_CustomPoolContexts[i];
13348 if(!pBlockVectorDefragCtx)
13350 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13353 &hAllocPool->m_BlockVector,
13355 m_CustomPoolContexts.push_back(pBlockVectorDefragCtx);
13362 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
13363 pBlockVectorDefragCtx = m_DefaultPoolContexts[memTypeIndex];
13364 if(!pBlockVectorDefragCtx)
13366 pBlockVectorDefragCtx = vma_new(m_hAllocator, VmaBlockVectorDefragmentationContext)(
13369 m_hAllocator->m_pBlockVectors[memTypeIndex],
13371 m_DefaultPoolContexts[memTypeIndex] = pBlockVectorDefragCtx;
13375 if(pBlockVectorDefragCtx)
13377 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
13378 &pAllocationsChanged[allocIndex] : VMA_NULL;
13379 pBlockVectorDefragCtx->AddAllocation(hAlloc, pChanged);
13385 VkResult VmaDefragmentationContext_T::Defragment(
13386 VkDeviceSize maxCpuBytesToMove, uint32_t maxCpuAllocationsToMove,
13387 VkDeviceSize maxGpuBytesToMove, uint32_t maxGpuAllocationsToMove,
13395 if(commandBuffer == VK_NULL_HANDLE)
13397 maxGpuBytesToMove = 0;
13398 maxGpuAllocationsToMove = 0;
13401 VkResult res = VK_SUCCESS;
13404 for(uint32_t memTypeIndex = 0;
13405 memTypeIndex < m_hAllocator->GetMemoryTypeCount() && res >= VK_SUCCESS;
13408 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_DefaultPoolContexts[memTypeIndex];
13409 if(pBlockVectorCtx)
13411 VMA_ASSERT(pBlockVectorCtx->GetBlockVector());
13412 pBlockVectorCtx->GetBlockVector()->Defragment(
13415 maxCpuBytesToMove, maxCpuAllocationsToMove,
13416 maxGpuBytesToMove, maxGpuAllocationsToMove,
13418 if(pBlockVectorCtx->res != VK_SUCCESS)
13420 res = pBlockVectorCtx->res;
13426 for(
size_t customCtxIndex = 0, customCtxCount = m_CustomPoolContexts.size();
13427 customCtxIndex < customCtxCount && res >= VK_SUCCESS;
13430 VmaBlockVectorDefragmentationContext* pBlockVectorCtx = m_CustomPoolContexts[customCtxIndex];
13431 VMA_ASSERT(pBlockVectorCtx && pBlockVectorCtx->GetBlockVector());
13432 pBlockVectorCtx->GetBlockVector()->Defragment(
13435 maxCpuBytesToMove, maxCpuAllocationsToMove,
13436 maxGpuBytesToMove, maxGpuAllocationsToMove,
13438 if(pBlockVectorCtx->res != VK_SUCCESS)
13440 res = pBlockVectorCtx->res;
13450 #if VMA_RECORDING_ENABLED 13452 VmaRecorder::VmaRecorder() :
13457 m_StartCounter(INT64_MAX)
13463 m_UseMutex = useMutex;
13464 m_Flags = settings.
flags;
13466 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
13467 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
13470 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
13473 return VK_ERROR_INITIALIZATION_FAILED;
13477 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
13478 fprintf(m_File,
"%s\n",
"1,6");
13483 VmaRecorder::~VmaRecorder()
13485 if(m_File != VMA_NULL)
13491 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
13493 CallParams callParams;
13494 GetBasicParams(callParams);
13496 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13497 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
13501 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
13503 CallParams callParams;
13504 GetBasicParams(callParams);
13506 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13507 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
13513 CallParams callParams;
13514 GetBasicParams(callParams);
13516 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13517 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
13528 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
13530 CallParams callParams;
13531 GetBasicParams(callParams);
13533 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13534 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
13539 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
13540 const VkMemoryRequirements& vkMemReq,
13544 CallParams callParams;
13545 GetBasicParams(callParams);
13547 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13548 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13549 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13551 vkMemReq.alignment,
13552 vkMemReq.memoryTypeBits,
13560 userDataStr.GetString());
13564 void VmaRecorder::RecordAllocateMemoryPages(uint32_t frameIndex,
13565 const VkMemoryRequirements& vkMemReq,
13567 uint64_t allocationCount,
13570 CallParams callParams;
13571 GetBasicParams(callParams);
13573 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13574 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13575 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryPages,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,", callParams.threadId, callParams.time, frameIndex,
13577 vkMemReq.alignment,
13578 vkMemReq.memoryTypeBits,
13585 PrintPointerList(allocationCount, pAllocations);
13586 fprintf(m_File,
",%s\n", userDataStr.GetString());
13590 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
13591 const VkMemoryRequirements& vkMemReq,
13592 bool requiresDedicatedAllocation,
13593 bool prefersDedicatedAllocation,
13597 CallParams callParams;
13598 GetBasicParams(callParams);
13600 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13601 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13602 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13604 vkMemReq.alignment,
13605 vkMemReq.memoryTypeBits,
13606 requiresDedicatedAllocation ? 1 : 0,
13607 prefersDedicatedAllocation ? 1 : 0,
13615 userDataStr.GetString());
13619 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
13620 const VkMemoryRequirements& vkMemReq,
13621 bool requiresDedicatedAllocation,
13622 bool prefersDedicatedAllocation,
13626 CallParams callParams;
13627 GetBasicParams(callParams);
13629 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13630 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
13631 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13633 vkMemReq.alignment,
13634 vkMemReq.memoryTypeBits,
13635 requiresDedicatedAllocation ? 1 : 0,
13636 prefersDedicatedAllocation ? 1 : 0,
13644 userDataStr.GetString());
13648 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
13651 CallParams callParams;
13652 GetBasicParams(callParams);
13654 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13655 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
13660 void VmaRecorder::RecordFreeMemoryPages(uint32_t frameIndex,
13661 uint64_t allocationCount,
13664 CallParams callParams;
13665 GetBasicParams(callParams);
13667 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13668 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemoryPages,", callParams.threadId, callParams.time, frameIndex);
13669 PrintPointerList(allocationCount, pAllocations);
13670 fprintf(m_File,
"\n");
13674 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
13676 const void* pUserData)
13678 CallParams callParams;
13679 GetBasicParams(callParams);
13681 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13682 UserDataString userDataStr(
13685 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13687 userDataStr.GetString());
13691 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
13694 CallParams callParams;
13695 GetBasicParams(callParams);
13697 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13698 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
13703 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
13706 CallParams callParams;
13707 GetBasicParams(callParams);
13709 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13710 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
13715 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
13718 CallParams callParams;
13719 GetBasicParams(callParams);
13721 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13722 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
13727 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
13728 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13730 CallParams callParams;
13731 GetBasicParams(callParams);
13733 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13734 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
13741 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
13742 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
13744 CallParams callParams;
13745 GetBasicParams(callParams);
13747 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13748 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
13755 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
13756 const VkBufferCreateInfo& bufCreateInfo,
13760 CallParams callParams;
13761 GetBasicParams(callParams);
13763 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13764 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13765 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13766 bufCreateInfo.flags,
13767 bufCreateInfo.size,
13768 bufCreateInfo.usage,
13769 bufCreateInfo.sharingMode,
13770 allocCreateInfo.
flags,
13771 allocCreateInfo.
usage,
13775 allocCreateInfo.
pool,
13777 userDataStr.GetString());
13781 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
13782 const VkImageCreateInfo& imageCreateInfo,
13786 CallParams callParams;
13787 GetBasicParams(callParams);
13789 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13790 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
13791 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
13792 imageCreateInfo.flags,
13793 imageCreateInfo.imageType,
13794 imageCreateInfo.format,
13795 imageCreateInfo.extent.width,
13796 imageCreateInfo.extent.height,
13797 imageCreateInfo.extent.depth,
13798 imageCreateInfo.mipLevels,
13799 imageCreateInfo.arrayLayers,
13800 imageCreateInfo.samples,
13801 imageCreateInfo.tiling,
13802 imageCreateInfo.usage,
13803 imageCreateInfo.sharingMode,
13804 imageCreateInfo.initialLayout,
13805 allocCreateInfo.
flags,
13806 allocCreateInfo.
usage,
13810 allocCreateInfo.
pool,
13812 userDataStr.GetString());
13816 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
13819 CallParams callParams;
13820 GetBasicParams(callParams);
13822 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13823 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
13828 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
13831 CallParams callParams;
13832 GetBasicParams(callParams);
13834 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13835 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
13840 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
13843 CallParams callParams;
13844 GetBasicParams(callParams);
13846 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13847 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
13852 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
13855 CallParams callParams;
13856 GetBasicParams(callParams);
13858 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13859 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
13864 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
13867 CallParams callParams;
13868 GetBasicParams(callParams);
13870 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13871 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
13876 void VmaRecorder::RecordDefragmentationBegin(uint32_t frameIndex,
13880 CallParams callParams;
13881 GetBasicParams(callParams);
13883 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13884 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationBegin,%u,", callParams.threadId, callParams.time, frameIndex,
13887 fprintf(m_File,
",");
13889 fprintf(m_File,
",%llu,%u,%llu,%u,%p,%p\n",
13899 void VmaRecorder::RecordDefragmentationEnd(uint32_t frameIndex,
13902 CallParams callParams;
13903 GetBasicParams(callParams);
13905 VmaMutexLock lock(m_FileMutex, m_UseMutex);
13906 fprintf(m_File,
"%u,%.3f,%u,vmaDefragmentationEnd,%p\n", callParams.threadId, callParams.time, frameIndex,
13913 if(pUserData != VMA_NULL)
13917 m_Str = (
const char*)pUserData;
13921 sprintf_s(m_PtrStr,
"%p", pUserData);
13931 void VmaRecorder::WriteConfiguration(
13932 const VkPhysicalDeviceProperties& devProps,
13933 const VkPhysicalDeviceMemoryProperties& memProps,
13934 bool dedicatedAllocationExtensionEnabled)
13936 fprintf(m_File,
"Config,Begin\n");
13938 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
13939 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
13940 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
13941 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
13942 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
13943 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
13945 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
13946 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
13947 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
13949 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
13950 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
13952 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
13953 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
13955 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
13956 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
13958 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
13959 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
13962 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
13964 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
13965 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
13966 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
13967 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
13968 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
13969 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
13970 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
13971 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
13972 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
13974 fprintf(m_File,
"Config,End\n");
13977 void VmaRecorder::GetBasicParams(CallParams& outParams)
13979 outParams.threadId = GetCurrentThreadId();
13981 LARGE_INTEGER counter;
13982 QueryPerformanceCounter(&counter);
13983 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
13986 void VmaRecorder::PrintPointerList(uint64_t count,
const VmaAllocation* pItems)
13990 fprintf(m_File,
"%p", pItems[0]);
13991 for(uint64_t i = 1; i < count; ++i)
13993 fprintf(m_File,
" %p", pItems[i]);
13998 void VmaRecorder::Flush()
14006 #endif // #if VMA_RECORDING_ENABLED 14011 VmaAllocationObjectAllocator::VmaAllocationObjectAllocator(
const VkAllocationCallbacks* pAllocationCallbacks) :
14012 m_Allocator(pAllocationCallbacks, 1024)
14018 VmaMutexLock mutexLock(m_Mutex);
14019 return m_Allocator.Alloc();
14022 void VmaAllocationObjectAllocator::Free(
VmaAllocation hAlloc)
14024 VmaMutexLock mutexLock(m_Mutex);
14025 m_Allocator.Free(hAlloc);
14034 m_hDevice(pCreateInfo->device),
14035 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
14036 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
14037 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
14038 m_AllocationObjectAllocator(&m_AllocationCallbacks),
14039 m_PreferredLargeHeapBlockSize(0),
14040 m_PhysicalDevice(pCreateInfo->physicalDevice),
14041 m_CurrentFrameIndex(0),
14042 m_GpuDefragmentationMemoryTypeBits(UINT32_MAX),
14043 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
14046 ,m_pRecorder(VMA_NULL)
14049 if(VMA_DEBUG_DETECT_CORRUPTION)
14052 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
14057 #if !(VMA_DEDICATED_ALLOCATION) 14060 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
14064 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
14065 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
14066 memset(&m_MemProps, 0,
sizeof(m_MemProps));
14068 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
14069 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
14071 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
14073 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
14084 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
14085 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
14087 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
14088 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
14089 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
14090 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
14097 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
14099 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
14100 if(limit != VK_WHOLE_SIZE)
14102 m_HeapSizeLimit[heapIndex] = limit;
14103 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
14105 m_MemProps.memoryHeaps[heapIndex].size = limit;
14111 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14113 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
14115 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
14119 preferredBlockSize,
14122 GetBufferImageGranularity(),
14129 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
14136 VkResult res = VK_SUCCESS;
14141 #if VMA_RECORDING_ENABLED 14142 m_pRecorder = vma_new(
this, VmaRecorder)();
14144 if(res != VK_SUCCESS)
14148 m_pRecorder->WriteConfiguration(
14149 m_PhysicalDeviceProperties,
14151 m_UseKhrDedicatedAllocation);
14152 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
14154 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
14155 return VK_ERROR_FEATURE_NOT_PRESENT;
14162 VmaAllocator_T::~VmaAllocator_T()
14164 #if VMA_RECORDING_ENABLED 14165 if(m_pRecorder != VMA_NULL)
14167 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
14168 vma_delete(
this, m_pRecorder);
14172 VMA_ASSERT(m_Pools.empty());
14174 for(
size_t i = GetMemoryTypeCount(); i--; )
14176 if(m_pDedicatedAllocations[i] != VMA_NULL && !m_pDedicatedAllocations[i]->empty())
14178 VMA_ASSERT(0 &&
"Unfreed dedicated allocations found.");
14181 vma_delete(
this, m_pDedicatedAllocations[i]);
14182 vma_delete(
this, m_pBlockVectors[i]);
14186 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
14188 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14189 m_VulkanFunctions.vkGetPhysicalDeviceProperties = (PFN_vkGetPhysicalDeviceProperties)vkGetPhysicalDeviceProperties;
14190 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = (PFN_vkGetPhysicalDeviceMemoryProperties)vkGetPhysicalDeviceMemoryProperties;
14191 m_VulkanFunctions.vkAllocateMemory = (PFN_vkAllocateMemory)vkAllocateMemory;
14192 m_VulkanFunctions.vkFreeMemory = (PFN_vkFreeMemory)vkFreeMemory;
14193 m_VulkanFunctions.vkMapMemory = (PFN_vkMapMemory)vkMapMemory;
14194 m_VulkanFunctions.vkUnmapMemory = (PFN_vkUnmapMemory)vkUnmapMemory;
14195 m_VulkanFunctions.vkFlushMappedMemoryRanges = (PFN_vkFlushMappedMemoryRanges)vkFlushMappedMemoryRanges;
14196 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = (PFN_vkInvalidateMappedMemoryRanges)vkInvalidateMappedMemoryRanges;
14197 m_VulkanFunctions.vkBindBufferMemory = (PFN_vkBindBufferMemory)vkBindBufferMemory;
14198 m_VulkanFunctions.vkBindImageMemory = (PFN_vkBindImageMemory)vkBindImageMemory;
14199 m_VulkanFunctions.vkGetBufferMemoryRequirements = (PFN_vkGetBufferMemoryRequirements)vkGetBufferMemoryRequirements;
14200 m_VulkanFunctions.vkGetImageMemoryRequirements = (PFN_vkGetImageMemoryRequirements)vkGetImageMemoryRequirements;
14201 m_VulkanFunctions.vkCreateBuffer = (PFN_vkCreateBuffer)vkCreateBuffer;
14202 m_VulkanFunctions.vkDestroyBuffer = (PFN_vkDestroyBuffer)vkDestroyBuffer;
14203 m_VulkanFunctions.vkCreateImage = (PFN_vkCreateImage)vkCreateImage;
14204 m_VulkanFunctions.vkDestroyImage = (PFN_vkDestroyImage)vkDestroyImage;
14205 m_VulkanFunctions.vkCmdCopyBuffer = (PFN_vkCmdCopyBuffer)vkCmdCopyBuffer;
14206 #if VMA_DEDICATED_ALLOCATION 14207 if(m_UseKhrDedicatedAllocation)
14209 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
14210 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
14211 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
14212 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
14214 #endif // #if VMA_DEDICATED_ALLOCATION 14215 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 14217 #define VMA_COPY_IF_NOT_NULL(funcName) \ 14218 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 14220 if(pVulkanFunctions != VMA_NULL)
14222 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
14223 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
14224 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
14225 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
14226 VMA_COPY_IF_NOT_NULL(vkMapMemory);
14227 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
14228 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
14229 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
14230 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
14231 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
14232 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
14233 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
14234 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
14235 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
14236 VMA_COPY_IF_NOT_NULL(vkCreateImage);
14237 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
14238 VMA_COPY_IF_NOT_NULL(vkCmdCopyBuffer);
14239 #if VMA_DEDICATED_ALLOCATION 14240 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
14241 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
14245 #undef VMA_COPY_IF_NOT_NULL 14249 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
14250 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
14251 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
14252 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
14253 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
14254 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
14255 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
14256 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
14257 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
14258 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
14259 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
14260 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
14261 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
14262 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
14263 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
14264 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
14265 VMA_ASSERT(m_VulkanFunctions.vkCmdCopyBuffer != VMA_NULL);
14266 #if VMA_DEDICATED_ALLOCATION 14267 if(m_UseKhrDedicatedAllocation)
14269 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
14270 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
14275 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
14277 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14278 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
14279 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
14280 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
14283 VkResult VmaAllocator_T::AllocateMemoryOfType(
14285 VkDeviceSize alignment,
14286 bool dedicatedAllocation,
14287 VkBuffer dedicatedBuffer,
14288 VkImage dedicatedImage,
14290 uint32_t memTypeIndex,
14291 VmaSuballocationType suballocType,
14292 size_t allocationCount,
14295 VMA_ASSERT(pAllocations != VMA_NULL);
14296 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, AllocationCount=%zu, Size=%llu", memTypeIndex, allocationCount, size);
14302 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
14307 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
14308 VMA_ASSERT(blockVector);
14310 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
14311 bool preferDedicatedMemory =
14312 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
14313 dedicatedAllocation ||
14315 size > preferredBlockSize / 2;
14317 if(preferDedicatedMemory &&
14319 finalCreateInfo.
pool == VK_NULL_HANDLE)
14328 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14332 return AllocateDedicatedMemory(
14347 VkResult res = blockVector->Allocate(
14348 m_CurrentFrameIndex.load(),
14355 if(res == VK_SUCCESS)
14363 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14367 res = AllocateDedicatedMemory(
14373 finalCreateInfo.pUserData,
14378 if(res == VK_SUCCESS)
14381 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
14387 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
14394 VkResult VmaAllocator_T::AllocateDedicatedMemory(
14396 VmaSuballocationType suballocType,
14397 uint32_t memTypeIndex,
14399 bool isUserDataString,
14401 VkBuffer dedicatedBuffer,
14402 VkImage dedicatedImage,
14403 size_t allocationCount,
14406 VMA_ASSERT(allocationCount > 0 && pAllocations);
14408 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
14409 allocInfo.memoryTypeIndex = memTypeIndex;
14410 allocInfo.allocationSize = size;
14412 #if VMA_DEDICATED_ALLOCATION 14413 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
14414 if(m_UseKhrDedicatedAllocation)
14416 if(dedicatedBuffer != VK_NULL_HANDLE)
14418 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
14419 dedicatedAllocInfo.buffer = dedicatedBuffer;
14420 allocInfo.pNext = &dedicatedAllocInfo;
14422 else if(dedicatedImage != VK_NULL_HANDLE)
14424 dedicatedAllocInfo.image = dedicatedImage;
14425 allocInfo.pNext = &dedicatedAllocInfo;
14428 #endif // #if VMA_DEDICATED_ALLOCATION 14431 VkResult res = VK_SUCCESS;
14432 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14434 res = AllocateDedicatedMemoryPage(
14442 pAllocations + allocIndex);
14443 if(res != VK_SUCCESS)
14449 if(res == VK_SUCCESS)
14453 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14454 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
14455 VMA_ASSERT(pDedicatedAllocations);
14456 for(allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
14458 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, pAllocations[allocIndex]);
14462 VMA_DEBUG_LOG(
" Allocated DedicatedMemory Count=%zu, MemoryTypeIndex=#%u", allocationCount, memTypeIndex);
14467 while(allocIndex--)
14470 VkDeviceMemory hMemory = currAlloc->GetMemory();
14482 FreeVulkanMemory(memTypeIndex, currAlloc->GetSize(), hMemory);
14484 currAlloc->SetUserData(
this, VMA_NULL);
14486 m_AllocationObjectAllocator.Free(currAlloc);
14489 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
14495 VkResult VmaAllocator_T::AllocateDedicatedMemoryPage(
14497 VmaSuballocationType suballocType,
14498 uint32_t memTypeIndex,
14499 const VkMemoryAllocateInfo& allocInfo,
14501 bool isUserDataString,
14505 VkDeviceMemory hMemory = VK_NULL_HANDLE;
14506 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
14509 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
14513 void* pMappedData = VMA_NULL;
14516 res = (*m_VulkanFunctions.vkMapMemory)(
14525 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
14526 FreeVulkanMemory(memTypeIndex, size, hMemory);
14531 *pAllocation = m_AllocationObjectAllocator.Allocate();
14532 (*pAllocation)->Ctor(m_CurrentFrameIndex.load(), isUserDataString);
14533 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
14534 (*pAllocation)->SetUserData(
this, pUserData);
14535 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14537 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
14543 void VmaAllocator_T::GetBufferMemoryRequirements(
14545 VkMemoryRequirements& memReq,
14546 bool& requiresDedicatedAllocation,
14547 bool& prefersDedicatedAllocation)
const 14549 #if VMA_DEDICATED_ALLOCATION 14550 if(m_UseKhrDedicatedAllocation)
14552 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
14553 memReqInfo.buffer = hBuffer;
14555 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14557 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14558 memReq2.pNext = &memDedicatedReq;
14560 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14562 memReq = memReq2.memoryRequirements;
14563 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14564 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
14567 #endif // #if VMA_DEDICATED_ALLOCATION 14569 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
14570 requiresDedicatedAllocation =
false;
14571 prefersDedicatedAllocation =
false;
14575 void VmaAllocator_T::GetImageMemoryRequirements(
14577 VkMemoryRequirements& memReq,
14578 bool& requiresDedicatedAllocation,
14579 bool& prefersDedicatedAllocation)
const 14581 #if VMA_DEDICATED_ALLOCATION 14582 if(m_UseKhrDedicatedAllocation)
14584 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
14585 memReqInfo.image = hImage;
14587 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
14589 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
14590 memReq2.pNext = &memDedicatedReq;
14592 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
14594 memReq = memReq2.memoryRequirements;
14595 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
14596 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
14599 #endif // #if VMA_DEDICATED_ALLOCATION 14601 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
14602 requiresDedicatedAllocation =
false;
14603 prefersDedicatedAllocation =
false;
14607 VkResult VmaAllocator_T::AllocateMemory(
14608 const VkMemoryRequirements& vkMemReq,
14609 bool requiresDedicatedAllocation,
14610 bool prefersDedicatedAllocation,
14611 VkBuffer dedicatedBuffer,
14612 VkImage dedicatedImage,
14614 VmaSuballocationType suballocType,
14615 size_t allocationCount,
14618 memset(pAllocations, 0,
sizeof(
VmaAllocation) * allocationCount);
14620 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
14622 if(vkMemReq.size == 0)
14624 return VK_ERROR_VALIDATION_FAILED_EXT;
14629 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
14630 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14635 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
14636 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14638 if(requiresDedicatedAllocation)
14642 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
14643 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14645 if(createInfo.
pool != VK_NULL_HANDLE)
14647 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
14648 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14651 if((createInfo.
pool != VK_NULL_HANDLE) &&
14654 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
14655 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14658 if(createInfo.
pool != VK_NULL_HANDLE)
14660 const VkDeviceSize alignmentForPool = VMA_MAX(
14661 vkMemReq.alignment,
14662 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
14667 (m_MemProps.memoryTypes[createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
14672 return createInfo.
pool->m_BlockVector.Allocate(
14673 m_CurrentFrameIndex.load(),
14684 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
14685 uint32_t memTypeIndex = UINT32_MAX;
14687 if(res == VK_SUCCESS)
14689 VkDeviceSize alignmentForMemType = VMA_MAX(
14690 vkMemReq.alignment,
14691 GetMemoryTypeMinAlignment(memTypeIndex));
14693 res = AllocateMemoryOfType(
14695 alignmentForMemType,
14696 requiresDedicatedAllocation || prefersDedicatedAllocation,
14705 if(res == VK_SUCCESS)
14715 memoryTypeBits &= ~(1u << memTypeIndex);
14718 if(res == VK_SUCCESS)
14720 alignmentForMemType = VMA_MAX(
14721 vkMemReq.alignment,
14722 GetMemoryTypeMinAlignment(memTypeIndex));
14724 res = AllocateMemoryOfType(
14726 alignmentForMemType,
14727 requiresDedicatedAllocation || prefersDedicatedAllocation,
14736 if(res == VK_SUCCESS)
14746 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
14757 void VmaAllocator_T::FreeMemory(
14758 size_t allocationCount,
14761 VMA_ASSERT(pAllocations);
14763 for(
size_t allocIndex = allocationCount; allocIndex--; )
14767 if(allocation != VK_NULL_HANDLE)
14769 if(TouchAllocation(allocation))
14771 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
14773 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
14776 switch(allocation->GetType())
14778 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
14780 VmaBlockVector* pBlockVector = VMA_NULL;
14781 VmaPool hPool = allocation->GetBlock()->GetParentPool();
14782 if(hPool != VK_NULL_HANDLE)
14784 pBlockVector = &hPool->m_BlockVector;
14788 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
14789 pBlockVector = m_pBlockVectors[memTypeIndex];
14791 pBlockVector->Free(allocation);
14794 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
14795 FreeDedicatedMemory(allocation);
14802 allocation->SetUserData(
this, VMA_NULL);
14803 allocation->Dtor();
14804 m_AllocationObjectAllocator.Free(allocation);
14809 VkResult VmaAllocator_T::ResizeAllocation(
14811 VkDeviceSize newSize)
14814 if(newSize == 0 || alloc->GetLastUseFrameIndex() == VMA_FRAME_INDEX_LOST)
14816 return VK_ERROR_VALIDATION_FAILED_EXT;
14818 if(newSize == alloc->GetSize())
14822 return VK_ERROR_OUT_OF_POOL_MEMORY;
14825 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
14828 InitStatInfo(pStats->
total);
14829 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
14831 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
14835 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14837 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
14838 VMA_ASSERT(pBlockVector);
14839 pBlockVector->AddStats(pStats);
14844 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
14845 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
14847 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
14852 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
14854 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
14855 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
14856 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
14857 VMA_ASSERT(pDedicatedAllocVector);
14858 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
14861 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
14862 VmaAddStatInfo(pStats->
total, allocationStatInfo);
14863 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
14864 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
14869 VmaPostprocessCalcStatInfo(pStats->
total);
14870 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
14871 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
14872 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
14873 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
14876 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
14878 VkResult VmaAllocator_T::DefragmentationBegin(
14888 *pContext = vma_new(
this, VmaDefragmentationContext_T)(
14889 this, m_CurrentFrameIndex.load(), info.
flags, pStats);
14892 (*pContext)->AddAllocations(
14895 VkResult res = (*pContext)->Defragment(
14900 if(res != VK_NOT_READY)
14902 vma_delete(
this, *pContext);
14903 *pContext = VMA_NULL;
14909 VkResult VmaAllocator_T::DefragmentationEnd(
14912 vma_delete(
this, context);
14918 if(hAllocation->CanBecomeLost())
14924 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
14925 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
14928 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
14932 pAllocationInfo->
offset = 0;
14933 pAllocationInfo->
size = hAllocation->GetSize();
14935 pAllocationInfo->
pUserData = hAllocation->GetUserData();
14938 else if(localLastUseFrameIndex == localCurrFrameIndex)
14940 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
14941 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
14942 pAllocationInfo->
offset = hAllocation->GetOffset();
14943 pAllocationInfo->
size = hAllocation->GetSize();
14945 pAllocationInfo->
pUserData = hAllocation->GetUserData();
14950 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
14952 localLastUseFrameIndex = localCurrFrameIndex;
14959 #if VMA_STATS_STRING_ENABLED 14960 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
14961 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
14964 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
14965 if(localLastUseFrameIndex == localCurrFrameIndex)
14971 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
14973 localLastUseFrameIndex = localCurrFrameIndex;
14979 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
14980 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
14981 pAllocationInfo->
offset = hAllocation->GetOffset();
14982 pAllocationInfo->
size = hAllocation->GetSize();
14983 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
14984 pAllocationInfo->
pUserData = hAllocation->GetUserData();
14988 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
14991 if(hAllocation->CanBecomeLost())
14993 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
14994 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
14997 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
15001 else if(localLastUseFrameIndex == localCurrFrameIndex)
15007 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15009 localLastUseFrameIndex = localCurrFrameIndex;
15016 #if VMA_STATS_STRING_ENABLED 15017 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
15018 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
15021 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
15022 if(localLastUseFrameIndex == localCurrFrameIndex)
15028 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
15030 localLastUseFrameIndex = localCurrFrameIndex;
15042 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
15052 return VK_ERROR_INITIALIZATION_FAILED;
15055 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
15057 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
15059 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
15060 if(res != VK_SUCCESS)
15062 vma_delete(
this, *pPool);
15069 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15070 (*pPool)->SetId(m_NextPoolId++);
15071 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
15077 void VmaAllocator_T::DestroyPool(
VmaPool pool)
15081 VmaMutexLockWrite lock(m_PoolsMutex, m_UseMutex);
15082 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
15083 VMA_ASSERT(success &&
"Pool not found in Allocator.");
15086 vma_delete(
this, pool);
15091 pool->m_BlockVector.GetPoolStats(pPoolStats);
15094 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
15096 m_CurrentFrameIndex.store(frameIndex);
15099 void VmaAllocator_T::MakePoolAllocationsLost(
15101 size_t* pLostAllocationCount)
15103 hPool->m_BlockVector.MakePoolAllocationsLost(
15104 m_CurrentFrameIndex.load(),
15105 pLostAllocationCount);
15108 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
15110 return hPool->m_BlockVector.CheckCorruption();
15113 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
15115 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
15118 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15120 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
15122 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
15123 VMA_ASSERT(pBlockVector);
15124 VkResult localRes = pBlockVector->CheckCorruption();
15127 case VK_ERROR_FEATURE_NOT_PRESENT:
15130 finalRes = VK_SUCCESS;
15140 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15141 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
15143 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
15145 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
15148 case VK_ERROR_FEATURE_NOT_PRESENT:
15151 finalRes = VK_SUCCESS;
15163 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
15165 *pAllocation = m_AllocationObjectAllocator.Allocate();
15166 (*pAllocation)->Ctor(VMA_FRAME_INDEX_LOST,
false);
15167 (*pAllocation)->InitLost();
15170 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
15172 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
15175 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15177 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15178 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
15180 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15181 if(res == VK_SUCCESS)
15183 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
15188 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
15193 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
15196 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
15198 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
15204 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
15206 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
15208 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
15211 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
15213 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
15214 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
15216 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
15217 m_HeapSizeLimit[heapIndex] += size;
15221 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
15223 if(hAllocation->CanBecomeLost())
15225 return VK_ERROR_MEMORY_MAP_FAILED;
15228 switch(hAllocation->GetType())
15230 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15232 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15233 char *pBytes = VMA_NULL;
15234 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
15235 if(res == VK_SUCCESS)
15237 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
15238 hAllocation->BlockAllocMap();
15242 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15243 return hAllocation->DedicatedAllocMap(
this, ppData);
15246 return VK_ERROR_MEMORY_MAP_FAILED;
15252 switch(hAllocation->GetType())
15254 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15256 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
15257 hAllocation->BlockAllocUnmap();
15258 pBlock->Unmap(
this, 1);
15261 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15262 hAllocation->DedicatedAllocUnmap(
this);
15269 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
15271 VkResult res = VK_SUCCESS;
15272 switch(hAllocation->GetType())
15274 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15275 res = GetVulkanFunctions().vkBindBufferMemory(
15278 hAllocation->GetMemory(),
15281 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15283 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15284 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
15285 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
15294 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
15296 VkResult res = VK_SUCCESS;
15297 switch(hAllocation->GetType())
15299 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15300 res = GetVulkanFunctions().vkBindImageMemory(
15303 hAllocation->GetMemory(),
15306 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15308 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
15309 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
15310 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
15319 void VmaAllocator_T::FlushOrInvalidateAllocation(
15321 VkDeviceSize offset, VkDeviceSize size,
15322 VMA_CACHE_OPERATION op)
15324 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
15325 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
15327 const VkDeviceSize allocationSize = hAllocation->GetSize();
15328 VMA_ASSERT(offset <= allocationSize);
15330 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
15332 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
15333 memRange.memory = hAllocation->GetMemory();
15335 switch(hAllocation->GetType())
15337 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
15338 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15339 if(size == VK_WHOLE_SIZE)
15341 memRange.size = allocationSize - memRange.offset;
15345 VMA_ASSERT(offset + size <= allocationSize);
15346 memRange.size = VMA_MIN(
15347 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
15348 allocationSize - memRange.offset);
15352 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
15355 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
15356 if(size == VK_WHOLE_SIZE)
15358 size = allocationSize - offset;
15362 VMA_ASSERT(offset + size <= allocationSize);
15364 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
15367 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
15368 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
15369 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
15370 memRange.offset += allocationOffset;
15371 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
15382 case VMA_CACHE_FLUSH:
15383 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
15385 case VMA_CACHE_INVALIDATE:
15386 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
15395 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
15397 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
15399 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
15401 VmaMutexLockWrite lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15402 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
15403 VMA_ASSERT(pDedicatedAllocations);
15404 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
15405 VMA_ASSERT(success);
15408 VkDeviceMemory hMemory = allocation->GetMemory();
15420 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
15422 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
15425 uint32_t VmaAllocator_T::CalculateGpuDefragmentationMemoryTypeBits()
const 15427 VkBufferCreateInfo dummyBufCreateInfo;
15428 VmaFillGpuDefragmentationBufferCreateInfo(dummyBufCreateInfo);
15430 uint32_t memoryTypeBits = 0;
15433 VkBuffer buf = VK_NULL_HANDLE;
15434 VkResult res = (*GetVulkanFunctions().vkCreateBuffer)(
15435 m_hDevice, &dummyBufCreateInfo, GetAllocationCallbacks(), &buf);
15436 if(res == VK_SUCCESS)
15439 VkMemoryRequirements memReq;
15440 (*GetVulkanFunctions().vkGetBufferMemoryRequirements)(m_hDevice, buf, &memReq);
15441 memoryTypeBits = memReq.memoryTypeBits;
15444 (*GetVulkanFunctions().vkDestroyBuffer)(m_hDevice, buf, GetAllocationCallbacks());
15447 return memoryTypeBits;
15450 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
15452 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
15453 !hAllocation->CanBecomeLost() &&
15454 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15456 void* pData = VMA_NULL;
15457 VkResult res = Map(hAllocation, &pData);
15458 if(res == VK_SUCCESS)
15460 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
15461 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
15462 Unmap(hAllocation);
15466 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
15471 uint32_t VmaAllocator_T::GetGpuDefragmentationMemoryTypeBits()
15473 uint32_t memoryTypeBits = m_GpuDefragmentationMemoryTypeBits.load();
15474 if(memoryTypeBits == UINT32_MAX)
15476 memoryTypeBits = CalculateGpuDefragmentationMemoryTypeBits();
15477 m_GpuDefragmentationMemoryTypeBits.store(memoryTypeBits);
15479 return memoryTypeBits;
15482 #if VMA_STATS_STRING_ENABLED 15484 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
15486 bool dedicatedAllocationsStarted =
false;
15487 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15489 VmaMutexLockRead dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
15490 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
15491 VMA_ASSERT(pDedicatedAllocVector);
15492 if(pDedicatedAllocVector->empty() ==
false)
15494 if(dedicatedAllocationsStarted ==
false)
15496 dedicatedAllocationsStarted =
true;
15497 json.WriteString(
"DedicatedAllocations");
15498 json.BeginObject();
15501 json.BeginString(
"Type ");
15502 json.ContinueString(memTypeIndex);
15507 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
15509 json.BeginObject(
true);
15511 hAlloc->PrintParameters(json);
15518 if(dedicatedAllocationsStarted)
15524 bool allocationsStarted =
false;
15525 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
15527 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
15529 if(allocationsStarted ==
false)
15531 allocationsStarted =
true;
15532 json.WriteString(
"DefaultPools");
15533 json.BeginObject();
15536 json.BeginString(
"Type ");
15537 json.ContinueString(memTypeIndex);
15540 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
15543 if(allocationsStarted)
15551 VmaMutexLockRead lock(m_PoolsMutex, m_UseMutex);
15552 const size_t poolCount = m_Pools.size();
15555 json.WriteString(
"Pools");
15556 json.BeginObject();
15557 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
15559 json.BeginString();
15560 json.ContinueString(m_Pools[poolIndex]->GetId());
15563 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
15570 #endif // #if VMA_STATS_STRING_ENABLED 15579 VMA_ASSERT(pCreateInfo && pAllocator);
15580 VMA_DEBUG_LOG(
"vmaCreateAllocator");
15582 return (*pAllocator)->Init(pCreateInfo);
15588 if(allocator != VK_NULL_HANDLE)
15590 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
15591 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
15592 vma_delete(&allocationCallbacks, allocator);
15598 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
15600 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
15601 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
15606 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
15608 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
15609 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
15614 uint32_t memoryTypeIndex,
15615 VkMemoryPropertyFlags* pFlags)
15617 VMA_ASSERT(allocator && pFlags);
15618 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
15619 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
15624 uint32_t frameIndex)
15626 VMA_ASSERT(allocator);
15627 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
15629 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15631 allocator->SetCurrentFrameIndex(frameIndex);
15638 VMA_ASSERT(allocator && pStats);
15639 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15640 allocator->CalculateStats(pStats);
15643 #if VMA_STATS_STRING_ENABLED 15647 char** ppStatsString,
15648 VkBool32 detailedMap)
15650 VMA_ASSERT(allocator && ppStatsString);
15651 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15653 VmaStringBuilder sb(allocator);
15655 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
15656 json.BeginObject();
15659 allocator->CalculateStats(&stats);
15661 json.WriteString(
"Total");
15662 VmaPrintStatInfo(json, stats.
total);
15664 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
15666 json.BeginString(
"Heap ");
15667 json.ContinueString(heapIndex);
15669 json.BeginObject();
15671 json.WriteString(
"Size");
15672 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
15674 json.WriteString(
"Flags");
15675 json.BeginArray(
true);
15676 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
15678 json.WriteString(
"DEVICE_LOCAL");
15684 json.WriteString(
"Stats");
15685 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
15688 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
15690 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
15692 json.BeginString(
"Type ");
15693 json.ContinueString(typeIndex);
15696 json.BeginObject();
15698 json.WriteString(
"Flags");
15699 json.BeginArray(
true);
15700 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
15701 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
15703 json.WriteString(
"DEVICE_LOCAL");
15705 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
15707 json.WriteString(
"HOST_VISIBLE");
15709 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
15711 json.WriteString(
"HOST_COHERENT");
15713 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
15715 json.WriteString(
"HOST_CACHED");
15717 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
15719 json.WriteString(
"LAZILY_ALLOCATED");
15725 json.WriteString(
"Stats");
15726 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
15735 if(detailedMap == VK_TRUE)
15737 allocator->PrintDetailedMap(json);
15743 const size_t len = sb.GetLength();
15744 char*
const pChars = vma_new_array(allocator,
char, len + 1);
15747 memcpy(pChars, sb.GetData(), len);
15749 pChars[len] =
'\0';
15750 *ppStatsString = pChars;
15755 char* pStatsString)
15757 if(pStatsString != VMA_NULL)
15759 VMA_ASSERT(allocator);
15760 size_t len = strlen(pStatsString);
15761 vma_delete_array(allocator, pStatsString, len + 1);
15765 #endif // #if VMA_STATS_STRING_ENABLED 15772 uint32_t memoryTypeBits,
15774 uint32_t* pMemoryTypeIndex)
15776 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15777 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15778 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15785 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
15786 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
15789 switch(pAllocationCreateInfo->
usage)
15794 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15796 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15800 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
15803 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15804 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
15806 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
15810 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
15811 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
15817 *pMemoryTypeIndex = UINT32_MAX;
15818 uint32_t minCost = UINT32_MAX;
15819 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
15820 memTypeIndex < allocator->GetMemoryTypeCount();
15821 ++memTypeIndex, memTypeBit <<= 1)
15824 if((memTypeBit & memoryTypeBits) != 0)
15826 const VkMemoryPropertyFlags currFlags =
15827 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
15829 if((requiredFlags & ~currFlags) == 0)
15832 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
15834 if(currCost < minCost)
15836 *pMemoryTypeIndex = memTypeIndex;
15841 minCost = currCost;
15846 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
15851 const VkBufferCreateInfo* pBufferCreateInfo,
15853 uint32_t* pMemoryTypeIndex)
15855 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15856 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
15857 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15858 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15860 const VkDevice hDev = allocator->m_hDevice;
15861 VkBuffer hBuffer = VK_NULL_HANDLE;
15862 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
15863 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
15864 if(res == VK_SUCCESS)
15866 VkMemoryRequirements memReq = {};
15867 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
15868 hDev, hBuffer, &memReq);
15872 memReq.memoryTypeBits,
15873 pAllocationCreateInfo,
15876 allocator->GetVulkanFunctions().vkDestroyBuffer(
15877 hDev, hBuffer, allocator->GetAllocationCallbacks());
15884 const VkImageCreateInfo* pImageCreateInfo,
15886 uint32_t* pMemoryTypeIndex)
15888 VMA_ASSERT(allocator != VK_NULL_HANDLE);
15889 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
15890 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
15891 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
15893 const VkDevice hDev = allocator->m_hDevice;
15894 VkImage hImage = VK_NULL_HANDLE;
15895 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
15896 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
15897 if(res == VK_SUCCESS)
15899 VkMemoryRequirements memReq = {};
15900 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
15901 hDev, hImage, &memReq);
15905 memReq.memoryTypeBits,
15906 pAllocationCreateInfo,
15909 allocator->GetVulkanFunctions().vkDestroyImage(
15910 hDev, hImage, allocator->GetAllocationCallbacks());
15920 VMA_ASSERT(allocator && pCreateInfo && pPool);
15922 VMA_DEBUG_LOG(
"vmaCreatePool");
15924 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15926 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
15928 #if VMA_RECORDING_ENABLED 15929 if(allocator->GetRecorder() != VMA_NULL)
15931 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
15942 VMA_ASSERT(allocator);
15944 if(pool == VK_NULL_HANDLE)
15949 VMA_DEBUG_LOG(
"vmaDestroyPool");
15951 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15953 #if VMA_RECORDING_ENABLED 15954 if(allocator->GetRecorder() != VMA_NULL)
15956 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
15960 allocator->DestroyPool(pool);
15968 VMA_ASSERT(allocator && pool && pPoolStats);
15970 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15972 allocator->GetPoolStats(pool, pPoolStats);
15978 size_t* pLostAllocationCount)
15980 VMA_ASSERT(allocator && pool);
15982 VMA_DEBUG_GLOBAL_MUTEX_LOCK
15984 #if VMA_RECORDING_ENABLED 15985 if(allocator->GetRecorder() != VMA_NULL)
15987 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
15991 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
15996 VMA_ASSERT(allocator && pool);
15998 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16000 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
16002 return allocator->CheckPoolCorruption(pool);
16007 const VkMemoryRequirements* pVkMemoryRequirements,
16012 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
16014 VMA_DEBUG_LOG(
"vmaAllocateMemory");
16016 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16018 VkResult result = allocator->AllocateMemory(
16019 *pVkMemoryRequirements,
16025 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16029 #if VMA_RECORDING_ENABLED 16030 if(allocator->GetRecorder() != VMA_NULL)
16032 allocator->GetRecorder()->RecordAllocateMemory(
16033 allocator->GetCurrentFrameIndex(),
16034 *pVkMemoryRequirements,
16040 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
16042 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16050 const VkMemoryRequirements* pVkMemoryRequirements,
16052 size_t allocationCount,
16056 if(allocationCount == 0)
16061 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocations);
16063 VMA_DEBUG_LOG(
"vmaAllocateMemoryPages");
16065 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16067 VkResult result = allocator->AllocateMemory(
16068 *pVkMemoryRequirements,
16074 VMA_SUBALLOCATION_TYPE_UNKNOWN,
16078 #if VMA_RECORDING_ENABLED 16079 if(allocator->GetRecorder() != VMA_NULL)
16081 allocator->GetRecorder()->RecordAllocateMemoryPages(
16082 allocator->GetCurrentFrameIndex(),
16083 *pVkMemoryRequirements,
16085 (uint64_t)allocationCount,
16090 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
16092 for(
size_t i = 0; i < allocationCount; ++i)
16094 allocator->GetAllocationInfo(pAllocations[i], pAllocationInfo + i);
16108 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16110 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
16112 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16114 VkMemoryRequirements vkMemReq = {};
16115 bool requiresDedicatedAllocation =
false;
16116 bool prefersDedicatedAllocation =
false;
16117 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
16118 requiresDedicatedAllocation,
16119 prefersDedicatedAllocation);
16121 VkResult result = allocator->AllocateMemory(
16123 requiresDedicatedAllocation,
16124 prefersDedicatedAllocation,
16128 VMA_SUBALLOCATION_TYPE_BUFFER,
16132 #if VMA_RECORDING_ENABLED 16133 if(allocator->GetRecorder() != VMA_NULL)
16135 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
16136 allocator->GetCurrentFrameIndex(),
16138 requiresDedicatedAllocation,
16139 prefersDedicatedAllocation,
16145 if(pAllocationInfo && result == VK_SUCCESS)
16147 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16160 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
16162 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
16164 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16166 VkMemoryRequirements vkMemReq = {};
16167 bool requiresDedicatedAllocation =
false;
16168 bool prefersDedicatedAllocation =
false;
16169 allocator->GetImageMemoryRequirements(image, vkMemReq,
16170 requiresDedicatedAllocation, prefersDedicatedAllocation);
16172 VkResult result = allocator->AllocateMemory(
16174 requiresDedicatedAllocation,
16175 prefersDedicatedAllocation,
16179 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
16183 #if VMA_RECORDING_ENABLED 16184 if(allocator->GetRecorder() != VMA_NULL)
16186 allocator->GetRecorder()->RecordAllocateMemoryForImage(
16187 allocator->GetCurrentFrameIndex(),
16189 requiresDedicatedAllocation,
16190 prefersDedicatedAllocation,
16196 if(pAllocationInfo && result == VK_SUCCESS)
16198 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16208 VMA_ASSERT(allocator);
16210 if(allocation == VK_NULL_HANDLE)
16215 VMA_DEBUG_LOG(
"vmaFreeMemory");
16217 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16219 #if VMA_RECORDING_ENABLED 16220 if(allocator->GetRecorder() != VMA_NULL)
16222 allocator->GetRecorder()->RecordFreeMemory(
16223 allocator->GetCurrentFrameIndex(),
16228 allocator->FreeMemory(
16235 size_t allocationCount,
16238 if(allocationCount == 0)
16243 VMA_ASSERT(allocator);
16245 VMA_DEBUG_LOG(
"vmaFreeMemoryPages");
16247 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16249 #if VMA_RECORDING_ENABLED 16250 if(allocator->GetRecorder() != VMA_NULL)
16252 allocator->GetRecorder()->RecordFreeMemoryPages(
16253 allocator->GetCurrentFrameIndex(),
16254 (uint64_t)allocationCount,
16259 allocator->FreeMemory(allocationCount, pAllocations);
16265 VkDeviceSize newSize)
16267 VMA_ASSERT(allocator && allocation);
16269 VMA_DEBUG_LOG(
"vmaResizeAllocation");
16271 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16273 return allocator->ResizeAllocation(allocation, newSize);
16281 VMA_ASSERT(allocator && allocation && pAllocationInfo);
16283 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16285 #if VMA_RECORDING_ENABLED 16286 if(allocator->GetRecorder() != VMA_NULL)
16288 allocator->GetRecorder()->RecordGetAllocationInfo(
16289 allocator->GetCurrentFrameIndex(),
16294 allocator->GetAllocationInfo(allocation, pAllocationInfo);
16301 VMA_ASSERT(allocator && allocation);
16303 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16305 #if VMA_RECORDING_ENABLED 16306 if(allocator->GetRecorder() != VMA_NULL)
16308 allocator->GetRecorder()->RecordTouchAllocation(
16309 allocator->GetCurrentFrameIndex(),
16314 return allocator->TouchAllocation(allocation);
16322 VMA_ASSERT(allocator && allocation);
16324 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16326 allocation->SetUserData(allocator, pUserData);
16328 #if VMA_RECORDING_ENABLED 16329 if(allocator->GetRecorder() != VMA_NULL)
16331 allocator->GetRecorder()->RecordSetAllocationUserData(
16332 allocator->GetCurrentFrameIndex(),
16343 VMA_ASSERT(allocator && pAllocation);
16345 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
16347 allocator->CreateLostAllocation(pAllocation);
16349 #if VMA_RECORDING_ENABLED 16350 if(allocator->GetRecorder() != VMA_NULL)
16352 allocator->GetRecorder()->RecordCreateLostAllocation(
16353 allocator->GetCurrentFrameIndex(),
16364 VMA_ASSERT(allocator && allocation && ppData);
16366 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16368 VkResult res = allocator->Map(allocation, ppData);
16370 #if VMA_RECORDING_ENABLED 16371 if(allocator->GetRecorder() != VMA_NULL)
16373 allocator->GetRecorder()->RecordMapMemory(
16374 allocator->GetCurrentFrameIndex(),
16386 VMA_ASSERT(allocator && allocation);
16388 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16390 #if VMA_RECORDING_ENABLED 16391 if(allocator->GetRecorder() != VMA_NULL)
16393 allocator->GetRecorder()->RecordUnmapMemory(
16394 allocator->GetCurrentFrameIndex(),
16399 allocator->Unmap(allocation);
16404 VMA_ASSERT(allocator && allocation);
16406 VMA_DEBUG_LOG(
"vmaFlushAllocation");
16408 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16410 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
16412 #if VMA_RECORDING_ENABLED 16413 if(allocator->GetRecorder() != VMA_NULL)
16415 allocator->GetRecorder()->RecordFlushAllocation(
16416 allocator->GetCurrentFrameIndex(),
16417 allocation, offset, size);
16424 VMA_ASSERT(allocator && allocation);
16426 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
16428 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16430 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
16432 #if VMA_RECORDING_ENABLED 16433 if(allocator->GetRecorder() != VMA_NULL)
16435 allocator->GetRecorder()->RecordInvalidateAllocation(
16436 allocator->GetCurrentFrameIndex(),
16437 allocation, offset, size);
16444 VMA_ASSERT(allocator);
16446 VMA_DEBUG_LOG(
"vmaCheckCorruption");
16448 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16450 return allocator->CheckCorruption(memoryTypeBits);
16456 size_t allocationCount,
16457 VkBool32* pAllocationsChanged,
16467 if(pDefragmentationInfo != VMA_NULL)
16481 if(res == VK_NOT_READY)
16494 VMA_ASSERT(allocator && pInfo && pContext);
16505 VMA_HEAVY_ASSERT(VmaValidatePointerArray(pInfo->
poolCount, pInfo->
pPools));
16507 VMA_DEBUG_LOG(
"vmaDefragmentationBegin");
16509 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16511 VkResult res = allocator->DefragmentationBegin(*pInfo, pStats, pContext);
16513 #if VMA_RECORDING_ENABLED 16514 if(allocator->GetRecorder() != VMA_NULL)
16516 allocator->GetRecorder()->RecordDefragmentationBegin(
16517 allocator->GetCurrentFrameIndex(), *pInfo, *pContext);
16528 VMA_ASSERT(allocator);
16530 VMA_DEBUG_LOG(
"vmaDefragmentationEnd");
16532 if(context != VK_NULL_HANDLE)
16534 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16536 #if VMA_RECORDING_ENABLED 16537 if(allocator->GetRecorder() != VMA_NULL)
16539 allocator->GetRecorder()->RecordDefragmentationEnd(
16540 allocator->GetCurrentFrameIndex(), context);
16544 return allocator->DefragmentationEnd(context);
16557 VMA_ASSERT(allocator && allocation && buffer);
16559 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
16561 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16563 return allocator->BindBufferMemory(allocation, buffer);
16571 VMA_ASSERT(allocator && allocation && image);
16573 VMA_DEBUG_LOG(
"vmaBindImageMemory");
16575 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16577 return allocator->BindImageMemory(allocation, image);
16582 const VkBufferCreateInfo* pBufferCreateInfo,
16588 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
16590 if(pBufferCreateInfo->size == 0)
16592 return VK_ERROR_VALIDATION_FAILED_EXT;
16595 VMA_DEBUG_LOG(
"vmaCreateBuffer");
16597 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16599 *pBuffer = VK_NULL_HANDLE;
16600 *pAllocation = VK_NULL_HANDLE;
16603 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
16604 allocator->m_hDevice,
16606 allocator->GetAllocationCallbacks(),
16611 VkMemoryRequirements vkMemReq = {};
16612 bool requiresDedicatedAllocation =
false;
16613 bool prefersDedicatedAllocation =
false;
16614 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
16615 requiresDedicatedAllocation, prefersDedicatedAllocation);
16619 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
16621 VMA_ASSERT(vkMemReq.alignment %
16622 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
16624 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
16626 VMA_ASSERT(vkMemReq.alignment %
16627 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
16629 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
16631 VMA_ASSERT(vkMemReq.alignment %
16632 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
16636 res = allocator->AllocateMemory(
16638 requiresDedicatedAllocation,
16639 prefersDedicatedAllocation,
16642 *pAllocationCreateInfo,
16643 VMA_SUBALLOCATION_TYPE_BUFFER,
16647 #if VMA_RECORDING_ENABLED 16648 if(allocator->GetRecorder() != VMA_NULL)
16650 allocator->GetRecorder()->RecordCreateBuffer(
16651 allocator->GetCurrentFrameIndex(),
16652 *pBufferCreateInfo,
16653 *pAllocationCreateInfo,
16663 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
16668 #if VMA_STATS_STRING_ENABLED 16669 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
16671 if(pAllocationInfo != VMA_NULL)
16673 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16678 allocator->FreeMemory(
16681 *pAllocation = VK_NULL_HANDLE;
16682 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16683 *pBuffer = VK_NULL_HANDLE;
16686 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
16687 *pBuffer = VK_NULL_HANDLE;
16698 VMA_ASSERT(allocator);
16700 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
16705 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
16707 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16709 #if VMA_RECORDING_ENABLED 16710 if(allocator->GetRecorder() != VMA_NULL)
16712 allocator->GetRecorder()->RecordDestroyBuffer(
16713 allocator->GetCurrentFrameIndex(),
16718 if(buffer != VK_NULL_HANDLE)
16720 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
16723 if(allocation != VK_NULL_HANDLE)
16725 allocator->FreeMemory(
16733 const VkImageCreateInfo* pImageCreateInfo,
16739 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
16741 if(pImageCreateInfo->extent.width == 0 ||
16742 pImageCreateInfo->extent.height == 0 ||
16743 pImageCreateInfo->extent.depth == 0 ||
16744 pImageCreateInfo->mipLevels == 0 ||
16745 pImageCreateInfo->arrayLayers == 0)
16747 return VK_ERROR_VALIDATION_FAILED_EXT;
16750 VMA_DEBUG_LOG(
"vmaCreateImage");
16752 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16754 *pImage = VK_NULL_HANDLE;
16755 *pAllocation = VK_NULL_HANDLE;
16758 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
16759 allocator->m_hDevice,
16761 allocator->GetAllocationCallbacks(),
16765 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
16766 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
16767 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
16770 VkMemoryRequirements vkMemReq = {};
16771 bool requiresDedicatedAllocation =
false;
16772 bool prefersDedicatedAllocation =
false;
16773 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
16774 requiresDedicatedAllocation, prefersDedicatedAllocation);
16776 res = allocator->AllocateMemory(
16778 requiresDedicatedAllocation,
16779 prefersDedicatedAllocation,
16782 *pAllocationCreateInfo,
16787 #if VMA_RECORDING_ENABLED 16788 if(allocator->GetRecorder() != VMA_NULL)
16790 allocator->GetRecorder()->RecordCreateImage(
16791 allocator->GetCurrentFrameIndex(),
16793 *pAllocationCreateInfo,
16803 res = allocator->BindImageMemory(*pAllocation, *pImage);
16808 #if VMA_STATS_STRING_ENABLED 16809 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
16811 if(pAllocationInfo != VMA_NULL)
16813 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
16818 allocator->FreeMemory(
16821 *pAllocation = VK_NULL_HANDLE;
16822 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
16823 *pImage = VK_NULL_HANDLE;
16826 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
16827 *pImage = VK_NULL_HANDLE;
16838 VMA_ASSERT(allocator);
16840 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
16845 VMA_DEBUG_LOG(
"vmaDestroyImage");
16847 VMA_DEBUG_GLOBAL_MUTEX_LOCK
16849 #if VMA_RECORDING_ENABLED 16850 if(allocator->GetRecorder() != VMA_NULL)
16852 allocator->GetRecorder()->RecordDestroyImage(
16853 allocator->GetCurrentFrameIndex(),
16858 if(image != VK_NULL_HANDLE)
16860 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
16862 if(allocation != VK_NULL_HANDLE)
16864 allocator->FreeMemory(
16870 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1786
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:2086
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1844
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side,...
Definition: vk_mem_alloc.h:2883
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Deprecated. Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
Definition: vk_mem_alloc.h:1818
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2417
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1798
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:2048
Definition: vk_mem_alloc.h:2152
VmaDefragmentationFlags flags
Reserved for future use. Should be 0.
Definition: vk_mem_alloc.h:2836
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1790
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2517
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1841
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2919
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2306
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1685
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2398
Definition: vk_mem_alloc.h:2123
uint32_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:2839
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1779
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:2205
Definition: vk_mem_alloc.h:2075
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1853
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2334
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1.
Definition: vk_mem_alloc.h:1907
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1838
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:2079
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1979
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1795
VmaPool * pPools
Either null or pointer to array of pools to be defragmented.
Definition: vk_mem_alloc.h:2873
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1978
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2923
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(),...
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1870
VmaStatInfo total
Definition: vk_mem_alloc.h:1988
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2931
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:2189
Definition: vk_mem_alloc.h:2147
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2914
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1796
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1721
Represents main object of this library initialized.
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1847
void vmaFreeMemoryPages(VmaAllocator allocator, size_t allocationCount, VmaAllocation *pAllocations)
Frees memory and destroys multiple allocations.
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2348
Definition: vk_mem_alloc.h:2342
PFN_vkCmdCopyBuffer vkCmdCopyBuffer
Definition: vk_mem_alloc.h:1802
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1914
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2527
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1791
VkResult vmaDefragmentationBegin(VmaAllocator allocator, const VmaDefragmentationInfo2 *pInfo, VmaDefragmentationStats *pStats, VmaDefragmentationContext *pContext)
Begins defragmentation process.
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1816
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:2226
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2368
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost.
Definition: vk_mem_alloc.h:2404
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1777
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2351
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2888
VmaMemoryUsage
Definition: vk_mem_alloc.h:2026
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:2848
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2909
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects.
Definition: vk_mem_alloc.h:2927
Definition: vk_mem_alloc.h:2065
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:2213
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1794
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
VkResult vmaDefragmentationEnd(VmaAllocator allocator, VmaDefragmentationContext context)
Ends defragmentation process.
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1984
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1727
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:2827
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
Definition: vk_mem_alloc.h:2825
Definition: vk_mem_alloc.h:2173
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:2854
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1748
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1820
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1753
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2929
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:2200
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2414
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1787
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1967
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes....
Definition: vk_mem_alloc.h:2363
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1740
Definition: vk_mem_alloc.h:2338
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:2130
Represents Opaque object that represents started defragmentation process.
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1980
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1744
Definition: vk_mem_alloc.h:2163
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2354
Definition: vk_mem_alloc.h:2074
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1793
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:2195
Definition: vk_mem_alloc.h:2186
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1970
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1789
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2376
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1856
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2407
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:2184
VkDeviceSize maxCpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2878
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:2219
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1895
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1986
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it.
Definition: vk_mem_alloc.h:2110
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1979
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1800
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1826
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin(). None at the moment. Reserved for future use.
Definition: vk_mem_alloc.h:2824
VkCommandBuffer commandBuffer
Optional. Command buffer where GPU copy commands will be posted.
Definition: vk_mem_alloc.h:2902
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1742
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1799
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2390
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1792
Definition: vk_mem_alloc.h:2141
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1834
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2541
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB....
Definition: vk_mem_alloc.h:1850
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1979
VkResult vmaAllocateMemoryPages(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, size_t allocationCount, VmaAllocation *pAllocations, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation for multiple allocation objects at once.
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1976
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2395
Parameters for defragmentation.
Definition: vk_mem_alloc.h:2833
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions.
Definition: vk_mem_alloc.h:2156
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory,...
Definition: vk_mem_alloc.h:2522
Definition: vk_mem_alloc.h:2170
Definition: vk_mem_alloc.h:2182
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places.
Definition: vk_mem_alloc.h:2925
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1785
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1974
Definition: vk_mem_alloc.h:2031
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2344
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1823
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1972
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1797
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1801
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:2097
Definition: vk_mem_alloc.h:2177
Definition: vk_mem_alloc.h:2058
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2536
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1775
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1788
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2323
VkResult vmaResizeAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize newSize)
Deprecated.
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2503
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:2167
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2288
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1980
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame.
Definition: vk_mem_alloc.h:2136
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1810
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1987
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2401
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1980
struct VmaDefragmentationInfo2 VmaDefragmentationInfo2
Parameters for defragmentation.
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side,...
Definition: vk_mem_alloc.h:2893
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2508
uint32_t poolCount
Numer of pools in pPools array.
Definition: vk_mem_alloc.h:2857