23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1464 #ifndef VMA_RECORDING_ENABLED 1466 #define VMA_RECORDING_ENABLED 1 1468 #define VMA_RECORDING_ENABLED 0 1473 #define NOMINMAX // For windows.h 1476 #include <vulkan/vulkan.h> 1478 #if VMA_RECORDING_ENABLED 1479 #include <windows.h> 1482 #if !defined(VMA_DEDICATED_ALLOCATION) 1483 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1484 #define VMA_DEDICATED_ALLOCATION 1 1486 #define VMA_DEDICATED_ALLOCATION 0 1504 uint32_t memoryType,
1505 VkDeviceMemory memory,
1510 uint32_t memoryType,
1511 VkDeviceMemory memory,
1583 #if VMA_DEDICATED_ALLOCATION 1584 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1585 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1711 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1719 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1729 uint32_t memoryTypeIndex,
1730 VkMemoryPropertyFlags* pFlags);
1742 uint32_t frameIndex);
1775 #define VMA_STATS_STRING_ENABLED 1 1777 #if VMA_STATS_STRING_ENABLED 1784 char** ppStatsString,
1785 VkBool32 detailedMap);
1789 char* pStatsString);
1791 #endif // #if VMA_STATS_STRING_ENABLED 2020 uint32_t memoryTypeBits,
2022 uint32_t* pMemoryTypeIndex);
2038 const VkBufferCreateInfo* pBufferCreateInfo,
2040 uint32_t* pMemoryTypeIndex);
2056 const VkImageCreateInfo* pImageCreateInfo,
2058 uint32_t* pMemoryTypeIndex);
2230 size_t* pLostAllocationCount);
2329 const VkMemoryRequirements* pVkMemoryRequirements,
2719 size_t allocationCount,
2720 VkBool32* pAllocationsChanged,
2786 const VkBufferCreateInfo* pBufferCreateInfo,
2811 const VkImageCreateInfo* pImageCreateInfo,
2837 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 2840 #if defined(__cplusplus) && defined(__INTELLISENSE__) 2841 #define VMA_IMPLEMENTATION 2844 #ifdef VMA_IMPLEMENTATION 2845 #undef VMA_IMPLEMENTATION 2867 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 2868 #define VMA_STATIC_VULKAN_FUNCTIONS 1 2880 #if VMA_USE_STL_CONTAINERS 2881 #define VMA_USE_STL_VECTOR 1 2882 #define VMA_USE_STL_UNORDERED_MAP 1 2883 #define VMA_USE_STL_LIST 1 2886 #if VMA_USE_STL_VECTOR 2890 #if VMA_USE_STL_UNORDERED_MAP 2891 #include <unordered_map> 2894 #if VMA_USE_STL_LIST 2903 #include <algorithm> 2909 #define VMA_NULL nullptr 2912 #if defined(__APPLE__) || defined(__ANDROID__) 2914 void *aligned_alloc(
size_t alignment,
size_t size)
2917 if(alignment <
sizeof(
void*))
2919 alignment =
sizeof(
void*);
2923 if(posix_memalign(&pointer, alignment, size) == 0)
2937 #define VMA_ASSERT(expr) assert(expr) 2939 #define VMA_ASSERT(expr) 2945 #ifndef VMA_HEAVY_ASSERT 2947 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 2949 #define VMA_HEAVY_ASSERT(expr) 2953 #ifndef VMA_ALIGN_OF 2954 #define VMA_ALIGN_OF(type) (__alignof(type)) 2957 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 2959 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 2961 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 2965 #ifndef VMA_SYSTEM_FREE 2967 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 2969 #define VMA_SYSTEM_FREE(ptr) free(ptr) 2974 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 2978 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 2982 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 2986 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 2989 #ifndef VMA_DEBUG_LOG 2990 #define VMA_DEBUG_LOG(format, ...) 3000 #if VMA_STATS_STRING_ENABLED 3001 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
3003 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Writes the decimal text representation of `num` into `outStr`.
// `strLen` is the full capacity of the destination buffer; snprintf
// truncates if needed and always NUL-terminates (for strLen > 0).
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    const unsigned long long value = static_cast<unsigned long long>(num);
    snprintf(outStr, strLen, "%llu", value);
}
// Writes the platform-specific textual form of pointer `ptr` (printf "%p")
// into `outStr`, which has capacity `strLen`; snprintf NUL-terminates.
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
3021 void Lock() { m_Mutex.lock(); }
3022 void Unlock() { m_Mutex.unlock(); }
3026 #define VMA_MUTEX VmaMutex 3037 #ifndef VMA_ATOMIC_UINT32 3038 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 3041 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 3046 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 3049 #ifndef VMA_DEBUG_ALIGNMENT 3054 #define VMA_DEBUG_ALIGNMENT (1) 3057 #ifndef VMA_DEBUG_MARGIN 3062 #define VMA_DEBUG_MARGIN (0) 3065 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 3070 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 3073 #ifndef VMA_DEBUG_DETECT_CORRUPTION 3079 #define VMA_DEBUG_DETECT_CORRUPTION (0) 3082 #ifndef VMA_DEBUG_GLOBAL_MUTEX 3087 #define VMA_DEBUG_GLOBAL_MUTEX (0) 3090 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 3095 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 3098 #ifndef VMA_SMALL_HEAP_MAX_SIZE 3099 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 3103 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 3104 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 3108 #ifndef VMA_CLASS_NO_COPY 3109 #define VMA_CLASS_NO_COPY(className) \ 3111 className(const className&) = delete; \ 3112 className& operator=(const className&) = delete; 3115 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
3118 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
3120 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
3121 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
3127 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
3128 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in `v` (population count).
// Implemented with Kernighan's loop: each iteration clears the lowest
// set bit, so the loop runs once per set bit.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t count = 0;
    while(v != 0)
    {
        v &= v - 1; // clear lowest set bit
        ++count;
    }
    return count;
}
// Rounds `val` up to the nearest multiple of `align`.
// Uses plain integer division, so `align` does not have to be a power
// of two. Intended for unsigned/non-negative values.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T bucketCount = (val + align - 1) / align;
    return bucketCount * align;
}
// Rounds `val` down to the nearest multiple of `align`.
// Uses plain integer division, so `align` does not have to be a power
// of two. Intended for unsigned/non-negative values.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    const T bucketCount = val / align;
    return bucketCount * align;
}
// Integer division of x by y with rounding to nearest (half rounds up),
// implemented by biasing the numerator by y/2 before dividing.
// Intended for unsigned/non-negative values.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
// Returns true when `x` has at most one bit set.
// NOTE: returns true for x == 0 as well (0 & -1 == 0) — callers rely on
// this original behavior, so it is intentionally preserved.
template <typename T>
inline bool VmaIsPow2(T x)
{
    const T lowerBitsCleared = x & (x - 1);
    return lowerBitsCleared == 0;
}
3175 static inline uint32_t VmaNextPow2(uint32_t v)
3186 static inline uint64_t VmaNextPow2(uint64_t v)
3200 static inline uint32_t VmaPrevPow2(uint32_t v)
3210 static inline uint64_t VmaPrevPow2(uint64_t v)
// Returns true when `pStr` is a null pointer or points at an empty
// (zero-length) C string.
static inline bool VmaStrIsEmpty(const char* pStr)
{
    if(pStr == VMA_NULL)
    {
        return true;
    }
    return *pStr == '\0';
}
3227 static const char* VmaAlgorithmToStr(uint32_t algorithm)
// Lomuto-style partition step for VmaQuickSort: picks the last element
// (end - 1) as the pivot and swaps every element that compares less than
// the pivot toward the front. Finally swaps the pivot into its sorted slot.
// NOTE(review): this extract elides some body lines (the `++insertIndex;`
// inside the loop and the final `return insertIndex;`) — confirm against
// the full file.
3245 template<
typename Iterator,
typename Compare>
3246 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
// Pivot is the last element of the range.
3248 Iterator centerValue = end; --centerValue;
3249 Iterator insertIndex = beg;
// Walk all elements before the pivot, moving "less than pivot" ones left.
3250 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
3252 if(cmp(*memTypeIndex, *centerValue))
// Avoid a self-swap when the element is already in place.
3254 if(insertIndex != memTypeIndex)
3256 VMA_SWAP(*memTypeIndex, *insertIndex);
// Place the pivot at its final partition point.
3261 if(insertIndex != centerValue)
3263 VMA_SWAP(*insertIndex, *centerValue);
// Recursive quicksort over [beg, end) using VmaQuickSortPartition.
// Used as the fallback implementation of VMA_SORT when <algorithm> /
// std::sort is not used. NOTE(review): the recursion guard (presumably
// `if(beg < end)`) is elided in this extract — confirm against the full file.
3268 template<
typename Iterator,
typename Compare>
3269 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
// Partition, then sort the two halves on either side of the pivot.
3273 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
3274 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
3275 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
// The function below returns whether resource A (ending at
// resourceAOffset + resourceASize - 1) and resource B (starting at
// resourceBOffset) fall on the same memory "page": it masks the last byte
// of A and the first byte of B down to pageSize-aligned page addresses and
// compares them. The bit-masking with ~(pageSize - 1) assumes pageSize is
// a power of two. Preconditions (asserted): A ends at or before B starts,
// resourceASize > 0, pageSize > 0.
3279 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 3281 #endif // #ifndef VMA_SORT 3290 static inline bool VmaBlocksOnSamePage(
3291 VkDeviceSize resourceAOffset,
3292 VkDeviceSize resourceASize,
3293 VkDeviceSize resourceBOffset,
3294 VkDeviceSize pageSize)
3296 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
// Address of the last byte of resource A, and the page it lives on.
3297 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3298 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
// Address of the first byte of resource B, and the page it lives on.
3299 VkDeviceSize resourceBStart = resourceBOffset;
3300 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3301 return resourceAEndPage == resourceBStartPage;
3304 enum VmaSuballocationType
3306 VMA_SUBALLOCATION_TYPE_FREE = 0,
3307 VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
3308 VMA_SUBALLOCATION_TYPE_BUFFER = 2,
3309 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
3310 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
3311 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
3312 VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
// Returns whether two suballocation types placed next to each other can
// conflict under Vulkan's bufferImageGranularity rule (linear vs optimal
// tiling resources sharing a page). The pair is first normalized so that
// suballocType1 <= suballocType2, then the switch enumerates which
// combinations conflict. NOTE(review): the `return` keywords / boolean
// results for each case appear to be elided in this extract (e.g.
// `return false;` for FREE, `return true;` before the OR-chains) — confirm
// against the full file.
3321 static inline bool VmaIsBufferImageGranularityConflict(
3322 VmaSuballocationType suballocType1,
3323 VmaSuballocationType suballocType2)
// Normalize order so only one triangle of the pair matrix is handled.
3325 if(suballocType1 > suballocType2)
3327 VMA_SWAP(suballocType1, suballocType2);
3330 switch(suballocType1)
3332 case VMA_SUBALLOCATION_TYPE_FREE:
3334 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
// A plain buffer conflicts with image types whose tiling may be optimal.
3336 case VMA_SUBALLOCATION_TYPE_BUFFER:
3338 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3339 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
// Unknown-tiling image: conservatively conflicts with any image type.
3340 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3342 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3343 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3344 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
// Linear image conflicts only with optimal-tiling images.
3345 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3347 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3348 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
// Fills the debug margin starting at (pData + offset) with the corruption
// detection magic value, one uint32_t at a time. Writes
// VMA_DEBUG_MARGIN / sizeof(uint32_t) words; with the default
// VMA_DEBUG_MARGIN of 0 this is a no-op. Later validated by
// VmaValidateMagicValue to detect out-of-bounds writes.
3356 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
// Destination of the first magic word inside the margin.
3358 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3359 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3360 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3362 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
// Checks that the debug margin at (pData + offset) still holds the
// corruption detection magic value written by VmaWriteMagicValue.
// Scans VMA_DEBUG_MARGIN / sizeof(uint32_t) words. NOTE(review): the
// `return false;` inside the mismatch branch and the final `return true;`
// are elided in this extract — confirm against the full file.
3366 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
// First magic word inside the margin (read-only).
3368 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3369 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3370 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
// Any overwritten word means the margin was corrupted.
3372 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3383 VMA_CLASS_NO_COPY(VmaMutexLock)
3385 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
3386 m_pMutex(useMutex ? &mutex : VMA_NULL)
3403 VMA_MUTEX* m_pMutex;
3406 #if VMA_DEBUG_GLOBAL_MUTEX 3407 static VMA_MUTEX gDebugGlobalMutex;
3408 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3410 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3414 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over the sorted range [beg, end): returns an iterator to
// the first element that is NOT less than `key` according to `cmp`
// (i.e. a lower_bound-style search). NOTE(review): the loop condition,
// the down/up updates, and the final `return beg + down;` are elided in
// this extract — confirm against the full file.
3425 template <
typename CmpLess,
typename IterT,
typename KeyT>
3426 static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end,
const KeyT &key, CmpLess cmp)
// Index-based bisection bounds over the range.
3428 size_t down = 0, up = (end - beg);
3431 const size_t mid = (down + up) / 2;
// Element at mid is still less than key -> answer lies above mid.
3432 if(cmp(*(beg+mid), key))
// Allocates `size` bytes aligned to `alignment`, preferring the
// user-supplied VkAllocationCallbacks (pfnAllocation with scope
// VK_SYSTEM_ALLOCATION_SCOPE_OBJECT) and falling back to
// VMA_SYSTEM_ALIGNED_MALLOC when no callbacks are provided.
// NOTE(review): the `size`/`alignment` argument lines of the callback
// invocation are elided in this extract — confirm against the full file.
3447 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
// Use the caller's allocator only when both the struct and the
// allocation callback are actually provided.
3449 if((pAllocationCallbacks != VMA_NULL) &&
3450 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3452 return (*pAllocationCallbacks->pfnAllocation)(
3453 pAllocationCallbacks->pUserData,
3456 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
// Fallback: platform aligned allocation (_aligned_malloc / aligned_alloc).
3460 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
// Frees memory previously obtained from VmaMalloc, mirroring its
// allocator selection: routes through the user-supplied pfnFree callback
// when available, otherwise uses VMA_SYSTEM_FREE (the counterpart of
// VMA_SYSTEM_ALIGNED_MALLOC). Must not mix paths: memory allocated via
// the callbacks must be freed via the callbacks.
3464 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
// Same condition as VmaMalloc so alloc/free stay paired.
3466 if((pAllocationCallbacks != VMA_NULL) &&
3467 (pAllocationCallbacks->pfnFree != VMA_NULL))
3469 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
// Fallback path for memory from VMA_SYSTEM_ALIGNED_MALLOC.
3473 VMA_SYSTEM_FREE(ptr);
3477 template<
typename T>
3478 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3480 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3483 template<
typename T>
3484 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3486 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
3489 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3491 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3493 template<
typename T>
3494 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
3497 VmaFree(pAllocationCallbacks, ptr);
3500 template<
typename T>
3501 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
3505 for(
size_t i = count; i--; )
3509 VmaFree(pAllocationCallbacks, ptr);
3514 template<
typename T>
3515 class VmaStlAllocator
3518 const VkAllocationCallbacks*
const m_pCallbacks;
3519 typedef T value_type;
3521 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
3522 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
3524 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3525 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
3527 template<
typename U>
3528 bool operator==(
const VmaStlAllocator<U>& rhs)
const 3530 return m_pCallbacks == rhs.m_pCallbacks;
3532 template<
typename U>
3533 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 3535 return m_pCallbacks != rhs.m_pCallbacks;
3538 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
3541 #if VMA_USE_STL_VECTOR 3543 #define VmaVector std::vector 3545 template<
typename T,
typename allocatorT>
3546 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
3548 vec.insert(vec.begin() + index, item);
3551 template<
typename T,
typename allocatorT>
3552 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
3554 vec.erase(vec.begin() + index);
3557 #else // #if VMA_USE_STL_VECTOR 3562 template<
typename T,
typename AllocatorT>
3566 typedef T value_type;
3568 VmaVector(
const AllocatorT& allocator) :
3569 m_Allocator(allocator),
3576 VmaVector(
size_t count,
const AllocatorT& allocator) :
3577 m_Allocator(allocator),
3578 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
3584 VmaVector(
const VmaVector<T, AllocatorT>& src) :
3585 m_Allocator(src.m_Allocator),
3586 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
3587 m_Count(src.m_Count),
3588 m_Capacity(src.m_Count)
3592 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
3598 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3601 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
3605 resize(rhs.m_Count);
3608 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
3614 bool empty()
const {
return m_Count == 0; }
3615 size_t size()
const {
return m_Count; }
3616 T* data() {
return m_pArray; }
3617 const T* data()
const {
return m_pArray; }
3619 T& operator[](
size_t index)
3621 VMA_HEAVY_ASSERT(index < m_Count);
3622 return m_pArray[index];
3624 const T& operator[](
size_t index)
const 3626 VMA_HEAVY_ASSERT(index < m_Count);
3627 return m_pArray[index];
3632 VMA_HEAVY_ASSERT(m_Count > 0);
3635 const T& front()
const 3637 VMA_HEAVY_ASSERT(m_Count > 0);
3642 VMA_HEAVY_ASSERT(m_Count > 0);
3643 return m_pArray[m_Count - 1];
3645 const T& back()
const 3647 VMA_HEAVY_ASSERT(m_Count > 0);
3648 return m_pArray[m_Count - 1];
3651 void reserve(
size_t newCapacity,
bool freeMemory =
false)
3653 newCapacity = VMA_MAX(newCapacity, m_Count);
3655 if((newCapacity < m_Capacity) && !freeMemory)
3657 newCapacity = m_Capacity;
3660 if(newCapacity != m_Capacity)
3662 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
3665 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
3667 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3668 m_Capacity = newCapacity;
3669 m_pArray = newArray;
3673 void resize(
size_t newCount,
bool freeMemory =
false)
3675 size_t newCapacity = m_Capacity;
3676 if(newCount > m_Capacity)
3678 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
3682 newCapacity = newCount;
3685 if(newCapacity != m_Capacity)
3687 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
3688 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
3689 if(elementsToCopy != 0)
3691 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
3693 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3694 m_Capacity = newCapacity;
3695 m_pArray = newArray;
3701 void clear(
bool freeMemory =
false)
3703 resize(0, freeMemory);
3706 void insert(
size_t index,
const T& src)
3708 VMA_HEAVY_ASSERT(index <= m_Count);
3709 const size_t oldCount = size();
3710 resize(oldCount + 1);
3711 if(index < oldCount)
3713 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
3715 m_pArray[index] = src;
3718 void remove(
size_t index)
3720 VMA_HEAVY_ASSERT(index < m_Count);
3721 const size_t oldCount = size();
3722 if(index < oldCount - 1)
3724 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
3726 resize(oldCount - 1);
3729 void push_back(
const T& src)
3731 const size_t newIndex = size();
3732 resize(newIndex + 1);
3733 m_pArray[newIndex] = src;
3738 VMA_HEAVY_ASSERT(m_Count > 0);
3742 void push_front(
const T& src)
3749 VMA_HEAVY_ASSERT(m_Count > 0);
3753 typedef T* iterator;
3755 iterator begin() {
return m_pArray; }
3756 iterator end() {
return m_pArray + m_Count; }
3759 AllocatorT m_Allocator;
3765 template<
typename T,
typename allocatorT>
3766 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
3768 vec.insert(index, item);
3771 template<
typename T,
typename allocatorT>
3772 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
3777 #endif // #if VMA_USE_STL_VECTOR 3779 template<
typename CmpLess,
typename VectorT>
3780 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
3782 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3784 vector.data() + vector.size(),
3786 CmpLess()) - vector.data();
3787 VmaVectorInsert(vector, indexToInsert, value);
3788 return indexToInsert;
3791 template<
typename CmpLess,
typename VectorT>
3792 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
3795 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
3800 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
3802 size_t indexToRemove = it - vector.begin();
3803 VmaVectorRemove(vector, indexToRemove);
3809 template<
typename CmpLess,
typename IterT,
typename KeyT>
3810 IterT VmaVectorFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value)
3813 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
3814 beg, end, value, comparator);
3816 (!comparator(*it, value) && !comparator(value, *it)))
3831 template<
typename T>
3832 class VmaPoolAllocator
3834 VMA_CLASS_NO_COPY(VmaPoolAllocator)
3836 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
3837 ~VmaPoolAllocator();
3845 uint32_t NextFreeIndex;
3852 uint32_t FirstFreeIndex;
3855 const VkAllocationCallbacks* m_pAllocationCallbacks;
3856 size_t m_ItemsPerBlock;
3857 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
3859 ItemBlock& CreateNewBlock();
3862 template<
typename T>
3863 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
3864 m_pAllocationCallbacks(pAllocationCallbacks),
3865 m_ItemsPerBlock(itemsPerBlock),
3866 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
3868 VMA_ASSERT(itemsPerBlock > 0);
3871 template<
typename T>
3872 VmaPoolAllocator<T>::~VmaPoolAllocator()
3877 template<
typename T>
3878 void VmaPoolAllocator<T>::Clear()
3880 for(
size_t i = m_ItemBlocks.size(); i--; )
3881 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
3882 m_ItemBlocks.clear();
3885 template<
typename T>
3886 T* VmaPoolAllocator<T>::Alloc()
3888 for(
size_t i = m_ItemBlocks.size(); i--; )
3890 ItemBlock& block = m_ItemBlocks[i];
3892 if(block.FirstFreeIndex != UINT32_MAX)
3894 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
3895 block.FirstFreeIndex = pItem->NextFreeIndex;
3896 return &pItem->Value;
3901 ItemBlock& newBlock = CreateNewBlock();
3902 Item*
const pItem = &newBlock.pItems[0];
3903 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
3904 return &pItem->Value;
3907 template<
typename T>
3908 void VmaPoolAllocator<T>::Free(T* ptr)
3911 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
3913 ItemBlock& block = m_ItemBlocks[i];
3917 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
3920 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
3922 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
3923 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
3924 block.FirstFreeIndex = index;
3928 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
3931 template<
typename T>
3932 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
3934 ItemBlock newBlock = {
3935 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
3937 m_ItemBlocks.push_back(newBlock);
3940 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
3941 newBlock.pItems[i].NextFreeIndex = i + 1;
3942 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
3943 return m_ItemBlocks.back();
3949 #if VMA_USE_STL_LIST 3951 #define VmaList std::list 3953 #else // #if VMA_USE_STL_LIST 3955 template<
typename T>
3964 template<
typename T>
3967 VMA_CLASS_NO_COPY(VmaRawList)
3969 typedef VmaListItem<T> ItemType;
3971 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
3975 size_t GetCount()
const {
return m_Count; }
3976 bool IsEmpty()
const {
return m_Count == 0; }
3978 ItemType* Front() {
return m_pFront; }
3979 const ItemType* Front()
const {
return m_pFront; }
3980 ItemType* Back() {
return m_pBack; }
3981 const ItemType* Back()
const {
return m_pBack; }
3983 ItemType* PushBack();
3984 ItemType* PushFront();
3985 ItemType* PushBack(
const T& value);
3986 ItemType* PushFront(
const T& value);
3991 ItemType* InsertBefore(ItemType* pItem);
3993 ItemType* InsertAfter(ItemType* pItem);
3995 ItemType* InsertBefore(ItemType* pItem,
const T& value);
3996 ItemType* InsertAfter(ItemType* pItem,
const T& value);
3998 void Remove(ItemType* pItem);
4001 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
4002 VmaPoolAllocator<ItemType> m_ItemAllocator;
4008 template<
typename T>
4009 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
4010 m_pAllocationCallbacks(pAllocationCallbacks),
4011 m_ItemAllocator(pAllocationCallbacks, 128),
4018 template<
typename T>
4019 VmaRawList<T>::~VmaRawList()
4025 template<
typename T>
4026 void VmaRawList<T>::Clear()
4028 if(IsEmpty() ==
false)
4030 ItemType* pItem = m_pBack;
4031 while(pItem != VMA_NULL)
4033 ItemType*
const pPrevItem = pItem->pPrev;
4034 m_ItemAllocator.Free(pItem);
4037 m_pFront = VMA_NULL;
4043 template<
typename T>
4044 VmaListItem<T>* VmaRawList<T>::PushBack()
4046 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4047 pNewItem->pNext = VMA_NULL;
4050 pNewItem->pPrev = VMA_NULL;
4051 m_pFront = pNewItem;
4057 pNewItem->pPrev = m_pBack;
4058 m_pBack->pNext = pNewItem;
4065 template<
typename T>
4066 VmaListItem<T>* VmaRawList<T>::PushFront()
4068 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
4069 pNewItem->pPrev = VMA_NULL;
4072 pNewItem->pNext = VMA_NULL;
4073 m_pFront = pNewItem;
4079 pNewItem->pNext = m_pFront;
4080 m_pFront->pPrev = pNewItem;
4081 m_pFront = pNewItem;
4087 template<
typename T>
4088 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
4090 ItemType*
const pNewItem = PushBack();
4091 pNewItem->Value = value;
4095 template<
typename T>
4096 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
4098 ItemType*
const pNewItem = PushFront();
4099 pNewItem->Value = value;
4103 template<
typename T>
4104 void VmaRawList<T>::PopBack()
4106 VMA_HEAVY_ASSERT(m_Count > 0);
4107 ItemType*
const pBackItem = m_pBack;
4108 ItemType*
const pPrevItem = pBackItem->pPrev;
4109 if(pPrevItem != VMA_NULL)
4111 pPrevItem->pNext = VMA_NULL;
4113 m_pBack = pPrevItem;
4114 m_ItemAllocator.Free(pBackItem);
4118 template<
typename T>
4119 void VmaRawList<T>::PopFront()
4121 VMA_HEAVY_ASSERT(m_Count > 0);
4122 ItemType*
const pFrontItem = m_pFront;
4123 ItemType*
const pNextItem = pFrontItem->pNext;
4124 if(pNextItem != VMA_NULL)
4126 pNextItem->pPrev = VMA_NULL;
4128 m_pFront = pNextItem;
4129 m_ItemAllocator.Free(pFrontItem);
4133 template<
typename T>
4134 void VmaRawList<T>::Remove(ItemType* pItem)
4136 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4137 VMA_HEAVY_ASSERT(m_Count > 0);
4139 if(pItem->pPrev != VMA_NULL)
4141 pItem->pPrev->pNext = pItem->pNext;
4145 VMA_HEAVY_ASSERT(m_pFront == pItem);
4146 m_pFront = pItem->pNext;
4149 if(pItem->pNext != VMA_NULL)
4151 pItem->pNext->pPrev = pItem->pPrev;
4155 VMA_HEAVY_ASSERT(m_pBack == pItem);
4156 m_pBack = pItem->pPrev;
4159 m_ItemAllocator.Free(pItem);
4163 template<
typename T>
4164 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4166 if(pItem != VMA_NULL)
4168 ItemType*
const prevItem = pItem->pPrev;
4169 ItemType*
const newItem = m_ItemAllocator.Alloc();
4170 newItem->pPrev = prevItem;
4171 newItem->pNext = pItem;
4172 pItem->pPrev = newItem;
4173 if(prevItem != VMA_NULL)
4175 prevItem->pNext = newItem;
4179 VMA_HEAVY_ASSERT(m_pFront == pItem);
4189 template<
typename T>
4190 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4192 if(pItem != VMA_NULL)
4194 ItemType*
const nextItem = pItem->pNext;
4195 ItemType*
const newItem = m_ItemAllocator.Alloc();
4196 newItem->pNext = nextItem;
4197 newItem->pPrev = pItem;
4198 pItem->pNext = newItem;
4199 if(nextItem != VMA_NULL)
4201 nextItem->pPrev = newItem;
4205 VMA_HEAVY_ASSERT(m_pBack == pItem);
4215 template<
typename T>
4216 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4218 ItemType*
const newItem = InsertBefore(pItem);
4219 newItem->Value = value;
4223 template<
typename T>
4224 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4226 ItemType*
const newItem = InsertAfter(pItem);
4227 newItem->Value = value;
4231 template<
typename T,
typename AllocatorT>
4234 VMA_CLASS_NO_COPY(VmaList)
4245 T& operator*()
const 4247 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4248 return m_pItem->Value;
4250 T* operator->()
const 4252 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4253 return &m_pItem->Value;
4256 iterator& operator++()
4258 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4259 m_pItem = m_pItem->pNext;
4262 iterator& operator--()
4264 if(m_pItem != VMA_NULL)
4266 m_pItem = m_pItem->pPrev;
4270 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4271 m_pItem = m_pList->Back();
4276 iterator operator++(
int)
4278 iterator result = *
this;
4282 iterator operator--(
int)
4284 iterator result = *
this;
4289 bool operator==(
const iterator& rhs)
const 4291 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4292 return m_pItem == rhs.m_pItem;
4294 bool operator!=(
const iterator& rhs)
const 4296 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4297 return m_pItem != rhs.m_pItem;
4301 VmaRawList<T>* m_pList;
4302 VmaListItem<T>* m_pItem;
4304 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4310 friend class VmaList<T, AllocatorT>;
4313 class const_iterator
4322 const_iterator(
const iterator& src) :
4323 m_pList(src.m_pList),
4324 m_pItem(src.m_pItem)
4328 const T& operator*()
const 4330 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4331 return m_pItem->Value;
4333 const T* operator->()
const 4335 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4336 return &m_pItem->Value;
4339 const_iterator& operator++()
4341 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4342 m_pItem = m_pItem->pNext;
4345 const_iterator& operator--()
4347 if(m_pItem != VMA_NULL)
4349 m_pItem = m_pItem->pPrev;
4353 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4354 m_pItem = m_pList->Back();
4359 const_iterator operator++(
int)
4361 const_iterator result = *
this;
4365 const_iterator operator--(
int)
4367 const_iterator result = *
this;
4372 bool operator==(
const const_iterator& rhs)
const 4374 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4375 return m_pItem == rhs.m_pItem;
4377 bool operator!=(
const const_iterator& rhs)
const 4379 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4380 return m_pItem != rhs.m_pItem;
4384 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4390 const VmaRawList<T>* m_pList;
4391 const VmaListItem<T>* m_pItem;
4393 friend class VmaList<T, AllocatorT>;
4396 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4398 bool empty()
const {
return m_RawList.IsEmpty(); }
4399 size_t size()
const {
return m_RawList.GetCount(); }
4401 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4402 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4404 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4405 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4407 void clear() { m_RawList.Clear(); }
4408 void push_back(
const T& value) { m_RawList.PushBack(value); }
4409 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4410 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4413 VmaRawList<T> m_RawList;
4416 #endif // #if VMA_USE_STL_LIST 4424 #if VMA_USE_STL_UNORDERED_MAP 4426 #define VmaPair std::pair 4428 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4429 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4431 #else // #if VMA_USE_STL_UNORDERED_MAP 4433 template<
typename T1,
typename T2>
4439 VmaPair() : first(), second() { }
4440 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
4446 template<
typename KeyT,
typename ValueT>
4450 typedef VmaPair<KeyT, ValueT> PairType;
4451 typedef PairType* iterator;
4453 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4455 iterator begin() {
return m_Vector.begin(); }
4456 iterator end() {
return m_Vector.end(); }
4458 void insert(
const PairType& pair);
4459 iterator find(
const KeyT& key);
4460 void erase(iterator it);
4463 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4466 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4468 template<
typename FirstT,
typename SecondT>
4469 struct VmaPairFirstLess
4471 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4473 return lhs.first < rhs.first;
4475 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4477 return lhs.first < rhsFirst;
4481 template<
typename KeyT,
typename ValueT>
4482 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4484 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4486 m_Vector.data() + m_Vector.size(),
4488 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4489 VmaVectorInsert(m_Vector, indexToInsert, pair);
4492 template<
typename KeyT,
typename ValueT>
4493 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4495 PairType* it = VmaBinaryFindFirstNotLess(
4497 m_Vector.data() + m_Vector.size(),
4499 VmaPairFirstLess<KeyT, ValueT>());
4500 if((it != m_Vector.end()) && (it->first == key))
4506 return m_Vector.end();
4510 template<
typename KeyT,
typename ValueT>
4511 void VmaMap<KeyT, ValueT>::erase(iterator it)
4513 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4516 #endif // #if VMA_USE_STL_UNORDERED_MAP 4522 class VmaDeviceMemoryBlock;
4524 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
4526 struct VmaAllocation_T
4528 VMA_CLASS_NO_COPY(VmaAllocation_T)
4530 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4534 FLAG_USER_DATA_STRING = 0x01,
4538 enum ALLOCATION_TYPE
4540 ALLOCATION_TYPE_NONE,
4541 ALLOCATION_TYPE_BLOCK,
4542 ALLOCATION_TYPE_DEDICATED,
4545 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
4548 m_pUserData(VMA_NULL),
4549 m_LastUseFrameIndex(currentFrameIndex),
4550 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
4551 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
4553 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
4555 #if VMA_STATS_STRING_ENABLED 4556 m_CreationFrameIndex = currentFrameIndex;
4557 m_BufferImageUsage = 0;
4563 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4566 VMA_ASSERT(m_pUserData == VMA_NULL);
4569 void InitBlockAllocation(
4571 VmaDeviceMemoryBlock* block,
4572 VkDeviceSize offset,
4573 VkDeviceSize alignment,
4575 VmaSuballocationType suballocationType,
4579 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4580 VMA_ASSERT(block != VMA_NULL);
4581 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4582 m_Alignment = alignment;
4584 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4585 m_SuballocationType = (uint8_t)suballocationType;
4586 m_BlockAllocation.m_hPool = hPool;
4587 m_BlockAllocation.m_Block = block;
4588 m_BlockAllocation.m_Offset = offset;
4589 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
4594 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4595 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
4596 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4597 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
4598 m_BlockAllocation.m_Block = VMA_NULL;
4599 m_BlockAllocation.m_Offset = 0;
4600 m_BlockAllocation.m_CanBecomeLost =
true;
4603 void ChangeBlockAllocation(
4605 VmaDeviceMemoryBlock* block,
4606 VkDeviceSize offset);
4609 void InitDedicatedAllocation(
4610 uint32_t memoryTypeIndex,
4611 VkDeviceMemory hMemory,
4612 VmaSuballocationType suballocationType,
4616 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4617 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
4618 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
4621 m_SuballocationType = (uint8_t)suballocationType;
4622 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4623 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
4624 m_DedicatedAllocation.m_hMemory = hMemory;
4625 m_DedicatedAllocation.m_pMappedData = pMappedData;
4628 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
4629 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
4630 VkDeviceSize GetSize()
const {
return m_Size; }
4631 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
4632 void* GetUserData()
const {
return m_pUserData; }
4633 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
4634 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
4636 VmaDeviceMemoryBlock* GetBlock()
const 4638 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4639 return m_BlockAllocation.m_Block;
4641 VkDeviceSize GetOffset()
const;
4642 VkDeviceMemory GetMemory()
const;
4643 uint32_t GetMemoryTypeIndex()
const;
4644 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
4645 void* GetMappedData()
const;
4646 bool CanBecomeLost()
const;
4649 uint32_t GetLastUseFrameIndex()
const 4651 return m_LastUseFrameIndex.load();
4653 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
4655 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
4665 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4667 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
4669 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
4680 void BlockAllocMap();
4681 void BlockAllocUnmap();
4682 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
4685 #if VMA_STATS_STRING_ENABLED 4686 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
4687 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
4689 void InitBufferImageUsage(uint32_t bufferImageUsage)
4691 VMA_ASSERT(m_BufferImageUsage == 0);
4692 m_BufferImageUsage = bufferImageUsage;
4695 void PrintParameters(
class VmaJsonWriter& json)
const;
4699 VkDeviceSize m_Alignment;
4700 VkDeviceSize m_Size;
4702 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
4704 uint8_t m_SuballocationType;
4711 struct BlockAllocation
4714 VmaDeviceMemoryBlock* m_Block;
4715 VkDeviceSize m_Offset;
4716 bool m_CanBecomeLost;
4720 struct DedicatedAllocation
4722 uint32_t m_MemoryTypeIndex;
4723 VkDeviceMemory m_hMemory;
4724 void* m_pMappedData;
4730 BlockAllocation m_BlockAllocation;
4732 DedicatedAllocation m_DedicatedAllocation;
4735 #if VMA_STATS_STRING_ENABLED 4736 uint32_t m_CreationFrameIndex;
4737 uint32_t m_BufferImageUsage;
4747 struct VmaSuballocation
4749 VkDeviceSize offset;
4752 VmaSuballocationType type;
4756 struct VmaSuballocationOffsetLess
4758 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4760 return lhs.offset < rhs.offset;
4763 struct VmaSuballocationOffsetGreater
4765 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4767 return lhs.offset > rhs.offset;
4771 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
4774 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
4789 struct VmaAllocationRequest
4791 VkDeviceSize offset;
4792 VkDeviceSize sumFreeSize;
4793 VkDeviceSize sumItemSize;
4794 VmaSuballocationList::iterator item;
4795 size_t itemsToMakeLostCount;
4798 VkDeviceSize CalcCost()
const 4800 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
4808 class VmaBlockMetadata
4812 virtual ~VmaBlockMetadata() { }
4813 virtual void Init(VkDeviceSize size) { m_Size = size; }
4816 virtual bool Validate()
const = 0;
4817 VkDeviceSize GetSize()
const {
return m_Size; }
4818 virtual size_t GetAllocationCount()
const = 0;
4819 virtual VkDeviceSize GetSumFreeSize()
const = 0;
4820 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
4822 virtual bool IsEmpty()
const = 0;
4824 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
4826 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
4828 #if VMA_STATS_STRING_ENABLED 4829 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
4835 virtual bool CreateAllocationRequest(
4836 uint32_t currentFrameIndex,
4837 uint32_t frameInUseCount,
4838 VkDeviceSize bufferImageGranularity,
4839 VkDeviceSize allocSize,
4840 VkDeviceSize allocAlignment,
4842 VmaSuballocationType allocType,
4843 bool canMakeOtherLost,
4845 VmaAllocationRequest* pAllocationRequest) = 0;
4847 virtual bool MakeRequestedAllocationsLost(
4848 uint32_t currentFrameIndex,
4849 uint32_t frameInUseCount,
4850 VmaAllocationRequest* pAllocationRequest) = 0;
4852 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
4854 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
4858 const VmaAllocationRequest& request,
4859 VmaSuballocationType type,
4860 VkDeviceSize allocSize,
4866 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
4869 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
4871 #if VMA_STATS_STRING_ENABLED 4872 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
4873 VkDeviceSize unusedBytes,
4874 size_t allocationCount,
4875 size_t unusedRangeCount)
const;
4876 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
4877 VkDeviceSize offset,
4879 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
4880 VkDeviceSize offset,
4881 VkDeviceSize size)
const;
4882 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
4886 VkDeviceSize m_Size;
4887 const VkAllocationCallbacks* m_pAllocationCallbacks;
4890 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 4891 VMA_ASSERT(0 && "Validation failed: " #cond); \ 4895 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
4897 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
4900 virtual ~VmaBlockMetadata_Generic();
4901 virtual void Init(VkDeviceSize size);
4903 virtual bool Validate()
const;
4904 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
4905 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4906 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
4907 virtual bool IsEmpty()
const;
4909 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4910 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
4912 #if VMA_STATS_STRING_ENABLED 4913 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
4916 virtual bool CreateAllocationRequest(
4917 uint32_t currentFrameIndex,
4918 uint32_t frameInUseCount,
4919 VkDeviceSize bufferImageGranularity,
4920 VkDeviceSize allocSize,
4921 VkDeviceSize allocAlignment,
4923 VmaSuballocationType allocType,
4924 bool canMakeOtherLost,
4926 VmaAllocationRequest* pAllocationRequest);
4928 virtual bool MakeRequestedAllocationsLost(
4929 uint32_t currentFrameIndex,
4930 uint32_t frameInUseCount,
4931 VmaAllocationRequest* pAllocationRequest);
4933 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4935 virtual VkResult CheckCorruption(
const void* pBlockData);
4938 const VmaAllocationRequest& request,
4939 VmaSuballocationType type,
4940 VkDeviceSize allocSize,
4945 virtual void FreeAtOffset(VkDeviceSize offset);
4948 uint32_t m_FreeCount;
4949 VkDeviceSize m_SumFreeSize;
4950 VmaSuballocationList m_Suballocations;
4953 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
4955 bool ValidateFreeSuballocationList()
const;
4959 bool CheckAllocation(
4960 uint32_t currentFrameIndex,
4961 uint32_t frameInUseCount,
4962 VkDeviceSize bufferImageGranularity,
4963 VkDeviceSize allocSize,
4964 VkDeviceSize allocAlignment,
4965 VmaSuballocationType allocType,
4966 VmaSuballocationList::const_iterator suballocItem,
4967 bool canMakeOtherLost,
4968 VkDeviceSize* pOffset,
4969 size_t* itemsToMakeLostCount,
4970 VkDeviceSize* pSumFreeSize,
4971 VkDeviceSize* pSumItemSize)
const;
4973 void MergeFreeWithNext(VmaSuballocationList::iterator item);
4977 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
4980 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
4983 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
5064 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
5066 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
5069 virtual ~VmaBlockMetadata_Linear();
5070 virtual void Init(VkDeviceSize size);
5072 virtual bool Validate()
const;
5073 virtual size_t GetAllocationCount()
const;
5074 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
5075 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5076 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
5078 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5079 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5081 #if VMA_STATS_STRING_ENABLED 5082 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5085 virtual bool CreateAllocationRequest(
5086 uint32_t currentFrameIndex,
5087 uint32_t frameInUseCount,
5088 VkDeviceSize bufferImageGranularity,
5089 VkDeviceSize allocSize,
5090 VkDeviceSize allocAlignment,
5092 VmaSuballocationType allocType,
5093 bool canMakeOtherLost,
5095 VmaAllocationRequest* pAllocationRequest);
5097 virtual bool MakeRequestedAllocationsLost(
5098 uint32_t currentFrameIndex,
5099 uint32_t frameInUseCount,
5100 VmaAllocationRequest* pAllocationRequest);
5102 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5104 virtual VkResult CheckCorruption(
const void* pBlockData);
5107 const VmaAllocationRequest& request,
5108 VmaSuballocationType type,
5109 VkDeviceSize allocSize,
5114 virtual void FreeAtOffset(VkDeviceSize offset);
5124 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
5126 enum SECOND_VECTOR_MODE
5128 SECOND_VECTOR_EMPTY,
5133 SECOND_VECTOR_RING_BUFFER,
5139 SECOND_VECTOR_DOUBLE_STACK,
5142 VkDeviceSize m_SumFreeSize;
5143 SuballocationVectorType m_Suballocations0, m_Suballocations1;
5144 uint32_t m_1stVectorIndex;
5145 SECOND_VECTOR_MODE m_2ndVectorMode;
5147 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5148 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5149 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5150 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5153 size_t m_1stNullItemsBeginCount;
5155 size_t m_1stNullItemsMiddleCount;
5157 size_t m_2ndNullItemsCount;
5159 bool ShouldCompact1st()
const;
5160 void CleanupAfterFree();
5174 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5176 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5179 virtual ~VmaBlockMetadata_Buddy();
5180 virtual void Init(VkDeviceSize size);
5182 virtual bool Validate()
const;
5183 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
5184 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5185 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5186 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5188 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5189 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5191 #if VMA_STATS_STRING_ENABLED 5192 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5195 virtual bool CreateAllocationRequest(
5196 uint32_t currentFrameIndex,
5197 uint32_t frameInUseCount,
5198 VkDeviceSize bufferImageGranularity,
5199 VkDeviceSize allocSize,
5200 VkDeviceSize allocAlignment,
5202 VmaSuballocationType allocType,
5203 bool canMakeOtherLost,
5205 VmaAllocationRequest* pAllocationRequest);
5207 virtual bool MakeRequestedAllocationsLost(
5208 uint32_t currentFrameIndex,
5209 uint32_t frameInUseCount,
5210 VmaAllocationRequest* pAllocationRequest);
5212 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
5214 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
5217 const VmaAllocationRequest& request,
5218 VmaSuballocationType type,
5219 VkDeviceSize allocSize,
5223 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5224 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
5227 static const VkDeviceSize MIN_NODE_SIZE = 32;
5228 static const size_t MAX_LEVELS = 30;
5230 struct ValidationContext
5232 size_t calculatedAllocationCount;
5233 size_t calculatedFreeCount;
5234 VkDeviceSize calculatedSumFreeSize;
5236 ValidationContext() :
5237 calculatedAllocationCount(0),
5238 calculatedFreeCount(0),
5239 calculatedSumFreeSize(0) { }
5244 VkDeviceSize offset;
5274 VkDeviceSize m_UsableSize;
5275 uint32_t m_LevelCount;
5281 } m_FreeList[MAX_LEVELS];
5283 size_t m_AllocationCount;
5287 VkDeviceSize m_SumFreeSize;
5289 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5290 void DeleteNode(Node* node);
5291 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
5292 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
5293 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
5295 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5296 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
5300 void AddToFreeListFront(uint32_t level, Node* node);
5304 void RemoveFromFreeList(uint32_t level, Node* node);
5306 #if VMA_STATS_STRING_ENABLED 5307 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
5317 class VmaDeviceMemoryBlock
5319 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5321 VmaBlockMetadata* m_pMetadata;
5325 ~VmaDeviceMemoryBlock()
5327 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5328 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5334 uint32_t newMemoryTypeIndex,
5335 VkDeviceMemory newMemory,
5336 VkDeviceSize newSize,
5338 uint32_t algorithm);
5342 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5343 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5344 uint32_t GetId()
const {
return m_Id; }
5345 void* GetMappedData()
const {
return m_pMappedData; }
5348 bool Validate()
const;
5353 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
5356 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5357 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5359 VkResult BindBufferMemory(
5363 VkResult BindImageMemory(
5369 uint32_t m_MemoryTypeIndex;
5371 VkDeviceMemory m_hMemory;
5376 uint32_t m_MapCount;
5377 void* m_pMappedData;
// Comparator for sorting/searching containers of raw pointers by address.
// NOTE(review): the operator body was lost in extraction; `lhs < rhs` restored
// as the only meaningful comparison here — verify against upstream.
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

class VmaDefragmentator;
5396 struct VmaBlockVector
5398 VMA_CLASS_NO_COPY(VmaBlockVector)
5402 uint32_t memoryTypeIndex,
5403 VkDeviceSize preferredBlockSize,
5404 size_t minBlockCount,
5405 size_t maxBlockCount,
5406 VkDeviceSize bufferImageGranularity,
5407 uint32_t frameInUseCount,
5409 bool explicitBlockSize,
5410 uint32_t algorithm);
5413 VkResult CreateMinBlocks();
5415 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5416 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5417 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5418 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5419 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5423 bool IsEmpty()
const {
return m_Blocks.empty(); }
5424 bool IsCorruptionDetectionEnabled()
const;
5428 uint32_t currentFrameIndex,
5430 VkDeviceSize alignment,
5432 VmaSuballocationType suballocType,
5441 #if VMA_STATS_STRING_ENABLED 5442 void PrintDetailedMap(
class VmaJsonWriter& json);
5445 void MakePoolAllocationsLost(
5446 uint32_t currentFrameIndex,
5447 size_t* pLostAllocationCount);
5448 VkResult CheckCorruption();
5450 VmaDefragmentator* EnsureDefragmentator(
5452 uint32_t currentFrameIndex);
5454 VkResult Defragment(
5456 VkDeviceSize& maxBytesToMove,
5457 uint32_t& maxAllocationsToMove);
5459 void DestroyDefragmentator();
5462 friend class VmaDefragmentator;
5465 const uint32_t m_MemoryTypeIndex;
5466 const VkDeviceSize m_PreferredBlockSize;
5467 const size_t m_MinBlockCount;
5468 const size_t m_MaxBlockCount;
5469 const VkDeviceSize m_BufferImageGranularity;
5470 const uint32_t m_FrameInUseCount;
5471 const bool m_IsCustomPool;
5472 const bool m_ExplicitBlockSize;
5473 const uint32_t m_Algorithm;
5474 bool m_HasEmptyBlock;
5477 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
5481 VmaDefragmentator* m_pDefragmentator;
5482 uint32_t m_NextBlockId;
5484 VkDeviceSize CalcMaxBlockSize()
const;
5487 void Remove(VmaDeviceMemoryBlock* pBlock);
5491 void IncrementallySortBlocks();
5494 VkResult AllocateFromBlock(
5495 VmaDeviceMemoryBlock* pBlock,
5497 uint32_t currentFrameIndex,
5499 VkDeviceSize alignment,
5502 VmaSuballocationType suballocType,
5506 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
5511 VMA_CLASS_NO_COPY(VmaPool_T)
5513 VmaBlockVector m_BlockVector;
5518 VkDeviceSize preferredBlockSize);
5521 uint32_t GetId()
const {
return m_Id; }
5522 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
5524 #if VMA_STATS_STRING_ENABLED 5532 class VmaDefragmentator
5534 VMA_CLASS_NO_COPY(VmaDefragmentator)
5537 VmaBlockVector*
const m_pBlockVector;
5538 uint32_t m_CurrentFrameIndex;
5539 VkDeviceSize m_BytesMoved;
5540 uint32_t m_AllocationsMoved;
5542 struct AllocationInfo
5545 VkBool32* m_pChanged;
5548 m_hAllocation(VK_NULL_HANDLE),
5549 m_pChanged(VMA_NULL)
5554 struct AllocationInfoSizeGreater
5556 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 5558 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
5563 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5567 VmaDeviceMemoryBlock* m_pBlock;
5568 bool m_HasNonMovableAllocations;
5569 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5571 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
5573 m_HasNonMovableAllocations(true),
5574 m_Allocations(pAllocationCallbacks),
5575 m_pMappedDataForDefragmentation(VMA_NULL)
5579 void CalcHasNonMovableAllocations()
5581 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
5582 const size_t defragmentAllocCount = m_Allocations.size();
5583 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
5586 void SortAllocationsBySizeDescecnding()
5588 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
5591 VkResult EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData);
5596 void* m_pMappedDataForDefragmentation;
5599 struct BlockPointerLess
5601 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 5603 return pLhsBlockInfo->m_pBlock < pRhsBlock;
5605 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5607 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
5613 struct BlockInfoCompareMoveDestination
5615 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5617 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
5621 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
5625 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
5633 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
5634 BlockInfoVector m_Blocks;
5636 VkResult DefragmentRound(
5637 VkDeviceSize maxBytesToMove,
5638 uint32_t maxAllocationsToMove);
5640 static bool MoveMakesSense(
5641 size_t dstBlockIndex, VkDeviceSize dstOffset,
5642 size_t srcBlockIndex, VkDeviceSize srcOffset);
5647 VmaBlockVector* pBlockVector,
5648 uint32_t currentFrameIndex);
5650 ~VmaDefragmentator();
5652 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
5653 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
5655 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
5657 VkResult Defragment(
5658 VkDeviceSize maxBytesToMove,
5659 uint32_t maxAllocationsToMove);
5662 struct VmaDefragmentationContext_T
5665 VmaDefragmentationContext_T();
5666 ~VmaDefragmentationContext_T();
5671 #if VMA_RECORDING_ENABLED 5678 void WriteConfiguration(
5679 const VkPhysicalDeviceProperties& devProps,
5680 const VkPhysicalDeviceMemoryProperties& memProps,
5681 bool dedicatedAllocationExtensionEnabled);
5684 void RecordCreateAllocator(uint32_t frameIndex);
5685 void RecordDestroyAllocator(uint32_t frameIndex);
5686 void RecordCreatePool(uint32_t frameIndex,
5689 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
5690 void RecordAllocateMemory(uint32_t frameIndex,
5691 const VkMemoryRequirements& vkMemReq,
5694 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
5695 const VkMemoryRequirements& vkMemReq,
5696 bool requiresDedicatedAllocation,
5697 bool prefersDedicatedAllocation,
5700 void RecordAllocateMemoryForImage(uint32_t frameIndex,
5701 const VkMemoryRequirements& vkMemReq,
5702 bool requiresDedicatedAllocation,
5703 bool prefersDedicatedAllocation,
5706 void RecordFreeMemory(uint32_t frameIndex,
5708 void RecordSetAllocationUserData(uint32_t frameIndex,
5710 const void* pUserData);
5711 void RecordCreateLostAllocation(uint32_t frameIndex,
5713 void RecordMapMemory(uint32_t frameIndex,
5715 void RecordUnmapMemory(uint32_t frameIndex,
5717 void RecordFlushAllocation(uint32_t frameIndex,
5718 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5719 void RecordInvalidateAllocation(uint32_t frameIndex,
5720 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5721 void RecordCreateBuffer(uint32_t frameIndex,
5722 const VkBufferCreateInfo& bufCreateInfo,
5725 void RecordCreateImage(uint32_t frameIndex,
5726 const VkImageCreateInfo& imageCreateInfo,
5729 void RecordDestroyBuffer(uint32_t frameIndex,
5731 void RecordDestroyImage(uint32_t frameIndex,
5733 void RecordTouchAllocation(uint32_t frameIndex,
5735 void RecordGetAllocationInfo(uint32_t frameIndex,
5737 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
5747 class UserDataString
5751 const char* GetString()
const {
return m_Str; }
5761 VMA_MUTEX m_FileMutex;
5763 int64_t m_StartCounter;
5765 void GetBasicParams(CallParams& outParams);
5769 #endif // #if VMA_RECORDING_ENABLED 5772 struct VmaAllocator_T
5774 VMA_CLASS_NO_COPY(VmaAllocator_T)
5777 bool m_UseKhrDedicatedAllocation;
5779 bool m_AllocationCallbacksSpecified;
5780 VkAllocationCallbacks m_AllocationCallbacks;
5784 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
5785 VMA_MUTEX m_HeapSizeLimitMutex;
5787 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
5788 VkPhysicalDeviceMemoryProperties m_MemProps;
5791 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
5794 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
5795 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
5796 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
5802 const VkAllocationCallbacks* GetAllocationCallbacks()
const 5804 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
5808 return m_VulkanFunctions;
5811 VkDeviceSize GetBufferImageGranularity()
const 5814 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
5815 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
5818 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
5819 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
5821 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 5823 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
5824 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
5827 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 5829 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
5830 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
5833 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 5835 return IsMemoryTypeNonCoherent(memTypeIndex) ?
5836 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
5837 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
5840 bool IsIntegratedGpu()
const 5842 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
5845 #if VMA_RECORDING_ENABLED 5846 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
5849 void GetBufferMemoryRequirements(
5851 VkMemoryRequirements& memReq,
5852 bool& requiresDedicatedAllocation,
5853 bool& prefersDedicatedAllocation)
const;
5854 void GetImageMemoryRequirements(
5856 VkMemoryRequirements& memReq,
5857 bool& requiresDedicatedAllocation,
5858 bool& prefersDedicatedAllocation)
const;
5861 VkResult AllocateMemory(
5862 const VkMemoryRequirements& vkMemReq,
5863 bool requiresDedicatedAllocation,
5864 bool prefersDedicatedAllocation,
5865 VkBuffer dedicatedBuffer,
5866 VkImage dedicatedImage,
5868 VmaSuballocationType suballocType,
5874 void CalculateStats(
VmaStats* pStats);
5876 #if VMA_STATS_STRING_ENABLED 5877 void PrintDetailedMap(
class VmaJsonWriter& json);
5880 VkResult DefragmentationBegin(
5884 VkResult DefragmentationEnd(
5891 void DestroyPool(
VmaPool pool);
5894 void SetCurrentFrameIndex(uint32_t frameIndex);
5895 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
5897 void MakePoolAllocationsLost(
5899 size_t* pLostAllocationCount);
5900 VkResult CheckPoolCorruption(
VmaPool hPool);
5901 VkResult CheckCorruption(uint32_t memoryTypeBits);
5905 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
5906 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
5911 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
5912 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
5914 void FlushOrInvalidateAllocation(
5916 VkDeviceSize offset, VkDeviceSize size,
5917 VMA_CACHE_OPERATION op);
5919 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
5922 VkDeviceSize m_PreferredLargeHeapBlockSize;
5924 VkPhysicalDevice m_PhysicalDevice;
5925 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
5927 VMA_MUTEX m_PoolsMutex;
5929 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
5930 uint32_t m_NextPoolId;
5934 #if VMA_RECORDING_ENABLED 5935 VmaRecorder* m_pRecorder;
5940 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
5942 VkResult AllocateMemoryOfType(
5944 VkDeviceSize alignment,
5945 bool dedicatedAllocation,
5946 VkBuffer dedicatedBuffer,
5947 VkImage dedicatedImage,
5949 uint32_t memTypeIndex,
5950 VmaSuballocationType suballocType,
5954 VkResult AllocateDedicatedMemory(
5956 VmaSuballocationType suballocType,
5957 uint32_t memTypeIndex,
5959 bool isUserDataString,
5961 VkBuffer dedicatedBuffer,
5962 VkImage dedicatedImage,
5972 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
5974 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
5977 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
5979 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
5982 template<
typename T>
5985 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
5988 template<
typename T>
5989 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
5991 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
5994 template<
typename T>
5995 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
6000 VmaFree(hAllocator, ptr);
6004 template<
typename T>
6005 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
6009 for(
size_t i = count; i--; )
6011 VmaFree(hAllocator, ptr);
6018 #if VMA_STATS_STRING_ENABLED 6020 class VmaStringBuilder
6023 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
6024 size_t GetLength()
const {
return m_Data.size(); }
6025 const char* GetData()
const {
return m_Data.data(); }
6027 void Add(
char ch) { m_Data.push_back(ch); }
6028 void Add(
const char* pStr);
6029 void AddNewLine() { Add(
'\n'); }
6030 void AddNumber(uint32_t num);
6031 void AddNumber(uint64_t num);
6032 void AddPointer(
const void* ptr);
6035 VmaVector< char, VmaStlAllocator<char> > m_Data;
6038 void VmaStringBuilder::Add(
const char* pStr)
6040 const size_t strLen = strlen(pStr);
6043 const size_t oldCount = m_Data.size();
6044 m_Data.resize(oldCount + strLen);
6045 memcpy(m_Data.data() + oldCount, pStr, strLen);
6049 void VmaStringBuilder::AddNumber(uint32_t num)
6052 VmaUint32ToStr(buf,
sizeof(buf), num);
6056 void VmaStringBuilder::AddNumber(uint64_t num)
6059 VmaUint64ToStr(buf,
sizeof(buf), num);
6063 void VmaStringBuilder::AddPointer(
const void* ptr)
6066 VmaPtrToStr(buf,
sizeof(buf), ptr);
6070 #endif // #if VMA_STATS_STRING_ENABLED 6075 #if VMA_STATS_STRING_ENABLED 6079 VMA_CLASS_NO_COPY(VmaJsonWriter)
6081 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
6084 void BeginObject(
bool singleLine =
false);
6087 void BeginArray(
bool singleLine =
false);
6090 void WriteString(
const char* pStr);
6091 void BeginString(
const char* pStr = VMA_NULL);
6092 void ContinueString(
const char* pStr);
6093 void ContinueString(uint32_t n);
6094 void ContinueString(uint64_t n);
6095 void ContinueString_Pointer(
const void* ptr);
6096 void EndString(
const char* pStr = VMA_NULL);
6098 void WriteNumber(uint32_t n);
6099 void WriteNumber(uint64_t n);
6100 void WriteBool(
bool b);
6104 static const char*
const INDENT;
6106 enum COLLECTION_TYPE
6108 COLLECTION_TYPE_OBJECT,
6109 COLLECTION_TYPE_ARRAY,
6113 COLLECTION_TYPE type;
6114 uint32_t valueCount;
6115 bool singleLineMode;
6118 VmaStringBuilder& m_SB;
6119 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
6120 bool m_InsideString;
6122 void BeginValue(
bool isString);
6123 void WriteIndent(
bool oneLess =
false);
6126 const char*
const VmaJsonWriter::INDENT =
" ";
6128 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
6130 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
6131 m_InsideString(false)
6135 VmaJsonWriter::~VmaJsonWriter()
6137 VMA_ASSERT(!m_InsideString);
6138 VMA_ASSERT(m_Stack.empty());
6141 void VmaJsonWriter::BeginObject(
bool singleLine)
6143 VMA_ASSERT(!m_InsideString);
6149 item.type = COLLECTION_TYPE_OBJECT;
6150 item.valueCount = 0;
6151 item.singleLineMode = singleLine;
6152 m_Stack.push_back(item);
6155 void VmaJsonWriter::EndObject()
6157 VMA_ASSERT(!m_InsideString);
6162 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
6166 void VmaJsonWriter::BeginArray(
bool singleLine)
6168 VMA_ASSERT(!m_InsideString);
6174 item.type = COLLECTION_TYPE_ARRAY;
6175 item.valueCount = 0;
6176 item.singleLineMode = singleLine;
6177 m_Stack.push_back(item);
6180 void VmaJsonWriter::EndArray()
6182 VMA_ASSERT(!m_InsideString);
6187 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
6191 void VmaJsonWriter::WriteString(
const char* pStr)
6197 void VmaJsonWriter::BeginString(
const char* pStr)
6199 VMA_ASSERT(!m_InsideString);
6203 m_InsideString =
true;
6204 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6206 ContinueString(pStr);
6210 void VmaJsonWriter::ContinueString(
const char* pStr)
6212 VMA_ASSERT(m_InsideString);
6214 const size_t strLen = strlen(pStr);
6215 for(
size_t i = 0; i < strLen; ++i)
6248 VMA_ASSERT(0 &&
"Character not currently supported.");
6254 void VmaJsonWriter::ContinueString(uint32_t n)
6256 VMA_ASSERT(m_InsideString);
6260 void VmaJsonWriter::ContinueString(uint64_t n)
6262 VMA_ASSERT(m_InsideString);
6266 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
6268 VMA_ASSERT(m_InsideString);
6269 m_SB.AddPointer(ptr);
6272 void VmaJsonWriter::EndString(
const char* pStr)
6274 VMA_ASSERT(m_InsideString);
6275 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6277 ContinueString(pStr);
6280 m_InsideString =
false;
6283 void VmaJsonWriter::WriteNumber(uint32_t n)
6285 VMA_ASSERT(!m_InsideString);
6290 void VmaJsonWriter::WriteNumber(uint64_t n)
6292 VMA_ASSERT(!m_InsideString);
6297 void VmaJsonWriter::WriteBool(
bool b)
6299 VMA_ASSERT(!m_InsideString);
6301 m_SB.Add(b ?
"true" :
"false");
6304 void VmaJsonWriter::WriteNull()
6306 VMA_ASSERT(!m_InsideString);
6311 void VmaJsonWriter::BeginValue(
bool isString)
6313 if(!m_Stack.empty())
6315 StackItem& currItem = m_Stack.back();
6316 if(currItem.type == COLLECTION_TYPE_OBJECT &&
6317 currItem.valueCount % 2 == 0)
6319 VMA_ASSERT(isString);
6322 if(currItem.type == COLLECTION_TYPE_OBJECT &&
6323 currItem.valueCount % 2 != 0)
6327 else if(currItem.valueCount > 0)
6336 ++currItem.valueCount;
6340 void VmaJsonWriter::WriteIndent(
bool oneLess)
6342 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
6346 size_t count = m_Stack.size();
6347 if(count > 0 && oneLess)
6351 for(
size_t i = 0; i < count; ++i)
6358 #endif // #if VMA_STATS_STRING_ENABLED 6362 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
6364 if(IsUserDataString())
6366 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
6368 FreeUserDataString(hAllocator);
6370 if(pUserData != VMA_NULL)
6372 const char*
const newStrSrc = (
char*)pUserData;
6373 const size_t newStrLen = strlen(newStrSrc);
6374 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
6375 memcpy(newStrDst, newStrSrc, newStrLen + 1);
6376 m_pUserData = newStrDst;
6381 m_pUserData = pUserData;
6385 void VmaAllocation_T::ChangeBlockAllocation(
6387 VmaDeviceMemoryBlock* block,
6388 VkDeviceSize offset)
6390 VMA_ASSERT(block != VMA_NULL);
6391 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6394 if(block != m_BlockAllocation.m_Block)
6396 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
6397 if(IsPersistentMap())
6399 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
6400 block->Map(hAllocator, mapRefCount, VMA_NULL);
6403 m_BlockAllocation.m_Block = block;
6404 m_BlockAllocation.m_Offset = offset;
6407 VkDeviceSize VmaAllocation_T::GetOffset()
const 6411 case ALLOCATION_TYPE_BLOCK:
6412 return m_BlockAllocation.m_Offset;
6413 case ALLOCATION_TYPE_DEDICATED:
6421 VkDeviceMemory VmaAllocation_T::GetMemory()
const 6425 case ALLOCATION_TYPE_BLOCK:
6426 return m_BlockAllocation.m_Block->GetDeviceMemory();
6427 case ALLOCATION_TYPE_DEDICATED:
6428 return m_DedicatedAllocation.m_hMemory;
6431 return VK_NULL_HANDLE;
6435 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 6439 case ALLOCATION_TYPE_BLOCK:
6440 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
6441 case ALLOCATION_TYPE_DEDICATED:
6442 return m_DedicatedAllocation.m_MemoryTypeIndex;
6449 void* VmaAllocation_T::GetMappedData()
const 6453 case ALLOCATION_TYPE_BLOCK:
6456 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
6457 VMA_ASSERT(pBlockData != VMA_NULL);
6458 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
6465 case ALLOCATION_TYPE_DEDICATED:
6466 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
6467 return m_DedicatedAllocation.m_pMappedData;
6474 bool VmaAllocation_T::CanBecomeLost()
const 6478 case ALLOCATION_TYPE_BLOCK:
6479 return m_BlockAllocation.m_CanBecomeLost;
6480 case ALLOCATION_TYPE_DEDICATED:
6488 VmaPool VmaAllocation_T::GetPool()
const 6490 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6491 return m_BlockAllocation.m_hPool;
6494 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
6496 VMA_ASSERT(CanBecomeLost());
6502 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
6505 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
6510 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
6516 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
6526 #if VMA_STATS_STRING_ENABLED 6529 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
6538 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 6540 json.WriteString(
"Type");
6541 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
6543 json.WriteString(
"Size");
6544 json.WriteNumber(m_Size);
6546 if(m_pUserData != VMA_NULL)
6548 json.WriteString(
"UserData");
6549 if(IsUserDataString())
6551 json.WriteString((
const char*)m_pUserData);
6556 json.ContinueString_Pointer(m_pUserData);
6561 json.WriteString(
"CreationFrameIndex");
6562 json.WriteNumber(m_CreationFrameIndex);
6564 json.WriteString(
"LastUseFrameIndex");
6565 json.WriteNumber(GetLastUseFrameIndex());
6567 if(m_BufferImageUsage != 0)
6569 json.WriteString(
"Usage");
6570 json.WriteNumber(m_BufferImageUsage);
6576 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
6578 VMA_ASSERT(IsUserDataString());
6579 if(m_pUserData != VMA_NULL)
6581 char*
const oldStr = (
char*)m_pUserData;
6582 const size_t oldStrLen = strlen(oldStr);
6583 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
6584 m_pUserData = VMA_NULL;
6588 void VmaAllocation_T::BlockAllocMap()
6590 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
6592 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
6598 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
6602 void VmaAllocation_T::BlockAllocUnmap()
6604 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
6606 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6612 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
6616 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
6618 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
6622 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
6624 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
6625 *ppData = m_DedicatedAllocation.m_pMappedData;
6631 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
6632 return VK_ERROR_MEMORY_MAP_FAILED;
6637 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6638 hAllocator->m_hDevice,
6639 m_DedicatedAllocation.m_hMemory,
6644 if(result == VK_SUCCESS)
6646 m_DedicatedAllocation.m_pMappedData = *ppData;
6653 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
6655 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
6657 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6662 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
6663 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
6664 hAllocator->m_hDevice,
6665 m_DedicatedAllocation.m_hMemory);
6670 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
6674 #if VMA_STATS_STRING_ENABLED 6676 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
6680 json.WriteString(
"Blocks");
6683 json.WriteString(
"Allocations");
6686 json.WriteString(
"UnusedRanges");
6689 json.WriteString(
"UsedBytes");
6692 json.WriteString(
"UnusedBytes");
6697 json.WriteString(
"AllocationSize");
6698 json.BeginObject(
true);
6699 json.WriteString(
"Min");
6701 json.WriteString(
"Avg");
6703 json.WriteString(
"Max");
6710 json.WriteString(
"UnusedRangeSize");
6711 json.BeginObject(
true);
6712 json.WriteString(
"Min");
6714 json.WriteString(
"Avg");
6716 json.WriteString(
"Max");
6724 #endif // #if VMA_STATS_STRING_ENABLED 6726 struct VmaSuballocationItemSizeLess
6729 const VmaSuballocationList::iterator lhs,
6730 const VmaSuballocationList::iterator rhs)
const 6732 return lhs->size < rhs->size;
6735 const VmaSuballocationList::iterator lhs,
6736 VkDeviceSize rhsSize)
const 6738 return lhs->size < rhsSize;
6746 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
6748 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
6752 #if VMA_STATS_STRING_ENABLED 6754 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
6755 VkDeviceSize unusedBytes,
6756 size_t allocationCount,
6757 size_t unusedRangeCount)
const 6761 json.WriteString(
"TotalBytes");
6762 json.WriteNumber(GetSize());
6764 json.WriteString(
"UnusedBytes");
6765 json.WriteNumber(unusedBytes);
6767 json.WriteString(
"Allocations");
6768 json.WriteNumber((uint64_t)allocationCount);
6770 json.WriteString(
"UnusedRanges");
6771 json.WriteNumber((uint64_t)unusedRangeCount);
6773 json.WriteString(
"Suballocations");
6777 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
6778 VkDeviceSize offset,
6781 json.BeginObject(
true);
6783 json.WriteString(
"Offset");
6784 json.WriteNumber(offset);
6786 hAllocation->PrintParameters(json);
6791 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
6792 VkDeviceSize offset,
6793 VkDeviceSize size)
const 6795 json.BeginObject(
true);
6797 json.WriteString(
"Offset");
6798 json.WriteNumber(offset);
6800 json.WriteString(
"Type");
6801 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
6803 json.WriteString(
"Size");
6804 json.WriteNumber(size);
6809 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const 6815 #endif // #if VMA_STATS_STRING_ENABLED 6820 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
6821 VmaBlockMetadata(hAllocator),
6824 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
6825 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
6829 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
6833 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
6835 VmaBlockMetadata::Init(size);
6838 m_SumFreeSize = size;
6840 VmaSuballocation suballoc = {};
6841 suballoc.offset = 0;
6842 suballoc.size = size;
6843 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6844 suballoc.hAllocation = VK_NULL_HANDLE;
6846 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
6847 m_Suballocations.push_back(suballoc);
6848 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
6850 m_FreeSuballocationsBySize.push_back(suballocItem);
6853 bool VmaBlockMetadata_Generic::Validate()
const 6855 VMA_VALIDATE(!m_Suballocations.empty());
6858 VkDeviceSize calculatedOffset = 0;
6860 uint32_t calculatedFreeCount = 0;
6862 VkDeviceSize calculatedSumFreeSize = 0;
6865 size_t freeSuballocationsToRegister = 0;
6867 bool prevFree =
false;
6869 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6870 suballocItem != m_Suballocations.cend();
6873 const VmaSuballocation& subAlloc = *suballocItem;
6876 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
6878 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
6880 VMA_VALIDATE(!prevFree || !currFree);
6882 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
6886 calculatedSumFreeSize += subAlloc.size;
6887 ++calculatedFreeCount;
6888 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6890 ++freeSuballocationsToRegister;
6894 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
6898 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
6899 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
6902 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
6905 calculatedOffset += subAlloc.size;
6906 prevFree = currFree;
6911 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
6913 VkDeviceSize lastSize = 0;
6914 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
6916 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
6919 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
6921 VMA_VALIDATE(suballocItem->size >= lastSize);
6923 lastSize = suballocItem->size;
6927 VMA_VALIDATE(ValidateFreeSuballocationList());
6928 VMA_VALIDATE(calculatedOffset == GetSize());
6929 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
6930 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
6935 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 6937 if(!m_FreeSuballocationsBySize.empty())
6939 return m_FreeSuballocationsBySize.back()->size;
6947 bool VmaBlockMetadata_Generic::IsEmpty()
const 6949 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
6952 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 6956 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6968 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6969 suballocItem != m_Suballocations.cend();
6972 const VmaSuballocation& suballoc = *suballocItem;
6973 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
6986 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 6988 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6990 inoutStats.
size += GetSize();
6997 #if VMA_STATS_STRING_ENABLED 6999 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 7001 PrintDetailedMap_Begin(json,
7003 m_Suballocations.size() - (size_t)m_FreeCount,
7007 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
7008 suballocItem != m_Suballocations.cend();
7009 ++suballocItem, ++i)
7011 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7013 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
7017 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
7021 PrintDetailedMap_End(json);
7024 #endif // #if VMA_STATS_STRING_ENABLED 7026 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
7027 uint32_t currentFrameIndex,
7028 uint32_t frameInUseCount,
7029 VkDeviceSize bufferImageGranularity,
7030 VkDeviceSize allocSize,
7031 VkDeviceSize allocAlignment,
7033 VmaSuballocationType allocType,
7034 bool canMakeOtherLost,
7036 VmaAllocationRequest* pAllocationRequest)
7038 VMA_ASSERT(allocSize > 0);
7039 VMA_ASSERT(!upperAddress);
7040 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7041 VMA_ASSERT(pAllocationRequest != VMA_NULL);
7042 VMA_HEAVY_ASSERT(Validate());
7045 if(canMakeOtherLost ==
false &&
7046 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
7052 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
7053 if(freeSuballocCount > 0)
7058 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7059 m_FreeSuballocationsBySize.data(),
7060 m_FreeSuballocationsBySize.data() + freeSuballocCount,
7061 allocSize + 2 * VMA_DEBUG_MARGIN,
7062 VmaSuballocationItemSizeLess());
7063 size_t index = it - m_FreeSuballocationsBySize.data();
7064 for(; index < freeSuballocCount; ++index)
7069 bufferImageGranularity,
7073 m_FreeSuballocationsBySize[index],
7075 &pAllocationRequest->offset,
7076 &pAllocationRequest->itemsToMakeLostCount,
7077 &pAllocationRequest->sumFreeSize,
7078 &pAllocationRequest->sumItemSize))
7080 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
7088 for(
size_t index = freeSuballocCount; index--; )
7093 bufferImageGranularity,
7097 m_FreeSuballocationsBySize[index],
7099 &pAllocationRequest->offset,
7100 &pAllocationRequest->itemsToMakeLostCount,
7101 &pAllocationRequest->sumFreeSize,
7102 &pAllocationRequest->sumItemSize))
7104 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
7111 if(canMakeOtherLost)
7115 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
7116 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
7118 VmaAllocationRequest tmpAllocRequest = {};
7119 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
7120 suballocIt != m_Suballocations.end();
7123 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
7124 suballocIt->hAllocation->CanBecomeLost())
7129 bufferImageGranularity,
7135 &tmpAllocRequest.offset,
7136 &tmpAllocRequest.itemsToMakeLostCount,
7137 &tmpAllocRequest.sumFreeSize,
7138 &tmpAllocRequest.sumItemSize))
7140 tmpAllocRequest.item = suballocIt;
7142 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost() ||
7145 *pAllocationRequest = tmpAllocRequest;
7151 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
7160 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
7161 uint32_t currentFrameIndex,
7162 uint32_t frameInUseCount,
7163 VmaAllocationRequest* pAllocationRequest)
7165 while(pAllocationRequest->itemsToMakeLostCount > 0)
7167 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
7169 ++pAllocationRequest->item;
7171 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7172 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
7173 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
7174 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
7176 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
7177 --pAllocationRequest->itemsToMakeLostCount;
7185 VMA_HEAVY_ASSERT(Validate());
7186 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7187 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
7192 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7194 uint32_t lostAllocationCount = 0;
7195 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7196 it != m_Suballocations.end();
7199 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
7200 it->hAllocation->CanBecomeLost() &&
7201 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
7203 it = FreeSuballocation(it);
7204 ++lostAllocationCount;
7207 return lostAllocationCount;
7210 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
7212 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7213 it != m_Suballocations.end();
7216 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
7218 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
7220 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
7221 return VK_ERROR_VALIDATION_FAILED_EXT;
7223 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
7225 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
7226 return VK_ERROR_VALIDATION_FAILED_EXT;
7234 void VmaBlockMetadata_Generic::Alloc(
7235 const VmaAllocationRequest& request,
7236 VmaSuballocationType type,
7237 VkDeviceSize allocSize,
7241 VMA_ASSERT(!upperAddress);
7242 VMA_ASSERT(request.item != m_Suballocations.end());
7243 VmaSuballocation& suballoc = *request.item;
7245 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7247 VMA_ASSERT(request.offset >= suballoc.offset);
7248 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
7249 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
7250 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
7254 UnregisterFreeSuballocation(request.item);
7256 suballoc.offset = request.offset;
7257 suballoc.size = allocSize;
7258 suballoc.type = type;
7259 suballoc.hAllocation = hAllocation;
7264 VmaSuballocation paddingSuballoc = {};
7265 paddingSuballoc.offset = request.offset + allocSize;
7266 paddingSuballoc.size = paddingEnd;
7267 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7268 VmaSuballocationList::iterator next = request.item;
7270 const VmaSuballocationList::iterator paddingEndItem =
7271 m_Suballocations.insert(next, paddingSuballoc);
7272 RegisterFreeSuballocation(paddingEndItem);
7278 VmaSuballocation paddingSuballoc = {};
7279 paddingSuballoc.offset = request.offset - paddingBegin;
7280 paddingSuballoc.size = paddingBegin;
7281 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7282 const VmaSuballocationList::iterator paddingBeginItem =
7283 m_Suballocations.insert(request.item, paddingSuballoc);
7284 RegisterFreeSuballocation(paddingBeginItem);
7288 m_FreeCount = m_FreeCount - 1;
7289 if(paddingBegin > 0)
7297 m_SumFreeSize -= allocSize;
7300 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
7302 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7303 suballocItem != m_Suballocations.end();
7306 VmaSuballocation& suballoc = *suballocItem;
7307 if(suballoc.hAllocation == allocation)
7309 FreeSuballocation(suballocItem);
7310 VMA_HEAVY_ASSERT(Validate());
7314 VMA_ASSERT(0 &&
"Not found!");
7317 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
7319 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7320 suballocItem != m_Suballocations.end();
7323 VmaSuballocation& suballoc = *suballocItem;
7324 if(suballoc.offset == offset)
7326 FreeSuballocation(suballocItem);
7330 VMA_ASSERT(0 &&
"Not found!");
7333 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 7335 VkDeviceSize lastSize = 0;
7336 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
7338 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
7340 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
7341 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
7342 VMA_VALIDATE(it->size >= lastSize);
7343 lastSize = it->size;
7348 bool VmaBlockMetadata_Generic::CheckAllocation(
7349 uint32_t currentFrameIndex,
7350 uint32_t frameInUseCount,
7351 VkDeviceSize bufferImageGranularity,
7352 VkDeviceSize allocSize,
7353 VkDeviceSize allocAlignment,
7354 VmaSuballocationType allocType,
7355 VmaSuballocationList::const_iterator suballocItem,
7356 bool canMakeOtherLost,
7357 VkDeviceSize* pOffset,
7358 size_t* itemsToMakeLostCount,
7359 VkDeviceSize* pSumFreeSize,
7360 VkDeviceSize* pSumItemSize)
const 7362 VMA_ASSERT(allocSize > 0);
7363 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7364 VMA_ASSERT(suballocItem != m_Suballocations.cend());
7365 VMA_ASSERT(pOffset != VMA_NULL);
7367 *itemsToMakeLostCount = 0;
7371 if(canMakeOtherLost)
7373 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7375 *pSumFreeSize = suballocItem->size;
7379 if(suballocItem->hAllocation->CanBecomeLost() &&
7380 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7382 ++*itemsToMakeLostCount;
7383 *pSumItemSize = suballocItem->size;
7392 if(GetSize() - suballocItem->offset < allocSize)
7398 *pOffset = suballocItem->offset;
7401 if(VMA_DEBUG_MARGIN > 0)
7403 *pOffset += VMA_DEBUG_MARGIN;
7407 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
7411 if(bufferImageGranularity > 1)
7413 bool bufferImageGranularityConflict =
false;
7414 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7415 while(prevSuballocItem != m_Suballocations.cbegin())
7418 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7419 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7421 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7423 bufferImageGranularityConflict =
true;
7431 if(bufferImageGranularityConflict)
7433 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
7439 if(*pOffset >= suballocItem->offset + suballocItem->size)
7445 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
7448 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
7450 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
7452 if(suballocItem->offset + totalSize > GetSize())
7459 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
7460 if(totalSize > suballocItem->size)
7462 VkDeviceSize remainingSize = totalSize - suballocItem->size;
7463 while(remainingSize > 0)
7466 if(lastSuballocItem == m_Suballocations.cend())
7470 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7472 *pSumFreeSize += lastSuballocItem->size;
7476 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
7477 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
7478 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7480 ++*itemsToMakeLostCount;
7481 *pSumItemSize += lastSuballocItem->size;
7488 remainingSize = (lastSuballocItem->size < remainingSize) ?
7489 remainingSize - lastSuballocItem->size : 0;
7495 if(bufferImageGranularity > 1)
7497 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
7499 while(nextSuballocItem != m_Suballocations.cend())
7501 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7502 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7504 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
7506 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
7507 if(nextSuballoc.hAllocation->CanBecomeLost() &&
7508 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7510 ++*itemsToMakeLostCount;
7529 const VmaSuballocation& suballoc = *suballocItem;
7530 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7532 *pSumFreeSize = suballoc.size;
7535 if(suballoc.size < allocSize)
7541 *pOffset = suballoc.offset;
7544 if(VMA_DEBUG_MARGIN > 0)
7546 *pOffset += VMA_DEBUG_MARGIN;
7550 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
7554 if(bufferImageGranularity > 1)
7556 bool bufferImageGranularityConflict =
false;
7557 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7558 while(prevSuballocItem != m_Suballocations.cbegin())
7561 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7562 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7564 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7566 bufferImageGranularityConflict =
true;
7574 if(bufferImageGranularityConflict)
7576 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
7581 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
7584 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
7587 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
7594 if(bufferImageGranularity > 1)
7596 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
7598 while(nextSuballocItem != m_Suballocations.cend())
7600 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7601 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7603 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
7622 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
7624 VMA_ASSERT(item != m_Suballocations.end());
7625 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7627 VmaSuballocationList::iterator nextItem = item;
7629 VMA_ASSERT(nextItem != m_Suballocations.end());
7630 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7632 item->size += nextItem->size;
7634 m_Suballocations.erase(nextItem);
7637 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
7640 VmaSuballocation& suballoc = *suballocItem;
7641 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7642 suballoc.hAllocation = VK_NULL_HANDLE;
7646 m_SumFreeSize += suballoc.size;
7649 bool mergeWithNext =
false;
7650 bool mergeWithPrev =
false;
7652 VmaSuballocationList::iterator nextItem = suballocItem;
7654 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
7656 mergeWithNext =
true;
7659 VmaSuballocationList::iterator prevItem = suballocItem;
7660 if(suballocItem != m_Suballocations.begin())
7663 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7665 mergeWithPrev =
true;
7671 UnregisterFreeSuballocation(nextItem);
7672 MergeFreeWithNext(suballocItem);
7677 UnregisterFreeSuballocation(prevItem);
7678 MergeFreeWithNext(prevItem);
7679 RegisterFreeSuballocation(prevItem);
7684 RegisterFreeSuballocation(suballocItem);
7685 return suballocItem;
7689 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
7691 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7692 VMA_ASSERT(item->size > 0);
7696 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
7698 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7700 if(m_FreeSuballocationsBySize.empty())
7702 m_FreeSuballocationsBySize.push_back(item);
7706 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a free suballocation from m_FreeSuballocationsBySize. Because the
// vector is sorted by size (not identity), a binary search finds the first
// element of equal size, then a linear scan over the equal-size run finds the
// exact iterator to remove. Asserts if the item is not found.
7714 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
7716 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7717 VMA_ASSERT(item->size > 0);
7721 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Only registered items (>= minimum register size) need removal.
7723 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
// Binary search: first slot whose size is not less than item->size.
7725 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7726 m_FreeSuballocationsBySize.data(),
7727 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
7729 VmaSuballocationItemSizeLess());
// Linear scan across the run of equal-size entries to match by identity.
7730 for(
size_t index = it - m_FreeSuballocationsBySize.data();
7731 index < m_FreeSuballocationsBySize.size();
7734 if(m_FreeSuballocationsBySize[index] == item)
7736 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Still inside the equal-size run? If sizes stop matching, the item was
// absent — both branches assert "Not found.".
7739 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
7741 VMA_ASSERT(0 &&
"Not found.");
// Constructor of the linear (ring-buffer / double-stack) block metadata.
// Both suballocation vectors use the allocator's custom allocation callbacks.
// Starts with vector 0 as the "1st" vector, the 2nd vector unused, and all
// null-item counters at zero.
// NOTE(review): an initializer line (between 7751 and 7753, likely
// m_SumFreeSize(0)) is elided in this extraction — verify.
7750 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
7751 VmaBlockMetadata(hAllocator),
7753 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7754 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7755 m_1stVectorIndex(0),
7756 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
7757 m_1stNullItemsBeginCount(0),
7758 m_1stNullItemsMiddleCount(0),
7759 m_2ndNullItemsCount(0)
// Destructor — empty; member vectors release their storage via RAII.
7763 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
// Initializes metadata for a block of the given size: forwards to the base
// class, then records the whole block as free.
7767 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
7769 VmaBlockMetadata::Init(size);
7770 m_SumFreeSize = size;
// Consistency check of the whole linear-metadata state: vector/mode agreement,
// null-item counters, per-suballocation invariants, monotonically increasing
// offsets, and that m_SumFreeSize matches block size minus used bytes.
// NOTE(review): many lines (braces, VMA_VALIDATE calls, null-item counting,
// the final `return true;`) are elided in this extraction — verify.
7773 bool VmaBlockMetadata_Linear::Validate()
const 7775 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7776 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// 2nd vector is empty exactly when its mode says so; a ring buffer requires
// a non-empty 1st vector.
7778 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
7779 VMA_VALIDATE(!suballocations1st.empty() ||
7780 suballocations2nd.empty() ||
7781 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
// 1st vector: first non-null item and last item must hold live allocations.
7783 if(!suballocations1st.empty())
7786 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
7788 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
7790 if(!suballocations2nd.empty())
7793 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
// Null-item counters cannot exceed the vectors they describe.
7796 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
7797 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
7799 VkDeviceSize sumUsedSize = 0;
7800 const size_t suballoc1stCount = suballocations1st.size();
7801 VkDeviceSize offset = VMA_DEBUG_MARGIN;
// Ring-buffer mode: 2nd vector occupies the low-address end; walk it first.
7803 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7805 const size_t suballoc2ndCount = suballocations2nd.size();
7806 size_t nullItem2ndCount = 0;
7807 for(
size_t i = 0; i < suballoc2ndCount; ++i)
7809 const VmaSuballocation& suballoc = suballocations2nd[i];
7810 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// FREE type and null handle must agree; offsets must never go backwards.
7812 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7813 VMA_VALIDATE(suballoc.offset >= offset);
// Live allocation must mirror its suballocation's offset/size.
7817 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7818 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7819 sumUsedSize += suballoc.size;
7826 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7829 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Leading null items of the 1st vector must all be genuinely free.
7832 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
7834 const VmaSuballocation& suballoc = suballocations1st[i];
7835 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
7836 suballoc.hAllocation == VK_NULL_HANDLE);
7839 size_t nullItem1stCount = m_1stNullItemsBeginCount;
// Walk the remainder of the 1st vector with the same per-item checks.
7841 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
7843 const VmaSuballocation& suballoc = suballocations1st[i];
7844 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7846 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7847 VMA_VALIDATE(suballoc.offset >= offset);
7848 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
7852 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7853 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7854 sumUsedSize += suballoc.size;
7861 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7863 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
// Double-stack mode: 2nd vector grows downward from the top, so iterate it
// in reverse to keep offsets increasing.
7865 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7867 const size_t suballoc2ndCount = suballocations2nd.size();
7868 size_t nullItem2ndCount = 0;
7869 for(
size_t i = suballoc2ndCount; i--; )
7871 const VmaSuballocation& suballoc = suballocations2nd[i];
7872 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7874 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7875 VMA_VALIDATE(suballoc.offset >= offset);
7879 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7880 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7881 sumUsedSize += suballoc.size;
7888 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7891 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
// Global totals must reconcile.
7894 VMA_VALIDATE(offset <= GetSize());
7895 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
// Number of live allocations: total items in both vectors minus the counted
// null (freed-in-place) items.
7900 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 7902 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
7903 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
// Returns the size of the largest contiguous free range, depending on which
// mode the 2nd vector is in.
// NOTE(review): several lines are elided (e.g. the early-return for an empty
// block around 7909-7919, the VMA_MAX wrapper at 7934, case closing braces,
// and the default/fall-through return) — verify against the full source.
7906 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 7908 const VkDeviceSize size = GetSize();
7920 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7922 switch(m_2ndVectorMode)
// Only 1st vector in use: free space is before the first and after the last
// allocation; the larger of the two is returned (outer call elided).
7924 case SECOND_VECTOR_EMPTY:
7930 const size_t suballocations1stCount = suballocations1st.size();
7931 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
7932 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
7933 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
7935 firstSuballoc.offset,
7936 size - (lastSuballoc.offset + lastSuballoc.size));
// Ring buffer: free gap lies between the end of the 2nd vector (low addresses)
// and the start of the 1st vector.
7940 case SECOND_VECTOR_RING_BUFFER:
7945 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7946 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
7947 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
7948 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
// Double stack: free gap lies between the top of the 1st (bottom) stack and
// the lowest item of the 2nd (top) stack.
7952 case SECOND_VECTOR_DOUBLE_STACK:
7957 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7958 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
7959 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
7960 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Fills outInfo with statistics by sweeping the block from offset 0 to its
// end in three phases: (ring-buffer) 2nd-vector allocations at low addresses,
// then the 1st vector, then (double-stack) 2nd-vector allocations at high
// addresses, accumulating allocation and unused-range info along the way.
// NOTE(review): the VmaInitStatInfo/accounting lines and closing braces are
// elided in this extraction — only the scan skeleton is visible. Verify.
7970 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 7972 const VkDeviceSize size = GetSize();
7973 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7974 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7975 const size_t suballoc1stCount = suballocations1st.size();
7976 const size_t suballoc2ndCount = suballocations2nd.size();
// lastOffset tracks how far the sweep has progressed from the block start.
7987 VkDeviceSize lastOffset = 0;
// Phase 1 (ring buffer only): 2nd vector occupies [0, start of 1st vector).
7989 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7991 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
7992 size_t nextAlloc2ndIndex = 0;
7993 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip freed (null) items.
7996 while(nextAlloc2ndIndex < suballoc2ndCount &&
7997 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
7999 ++nextAlloc2ndIndex;
8003 if(nextAlloc2ndIndex < suballoc2ndCount)
8005 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
8008 if(lastOffset < suballoc.offset)
8011 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8025 lastOffset = suballoc.offset + suballoc.size;
8026 ++nextAlloc2ndIndex;
// No more allocations: the remainder up to the 1st vector is unused.
8032 if(lastOffset < freeSpace2ndTo1stEnd)
8034 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8042 lastOffset = freeSpace2ndTo1stEnd;
// Phase 2: sweep the 1st vector up to the 2nd stack (or block end).
8047 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8048 const VkDeviceSize freeSpace1stTo2ndEnd =
8049 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8050 while(lastOffset < freeSpace1stTo2ndEnd)
8053 while(nextAlloc1stIndex < suballoc1stCount &&
8054 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8056 ++nextAlloc1stIndex;
8060 if(nextAlloc1stIndex < suballoc1stCount)
8062 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8065 if(lastOffset < suballoc.offset)
8068 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8082 lastOffset = suballoc.offset + suballoc.size;
8083 ++nextAlloc1stIndex;
8089 if(lastOffset < freeSpace1stTo2ndEnd)
8091 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8099 lastOffset = freeSpace1stTo2ndEnd;
// Phase 3 (double stack only): sweep the 2nd vector from its lowest-offset
// item (back of the vector iterated in reverse) up to block end.
8103 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8105 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8106 while(lastOffset < size)
8109 while(nextAlloc2ndIndex != SIZE_MAX &&
8110 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8112 --nextAlloc2ndIndex;
8116 if(nextAlloc2ndIndex != SIZE_MAX)
8118 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8121 if(lastOffset < suballoc.offset)
8124 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8138 lastOffset = suballoc.offset + suballoc.size;
8139 --nextAlloc2ndIndex;
8145 if(lastOffset < size)
8147 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Accumulates this block's usage into inoutStats using the same three-phase
// sweep as CalcAllocationStatInfo (ring-buffer 2nd vector, 1st vector, then
// double-stack 2nd vector).
// NOTE(review): the lines that actually add to inoutStats (allocation counts,
// unusedSize, unusedRangeSizeMax) are elided in this extraction — verify.
8163 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 8165 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8166 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8167 const VkDeviceSize size = GetSize();
8168 const size_t suballoc1stCount = suballocations1st.size();
8169 const size_t suballoc2ndCount = suballocations2nd.size();
// The whole block contributes to the pool's total size.
8171 inoutStats.
size += size;
8173 VkDeviceSize lastOffset = 0;
// Phase 1 (ring buffer): 2nd vector at low addresses.
// NOTE(review): nextAlloc2ndIndex starts at m_1stNullItemsBeginCount here,
// unlike the sibling sweeps which start at 0 — looks suspicious; confirm
// against upstream before relying on it.
8175 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8177 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8178 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
8179 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip freed (null) items.
8182 while(nextAlloc2ndIndex < suballoc2ndCount &&
8183 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8185 ++nextAlloc2ndIndex;
8189 if(nextAlloc2ndIndex < suballoc2ndCount)
8191 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8194 if(lastOffset < suballoc.offset)
8197 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8208 lastOffset = suballoc.offset + suballoc.size;
8209 ++nextAlloc2ndIndex;
8214 if(lastOffset < freeSpace2ndTo1stEnd)
8217 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8224 lastOffset = freeSpace2ndTo1stEnd;
// Phase 2: 1st vector up to the 2nd stack (or block end).
8229 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8230 const VkDeviceSize freeSpace1stTo2ndEnd =
8231 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8232 while(lastOffset < freeSpace1stTo2ndEnd)
8235 while(nextAlloc1stIndex < suballoc1stCount &&
8236 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8238 ++nextAlloc1stIndex;
8242 if(nextAlloc1stIndex < suballoc1stCount)
8244 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8247 if(lastOffset < suballoc.offset)
8250 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8261 lastOffset = suballoc.offset + suballoc.size;
8262 ++nextAlloc1stIndex;
8267 if(lastOffset < freeSpace1stTo2ndEnd)
8270 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8277 lastOffset = freeSpace1stTo2ndEnd;
// Phase 3 (double stack): 2nd vector in reverse index order (ascending offset).
8281 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8283 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8284 while(lastOffset < size)
8287 while(nextAlloc2ndIndex != SIZE_MAX &&
8288 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8290 --nextAlloc2ndIndex;
8294 if(nextAlloc2ndIndex != SIZE_MAX)
8296 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8299 if(lastOffset < suballoc.offset)
8302 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8313 lastOffset = suballoc.offset + suballoc.size;
8314 --nextAlloc2ndIndex;
8319 if(lastOffset < size)
8322 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Writes a detailed JSON map of this block. Two passes over the same
// three-phase sweep: pass 1 counts allocations / unused ranges / used bytes
// (needed by PrintDetailedMap_Begin), pass 2 emits each allocation and unused
// range in ascending offset order.
// NOTE(review): counter-increment lines (alloc counts, unusedRangeCount) and
// the lastOffset reset between passes are elided in this extraction — verify.
#if VMA_STATS_STRING_ENABLED 8336 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 8338 const VkDeviceSize size = GetSize();
8339 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8340 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8341 const size_t suballoc1stCount = suballocations1st.size();
8342 const size_t suballoc2ndCount = suballocations2nd.size();
// ---- Pass 1: count items and bytes. ----
8346 size_t unusedRangeCount = 0;
8347 VkDeviceSize usedBytes = 0;
8349 VkDeviceSize lastOffset = 0;
8351 size_t alloc2ndCount = 0;
// Ring-buffer 2nd vector at low addresses.
8352 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8354 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8355 size_t nextAlloc2ndIndex = 0;
8356 while(lastOffset < freeSpace2ndTo1stEnd)
8359 while(nextAlloc2ndIndex < suballoc2ndCount &&
8360 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8362 ++nextAlloc2ndIndex;
8366 if(nextAlloc2ndIndex < suballoc2ndCount)
8368 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8371 if(lastOffset < suballoc.offset)
8380 usedBytes += suballoc.size;
8383 lastOffset = suballoc.offset + suballoc.size;
8384 ++nextAlloc2ndIndex;
8389 if(lastOffset < freeSpace2ndTo1stEnd)
8396 lastOffset = freeSpace2ndTo1stEnd;
// 1st vector up to the 2nd stack (or block end).
8401 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8402 size_t alloc1stCount = 0;
8403 const VkDeviceSize freeSpace1stTo2ndEnd =
8404 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8405 while(lastOffset < freeSpace1stTo2ndEnd)
8408 while(nextAlloc1stIndex < suballoc1stCount &&
8409 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8411 ++nextAlloc1stIndex;
8415 if(nextAlloc1stIndex < suballoc1stCount)
8417 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8420 if(lastOffset < suballoc.offset)
8429 usedBytes += suballoc.size;
8432 lastOffset = suballoc.offset + suballoc.size;
8433 ++nextAlloc1stIndex;
8438 if(lastOffset < size)
8445 lastOffset = freeSpace1stTo2ndEnd;
// Double-stack 2nd vector at high addresses.
8449 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8451 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8452 while(lastOffset < size)
8455 while(nextAlloc2ndIndex != SIZE_MAX &&
8456 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8458 --nextAlloc2ndIndex;
8462 if(nextAlloc2ndIndex != SIZE_MAX)
8464 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8467 if(lastOffset < suballoc.offset)
8476 usedBytes += suballoc.size;
8479 lastOffset = suballoc.offset + suballoc.size;
8480 --nextAlloc2ndIndex;
8485 if(lastOffset < size)
// ---- Pass 2: emit JSON, repeating the same sweep. ----
8497 const VkDeviceSize unusedBytes = size - usedBytes;
8498 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
8503 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8505 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8506 size_t nextAlloc2ndIndex = 0;
8507 while(lastOffset < freeSpace2ndTo1stEnd)
8510 while(nextAlloc2ndIndex < suballoc2ndCount &&
8511 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8513 ++nextAlloc2ndIndex;
8517 if(nextAlloc2ndIndex < suballoc2ndCount)
8519 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8522 if(lastOffset < suballoc.offset)
8525 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8526 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8531 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8534 lastOffset = suballoc.offset + suballoc.size;
8535 ++nextAlloc2ndIndex;
8540 if(lastOffset < freeSpace2ndTo1stEnd)
8543 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8544 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8548 lastOffset = freeSpace2ndTo1stEnd;
8553 nextAlloc1stIndex = m_1stNullItemsBeginCount;
8554 while(lastOffset < freeSpace1stTo2ndEnd)
8557 while(nextAlloc1stIndex < suballoc1stCount &&
8558 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8560 ++nextAlloc1stIndex;
8564 if(nextAlloc1stIndex < suballoc1stCount)
8566 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8569 if(lastOffset < suballoc.offset)
8572 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8573 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8578 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8581 lastOffset = suballoc.offset + suballoc.size;
8582 ++nextAlloc1stIndex;
8587 if(lastOffset < freeSpace1stTo2ndEnd)
8590 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8591 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8595 lastOffset = freeSpace1stTo2ndEnd;
8599 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8601 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8602 while(lastOffset < size)
8605 while(nextAlloc2ndIndex != SIZE_MAX &&
8606 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8608 --nextAlloc2ndIndex;
8612 if(nextAlloc2ndIndex != SIZE_MAX)
8614 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8617 if(lastOffset < suballoc.offset)
8620 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8621 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8626 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8629 lastOffset = suballoc.offset + suballoc.size;
8630 --nextAlloc2ndIndex;
8635 if(lastOffset < size)
8638 const VkDeviceSize unusedRangeSize = size - lastOffset;
8639 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8648 PrintDetailedMap_End(json);
// Tries to find space for a new allocation in this linear block and fills
// *pAllocationRequest on success. Three strategies are visible:
//   1) upsideDown / upper-address placement → grows the double stack downward
//      from the top of the block (fails if the block is used as ring buffer);
//   2) normal placement at the end of the 1st vector;
//   3) ring-buffer placement after the 2nd vector, optionally making existing
//      allocations "lost" (canMakeOtherLost) to create room.
// Respects VMA_DEBUG_MARGIN, allocAlignment, and bufferImageGranularity
// (allocations of conflicting types must not share a granularity page).
// NOTE(review): many lines are elided in this extraction — the upsideDown
// parameter and its branch header, early `return false/true` lines, and most
// closing braces. Confirm against the full source before modifying.
#endif // #if VMA_STATS_STRING_ENABLED 8652 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
8653 uint32_t currentFrameIndex,
8654 uint32_t frameInUseCount,
8655 VkDeviceSize bufferImageGranularity,
8656 VkDeviceSize allocSize,
8657 VkDeviceSize allocAlignment,
8659 VmaSuballocationType allocType,
8660 bool canMakeOtherLost,
8662 VmaAllocationRequest* pAllocationRequest)
8664 VMA_ASSERT(allocSize > 0);
8665 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8666 VMA_ASSERT(pAllocationRequest != VMA_NULL);
8667 VMA_HEAVY_ASSERT(Validate());
8669 const VkDeviceSize size = GetSize();
8670 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8671 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// --- Strategy 1: allocate from the top of the block (double stack). ---
// A ring buffer cannot simultaneously be used as a double stack.
8675 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8677 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
// Request larger than the whole block can never fit.
8682 if(allocSize > size)
// Start just below the lowest existing 2nd-stack item (or the block end).
8686 VkDeviceSize resultBaseOffset = size - allocSize;
8687 if(!suballocations2nd.empty())
8689 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8690 resultBaseOffset = lastSuballoc.offset - allocSize;
8691 if(allocSize > lastSuballoc.offset)
8698 VkDeviceSize resultOffset = resultBaseOffset;
// Apply debug margin and alignment, moving DOWN since this stack grows down.
8701 if(VMA_DEBUG_MARGIN > 0)
8703 if(resultOffset < VMA_DEBUG_MARGIN)
8707 resultOffset -= VMA_DEBUG_MARGIN;
8711 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Push further down if a granularity-page conflict exists with the item above.
8715 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8717 bool bufferImageGranularityConflict =
false;
8718 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8720 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8721 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8723 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
8725 bufferImageGranularityConflict =
true;
8733 if(bufferImageGranularityConflict)
8735 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity);
// The candidate range must not collide with the top of the 1st vector.
8740 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
8741 suballocations1st.back().offset + suballocations1st.back().size :
8743 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
// Also check granularity conflicts against 1st-vector items below.
8747 if(bufferImageGranularity > 1)
8749 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8751 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8752 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8754 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success: fill the request (no items need to be made lost on this path).
8768 pAllocationRequest->offset = resultOffset;
8769 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
8770 pAllocationRequest->sumItemSize = 0;
8772 pAllocationRequest->itemsToMakeLostCount = 0;
// --- Strategy 2: allocate at the end of the 1st vector. ---
8778 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8782 VkDeviceSize resultBaseOffset = 0;
8783 if(!suballocations1st.empty())
8785 const VmaSuballocation& lastSuballoc = suballocations1st.back();
8786 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8790 VkDeviceSize resultOffset = resultBaseOffset;
// Apply margin and alignment moving UP.
8793 if(VMA_DEBUG_MARGIN > 0)
8795 resultOffset += VMA_DEBUG_MARGIN;
8799 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
// Bump up past a granularity page shared with a conflicting previous alloc.
8803 if(bufferImageGranularity > 1 && !suballocations1st.empty())
8805 bool bufferImageGranularityConflict =
false;
8806 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8808 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8809 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8811 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8813 bufferImageGranularityConflict =
true;
8821 if(bufferImageGranularityConflict)
8823 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the bottom of the 2nd stack (or the block end).
8827 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
8828 suballocations2nd.back().offset : size;
8831 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
// Check granularity conflicts against 2nd-stack items above.
8835 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8837 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8839 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8840 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8842 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success.
8856 pAllocationRequest->offset = resultOffset;
8857 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
8858 pAllocationRequest->sumItemSize = 0;
8860 pAllocationRequest->itemsToMakeLostCount = 0;
// --- Strategy 3: wrap around — allocate after the 2nd vector (ring buffer),
// possibly making existing 1st-vector allocations lost to free the space. ---
8867 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8869 VMA_ASSERT(!suballocations1st.empty());
8871 VkDeviceSize resultBaseOffset = 0;
8872 if(!suballocations2nd.empty())
8874 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8875 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8879 VkDeviceSize resultOffset = resultBaseOffset;
8882 if(VMA_DEBUG_MARGIN > 0)
8884 resultOffset += VMA_DEBUG_MARGIN;
8888 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
8892 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8894 bool bufferImageGranularityConflict =
false;
8895 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
8897 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
8898 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8900 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8902 bufferImageGranularityConflict =
true;
8910 if(bufferImageGranularityConflict)
8912 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
8916 pAllocationRequest->itemsToMakeLostCount = 0;
8917 pAllocationRequest->sumItemSize = 0;
8918 size_t index1st = m_1stNullItemsBeginCount;
// Walk 1st-vector items overlapping the candidate range; each lost-capable
// allocation that is old enough (frameInUseCount) can be sacrificed.
8920 if(canMakeOtherLost)
8922 while(index1st < suballocations1st.size() &&
8923 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
8926 const VmaSuballocation& suballoc = suballocations1st[index1st];
8927 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
8933 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
8934 if(suballoc.hAllocation->CanBecomeLost() &&
8935 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8937 ++pAllocationRequest->itemsToMakeLostCount;
8938 pAllocationRequest->sumItemSize += suballoc.size;
// Items on the same granularity page just past the range may also need to go.
8950 if(bufferImageGranularity > 1)
8952 while(index1st < suballocations1st.size())
8954 const VmaSuballocation& suballoc = suballocations1st[index1st];
8955 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
8957 if(suballoc.hAllocation != VK_NULL_HANDLE)
8960 if(suballoc.hAllocation->CanBecomeLost() &&
8961 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8963 ++pAllocationRequest->itemsToMakeLostCount;
8964 pAllocationRequest->sumItemSize += suballoc.size;
// Final fit check: either past all 1st items (strictly inside the block) or
// ending at/before the next surviving 1st item.
8983 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN < size) ||
8984 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
8988 if(bufferImageGranularity > 1)
8990 for(
size_t nextSuballocIndex = index1st;
8991 nextSuballocIndex < suballocations1st.size();
8992 nextSuballocIndex++)
8994 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
8995 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8997 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success: free size spans up to the next 1st item (or block end), minus the
// bytes occupied by items that will be made lost.
9011 pAllocationRequest->offset = resultOffset;
9012 pAllocationRequest->sumFreeSize =
9013 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
9015 - pAllocationRequest->sumItemSize;
// Makes lost the allocations counted in pAllocationRequest (prepared by
// CreateAllocationRequest with canMakeOtherLost): scans the 1st vector from
// its first live item and converts lost-capable allocations to free items
// until itemsToMakeLostCount is reached.
// NOTE(review): braces, the failure `return false;` when MakeLost fails, and
// the trailing success return are elided in this extraction — verify.
9025 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
9026 uint32_t currentFrameIndex,
9027 uint32_t frameInUseCount,
9028 VmaAllocationRequest* pAllocationRequest)
// Nothing to do when the request needed no sacrifices.
9030 if(pAllocationRequest->itemsToMakeLostCount == 0)
// Losing items only happens on the ring-buffer path.
9035 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
9037 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9038 size_t index1st = m_1stNullItemsBeginCount;
9039 size_t madeLostCount = 0;
9040 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
9042 VMA_ASSERT(index1st < suballocations1st.size());
9043 VmaSuballocation& suballoc = suballocations1st[index1st];
9044 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9046 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
9047 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
9048 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Turn the item into a free "null" item and update bookkeeping.
9050 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9051 suballoc.hAllocation = VK_NULL_HANDLE;
9052 m_SumFreeSize += suballoc.size;
9053 ++m_1stNullItemsMiddleCount;
// Makes lost every allocation in both vectors that can become lost and is old
// enough (last use + frameInUseCount < currentFrameIndex). Returns how many
// allocations were lost. Lost items become free "null" items and the
// corresponding null-item counters and m_SumFreeSize are updated.
// NOTE(review): the body of the `if(lostAllocationCount)` at 9105 (likely a
// cleanup/compaction call) is elided in this extraction — verify.
9070 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
9072 uint32_t lostAllocationCount = 0;
// Pass over the 1st vector's live region.
9074 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9075 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
9077 VmaSuballocation& suballoc = suballocations1st[i];
9078 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
9079 suballoc.hAllocation->CanBecomeLost() &&
9080 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9082 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9083 suballoc.hAllocation = VK_NULL_HANDLE;
9084 ++m_1stNullItemsMiddleCount;
9085 m_SumFreeSize += suballoc.size;
9086 ++lostAllocationCount;
// Pass over the whole 2nd vector.
9090 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9091 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
9093 VmaSuballocation& suballoc = suballocations2nd[i];
9094 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
9095 suballoc.hAllocation->CanBecomeLost() &&
9096 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
9098 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9099 suballoc.hAllocation = VK_NULL_HANDLE;
9100 ++m_2ndNullItemsCount;
9101 ++lostAllocationCount;
// NOTE(review): m_SumFreeSize is not updated in this 2nd-vector loop as it is
// in the 1st-vector loop — the line may be elided here; confirm upstream.
9105 if(lostAllocationCount)
9110 return lostAllocationCount;
// Validates the magic-value guard bytes written VMA_DEBUG_MARGIN before and
// immediately after every live allocation in both vectors. Returns
// VK_ERROR_VALIDATION_FAILED_EXT on the first corrupted guard (and asserts).
// NOTE(review): the final success `return VK_SUCCESS;` is elided here — verify.
9113 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
9115 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9116 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
9118 const VmaSuballocation& suballoc = suballocations1st[i];
9119 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Guard region before the allocation.
9121 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
9123 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9124 return VK_ERROR_VALIDATION_FAILED_EXT;
// Guard region after the allocation.
9126 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
9128 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9129 return VK_ERROR_VALIDATION_FAILED_EXT;
// Same checks for the 2nd vector.
9134 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9135 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
9137 const VmaSuballocation& suballoc = suballocations2nd[i];
9138 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9140 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
9142 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9143 return VK_ERROR_VALIDATION_FAILED_EXT;
9145 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
9147 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9148 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed allocation request: records the new
// suballocation in the appropriate vector and updates m_SumFreeSize.
// Placement rules visible here: upper-address requests go to the 2nd vector
// (switching it to DOUBLE_STACK mode); lower-address requests append to the
// 1st vector, or — when they wrap around before the 1st vector's first item —
// to the 2nd vector in RING_BUFFER mode.
// NOTE(review): the `upsideDown`/placement condition and several braces/
// `break` lines are elided in this extraction — verify before modifying.
9156 void VmaBlockMetadata_Linear::Alloc(
9157 const VmaAllocationRequest& request,
9158 VmaSuballocationType type,
9159 VkDeviceSize allocSize,
9163 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
// Upper-address branch: push onto the top (2nd) stack.
9167 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
9168 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
9169 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9170 suballocations2nd.push_back(newSuballoc);
9171 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
// Lower-address branch.
9175 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9178 if(suballocations1st.empty())
9180 suballocations1st.push_back(newSuballoc);
// Request lies past the end of the 1st vector: plain append.
9185 if(request.offset >= suballocations1st.back().offset + suballocations1st.back().size)
9188 VMA_ASSERT(request.offset + allocSize <= GetSize());
9189 suballocations1st.push_back(newSuballoc);
// Request lies before the 1st vector's first live item: wrap-around, use the
// 2nd vector as a ring buffer.
9192 else if(request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset)
9194 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9196 switch(m_2ndVectorMode)
9198 case SECOND_VECTOR_EMPTY:
// First wrap-around allocation in this block.
9200 VMA_ASSERT(suballocations2nd.empty());
9201 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
9203 case SECOND_VECTOR_RING_BUFFER:
9205 VMA_ASSERT(!suballocations2nd.empty());
9207 case SECOND_VECTOR_DOUBLE_STACK:
9208 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
9214 suballocations2nd.push_back(newSuballoc);
// Request matches neither placement: internal error.
9218 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
9223 m_SumFreeSize -= newSuballoc.size;
// Frees an allocation by delegating to FreeAtOffset with its stored offset.
9226 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
9228 FreeAtOffset(allocation->GetOffset());
// VmaBlockMetadata_Linear::FreeAtOffset — frees the suballocation starting at `offset`.
// Fast paths first (first live item of 1st vector, last item of 2nd, last item of 1st),
// then binary search in the middle of either vector. Falls through to an assert if the
// offset is not found. Tokens left byte-identical (lossy extraction); comments only.
9231 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
9233 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9234 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9236 if(!suballocations1st.empty())
// Fast path: freeing the first live item of the 1st vector — mark as a leading null item.
9239 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
9240 if(firstSuballoc.offset == offset)
9242 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9243 firstSuballoc.hAllocation = VK_NULL_HANDLE;
9244 m_SumFreeSize += firstSuballoc.size;
9245 ++m_1stNullItemsBeginCount;
// Fast path: freeing the top of the 2nd vector (ring buffer or double stack).
9252 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
9253 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9255 VmaSuballocation& lastSuballoc = suballocations2nd.back();
9256 if(lastSuballoc.offset == offset)
9258 m_SumFreeSize += lastSuballoc.size;
9259 suballocations2nd.pop_back();
// Fast path: freeing the last item of the 1st vector when there is no 2nd vector.
9265 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
9267 VmaSuballocation& lastSuballoc = suballocations1st.back();
9268 if(lastSuballoc.offset == offset)
9270 m_SumFreeSize += lastSuballoc.size;
9271 suballocations1st.pop_back();
// Slow path: binary search in the middle of the 1st vector (sorted by ascending offset).
9279 VmaSuballocation refSuballoc;
9280 refSuballoc.offset = offset;
9282 SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
9283 suballocations1st.begin() + m_1stNullItemsBeginCount,
9284 suballocations1st.end(),
9286 if(it != suballocations1st.end())
// Mark as a middle null item; cleanup/compaction happens later.
9288 it->type = VMA_SUBALLOCATION_TYPE_FREE;
9289 it->hAllocation = VK_NULL_HANDLE;
9290 ++m_1stNullItemsMiddleCount;
9291 m_SumFreeSize += it->size;
// Slow path: binary search in the 2nd vector. Ring buffer is sorted ascending,
// double stack descending, hence the two comparator instantiations.
9297 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
9300 VmaSuballocation refSuballoc;
9301 refSuballoc.offset = offset;
9303 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
9304 VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
9305 VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
9306 if(it != suballocations2nd.end())
9308 it->type = VMA_SUBALLOCATION_TYPE_FREE;
9309 it->hAllocation = VK_NULL_HANDLE;
9310 ++m_2ndNullItemsCount;
9311 m_SumFreeSize += it->size;
// No matching suballocation anywhere — caller passed a bad offset.
9317 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
9320 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 9322 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
9323 const size_t suballocCount = AccessSuballocations1st().size();
9324 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// VmaBlockMetadata_Linear::CleanupAfterFree — lazily reclaims bookkeeping after frees:
// drops trailing/leading null items, optionally compacts the 1st vector, and when the
// 1st vector drains, promotes the 2nd (ring-buffer) vector to become the new 1st by
// flipping m_1stVectorIndex. Tokens left byte-identical (lossy extraction); comments only.
9327 void VmaBlockMetadata_Linear::CleanupAfterFree()
9329 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9330 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Whole block empty: reset everything to the initial state.
9334 suballocations1st.clear();
9335 suballocations2nd.clear();
9336 m_1stNullItemsBeginCount = 0;
9337 m_1stNullItemsMiddleCount = 0;
9338 m_2ndNullItemsCount = 0;
9339 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9343 const size_t suballoc1stCount = suballocations1st.size();
9344 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
9345 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Reclassify null items at the beginning of 1st vector from "middle" to "begin".
9348 while(m_1stNullItemsBeginCount < suballoc1stCount &&
9349 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9351 ++m_1stNullItemsBeginCount;
9352 --m_1stNullItemsMiddleCount;
// Drop null items from the end of 1st vector.
9356 while(m_1stNullItemsMiddleCount > 0 &&
9357 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
9359 --m_1stNullItemsMiddleCount;
9360 suballocations1st.pop_back();
// Drop null items from the end of 2nd vector.
9364 while(m_2ndNullItemsCount > 0 &&
9365 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
9367 --m_2ndNullItemsCount;
9368 suballocations2nd.pop_back();
// Compact 1st vector in place when the null/live ratio crosses the heuristic threshold.
9371 if(ShouldCompact1st())
9373 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
9374 size_t srcIndex = m_1stNullItemsBeginCount;
9375 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
// Skip null slots; move each live item to its packed position.
9377 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
9381 if(dstIndex != srcIndex)
9383 suballocations1st[dstIndex] = suballocations1st[srcIndex];
9387 suballocations1st.resize(nonNullItemCount);
9388 m_1stNullItemsBeginCount = 0;
9389 m_1stNullItemsMiddleCount = 0;
// 2nd vector fully drained: leave ring/stack mode.
9393 if(suballocations2nd.empty())
9395 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector has no live items left.
9399 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
9401 suballocations1st.clear();
9402 m_1stNullItemsBeginCount = 0;
// Promote the ring-buffer 2nd vector to become the new 1st vector:
// transfer its null-item counters and swap the vector roles via m_1stVectorIndex.
9404 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9407 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9408 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
9409 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
9410 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9412 ++m_1stNullItemsBeginCount;
9413 --m_1stNullItemsMiddleCount;
9415 m_2ndNullItemsCount = 0;
9416 m_1stVectorIndex ^= 1;
9421 VMA_HEAVY_ASSERT(Validate());
// Buddy-allocator metadata: constructor zeroes the per-level free lists; the
// destructor (body not visible in this extraction — presumably deletes the node
// tree; TODO confirm against full source) releases all nodes.
9428 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
9429 VmaBlockMetadata(hAllocator),
9431 m_AllocationCount(0),
// All MAX_LEVELS free lists start empty (front/back pointers null).
9435 memset(m_FreeList, 0,
sizeof(m_FreeList));
9438 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
// VmaBlockMetadata_Buddy::Init — sets up the buddy tree for a block of `size` bytes.
// Only the largest power-of-two portion of the block is usable by the buddy scheme;
// the remainder is reported as "unusable". A single free root node covers level 0.
9443 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
9445 VmaBlockMetadata::Init(size);
// Usable size is the largest power of two <= size.
9447 m_UsableSize = VmaPrevPow2(size);
9448 m_SumFreeSize = m_UsableSize;
// Count levels until nodes would fall below MIN_NODE_SIZE or MAX_LEVELS is hit.
9452 while(m_LevelCount < MAX_LEVELS &&
9453 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
// Root node spans the whole usable range and starts free, with no parent/buddy.
9458 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
9459 rootNode->offset = 0;
9460 rootNode->type = Node::TYPE_FREE;
9461 rootNode->parent = VMA_NULL;
9462 rootNode->buddy = VMA_NULL;
9465 AddToFreeListFront(0, rootNode);
// VmaBlockMetadata_Buddy::Validate — full consistency check of the buddy tree and the
// per-level free lists. VMA_VALIDATE returns false (and asserts in debug) on failure.
9468 bool VmaBlockMetadata_Buddy::Validate()
// Recursively validate the whole tree, accumulating counts into ctx.
const 9471 ValidationContext ctx;
9472 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
9474 VMA_VALIDATE(
false &&
"ValidateNode failed.");
// Tree-derived totals must match the cached counters.
9476 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
9477 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
// Walk each active level's free list: doubly-linked invariants and node types.
9480 for(uint32_t level = 0; level < m_LevelCount; ++level)
9482 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
9483 m_FreeList[level].front->free.prev == VMA_NULL);
9485 for(Node* node = m_FreeList[level].front;
9487 node = node->free.next)
9489 VMA_VALIDATE(node->type == Node::TYPE_FREE);
9491 if(node->free.next == VMA_NULL)
9493 VMA_VALIDATE(m_FreeList[level].back == node);
9497 VMA_VALIDATE(node->free.next->free.prev == node);
// Levels beyond m_LevelCount must stay unused.
9503 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
9505 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
9511 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const 9513 for(uint32_t level = 0; level < m_LevelCount; ++level)
9515 if(m_FreeList[level].front != VMA_NULL)
9517 return LevelToNodeSize(level);
// VmaBlockMetadata_Buddy::CalcAllocationStatInfo — fills `outInfo` by recursing over
// the node tree, then accounts for the tail of the block the buddy scheme cannot use.
// NOTE(review): several statements are missing from this extraction (outInfo
// initialization, the unusable-range accounting body) — confirm against full source.
9523 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 9525 const VkDeviceSize unusableSize = GetUnusableSize();
9536 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
9538 if(unusableSize > 0)
// VmaBlockMetadata_Buddy::AddPoolStats — accumulates this block's totals into pool-wide
// stats. The unusable tail (block size minus usable power-of-two) counts as unused.
9547 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const 9549 const VkDeviceSize unusableSize = GetUnusableSize();
9551 inoutStats.
size += GetSize();
9552 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
// NOTE(review): body of the unusable-range branch is missing from this extraction.
9557 if(unusableSize > 0)
// VmaBlockMetadata_Buddy::PrintDetailedMap — emits this block's layout as JSON:
// header from aggregated stats, then a recursive dump of the node tree, then the
// unusable tail as one final unused range. Compiled only with stats strings enabled.
#if VMA_STATS_STRING_ENABLED 9566 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const 9570 CalcAllocationStatInfo(stat);
9572 PrintDetailedMap_Begin(
9578 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
9580 const VkDeviceSize unusableSize = GetUnusableSize();
9581 if(unusableSize > 0)
9583 PrintDetailedMap_UnusedRange(json,
9588 PrintDetailedMap_End(json);
// VmaBlockMetadata_Buddy::CreateAllocationRequest — finds a free node that can hold the
// request. Searches from the tightest-fitting level upward toward larger nodes; the
// chosen level is smuggled to Alloc() through request.customData. Buddy blocks do not
// support upper-address or lost allocations.
#endif // #if VMA_STATS_STRING_ENABLED 9593 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
9594 uint32_t currentFrameIndex,
9595 uint32_t frameInUseCount,
9596 VkDeviceSize bufferImageGranularity,
9597 VkDeviceSize allocSize,
9598 VkDeviceSize allocAlignment,
9600 VmaSuballocationType allocType,
9601 bool canMakeOtherLost,
9603 VmaAllocationRequest* pAllocationRequest)
9605 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Conservative bufferImageGranularity handling: when the resource kind is unknown or
// optimal-tiled, round both alignment and size up to the granularity.
9609 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
9610 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
9611 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
9613 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
9614 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
// Larger than the usable power-of-two portion: cannot fit at all.
9617 if(allocSize > m_UsableSize)
// Scan levels from tightest fit (targetLevel) up to level 0, taking the first
// free node whose offset satisfies the alignment.
9622 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
9623 for(uint32_t level = targetLevel + 1; level--; )
9625 for(Node* freeNode = m_FreeList[level].front;
9626 freeNode != VMA_NULL;
9627 freeNode = freeNode->free.next)
9629 if(freeNode->offset % allocAlignment == 0)
9631 pAllocationRequest->offset = freeNode->offset;
9632 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
9633 pAllocationRequest->sumItemSize = 0;
9634 pAllocationRequest->itemsToMakeLostCount = 0;
// Remember which level the node was found on for Alloc().
9635 pAllocationRequest->customData = (
void*)(uintptr_t)level;
// Lost-allocation support is a no-op for the buddy algorithm: requests never ask to
// make anything lost (itemsToMakeLostCount is always 0), and MakeAllocationsLost
// has nothing to reclaim (its body is elided in this extraction).
9644 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
9645 uint32_t currentFrameIndex,
9646 uint32_t frameInUseCount,
9647 VmaAllocationRequest* pAllocationRequest)
9653 return pAllocationRequest->itemsToMakeLostCount == 0;
9656 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// VmaBlockMetadata_Buddy::Alloc — commits a request produced by CreateAllocationRequest.
// Locates the chosen free node on the level recorded in request.customData, then splits
// it repeatedly until a node of the exact target level is produced, and marks that node
// as allocated.
9665 void VmaBlockMetadata_Buddy::Alloc(
9666 const VmaAllocationRequest& request,
9667 VmaSuballocationType type,
9668 VkDeviceSize allocSize,
9672 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
9673 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Find the node with the requested offset on that level's free list.
9675 Node* currNode = m_FreeList[currLevel].front;
9676 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
9677 while(currNode->offset != request.offset)
9679 currNode = currNode->free.next;
9680 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split down: each iteration turns currNode into a SPLIT node with two FREE children.
9684 while(currLevel < targetLevel)
// The split node is no longer free.
9688 RemoveFromFreeList(currLevel, currNode);
9690 const uint32_t childrenLevel = currLevel + 1;
9693 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
9694 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
9696 leftChild->offset = currNode->offset;
9697 leftChild->type = Node::TYPE_FREE;
9698 leftChild->parent = currNode;
9699 leftChild->buddy = rightChild;
9701 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
9702 rightChild->type = Node::TYPE_FREE;
9703 rightChild->parent = currNode;
9704 rightChild->buddy = leftChild;
9707 currNode->type = Node::TYPE_SPLIT;
9708 currNode->split.leftChild = leftChild;
// Push children so the left child ends up at the front and is picked next.
9711 AddToFreeListFront(childrenLevel, rightChild);
9712 AddToFreeListFront(childrenLevel, leftChild);
9717 currNode = m_FreeList[currLevel].front;
// We must have reached the target level with a free node of the right offset.
9726 VMA_ASSERT(currLevel == targetLevel &&
9727 currNode != VMA_NULL &&
9728 currNode->type == Node::TYPE_FREE);
9729 RemoveFromFreeList(currLevel, currNode);
9732 currNode->type = Node::TYPE_ALLOCATION;
9733 currNode->allocation.alloc = hAllocation;
9735 ++m_AllocationCount;
9737 m_SumFreeSize -= allocSize;
9740 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
9742 if(node->type == Node::TYPE_SPLIT)
9744 DeleteNode(node->split.leftChild->buddy);
9745 DeleteNode(node->split.leftChild);
9748 vma_delete(GetAllocationCallbacks(), node);
// VmaBlockMetadata_Buddy::ValidateNode — recursive per-node consistency check used by
// Validate(). Verifies parent/buddy links, per-type invariants, and accumulates
// free-size / allocation counts into ctx. VMA_VALIDATE returns false on failure.
9751 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const 9753 VMA_VALIDATE(level < m_LevelCount);
9754 VMA_VALIDATE(curr->parent == parent);
// Only the root has no buddy; buddies must point at each other.
9755 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
9756 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
9759 case Node::TYPE_FREE:
// Free node contributes its entire level size.
9761 ctx.calculatedSumFreeSize += levelNodeSize;
9762 ++ctx.calculatedFreeCount;
9764 case Node::TYPE_ALLOCATION:
// Allocated node may be larger than the allocation; the slack counts as free.
9765 ++ctx.calculatedAllocationCount;
9766 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
9767 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
9769 case Node::TYPE_SPLIT:
// Recurse into both children; right child sits at offset + half the node size.
9771 const uint32_t childrenLevel = level + 1;
9772 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
9773 const Node*
const leftChild = curr->split.leftChild;
9774 VMA_VALIDATE(leftChild != VMA_NULL);
9775 VMA_VALIDATE(leftChild->offset == curr->offset);
9776 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
9778 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
9780 const Node*
const rightChild = leftChild->buddy;
9781 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
9782 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
9784 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
9795 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const 9799 VkDeviceSize currLevelNodeSize = m_UsableSize;
9800 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
9801 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
9804 currLevelNodeSize = nextLevelNodeSize;
9805 nextLevelNodeSize = currLevelNodeSize >> 1;
// VmaBlockMetadata_Buddy::FreeAtOffset — frees the allocation at `offset`: descends the
// tree from the root choosing left/right by offset, marks the leaf free, then merges
// free buddy pairs upward as far as possible and re-inserts the surviving node into
// its level's free list. `alloc` may be VK_NULL_HANDLE (then only offset is checked).
9810 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
// Descend from the root to the allocated leaf covering `offset`.
9813 Node* node = m_Root;
9814 VkDeviceSize nodeOffset = 0;
9816 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
9817 while(node->type == Node::TYPE_SPLIT)
9819 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
9820 if(offset < nodeOffset + nextLevelSize)
9822 node = node->split.leftChild;
9826 node = node->split.leftChild->buddy;
9827 nodeOffset += nextLevelSize;
9830 levelNodeSize = nextLevelSize;
9833 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
9834 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
9837 --m_AllocationCount;
9838 m_SumFreeSize += alloc->GetSize();
9840 node->type = Node::TYPE_FREE;
// Merge with the buddy while it is also free: both children are destroyed and the
// parent becomes the free node one level up.
9843 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
9845 RemoveFromFreeList(level, node->buddy);
9846 Node*
const parent = node->parent;
9848 vma_delete(GetAllocationCallbacks(), node->buddy);
9849 vma_delete(GetAllocationCallbacks(), node);
9850 parent->type = Node::TYPE_FREE;
// Publish the (possibly merged) free node.
9858 AddToFreeListFront(level, node);
// VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode — recursive helper for
// CalcAllocationStatInfo: free nodes count as unused ranges, allocated nodes as
// allocations (their slack as an extra unused range), split nodes recurse into both
// children. NOTE(review): several accumulation statements are elided in this extraction.
9861 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const 9865 case Node::TYPE_FREE:
9871 case Node::TYPE_ALLOCATION:
9873 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Slack between the node size and the actual allocation is unused.
9879 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
9880 if(unusedRangeSize > 0)
9889 case Node::TYPE_SPLIT:
9891 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
9892 const Node*
const leftChild = node->split.leftChild;
9893 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
9894 const Node*
const rightChild = leftChild->buddy;
9895 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
9903 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
9905 VMA_ASSERT(node->type == Node::TYPE_FREE);
9908 Node*
const frontNode = m_FreeList[level].front;
9909 if(frontNode == VMA_NULL)
9911 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
9912 node->free.prev = node->free.next = VMA_NULL;
9913 m_FreeList[level].front = m_FreeList[level].back = node;
9917 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
9918 node->free.prev = VMA_NULL;
9919 node->free.next = frontNode;
9920 frontNode->free.prev = node;
9921 m_FreeList[level].front = node;
9925 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
9927 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
9930 if(node->free.prev == VMA_NULL)
9932 VMA_ASSERT(m_FreeList[level].front == node);
9933 m_FreeList[level].front = node->free.next;
9937 Node*
const prevFreeNode = node->free.prev;
9938 VMA_ASSERT(prevFreeNode->free.next == node);
9939 prevFreeNode->free.next = node->free.next;
9943 if(node->free.next == VMA_NULL)
9945 VMA_ASSERT(m_FreeList[level].back == node);
9946 m_FreeList[level].back = node->free.prev;
9950 Node*
const nextFreeNode = node->free.next;
9951 VMA_ASSERT(nextFreeNode->free.prev == node);
9952 nextFreeNode->free.prev = node->free.prev;
// VmaBlockMetadata_Buddy::PrintDetailedMapNode — recursive JSON dump of one node:
// free nodes print as unused ranges, allocated nodes print the allocation plus any
// slack, split nodes recurse into both children. Stats-string builds only.
#if VMA_STATS_STRING_ENABLED 9957 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const 9961 case Node::TYPE_FREE:
9962 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
9964 case Node::TYPE_ALLOCATION:
9966 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
9967 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Report padding between the allocation end and the node end as unused.
9968 if(allocSize < levelNodeSize)
9970 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
9974 case Node::TYPE_SPLIT:
9976 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
9977 const Node*
const leftChild = node->split.leftChild;
9978 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
9979 const Node*
const rightChild = leftChild->buddy;
9980 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
// VmaDeviceMemoryBlock constructor — members start in an "uninitialized" state;
// the real setup (memory handle, metadata object) happens in Init().
#endif // #if VMA_STATS_STRING_ENABLED 9993 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
9994 m_pMetadata(VMA_NULL),
9995 m_MemoryTypeIndex(UINT32_MAX),
9997 m_hMemory(VK_NULL_HANDLE),
9999 m_pMappedData(VMA_NULL)
// VmaDeviceMemoryBlock::Init — adopts a freshly allocated VkDeviceMemory and creates
// the metadata object matching the requested algorithm (linear, buddy, or the
// default generic best-fit). May be called only once per block.
10003 void VmaDeviceMemoryBlock::Init(
10005 uint32_t newMemoryTypeIndex,
10006 VkDeviceMemory newMemory,
10007 VkDeviceSize newSize,
10009 uint32_t algorithm)
10011 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
10013 m_MemoryTypeIndex = newMemoryTypeIndex;
10015 m_hMemory = newMemory;
// Algorithm dispatch (switch labels elided in this extraction): linear / buddy /
// generic metadata implementations share the VmaBlockMetadata interface.
10020 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
10023 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
10029 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
10031 m_pMetadata->Init(newSize);
10034 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
10038 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
10040 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
10041 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
10042 m_hMemory = VK_NULL_HANDLE;
10044 vma_delete(allocator, m_pMetadata);
10045 m_pMetadata = VMA_NULL;
// VmaDeviceMemoryBlock::Validate — sanity-checks the block handle and size, then
// delegates to the metadata's own validation. (VMA_VALIDATE stringifies its argument
// into the failure message, so the expression is left untouched.)
10048 bool VmaDeviceMemoryBlock::Validate()
const 10050 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
10051 (m_pMetadata->GetSize() != 0));
10053 return m_pMetadata->Validate();
10056 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
10058 void* pData =
nullptr;
10059 VkResult res = Map(hAllocator, 1, &pData);
10060 if(res != VK_SUCCESS)
10065 res = m_pMetadata->CheckCorruption(pData);
10067 Unmap(hAllocator, 1);
// VmaDeviceMemoryBlock::Map — reference-counted persistent mapping. Increments the
// map count under the block mutex; calls vkMapMemory only on the first mapping and
// hands out the cached pointer afterwards. ppData may be null when the caller only
// wants to pin the mapping. NOTE(review): the count==0 early-out and part of the
// vkMapMemory argument list are elided in this extraction.
10072 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
10079 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10080 if(m_MapCount != 0)
// Already mapped: just bump the count and return the cached pointer.
10082 m_MapCount += count;
10083 VMA_ASSERT(m_pMappedData != VMA_NULL);
10084 if(ppData != VMA_NULL)
10086 *ppData = m_pMappedData;
// First mapping: call into Vulkan through the function-pointer table.
10092 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
10093 hAllocator->m_hDevice,
10099 if(result == VK_SUCCESS)
10101 if(ppData != VMA_NULL)
10103 *ppData = m_pMappedData;
10105 m_MapCount = count;
10111 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
10118 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10119 if(m_MapCount >= count)
10121 m_MapCount -= count;
10122 if(m_MapCount == 0)
10124 m_pMappedData = VMA_NULL;
10125 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
10130 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation — corruption-detection helper:
// maps the block and writes the magic marker into the debug margins directly before
// and after the allocation. Only meaningful when VMA_DEBUG_MARGIN and
// VMA_DEBUG_DETECT_CORRUPTION are enabled (asserted).
10134 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
10136 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
10137 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
10140 VkResult res = Map(hAllocator, 1, &pData);
10141 if(res != VK_SUCCESS)
// Marker before the allocation (inside the leading margin) and right after its end.
10146 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
10147 VmaWriteMagicValue(pData, allocOffset + allocSize);
10149 Unmap(hAllocator, 1);
// VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation — counterpart of
// WriteMagicValueAroundAllocation, run when an allocation is freed: re-reads both
// magic markers and asserts if either was overwritten (heap-overflow style corruption).
10154 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
10156 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
10157 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
10160 VkResult res = Map(hAllocator, 1, &pData);
10161 if(res != VK_SUCCESS)
// Check the margin before the allocation, then the marker after it.
10166 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
10168 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
10170 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
10172 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
10175 Unmap(hAllocator, 1);
// VmaDeviceMemoryBlock::BindBufferMemory — binds a VkBuffer to this block's memory at
// the allocation's offset. Serialized with the block mutex because vkBindBufferMemory
// must not race with map/unmap calls on the same VkDeviceMemory.
10180 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
10185 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
10186 hAllocation->GetBlock() ==
this);
// This lock is important so that we don't call vkBind... and/or vkMap... simultaneously on the same VkDeviceMemory from multiple threads.
10188 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10189 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
10190 hAllocator->m_hDevice,
10193 hAllocation->GetOffset());
// VmaDeviceMemoryBlock::BindImageMemory — image counterpart of BindBufferMemory:
// binds a VkImage to this block's memory at the allocation's offset, under the
// same mutex protection.
10196 VkResult VmaDeviceMemoryBlock::BindImageMemory(
10201 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
10202 hAllocation->GetBlock() ==
this);
10204 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10205 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
10206 hAllocator->m_hDevice,
10209 hAllocation->GetOffset());
// Fragments of two static stat helpers. The memset belongs to a stat-info
// initializer whose signature is missing from this extraction; VmaPostprocessCalcStatInfo
// presumably derives averages from the accumulated totals — TODO confirm, its body
// is not visible here.
10214 memset(&outInfo, 0,
sizeof(outInfo));
10233 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool_T constructor — forwards the pool's create-info into its embedded
// VmaBlockVector. A zero blockSize means "use the allocator's preferred size";
// a non-zero blockSize marks the size as explicit (no auto-shrinking).
// The destructor body is not visible in this extraction.
10241 VmaPool_T::VmaPool_T(
10244 VkDeviceSize preferredBlockSize) :
10247 createInfo.memoryTypeIndex,
10248 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
10249 createInfo.minBlockCount,
10250 createInfo.maxBlockCount,
10252 createInfo.frameInUseCount,
// explicitBlockSize flag: user pinned the block size.
10254 createInfo.blockSize != 0,
10260 VmaPool_T::~VmaPool_T()
// VmaBlockVector constructor — stores the configuration for one memory type's block
// list (preferred/min/max block counts and sizes, granularity, algorithm) and creates
// the empty block vector using the allocator's allocation callbacks.
#if VMA_STATS_STRING_ENABLED 10266 #endif // #if VMA_STATS_STRING_ENABLED 10268 VmaBlockVector::VmaBlockVector(
10270 uint32_t memoryTypeIndex,
10271 VkDeviceSize preferredBlockSize,
10272 size_t minBlockCount,
10273 size_t maxBlockCount,
10274 VkDeviceSize bufferImageGranularity,
10275 uint32_t frameInUseCount,
10277 bool explicitBlockSize,
10278 uint32_t algorithm) :
10279 m_hAllocator(hAllocator),
10280 m_MemoryTypeIndex(memoryTypeIndex),
10281 m_PreferredBlockSize(preferredBlockSize),
10282 m_MinBlockCount(minBlockCount),
10283 m_MaxBlockCount(maxBlockCount),
10284 m_BufferImageGranularity(bufferImageGranularity),
10285 m_FrameInUseCount(frameInUseCount),
10286 m_IsCustomPool(isCustomPool),
10287 m_ExplicitBlockSize(explicitBlockSize),
10288 m_Algorithm(algorithm),
10289 m_HasEmptyBlock(false),
10290 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
10291 m_pDefragmentator(VMA_NULL),
10296 VmaBlockVector::~VmaBlockVector()
10298 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
10300 for(
size_t i = m_Blocks.size(); i--; )
10302 m_Blocks[i]->Destroy(m_hAllocator);
10303 vma_delete(m_hAllocator, m_Blocks[i]);
10307 VkResult VmaBlockVector::CreateMinBlocks()
10309 for(
size_t i = 0; i < m_MinBlockCount; ++i)
10311 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
10312 if(res != VK_SUCCESS)
// VmaBlockVector::GetPoolStats — aggregates statistics over all blocks under the
// vector mutex. NOTE(review): the pStats zero-initialization lines are elided in
// this extraction — confirm against full source before relying on output state.
10320 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
10322 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10324 const size_t blockCount = m_Blocks.size();
10333 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
10335 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10336 VMA_ASSERT(pBlock);
10337 VMA_HEAVY_ASSERT(pBlock->Validate());
// Each metadata implementation adds its own size/unused totals.
10338 pBlock->m_pMetadata->AddPoolStats(*pStats);
10342 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 10344 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
10345 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
10346 (VMA_DEBUG_MARGIN > 0) &&
10347 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
// Upper bound on "make other allocations lost" retry passes in VmaBlockVector::Allocate.
10350 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// VmaBlockVector::Allocate — main suballocation entry for one memory type:
//  1) try existing blocks (last block first, then forward or backward scan),
//  2) create a new block (with progressive size halving unless the size is explicit),
//  3) optionally retry up to VMA_ALLOCATION_TRY_COUNT times while making other
//     allocations "lost" to carve out room.
// Tokens left byte-identical — this extraction is missing many structural lines.
10352 VkResult VmaBlockVector::Allocate(
10354 uint32_t currentFrameIndex,
10356 VkDeviceSize alignment,
10358 VmaSuballocationType suballocType,
10365 const bool canCreateNewBlock =
10367 (m_Blocks.size() < m_MaxBlockCount);
// Lost allocations are unsupported in some configurations — drop the flag.
10374 canMakeOtherLost =
false;
// Upper-address is only valid for the linear algorithm.
10378 if(isUpperAddress &&
10381 return VK_ERROR_FEATURE_NOT_PRESENT;
10395 return VK_ERROR_FEATURE_NOT_PRESENT;
// Request (plus both debug margins) larger than a whole block can never succeed.
10399 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
10401 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
10404 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10411 if(!canMakeOtherLost || canCreateNewBlock)
// 1. Search existing blocks. Try the last (most recently used/created) block first.
10420 if(!m_Blocks.empty())
10422 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
10423 VMA_ASSERT(pCurrBlock);
10424 VkResult res = AllocateFromBlock(
10435 if(res == VK_SUCCESS)
10437 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
// Forward scan: smaller blocks first (one of two strategy-dependent orders).
10447 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
10449 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10450 VMA_ASSERT(pCurrBlock);
10451 VkResult res = AllocateFromBlock(
10462 if(res == VK_SUCCESS)
10464 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// Backward scan: larger blocks first (the alternative strategy).
10472 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
10474 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10475 VMA_ASSERT(pCurrBlock);
10476 VkResult res = AllocateFromBlock(
10487 if(res == VK_SUCCESS)
10489 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 2. Try to create a new block.
10497 if(canCreateNewBlock)
10500 VkDeviceSize newBlockSize = m_PreferredBlockSize;
10501 uint32_t newBlockSizeShift = 0;
10502 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
10504 if(!m_ExplicitBlockSize)
// Pre-shrink: start smaller than preferred if existing blocks are small and
// the request still fits with 2x headroom (up to 3 halvings).
10507 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
10508 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
10510 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
10511 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
10513 newBlockSize = smallerNewBlockSize;
10514 ++newBlockSizeShift;
10523 size_t newBlockIndex = 0;
10524 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On device-memory failure, retry with progressively halved sizes.
10526 if(!m_ExplicitBlockSize)
10528 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
10530 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
10531 if(smallerNewBlockSize >= size)
10533 newBlockSize = smallerNewBlockSize;
10534 ++newBlockSizeShift;
10535 res = CreateBlock(newBlockSize, &newBlockIndex);
10544 if(res == VK_SUCCESS)
10546 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
10547 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
10549 res = AllocateFromBlock(
10560 if(res == VK_SUCCESS)
10562 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
// Fresh block but the allocation still failed: give up.
10568 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// 3. Try to allocate from existing blocks while making other allocations lost.
10575 if(canMakeOtherLost)
10577 uint32_t tryIndex = 0;
10578 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
10580 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
10581 VmaAllocationRequest bestRequest = {};
10582 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Forward scan for the cheapest (fewest bytes lost) request.
10588 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
10590 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10591 VMA_ASSERT(pCurrBlock);
10592 VmaAllocationRequest currRequest = {};
10593 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
10596 m_BufferImageGranularity,
10605 const VkDeviceSize currRequestCost = currRequest.CalcCost();
10606 if(pBestRequestBlock == VMA_NULL ||
10607 currRequestCost < bestRequestCost)
10609 pBestRequestBlock = pCurrBlock;
10610 bestRequest = currRequest;
10611 bestRequestCost = currRequestCost;
// Zero cost cannot be beaten — stop searching early.
10613 if(bestRequestCost == 0)
// Backward scan variant of the same search.
10624 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
10626 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10627 VMA_ASSERT(pCurrBlock);
10628 VmaAllocationRequest currRequest = {};
10629 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
10632 m_BufferImageGranularity,
10641 const VkDeviceSize currRequestCost = currRequest.CalcCost();
10642 if(pBestRequestBlock == VMA_NULL ||
10643 currRequestCost < bestRequestCost ||
10646 pBestRequestBlock = pCurrBlock;
10647 bestRequest = currRequest;
10648 bestRequestCost = currRequestCost;
10650 if(bestRequestCost == 0 ||
10660 if(pBestRequestBlock != VMA_NULL)
// Keep the block persistently mapped if the allocation requires it.
10664 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
10665 if(res != VK_SUCCESS)
// Commit: the doomed allocations may already be gone by now — verify.
10671 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
10677 if(pBestRequestBlock->m_pMetadata->IsEmpty())
10679 m_HasEmptyBlock =
false;
10682 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
10683 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, isUpperAddress, *pAllocation);
10684 (*pAllocation)->InitBlockAllocation(
10687 bestRequest.offset,
10693 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
10694 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
10695 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
10696 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
10698 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
10700 if(IsCorruptionDetectionEnabled())
10702 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
10703 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// All retries exhausted: making allocations lost kept being raced/invalidated.
10718 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
10720 return VK_ERROR_TOO_MANY_OBJECTS;
10724 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// VmaBlockVector::Free — returns a block allocation: optional corruption check,
// unmap if persistently mapped, metadata free, then empty-block retention policy
// (keep at most one empty block above m_MinBlockCount). The Vulkan memory release
// for a deleted block happens outside the mutex.
10727 void VmaBlockVector::Free(
10730 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope for lock.
10734 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10736 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
10738 if(IsCorruptionDetectionEnabled())
10740 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
10741 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
// Balance the implicit Map() done for persistently mapped allocations.
10744 if(hAllocation->IsPersistentMap())
10746 pBlock->Unmap(m_hAllocator, 1);
10749 pBlock->m_pMetadata->Free(hAllocation);
10750 VMA_HEAVY_ASSERT(pBlock->Validate());
10752 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// The block just became empty.
10755 if(pBlock->m_pMetadata->IsEmpty())
// Already have one empty block in reserve: delete this one (if above minimum).
10758 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
10760 pBlockToDelete = pBlock;
// Otherwise keep it as the single reserved empty block.
10766 m_HasEmptyBlock =
true;
// Block not empty, but there was an empty one: if the (sorted) last block is the
// empty one and we are above the minimum, delete it now.
10771 else if(m_HasEmptyBlock)
10773 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
10774 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
10776 pBlockToDelete = pLastBlock;
10777 m_Blocks.pop_back();
10778 m_HasEmptyBlock =
false;
10782 IncrementallySortBlocks();
// Destroy outside the mutex: freeing VkDeviceMemory can be slow.
10787 if(pBlockToDelete != VMA_NULL)
10789 VMA_DEBUG_LOG(
" Deleted empty allocation");
10790 pBlockToDelete->Destroy(m_hAllocator);
10791 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the size of the largest existing block, scanning newest-first and
// (per the comparison below) presumably breaking out early once a block of at
// least m_PreferredBlockSize is found — the break/return lines are elided here.
10795 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 10797 VkDeviceSize result = 0;
10798 for(
size_t i = m_Blocks.size(); i--; )
10800 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
10801 if(result >= m_PreferredBlockSize)
// Removes pBlock from m_Blocks by linear search (the post-loop assert/return
// lines are elided in this extraction).
10809 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
10811 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10813 if(m_Blocks[blockIndex] == pBlock)
10815 VmaVectorRemove(m_Blocks, blockIndex);
// Performs one pass of bubble-sort ordering blocks by ascending free size, so
// blocks with less free space are tried first on allocation.
10822 void VmaBlockVector::IncrementallySortBlocks()
10827 for(
size_t i = 1; i < m_Blocks.size(); ++i)
10829 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
10831 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i])
// Attempts to suballocate from a single given block. On success, creates the
// VmaAllocation_T, optionally maps the block, fills debug patterns and magic
// values; otherwise falls through to VK_ERROR_OUT_OF_DEVICE_MEMORY.
// NOTE(review): many parameter and brace lines are elided by the extraction.
10838 VkResult VmaBlockVector::AllocateFromBlock(
10839 VmaDeviceMemoryBlock* pBlock,
10841 uint32_t currentFrameIndex,
10843 VkDeviceSize alignment,
10846 VmaSuballocationType suballocType,
10855 VmaAllocationRequest currRequest = {};
10856 if(pBlock->m_pMetadata->CreateAllocationRequest(
10859 m_BufferImageGranularity,
// This path never sacrifices ("loses") other allocations.
10869 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Map once on behalf of the new allocation when persistent mapping is requested.
10873 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
10874 if(res != VK_SUCCESS)
// The block is about to gain an allocation, so it is no longer empty.
10881 if(pBlock->m_pMetadata->IsEmpty())
10883 m_HasEmptyBlock =
false;
10886 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
10887 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, isUpperAddress, *pAllocation);
10888 (*pAllocation)->InitBlockAllocation(
10891 currRequest.offset,
10897 VMA_HEAVY_ASSERT(pBlock->Validate());
10898 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
// Optional debug aids: fill pattern on create, magic values for corruption checks.
10899 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
10901 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
10903 if(IsCorruptionDetectionEnabled())
10905 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
10906 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
10910 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates a new VkDeviceMemory of blockSize for this vector's memory type,
// wraps it in a VmaDeviceMemoryBlock, appends it to m_Blocks, and optionally
// reports its index. (The error-return and pBlock->Init lines are elided.)
10913 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
10915 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
10916 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
10917 allocInfo.allocationSize = blockSize;
10918 VkDeviceMemory mem = VK_NULL_HANDLE;
// Goes through the allocator so heap limits and device-memory callbacks apply.
10919 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
10928 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
10933 allocInfo.allocationSize,
10937 m_Blocks.push_back(pBlock);
10938 if(pNewBlockIndex != VMA_NULL)
10940 *pNewBlockIndex = m_Blocks.size() - 1;
// Serializes this block vector as a JSON object (stats-string support): memory
// type, block size/count limits, frame-in-use count, algorithm, and a map of
// per-block detailed metadata keyed by block id. Holds the vector mutex.
#if VMA_STATS_STRING_ENABLED 10948 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
10950 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10952 json.BeginObject();
10956 json.WriteString(
"MemoryTypeIndex");
10957 json.WriteNumber(m_MemoryTypeIndex);
10959 json.WriteString(
"BlockSize");
10960 json.WriteNumber(m_PreferredBlockSize);
10962 json.WriteString(
"BlockCount");
// Nested object with Min/Max bounds only when they are non-default.
10963 json.BeginObject(
true);
10964 if(m_MinBlockCount > 0)
10966 json.WriteString(
"Min");
10967 json.WriteNumber((uint64_t)m_MinBlockCount);
10969 if(m_MaxBlockCount < SIZE_MAX)
10971 json.WriteString(
"Max");
10972 json.WriteNumber((uint64_t)m_MaxBlockCount);
10974 json.WriteString(
"Cur");
10975 json.WriteNumber((uint64_t)m_Blocks.size());
10978 if(m_FrameInUseCount > 0)
10980 json.WriteString(
"FrameInUseCount");
10981 json.WriteNumber(m_FrameInUseCount);
10984 if(m_Algorithm != 0)
10986 json.WriteString(
"Algorithm");
10987 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
10992 json.WriteString(
"PreferredBlockSize");
10993 json.WriteNumber(m_PreferredBlockSize);
10996 json.WriteString(
"Blocks");
10997 json.BeginObject();
10998 for(
size_t i = 0; i < m_Blocks.size(); ++i)
// Each block keyed by its numeric id, rendered as a JSON string.
11000 json.BeginString();
11001 json.ContinueString(m_Blocks[i]->GetId());
11004 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// Lazily creates (on first use) and returns the defragmentator for this vector.
#endif // #if VMA_STATS_STRING_ENABLED 11013 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
11015 uint32_t currentFrameIndex)
11017 if(m_pDefragmentator == VMA_NULL)
11019 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
11022 currentFrameIndex);
11025 return m_pDefragmentator;
// Runs the defragmentator (if one was created), accumulates moved byte/alloc
// counts into pDefragmentationStats, decrements the remaining budgets, then
// frees blocks that became empty (down to m_MinBlockCount). Holds the mutex.
11028 VkResult VmaBlockVector::Defragment(
11030 VkDeviceSize& maxBytesToMove,
11031 uint32_t& maxAllocationsToMove)
11033 if(m_pDefragmentator == VMA_NULL)
11038 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11041 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
11044 if(pDefragmentationStats != VMA_NULL)
11046 const VkDeviceSize bytesMoved = m_pDefragmentator->GetBytesMoved();
11047 const uint32_t allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
11048 pDefragmentationStats->
bytesMoved += bytesMoved;
// Budgets are in/out parameters: reduce them by what this round consumed.
11050 VMA_ASSERT(bytesMoved <= maxBytesToMove);
11051 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
11052 maxBytesToMove -= bytesMoved;
11053 maxAllocationsToMove -= allocationsMoved;
// Recompute m_HasEmptyBlock while destroying now-empty blocks (iterating
// backwards so VmaVectorRemove stays valid).
11057 m_HasEmptyBlock =
false;
11058 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
11060 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
11061 if(pBlock->m_pMetadata->IsEmpty())
11063 if(m_Blocks.size() > m_MinBlockCount)
11065 if(pDefragmentationStats != VMA_NULL)
11068 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
11071 VmaVectorRemove(m_Blocks, blockIndex);
11072 pBlock->Destroy(m_hAllocator);
11073 vma_delete(m_hAllocator, pBlock);
// An empty block retained due to m_MinBlockCount is recorded here.
11077 m_HasEmptyBlock =
true;
11085 void VmaBlockVector::DestroyDefragmentator()
11087 if(m_pDefragmentator != VMA_NULL)
11089 vma_delete(m_hAllocator, m_pDefragmentator);
11090 m_pDefragmentator = VMA_NULL;
// Marks eligible allocations in every block as "lost" for the given frame,
// summing the count; reports it through the optional out-parameter.
11094 void VmaBlockVector::MakePoolAllocationsLost(
11095 uint32_t currentFrameIndex,
11096 size_t* pLostAllocationCount)
11098 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11099 size_t lostAllocationCount = 0;
11100 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11102 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11103 VMA_ASSERT(pBlock);
11104 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
11106 if(pLostAllocationCount != VMA_NULL)
11108 *pLostAllocationCount = lostAllocationCount;
// Validates the corruption-detection magic values in every block. Returns
// VK_ERROR_FEATURE_NOT_PRESENT when corruption detection is not enabled; the
// per-block failure return and final success return are elided here.
11112 VkResult VmaBlockVector::CheckCorruption()
11114 if(!IsCorruptionDetectionEnabled())
11116 return VK_ERROR_FEATURE_NOT_PRESENT;
11119 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11120 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11122 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11123 VMA_ASSERT(pBlock);
11124 VkResult res = pBlock->CheckCorruption(m_hAllocator);
11125 if(res != VK_SUCCESS)
// Accumulates per-block statistics into pStats three ways: the grand total,
// this memory type's bucket, and the owning heap's bucket.
11133 void VmaBlockVector::AddStats(
VmaStats* pStats)
11135 const uint32_t memTypeIndex = m_MemoryTypeIndex;
11136 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
11138 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11140 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11142 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11143 VMA_ASSERT(pBlock);
11144 VMA_HEAVY_ASSERT(pBlock->Validate());
// allocationStatInfo is declared on an elided line just above this call.
11146 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
11147 VmaAddStatInfo(pStats->
total, allocationStatInfo);
11148 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
11149 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: binds the defragmentator to one block vector and frame index.
// Defragmentation only supports the default (0) allocation algorithm.
11156 VmaDefragmentator::VmaDefragmentator(
11158 VmaBlockVector* pBlockVector,
11159 uint32_t currentFrameIndex) :
11160 m_hAllocator(hAllocator),
11161 m_pBlockVector(pBlockVector),
11162 m_CurrentFrameIndex(currentFrameIndex),
11164 m_AllocationsMoved(0),
// Both containers use the allocator's custom allocation callbacks.
11165 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
11166 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
11168 VMA_ASSERT(pBlockVector->GetAlgorithm() == 0);
// Destructor: deletes every owned BlockInfo (iterating backwards).
11171 VmaDefragmentator::~VmaDefragmentator()
11173 for(
size_t i = m_Blocks.size(); i--; )
11175 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers one allocation as a defragmentation candidate; pChanged (may be
// null) will be set to VK_TRUE if the allocation is later moved.
11179 void VmaDefragmentator::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
11181 AllocationInfo allocInfo;
11182 allocInfo.m_hAllocation = hAlloc;
11183 allocInfo.m_pChanged = pChanged;
11184 m_Allocations.push_back(allocInfo);
// Returns a CPU pointer to the block's memory, in priority order: a mapping
// this defragmentator already made, an existing (user) mapping, or a fresh
// Map() whose pointer is cached for Unmap() later.
11187 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData)
11190 if(m_pMappedDataForDefragmentation)
11192 *ppMappedData = m_pMappedDataForDefragmentation;
11197 if(m_pBlock->GetMappedData())
11199 *ppMappedData = m_pBlock->GetMappedData();
// Map here and remember that this object owns one map reference.
11204 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
11205 *ppMappedData = m_pMappedDataForDefragmentation;
// Releases the map reference taken by EnsureMapping(), if any. Pre-existing
// user mappings are left untouched.
11209 void VmaDefragmentator::BlockInfo::Unmap(
VmaAllocator hAllocator)
11211 if(m_pMappedDataForDefragmentation != VMA_NULL)
11213 m_pBlock->Unmap(hAllocator, 1);
// One pass of defragmentation: walks allocations from the last block backwards
// and tries to re-place each into an earlier block (or earlier offset) via
// memcpy between mapped blocks, respecting the byte/allocation budgets.
// Returns VK_INCOMPLETE when a budget would be exceeded.
11217 VkResult VmaDefragmentator::DefragmentRound(
11218 VkDeviceSize maxBytesToMove,
11219 uint32_t maxAllocationsToMove)
11221 if(m_Blocks.empty())
// Cursor starts at the last allocation of the last block; SIZE_MAX means
// "reset to the end of the current block" in the loop below.
11226 size_t srcBlockIndex = m_Blocks.size() - 1;
11227 size_t srcAllocIndex = SIZE_MAX;
11233 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
11235 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
11238 if(srcBlockIndex == 0)
11245 srcAllocIndex = SIZE_MAX;
11250 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
11254 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
11255 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
11257 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
11258 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
11259 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
11260 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try every destination block up to and including the source block.
11263 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
11265 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
11266 VmaAllocationRequest dstAllocRequest;
11267 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
11268 m_CurrentFrameIndex,
11269 m_pBlockVector->GetFrameInUseCount(),
11270 m_pBlockVector->GetBufferImageGranularity(),
11277 &dstAllocRequest) &&
// Only move if it actually compacts memory (earlier block / lower offset).
11279 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
11281 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Stop before exceeding either budget.
11284 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
11285 (m_BytesMoved + size > maxBytesToMove))
11287 return VK_INCOMPLETE;
11290 void* pDstMappedData = VMA_NULL;
11291 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
11292 if(res != VK_SUCCESS)
11297 void* pSrcMappedData = VMA_NULL;
11298 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
11299 if(res != VK_SUCCESS)
// The actual data move (memcpy call line itself is elided here).
11306 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
11307 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
11308 static_cast<size_t>(size));
// Re-stamp debug margins around the new location.
11310 if(VMA_DEBUG_MARGIN > 0)
11312 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset - VMA_DEBUG_MARGIN);
11313 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset + size);
// Commit: allocate at destination metadata, free at source, repoint the handle.
11316 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
11321 allocInfo.m_hAllocation);
11322 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
11324 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
11326 if(allocInfo.m_pChanged != VMA_NULL)
11328 *allocInfo.m_pChanged = VK_TRUE;
11331 ++m_AllocationsMoved;
11332 m_BytesMoved += size;
11334 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance the backwards cursor (decrement lines are elided).
11342 if(srcAllocIndex > 0)
11348 if(srcBlockIndex > 0)
11351 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation: builds per-block info, distributes registered
// allocations to their blocks (skipping lost ones), sorts blocks/allocations
// into move order, then runs DefragmentRound up to twice, unmapping at the end.
11361 VkResult VmaDefragmentator::Defragment(
11362 VkDeviceSize maxBytesToMove,
11363 uint32_t maxAllocationsToMove)
11365 if(m_Allocations.empty())
// One BlockInfo per block currently in the vector.
11371 const size_t blockCount = m_pBlockVector->m_Blocks.size();
11372 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11374 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
11375 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
11376 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so each allocation's block can be binary-searched.
11380 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
11383 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
11385 AllocationInfo& allocInfo = m_Allocations[blockIndex];
// Allocations already lost are simply dropped from consideration.
11387 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
11389 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
11390 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
11391 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
11393 (*it)->m_Allocations.push_back(allocInfo);
11401 m_Allocations.clear();
11403 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11405 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
11406 pBlockInfo->CalcHasNonMovableAllocations();
11407 pBlockInfo->SortAllocationsBySizeDescecnding();
// Preferred destinations first.
11411 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Up to two rounds, stopping early on VK_INCOMPLETE or error.
11414 VkResult result = VK_SUCCESS;
11415 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
11417 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Drop any mappings EnsureMapping created during the rounds.
11421 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11423 m_Blocks[blockIndex]->Unmap(m_hAllocator);
11429 bool VmaDefragmentator::MoveMakesSense(
11430 size_t dstBlockIndex, VkDeviceSize dstOffset,
11431 size_t srcBlockIndex, VkDeviceSize srcOffset)
11433 if(dstBlockIndex < srcBlockIndex)
11437 if(dstBlockIndex > srcBlockIndex)
11441 if(dstOffset < srcOffset)
// Default constructor; body elided in this extraction (presumably empty).
11451 VmaDefragmentationContext_T::VmaDefragmentationContext_T()
// Destructor; body elided in this extraction (presumably empty).
11455 VmaDefragmentationContext_T::~VmaDefragmentationContext_T()
// Recorder constructor (Windows-only recording support); most of the member
// initializer list is elided by the extraction.
#if VMA_RECORDING_ENABLED 11464 VmaRecorder::VmaRecorder() :
11469 m_StartCounter(INT64_MAX)
// VmaRecorder::Init (signature elided): captures QPC frequency/start time,
// opens the CSV recording file, and writes the two-line header identifying
// the format as "Vulkan Memory Allocator,Calls recording" version 1,3.
11475 m_UseMutex = useMutex;
11476 m_Flags = settings.
flags;
// High-resolution timestamps for the per-call "time" column.
11478 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
11479 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
11482 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
11485 return VK_ERROR_INITIALIZATION_FAILED;
11489 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
11490 fprintf(m_File,
"%s\n",
"1,3");
// Destructor: closes the recording file if one was opened (fclose line elided).
11495 VmaRecorder::~VmaRecorder()
11497 if(m_File != VMA_NULL)
// Logs a vmaCreateAllocator call as one CSV line (thread id, time, frame),
// serialized by the file mutex.
11503 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
11505 CallParams callParams;
11506 GetBasicParams(callParams);
11508 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11509 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Logs a vmaDestroyAllocator call as one CSV line, under the file mutex.
11513 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
11515 CallParams callParams;
11516 GetBasicParams(callParams);
11518 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11519 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Body fragment of VmaRecorder::RecordCreatePool (signature and the pool
// parameter arguments are elided): logs the pool create info as CSV.
11525 CallParams callParams;
11526 GetBasicParams(callParams);
11528 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11529 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaDestroyPool call (pool handle argument line elided), under the
// file mutex.
11540 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
11542 CallParams callParams;
11543 GetBasicParams(callParams);
11545 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11546 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaAllocateMemory call: memory requirements, create-info fields and
// the (possibly escaped) user-data string, under the file mutex.
11551 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
11552 const VkMemoryRequirements& vkMemReq,
11556 CallParams callParams;
11557 GetBasicParams(callParams);
11559 VmaMutexLock lock(m_FileMutex, m_UseMutex);
// UserDataString renders pUserData either as a string or as a pointer.
11560 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11561 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11563 vkMemReq.alignment,
11564 vkMemReq.memoryTypeBits,
11572 userDataStr.GetString());
// Logs a vmaAllocateMemoryForBuffer call, including the dedicated-allocation
// requirement/preference flags, under the file mutex.
11576 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
11577 const VkMemoryRequirements& vkMemReq,
11578 bool requiresDedicatedAllocation,
11579 bool prefersDedicatedAllocation,
11583 CallParams callParams;
11584 GetBasicParams(callParams);
11586 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11587 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11588 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11590 vkMemReq.alignment,
11591 vkMemReq.memoryTypeBits,
11592 requiresDedicatedAllocation ? 1 : 0,
11593 prefersDedicatedAllocation ? 1 : 0,
11601 userDataStr.GetString());
// Logs a vmaAllocateMemoryForImage call, mirroring the buffer variant above,
// under the file mutex.
11605 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
11606 const VkMemoryRequirements& vkMemReq,
11607 bool requiresDedicatedAllocation,
11608 bool prefersDedicatedAllocation,
11612 CallParams callParams;
11613 GetBasicParams(callParams);
11615 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11616 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11617 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11619 vkMemReq.alignment,
11620 vkMemReq.memoryTypeBits,
11621 requiresDedicatedAllocation ? 1 : 0,
11622 prefersDedicatedAllocation ? 1 : 0,
11630 userDataStr.GetString());
// Logs a vmaFreeMemory call (allocation-handle parameter/argument lines
// elided), under the file mutex.
11634 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
11637 CallParams callParams;
11638 GetBasicParams(callParams);
11640 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11641 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaSetAllocationUserData call with the new user data rendered via
// UserDataString, under the file mutex.
11646 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
11648 const void* pUserData)
11650 CallParams callParams;
11651 GetBasicParams(callParams);
11653 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11654 UserDataString userDataStr(
11657 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11659 userDataStr.GetString());
// Logs a vmaCreateLostAllocation call, under the file mutex.
11663 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
11666 CallParams callParams;
11667 GetBasicParams(callParams);
11669 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11670 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaMapMemory call, under the file mutex.
11675 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
11678 CallParams callParams;
11679 GetBasicParams(callParams);
11681 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11682 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaUnmapMemory call, under the file mutex.
11687 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
11690 CallParams callParams;
11691 GetBasicParams(callParams);
11693 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11694 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaFlushAllocation call with its offset and size, under the file mutex.
11699 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
11700 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
11702 CallParams callParams;
11703 GetBasicParams(callParams);
11705 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11706 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaInvalidateAllocation call with its offset and size, under the
// file mutex.
11713 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
11714 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
11716 CallParams callParams;
11717 GetBasicParams(callParams);
11719 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11720 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaCreateBuffer call: the VkBufferCreateInfo fields followed by the
// VmaAllocationCreateInfo fields and user data, under the file mutex.
11727 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
11728 const VkBufferCreateInfo& bufCreateInfo,
11732 CallParams callParams;
11733 GetBasicParams(callParams);
11735 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11736 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
11737 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11738 bufCreateInfo.flags,
11739 bufCreateInfo.size,
11740 bufCreateInfo.usage,
11741 bufCreateInfo.sharingMode,
11742 allocCreateInfo.
flags,
11743 allocCreateInfo.
usage,
11747 allocCreateInfo.
pool,
11749 userDataStr.GetString());
// Logs a vmaCreateImage call: the full VkImageCreateInfo (type, format,
// extent, mips, layers, samples, tiling, usage, sharing, layout) followed by
// the VmaAllocationCreateInfo fields and user data, under the file mutex.
11753 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
11754 const VkImageCreateInfo& imageCreateInfo,
11758 CallParams callParams;
11759 GetBasicParams(callParams);
11761 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11762 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
11763 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11764 imageCreateInfo.flags,
11765 imageCreateInfo.imageType,
11766 imageCreateInfo.format,
11767 imageCreateInfo.extent.width,
11768 imageCreateInfo.extent.height,
11769 imageCreateInfo.extent.depth,
11770 imageCreateInfo.mipLevels,
11771 imageCreateInfo.arrayLayers,
11772 imageCreateInfo.samples,
11773 imageCreateInfo.tiling,
11774 imageCreateInfo.usage,
11775 imageCreateInfo.sharingMode,
11776 imageCreateInfo.initialLayout,
11777 allocCreateInfo.
flags,
11778 allocCreateInfo.
usage,
11782 allocCreateInfo.
pool,
11784 userDataStr.GetString());
// Logs a vmaDestroyBuffer call, under the file mutex.
11788 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
11791 CallParams callParams;
11792 GetBasicParams(callParams);
11794 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11795 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaDestroyImage call, under the file mutex.
11800 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
11803 CallParams callParams;
11804 GetBasicParams(callParams);
11806 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11807 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaTouchAllocation call, under the file mutex.
11812 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
11815 CallParams callParams;
11816 GetBasicParams(callParams);
11818 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11819 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaGetAllocationInfo call, under the file mutex.
11824 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
11827 CallParams callParams;
11828 GetBasicParams(callParams);
11830 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11831 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
// Logs a vmaMakePoolAllocationsLost call, under the file mutex.
11836 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
11839 CallParams callParams;
11840 GetBasicParams(callParams);
11842 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11843 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// Fragment of VmaRecorder::UserDataString's constructor (signature elided):
// if user data is a string (per the flags check on an elided line) it is used
// directly; otherwise the pointer value is formatted into m_PtrStr.
11850 if(pUserData != VMA_NULL)
11854 m_Str = (
const char*)pUserData;
// sprintf_s is MSVC-specific; recording is Windows-only in this version.
11858 sprintf_s(m_PtrStr,
"%p", pUserData);
// Writes the "Config,Begin".."Config,End" section of the recording file:
// physical-device identity and limits, per-heap and per-type memory
// properties, enabled extensions, and the compile-time VMA_DEBUG_* macros —
// everything a replayer needs to reproduce the environment.
11868 void VmaRecorder::WriteConfiguration(
11869 const VkPhysicalDeviceProperties& devProps,
11870 const VkPhysicalDeviceMemoryProperties& memProps,
11871 bool dedicatedAllocationExtensionEnabled)
11873 fprintf(m_File,
"Config,Begin\n");
11875 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
11876 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
11877 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
11878 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
11879 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
11880 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
11882 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
11883 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
11884 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
// Per-heap sizes and flags.
11886 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
11887 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
11889 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
11890 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
// Per-type heap index and property flags.
11892 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
11893 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
11895 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
11896 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
11899 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
// Compile-time debug configuration of this build.
11901 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
11902 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
11903 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
11904 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
11905 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
11906 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
11907 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
11908 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
11909 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
11911 fprintf(m_File,
"Config,End\n");
// Fills the per-call CSV prefix: Win32 thread id and seconds elapsed since
// Init() captured m_StartCounter, using QueryPerformanceCounter.
11914 void VmaRecorder::GetBasicParams(CallParams& outParams)
11916 outParams.threadId = GetCurrentThreadId();
11918 LARGE_INTEGER counter;
11919 QueryPerformanceCounter(&counter);
11920 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
// Flushes the recording file when configured to; body elided in this extraction.
11923 void VmaRecorder::Flush()
// VmaAllocator_T constructor — the signature line (taking const
// VmaAllocatorCreateInfo* pCreateInfo) is elided by the extraction; what
// follows is the member initializer list and body: validates debug config,
// zeroes state, imports Vulkan function pointers, queries device properties,
// applies heap size limits, and creates one block vector and dedicated-
// allocation list per memory type. Optionally starts the recorder.
11931 #endif // #if VMA_RECORDING_ENABLED 11939 m_hDevice(pCreateInfo->device),
11940 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
11941 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
11942 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
11943 m_PreferredLargeHeapBlockSize(0),
11944 m_PhysicalDevice(pCreateInfo->physicalDevice),
11945 m_CurrentFrameIndex(0),
11946 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
11949 ,m_pRecorder(VMA_NULL)
// Corruption detection writes uint32 magic values, so the margin must be
// a multiple of 4 bytes.
11952 if(VMA_DEBUG_DETECT_CORRUPTION)
11955 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
// Requesting the dedicated-allocation feature without the extension macros
// enabled is a configuration error.
11960 #if !(VMA_DEDICATED_ALLOCATION) 11963 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
11967 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
11968 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
11969 memset(&m_MemProps, 0,
sizeof(m_MemProps));
11971 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
11972 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
11974 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
11976 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
11987 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
11988 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Sanity-check that all alignment-related quantities are powers of two.
11990 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
11991 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
11992 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
11993 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
// Optional user-imposed heap size limits, clamping the reported heap sizes.
12000 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
12002 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
12003 if(limit != VK_WHOLE_SIZE)
12005 m_HeapSizeLimit[heapIndex] = limit;
12006 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
12008 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default block vector + dedicated-allocation list per memory type.
12014 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12016 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
12018 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
12021 preferredBlockSize,
12024 GetBufferImageGranularity(),
12031 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
12038 VkResult res = VK_SUCCESS;
// Recording (if requested) requires VMA_RECORDING_ENABLED == 1.
12043 #if VMA_RECORDING_ENABLED 12044 m_pRecorder = vma_new(
this, VmaRecorder)();
12046 if(res != VK_SUCCESS)
12050 m_pRecorder->WriteConfiguration(
12051 m_PhysicalDeviceProperties,
12053 m_UseKhrDedicatedAllocation);
12054 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
12056 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
12057 return VK_ERROR_FEATURE_NOT_PRESENT;
// Destructor: records the destroy event and tears down the recorder, then
// deletes the per-memory-type dedicated-allocation lists and block vectors.
// All user pools must already be destroyed.
12064 VmaAllocator_T::~VmaAllocator_T()
12066 #if VMA_RECORDING_ENABLED 12067 if(m_pRecorder != VMA_NULL)
12069 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
12070 vma_delete(
this, m_pRecorder);
12074 VMA_ASSERT(m_Pools.empty());
12076 for(
size_t i = GetMemoryTypeCount(); i--; )
12078 vma_delete(
this, m_pDedicatedAllocations[i]);
12079 vma_delete(
this, m_pBlockVectors[i]);
// Fills m_VulkanFunctions in three stages:
// 1) optionally from statically linked Vulkan prototypes,
// 2) then overridden by any non-null user-provided pointers,
// 3) finally validated with asserts that every required pointer is set.
12083 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
// Stage 1: take addresses of the statically linked entry points.
12085 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 12086 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
12087 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
12088 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
12089 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
12090 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
12091 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
12092 m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
12093 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
12094 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
12095 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
12096 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
12097 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
12098 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
12099 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
12100 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
12101 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// KHR_dedicated_allocation entry points are fetched dynamically
// because they are extension functions, not core prototypes.
12102 #if VMA_DEDICATED_ALLOCATION 12103 if(m_UseKhrDedicatedAllocation)
12105 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
12106 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
12107 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
12108 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// Stage 2: user-supplied pointers win over the static/dynamic ones.
12110 #endif // #if VMA_DEDICATED_ALLOCATION 12111 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 12113 #define VMA_COPY_IF_NOT_NULL(funcName) \ 12114 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 12116 if(pVulkanFunctions != VMA_NULL)
12118 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
12119 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
12120 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
12121 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
12122 VMA_COPY_IF_NOT_NULL(vkMapMemory);
12123 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
12124 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
12125 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
12126 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
12127 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
12128 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
12129 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
12130 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
12131 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
12132 VMA_COPY_IF_NOT_NULL(vkCreateImage);
12133 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
12134 #if VMA_DEDICATED_ALLOCATION 12135 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
12136 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Stage 3: every required pointer must be non-null by now.
12140 #undef VMA_COPY_IF_NOT_NULL 12144 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
12145 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
12146 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
12147 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
12148 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
12149 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
12150 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
12151 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
12152 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
12153 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
12154 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
12155 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
12156 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
12157 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
12158 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
12159 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
// Extension pointers are only mandatory when the extension is in use.
12160 #if VMA_DEDICATED_ALLOCATION 12161 if(m_UseKhrDedicatedAllocation)
12163 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
12164 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
12169 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
12171 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
12172 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
12173 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
12174 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates memory from one specific memory type: first tries the default
// block vector for that type, and falls back to (or prefers) a dedicated
// VkDeviceMemory allocation when requested or when the request is large
// relative to the preferred block size.
// NOTE(review): several original lines are missing from this excerpt
// (numbering gaps); code is kept byte-identical.
12177 VkResult VmaAllocator_T::AllocateMemoryOfType(
12179 VkDeviceSize alignment,
12180 bool dedicatedAllocation,
12181 VkBuffer dedicatedBuffer,
12182 VkImage dedicatedImage,
12184 uint32_t memTypeIndex,
12185 VmaSuballocationType suballocType,
12188 VMA_ASSERT(pAllocation != VMA_NULL);
12189 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
12195 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
12200 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
12201 VMA_ASSERT(blockVector);
// Prefer a dedicated allocation when forced by debug/config flags or when
// the request exceeds half of the preferred block size.
12203 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
12204 bool preferDedicatedMemory =
12205 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
12206 dedicatedAllocation ||
12208 size > preferredBlockSize / 2;
12210 if(preferDedicatedMemory &&
12212 finalCreateInfo.
pool == VK_NULL_HANDLE)
12221 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12225 return AllocateDedicatedMemory(
// Default path: suballocate from the memory type's block vector.
12239 VkResult res = blockVector->Allocate(
12241 m_CurrentFrameIndex.load(),
12247 if(res == VK_SUCCESS)
12255 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Block allocation failed - fall back to a dedicated allocation.
12259 res = AllocateDedicatedMemory(
12265 finalCreateInfo.pUserData,
12269 if(res == VK_SUCCESS)
12272 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
12278 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates a whole VkDeviceMemory object for a single allocation
// (optionally chained with VkMemoryDedicatedAllocateInfoKHR), maps it if
// requested, wraps it in a VmaAllocation_T, and registers it in the sorted
// per-memory-type dedicated-allocation vector.
12285 VkResult VmaAllocator_T::AllocateDedicatedMemory(
12287 VmaSuballocationType suballocType,
12288 uint32_t memTypeIndex,
12290 bool isUserDataString,
12292 VkBuffer dedicatedBuffer,
12293 VkImage dedicatedImage,
12296 VMA_ASSERT(pAllocation);
12298 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
12299 allocInfo.memoryTypeIndex = memTypeIndex;
12300 allocInfo.allocationSize = size;
// Chain the dedicated-allocation info when KHR_dedicated_allocation is in
// use; at most one of dedicatedBuffer / dedicatedImage may be set.
12302 #if VMA_DEDICATED_ALLOCATION 12303 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
12304 if(m_UseKhrDedicatedAllocation)
12306 if(dedicatedBuffer != VK_NULL_HANDLE)
12308 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
12309 dedicatedAllocInfo.buffer = dedicatedBuffer;
12310 allocInfo.pNext = &dedicatedAllocInfo;
12312 else if(dedicatedImage != VK_NULL_HANDLE)
12314 dedicatedAllocInfo.image = dedicatedImage;
12315 allocInfo.pNext = &dedicatedAllocInfo;
12318 #endif // #if VMA_DEDICATED_ALLOCATION 12321 VkDeviceMemory hMemory = VK_NULL_HANDLE;
12322 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
12325 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Map the memory persistently if requested; on failure the freshly
// allocated VkDeviceMemory is released again.
12329 void* pMappedData = VMA_NULL;
12332 res = (*m_VulkanFunctions.vkMapMemory)(
12341 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
12342 FreeVulkanMemory(memTypeIndex, size, hMemory);
12347 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
12348 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
12349 (*pAllocation)->SetUserData(
this, pUserData);
12350 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12352 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Register in the per-type sorted vector under its mutex so it can be
// found again by FreeDedicatedMemory / stats.
12357 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12358 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
12359 VMA_ASSERT(pDedicatedAllocations);
12360 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
12363 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries memory requirements for a buffer. When KHR_dedicated_allocation
// is enabled, uses vkGetBufferMemoryRequirements2KHR and also reports
// whether a dedicated allocation is required/preferred; otherwise falls
// back to the core function and reports false for both flags.
12368 void VmaAllocator_T::GetBufferMemoryRequirements(
12370 VkMemoryRequirements& memReq,
12371 bool& requiresDedicatedAllocation,
12372 bool& prefersDedicatedAllocation)
const 12374 #if VMA_DEDICATED_ALLOCATION 12375 if(m_UseKhrDedicatedAllocation)
12377 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
12378 memReqInfo.buffer = hBuffer;
// VkMemoryDedicatedRequirementsKHR is chained into pNext so the driver
// can fill the dedicated-allocation hints.
12380 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
12382 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
12383 memReq2.pNext = &memDedicatedReq;
12385 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
12387 memReq = memReq2.memoryRequirements;
12388 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
12389 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core query, no dedicated-allocation information available.
12392 #endif // #if VMA_DEDICATED_ALLOCATION 12394 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
12395 requiresDedicatedAllocation =
false;
12396 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR with VkMemoryDedicatedRequirementsKHR
// when KHR_dedicated_allocation is enabled, else the core query with both
// dedicated-allocation flags reported as false.
12400 void VmaAllocator_T::GetImageMemoryRequirements(
12402 VkMemoryRequirements& memReq,
12403 bool& requiresDedicatedAllocation,
12404 bool& prefersDedicatedAllocation)
const 12406 #if VMA_DEDICATED_ALLOCATION 12407 if(m_UseKhrDedicatedAllocation)
12409 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
12410 memReqInfo.image = hImage;
12412 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
12414 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
12415 memReq2.pNext = &memDedicatedReq;
12417 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
12419 memReq = memReq2.memoryRequirements;
12420 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
12421 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core query, no dedicated-allocation information available.
12424 #endif // #if VMA_DEDICATED_ALLOCATION 12426 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
12427 requiresDedicatedAllocation =
false;
12428 prefersDedicatedAllocation =
false;
// Top-level allocation entry: validates mutually exclusive create flags,
// routes custom-pool requests to the pool's block vector, and otherwise
// walks the acceptable memory types (best first), retrying with the next
// type whenever allocation in the current one fails.
// NOTE(review): many original lines are missing from this excerpt
// (numbering gaps); code is kept byte-identical.
12432 VkResult VmaAllocator_T::AllocateMemory(
12433 const VkMemoryRequirements& vkMemReq,
12434 bool requiresDedicatedAllocation,
12435 bool prefersDedicatedAllocation,
12436 VkBuffer dedicatedBuffer,
12437 VkImage dedicatedImage,
12439 VmaSuballocationType suballocType,
12442 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
// Reject contradictory flag combinations up front.
12447 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
12448 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12453 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
12454 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// A required dedicated allocation is incompatible with NEVER_ALLOCATE
// and with custom pools.
12456 if(requiresDedicatedAllocation)
12460 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
12461 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12463 if(createInfo.
pool != VK_NULL_HANDLE)
12465 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
12466 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12469 if((createInfo.
pool != VK_NULL_HANDLE) &&
12472 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
12473 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom-pool path: allocate directly from the pool's block vector,
// honoring the memory type's minimum alignment.
12476 if(createInfo.
pool != VK_NULL_HANDLE)
12478 const VkDeviceSize alignmentForPool = VMA_MAX(
12479 vkMemReq.alignment,
12480 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
12481 return createInfo.
pool->m_BlockVector.Allocate(
12483 m_CurrentFrameIndex.load(),
// Default path: iterate candidate memory types from the requirement
// bitmask, removing each failed type and retrying with the next best.
12493 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
12494 uint32_t memTypeIndex = UINT32_MAX;
12496 if(res == VK_SUCCESS)
12498 VkDeviceSize alignmentForMemType = VMA_MAX(
12499 vkMemReq.alignment,
12500 GetMemoryTypeMinAlignment(memTypeIndex));
12502 res = AllocateMemoryOfType(
12504 alignmentForMemType,
12505 requiresDedicatedAllocation || prefersDedicatedAllocation,
12513 if(res == VK_SUCCESS)
// Exclude the failed type from further consideration.
12523 memoryTypeBits &= ~(1u << memTypeIndex);
12526 if(res == VK_SUCCESS)
12528 alignmentForMemType = VMA_MAX(
12529 vkMemReq.alignment,
12530 GetMemoryTypeMinAlignment(memTypeIndex));
12532 res = AllocateMemoryOfType(
12534 alignmentForMemType,
12535 requiresDedicatedAllocation || prefersDedicatedAllocation,
12543 if(res == VK_SUCCESS)
12553 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation: optionally fills it with the "destroyed" debug
// pattern, returns it to its owning block vector (custom pool or default
// per-type vector) or frees its dedicated memory, then destroys the
// VmaAllocation_T object itself.
12564 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
12566 VMA_ASSERT(allocation);
// TouchAllocation returns whether the allocation is still valid (not
// lost); only then is there memory to scribble over / free.
12568 if(TouchAllocation(allocation))
12570 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12572 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
12575 switch(allocation->GetType())
12577 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12579 VmaBlockVector* pBlockVector = VMA_NULL;
12580 VmaPool hPool = allocation->GetPool();
// Pool allocations go back to the pool's vector, others to the
// default vector of their memory type.
12581 if(hPool != VK_NULL_HANDLE)
12583 pBlockVector = &hPool->m_BlockVector;
12587 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
12588 pBlockVector = m_pBlockVectors[memTypeIndex];
12590 pBlockVector->Free(allocation);
12593 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12594 FreeDedicatedMemory(allocation);
// Clear user data before destruction (may free an owned string copy).
12601 allocation->SetUserData(
this, VMA_NULL);
12602 vma_delete(
this, allocation);
// Aggregates statistics over default block vectors, custom pools and
// dedicated allocations into pStats, then post-processes the totals and
// the per-memory-type / per-heap entries.
12605 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
12608 InitStatInfo(pStats->
total);
12609 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
12611 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// 1) Default block vectors, one per memory type.
12615 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12617 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
12618 VMA_ASSERT(pBlockVector);
12619 pBlockVector->AddStats(pStats);
// 2) Custom pools, under the pools mutex.
12624 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12625 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
12627 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
// 3) Dedicated allocations, per memory type under each type's mutex.
12632 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12634 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
12635 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12636 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
12637 VMA_ASSERT(pDedicatedAllocVector);
12638 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
12641 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
12642 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12643 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12644 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// 4) Derive averages/min/max for all accumulated entries.
12649 VmaPostprocessCalcStatInfo(pStats->
total);
12650 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
12651 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
12652 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
12653 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// PCI vendor ID of AMD (4098 == 0x1002).
12656 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Starts defragmentation: registers each eligible allocation (block-type,
// HOST_VISIBLE|HOST_COHERENT memory, not lost) with a defragmentator on
// its block vector, runs Defragment() on default vectors and pools, then
// tears the defragmentators down in reverse order.
// NOTE(review): numerous original lines are missing from this excerpt;
// code is kept byte-identical.
12658 VkResult VmaAllocator_T::DefragmentationBegin(
12667 if(pStats != VMA_NULL)
12672 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
12674 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
12676 const size_t poolCount = m_Pools.size();
// Phase 1: distribute the caller's allocations to defragmentators.
12679 for(
size_t allocIndex = 0; allocIndex < info.
allocationCount; ++allocIndex)
12682 VMA_ASSERT(hAlloc);
12683 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
// Only host-visible, host-coherent block allocations that are not
// lost can be moved by this implementation.
12685 const VkMemoryPropertyFlags requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
12686 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
12688 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags) &&
12690 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
12692 VmaBlockVector* pAllocBlockVector = VMA_NULL;
12694 const VmaPool hAllocPool = hAlloc->GetPool();
12696 if(hAllocPool != VK_NULL_HANDLE)
// Pools using a non-default algorithm are not defragmented.
12699 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
12701 pAllocBlockVector = &hAllocPool->m_BlockVector;
12707 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
12710 if(pAllocBlockVector != VMA_NULL)
12712 VmaDefragmentator*
const pDefragmentator =
12713 pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
12716 pDefragmentator->AddAllocation(hAlloc, pChanged);
// Phase 2: run defragmentation while the result stays VK_SUCCESS.
12721 VkResult result = VK_SUCCESS;
12729 for(uint32_t memTypeIndex = 0;
12730 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
12734 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12736 result = m_pBlockVectors[memTypeIndex]->Defragment(
12739 maxAllocationsToMove);
12744 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
12746 result = m_Pools[poolIndex]->m_BlockVector.Defragment(
12749 maxAllocationsToMove);
// Phase 3: destroy defragmentators in reverse creation order.
12755 for(
size_t poolIndex = poolCount; poolIndex--; )
12757 m_Pools[poolIndex]->m_BlockVector.DestroyDefragmentator();
12761 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
12763 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12765 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// Finishes a defragmentation operation by destroying its context object.
// NOTE(review): the parameter list and return statement are missing from
// this excerpt (numbering gap) - confirm against the full file.
12772 VkResult VmaAllocator_T::DefragmentationEnd(
12775 vma_delete(
this, context);
// NOTE(review): the function header line is missing from this excerpt;
// judging by the body this is the allocation-info query filling
// pAllocationInfo from hAllocation - confirm against the full file.
// For allocations that can become lost, it retries a compare-exchange on
// the last-use frame index; lost allocations report offset 0 and null-ish
// fields, live ones report their real parameters.
12781 if(hAllocation->CanBecomeLost())
12787 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12788 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Allocation already lost: report placeholder values.
12791 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
12795 pAllocationInfo->
offset = 0;
12796 pAllocationInfo->
size = hAllocation->GetSize();
12798 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report real parameters.
12801 else if(localLastUseFrameIndex == localCurrFrameIndex)
12803 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
12804 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
12805 pAllocationInfo->
offset = hAllocation->GetOffset();
12806 pAllocationInfo->
size = hAllocation->GetSize();
12808 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise try to bump the last-use frame index and loop again.
12813 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12815 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost allocations: with stats enabled, still update the last-use
// frame index before reporting the parameters.
12822 #if VMA_STATS_STRING_ENABLED 12823 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12824 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12827 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
12828 if(localLastUseFrameIndex == localCurrFrameIndex)
12834 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12836 localLastUseFrameIndex = localCurrFrameIndex;
12842 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
12843 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
12844 pAllocationInfo->
offset = hAllocation->GetOffset();
12845 pAllocationInfo->
size = hAllocation->GetSize();
12846 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
12847 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks the allocation as used in the current frame. For allocations that
// can become lost it compare-exchanges the last-use frame index in a loop
// (returning false once the allocation is lost); for ordinary allocations
// the frame index is still refreshed when stats are enabled.
12851 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
12854 if(hAllocation->CanBecomeLost())
12856 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12857 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12860 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
12864 else if(localLastUseFrameIndex == localCurrFrameIndex)
// Retry the CAS until it succeeds or the state changes under us.
12870 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12872 localLastUseFrameIndex = localCurrFrameIndex;
12879 #if VMA_STATS_STRING_ENABLED 12880 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12881 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12884 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
12885 if(localLastUseFrameIndex == localCurrFrameIndex)
12891 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12893 localLastUseFrameIndex = localCurrFrameIndex;
// NOTE(review): the function header line is missing from this excerpt;
// judging by the body this is pool creation (pCreateInfo -> *pPool) -
// confirm against the full file. Creates a VmaPool_T, pre-creates its
// minimum block count, assigns it an id and registers it in m_Pools.
12905 VMA_DEBUG_LOG(
"CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
12915 return VK_ERROR_INITIALIZATION_FAILED;
12918 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
12920 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
// If the minimum blocks cannot be created, the half-built pool is
// destroyed again and the error propagated.
12922 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
12923 if(res != VK_SUCCESS)
12925 vma_delete(
this, *pPool);
// Register the new pool (sorted by pointer) under the pools mutex.
12932 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12933 (*pPool)->SetId(m_NextPoolId++);
12934 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Unregisters the pool from m_Pools (under the pools mutex) and destroys
// it. The pool must have been created by this allocator.
12940 void VmaAllocator_T::DestroyPool(
VmaPool pool)
12944 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12945 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
12946 VMA_ASSERT(success &&
"Pool not found in Allocator.");
12949 vma_delete(
this, pool);
12954 pool->m_BlockVector.GetPoolStats(pPoolStats);
12957 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
12959 m_CurrentFrameIndex.store(frameIndex);
12962 void VmaAllocator_T::MakePoolAllocationsLost(
12964 size_t* pLostAllocationCount)
12966 hPool->m_BlockVector.MakePoolAllocationsLost(
12967 m_CurrentFrameIndex.load(),
12968 pLostAllocationCount);
12971 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
12973 return hPool->m_BlockVector.CheckCorruption();
// Runs corruption checks over all default block vectors and custom pools
// whose memory type is included in memoryTypeBits. Starts from
// VK_ERROR_FEATURE_NOT_PRESENT and upgrades to VK_SUCCESS once any vector
// actually performed a check.
12976 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
12978 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// 1) Default block vectors of the selected memory types.
12981 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12983 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
12985 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
12986 VMA_ASSERT(pBlockVector);
12987 VkResult localRes = pBlockVector->CheckCorruption();
12990 case VK_ERROR_FEATURE_NOT_PRESENT:
12993 finalRes = VK_SUCCESS;
// 2) Custom pools whose memory type matches the mask.
13003 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
13004 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
13006 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
13008 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
13011 case VK_ERROR_FEATURE_NOT_PRESENT:
13014 finalRes = VK_SUCCESS;
13026 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
13028 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
13029 (*pAllocation)->InitLost();
// Wraps vkAllocateMemory: enforces the optional per-heap size limit
// (decrementing the remaining budget on success, failing with
// VK_ERROR_OUT_OF_DEVICE_MEMORY when the budget is exceeded) and invokes
// the user's pfnAllocate device-memory callback on success.
13032 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
13034 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
// Heap has a user-imposed size limit: account under the mutex.
13037 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
13039 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
13040 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
13042 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
13043 if(res == VK_SUCCESS)
13045 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
13050 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// No limit on this heap: plain allocation.
13055 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
// Notify the user's allocation callback, if installed.
13058 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
13060 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
// Wraps vkFreeMemory: invokes the user's pfnFree callback first, frees
// the memory, and returns the freed size to the heap's remaining budget
// when a per-heap size limit is configured.
13066 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
13068 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
13070 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
13073 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
// Give the freed bytes back to the heap budget, mirroring the
// subtraction done in AllocateVulkanMemory.
13075 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
13076 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
13078 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
13079 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation's memory and returns a pointer to its start in
// *ppData. Lost-capable allocations cannot be mapped. Block allocations
// map the whole block (ref-counted) and offset the returned pointer;
// dedicated allocations delegate to the allocation object itself.
13083 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
13085 if(hAllocation->CanBecomeLost())
13087 return VK_ERROR_MEMORY_MAP_FAILED;
13090 switch(hAllocation->GetType())
13092 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13094 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
13095 char *pBytes = VMA_NULL;
13096 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
13097 if(res == VK_SUCCESS)
// Point into the block at this allocation's offset and bump the
// allocation's own map reference count.
13099 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
13100 hAllocation->BlockAllocMap();
13104 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13105 return hAllocation->DedicatedAllocMap(
this, ppData);
13108 return VK_ERROR_MEMORY_MAP_FAILED;
// NOTE(review): the function header line is missing from this excerpt;
// judging by the body this is the unmap counterpart of Map() - confirm
// against the full file. Decrements the allocation's map count and
// unmaps the owning block or the dedicated allocation accordingly.
13114 switch(hAllocation->GetType())
13116 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13118 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
13119 hAllocation->BlockAllocUnmap();
13120 pBlock->Unmap(
this, 1);
13123 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13124 hAllocation->DedicatedAllocUnmap(
this);
// Binds a buffer to the allocation's memory: directly via
// vkBindBufferMemory for dedicated allocations, or through the owning
// block (which applies the allocation's offset) for block allocations.
13131 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
13133 VkResult res = VK_SUCCESS;
13134 switch(hAllocation->GetType())
13136 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13137 res = GetVulkanFunctions().vkBindBufferMemory(
13140 hAllocation->GetMemory(),
13143 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13145 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
13146 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
13147 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
// Image counterpart of BindBufferMemory: vkBindImageMemory for dedicated
// allocations, or binding through the owning block for block allocations.
13156 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
13158 VkResult res = VK_SUCCESS;
13159 switch(hAllocation->GetType())
13161 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13162 res = GetVulkanFunctions().vkBindImageMemory(
13165 hAllocation->GetMemory(),
13168 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13170 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
13171 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
13172 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
// Flushes or invalidates a sub-range of an allocation on non-coherent
// memory types. The range is expanded to nonCoherentAtomSize boundaries
// (aligned down at the start, aligned up at the end) and clamped to the
// allocation / block size, then passed to the matching Vulkan call.
13181 void VmaAllocator_T::FlushOrInvalidateAllocation(
13183 VkDeviceSize offset, VkDeviceSize size,
13184 VMA_CACHE_OPERATION op)
13186 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
// Coherent memory needs no explicit flush/invalidate; empty ranges are
// skipped as well.
13187 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
13189 const VkDeviceSize allocationSize = hAllocation->GetSize();
13190 VMA_ASSERT(offset <= allocationSize);
13192 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
13194 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
13195 memRange.memory = hAllocation->GetMemory();
13197 switch(hAllocation->GetType())
13199 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13200 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
13201 if(size == VK_WHOLE_SIZE)
13203 memRange.size = allocationSize - memRange.offset;
13207 VMA_ASSERT(offset + size <= allocationSize);
13208 memRange.size = VMA_MIN(
13209 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
13210 allocationSize - memRange.offset);
// Block allocations additionally translate by the allocation's
// offset inside the block and clamp to the block size.
13214 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13217 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
13218 if(size == VK_WHOLE_SIZE)
13220 size = allocationSize - offset;
13224 VMA_ASSERT(offset + size <= allocationSize);
13226 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
13229 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
13230 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
13231 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
13232 memRange.offset += allocationOffset;
13233 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
13244 case VMA_CACHE_FLUSH:
13245 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
13247 case VMA_CACHE_INVALIDATE:
13248 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
// Frees a dedicated allocation: removes it from the per-memory-type
// sorted vector (under that type's mutex), then releases its whole
// VkDeviceMemory via FreeVulkanMemory.
13257 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
13259 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
13261 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
13263 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
13264 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
13265 VMA_ASSERT(pDedicatedAllocations);
// The allocation must have been registered by AllocateDedicatedMemory.
13266 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
13267 VMA_ASSERT(success);
13270 VkDeviceMemory hMemory = allocation->GetMemory();
13282 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
13284 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Debug helper: fills the allocation's memory with the given byte pattern
// (used for created/destroyed markers). Only applies to host-visible,
// non-lost allocations; maps, memsets, flushes and unmaps the range.
13287 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
13289 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
13290 !hAllocation->CanBecomeLost() &&
13291 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
13293 void* pData = VMA_NULL;
13294 VkResult res = Map(hAllocation, &pData);
13295 if(res == VK_SUCCESS)
13297 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
// Flush so the pattern is visible even on non-coherent memory.
13298 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
13299 Unmap(hAllocation);
13303 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
// Writes a detailed JSON map of the allocator's state: dedicated
// allocations per memory type, default block vectors per memory type, and
// custom pools keyed by pool id. Sections are opened lazily, only when
// they have content.
13308 #if VMA_STATS_STRING_ENABLED 13310 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
// 1) "DedicatedAllocations" section, opened on first non-empty type.
13312 bool dedicatedAllocationsStarted =
false;
13313 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13315 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
13316 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
13317 VMA_ASSERT(pDedicatedAllocVector);
13318 if(pDedicatedAllocVector->empty() ==
false)
13320 if(dedicatedAllocationsStarted ==
false)
13322 dedicatedAllocationsStarted =
true;
13323 json.WriteString(
"DedicatedAllocations");
13324 json.BeginObject();
13327 json.BeginString(
"Type ");
13328 json.ContinueString(memTypeIndex);
13333 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
13335 json.BeginObject(
true);
13337 hAlloc->PrintParameters(json);
13344 if(dedicatedAllocationsStarted)
// 2) "DefaultPools" section for non-empty default block vectors.
13350 bool allocationsStarted =
false;
13351 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13353 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
13355 if(allocationsStarted ==
false)
13357 allocationsStarted =
true;
13358 json.WriteString(
"DefaultPools");
13359 json.BeginObject();
13362 json.BeginString(
"Type ");
13363 json.ContinueString(memTypeIndex);
13366 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
13369 if(allocationsStarted)
// 3) "Pools" section: custom pools keyed by their numeric id.
13377 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
13378 const size_t poolCount = m_Pools.size();
13381 json.WriteString(
"Pools");
13382 json.BeginObject();
13383 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13385 json.BeginString();
13386 json.ContinueString(m_Pools[poolIndex]->GetId());
13389 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
13396 #endif // #if VMA_STATS_STRING_ENABLED 13405 VMA_ASSERT(pCreateInfo && pAllocator);
13406 VMA_DEBUG_LOG(
"vmaCreateAllocator");
13408 return (*pAllocator)->Init(pCreateInfo);
13414 if(allocator != VK_NULL_HANDLE)
13416 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
13417 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
13418 vma_delete(&allocationCallbacks, allocator);
13424 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
13426 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
13427 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
13432 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
13434 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
13435 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
13440 uint32_t memoryTypeIndex,
13441 VkMemoryPropertyFlags* pFlags)
13443 VMA_ASSERT(allocator && pFlags);
13444 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
13445 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
13450 uint32_t frameIndex)
13452 VMA_ASSERT(allocator);
13453 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
13455 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13457 allocator->SetCurrentFrameIndex(frameIndex);
13464 VMA_ASSERT(allocator && pStats);
13465 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13466 allocator->CalculateStats(pStats);
13469 #if VMA_STATS_STRING_ENABLED 13473 char** ppStatsString,
13474 VkBool32 detailedMap)
13476 VMA_ASSERT(allocator && ppStatsString);
13477 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13479 VmaStringBuilder sb(allocator);
13481 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
13482 json.BeginObject();
13485 allocator->CalculateStats(&stats);
13487 json.WriteString(
"Total");
13488 VmaPrintStatInfo(json, stats.
total);
13490 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
13492 json.BeginString(
"Heap ");
13493 json.ContinueString(heapIndex);
13495 json.BeginObject();
13497 json.WriteString(
"Size");
13498 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
13500 json.WriteString(
"Flags");
13501 json.BeginArray(
true);
13502 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
13504 json.WriteString(
"DEVICE_LOCAL");
13510 json.WriteString(
"Stats");
13511 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
13514 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
13516 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
13518 json.BeginString(
"Type ");
13519 json.ContinueString(typeIndex);
13522 json.BeginObject();
13524 json.WriteString(
"Flags");
13525 json.BeginArray(
true);
13526 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
13527 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
13529 json.WriteString(
"DEVICE_LOCAL");
13531 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
13533 json.WriteString(
"HOST_VISIBLE");
13535 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
13537 json.WriteString(
"HOST_COHERENT");
13539 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
13541 json.WriteString(
"HOST_CACHED");
13543 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
13545 json.WriteString(
"LAZILY_ALLOCATED");
13551 json.WriteString(
"Stats");
13552 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
13561 if(detailedMap == VK_TRUE)
13563 allocator->PrintDetailedMap(json);
13569 const size_t len = sb.GetLength();
13570 char*
const pChars = vma_new_array(allocator,
char, len + 1);
13573 memcpy(pChars, sb.GetData(), len);
13575 pChars[len] =
'\0';
13576 *ppStatsString = pChars;
13581 char* pStatsString)
13583 if(pStatsString != VMA_NULL)
13585 VMA_ASSERT(allocator);
13586 size_t len = strlen(pStatsString);
13587 vma_delete_array(allocator, pStatsString, len + 1);
13591 #endif // #if VMA_STATS_STRING_ENABLED 13598 uint32_t memoryTypeBits,
13600 uint32_t* pMemoryTypeIndex)
13602 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13603 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13604 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13611 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
13612 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
13617 preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13621 switch(pAllocationCreateInfo->
usage)
13626 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
13628 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
13632 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
13635 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13636 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
13638 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
13642 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13643 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
13649 *pMemoryTypeIndex = UINT32_MAX;
13650 uint32_t minCost = UINT32_MAX;
13651 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
13652 memTypeIndex < allocator->GetMemoryTypeCount();
13653 ++memTypeIndex, memTypeBit <<= 1)
13656 if((memTypeBit & memoryTypeBits) != 0)
13658 const VkMemoryPropertyFlags currFlags =
13659 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
13661 if((requiredFlags & ~currFlags) == 0)
13664 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
13666 if(currCost < minCost)
13668 *pMemoryTypeIndex = memTypeIndex;
13673 minCost = currCost;
13678 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
13683 const VkBufferCreateInfo* pBufferCreateInfo,
13685 uint32_t* pMemoryTypeIndex)
13687 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13688 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
13689 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13690 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13692 const VkDevice hDev = allocator->m_hDevice;
13693 VkBuffer hBuffer = VK_NULL_HANDLE;
13694 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
13695 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
13696 if(res == VK_SUCCESS)
13698 VkMemoryRequirements memReq = {};
13699 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
13700 hDev, hBuffer, &memReq);
13704 memReq.memoryTypeBits,
13705 pAllocationCreateInfo,
13708 allocator->GetVulkanFunctions().vkDestroyBuffer(
13709 hDev, hBuffer, allocator->GetAllocationCallbacks());
13716 const VkImageCreateInfo* pImageCreateInfo,
13718 uint32_t* pMemoryTypeIndex)
13720 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13721 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
13722 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13723 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13725 const VkDevice hDev = allocator->m_hDevice;
13726 VkImage hImage = VK_NULL_HANDLE;
13727 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
13728 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
13729 if(res == VK_SUCCESS)
13731 VkMemoryRequirements memReq = {};
13732 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
13733 hDev, hImage, &memReq);
13737 memReq.memoryTypeBits,
13738 pAllocationCreateInfo,
13741 allocator->GetVulkanFunctions().vkDestroyImage(
13742 hDev, hImage, allocator->GetAllocationCallbacks());
13752 VMA_ASSERT(allocator && pCreateInfo && pPool);
13754 VMA_DEBUG_LOG(
"vmaCreatePool");
13756 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13758 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
13760 #if VMA_RECORDING_ENABLED 13761 if(allocator->GetRecorder() != VMA_NULL)
13763 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
13774 VMA_ASSERT(allocator);
13776 if(pool == VK_NULL_HANDLE)
13781 VMA_DEBUG_LOG(
"vmaDestroyPool");
13783 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13785 #if VMA_RECORDING_ENABLED 13786 if(allocator->GetRecorder() != VMA_NULL)
13788 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
13792 allocator->DestroyPool(pool);
13800 VMA_ASSERT(allocator && pool && pPoolStats);
13802 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13804 allocator->GetPoolStats(pool, pPoolStats);
13810 size_t* pLostAllocationCount)
13812 VMA_ASSERT(allocator && pool);
13814 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13816 #if VMA_RECORDING_ENABLED 13817 if(allocator->GetRecorder() != VMA_NULL)
13819 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
13823 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
13828 VMA_ASSERT(allocator && pool);
13830 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13832 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
13834 return allocator->CheckPoolCorruption(pool);
13839 const VkMemoryRequirements* pVkMemoryRequirements,
13844 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
13846 VMA_DEBUG_LOG(
"vmaAllocateMemory");
13848 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13850 VkResult result = allocator->AllocateMemory(
13851 *pVkMemoryRequirements,
13857 VMA_SUBALLOCATION_TYPE_UNKNOWN,
13860 #if VMA_RECORDING_ENABLED 13861 if(allocator->GetRecorder() != VMA_NULL)
13863 allocator->GetRecorder()->RecordAllocateMemory(
13864 allocator->GetCurrentFrameIndex(),
13865 *pVkMemoryRequirements,
13871 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
13873 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13886 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
13888 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
13890 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13892 VkMemoryRequirements vkMemReq = {};
13893 bool requiresDedicatedAllocation =
false;
13894 bool prefersDedicatedAllocation =
false;
13895 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
13896 requiresDedicatedAllocation,
13897 prefersDedicatedAllocation);
13899 VkResult result = allocator->AllocateMemory(
13901 requiresDedicatedAllocation,
13902 prefersDedicatedAllocation,
13906 VMA_SUBALLOCATION_TYPE_BUFFER,
13909 #if VMA_RECORDING_ENABLED 13910 if(allocator->GetRecorder() != VMA_NULL)
13912 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
13913 allocator->GetCurrentFrameIndex(),
13915 requiresDedicatedAllocation,
13916 prefersDedicatedAllocation,
13922 if(pAllocationInfo && result == VK_SUCCESS)
13924 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13937 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
13939 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
13941 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13943 VkMemoryRequirements vkMemReq = {};
13944 bool requiresDedicatedAllocation =
false;
13945 bool prefersDedicatedAllocation =
false;
13946 allocator->GetImageMemoryRequirements(image, vkMemReq,
13947 requiresDedicatedAllocation, prefersDedicatedAllocation);
13949 VkResult result = allocator->AllocateMemory(
13951 requiresDedicatedAllocation,
13952 prefersDedicatedAllocation,
13956 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
13959 #if VMA_RECORDING_ENABLED 13960 if(allocator->GetRecorder() != VMA_NULL)
13962 allocator->GetRecorder()->RecordAllocateMemoryForImage(
13963 allocator->GetCurrentFrameIndex(),
13965 requiresDedicatedAllocation,
13966 prefersDedicatedAllocation,
13972 if(pAllocationInfo && result == VK_SUCCESS)
13974 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13984 VMA_ASSERT(allocator);
13986 if(allocation == VK_NULL_HANDLE)
13991 VMA_DEBUG_LOG(
"vmaFreeMemory");
13993 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13995 #if VMA_RECORDING_ENABLED 13996 if(allocator->GetRecorder() != VMA_NULL)
13998 allocator->GetRecorder()->RecordFreeMemory(
13999 allocator->GetCurrentFrameIndex(),
14004 allocator->FreeMemory(allocation);
14012 VMA_ASSERT(allocator && allocation && pAllocationInfo);
14014 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14016 #if VMA_RECORDING_ENABLED 14017 if(allocator->GetRecorder() != VMA_NULL)
14019 allocator->GetRecorder()->RecordGetAllocationInfo(
14020 allocator->GetCurrentFrameIndex(),
14025 allocator->GetAllocationInfo(allocation, pAllocationInfo);
14032 VMA_ASSERT(allocator && allocation);
14034 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14036 #if VMA_RECORDING_ENABLED 14037 if(allocator->GetRecorder() != VMA_NULL)
14039 allocator->GetRecorder()->RecordTouchAllocation(
14040 allocator->GetCurrentFrameIndex(),
14045 return allocator->TouchAllocation(allocation);
14053 VMA_ASSERT(allocator && allocation);
14055 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14057 allocation->SetUserData(allocator, pUserData);
14059 #if VMA_RECORDING_ENABLED 14060 if(allocator->GetRecorder() != VMA_NULL)
14062 allocator->GetRecorder()->RecordSetAllocationUserData(
14063 allocator->GetCurrentFrameIndex(),
14074 VMA_ASSERT(allocator && pAllocation);
14076 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
14078 allocator->CreateLostAllocation(pAllocation);
14080 #if VMA_RECORDING_ENABLED 14081 if(allocator->GetRecorder() != VMA_NULL)
14083 allocator->GetRecorder()->RecordCreateLostAllocation(
14084 allocator->GetCurrentFrameIndex(),
14095 VMA_ASSERT(allocator && allocation && ppData);
14097 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14099 VkResult res = allocator->Map(allocation, ppData);
14101 #if VMA_RECORDING_ENABLED 14102 if(allocator->GetRecorder() != VMA_NULL)
14104 allocator->GetRecorder()->RecordMapMemory(
14105 allocator->GetCurrentFrameIndex(),
14117 VMA_ASSERT(allocator && allocation);
14119 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14121 #if VMA_RECORDING_ENABLED 14122 if(allocator->GetRecorder() != VMA_NULL)
14124 allocator->GetRecorder()->RecordUnmapMemory(
14125 allocator->GetCurrentFrameIndex(),
14130 allocator->Unmap(allocation);
14135 VMA_ASSERT(allocator && allocation);
14137 VMA_DEBUG_LOG(
"vmaFlushAllocation");
14139 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14141 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
14143 #if VMA_RECORDING_ENABLED 14144 if(allocator->GetRecorder() != VMA_NULL)
14146 allocator->GetRecorder()->RecordFlushAllocation(
14147 allocator->GetCurrentFrameIndex(),
14148 allocation, offset, size);
14155 VMA_ASSERT(allocator && allocation);
14157 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
14159 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14161 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
14163 #if VMA_RECORDING_ENABLED 14164 if(allocator->GetRecorder() != VMA_NULL)
14166 allocator->GetRecorder()->RecordInvalidateAllocation(
14167 allocator->GetCurrentFrameIndex(),
14168 allocation, offset, size);
14175 VMA_ASSERT(allocator);
14177 VMA_DEBUG_LOG(
"vmaCheckCorruption");
14179 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14181 return allocator->CheckCorruption(memoryTypeBits);
14187 size_t allocationCount,
14188 VkBool32* pAllocationsChanged,
14198 if(pDefragmentationInfo != VMA_NULL)
14212 if(res == VK_NOT_READY)
14225 VMA_ASSERT(allocator && pInfo && pContext);
14227 VMA_DEBUG_LOG(
"vmaDefragmentationBegin");
14229 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14231 return allocator->DefragmentationBegin(*pInfo, pStats, pContext);
14238 VMA_ASSERT(allocator);
14240 VMA_DEBUG_LOG(
"vmaDefragmentationEnd");
14242 if(context != VK_NULL_HANDLE)
14244 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14246 return allocator->DefragmentationEnd(context);
14259 VMA_ASSERT(allocator && allocation && buffer);
14261 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
14263 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14265 return allocator->BindBufferMemory(allocation, buffer);
14273 VMA_ASSERT(allocator && allocation && image);
14275 VMA_DEBUG_LOG(
"vmaBindImageMemory");
14277 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14279 return allocator->BindImageMemory(allocation, image);
14284 const VkBufferCreateInfo* pBufferCreateInfo,
14290 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
14292 VMA_DEBUG_LOG(
"vmaCreateBuffer");
14294 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14296 *pBuffer = VK_NULL_HANDLE;
14297 *pAllocation = VK_NULL_HANDLE;
14300 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
14301 allocator->m_hDevice,
14303 allocator->GetAllocationCallbacks(),
14308 VkMemoryRequirements vkMemReq = {};
14309 bool requiresDedicatedAllocation =
false;
14310 bool prefersDedicatedAllocation =
false;
14311 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
14312 requiresDedicatedAllocation, prefersDedicatedAllocation);
14316 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
14318 VMA_ASSERT(vkMemReq.alignment %
14319 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
14321 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
14323 VMA_ASSERT(vkMemReq.alignment %
14324 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
14326 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
14328 VMA_ASSERT(vkMemReq.alignment %
14329 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
14333 res = allocator->AllocateMemory(
14335 requiresDedicatedAllocation,
14336 prefersDedicatedAllocation,
14339 *pAllocationCreateInfo,
14340 VMA_SUBALLOCATION_TYPE_BUFFER,
14343 #if VMA_RECORDING_ENABLED 14344 if(allocator->GetRecorder() != VMA_NULL)
14346 allocator->GetRecorder()->RecordCreateBuffer(
14347 allocator->GetCurrentFrameIndex(),
14348 *pBufferCreateInfo,
14349 *pAllocationCreateInfo,
14357 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
14361 #if VMA_STATS_STRING_ENABLED 14362 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
14364 if(pAllocationInfo != VMA_NULL)
14366 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14371 allocator->FreeMemory(*pAllocation);
14372 *pAllocation = VK_NULL_HANDLE;
14373 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
14374 *pBuffer = VK_NULL_HANDLE;
14377 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
14378 *pBuffer = VK_NULL_HANDLE;
14389 VMA_ASSERT(allocator);
14391 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
14396 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
14398 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14400 #if VMA_RECORDING_ENABLED 14401 if(allocator->GetRecorder() != VMA_NULL)
14403 allocator->GetRecorder()->RecordDestroyBuffer(
14404 allocator->GetCurrentFrameIndex(),
14409 if(buffer != VK_NULL_HANDLE)
14411 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
14414 if(allocation != VK_NULL_HANDLE)
14416 allocator->FreeMemory(allocation);
14422 const VkImageCreateInfo* pImageCreateInfo,
14428 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
14430 VMA_DEBUG_LOG(
"vmaCreateImage");
14432 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14434 *pImage = VK_NULL_HANDLE;
14435 *pAllocation = VK_NULL_HANDLE;
14438 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
14439 allocator->m_hDevice,
14441 allocator->GetAllocationCallbacks(),
14445 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
14446 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
14447 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
14450 VkMemoryRequirements vkMemReq = {};
14451 bool requiresDedicatedAllocation =
false;
14452 bool prefersDedicatedAllocation =
false;
14453 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
14454 requiresDedicatedAllocation, prefersDedicatedAllocation);
14456 res = allocator->AllocateMemory(
14458 requiresDedicatedAllocation,
14459 prefersDedicatedAllocation,
14462 *pAllocationCreateInfo,
14466 #if VMA_RECORDING_ENABLED 14467 if(allocator->GetRecorder() != VMA_NULL)
14469 allocator->GetRecorder()->RecordCreateImage(
14470 allocator->GetCurrentFrameIndex(),
14472 *pAllocationCreateInfo,
14480 res = allocator->BindImageMemory(*pAllocation, *pImage);
14484 #if VMA_STATS_STRING_ENABLED 14485 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
14487 if(pAllocationInfo != VMA_NULL)
14489 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14494 allocator->FreeMemory(*pAllocation);
14495 *pAllocation = VK_NULL_HANDLE;
14496 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
14497 *pImage = VK_NULL_HANDLE;
14500 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
14501 *pImage = VK_NULL_HANDLE;
14512 VMA_ASSERT(allocator);
14514 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
14519 VMA_DEBUG_LOG(
"vmaDestroyImage");
14521 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14523 #if VMA_RECORDING_ENABLED 14524 if(allocator->GetRecorder() != VMA_NULL)
14526 allocator->GetRecorder()->RecordDestroyImage(
14527 allocator->GetCurrentFrameIndex(),
14532 if(image != VK_NULL_HANDLE)
14534 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
14536 if(allocation != VK_NULL_HANDLE)
14538 allocator->FreeMemory(allocation);
14542 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1567
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1868
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1624
uint32_t maxCpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on CPU side...
Definition: vk_mem_alloc.h:2592
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Deprecated. Compacts memory by moving allocations.
uint32_t allocationsLost
Number of allocations that became lost in the process of defragmentation.
Definition: vk_mem_alloc.h:2633
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
Definition: vk_mem_alloc.h:1598
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2190
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1579
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1825
Definition: vk_mem_alloc.h:1928
VmaDefragmentationFlags flags
Flags for defragmentation. Use VmaDefragmentationFlagBits enum.
Definition: vk_mem_alloc.h:2567
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1571
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2290
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1621
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2619
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:2079
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1468
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2171
Definition: vk_mem_alloc.h:1905
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1560
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1978
Definition: vk_mem_alloc.h:1852
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1633
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2107
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1686
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1618
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1856
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1758
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1576
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1757
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2623
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1650
VmaStatInfo total
Definition: vk_mem_alloc.h:1767
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2631
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1962
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2614
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1577
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1502
Represents main object of this library initialized.
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1627
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2121
Definition: vk_mem_alloc.h:2115
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1693
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2300
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1572
VkResult vmaDefragmentationBegin(VmaAllocator allocator, const VmaDefragmentationInfo2 *pInfo, VmaDefragmentationStats *pStats, VmaDefragmentationContext *pContext)
Begins defragmentation process.
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1596
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1999
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2141
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:2177
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1558
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2124
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VkDeviceSize maxGpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2597
VmaMemoryUsage
Definition: vk_mem_alloc.h:1803
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
VmaAllocation * pAllocations
Pointer to array of allocations that can be defragmented.
Definition: vk_mem_alloc.h:2576
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2609
size_t allocationCount
Number of allocations in pAllocations array.
Definition: vk_mem_alloc.h:2570
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2627
Definition: vk_mem_alloc.h:1842
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1986
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1575
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Deprecated. Optional configuration parameters to be passed to function vmaDefragment().
VkResult vmaDefragmentationEnd(VmaAllocator allocator, VmaDefragmentationContext context)
Ends defragmentation process.
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1763
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1508
VkFlags VmaDefragmentationFlags
Definition: vk_mem_alloc.h:2558
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
Definition: vk_mem_alloc.h:2556
Definition: vk_mem_alloc.h:1946
VkBool32 * pAllocationsChanged
Optional, output. Pointer to array that will be filled with information whether the allocation at cer...
Definition: vk_mem_alloc.h:2582
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1529
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1600
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1534
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2629
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1973
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2187
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1568
Definition: vk_mem_alloc.h:2537
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1746
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes. Optional.
Definition: vk_mem_alloc.h:2136
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1521
Definition: vk_mem_alloc.h:2111
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1912
Represents an opaque object that tracks a started defragmentation process.
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1759
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1525
Definition: vk_mem_alloc.h:1936
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2127
Definition: vk_mem_alloc.h:1851
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1574
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1968
Definition: vk_mem_alloc.h:1959
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1749
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1570
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2149
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1636
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2180
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1957
VkDeviceSize maxCpuBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places using ...
Definition: vk_mem_alloc.h:2587
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1992
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1674
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1765
Definition: vk_mem_alloc.h:2544
Set this flag to use memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1892
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1758
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1581
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1606
VmaDefragmentationFlagBits
Flags to be used in vmaDefragmentationBegin().
Definition: vk_mem_alloc.h:2533
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1523
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1580
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2163
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1573
Definition: vk_mem_alloc.h:1923
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of an Allocator to be created.
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1614
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2314
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1630
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1758
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1755
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2168
Parameters for defragmentation.
Definition: vk_mem_alloc.h:2564
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions...
Definition: vk_mem_alloc.h:1932
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:2295
Definition: vk_mem_alloc.h:1943
Definition: vk_mem_alloc.h:1955
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2625
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1566
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1753
Definition: vk_mem_alloc.h:1808
Definition: vk_mem_alloc.h:2549
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2117
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1603
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1751
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1578
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1582
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1879
A bit mask to extract only the ALGORITHM bits from the entire set of flags.
Definition: vk_mem_alloc.h:2552
Definition: vk_mem_alloc.h:1950
Definition: vk_mem_alloc.h:1835
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2309
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1556
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1569
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2096
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2276
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1940
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2061
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1759
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
Definition: vk_mem_alloc.h:1918
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1590
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1766
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2174
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1759
struct VmaDefragmentationInfo2 VmaDefragmentationInfo2
Parameters for defragmentation.
uint32_t maxGpuAllocationsToMove
Maximum number of allocations that can be moved to a different place using transfers on GPU side...
Definition: vk_mem_alloc.h:2602
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2281