23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1464 #ifndef VMA_RECORDING_ENABLED 1466 #define VMA_RECORDING_ENABLED 1 1468 #define VMA_RECORDING_ENABLED 0 1473 #define NOMINMAX // For windows.h 1476 #include <vulkan/vulkan.h> 1478 #if VMA_RECORDING_ENABLED 1479 #include <windows.h> 1482 #if !defined(VMA_DEDICATED_ALLOCATION) 1483 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1484 #define VMA_DEDICATED_ALLOCATION 1 1486 #define VMA_DEDICATED_ALLOCATION 0 1504 uint32_t memoryType,
1505 VkDeviceMemory memory,
1510 uint32_t memoryType,
1511 VkDeviceMemory memory,
1583 #if VMA_DEDICATED_ALLOCATION 1584 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1585 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1711 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1719 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1729 uint32_t memoryTypeIndex,
1730 VkMemoryPropertyFlags* pFlags);
1742 uint32_t frameIndex);
1775 #define VMA_STATS_STRING_ENABLED 1 1777 #if VMA_STATS_STRING_ENABLED 1784 char** ppStatsString,
1785 VkBool32 detailedMap);
1789 char* pStatsString);
1791 #endif // #if VMA_STATS_STRING_ENABLED 2020 uint32_t memoryTypeBits,
2022 uint32_t* pMemoryTypeIndex);
2038 const VkBufferCreateInfo* pBufferCreateInfo,
2040 uint32_t* pMemoryTypeIndex);
2056 const VkImageCreateInfo* pImageCreateInfo,
2058 uint32_t* pMemoryTypeIndex);
2230 size_t* pLostAllocationCount);
2329 const VkMemoryRequirements* pVkMemoryRequirements,
2591 size_t allocationCount,
2592 VkBool32* pAllocationsChanged,
2658 const VkBufferCreateInfo* pBufferCreateInfo,
2683 const VkImageCreateInfo* pImageCreateInfo,
2709 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 2712 #if defined(__cplusplus) && defined(__INTELLISENSE__) 2713 #define VMA_IMPLEMENTATION 2716 #ifdef VMA_IMPLEMENTATION 2717 #undef VMA_IMPLEMENTATION 2739 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 2740 #define VMA_STATIC_VULKAN_FUNCTIONS 1 2752 #if VMA_USE_STL_CONTAINERS 2753 #define VMA_USE_STL_VECTOR 1 2754 #define VMA_USE_STL_UNORDERED_MAP 1 2755 #define VMA_USE_STL_LIST 1 2758 #if VMA_USE_STL_VECTOR 2762 #if VMA_USE_STL_UNORDERED_MAP 2763 #include <unordered_map> 2766 #if VMA_USE_STL_LIST 2775 #include <algorithm> 2781 #define VMA_NULL nullptr 2784 #if defined(__APPLE__) || defined(__ANDROID__) 2786 void *aligned_alloc(
size_t alignment,
size_t size)
2789 if(alignment <
sizeof(
void*))
2791 alignment =
sizeof(
void*);
2795 if(posix_memalign(&pointer, alignment, size) == 0)
2809 #define VMA_ASSERT(expr) assert(expr) 2811 #define VMA_ASSERT(expr) 2817 #ifndef VMA_HEAVY_ASSERT 2819 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 2821 #define VMA_HEAVY_ASSERT(expr) 2825 #ifndef VMA_ALIGN_OF 2826 #define VMA_ALIGN_OF(type) (__alignof(type)) 2829 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 2831 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 2833 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 2837 #ifndef VMA_SYSTEM_FREE 2839 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 2841 #define VMA_SYSTEM_FREE(ptr) free(ptr) 2846 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 2850 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 2854 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 2858 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 2861 #ifndef VMA_DEBUG_LOG 2862 #define VMA_DEBUG_LOG(format, ...) 2872 #if VMA_STATS_STRING_ENABLED 2873 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
2875 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Formats an unsigned 64-bit value as decimal text into outStr.
// Delegates to snprintf, so the output is always NUL-terminated when strLen > 0
// and is silently truncated if the buffer is too small.
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", (unsigned long long)num);
}
// Renders a pointer value as text (implementation-defined "%p" format)
// into outStr; always NUL-terminated when strLen > 0.
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
2893 void Lock() { m_Mutex.lock(); }
2894 void Unlock() { m_Mutex.unlock(); }
2898 #define VMA_MUTEX VmaMutex 2909 #ifndef VMA_ATOMIC_UINT32 2910 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 2913 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 2918 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 2921 #ifndef VMA_DEBUG_ALIGNMENT 2926 #define VMA_DEBUG_ALIGNMENT (1) 2929 #ifndef VMA_DEBUG_MARGIN 2934 #define VMA_DEBUG_MARGIN (0) 2937 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 2942 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 2945 #ifndef VMA_DEBUG_DETECT_CORRUPTION 2951 #define VMA_DEBUG_DETECT_CORRUPTION (0) 2954 #ifndef VMA_DEBUG_GLOBAL_MUTEX 2959 #define VMA_DEBUG_GLOBAL_MUTEX (0) 2962 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 2967 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 2970 #ifndef VMA_SMALL_HEAP_MAX_SIZE 2971 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 2975 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 2976 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 2980 #ifndef VMA_CLASS_NO_COPY 2981 #define VMA_CLASS_NO_COPY(className) \ 2983 className(const className&) = delete; \ 2984 className& operator=(const className&) = delete; 2987 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
2990 static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;
2992 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED = 0xDC;
2993 static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
2999 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
3000 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (population count), using the
// classic SWAR parallel bit-summing technique: pairs, nibbles, bytes,
// halfwords are summed in place without a loop.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    // Fix: the block was missing this return statement — falling off the end
    // of a value-returning function is undefined behavior.
    return c;
}
// Rounds val up to the nearest multiple of align.
// Integer-division form, so it works for any positive align,
// not only powers of two.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return ((val + align - 1) / align) * align;
}
// Rounds val down to the nearest multiple of align
// (any positive align; relies on truncating integer division).
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    const T alignedValue = (val / align) * align;
    return alignedValue;
}
// Division with rounding to the nearest integer: adds half the divisor
// before truncating. Intended for non-negative integer operands.
template <typename T>
static inline T VmaRoundDiv(T x, T y)
{
    const T halfDivisor = y / (T)2;
    return (x + halfDivisor) / y;
}
// Returns true when x is a power of two, via the x & (x-1) trick.
// NOTE(review): this also returns true for x == 0 — callers presumably pass
// only non-zero values; confirm before tightening the check.
template <typename T>
inline bool VmaIsPow2(T x)
{
    return (x & (x - 1)) == 0;
}
3047 static inline uint32_t VmaNextPow2(uint32_t v)
3058 static inline uint64_t VmaNextPow2(uint64_t v)
3072 static inline uint32_t VmaPrevPow2(uint32_t v)
3082 static inline uint64_t VmaPrevPow2(uint64_t v)
3094 static inline bool VmaStrIsEmpty(
const char* pStr)
3096 return pStr == VMA_NULL || *pStr ==
'\0';
3099 static const char* VmaAlgorithmToStr(uint32_t algorithm)
3117 template<
typename Iterator,
typename Compare>
3118 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
3120 Iterator centerValue = end; --centerValue;
3121 Iterator insertIndex = beg;
3122 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
3124 if(cmp(*memTypeIndex, *centerValue))
3126 if(insertIndex != memTypeIndex)
3128 VMA_SWAP(*memTypeIndex, *insertIndex);
3133 if(insertIndex != centerValue)
3135 VMA_SWAP(*insertIndex, *centerValue);
3140 template<
typename Iterator,
typename Compare>
3141 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
3145 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
3146 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
3147 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
3151 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 3153 #endif // #ifndef VMA_SORT 3162 static inline bool VmaBlocksOnSamePage(
3163 VkDeviceSize resourceAOffset,
3164 VkDeviceSize resourceASize,
3165 VkDeviceSize resourceBOffset,
3166 VkDeviceSize pageSize)
3168 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
3169 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
3170 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
3171 VkDeviceSize resourceBStart = resourceBOffset;
3172 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
3173 return resourceAEndPage == resourceBStartPage;
3176 enum VmaSuballocationType
3178 VMA_SUBALLOCATION_TYPE_FREE = 0,
3179 VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
3180 VMA_SUBALLOCATION_TYPE_BUFFER = 2,
3181 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
3182 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
3183 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
3184 VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
3193 static inline bool VmaIsBufferImageGranularityConflict(
3194 VmaSuballocationType suballocType1,
3195 VmaSuballocationType suballocType2)
3197 if(suballocType1 > suballocType2)
3199 VMA_SWAP(suballocType1, suballocType2);
3202 switch(suballocType1)
3204 case VMA_SUBALLOCATION_TYPE_FREE:
3206 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
3208 case VMA_SUBALLOCATION_TYPE_BUFFER:
3210 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3211 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3212 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
3214 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
3215 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
3216 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3217 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
3219 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
3220 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
3228 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
3230 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
3231 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3232 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
3234 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
3238 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
3240 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
3241 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
3242 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
3244 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
3255 VMA_CLASS_NO_COPY(VmaMutexLock)
3257 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
3258 m_pMutex(useMutex ? &mutex : VMA_NULL)
3275 VMA_MUTEX* m_pMutex;
3278 #if VMA_DEBUG_GLOBAL_MUTEX 3279 static VMA_MUTEX gDebugGlobalMutex;
3280 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 3282 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 3286 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
3297 template <
typename CmpLess,
typename IterT,
typename KeyT>
3298 static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end,
const KeyT &key, CmpLess cmp)
3300 size_t down = 0, up = (end - beg);
3303 const size_t mid = (down + up) / 2;
3304 if(cmp(*(beg+mid), key))
3319 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
3321 if((pAllocationCallbacks != VMA_NULL) &&
3322 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
3324 return (*pAllocationCallbacks->pfnAllocation)(
3325 pAllocationCallbacks->pUserData,
3328 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
3332 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
3336 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
3338 if((pAllocationCallbacks != VMA_NULL) &&
3339 (pAllocationCallbacks->pfnFree != VMA_NULL))
3341 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
3345 VMA_SYSTEM_FREE(ptr);
3349 template<
typename T>
3350 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
3352 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
3355 template<
typename T>
3356 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
3358 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
3361 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 3363 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 3365 template<
typename T>
3366 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
3369 VmaFree(pAllocationCallbacks, ptr);
3372 template<
typename T>
3373 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
3377 for(
size_t i = count; i--; )
3381 VmaFree(pAllocationCallbacks, ptr);
3386 template<
typename T>
3387 class VmaStlAllocator
3390 const VkAllocationCallbacks*
const m_pCallbacks;
3391 typedef T value_type;
3393 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
3394 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
3396 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
3397 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
3399 template<
typename U>
3400 bool operator==(
const VmaStlAllocator<U>& rhs)
const 3402 return m_pCallbacks == rhs.m_pCallbacks;
3404 template<
typename U>
3405 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 3407 return m_pCallbacks != rhs.m_pCallbacks;
3410 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
3413 #if VMA_USE_STL_VECTOR 3415 #define VmaVector std::vector 3417 template<
typename T,
typename allocatorT>
3418 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
3420 vec.insert(vec.begin() + index, item);
3423 template<
typename T,
typename allocatorT>
3424 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
3426 vec.erase(vec.begin() + index);
3429 #else // #if VMA_USE_STL_VECTOR 3434 template<
typename T,
typename AllocatorT>
3438 typedef T value_type;
3440 VmaVector(
const AllocatorT& allocator) :
3441 m_Allocator(allocator),
3448 VmaVector(
size_t count,
const AllocatorT& allocator) :
3449 m_Allocator(allocator),
3450 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
3456 VmaVector(
const VmaVector<T, AllocatorT>& src) :
3457 m_Allocator(src.m_Allocator),
3458 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
3459 m_Count(src.m_Count),
3460 m_Capacity(src.m_Count)
3464 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
3470 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3473 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
3477 resize(rhs.m_Count);
3480 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
3486 bool empty()
const {
return m_Count == 0; }
3487 size_t size()
const {
return m_Count; }
3488 T* data() {
return m_pArray; }
3489 const T* data()
const {
return m_pArray; }
3491 T& operator[](
size_t index)
3493 VMA_HEAVY_ASSERT(index < m_Count);
3494 return m_pArray[index];
3496 const T& operator[](
size_t index)
const 3498 VMA_HEAVY_ASSERT(index < m_Count);
3499 return m_pArray[index];
3504 VMA_HEAVY_ASSERT(m_Count > 0);
3507 const T& front()
const 3509 VMA_HEAVY_ASSERT(m_Count > 0);
3514 VMA_HEAVY_ASSERT(m_Count > 0);
3515 return m_pArray[m_Count - 1];
3517 const T& back()
const 3519 VMA_HEAVY_ASSERT(m_Count > 0);
3520 return m_pArray[m_Count - 1];
3523 void reserve(
size_t newCapacity,
bool freeMemory =
false)
3525 newCapacity = VMA_MAX(newCapacity, m_Count);
3527 if((newCapacity < m_Capacity) && !freeMemory)
3529 newCapacity = m_Capacity;
3532 if(newCapacity != m_Capacity)
3534 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
3537 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
3539 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3540 m_Capacity = newCapacity;
3541 m_pArray = newArray;
3545 void resize(
size_t newCount,
bool freeMemory =
false)
3547 size_t newCapacity = m_Capacity;
3548 if(newCount > m_Capacity)
3550 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
3554 newCapacity = newCount;
3557 if(newCapacity != m_Capacity)
3559 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
3560 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
3561 if(elementsToCopy != 0)
3563 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
3565 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3566 m_Capacity = newCapacity;
3567 m_pArray = newArray;
3573 void clear(
bool freeMemory =
false)
3575 resize(0, freeMemory);
3578 void insert(
size_t index,
const T& src)
3580 VMA_HEAVY_ASSERT(index <= m_Count);
3581 const size_t oldCount = size();
3582 resize(oldCount + 1);
3583 if(index < oldCount)
3585 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
3587 m_pArray[index] = src;
3590 void remove(
size_t index)
3592 VMA_HEAVY_ASSERT(index < m_Count);
3593 const size_t oldCount = size();
3594 if(index < oldCount - 1)
3596 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
3598 resize(oldCount - 1);
3601 void push_back(
const T& src)
3603 const size_t newIndex = size();
3604 resize(newIndex + 1);
3605 m_pArray[newIndex] = src;
3610 VMA_HEAVY_ASSERT(m_Count > 0);
3614 void push_front(
const T& src)
3621 VMA_HEAVY_ASSERT(m_Count > 0);
3625 typedef T* iterator;
3627 iterator begin() {
return m_pArray; }
3628 iterator end() {
return m_pArray + m_Count; }
3631 AllocatorT m_Allocator;
3637 template<
typename T,
typename allocatorT>
3638 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
3640 vec.insert(index, item);
3643 template<
typename T,
typename allocatorT>
3644 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
3649 #endif // #if VMA_USE_STL_VECTOR 3651 template<
typename CmpLess,
typename VectorT>
3652 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
3654 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3656 vector.data() + vector.size(),
3658 CmpLess()) - vector.data();
3659 VmaVectorInsert(vector, indexToInsert, value);
3660 return indexToInsert;
3663 template<
typename CmpLess,
typename VectorT>
3664 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
3667 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
3672 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
3674 size_t indexToRemove = it - vector.begin();
3675 VmaVectorRemove(vector, indexToRemove);
3681 template<
typename CmpLess,
typename IterT,
typename KeyT>
3682 IterT VmaVectorFindSorted(
const IterT& beg,
const IterT& end,
const KeyT& value)
3685 IterT it = VmaBinaryFindFirstNotLess<CmpLess, IterT, KeyT>(
3686 beg, end, value, comparator);
3688 (!comparator(*it, value) && !comparator(value, *it)))
3703 template<
typename T>
3704 class VmaPoolAllocator
3706 VMA_CLASS_NO_COPY(VmaPoolAllocator)
3708 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
3709 ~VmaPoolAllocator();
3717 uint32_t NextFreeIndex;
3724 uint32_t FirstFreeIndex;
3727 const VkAllocationCallbacks* m_pAllocationCallbacks;
3728 size_t m_ItemsPerBlock;
3729 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
3731 ItemBlock& CreateNewBlock();
3734 template<
typename T>
3735 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
3736 m_pAllocationCallbacks(pAllocationCallbacks),
3737 m_ItemsPerBlock(itemsPerBlock),
3738 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
3740 VMA_ASSERT(itemsPerBlock > 0);
3743 template<
typename T>
3744 VmaPoolAllocator<T>::~VmaPoolAllocator()
3749 template<
typename T>
3750 void VmaPoolAllocator<T>::Clear()
3752 for(
size_t i = m_ItemBlocks.size(); i--; )
3753 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
3754 m_ItemBlocks.clear();
3757 template<
typename T>
3758 T* VmaPoolAllocator<T>::Alloc()
3760 for(
size_t i = m_ItemBlocks.size(); i--; )
3762 ItemBlock& block = m_ItemBlocks[i];
3764 if(block.FirstFreeIndex != UINT32_MAX)
3766 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
3767 block.FirstFreeIndex = pItem->NextFreeIndex;
3768 return &pItem->Value;
3773 ItemBlock& newBlock = CreateNewBlock();
3774 Item*
const pItem = &newBlock.pItems[0];
3775 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
3776 return &pItem->Value;
3779 template<
typename T>
3780 void VmaPoolAllocator<T>::Free(T* ptr)
3783 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
3785 ItemBlock& block = m_ItemBlocks[i];
3789 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
3792 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
3794 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
3795 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
3796 block.FirstFreeIndex = index;
3800 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
3803 template<
typename T>
3804 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
3806 ItemBlock newBlock = {
3807 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
3809 m_ItemBlocks.push_back(newBlock);
3812 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
3813 newBlock.pItems[i].NextFreeIndex = i + 1;
3814 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
3815 return m_ItemBlocks.back();
3821 #if VMA_USE_STL_LIST 3823 #define VmaList std::list 3825 #else // #if VMA_USE_STL_LIST 3827 template<
typename T>
3836 template<
typename T>
3839 VMA_CLASS_NO_COPY(VmaRawList)
3841 typedef VmaListItem<T> ItemType;
3843 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
3847 size_t GetCount()
const {
return m_Count; }
3848 bool IsEmpty()
const {
return m_Count == 0; }
3850 ItemType* Front() {
return m_pFront; }
3851 const ItemType* Front()
const {
return m_pFront; }
3852 ItemType* Back() {
return m_pBack; }
3853 const ItemType* Back()
const {
return m_pBack; }
3855 ItemType* PushBack();
3856 ItemType* PushFront();
3857 ItemType* PushBack(
const T& value);
3858 ItemType* PushFront(
const T& value);
3863 ItemType* InsertBefore(ItemType* pItem);
3865 ItemType* InsertAfter(ItemType* pItem);
3867 ItemType* InsertBefore(ItemType* pItem,
const T& value);
3868 ItemType* InsertAfter(ItemType* pItem,
const T& value);
3870 void Remove(ItemType* pItem);
3873 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
3874 VmaPoolAllocator<ItemType> m_ItemAllocator;
3880 template<
typename T>
3881 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
3882 m_pAllocationCallbacks(pAllocationCallbacks),
3883 m_ItemAllocator(pAllocationCallbacks, 128),
3890 template<
typename T>
3891 VmaRawList<T>::~VmaRawList()
3897 template<
typename T>
3898 void VmaRawList<T>::Clear()
3900 if(IsEmpty() ==
false)
3902 ItemType* pItem = m_pBack;
3903 while(pItem != VMA_NULL)
3905 ItemType*
const pPrevItem = pItem->pPrev;
3906 m_ItemAllocator.Free(pItem);
3909 m_pFront = VMA_NULL;
3915 template<
typename T>
3916 VmaListItem<T>* VmaRawList<T>::PushBack()
3918 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3919 pNewItem->pNext = VMA_NULL;
3922 pNewItem->pPrev = VMA_NULL;
3923 m_pFront = pNewItem;
3929 pNewItem->pPrev = m_pBack;
3930 m_pBack->pNext = pNewItem;
3937 template<
typename T>
3938 VmaListItem<T>* VmaRawList<T>::PushFront()
3940 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3941 pNewItem->pPrev = VMA_NULL;
3944 pNewItem->pNext = VMA_NULL;
3945 m_pFront = pNewItem;
3951 pNewItem->pNext = m_pFront;
3952 m_pFront->pPrev = pNewItem;
3953 m_pFront = pNewItem;
3959 template<
typename T>
3960 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
3962 ItemType*
const pNewItem = PushBack();
3963 pNewItem->Value = value;
3967 template<
typename T>
3968 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
3970 ItemType*
const pNewItem = PushFront();
3971 pNewItem->Value = value;
3975 template<
typename T>
3976 void VmaRawList<T>::PopBack()
3978 VMA_HEAVY_ASSERT(m_Count > 0);
3979 ItemType*
const pBackItem = m_pBack;
3980 ItemType*
const pPrevItem = pBackItem->pPrev;
3981 if(pPrevItem != VMA_NULL)
3983 pPrevItem->pNext = VMA_NULL;
3985 m_pBack = pPrevItem;
3986 m_ItemAllocator.Free(pBackItem);
3990 template<
typename T>
3991 void VmaRawList<T>::PopFront()
3993 VMA_HEAVY_ASSERT(m_Count > 0);
3994 ItemType*
const pFrontItem = m_pFront;
3995 ItemType*
const pNextItem = pFrontItem->pNext;
3996 if(pNextItem != VMA_NULL)
3998 pNextItem->pPrev = VMA_NULL;
4000 m_pFront = pNextItem;
4001 m_ItemAllocator.Free(pFrontItem);
4005 template<
typename T>
4006 void VmaRawList<T>::Remove(ItemType* pItem)
4008 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
4009 VMA_HEAVY_ASSERT(m_Count > 0);
4011 if(pItem->pPrev != VMA_NULL)
4013 pItem->pPrev->pNext = pItem->pNext;
4017 VMA_HEAVY_ASSERT(m_pFront == pItem);
4018 m_pFront = pItem->pNext;
4021 if(pItem->pNext != VMA_NULL)
4023 pItem->pNext->pPrev = pItem->pPrev;
4027 VMA_HEAVY_ASSERT(m_pBack == pItem);
4028 m_pBack = pItem->pPrev;
4031 m_ItemAllocator.Free(pItem);
4035 template<
typename T>
4036 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
4038 if(pItem != VMA_NULL)
4040 ItemType*
const prevItem = pItem->pPrev;
4041 ItemType*
const newItem = m_ItemAllocator.Alloc();
4042 newItem->pPrev = prevItem;
4043 newItem->pNext = pItem;
4044 pItem->pPrev = newItem;
4045 if(prevItem != VMA_NULL)
4047 prevItem->pNext = newItem;
4051 VMA_HEAVY_ASSERT(m_pFront == pItem);
4061 template<
typename T>
4062 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
4064 if(pItem != VMA_NULL)
4066 ItemType*
const nextItem = pItem->pNext;
4067 ItemType*
const newItem = m_ItemAllocator.Alloc();
4068 newItem->pNext = nextItem;
4069 newItem->pPrev = pItem;
4070 pItem->pNext = newItem;
4071 if(nextItem != VMA_NULL)
4073 nextItem->pPrev = newItem;
4077 VMA_HEAVY_ASSERT(m_pBack == pItem);
4087 template<
typename T>
4088 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
4090 ItemType*
const newItem = InsertBefore(pItem);
4091 newItem->Value = value;
4095 template<
typename T>
4096 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
4098 ItemType*
const newItem = InsertAfter(pItem);
4099 newItem->Value = value;
4103 template<
typename T,
typename AllocatorT>
4106 VMA_CLASS_NO_COPY(VmaList)
4117 T& operator*()
const 4119 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4120 return m_pItem->Value;
4122 T* operator->()
const 4124 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4125 return &m_pItem->Value;
4128 iterator& operator++()
4130 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4131 m_pItem = m_pItem->pNext;
4134 iterator& operator--()
4136 if(m_pItem != VMA_NULL)
4138 m_pItem = m_pItem->pPrev;
4142 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4143 m_pItem = m_pList->Back();
4148 iterator operator++(
int)
4150 iterator result = *
this;
4154 iterator operator--(
int)
4156 iterator result = *
this;
4161 bool operator==(
const iterator& rhs)
const 4163 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4164 return m_pItem == rhs.m_pItem;
4166 bool operator!=(
const iterator& rhs)
const 4168 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4169 return m_pItem != rhs.m_pItem;
4173 VmaRawList<T>* m_pList;
4174 VmaListItem<T>* m_pItem;
4176 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
4182 friend class VmaList<T, AllocatorT>;
4185 class const_iterator
4194 const_iterator(
const iterator& src) :
4195 m_pList(src.m_pList),
4196 m_pItem(src.m_pItem)
4200 const T& operator*()
const 4202 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4203 return m_pItem->Value;
4205 const T* operator->()
const 4207 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4208 return &m_pItem->Value;
4211 const_iterator& operator++()
4213 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
4214 m_pItem = m_pItem->pNext;
4217 const_iterator& operator--()
4219 if(m_pItem != VMA_NULL)
4221 m_pItem = m_pItem->pPrev;
4225 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
4226 m_pItem = m_pList->Back();
4231 const_iterator operator++(
int)
4233 const_iterator result = *
this;
4237 const_iterator operator--(
int)
4239 const_iterator result = *
this;
4244 bool operator==(
const const_iterator& rhs)
const 4246 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4247 return m_pItem == rhs.m_pItem;
4249 bool operator!=(
const const_iterator& rhs)
const 4251 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
4252 return m_pItem != rhs.m_pItem;
4256 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
4262 const VmaRawList<T>* m_pList;
4263 const VmaListItem<T>* m_pItem;
4265 friend class VmaList<T, AllocatorT>;
4268 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
4270 bool empty()
const {
return m_RawList.IsEmpty(); }
4271 size_t size()
const {
return m_RawList.GetCount(); }
4273 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
4274 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
4276 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
4277 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
4279 void clear() { m_RawList.Clear(); }
4280 void push_back(
const T& value) { m_RawList.PushBack(value); }
4281 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
4282 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
4285 VmaRawList<T> m_RawList;
4288 #endif // #if VMA_USE_STL_LIST 4296 #if VMA_USE_STL_UNORDERED_MAP 4298 #define VmaPair std::pair 4300 #define VMA_MAP_TYPE(KeyT, ValueT) \ 4301 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 4303 #else // #if VMA_USE_STL_UNORDERED_MAP 4305 template<
typename T1,
typename T2>
4311 VmaPair() : first(), second() { }
4312 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
4318 template<
typename KeyT,
typename ValueT>
4322 typedef VmaPair<KeyT, ValueT> PairType;
4323 typedef PairType* iterator;
4325 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
4327 iterator begin() {
return m_Vector.begin(); }
4328 iterator end() {
return m_Vector.end(); }
4330 void insert(
const PairType& pair);
4331 iterator find(
const KeyT& key);
4332 void erase(iterator it);
4335 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
4338 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 4340 template<
typename FirstT,
typename SecondT>
4341 struct VmaPairFirstLess
4343 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 4345 return lhs.first < rhs.first;
4347 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 4349 return lhs.first < rhsFirst;
4353 template<
typename KeyT,
typename ValueT>
4354 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
4356 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
4358 m_Vector.data() + m_Vector.size(),
4360 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
4361 VmaVectorInsert(m_Vector, indexToInsert, pair);
4364 template<
typename KeyT,
typename ValueT>
4365 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
4367 PairType* it = VmaBinaryFindFirstNotLess(
4369 m_Vector.data() + m_Vector.size(),
4371 VmaPairFirstLess<KeyT, ValueT>());
4372 if((it != m_Vector.end()) && (it->first == key))
4378 return m_Vector.end();
4382 template<
typename KeyT,
typename ValueT>
4383 void VmaMap<KeyT, ValueT>::erase(iterator it)
4385 VmaVectorRemove(m_Vector, it - m_Vector.begin());
4388 #endif // #if VMA_USE_STL_UNORDERED_MAP 4394 class VmaDeviceMemoryBlock;
4396 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
// Internal representation of a single allocation. Lives either inside a
// VmaDeviceMemoryBlock (ALLOCATION_TYPE_BLOCK, state in m_BlockAllocation)
// or as its own dedicated VkDeviceMemory (ALLOCATION_TYPE_DEDICATED, state
// in m_DedicatedAllocation). NOTE(review): this declaration is a mangled
// extraction — interior lines (braces, some members) were dropped; the
// embedded numbers are the original file's line numbers.
4398 struct VmaAllocation_T
4400 VMA_CLASS_NO_COPY(VmaAllocation_T)
// High bit of m_MapCount marks a persistently mapped allocation.
4402 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
4406 FLAG_USER_DATA_STRING = 0x01,
4410 enum ALLOCATION_TYPE
4412 ALLOCATION_TYPE_NONE,
4413 ALLOCATION_TYPE_BLOCK,
4414 ALLOCATION_TYPE_DEDICATED,
// Constructor: starts as ALLOCATION_TYPE_NONE; a later Init* call selects the kind.
4417 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
4420 m_pUserData(VMA_NULL),
4421 m_LastUseFrameIndex(currentFrameIndex),
4422 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
4423 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
4425 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
4427 #if VMA_STATS_STRING_ENABLED 4428 m_CreationFrameIndex = currentFrameIndex;
4429 m_BufferImageUsage = 0;
// Destructor assertions: must be unmapped and user data must be cleared first.
4435 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4438 VMA_ASSERT(m_pUserData == VMA_NULL);
// Initializes this allocation as a suballocation of `block`.
4441 void InitBlockAllocation(
4443 VmaDeviceMemoryBlock* block,
4444 VkDeviceSize offset,
4445 VkDeviceSize alignment,
4447 VmaSuballocationType suballocationType,
4451 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4452 VMA_ASSERT(block != VMA_NULL);
4453 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4454 m_Alignment = alignment;
4456 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4457 m_SuballocationType = (uint8_t)suballocationType;
4458 m_BlockAllocation.m_hPool = hPool;
4459 m_BlockAllocation.m_Block = block;
4460 m_BlockAllocation.m_Offset = offset;
4461 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
// NOTE(review): the declarator of the "init lost" variant (original ~4464)
// was dropped in extraction; the body below initializes a lost placeholder.
4466 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4467 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
4468 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4469 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
4470 m_BlockAllocation.m_Block = VMA_NULL;
4471 m_BlockAllocation.m_Offset = 0;
4472 m_BlockAllocation.m_CanBecomeLost =
true;
4475 void ChangeBlockAllocation(
4477 VmaDeviceMemoryBlock* block,
4478 VkDeviceSize offset);
// Initializes this allocation as its own dedicated VkDeviceMemory.
4481 void InitDedicatedAllocation(
4482 uint32_t memoryTypeIndex,
4483 VkDeviceMemory hMemory,
4484 VmaSuballocationType suballocationType,
4488 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4489 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
4490 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
4493 m_SuballocationType = (uint8_t)suballocationType;
4494 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4495 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
4496 m_DedicatedAllocation.m_hMemory = hMemory;
4497 m_DedicatedAllocation.m_pMappedData = pMappedData;
// Simple accessors.
4500 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
4501 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
4502 VkDeviceSize GetSize()
const {
return m_Size; }
4503 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
4504 void* GetUserData()
const {
return m_pUserData; }
4505 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
4506 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
// Valid only for ALLOCATION_TYPE_BLOCK (asserted).
4508 VmaDeviceMemoryBlock* GetBlock()
const 4510 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4511 return m_BlockAllocation.m_Block;
4513 VkDeviceSize GetOffset()
const;
4514 VkDeviceMemory GetMemory()
const;
4515 uint32_t GetMemoryTypeIndex()
const;
4516 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
4517 void* GetMappedData()
const;
4518 bool CanBecomeLost()
const;
// Lost-allocation tracking uses an atomic frame index.
4521 uint32_t GetLastUseFrameIndex()
const 4523 return m_LastUseFrameIndex.load();
4525 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
4527 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
4537 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4539 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
4541 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
4552 void BlockAllocMap();
4553 void BlockAllocUnmap();
4554 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
4557 #if VMA_STATS_STRING_ENABLED 4558 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
4559 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
4561 void InitBufferImageUsage(uint32_t bufferImageUsage)
4563 VMA_ASSERT(m_BufferImageUsage == 0);
4564 m_BufferImageUsage = bufferImageUsage;
4567 void PrintParameters(
class VmaJsonWriter& json)
const;
// Data members (some, e.g. m_MapCount/m_Flags declarations, were dropped in extraction).
4571 VkDeviceSize m_Alignment;
4572 VkDeviceSize m_Size;
4574 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
4576 uint8_t m_SuballocationType;
// State for ALLOCATION_TYPE_BLOCK.
4583 struct BlockAllocation
4586 VmaDeviceMemoryBlock* m_Block;
4587 VkDeviceSize m_Offset;
4588 bool m_CanBecomeLost;
// State for ALLOCATION_TYPE_DEDICATED.
4592 struct DedicatedAllocation
4594 uint32_t m_MemoryTypeIndex;
4595 VkDeviceMemory m_hMemory;
4596 void* m_pMappedData;
4602 BlockAllocation m_BlockAllocation;
4604 DedicatedAllocation m_DedicatedAllocation;
4607 #if VMA_STATS_STRING_ENABLED 4608 uint32_t m_CreationFrameIndex;
4609 uint32_t m_BufferImageUsage;
// One range (used or free) inside a device memory block.
// NOTE(review): members between `offset` and `type` (original lines
// 4622-4623 — presumably the range size and the owning allocation handle)
// appear to have been dropped in extraction; verify against upstream.
4619 struct VmaSuballocation
4621 VkDeviceSize offset;
4624 VmaSuballocationType type;
4628 struct VmaSuballocationOffsetLess
4630 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4632 return lhs.offset < rhs.offset;
4635 struct VmaSuballocationOffsetGreater
4637 bool operator()(
const VmaSuballocation& lhs,
const VmaSuballocation& rhs)
const 4639 return lhs.offset > rhs.offset;
4643 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Penalty (in bytes, 1 MiB) added per allocation that must be made lost;
// used by VmaAllocationRequest::CalcCost() to compare candidate requests.
4646 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
4661 struct VmaAllocationRequest
4663 VkDeviceSize offset;
4664 VkDeviceSize sumFreeSize;
4665 VkDeviceSize sumItemSize;
4666 VmaSuballocationList::iterator item;
4667 size_t itemsToMakeLostCount;
4670 VkDeviceSize CalcCost()
const 4672 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// Abstract base for per-block suballocation bookkeeping. Concrete strategies:
// VmaBlockMetadata_Generic, _Linear, _Buddy. NOTE(review): mangled extraction —
// the constructor, Alloc() declarator and some lines were dropped.
4680 class VmaBlockMetadata
4684 virtual ~VmaBlockMetadata() { }
4685 virtual void Init(VkDeviceSize size) { m_Size = size; }
// Validation/statistics interface implemented by each strategy.
4688 virtual bool Validate()
const = 0;
4689 VkDeviceSize GetSize()
const {
return m_Size; }
4690 virtual size_t GetAllocationCount()
const = 0;
4691 virtual VkDeviceSize GetSumFreeSize()
const = 0;
4692 virtual VkDeviceSize GetUnusedRangeSizeMax()
const = 0;
4694 virtual bool IsEmpty()
const = 0;
4696 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const = 0;
4698 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const = 0;
4700 #if VMA_STATS_STRING_ENABLED 4701 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const = 0;
// Tries to find a place for an allocation; fills *pAllocationRequest on success.
4707 virtual bool CreateAllocationRequest(
4708 uint32_t currentFrameIndex,
4709 uint32_t frameInUseCount,
4710 VkDeviceSize bufferImageGranularity,
4711 VkDeviceSize allocSize,
4712 VkDeviceSize allocAlignment,
4714 VmaSuballocationType allocType,
4715 bool canMakeOtherLost,
4717 VmaAllocationRequest* pAllocationRequest) = 0;
4719 virtual bool MakeRequestedAllocationsLost(
4720 uint32_t currentFrameIndex,
4721 uint32_t frameInUseCount,
4722 VmaAllocationRequest* pAllocationRequest) = 0;
4724 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount) = 0;
4726 virtual VkResult CheckCorruption(
const void* pBlockData) = 0;
// NOTE(review): these are parameters of the pure-virtual Alloc() whose
// declarator line (original ~4729) was dropped in extraction.
4730 const VmaAllocationRequest& request,
4731 VmaSuballocationType type,
4732 VkDeviceSize allocSize,
4738 virtual void FreeAtOffset(VkDeviceSize offset) = 0;
4741 const VkAllocationCallbacks* GetAllocationCallbacks()
const {
return m_pAllocationCallbacks; }
// JSON-dump helpers shared by the derived classes' PrintDetailedMap().
4743 #if VMA_STATS_STRING_ENABLED 4744 void PrintDetailedMap_Begin(
class VmaJsonWriter& json,
4745 VkDeviceSize unusedBytes,
4746 size_t allocationCount,
4747 size_t unusedRangeCount)
const;
4748 void PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
4749 VkDeviceSize offset,
4751 void PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
4752 VkDeviceSize offset,
4753 VkDeviceSize size)
const;
4754 void PrintDetailedMap_End(
class VmaJsonWriter& json)
const;
4758 VkDeviceSize m_Size;
4759 const VkAllocationCallbacks* m_pAllocationCallbacks;
// VMA_VALIDATE: asserts and (in the dropped remainder) returns false on failure.
// VmaBlockMetadata_Generic: default strategy — a sorted list of suballocations
// plus a by-size index of free ranges for best-fit search.
4762 #define VMA_VALIDATE(cond) do { if(!(cond)) { \ 4763 VMA_ASSERT(0 && "Validation failed: " #cond); \ 4767 class VmaBlockMetadata_Generic :
public VmaBlockMetadata
4769 VMA_CLASS_NO_COPY(VmaBlockMetadata_Generic)
4772 virtual ~VmaBlockMetadata_Generic();
4773 virtual void Init(VkDeviceSize size);
4775 virtual bool Validate()
const;
// Used allocations = all suballocations minus the free ones.
4776 virtual size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
4777 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4778 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
4779 virtual bool IsEmpty()
const;
4781 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4782 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
4784 #if VMA_STATS_STRING_ENABLED 4785 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
4788 virtual bool CreateAllocationRequest(
4789 uint32_t currentFrameIndex,
4790 uint32_t frameInUseCount,
4791 VkDeviceSize bufferImageGranularity,
4792 VkDeviceSize allocSize,
4793 VkDeviceSize allocAlignment,
4795 VmaSuballocationType allocType,
4796 bool canMakeOtherLost,
4798 VmaAllocationRequest* pAllocationRequest);
4800 virtual bool MakeRequestedAllocationsLost(
4801 uint32_t currentFrameIndex,
4802 uint32_t frameInUseCount,
4803 VmaAllocationRequest* pAllocationRequest);
4805 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4807 virtual VkResult CheckCorruption(
const void* pBlockData);
// NOTE(review): parameters of virtual Alloc(); declarator dropped in extraction.
4810 const VmaAllocationRequest& request,
4811 VmaSuballocationType type,
4812 VkDeviceSize allocSize,
4817 virtual void FreeAtOffset(VkDeviceSize offset);
4820 uint32_t m_FreeCount;
4821 VkDeviceSize m_SumFreeSize;
4822 VmaSuballocationList m_Suballocations;
// Free suballocations sorted by size, for binary-search best-fit.
4825 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
4827 bool ValidateFreeSuballocationList()
const;
// Checks whether an allocation can be placed at suballocItem; outputs offset,
// cost of allocations that would have to be made lost, etc.
4831 bool CheckAllocation(
4832 uint32_t currentFrameIndex,
4833 uint32_t frameInUseCount,
4834 VkDeviceSize bufferImageGranularity,
4835 VkDeviceSize allocSize,
4836 VkDeviceSize allocAlignment,
4837 VmaSuballocationType allocType,
4838 VmaSuballocationList::const_iterator suballocItem,
4839 bool canMakeOtherLost,
4840 VkDeviceSize* pOffset,
4841 size_t* itemsToMakeLostCount,
4842 VkDeviceSize* pSumFreeSize,
4843 VkDeviceSize* pSumItemSize)
const;
4845 void MergeFreeWithNext(VmaSuballocationList::iterator item);
4849 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
4852 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
4855 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// Linear strategy: allocations are appended; two suballocation vectors act as
// either a ring buffer or a double stack (see SECOND_VECTOR_MODE).
4936 class VmaBlockMetadata_Linear :
public VmaBlockMetadata
4938 VMA_CLASS_NO_COPY(VmaBlockMetadata_Linear)
4941 virtual ~VmaBlockMetadata_Linear();
4942 virtual void Init(VkDeviceSize size);
4944 virtual bool Validate()
const;
4945 virtual size_t GetAllocationCount()
const;
4946 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4947 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
4948 virtual bool IsEmpty()
const {
return GetAllocationCount() == 0; }
4950 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4951 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
4953 #if VMA_STATS_STRING_ENABLED 4954 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
4957 virtual bool CreateAllocationRequest(
4958 uint32_t currentFrameIndex,
4959 uint32_t frameInUseCount,
4960 VkDeviceSize bufferImageGranularity,
4961 VkDeviceSize allocSize,
4962 VkDeviceSize allocAlignment,
4964 VmaSuballocationType allocType,
4965 bool canMakeOtherLost,
4967 VmaAllocationRequest* pAllocationRequest);
4969 virtual bool MakeRequestedAllocationsLost(
4970 uint32_t currentFrameIndex,
4971 uint32_t frameInUseCount,
4972 VmaAllocationRequest* pAllocationRequest);
4974 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4976 virtual VkResult CheckCorruption(
const void* pBlockData);
// NOTE(review): parameters of virtual Alloc(); declarator dropped in extraction.
4979 const VmaAllocationRequest& request,
4980 VmaSuballocationType type,
4981 VkDeviceSize allocSize,
4986 virtual void FreeAtOffset(VkDeviceSize offset);
4996 typedef VmaVector< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > SuballocationVectorType;
// How the second vector is being used relative to the first.
4998 enum SECOND_VECTOR_MODE
5000 SECOND_VECTOR_EMPTY,
5005 SECOND_VECTOR_RING_BUFFER,
5011 SECOND_VECTOR_DOUBLE_STACK,
5014 VkDeviceSize m_SumFreeSize;
5015 SuballocationVectorType m_Suballocations0, m_Suballocations1;
5016 uint32_t m_1stVectorIndex;
5017 SECOND_VECTOR_MODE m_2ndVectorMode;
// m_1stVectorIndex selects which physical vector is currently "1st".
5019 SuballocationVectorType& AccessSuballocations1st() {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5020 SuballocationVectorType& AccessSuballocations2nd() {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
5021 const SuballocationVectorType& AccessSuballocations1st()
const {
return m_1stVectorIndex ? m_Suballocations1 : m_Suballocations0; }
5022 const SuballocationVectorType& AccessSuballocations2nd()
const {
return m_1stVectorIndex ? m_Suballocations0 : m_Suballocations1; }
// Counters of null (freed) items kept lazily in the vectors before compaction.
5025 size_t m_1stNullItemsBeginCount;
5027 size_t m_1stNullItemsMiddleCount;
5029 size_t m_2ndNullItemsCount;
5031 bool ShouldCompact1st()
const;
5032 void CleanupAfterFree();
// Buddy strategy: binary tree of power-of-two nodes; free nodes kept in
// per-level free lists. Lost allocations are not supported here.
5046 class VmaBlockMetadata_Buddy :
public VmaBlockMetadata
5048 VMA_CLASS_NO_COPY(VmaBlockMetadata_Buddy)
5051 virtual ~VmaBlockMetadata_Buddy();
5052 virtual void Init(VkDeviceSize size);
5054 virtual bool Validate()
const;
5055 virtual size_t GetAllocationCount()
const {
return m_AllocationCount; }
// Reported free size includes the tail the buddy tree cannot use.
5056 virtual VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize + GetUnusableSize(); }
5057 virtual VkDeviceSize GetUnusedRangeSizeMax()
const;
5058 virtual bool IsEmpty()
const {
return m_Root->type == Node::TYPE_FREE; }
5060 virtual void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
5061 virtual void AddPoolStats(
VmaPoolStats& inoutStats)
const;
5063 #if VMA_STATS_STRING_ENABLED 5064 virtual void PrintDetailedMap(
class VmaJsonWriter& json)
const;
5067 virtual bool CreateAllocationRequest(
5068 uint32_t currentFrameIndex,
5069 uint32_t frameInUseCount,
5070 VkDeviceSize bufferImageGranularity,
5071 VkDeviceSize allocSize,
5072 VkDeviceSize allocAlignment,
5074 VmaSuballocationType allocType,
5075 bool canMakeOtherLost,
5077 VmaAllocationRequest* pAllocationRequest);
5079 virtual bool MakeRequestedAllocationsLost(
5080 uint32_t currentFrameIndex,
5081 uint32_t frameInUseCount,
5082 VmaAllocationRequest* pAllocationRequest);
5084 virtual uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Corruption detection unsupported by this strategy.
5086 virtual VkResult CheckCorruption(
const void* pBlockData) {
return VK_ERROR_FEATURE_NOT_PRESENT; }
// NOTE(review): parameters of virtual Alloc(); declarator dropped in extraction.
5089 const VmaAllocationRequest& request,
5090 VmaSuballocationType type,
5091 VkDeviceSize allocSize,
5095 virtual void Free(
const VmaAllocation allocation) { FreeAtOffset(allocation, allocation->GetOffset()); }
5096 virtual void FreeAtOffset(VkDeviceSize offset) { FreeAtOffset(VMA_NULL, offset); }
5099 static const VkDeviceSize MIN_NODE_SIZE = 32;
5100 static const size_t MAX_LEVELS = 30;
// Accumulators checked against real counters during Validate().
5102 struct ValidationContext
5104 size_t calculatedAllocationCount;
5105 size_t calculatedFreeCount;
5106 VkDeviceSize calculatedSumFreeSize;
5108 ValidationContext() :
5109 calculatedAllocationCount(0),
5110 calculatedFreeCount(0),
5111 calculatedSumFreeSize(0) { }
5116 VkDeviceSize offset;
5146 VkDeviceSize m_UsableSize;
5147 uint32_t m_LevelCount;
5153 } m_FreeList[MAX_LEVELS];
5155 size_t m_AllocationCount;
5159 VkDeviceSize m_SumFreeSize;
// Bytes past the largest power-of-two the tree covers.
5161 VkDeviceSize GetUnusableSize()
const {
return GetSize() - m_UsableSize; }
5162 void DeleteNode(Node* node);
5163 bool ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const;
5164 uint32_t AllocSizeToLevel(VkDeviceSize allocSize)
const;
// Node size halves at each deeper level.
5165 inline VkDeviceSize LevelToNodeSize(uint32_t level)
const {
return m_UsableSize >> level; }
5167 void FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset);
5168 void CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const;
5172 void AddToFreeListFront(uint32_t level, Node* node);
5176 void RemoveFromFreeList(uint32_t level, Node* node);
5178 #if VMA_STATS_STRING_ENABLED 5179 void PrintDetailedMapNode(
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const;
// Wraps one VkDeviceMemory and the metadata object that suballocates it.
// Mapping is reference-counted via m_MapCount.
5189 class VmaDeviceMemoryBlock
5191 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
5193 VmaBlockMetadata* m_pMetadata;
// Must be unmapped and have its VkDeviceMemory released (Destroy) first.
5197 ~VmaDeviceMemoryBlock()
5199 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5200 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// NOTE(review): parameters of Init(); its declarator was dropped in extraction.
5206 uint32_t newMemoryTypeIndex,
5207 VkDeviceMemory newMemory,
5208 VkDeviceSize newSize,
5210 uint32_t algorithm);
5214 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
5215 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5216 uint32_t GetId()
const {
return m_Id; }
5217 void* GetMappedData()
const {
return m_pMappedData; }
5220 bool Validate()
const;
// Maps `count` references; ppData receives the mapped pointer.
5225 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
// Debug-margin helpers: write/check magic values around an allocation.
5228 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5229 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
5231 VkResult BindBufferMemory(
5235 VkResult BindImageMemory(
5241 uint32_t m_MemoryTypeIndex;
5243 VkDeviceMemory m_hMemory;
// Map reference counter and cached mapped pointer.
5248 uint32_t m_MapCount;
5249 void* m_pMappedData;
// Orders raw pointers by address, for use as a comparator in sorted
// pointer collections.
// NOTE(review): the comparison body (original line ~5256) was lost in the
// mangled text; restored as the address comparison the struct name implies.
struct VmaPointerLess
{
    bool operator()(const void* lhs, const void* rhs) const
    {
        return lhs < rhs;
    }
};

class VmaDefragmentator;
// Sequence of VmaDeviceMemoryBlock for one memory type, growing between
// m_MinBlockCount and m_MaxBlockCount. Serves both default heaps and custom pools.
5268 struct VmaBlockVector
5270 VMA_CLASS_NO_COPY(VmaBlockVector)
// NOTE(review): constructor declarator dropped in extraction; these are its parameters.
5274 uint32_t memoryTypeIndex,
5275 VkDeviceSize preferredBlockSize,
5276 size_t minBlockCount,
5277 size_t maxBlockCount,
5278 VkDeviceSize bufferImageGranularity,
5279 uint32_t frameInUseCount,
5281 bool explicitBlockSize,
5282 uint32_t algorithm);
5285 VkResult CreateMinBlocks();
5287 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
5288 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
5289 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
5290 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
5291 uint32_t GetAlgorithm()
const {
return m_Algorithm; }
5295 bool IsEmpty()
const {
return m_Blocks.empty(); }
5296 bool IsCorruptionDetectionEnabled()
const;
// NOTE(review): parameters of Allocate(); declarator dropped in extraction.
5300 uint32_t currentFrameIndex,
5302 VkDeviceSize alignment,
5304 VmaSuballocationType suballocType,
5313 #if VMA_STATS_STRING_ENABLED 5314 void PrintDetailedMap(
class VmaJsonWriter& json);
5317 void MakePoolAllocationsLost(
5318 uint32_t currentFrameIndex,
5319 size_t* pLostAllocationCount);
5320 VkResult CheckCorruption();
5322 VmaDefragmentator* EnsureDefragmentator(
5324 uint32_t currentFrameIndex);
5326 VkResult Defragment(
5328 VkDeviceSize& maxBytesToMove,
5329 uint32_t& maxAllocationsToMove);
5331 void DestroyDefragmentator();
5334 friend class VmaDefragmentator;
// Immutable configuration captured at construction.
5337 const uint32_t m_MemoryTypeIndex;
5338 const VkDeviceSize m_PreferredBlockSize;
5339 const size_t m_MinBlockCount;
5340 const size_t m_MaxBlockCount;
5341 const VkDeviceSize m_BufferImageGranularity;
5342 const uint32_t m_FrameInUseCount;
5343 const bool m_IsCustomPool;
5344 const bool m_ExplicitBlockSize;
5345 const uint32_t m_Algorithm;
5346 bool m_HasEmptyBlock;
5349 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
5353 VmaDefragmentator* m_pDefragmentator;
5354 uint32_t m_NextBlockId;
5356 VkDeviceSize CalcMaxBlockSize()
const;
5359 void Remove(VmaDeviceMemoryBlock* pBlock);
// Keeps blocks partially sorted so emptier blocks are preferred over time.
5363 void IncrementallySortBlocks();
5366 VkResult AllocateFromBlock(
5367 VmaDeviceMemoryBlock* pBlock,
5369 uint32_t currentFrameIndex,
5371 VkDeviceSize alignment,
5374 VmaSuballocationType suballocType,
5378 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// Custom pool: owns one VmaBlockVector plus a numeric id for recording.
// NOTE(review): the `class VmaPool_T` header line (original ~5382) was
// dropped in extraction.
5383 VMA_CLASS_NO_COPY(VmaPool_T)
5385 VmaBlockVector m_BlockVector;
// NOTE(review): trailing parameter of the constructor; declarator dropped.
5390 VkDeviceSize preferredBlockSize);
5393 uint32_t GetId()
const {
return m_Id; }
// Id may be assigned exactly once (asserted).
5394 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
// Moves allocations between blocks of one VmaBlockVector to reduce
// fragmentation, bounded by maxBytesToMove / maxAllocationsToMove.
5396 #if VMA_STATS_STRING_ENABLED 5404 class VmaDefragmentator
5406 VMA_CLASS_NO_COPY(VmaDefragmentator)
5409 VmaBlockVector*
const m_pBlockVector;
5410 uint32_t m_CurrentFrameIndex;
// Running totals across defragmentation rounds.
5411 VkDeviceSize m_BytesMoved;
5412 uint32_t m_AllocationsMoved;
// One allocation registered for defragmentation; *m_pChanged is set if moved.
5414 struct AllocationInfo
5417 VkBool32* m_pChanged;
5420 m_hAllocation(VK_NULL_HANDLE),
5421 m_pChanged(VMA_NULL)
5426 struct AllocationInfoSizeGreater
5428 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 5430 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
5435 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
// Per-block working state during a defragmentation pass.
5439 VmaDeviceMemoryBlock* m_pBlock;
5440 bool m_HasNonMovableAllocations;
5441 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
5443 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
5445 m_HasNonMovableAllocations(true),
5446 m_Allocations(pAllocationCallbacks),
5447 m_pMappedDataForDefragmentation(VMA_NULL)
// A block has non-movable allocations if not all of its allocations were
// registered for defragmentation.
5451 void CalcHasNonMovableAllocations()
5453 const size_t blockAllocCount = m_pBlock->m_pMetadata->GetAllocationCount();
5454 const size_t defragmentAllocCount = m_Allocations.size();
5455 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
5458 void SortAllocationsBySizeDescecnding()
5460 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater())
5463 VkResult EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData);
5468 void* m_pMappedDataForDefragmentation;
// Comparators ordering BlockInfo* by underlying block address.
5471 struct BlockPointerLess
5473 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 5475 return pLhsBlockInfo->m_pBlock < pRhsBlock;
5477 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5479 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Destination preference: blocks with non-movable allocations and more free
// space come first.
5485 struct BlockInfoCompareMoveDestination
5487 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 5489 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
5493 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
5497 if(pLhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_pMetadata->GetSumFreeSize())
5505 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
5506 BlockInfoVector m_Blocks;
5508 VkResult DefragmentRound(
5509 VkDeviceSize maxBytesToMove,
5510 uint32_t maxAllocationsToMove);
5512 static bool MoveMakesSense(
5513 size_t dstBlockIndex, VkDeviceSize dstOffset,
5514 size_t srcBlockIndex, VkDeviceSize srcOffset);
5519 VmaBlockVector* pBlockVector,
5520 uint32_t currentFrameIndex);
5522 ~VmaDefragmentator();
5524 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
5525 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
5527 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
5529 VkResult Defragment(
5530 VkDeviceSize maxBytesToMove,
5531 uint32_t maxAllocationsToMove);
// Recording facility (VMA_RECORDING_ENABLED): writes a CSV-style trace of
// allocator calls to a file for later replay.
// NOTE(review): the `class VmaRecorder` header line (original ~5536) was
// dropped in extraction.
5534 #if VMA_RECORDING_ENABLED 5541 void WriteConfiguration(
5542 const VkPhysicalDeviceProperties& devProps,
5543 const VkPhysicalDeviceMemoryProperties& memProps,
5544 bool dedicatedAllocationExtensionEnabled);
// One Record* entry point per public VMA API call being traced.
5547 void RecordCreateAllocator(uint32_t frameIndex);
5548 void RecordDestroyAllocator(uint32_t frameIndex);
5549 void RecordCreatePool(uint32_t frameIndex,
5552 void RecordDestroyPool(uint32_t frameIndex,
VmaPool pool);
5553 void RecordAllocateMemory(uint32_t frameIndex,
5554 const VkMemoryRequirements& vkMemReq,
5557 void RecordAllocateMemoryForBuffer(uint32_t frameIndex,
5558 const VkMemoryRequirements& vkMemReq,
5559 bool requiresDedicatedAllocation,
5560 bool prefersDedicatedAllocation,
5563 void RecordAllocateMemoryForImage(uint32_t frameIndex,
5564 const VkMemoryRequirements& vkMemReq,
5565 bool requiresDedicatedAllocation,
5566 bool prefersDedicatedAllocation,
5569 void RecordFreeMemory(uint32_t frameIndex,
5571 void RecordSetAllocationUserData(uint32_t frameIndex,
5573 const void* pUserData);
5574 void RecordCreateLostAllocation(uint32_t frameIndex,
5576 void RecordMapMemory(uint32_t frameIndex,
5578 void RecordUnmapMemory(uint32_t frameIndex,
5580 void RecordFlushAllocation(uint32_t frameIndex,
5581 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5582 void RecordInvalidateAllocation(uint32_t frameIndex,
5583 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size);
5584 void RecordCreateBuffer(uint32_t frameIndex,
5585 const VkBufferCreateInfo& bufCreateInfo,
5588 void RecordCreateImage(uint32_t frameIndex,
5589 const VkImageCreateInfo& imageCreateInfo,
5592 void RecordDestroyBuffer(uint32_t frameIndex,
5594 void RecordDestroyImage(uint32_t frameIndex,
5596 void RecordTouchAllocation(uint32_t frameIndex,
5598 void RecordGetAllocationInfo(uint32_t frameIndex,
5600 void RecordMakePoolAllocationsLost(uint32_t frameIndex,
// Helper type formatting user-data strings for the trace file.
5610 class UserDataString
5614 const char* GetString()
const {
return m_Str; }
// File access is serialized; m_StartCounter anchors relative timestamps.
5624 VMA_MUTEX m_FileMutex;
5626 int64_t m_StartCounter;
5628 void GetBasicParams(CallParams& outParams);
// Main allocator object behind the opaque VmaAllocator handle. Owns one
// VmaBlockVector per memory type plus lists of dedicated allocations.
5632 #endif // #if VMA_RECORDING_ENABLED 5635 struct VmaAllocator_T
5637 VMA_CLASS_NO_COPY(VmaAllocator_T)
5640 bool m_UseKhrDedicatedAllocation;
5642 bool m_AllocationCallbacksSpecified;
5643 VkAllocationCallbacks m_AllocationCallbacks;
// Optional per-heap byte budgets, guarded by m_HeapSizeLimitMutex.
5647 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
5648 VMA_MUTEX m_HeapSizeLimitMutex;
5650 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
5651 VkPhysicalDeviceMemoryProperties m_MemProps;
5654 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
// Dedicated (non-block) allocations, one list+mutex per memory type.
5657 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
5658 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
5659 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns user callbacks if supplied, else null (= Vulkan default allocator).
5665 const VkAllocationCallbacks* GetAllocationCallbacks()
const 5667 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
5671 return m_VulkanFunctions;
// Effective granularity: device limit raised to the debug minimum.
5674 VkDeviceSize GetBufferImageGranularity()
const 5677 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
5678 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
5681 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
5682 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
5684 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 5686 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
5687 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// Non-coherent = HOST_VISIBLE without HOST_COHERENT; such memory needs
// explicit flush/invalidate at nonCoherentAtomSize alignment.
5690 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 5692 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
5693 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
5696 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 5698 return IsMemoryTypeNonCoherent(memTypeIndex) ?
5699 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
5700 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
5703 bool IsIntegratedGpu()
const 5705 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
5708 #if VMA_RECORDING_ENABLED 5709 VmaRecorder* GetRecorder()
const {
return m_pRecorder; }
// Queries vkGet*MemoryRequirements (2KHR when available) and reports whether
// the dedicated-allocation extension requires/prefers a dedicated allocation.
5712 void GetBufferMemoryRequirements(
5714 VkMemoryRequirements& memReq,
5715 bool& requiresDedicatedAllocation,
5716 bool& prefersDedicatedAllocation)
const;
5717 void GetImageMemoryRequirements(
5719 VkMemoryRequirements& memReq,
5720 bool& requiresDedicatedAllocation,
5721 bool& prefersDedicatedAllocation)
const;
// Central allocation entry point; dispatches to block or dedicated paths.
5724 VkResult AllocateMemory(
5725 const VkMemoryRequirements& vkMemReq,
5726 bool requiresDedicatedAllocation,
5727 bool prefersDedicatedAllocation,
5728 VkBuffer dedicatedBuffer,
5729 VkImage dedicatedImage,
5731 VmaSuballocationType suballocType,
5737 void CalculateStats(
VmaStats* pStats);
5739 #if VMA_STATS_STRING_ENABLED 5740 void PrintDetailedMap(
class VmaJsonWriter& json);
5743 VkResult Defragment(
5745 size_t allocationCount,
5746 VkBool32* pAllocationsChanged,
5754 void DestroyPool(
VmaPool pool);
5757 void SetCurrentFrameIndex(uint32_t frameIndex);
5758 uint32_t GetCurrentFrameIndex()
const {
return m_CurrentFrameIndex.load(); }
5760 void MakePoolAllocationsLost(
5762 size_t* pLostAllocationCount);
5763 VkResult CheckPoolCorruption(
VmaPool hPool);
5764 VkResult CheckCorruption(uint32_t memoryTypeBits);
// Raw vkAllocateMemory/vkFreeMemory wrappers honoring heap size limits.
5768 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
5769 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
5774 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
5775 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
5777 void FlushOrInvalidateAllocation(
5779 VkDeviceSize offset, VkDeviceSize size,
5780 VMA_CACHE_OPERATION op);
// Fills allocation memory with a debug byte pattern (debug builds).
5782 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
5785 VkDeviceSize m_PreferredLargeHeapBlockSize;
5787 VkPhysicalDevice m_PhysicalDevice;
5788 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
// Custom pools, guarded by m_PoolsMutex.
5790 VMA_MUTEX m_PoolsMutex;
5792 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
5793 uint32_t m_NextPoolId;
5797 #if VMA_RECORDING_ENABLED 5798 VmaRecorder* m_pRecorder;
5803 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
5805 VkResult AllocateMemoryOfType(
5807 VkDeviceSize alignment,
5808 bool dedicatedAllocation,
5809 VkBuffer dedicatedBuffer,
5810 VkImage dedicatedImage,
5812 uint32_t memTypeIndex,
5813 VmaSuballocationType suballocType,
5817 VkResult AllocateDedicatedMemory(
5819 VmaSuballocationType suballocType,
5820 uint32_t memTypeIndex,
5822 bool isUserDataString,
5824 VkBuffer dedicatedBuffer,
5825 VkImage dedicatedImage,
5835 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
5837 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
5840 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
5842 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
5845 template<
typename T>
// NOTE(review): the declarator line (original ~5846-5847) was lost in
// extraction; this body allocates raw storage for one T — presumably the
// single-object `VmaAllocate<T>(hAllocator)` helper. Verify against upstream.
5848 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
5851 template<
typename T>
5852 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
5854 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
5857 template<
typename T>
5858 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
5863 VmaFree(hAllocator, ptr);
5867 template<
typename T>
5868 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
5872 for(
size_t i = count; i--; )
5874 VmaFree(hAllocator, ptr);
// Growable character buffer used to build the JSON stats string; storage
// comes from the allocator's CPU callbacks. The buffer is NOT NUL-terminated;
// use GetLength() with GetData().
5881 #if VMA_STATS_STRING_ENABLED 5883 class VmaStringBuilder
5886 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
5887 size_t GetLength()
const {
return m_Data.size(); }
5888 const char* GetData()
const {
return m_Data.data(); }
5890 void Add(
char ch) { m_Data.push_back(ch); }
5891 void Add(
const char* pStr);
5892 void AddNewLine() { Add(
'\n'); }
5893 void AddNumber(uint32_t num);
5894 void AddNumber(uint64_t num);
5895 void AddPointer(
const void* ptr);
5898 VmaVector< char, VmaStlAllocator<char> > m_Data;
5901 void VmaStringBuilder::Add(
const char* pStr)
5903 const size_t strLen = strlen(pStr);
5906 const size_t oldCount = m_Data.size();
5907 m_Data.resize(oldCount + strLen);
5908 memcpy(m_Data.data() + oldCount, pStr, strLen);
5912 void VmaStringBuilder::AddNumber(uint32_t num)
5915 VmaUint32ToStr(buf,
sizeof(buf), num);
5919 void VmaStringBuilder::AddNumber(uint64_t num)
5922 VmaUint64ToStr(buf,
sizeof(buf), num);
5926 void VmaStringBuilder::AddPointer(
const void* ptr)
5929 VmaPtrToStr(buf,
sizeof(buf), ptr);
// Streams well-formed JSON into a VmaStringBuilder; a stack of StackItem
// entries tracks open objects/arrays for comma and indent placement.
// NOTE(review): the `class VmaJsonWriter` header line was dropped in extraction.
5933 #endif // #if VMA_STATS_STRING_ENABLED 5938 #if VMA_STATS_STRING_ENABLED 5942 VMA_CLASS_NO_COPY(VmaJsonWriter)
5944 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
5947 void BeginObject(
bool singleLine =
false);
5950 void BeginArray(
bool singleLine =
false);
// Strings may be written whole (WriteString) or piecewise
// (BeginString / ContinueString* / EndString).
5953 void WriteString(
const char* pStr);
5954 void BeginString(
const char* pStr = VMA_NULL);
5955 void ContinueString(
const char* pStr);
5956 void ContinueString(uint32_t n);
5957 void ContinueString(uint64_t n);
5958 void ContinueString_Pointer(
const void* ptr);
5959 void EndString(
const char* pStr = VMA_NULL);
5961 void WriteNumber(uint32_t n);
5962 void WriteNumber(uint64_t n);
5963 void WriteBool(
bool b);
5967 static const char*
const INDENT;
5969 enum COLLECTION_TYPE
5971 COLLECTION_TYPE_OBJECT,
5972 COLLECTION_TYPE_ARRAY,
// Per-collection state: kind, number of values emitted, single-line flag.
5976 COLLECTION_TYPE type;
5977 uint32_t valueCount;
5978 bool singleLineMode;
5981 VmaStringBuilder& m_SB;
5982 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
5983 bool m_InsideString;
5985 void BeginValue(
bool isString);
5986 void WriteIndent(
bool oneLess =
false);
5989 const char*
const VmaJsonWriter::INDENT =
" ";
5991 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
5993 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
5994 m_InsideString(false)
5998 VmaJsonWriter::~VmaJsonWriter()
6000 VMA_ASSERT(!m_InsideString);
6001 VMA_ASSERT(m_Stack.empty());
6004 void VmaJsonWriter::BeginObject(
bool singleLine)
6006 VMA_ASSERT(!m_InsideString);
6012 item.type = COLLECTION_TYPE_OBJECT;
6013 item.valueCount = 0;
6014 item.singleLineMode = singleLine;
6015 m_Stack.push_back(item);
6018 void VmaJsonWriter::EndObject()
6020 VMA_ASSERT(!m_InsideString);
6025 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
6029 void VmaJsonWriter::BeginArray(
bool singleLine)
6031 VMA_ASSERT(!m_InsideString);
6037 item.type = COLLECTION_TYPE_ARRAY;
6038 item.valueCount = 0;
6039 item.singleLineMode = singleLine;
6040 m_Stack.push_back(item);
6043 void VmaJsonWriter::EndArray()
6045 VMA_ASSERT(!m_InsideString);
6050 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
6054 void VmaJsonWriter::WriteString(
const char* pStr)
6060 void VmaJsonWriter::BeginString(
const char* pStr)
6062 VMA_ASSERT(!m_InsideString);
6066 m_InsideString =
true;
6067 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6069 ContinueString(pStr);
6073 void VmaJsonWriter::ContinueString(
const char* pStr)
6075 VMA_ASSERT(m_InsideString);
6077 const size_t strLen = strlen(pStr);
6078 for(
size_t i = 0; i < strLen; ++i)
6111 VMA_ASSERT(0 &&
"Character not currently supported.");
6117 void VmaJsonWriter::ContinueString(uint32_t n)
6119 VMA_ASSERT(m_InsideString);
6123 void VmaJsonWriter::ContinueString(uint64_t n)
6125 VMA_ASSERT(m_InsideString);
6129 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
6131 VMA_ASSERT(m_InsideString);
6132 m_SB.AddPointer(ptr);
6135 void VmaJsonWriter::EndString(
const char* pStr)
6137 VMA_ASSERT(m_InsideString);
6138 if(pStr != VMA_NULL && pStr[0] !=
'\0')
6140 ContinueString(pStr);
6143 m_InsideString =
false;
6146 void VmaJsonWriter::WriteNumber(uint32_t n)
6148 VMA_ASSERT(!m_InsideString);
6153 void VmaJsonWriter::WriteNumber(uint64_t n)
6155 VMA_ASSERT(!m_InsideString);
6160 void VmaJsonWriter::WriteBool(
bool b)
6162 VMA_ASSERT(!m_InsideString);
6164 m_SB.Add(b ?
"true" :
"false");
6167 void VmaJsonWriter::WriteNull()
6169 VMA_ASSERT(!m_InsideString);
6174 void VmaJsonWriter::BeginValue(
bool isString)
6176 if(!m_Stack.empty())
6178 StackItem& currItem = m_Stack.back();
6179 if(currItem.type == COLLECTION_TYPE_OBJECT &&
6180 currItem.valueCount % 2 == 0)
6182 VMA_ASSERT(isString);
6185 if(currItem.type == COLLECTION_TYPE_OBJECT &&
6186 currItem.valueCount % 2 != 0)
6190 else if(currItem.valueCount > 0)
6199 ++currItem.valueCount;
6203 void VmaJsonWriter::WriteIndent(
bool oneLess)
6205 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
6209 size_t count = m_Stack.size();
6210 if(count > 0 && oneLess)
6214 for(
size_t i = 0; i < count; ++i)
6221 #endif // #if VMA_STATS_STRING_ENABLED 6225 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
6227 if(IsUserDataString())
6229 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
6231 FreeUserDataString(hAllocator);
6233 if(pUserData != VMA_NULL)
6235 const char*
const newStrSrc = (
char*)pUserData;
6236 const size_t newStrLen = strlen(newStrSrc);
6237 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
6238 memcpy(newStrDst, newStrSrc, newStrLen + 1);
6239 m_pUserData = newStrDst;
6244 m_pUserData = pUserData;
6248 void VmaAllocation_T::ChangeBlockAllocation(
6250 VmaDeviceMemoryBlock* block,
6251 VkDeviceSize offset)
6253 VMA_ASSERT(block != VMA_NULL);
6254 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6257 if(block != m_BlockAllocation.m_Block)
6259 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
6260 if(IsPersistentMap())
6262 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
6263 block->Map(hAllocator, mapRefCount, VMA_NULL);
6266 m_BlockAllocation.m_Block = block;
6267 m_BlockAllocation.m_Offset = offset;
6270 VkDeviceSize VmaAllocation_T::GetOffset()
const 6274 case ALLOCATION_TYPE_BLOCK:
6275 return m_BlockAllocation.m_Offset;
6276 case ALLOCATION_TYPE_DEDICATED:
6284 VkDeviceMemory VmaAllocation_T::GetMemory()
const 6288 case ALLOCATION_TYPE_BLOCK:
6289 return m_BlockAllocation.m_Block->GetDeviceMemory();
6290 case ALLOCATION_TYPE_DEDICATED:
6291 return m_DedicatedAllocation.m_hMemory;
6294 return VK_NULL_HANDLE;
6298 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 6302 case ALLOCATION_TYPE_BLOCK:
6303 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
6304 case ALLOCATION_TYPE_DEDICATED:
6305 return m_DedicatedAllocation.m_MemoryTypeIndex;
6312 void* VmaAllocation_T::GetMappedData()
const 6316 case ALLOCATION_TYPE_BLOCK:
6319 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
6320 VMA_ASSERT(pBlockData != VMA_NULL);
6321 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
6328 case ALLOCATION_TYPE_DEDICATED:
6329 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
6330 return m_DedicatedAllocation.m_pMappedData;
6337 bool VmaAllocation_T::CanBecomeLost()
const 6341 case ALLOCATION_TYPE_BLOCK:
6342 return m_BlockAllocation.m_CanBecomeLost;
6343 case ALLOCATION_TYPE_DEDICATED:
6351 VmaPool VmaAllocation_T::GetPool()
const 6353 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
6354 return m_BlockAllocation.m_hPool;
6357 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
6359 VMA_ASSERT(CanBecomeLost());
6365 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
6368 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
6373 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
6379 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
6389 #if VMA_STATS_STRING_ENABLED 6392 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
6401 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 6403 json.WriteString(
"Type");
6404 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
6406 json.WriteString(
"Size");
6407 json.WriteNumber(m_Size);
6409 if(m_pUserData != VMA_NULL)
6411 json.WriteString(
"UserData");
6412 if(IsUserDataString())
6414 json.WriteString((
const char*)m_pUserData);
6419 json.ContinueString_Pointer(m_pUserData);
6424 json.WriteString(
"CreationFrameIndex");
6425 json.WriteNumber(m_CreationFrameIndex);
6427 json.WriteString(
"LastUseFrameIndex");
6428 json.WriteNumber(GetLastUseFrameIndex());
6430 if(m_BufferImageUsage != 0)
6432 json.WriteString(
"Usage");
6433 json.WriteNumber(m_BufferImageUsage);
6439 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
6441 VMA_ASSERT(IsUserDataString());
6442 if(m_pUserData != VMA_NULL)
6444 char*
const oldStr = (
char*)m_pUserData;
6445 const size_t oldStrLen = strlen(oldStr);
6446 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
6447 m_pUserData = VMA_NULL;
6451 void VmaAllocation_T::BlockAllocMap()
6453 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
6455 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
6461 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
6465 void VmaAllocation_T::BlockAllocUnmap()
6467 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
6469 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6475 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
6479 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
6481 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
6485 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
6487 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
6488 *ppData = m_DedicatedAllocation.m_pMappedData;
6494 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
6495 return VK_ERROR_MEMORY_MAP_FAILED;
6500 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6501 hAllocator->m_hDevice,
6502 m_DedicatedAllocation.m_hMemory,
6507 if(result == VK_SUCCESS)
6509 m_DedicatedAllocation.m_pMappedData = *ppData;
6516 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
6518 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
6520 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
6525 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
6526 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
6527 hAllocator->m_hDevice,
6528 m_DedicatedAllocation.m_hMemory);
6533 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
6537 #if VMA_STATS_STRING_ENABLED 6539 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
6543 json.WriteString(
"Blocks");
6546 json.WriteString(
"Allocations");
6549 json.WriteString(
"UnusedRanges");
6552 json.WriteString(
"UsedBytes");
6555 json.WriteString(
"UnusedBytes");
6560 json.WriteString(
"AllocationSize");
6561 json.BeginObject(
true);
6562 json.WriteString(
"Min");
6564 json.WriteString(
"Avg");
6566 json.WriteString(
"Max");
6573 json.WriteString(
"UnusedRangeSize");
6574 json.BeginObject(
true);
6575 json.WriteString(
"Min");
6577 json.WriteString(
"Avg");
6579 json.WriteString(
"Max");
6587 #endif // #if VMA_STATS_STRING_ENABLED 6589 struct VmaSuballocationItemSizeLess
6592 const VmaSuballocationList::iterator lhs,
6593 const VmaSuballocationList::iterator rhs)
const 6595 return lhs->size < rhs->size;
6598 const VmaSuballocationList::iterator lhs,
6599 VkDeviceSize rhsSize)
const 6601 return lhs->size < rhsSize;
6609 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
6611 m_pAllocationCallbacks(hAllocator->GetAllocationCallbacks())
6615 #if VMA_STATS_STRING_ENABLED 6617 void VmaBlockMetadata::PrintDetailedMap_Begin(
class VmaJsonWriter& json,
6618 VkDeviceSize unusedBytes,
6619 size_t allocationCount,
6620 size_t unusedRangeCount)
const 6624 json.WriteString(
"TotalBytes");
6625 json.WriteNumber(GetSize());
6627 json.WriteString(
"UnusedBytes");
6628 json.WriteNumber(unusedBytes);
6630 json.WriteString(
"Allocations");
6631 json.WriteNumber((uint64_t)allocationCount);
6633 json.WriteString(
"UnusedRanges");
6634 json.WriteNumber((uint64_t)unusedRangeCount);
6636 json.WriteString(
"Suballocations");
6640 void VmaBlockMetadata::PrintDetailedMap_Allocation(
class VmaJsonWriter& json,
6641 VkDeviceSize offset,
6644 json.BeginObject(
true);
6646 json.WriteString(
"Offset");
6647 json.WriteNumber(offset);
6649 hAllocation->PrintParameters(json);
6654 void VmaBlockMetadata::PrintDetailedMap_UnusedRange(
class VmaJsonWriter& json,
6655 VkDeviceSize offset,
6656 VkDeviceSize size)
const 6658 json.BeginObject(
true);
6660 json.WriteString(
"Offset");
6661 json.WriteNumber(offset);
6663 json.WriteString(
"Type");
6664 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
6666 json.WriteString(
"Size");
6667 json.WriteNumber(size);
6672 void VmaBlockMetadata::PrintDetailedMap_End(
class VmaJsonWriter& json)
const 6678 #endif // #if VMA_STATS_STRING_ENABLED 6683 VmaBlockMetadata_Generic::VmaBlockMetadata_Generic(
VmaAllocator hAllocator) :
6684 VmaBlockMetadata(hAllocator),
6687 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
6688 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
6692 VmaBlockMetadata_Generic::~VmaBlockMetadata_Generic()
6696 void VmaBlockMetadata_Generic::Init(VkDeviceSize size)
6698 VmaBlockMetadata::Init(size);
6701 m_SumFreeSize = size;
6703 VmaSuballocation suballoc = {};
6704 suballoc.offset = 0;
6705 suballoc.size = size;
6706 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6707 suballoc.hAllocation = VK_NULL_HANDLE;
6709 VMA_ASSERT(size > VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
6710 m_Suballocations.push_back(suballoc);
6711 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
6713 m_FreeSuballocationsBySize.push_back(suballocItem);
6716 bool VmaBlockMetadata_Generic::Validate()
const 6718 VMA_VALIDATE(!m_Suballocations.empty());
6721 VkDeviceSize calculatedOffset = 0;
6723 uint32_t calculatedFreeCount = 0;
6725 VkDeviceSize calculatedSumFreeSize = 0;
6728 size_t freeSuballocationsToRegister = 0;
6730 bool prevFree =
false;
6732 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6733 suballocItem != m_Suballocations.cend();
6736 const VmaSuballocation& subAlloc = *suballocItem;
6739 VMA_VALIDATE(subAlloc.offset == calculatedOffset);
6741 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
6743 VMA_VALIDATE(!prevFree || !currFree);
6745 VMA_VALIDATE(currFree == (subAlloc.hAllocation == VK_NULL_HANDLE));
6749 calculatedSumFreeSize += subAlloc.size;
6750 ++calculatedFreeCount;
6751 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6753 ++freeSuballocationsToRegister;
6757 VMA_VALIDATE(subAlloc.size >= VMA_DEBUG_MARGIN);
6761 VMA_VALIDATE(subAlloc.hAllocation->GetOffset() == subAlloc.offset);
6762 VMA_VALIDATE(subAlloc.hAllocation->GetSize() == subAlloc.size);
6765 VMA_VALIDATE(VMA_DEBUG_MARGIN == 0 || prevFree);
6768 calculatedOffset += subAlloc.size;
6769 prevFree = currFree;
6774 VMA_VALIDATE(m_FreeSuballocationsBySize.size() == freeSuballocationsToRegister);
6776 VkDeviceSize lastSize = 0;
6777 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
6779 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
6782 VMA_VALIDATE(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE);
6784 VMA_VALIDATE(suballocItem->size >= lastSize);
6786 lastSize = suballocItem->size;
6790 VMA_VALIDATE(ValidateFreeSuballocationList());
6791 VMA_VALIDATE(calculatedOffset == GetSize());
6792 VMA_VALIDATE(calculatedSumFreeSize == m_SumFreeSize);
6793 VMA_VALIDATE(calculatedFreeCount == m_FreeCount);
6798 VkDeviceSize VmaBlockMetadata_Generic::GetUnusedRangeSizeMax()
const 6800 if(!m_FreeSuballocationsBySize.empty())
6802 return m_FreeSuballocationsBySize.back()->size;
6810 bool VmaBlockMetadata_Generic::IsEmpty()
const 6812 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
6815 void VmaBlockMetadata_Generic::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 6819 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6831 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6832 suballocItem != m_Suballocations.cend();
6835 const VmaSuballocation& suballoc = *suballocItem;
6836 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
6849 void VmaBlockMetadata_Generic::AddPoolStats(
VmaPoolStats& inoutStats)
const 6851 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
6853 inoutStats.
size += GetSize();
6860 #if VMA_STATS_STRING_ENABLED 6862 void VmaBlockMetadata_Generic::PrintDetailedMap(
class VmaJsonWriter& json)
const 6864 PrintDetailedMap_Begin(json,
6866 m_Suballocations.size() - (size_t)m_FreeCount,
6870 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
6871 suballocItem != m_Suballocations.cend();
6872 ++suballocItem, ++i)
6874 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6876 PrintDetailedMap_UnusedRange(json, suballocItem->offset, suballocItem->size);
6880 PrintDetailedMap_Allocation(json, suballocItem->offset, suballocItem->hAllocation);
6884 PrintDetailedMap_End(json);
6887 #endif // #if VMA_STATS_STRING_ENABLED 6889 bool VmaBlockMetadata_Generic::CreateAllocationRequest(
6890 uint32_t currentFrameIndex,
6891 uint32_t frameInUseCount,
6892 VkDeviceSize bufferImageGranularity,
6893 VkDeviceSize allocSize,
6894 VkDeviceSize allocAlignment,
6896 VmaSuballocationType allocType,
6897 bool canMakeOtherLost,
6899 VmaAllocationRequest* pAllocationRequest)
6901 VMA_ASSERT(allocSize > 0);
6902 VMA_ASSERT(!upperAddress);
6903 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
6904 VMA_ASSERT(pAllocationRequest != VMA_NULL);
6905 VMA_HEAVY_ASSERT(Validate());
6908 if(canMakeOtherLost ==
false &&
6909 m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
6915 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
6916 if(freeSuballocCount > 0)
6921 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
6922 m_FreeSuballocationsBySize.data(),
6923 m_FreeSuballocationsBySize.data() + freeSuballocCount,
6924 allocSize + 2 * VMA_DEBUG_MARGIN,
6925 VmaSuballocationItemSizeLess());
6926 size_t index = it - m_FreeSuballocationsBySize.data();
6927 for(; index < freeSuballocCount; ++index)
6932 bufferImageGranularity,
6936 m_FreeSuballocationsBySize[index],
6938 &pAllocationRequest->offset,
6939 &pAllocationRequest->itemsToMakeLostCount,
6940 &pAllocationRequest->sumFreeSize,
6941 &pAllocationRequest->sumItemSize))
6943 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
6951 for(
size_t index = freeSuballocCount; index--; )
6956 bufferImageGranularity,
6960 m_FreeSuballocationsBySize[index],
6962 &pAllocationRequest->offset,
6963 &pAllocationRequest->itemsToMakeLostCount,
6964 &pAllocationRequest->sumFreeSize,
6965 &pAllocationRequest->sumItemSize))
6967 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
6974 if(canMakeOtherLost)
6978 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
6979 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
6981 VmaAllocationRequest tmpAllocRequest = {};
6982 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
6983 suballocIt != m_Suballocations.end();
6986 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
6987 suballocIt->hAllocation->CanBecomeLost())
6992 bufferImageGranularity,
6998 &tmpAllocRequest.offset,
6999 &tmpAllocRequest.itemsToMakeLostCount,
7000 &tmpAllocRequest.sumFreeSize,
7001 &tmpAllocRequest.sumItemSize))
7003 tmpAllocRequest.item = suballocIt;
7005 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost() ||
7008 *pAllocationRequest = tmpAllocRequest;
7014 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
7023 bool VmaBlockMetadata_Generic::MakeRequestedAllocationsLost(
7024 uint32_t currentFrameIndex,
7025 uint32_t frameInUseCount,
7026 VmaAllocationRequest* pAllocationRequest)
7028 while(pAllocationRequest->itemsToMakeLostCount > 0)
7030 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
7032 ++pAllocationRequest->item;
7034 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7035 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
7036 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
7037 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
7039 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
7040 --pAllocationRequest->itemsToMakeLostCount;
7048 VMA_HEAVY_ASSERT(Validate());
7049 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
7050 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
7055 uint32_t VmaBlockMetadata_Generic::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
7057 uint32_t lostAllocationCount = 0;
7058 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7059 it != m_Suballocations.end();
7062 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
7063 it->hAllocation->CanBecomeLost() &&
7064 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
7066 it = FreeSuballocation(it);
7067 ++lostAllocationCount;
7070 return lostAllocationCount;
7073 VkResult VmaBlockMetadata_Generic::CheckCorruption(
const void* pBlockData)
7075 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
7076 it != m_Suballocations.end();
7079 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
7081 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
7083 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
7084 return VK_ERROR_VALIDATION_FAILED_EXT;
7086 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
7088 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
7089 return VK_ERROR_VALIDATION_FAILED_EXT;
7097 void VmaBlockMetadata_Generic::Alloc(
7098 const VmaAllocationRequest& request,
7099 VmaSuballocationType type,
7100 VkDeviceSize allocSize,
7104 VMA_ASSERT(!upperAddress);
7105 VMA_ASSERT(request.item != m_Suballocations.end());
7106 VmaSuballocation& suballoc = *request.item;
7108 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7110 VMA_ASSERT(request.offset >= suballoc.offset);
7111 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
7112 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
7113 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
7117 UnregisterFreeSuballocation(request.item);
7119 suballoc.offset = request.offset;
7120 suballoc.size = allocSize;
7121 suballoc.type = type;
7122 suballoc.hAllocation = hAllocation;
7127 VmaSuballocation paddingSuballoc = {};
7128 paddingSuballoc.offset = request.offset + allocSize;
7129 paddingSuballoc.size = paddingEnd;
7130 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7131 VmaSuballocationList::iterator next = request.item;
7133 const VmaSuballocationList::iterator paddingEndItem =
7134 m_Suballocations.insert(next, paddingSuballoc);
7135 RegisterFreeSuballocation(paddingEndItem);
7141 VmaSuballocation paddingSuballoc = {};
7142 paddingSuballoc.offset = request.offset - paddingBegin;
7143 paddingSuballoc.size = paddingBegin;
7144 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7145 const VmaSuballocationList::iterator paddingBeginItem =
7146 m_Suballocations.insert(request.item, paddingSuballoc);
7147 RegisterFreeSuballocation(paddingBeginItem);
7151 m_FreeCount = m_FreeCount - 1;
7152 if(paddingBegin > 0)
7160 m_SumFreeSize -= allocSize;
7163 void VmaBlockMetadata_Generic::Free(
const VmaAllocation allocation)
7165 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7166 suballocItem != m_Suballocations.end();
7169 VmaSuballocation& suballoc = *suballocItem;
7170 if(suballoc.hAllocation == allocation)
7172 FreeSuballocation(suballocItem);
7173 VMA_HEAVY_ASSERT(Validate());
7177 VMA_ASSERT(0 &&
"Not found!");
7180 void VmaBlockMetadata_Generic::FreeAtOffset(VkDeviceSize offset)
7182 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
7183 suballocItem != m_Suballocations.end();
7186 VmaSuballocation& suballoc = *suballocItem;
7187 if(suballoc.offset == offset)
7189 FreeSuballocation(suballocItem);
7193 VMA_ASSERT(0 &&
"Not found!");
7196 bool VmaBlockMetadata_Generic::ValidateFreeSuballocationList()
const 7198 VkDeviceSize lastSize = 0;
7199 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
7201 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
7203 VMA_VALIDATE(it->type == VMA_SUBALLOCATION_TYPE_FREE);
7204 VMA_VALIDATE(it->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER);
7205 VMA_VALIDATE(it->size >= lastSize);
7206 lastSize = it->size;
7211 bool VmaBlockMetadata_Generic::CheckAllocation(
7212 uint32_t currentFrameIndex,
7213 uint32_t frameInUseCount,
7214 VkDeviceSize bufferImageGranularity,
7215 VkDeviceSize allocSize,
7216 VkDeviceSize allocAlignment,
7217 VmaSuballocationType allocType,
7218 VmaSuballocationList::const_iterator suballocItem,
7219 bool canMakeOtherLost,
7220 VkDeviceSize* pOffset,
7221 size_t* itemsToMakeLostCount,
7222 VkDeviceSize* pSumFreeSize,
7223 VkDeviceSize* pSumItemSize)
const 7225 VMA_ASSERT(allocSize > 0);
7226 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
7227 VMA_ASSERT(suballocItem != m_Suballocations.cend());
7228 VMA_ASSERT(pOffset != VMA_NULL);
7230 *itemsToMakeLostCount = 0;
7234 if(canMakeOtherLost)
7236 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7238 *pSumFreeSize = suballocItem->size;
7242 if(suballocItem->hAllocation->CanBecomeLost() &&
7243 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7245 ++*itemsToMakeLostCount;
7246 *pSumItemSize = suballocItem->size;
7255 if(GetSize() - suballocItem->offset < allocSize)
7261 *pOffset = suballocItem->offset;
7264 if(VMA_DEBUG_MARGIN > 0)
7266 *pOffset += VMA_DEBUG_MARGIN;
7270 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
7274 if(bufferImageGranularity > 1)
7276 bool bufferImageGranularityConflict =
false;
7277 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7278 while(prevSuballocItem != m_Suballocations.cbegin())
7281 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7282 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7284 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7286 bufferImageGranularityConflict =
true;
7294 if(bufferImageGranularityConflict)
7296 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
7302 if(*pOffset >= suballocItem->offset + suballocItem->size)
7308 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
7311 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
7313 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
7315 if(suballocItem->offset + totalSize > GetSize())
7322 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
7323 if(totalSize > suballocItem->size)
7325 VkDeviceSize remainingSize = totalSize - suballocItem->size;
7326 while(remainingSize > 0)
7329 if(lastSuballocItem == m_Suballocations.cend())
7333 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7335 *pSumFreeSize += lastSuballocItem->size;
7339 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
7340 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
7341 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7343 ++*itemsToMakeLostCount;
7344 *pSumItemSize += lastSuballocItem->size;
7351 remainingSize = (lastSuballocItem->size < remainingSize) ?
7352 remainingSize - lastSuballocItem->size : 0;
7358 if(bufferImageGranularity > 1)
7360 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
7362 while(nextSuballocItem != m_Suballocations.cend())
7364 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7365 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7367 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
7369 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
7370 if(nextSuballoc.hAllocation->CanBecomeLost() &&
7371 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
7373 ++*itemsToMakeLostCount;
7392 const VmaSuballocation& suballoc = *suballocItem;
7393 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7395 *pSumFreeSize = suballoc.size;
7398 if(suballoc.size < allocSize)
7404 *pOffset = suballoc.offset;
7407 if(VMA_DEBUG_MARGIN > 0)
7409 *pOffset += VMA_DEBUG_MARGIN;
7413 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
7417 if(bufferImageGranularity > 1)
7419 bool bufferImageGranularityConflict =
false;
7420 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
7421 while(prevSuballocItem != m_Suballocations.cbegin())
7424 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
7425 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
7427 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
7429 bufferImageGranularityConflict =
true;
7437 if(bufferImageGranularityConflict)
7439 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
7444 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
7447 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
7450 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
7457 if(bufferImageGranularity > 1)
7459 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
7461 while(nextSuballocItem != m_Suballocations.cend())
7463 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
7464 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
7466 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
7485 void VmaBlockMetadata_Generic::MergeFreeWithNext(VmaSuballocationList::iterator item)
7487 VMA_ASSERT(item != m_Suballocations.end());
7488 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7490 VmaSuballocationList::iterator nextItem = item;
7492 VMA_ASSERT(nextItem != m_Suballocations.end());
7493 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
7495 item->size += nextItem->size;
7497 m_Suballocations.erase(nextItem);
7500 VmaSuballocationList::iterator VmaBlockMetadata_Generic::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
7503 VmaSuballocation& suballoc = *suballocItem;
7504 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
7505 suballoc.hAllocation = VK_NULL_HANDLE;
7509 m_SumFreeSize += suballoc.size;
7512 bool mergeWithNext =
false;
7513 bool mergeWithPrev =
false;
7515 VmaSuballocationList::iterator nextItem = suballocItem;
7517 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
7519 mergeWithNext =
true;
7522 VmaSuballocationList::iterator prevItem = suballocItem;
7523 if(suballocItem != m_Suballocations.begin())
7526 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
7528 mergeWithPrev =
true;
7534 UnregisterFreeSuballocation(nextItem);
7535 MergeFreeWithNext(suballocItem);
7540 UnregisterFreeSuballocation(prevItem);
7541 MergeFreeWithNext(prevItem);
7542 RegisterFreeSuballocation(prevItem);
7547 RegisterFreeSuballocation(suballocItem);
7548 return suballocItem;
7552 void VmaBlockMetadata_Generic::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
7554 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7555 VMA_ASSERT(item->size > 0);
7559 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
7561 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7563 if(m_FreeSuballocationsBySize.empty())
7565 m_FreeSuballocationsBySize.push_back(item);
7569 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
7577 void VmaBlockMetadata_Generic::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
7579 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
7580 VMA_ASSERT(item->size > 0);
7584 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
7586 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
7588 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
7589 m_FreeSuballocationsBySize.data(),
7590 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
7592 VmaSuballocationItemSizeLess());
7593 for(
size_t index = it - m_FreeSuballocationsBySize.data();
7594 index < m_FreeSuballocationsBySize.size();
7597 if(m_FreeSuballocationsBySize[index] == item)
7599 VmaVectorRemove(m_FreeSuballocationsBySize, index);
7602 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
7604 VMA_ASSERT(0 &&
"Not found.");
7613 VmaBlockMetadata_Linear::VmaBlockMetadata_Linear(
VmaAllocator hAllocator) :
7614 VmaBlockMetadata(hAllocator),
7616 m_Suballocations0(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7617 m_Suballocations1(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
7618 m_1stVectorIndex(0),
7619 m_2ndVectorMode(SECOND_VECTOR_EMPTY),
7620 m_1stNullItemsBeginCount(0),
7621 m_1stNullItemsMiddleCount(0),
7622 m_2ndNullItemsCount(0)
7626 VmaBlockMetadata_Linear::~VmaBlockMetadata_Linear()
7630 void VmaBlockMetadata_Linear::Init(VkDeviceSize size)
7632 VmaBlockMetadata::Init(size);
7633 m_SumFreeSize = size;
7636 bool VmaBlockMetadata_Linear::Validate()
const 7638 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7639 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7641 VMA_VALIDATE(suballocations2nd.empty() == (m_2ndVectorMode == SECOND_VECTOR_EMPTY));
7642 VMA_VALIDATE(!suballocations1st.empty() ||
7643 suballocations2nd.empty() ||
7644 m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER);
7646 if(!suballocations1st.empty())
7649 VMA_VALIDATE(suballocations1st[m_1stNullItemsBeginCount].hAllocation != VK_NULL_HANDLE);
7651 VMA_VALIDATE(suballocations1st.back().hAllocation != VK_NULL_HANDLE);
7653 if(!suballocations2nd.empty())
7656 VMA_VALIDATE(suballocations2nd.back().hAllocation != VK_NULL_HANDLE);
7659 VMA_VALIDATE(m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount <= suballocations1st.size());
7660 VMA_VALIDATE(m_2ndNullItemsCount <= suballocations2nd.size());
7662 VkDeviceSize sumUsedSize = 0;
7663 const size_t suballoc1stCount = suballocations1st.size();
7664 VkDeviceSize offset = VMA_DEBUG_MARGIN;
7666 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7668 const size_t suballoc2ndCount = suballocations2nd.size();
7669 size_t nullItem2ndCount = 0;
7670 for(
size_t i = 0; i < suballoc2ndCount; ++i)
7672 const VmaSuballocation& suballoc = suballocations2nd[i];
7673 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7675 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7676 VMA_VALIDATE(suballoc.offset >= offset);
7680 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7681 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7682 sumUsedSize += suballoc.size;
7689 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7692 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
7695 for(
size_t i = 0; i < m_1stNullItemsBeginCount; ++i)
7697 const VmaSuballocation& suballoc = suballocations1st[i];
7698 VMA_VALIDATE(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE &&
7699 suballoc.hAllocation == VK_NULL_HANDLE);
7702 size_t nullItem1stCount = m_1stNullItemsBeginCount;
7704 for(
size_t i = m_1stNullItemsBeginCount; i < suballoc1stCount; ++i)
7706 const VmaSuballocation& suballoc = suballocations1st[i];
7707 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7709 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7710 VMA_VALIDATE(suballoc.offset >= offset);
7711 VMA_VALIDATE(i >= m_1stNullItemsBeginCount || currFree);
7715 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7716 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7717 sumUsedSize += suballoc.size;
7724 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7726 VMA_VALIDATE(nullItem1stCount == m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount);
7728 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7730 const size_t suballoc2ndCount = suballocations2nd.size();
7731 size_t nullItem2ndCount = 0;
7732 for(
size_t i = suballoc2ndCount; i--; )
7734 const VmaSuballocation& suballoc = suballocations2nd[i];
7735 const bool currFree = (suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
7737 VMA_VALIDATE(currFree == (suballoc.hAllocation == VK_NULL_HANDLE));
7738 VMA_VALIDATE(suballoc.offset >= offset);
7742 VMA_VALIDATE(suballoc.hAllocation->GetOffset() == suballoc.offset);
7743 VMA_VALIDATE(suballoc.hAllocation->GetSize() == suballoc.size);
7744 sumUsedSize += suballoc.size;
7751 offset = suballoc.offset + suballoc.size + VMA_DEBUG_MARGIN;
7754 VMA_VALIDATE(nullItem2ndCount == m_2ndNullItemsCount);
7757 VMA_VALIDATE(offset <= GetSize());
7758 VMA_VALIDATE(m_SumFreeSize == GetSize() - sumUsedSize);
7763 size_t VmaBlockMetadata_Linear::GetAllocationCount()
const 7765 return AccessSuballocations1st().size() - (m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount) +
7766 AccessSuballocations2nd().size() - m_2ndNullItemsCount;
7769 VkDeviceSize VmaBlockMetadata_Linear::GetUnusedRangeSizeMax()
const 7771 const VkDeviceSize size = GetSize();
7783 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7785 switch(m_2ndVectorMode)
7787 case SECOND_VECTOR_EMPTY:
7793 const size_t suballocations1stCount = suballocations1st.size();
7794 VMA_ASSERT(suballocations1stCount > m_1stNullItemsBeginCount);
7795 const VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
7796 const VmaSuballocation& lastSuballoc = suballocations1st[suballocations1stCount - 1];
7798 firstSuballoc.offset,
7799 size - (lastSuballoc.offset + lastSuballoc.size));
7803 case SECOND_VECTOR_RING_BUFFER:
7808 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7809 const VmaSuballocation& lastSuballoc2nd = suballocations2nd.back();
7810 const VmaSuballocation& firstSuballoc1st = suballocations1st[m_1stNullItemsBeginCount];
7811 return firstSuballoc1st.offset - (lastSuballoc2nd.offset + lastSuballoc2nd.size);
7815 case SECOND_VECTOR_DOUBLE_STACK:
7820 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7821 const VmaSuballocation& topSuballoc2nd = suballocations2nd.back();
7822 const VmaSuballocation& lastSuballoc1st = suballocations1st.back();
7823 return topSuballoc2nd.offset - (lastSuballoc1st.offset + lastSuballoc1st.size);
// Fills `outInfo` with per-block statistics (allocation count, used/unused
// bytes, min/max sizes of allocations and unused ranges) by walking the 2nd
// vector (ring-buffer part), then the 1st vector, then the 2nd vector again
// (double-stack part), tracking `lastOffset` to discover gaps between items.
// NOTE(review): this text is from a mangled extraction - the original file
// line numbers are fused into the code and many lines (braces, outInfo field
// initialization around 7840-7849, and the stat-accumulation statements inside
// the loops) were dropped. Do not edit without diffing against the pristine
// vk_mem_alloc.h.
7833 void VmaBlockMetadata_Linear::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 7835 const VkDeviceSize size = GetSize();
7836 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
7837 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
7838 const size_t suballoc1stCount = suballocations1st.size();
7839 const size_t suballoc2ndCount = suballocations2nd.size();
7850 VkDeviceSize lastOffset = 0;
// Pass over the 2nd vector when it acts as the ring-buffer part located
// before the 1st vector in address order.
7852 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
7854 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
7855 size_t nextAlloc2ndIndex = 0;
7856 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip freed (null) items.
7859 while(nextAlloc2ndIndex < suballoc2ndCount &&
7860 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
7862 ++nextAlloc2ndIndex;
7866 if(nextAlloc2ndIndex < suballoc2ndCount)
7868 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
// Gap before this allocation counts as an unused range.
7871 if(lastOffset < suballoc.offset)
7874 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7888 lastOffset = suballoc.offset + suballoc.size;
7889 ++nextAlloc2ndIndex;
// Trailing free space up to the start of the 1st vector.
7895 if(lastOffset < freeSpace2ndTo1stEnd)
7897 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
7905 lastOffset = freeSpace2ndTo1stEnd;
// Pass over the 1st vector, up to the block end or the top of the 2nd stack.
7910 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
7911 const VkDeviceSize freeSpace1stTo2ndEnd =
7912 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
7913 while(lastOffset < freeSpace1stTo2ndEnd)
7916 while(nextAlloc1stIndex < suballoc1stCount &&
7917 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
7919 ++nextAlloc1stIndex;
7923 if(nextAlloc1stIndex < suballoc1stCount)
7925 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
7928 if(lastOffset < suballoc.offset)
7931 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
7945 lastOffset = suballoc.offset + suballoc.size;
7946 ++nextAlloc1stIndex;
7952 if(lastOffset < freeSpace1stTo2ndEnd)
7954 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
7962 lastOffset = freeSpace1stTo2ndEnd;
// Pass over the 2nd vector when it acts as the upper (double) stack; it is
// walked backwards to visit items in increasing address order.
7966 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
7968 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
7969 while(lastOffset < size)
7972 while(nextAlloc2ndIndex != SIZE_MAX &&
7973 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
7975 --nextAlloc2ndIndex;
7979 if(nextAlloc2ndIndex != SIZE_MAX)
7981 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
7984 if(lastOffset < suballoc.offset)
7987 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8001 lastOffset = suballoc.offset + suballoc.size;
8002 --nextAlloc2ndIndex;
// Final free space up to the end of the block.
8008 if(lastOffset < size)
8010 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Accumulates this block's statistics into `inoutStats` (pool-level stats):
// same three-pass walk as CalcAllocationStatInfo - ring-buffer part of the
// 2nd vector, then the 1st vector, then the double-stack part of the 2nd
// vector - using `lastOffset` to find unused ranges between allocations.
// NOTE(review): mangled extraction - original line numbers are fused into the
// code and the brace lines plus the inoutStats accumulation statements inside
// the loops were dropped. Compare against the pristine vk_mem_alloc.h before
// editing. Also note nextAlloc2ndIndex here starts at m_1stNullItemsBeginCount
// (line 8041) while the analogous loops elsewhere start at 0 - verify upstream.
8026 void VmaBlockMetadata_Linear::AddPoolStats(
VmaPoolStats& inoutStats)
const 8028 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8029 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8030 const VkDeviceSize size = GetSize();
8031 const size_t suballoc1stCount = suballocations1st.size();
8032 const size_t suballoc2ndCount = suballocations2nd.size();
8034 inoutStats.
size += size;
8036 VkDeviceSize lastOffset = 0;
// Ring-buffer part of the 2nd vector (located before the 1st vector).
8038 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8040 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8041 size_t nextAlloc2ndIndex = m_1stNullItemsBeginCount;
8042 while(lastOffset < freeSpace2ndTo1stEnd)
// Skip freed (null) items.
8045 while(nextAlloc2ndIndex < suballoc2ndCount &&
8046 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8048 ++nextAlloc2ndIndex;
8052 if(nextAlloc2ndIndex < suballoc2ndCount)
8054 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8057 if(lastOffset < suballoc.offset)
8060 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8071 lastOffset = suballoc.offset + suballoc.size;
8072 ++nextAlloc2ndIndex;
8077 if(lastOffset < freeSpace2ndTo1stEnd)
8080 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8087 lastOffset = freeSpace2ndTo1stEnd;
// 1st vector, up to block end or the top of the 2nd stack.
8092 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8093 const VkDeviceSize freeSpace1stTo2ndEnd =
8094 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8095 while(lastOffset < freeSpace1stTo2ndEnd)
8098 while(nextAlloc1stIndex < suballoc1stCount &&
8099 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8101 ++nextAlloc1stIndex;
8105 if(nextAlloc1stIndex < suballoc1stCount)
8107 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8110 if(lastOffset < suballoc.offset)
8113 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8124 lastOffset = suballoc.offset + suballoc.size;
8125 ++nextAlloc1stIndex;
8130 if(lastOffset < freeSpace1stTo2ndEnd)
8133 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8140 lastOffset = freeSpace1stTo2ndEnd;
// Double-stack part of the 2nd vector, walked backwards (increasing offsets).
8144 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8146 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8147 while(lastOffset < size)
8150 while(nextAlloc2ndIndex != SIZE_MAX &&
8151 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8153 --nextAlloc2ndIndex;
8157 if(nextAlloc2ndIndex != SIZE_MAX)
8159 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8162 if(lastOffset < suballoc.offset)
8165 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8176 lastOffset = suballoc.offset + suballoc.size;
8177 --nextAlloc2ndIndex;
8182 if(lastOffset < size)
8185 const VkDeviceSize unusedRangeSize = size - lastOffset;
// Writes a detailed JSON map of this block's layout. Two passes over the same
// three regions (ring-buffer 2nd vector, 1st vector, double-stack 2nd vector):
// the FIRST pass only counts allocations/unused ranges and sums used bytes so
// that PrintDetailedMap_Begin can emit correct totals; the SECOND pass emits
// each allocation and unused range via PrintDetailedMap_Allocation /
// PrintDetailedMap_UnusedRange, ending with PrintDetailedMap_End.
// NOTE(review): mangled extraction - original line numbers are fused into the
// code; brace lines and some counting statements (e.g. ++unusedRangeCount,
// ++alloc1stCount/++alloc2ndCount) were dropped. Diff against the pristine
// vk_mem_alloc.h before editing.
8198 #if VMA_STATS_STRING_ENABLED 8199 void VmaBlockMetadata_Linear::PrintDetailedMap(
class VmaJsonWriter& json)
const 8201 const VkDeviceSize size = GetSize();
8202 const SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8203 const SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8204 const size_t suballoc1stCount = suballocations1st.size();
8205 const size_t suballoc2ndCount = suballocations2nd.size();
// FIRST PASS: count items and sum sizes.
8209 size_t unusedRangeCount = 0;
8210 VkDeviceSize usedBytes = 0;
8212 VkDeviceSize lastOffset = 0;
8214 size_t alloc2ndCount = 0;
8215 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8217 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8218 size_t nextAlloc2ndIndex = 0;
8219 while(lastOffset < freeSpace2ndTo1stEnd)
8222 while(nextAlloc2ndIndex < suballoc2ndCount &&
8223 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8225 ++nextAlloc2ndIndex;
8229 if(nextAlloc2ndIndex < suballoc2ndCount)
8231 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8234 if(lastOffset < suballoc.offset)
8243 usedBytes += suballoc.size;
8246 lastOffset = suballoc.offset + suballoc.size;
8247 ++nextAlloc2ndIndex;
8252 if(lastOffset < freeSpace2ndTo1stEnd)
8259 lastOffset = freeSpace2ndTo1stEnd;
8264 size_t nextAlloc1stIndex = m_1stNullItemsBeginCount;
8265 size_t alloc1stCount = 0;
8266 const VkDeviceSize freeSpace1stTo2ndEnd =
8267 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ? suballocations2nd.back().offset : size;
8268 while(lastOffset < freeSpace1stTo2ndEnd)
8271 while(nextAlloc1stIndex < suballoc1stCount &&
8272 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8274 ++nextAlloc1stIndex;
8278 if(nextAlloc1stIndex < suballoc1stCount)
8280 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8283 if(lastOffset < suballoc.offset)
8292 usedBytes += suballoc.size;
8295 lastOffset = suballoc.offset + suballoc.size;
8296 ++nextAlloc1stIndex;
8301 if(lastOffset < size)
8308 lastOffset = freeSpace1stTo2ndEnd;
8312 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8314 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8315 while(lastOffset < size)
8318 while(nextAlloc2ndIndex != SIZE_MAX &&
8319 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8321 --nextAlloc2ndIndex;
8325 if(nextAlloc2ndIndex != SIZE_MAX)
8327 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8330 if(lastOffset < suballoc.offset)
8339 usedBytes += suballoc.size;
8342 lastOffset = suballoc.offset + suballoc.size;
8343 --nextAlloc2ndIndex;
8348 if(lastOffset < size)
// Emit the JSON header with the totals gathered above.
8360 const VkDeviceSize unusedBytes = size - usedBytes;
8361 PrintDetailedMap_Begin(json, unusedBytes, alloc1stCount + alloc2ndCount, unusedRangeCount);
// SECOND PASS: emit each allocation and unused range in address order.
8366 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8368 const VkDeviceSize freeSpace2ndTo1stEnd = suballocations1st[m_1stNullItemsBeginCount].offset;
8369 size_t nextAlloc2ndIndex = 0;
8370 while(lastOffset < freeSpace2ndTo1stEnd)
8373 while(nextAlloc2ndIndex < suballoc2ndCount &&
8374 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8376 ++nextAlloc2ndIndex;
8380 if(nextAlloc2ndIndex < suballoc2ndCount)
8382 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8385 if(lastOffset < suballoc.offset)
8388 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8389 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8394 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8397 lastOffset = suballoc.offset + suballoc.size;
8398 ++nextAlloc2ndIndex;
8403 if(lastOffset < freeSpace2ndTo1stEnd)
8406 const VkDeviceSize unusedRangeSize = freeSpace2ndTo1stEnd - lastOffset;
8407 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8411 lastOffset = freeSpace2ndTo1stEnd;
8416 nextAlloc1stIndex = m_1stNullItemsBeginCount;
8417 while(lastOffset < freeSpace1stTo2ndEnd)
8420 while(nextAlloc1stIndex < suballoc1stCount &&
8421 suballocations1st[nextAlloc1stIndex].hAllocation == VK_NULL_HANDLE)
8423 ++nextAlloc1stIndex;
8427 if(nextAlloc1stIndex < suballoc1stCount)
8429 const VmaSuballocation& suballoc = suballocations1st[nextAlloc1stIndex];
8432 if(lastOffset < suballoc.offset)
8435 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8436 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8441 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8444 lastOffset = suballoc.offset + suballoc.size;
8445 ++nextAlloc1stIndex;
8450 if(lastOffset < freeSpace1stTo2ndEnd)
8453 const VkDeviceSize unusedRangeSize = freeSpace1stTo2ndEnd - lastOffset;
8454 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8458 lastOffset = freeSpace1stTo2ndEnd;
8462 if(m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8464 size_t nextAlloc2ndIndex = suballocations2nd.size() - 1;
8465 while(lastOffset < size)
8468 while(nextAlloc2ndIndex != SIZE_MAX &&
8469 suballocations2nd[nextAlloc2ndIndex].hAllocation == VK_NULL_HANDLE)
8471 --nextAlloc2ndIndex;
8475 if(nextAlloc2ndIndex != SIZE_MAX)
8477 const VmaSuballocation& suballoc = suballocations2nd[nextAlloc2ndIndex];
8480 if(lastOffset < suballoc.offset)
8483 const VkDeviceSize unusedRangeSize = suballoc.offset - lastOffset;
8484 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8489 PrintDetailedMap_Allocation(json, suballoc.offset, suballoc.hAllocation);
8492 lastOffset = suballoc.offset + suballoc.size;
8493 --nextAlloc2ndIndex;
8498 if(lastOffset < size)
8501 const VkDeviceSize unusedRangeSize = size - lastOffset;
8502 PrintDetailedMap_UnusedRange(json, lastOffset, unusedRangeSize);
8511 PrintDetailedMap_End(json);
// Tries to find space for a new allocation of `allocSize`/`allocAlignment` in
// this linear block and, on success, fills `pAllocationRequest` (offset, free
// size, items to make lost) and returns true. Three strategies, in order of
// appearance below:
//   1. upper address (double stack): place below the current top of the 2nd
//      vector, aligning DOWNWARD and checking against the end of the 1st;
//   2. lower address at the end of the 1st vector (or below the 2nd stack);
//   3. wrap-around into the ring buffer before the start of the 1st vector,
//      optionally making existing allocations "lost" if canMakeOtherLost.
// bufferImageGranularity conflicts (VmaBlocksOnSamePage +
// VmaIsBufferImageGranularityConflict) are checked against neighbors in
// both directions; VMA_DEBUG_MARGIN is kept between allocations.
// NOTE(review): mangled extraction - original line numbers are fused into the
// code; brace lines, several `return false;`/`return true;` statements, the
// `upperAddress` parameter line (8521) and other guards were dropped. Diff
// against the pristine vk_mem_alloc.h before editing.
8513 #endif // #if VMA_STATS_STRING_ENABLED 8515 bool VmaBlockMetadata_Linear::CreateAllocationRequest(
8516 uint32_t currentFrameIndex,
8517 uint32_t frameInUseCount,
8518 VkDeviceSize bufferImageGranularity,
8519 VkDeviceSize allocSize,
8520 VkDeviceSize allocAlignment,
8522 VmaSuballocationType allocType,
8523 bool canMakeOtherLost,
8525 VmaAllocationRequest* pAllocationRequest)
8527 VMA_ASSERT(allocSize > 0);
8528 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
8529 VMA_ASSERT(pAllocationRequest != VMA_NULL);
8530 VMA_HEAVY_ASSERT(Validate());
8532 const VkDeviceSize size = GetSize();
8533 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8534 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Strategy 1: upper address (2nd vector used as a stack growing downward).
8538 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8540 VMA_ASSERT(0 &&
"Trying to use pool with linear algorithm as double stack, while it is already being used as ring buffer.");
8545 if(allocSize > size)
8549 VkDeviceSize resultBaseOffset = size - allocSize;
8550 if(!suballocations2nd.empty())
8552 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8553 resultBaseOffset = lastSuballoc.offset - allocSize;
8554 if(allocSize > lastSuballoc.offset)
// Start from the base offset and adjust downward for margin/alignment.
8561 VkDeviceSize resultOffset = resultBaseOffset;
8564 if(VMA_DEBUG_MARGIN > 0)
8566 if(resultOffset < VMA_DEBUG_MARGIN)
8570 resultOffset -= VMA_DEBUG_MARGIN;
8574 resultOffset = VmaAlignDown(resultOffset, allocAlignment);
// Check granularity conflict with the allocation above (in the 2nd vector).
8578 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8580 bool bufferImageGranularityConflict =
false;
8581 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8583 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8584 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8586 if(VmaIsBufferImageGranularityConflict(nextSuballoc.type, allocType))
8588 bufferImageGranularityConflict =
true;
8596 if(bufferImageGranularityConflict)
8598 resultOffset = VmaAlignDown(resultOffset, bufferImageGranularity)
;
// There must be room between the end of the 1st vector and resultOffset.
8603 const VkDeviceSize endOf1st = !suballocations1st.empty() ?
8604 suballocations1st.back().offset + suballocations1st.back().size :
8606 if(endOf1st + VMA_DEBUG_MARGIN <= resultOffset)
// Check granularity conflict with allocations below (in the 1st vector).
8610 if(bufferImageGranularity > 1)
8612 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8614 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8615 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8617 if(VmaIsBufferImageGranularityConflict(allocType, prevSuballoc.type))
// Success for strategy 1.
8631 pAllocationRequest->offset = resultOffset;
8632 pAllocationRequest->sumFreeSize = resultBaseOffset + allocSize - endOf1st;
8633 pAllocationRequest->sumItemSize = 0;
8635 pAllocationRequest->itemsToMakeLostCount = 0;
// Strategy 2: lower address, appended after the end of the 1st vector.
8641 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8645 VkDeviceSize resultBaseOffset = 0;
8646 if(!suballocations1st.empty())
8648 const VmaSuballocation& lastSuballoc = suballocations1st.back();
8649 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8653 VkDeviceSize resultOffset = resultBaseOffset;
8656 if(VMA_DEBUG_MARGIN > 0)
8658 resultOffset += VMA_DEBUG_MARGIN;
8662 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
8666 if(bufferImageGranularity > 1 && !suballocations1st.empty())
8668 bool bufferImageGranularityConflict =
false;
8669 for(
size_t prevSuballocIndex = suballocations1st.size(); prevSuballocIndex--; )
8671 const VmaSuballocation& prevSuballoc = suballocations1st[prevSuballocIndex];
8672 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8674 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8676 bufferImageGranularityConflict =
true;
8684 if(bufferImageGranularityConflict)
8686 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
// Free space ends at the block end, or at the top of the 2nd stack.
8690 const VkDeviceSize freeSpaceEnd = m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK ?
8691 suballocations2nd.back().offset : size;
8694 if(resultOffset + allocSize + VMA_DEBUG_MARGIN <= freeSpaceEnd)
8698 if(bufferImageGranularity > 1 && m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
8700 for(
size_t nextSuballocIndex = suballocations2nd.size(); nextSuballocIndex--; )
8702 const VmaSuballocation& nextSuballoc = suballocations2nd[nextSuballocIndex];
8703 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8705 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success for strategy 2.
8719 pAllocationRequest->offset = resultOffset;
8720 pAllocationRequest->sumFreeSize = freeSpaceEnd - resultBaseOffset;
8721 pAllocationRequest->sumItemSize = 0;
8723 pAllocationRequest->itemsToMakeLostCount = 0;
// Strategy 3: wrap around - place before the start of the 1st vector,
// turning the block into (or extending) a ring buffer; may require making
// existing allocations lost.
8730 if(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
8732 VMA_ASSERT(!suballocations1st.empty());
8734 VkDeviceSize resultBaseOffset = 0;
8735 if(!suballocations2nd.empty())
8737 const VmaSuballocation& lastSuballoc = suballocations2nd.back();
8738 resultBaseOffset = lastSuballoc.offset + lastSuballoc.size;
8742 VkDeviceSize resultOffset = resultBaseOffset;
8745 if(VMA_DEBUG_MARGIN > 0)
8747 resultOffset += VMA_DEBUG_MARGIN;
8751 resultOffset = VmaAlignUp(resultOffset, allocAlignment);
8755 if(bufferImageGranularity > 1 && !suballocations2nd.empty())
8757 bool bufferImageGranularityConflict =
false;
8758 for(
size_t prevSuballocIndex = suballocations2nd.size(); prevSuballocIndex--; )
8760 const VmaSuballocation& prevSuballoc = suballocations2nd[prevSuballocIndex];
8761 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, resultOffset, bufferImageGranularity))
8763 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
8765 bufferImageGranularityConflict =
true;
8773 if(bufferImageGranularityConflict)
8775 resultOffset = VmaAlignUp(resultOffset, bufferImageGranularity);
8779 pAllocationRequest->itemsToMakeLostCount = 0;
8780 pAllocationRequest->sumItemSize = 0;
8781 size_t index1st = m_1stNullItemsBeginCount;
// Count 1st-vector allocations overlapping the candidate range that can be
// made lost (CanBecomeLost + old enough frame index); abort if any cannot.
8783 if(canMakeOtherLost)
8785 while(index1st < suballocations1st.size() &&
8786 resultOffset + allocSize + VMA_DEBUG_MARGIN > suballocations1st[index1st].offset)
8789 const VmaSuballocation& suballoc = suballocations1st[index1st];
8790 if(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE)
8796 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
8797 if(suballoc.hAllocation->CanBecomeLost() &&
8798 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8800 ++pAllocationRequest->itemsToMakeLostCount;
8801 pAllocationRequest->sumItemSize += suballoc.size;
// Also make lost any following allocation sharing a granularity page.
8813 if(bufferImageGranularity > 1)
8815 while(index1st < suballocations1st.size())
8817 const VmaSuballocation& suballoc = suballocations1st[index1st];
8818 if(VmaBlocksOnSamePage(resultOffset, allocSize, suballoc.offset, bufferImageGranularity))
8820 if(suballoc.hAllocation != VK_NULL_HANDLE)
8823 if(suballoc.hAllocation->CanBecomeLost() &&
8824 suballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
8826 ++pAllocationRequest->itemsToMakeLostCount;
8827 pAllocationRequest->sumItemSize += suballoc.size;
// The candidate range must fit before the (possibly advanced) 1st item.
8846 if((index1st == suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN < size) ||
8847 (index1st < suballocations1st.size() && resultOffset + allocSize + VMA_DEBUG_MARGIN <= suballocations1st[index1st].offset))
8851 if(bufferImageGranularity > 1)
8853 for(
size_t nextSuballocIndex = index1st;
8854 nextSuballocIndex < suballocations1st.size();
8855 nextSuballocIndex++)
8857 const VmaSuballocation& nextSuballoc = suballocations1st[nextSuballocIndex];
8858 if(VmaBlocksOnSamePage(resultOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
8860 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Success for strategy 3.
8874 pAllocationRequest->offset = resultOffset;
8875 pAllocationRequest->sumFreeSize =
8876 (index1st < suballocations1st.size() ? suballocations1st[index1st].offset : size)
8878 - pAllocationRequest->sumItemSize;
// Makes lost the allocations counted in pAllocationRequest->itemsToMakeLostCount
// (prepared by CreateAllocationRequest's wrap-around strategy): walks the 1st
// vector from the first live item, calling MakeLost on each non-free item and
// converting it into a null (free) item. Only valid when the 2nd vector is
// empty or used as a ring buffer.
// NOTE(review): mangled extraction - brace lines and the tail of this function
// (lines after 8916: the ++madeLostCount / failure return / final cleanup and
// `return true;`) were dropped. Diff against the pristine vk_mem_alloc.h
// before editing.
8888 bool VmaBlockMetadata_Linear::MakeRequestedAllocationsLost(
8889 uint32_t currentFrameIndex,
8890 uint32_t frameInUseCount,
8891 VmaAllocationRequest* pAllocationRequest)
// Nothing to do when the request did not ask for any item to be made lost.
8893 if(pAllocationRequest->itemsToMakeLostCount == 0)
8898 VMA_ASSERT(m_2ndVectorMode == SECOND_VECTOR_EMPTY || m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER);
8900 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8901 size_t index1st = m_1stNullItemsBeginCount;
8902 size_t madeLostCount = 0;
8903 while(madeLostCount < pAllocationRequest->itemsToMakeLostCount)
8905 VMA_ASSERT(index1st < suballocations1st.size());
8906 VmaSuballocation& suballoc = suballocations1st[index1st];
8907 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
8909 VMA_ASSERT(suballoc.hAllocation != VK_NULL_HANDLE);
8910 VMA_ASSERT(suballoc.hAllocation->CanBecomeLost());
8911 if(suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// Convert the item into a middle null item and return its size to the
// free pool.
8913 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8914 suballoc.hAllocation = VK_NULL_HANDLE;
8915 m_SumFreeSize += suballoc.size;
8916 ++m_1stNullItemsMiddleCount;
8933 uint32_t VmaBlockMetadata_Linear::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
8935 uint32_t lostAllocationCount = 0;
8937 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8938 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
8940 VmaSuballocation& suballoc = suballocations1st[i];
8941 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
8942 suballoc.hAllocation->CanBecomeLost() &&
8943 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8945 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8946 suballoc.hAllocation = VK_NULL_HANDLE;
8947 ++m_1stNullItemsMiddleCount;
8948 m_SumFreeSize += suballoc.size;
8949 ++lostAllocationCount;
8953 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8954 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
8956 VmaSuballocation& suballoc = suballocations2nd[i];
8957 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE &&
8958 suballoc.hAllocation->CanBecomeLost() &&
8959 suballoc.hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
8961 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
8962 suballoc.hAllocation = VK_NULL_HANDLE;
8963 ++m_2ndNullItemsCount;
8964 ++lostAllocationCount;
8968 if(lostAllocationCount)
8973 return lostAllocationCount;
8976 VkResult VmaBlockMetadata_Linear::CheckCorruption(
const void* pBlockData)
8978 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
8979 for(
size_t i = m_1stNullItemsBeginCount, count = suballocations1st.size(); i < count; ++i)
8981 const VmaSuballocation& suballoc = suballocations1st[i];
8982 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
8984 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
8986 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
8987 return VK_ERROR_VALIDATION_FAILED_EXT;
8989 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
8991 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
8992 return VK_ERROR_VALIDATION_FAILED_EXT;
8997 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
8998 for(
size_t i = 0, count = suballocations2nd.size(); i < count; ++i)
9000 const VmaSuballocation& suballoc = suballocations2nd[i];
9001 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
9003 if(!VmaValidateMagicValue(pBlockData, suballoc.offset - VMA_DEBUG_MARGIN))
9005 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
9006 return VK_ERROR_VALIDATION_FAILED_EXT;
9008 if(!VmaValidateMagicValue(pBlockData, suballoc.offset + suballoc.size))
9010 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
9011 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed VmaAllocationRequest into this block's
// metadata: appends the new suballocation either to the 2nd vector (upper
// address / double stack), to the end of the 1st vector, or to the 2nd vector
// as a ring buffer when the request wraps around before the 1st vector.
// Finally subtracts the allocation size from m_SumFreeSize.
// NOTE(review): mangled extraction - lines 9023-9025 (remaining parameters,
// presumably `bool upperAddress` and `VmaAllocation hAllocation`, plus the
// opening brace), the `if(upperAddress)` guard and switch `break`s were
// dropped. Diff against the pristine vk_mem_alloc.h before editing.
9019 void VmaBlockMetadata_Linear::Alloc(
9020 const VmaAllocationRequest& request,
9021 VmaSuballocationType type,
9022 VkDeviceSize allocSize,
9026 const VmaSuballocation newSuballoc = { request.offset, allocSize, hAllocation, type };
// Upper-address path: push onto the 2nd vector used as a stack growing down.
9030 VMA_ASSERT(m_2ndVectorMode != SECOND_VECTOR_RING_BUFFER &&
9031 "CRITICAL ERROR: Trying to use linear allocator as double stack while it was already used as ring buffer.");
9032 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9033 suballocations2nd.push_back(newSuballoc);
9034 m_2ndVectorMode = SECOND_VECTOR_DOUBLE_STACK;
// Lower-address path.
9038 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9041 if(suballocations1st.empty())
9043 suballocations1st.push_back(newSuballoc);
// Request is at the end of the 1st vector.
9048 if(request.offset >= suballocations1st.back().offset + suballocations1st.back().size)
9051 VMA_ASSERT(request.offset + allocSize <= GetSize());
9052 suballocations1st.push_back(newSuballoc);
// Request wraps around before the 1st vector: use 2nd vector as ring buffer.
9055 else if(request.offset + allocSize <= suballocations1st[m_1stNullItemsBeginCount].offset)
9057 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9059 switch(m_2ndVectorMode)
9061 case SECOND_VECTOR_EMPTY:
// First wrap-around allocation switches the 2nd vector into ring-buffer mode.
9063 VMA_ASSERT(suballocations2nd.empty());
9064 m_2ndVectorMode = SECOND_VECTOR_RING_BUFFER;
9066 case SECOND_VECTOR_RING_BUFFER:
9068 VMA_ASSERT(!suballocations2nd.empty());
9070 case SECOND_VECTOR_DOUBLE_STACK:
9071 VMA_ASSERT(0 &&
"CRITICAL ERROR: Trying to use linear allocator as ring buffer while it was already used as double stack.");
9077 suballocations2nd.push_back(newSuballoc);
9081 VMA_ASSERT(0 &&
"CRITICAL INTERNAL ERROR.");
// Account the new allocation against the block's free size.
9086 m_SumFreeSize -= newSuballoc.size;
9089 void VmaBlockMetadata_Linear::Free(
const VmaAllocation allocation)
9091 FreeAtOffset(allocation->GetOffset());
// Frees the suballocation located at `offset`. Fast paths first: the first
// live item of the 1st vector (advance m_1stNullItemsBeginCount), the last
// item of the 2nd vector (pop_back), or the last item of the 1st vector
// (pop_back). Otherwise binary-searches the middle of the 1st vector, then
// the 2nd vector, turning the found item into a null item. Asserts if the
// offset matches no allocation.
// NOTE(review): mangled extraction - brace lines, several early `return`/
// CleanupAfterFree() calls between the cases, and the `refSuballoc` argument
// line (9148) were dropped. Diff against the pristine vk_mem_alloc.h before
// editing.
9094 void VmaBlockMetadata_Linear::FreeAtOffset(VkDeviceSize offset)
9096 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9097 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
9099 if(!suballocations1st.empty())
// Case 1: first live item of the 1st vector becomes a leading null item.
9102 VmaSuballocation& firstSuballoc = suballocations1st[m_1stNullItemsBeginCount];
9103 if(firstSuballoc.offset == offset)
9105 firstSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
9106 firstSuballoc.hAllocation = VK_NULL_HANDLE;
9107 m_SumFreeSize += firstSuballoc.size;
9108 ++m_1stNullItemsBeginCount;
// Case 2: last item of the 2nd vector (ring buffer or double stack).
9115 if(m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ||
9116 m_2ndVectorMode == SECOND_VECTOR_DOUBLE_STACK)
9118 VmaSuballocation& lastSuballoc = suballocations2nd.back();
9119 if(lastSuballoc.offset == offset)
9121 m_SumFreeSize += lastSuballoc.size;
9122 suballocations2nd.pop_back();
// Case 3: last item of the 1st vector when there is no 2nd vector.
9128 else if(m_2ndVectorMode == SECOND_VECTOR_EMPTY)
9130 VmaSuballocation& lastSuballoc = suballocations1st.back();
9131 if(lastSuballoc.offset == offset)
9133 m_SumFreeSize += lastSuballoc.size;
9134 suballocations1st.pop_back();
// Case 4: binary-search the middle of the 1st vector (sorted by offset).
9142 VmaSuballocation refSuballoc;
9143 refSuballoc.offset = offset;
9145 SuballocationVectorType::iterator it = VmaVectorFindSorted<VmaSuballocationOffsetLess>(
9146 suballocations1st.begin() + m_1stNullItemsBeginCount,
9147 suballocations1st.end(),
9149 if(it != suballocations1st.end())
9151 it->type = VMA_SUBALLOCATION_TYPE_FREE;
9152 it->hAllocation = VK_NULL_HANDLE;
9153 ++m_1stNullItemsMiddleCount;
9154 m_SumFreeSize += it->size;
// Case 5: search the 2nd vector - ascending offsets in ring-buffer mode,
// descending in double-stack mode, hence the two comparators.
9160 if(m_2ndVectorMode != SECOND_VECTOR_EMPTY)
9163 VmaSuballocation refSuballoc;
9164 refSuballoc.offset = offset;
9166 SuballocationVectorType::iterator it = m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER ?
9167 VmaVectorFindSorted<VmaSuballocationOffsetLess>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc) :
9168 VmaVectorFindSorted<VmaSuballocationOffsetGreater>(suballocations2nd.begin(), suballocations2nd.end(), refSuballoc);
9169 if(it != suballocations2nd.end())
9171 it->type = VMA_SUBALLOCATION_TYPE_FREE;
9172 it->hAllocation = VK_NULL_HANDLE;
9173 ++m_2ndNullItemsCount;
9174 m_SumFreeSize += it->size;
9180 VMA_ASSERT(0 &&
"Allocation to free not found in linear allocator!");
9183 bool VmaBlockMetadata_Linear::ShouldCompact1st()
const 9185 const size_t nullItemCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
9186 const size_t suballocCount = AccessSuballocations1st().size();
9187 return suballocCount > 32 && nullItemCount * 2 >= (suballocCount - nullItemCount) * 3;
// Restores invariants after a Free: drops trailing/leading null items, optionally
// compacts the 1st vector, and swaps the 2nd vector into the 1st when the 1st is
// exhausted. NOTE(review): several inner statements (e.g. the ++srcIndex of the
// compaction loop) were lost in this extraction; leading numbers are artifacts.
9190 void VmaBlockMetadata_Linear::CleanupAfterFree()
9192 SuballocationVectorType& suballocations1st = AccessSuballocations1st();
9193 SuballocationVectorType& suballocations2nd = AccessSuballocations2nd();
// Block became completely empty: reset everything to the initial state.
9197 suballocations1st.clear();
9198 suballocations2nd.clear();
9199 m_1stNullItemsBeginCount = 0;
9200 m_1stNullItemsMiddleCount = 0;
9201 m_2ndNullItemsCount = 0;
9202 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9206 const size_t suballoc1stCount = suballocations1st.size();
9207 const size_t nullItem1stCount = m_1stNullItemsBeginCount + m_1stNullItemsMiddleCount;
9208 VMA_ASSERT(nullItem1stCount <= suballoc1stCount);
// Promote null items adjacent to the beginning into the "begin" run.
9211 while(m_1stNullItemsBeginCount < suballoc1stCount &&
9212 suballocations1st[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9214 ++m_1stNullItemsBeginCount;
9215 --m_1stNullItemsMiddleCount;
// Physically erase null items at the end of the 1st vector.
9219 while(m_1stNullItemsMiddleCount > 0 &&
9220 suballocations1st.back().hAllocation == VK_NULL_HANDLE)
9222 --m_1stNullItemsMiddleCount;
9223 suballocations1st.pop_back();
// Physically erase null items at the end of the 2nd vector.
9227 while(m_2ndNullItemsCount > 0 &&
9228 suballocations2nd.back().hAllocation == VK_NULL_HANDLE)
9230 --m_2ndNullItemsCount;
9231 suballocations2nd.pop_back();
// Compact the 1st vector in place: shift live items left over the null gaps.
9234 if(ShouldCompact1st())
9236 const size_t nonNullItemCount = suballoc1stCount - nullItem1stCount;
9237 size_t srcIndex = m_1stNullItemsBeginCount;
9238 for(
size_t dstIndex = 0; dstIndex < nonNullItemCount; ++dstIndex)
9240 while(suballocations1st[srcIndex].hAllocation == VK_NULL_HANDLE)
9244 if(dstIndex != srcIndex)
9246 suballocations1st[dstIndex] = suballocations1st[srcIndex];
9250 suballocations1st.resize(nonNullItemCount);
9251 m_1stNullItemsBeginCount = 0;
9252 m_1stNullItemsMiddleCount = 0;
9256 if(suballocations2nd.empty())
9258 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
// 1st vector has no live items left: the 2nd (ring buffer) becomes the new 1st.
9262 if(suballocations1st.size() - m_1stNullItemsBeginCount == 0)
9264 suballocations1st.clear();
9265 m_1stNullItemsBeginCount = 0;
9267 if(!suballocations2nd.empty() && m_2ndVectorMode == SECOND_VECTOR_RING_BUFFER)
9270 m_2ndVectorMode = SECOND_VECTOR_EMPTY;
9271 m_1stNullItemsMiddleCount = m_2ndNullItemsCount;
9272 while(m_1stNullItemsBeginCount < suballocations2nd.size() &&
9273 suballocations2nd[m_1stNullItemsBeginCount].hAllocation == VK_NULL_HANDLE)
9275 ++m_1stNullItemsBeginCount;
9276 --m_1stNullItemsMiddleCount;
9278 m_2ndNullItemsCount = 0;
// Swap the roles of the two vectors by flipping the index.
9279 m_1stVectorIndex ^= 1;
9284 VMA_HEAVY_ASSERT(Validate());
// Constructor: zero-initializes the per-level free lists; remaining members are
// set in the initializer list (partially elided in this extraction).
9291 VmaBlockMetadata_Buddy::VmaBlockMetadata_Buddy(
VmaAllocator hAllocator) :
9292 VmaBlockMetadata(hAllocator),
9294 m_AllocationCount(0),
9298 memset(m_FreeList, 0,
sizeof(m_FreeList));
// Destructor — body elided in this extraction (presumably frees the node tree;
// TODO confirm against the full source).
9301 VmaBlockMetadata_Buddy::~VmaBlockMetadata_Buddy()
// Initializes the buddy allocator for a block of 'size' bytes. The usable size is
// rounded down to a power of two; the remainder is tracked as "unusable".
9306 void VmaBlockMetadata_Buddy::Init(VkDeviceSize size)
9308 VmaBlockMetadata::Init(size);
9310 m_UsableSize = VmaPrevPow2(size);
9311 m_SumFreeSize = m_UsableSize;
// Count levels until nodes would drop below the minimum node size
// (level-count increment elided in this extraction).
9315 while(m_LevelCount < MAX_LEVELS &&
9316 LevelToNodeSize(m_LevelCount) >= MIN_NODE_SIZE)
// Create the root node covering the whole usable range and put it on level 0's free list.
9321 Node* rootNode = vma_new(GetAllocationCallbacks(), Node)();
9322 rootNode->offset = 0;
9323 rootNode->type = Node::TYPE_FREE;
9324 rootNode->parent = VMA_NULL;
9325 rootNode->buddy = VMA_NULL;
9328 AddToFreeListFront(0, rootNode);
// Debug validation: recursively checks the node tree, then verifies aggregate
// counters and the integrity of every per-level free list.
9331 bool VmaBlockMetadata_Buddy::Validate()
const 9334 ValidationContext ctx;
9335 if(!ValidateNode(ctx, VMA_NULL, m_Root, 0, LevelToNodeSize(0)))
9337 VMA_VALIDATE(
false &&
"ValidateNode failed.");
// Aggregates computed by the recursion must match the cached members.
9339 VMA_VALIDATE(m_AllocationCount == ctx.calculatedAllocationCount);
9340 VMA_VALIDATE(m_SumFreeSize == ctx.calculatedSumFreeSize);
// Walk each active level's free list: front has no predecessor, links are
// symmetric, every node is actually free, and back points at the last node.
9343 for(uint32_t level = 0; level < m_LevelCount; ++level)
9345 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL ||
9346 m_FreeList[level].front->free.prev == VMA_NULL);
9348 for(Node* node = m_FreeList[level].front;
9350 node = node->free.next)
9352 VMA_VALIDATE(node->type == Node::TYPE_FREE);
9354 if(node->free.next == VMA_NULL)
9356 VMA_VALIDATE(m_FreeList[level].back == node);
9360 VMA_VALIDATE(node->free.next->free.prev == node);
// Levels beyond m_LevelCount must be completely unused.
9366 for(uint32_t level = m_LevelCount; level < MAX_LEVELS; ++level)
9368 VMA_VALIDATE(m_FreeList[level].front == VMA_NULL && m_FreeList[level].back == VMA_NULL);
// Largest free range = node size of the shallowest level that has any free node
// (free lists are per-level, and shallower levels hold bigger nodes).
9374 VkDeviceSize VmaBlockMetadata_Buddy::GetUnusedRangeSizeMax()
const 9376 for(uint32_t level = 0; level < m_LevelCount; ++level)
9378 if(m_FreeList[level].front != VMA_NULL)
9380 return LevelToNodeSize(level);
// Fills 'outInfo' by walking the node tree; the tail of usable-size rounding
// ("unusable" bytes) is reported as one extra unused range (details elided here).
9386 void VmaBlockMetadata_Buddy::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 9388 const VkDeviceSize unusableSize = GetUnusableSize();
9399 CalcAllocationStatInfoNode(outInfo, m_Root, LevelToNodeSize(0));
9401 if(unusableSize > 0)
// Accumulates this block's totals into pool-level statistics. The unusable tail
// (size not covered by the power-of-two usable size) counts as unused space.
9410 void VmaBlockMetadata_Buddy::AddPoolStats(
VmaPoolStats& inoutStats)
const 9412 const VkDeviceSize unusableSize = GetUnusableSize();
9414 inoutStats.
size += GetSize();
9415 inoutStats.
unusedSize += m_SumFreeSize + unusableSize;
9420 if(unusableSize > 0)
// Writes this block's layout as JSON for vmaBuildStatsString (debug dumps).
9427 #if VMA_STATS_STRING_ENABLED 9429 void VmaBlockMetadata_Buddy::PrintDetailedMap(
class VmaJsonWriter& json)
const 9433 CalcAllocationStatInfo(stat);
9435 PrintDetailedMap_Begin(
9441 PrintDetailedMapNode(json, m_Root, LevelToNodeSize(0));
9443 const VkDeviceSize unusableSize = GetUnusableSize();
9444 if(unusableSize > 0)
9446 PrintDetailedMap_UnusedRange(json,
9451 PrintDetailedMap_End(json);
9454 #endif // #if VMA_STATS_STRING_ENABLED 9456 bool VmaBlockMetadata_Buddy::CreateAllocationRequest(
// Tries to find a free node big enough for the request. The buddy algorithm
// ignores lost-allocation support (canMakeOtherLost) and upper-address placement.
// On success fills *pAllocationRequest; the chosen level is smuggled through
// customData for Alloc() to use.
9457 uint32_t currentFrameIndex,
9458 uint32_t frameInUseCount,
9459 VkDeviceSize bufferImageGranularity,
9460 VkDeviceSize allocSize,
9461 VkDeviceSize allocAlignment,
9463 VmaSuballocationType allocType,
9464 bool canMakeOtherLost,
9466 VmaAllocationRequest* pAllocationRequest)
9468 VMA_ASSERT(!upperAddress &&
"VMA_ALLOCATION_CREATE_UPPER_ADDRESS_BIT can be used only with linear algorithm.");
// Conservatively widen size/alignment to the buffer-image granularity for types
// that may be mixed with images, to avoid granularity conflicts.
9472 if(allocType == VMA_SUBALLOCATION_TYPE_UNKNOWN ||
9473 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
9474 allocType == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL)
9476 allocAlignment = VMA_MAX(allocAlignment, bufferImageGranularity);
9477 allocSize = VMA_MAX(allocSize, bufferImageGranularity);
9480 if(allocSize > m_UsableSize)
// Search from the target level upward (toward bigger nodes) for a free node
// whose offset satisfies the alignment.
9485 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
9486 for(uint32_t level = targetLevel + 1; level--; )
9488 for(Node* freeNode = m_FreeList[level].front;
9489 freeNode != VMA_NULL;
9490 freeNode = freeNode->free.next)
9492 if(freeNode->offset % allocAlignment == 0)
9494 pAllocationRequest->offset = freeNode->offset;
9495 pAllocationRequest->sumFreeSize = LevelToNodeSize(level);
9496 pAllocationRequest->sumItemSize = 0;
9497 pAllocationRequest->itemsToMakeLostCount = 0;
// Level is passed to Alloc() via the opaque customData field.
9498 pAllocationRequest->customData = (
void*)(uintptr_t)level;
// The buddy algorithm never produces requests that require making allocations
// lost, so this only succeeds for requests with zero items to lose.
9507 bool VmaBlockMetadata_Buddy::MakeRequestedAllocationsLost(
9508 uint32_t currentFrameIndex,
9509 uint32_t frameInUseCount,
9510 VmaAllocationRequest* pAllocationRequest)
9516 return pAllocationRequest->itemsToMakeLostCount == 0;
// Lost allocations are unsupported by the buddy algorithm (body elided; presumably
// returns 0 — TODO confirm against the full source).
9519 uint32_t VmaBlockMetadata_Buddy::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
// Commits an allocation previously found by CreateAllocationRequest: locates the
// chosen free node, splits it repeatedly until it matches the target level, then
// marks it as an allocation.
9528 void VmaBlockMetadata_Buddy::Alloc(
9529 const VmaAllocationRequest& request,
9530 VmaSuballocationType type,
9531 VkDeviceSize allocSize,
// The level chosen by CreateAllocationRequest travels in request.customData.
9535 const uint32_t targetLevel = AllocSizeToLevel(allocSize);
9536 uint32_t currLevel = (uint32_t)(uintptr_t)request.customData;
// Find the node with the requested offset in that level's free list.
9538 Node* currNode = m_FreeList[currLevel].front;
9539 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
9540 while(currNode->offset != request.offset)
9542 currNode = currNode->free.next;
9543 VMA_ASSERT(currNode != VMA_NULL && currNode->type == Node::TYPE_FREE);
// Split down to the target level: each split removes the node from its free
// list, creates two child buddies, and pushes both onto the next level's list.
9547 while(currLevel < targetLevel)
9551 RemoveFromFreeList(currLevel, currNode);
9553 const uint32_t childrenLevel = currLevel + 1;
9556 Node* leftChild = vma_new(GetAllocationCallbacks(), Node)();
9557 Node* rightChild = vma_new(GetAllocationCallbacks(), Node)();
9559 leftChild->offset = currNode->offset;
9560 leftChild->type = Node::TYPE_FREE;
9561 leftChild->parent = currNode;
9562 leftChild->buddy = rightChild;
9564 rightChild->offset = currNode->offset + LevelToNodeSize(childrenLevel);
9565 rightChild->type = Node::TYPE_FREE;
9566 rightChild->parent = currNode;
9567 rightChild->buddy = leftChild;
9570 currNode->type = Node::TYPE_SPLIT;
9571 currNode->split.leftChild = leftChild;
// Left child ends up at the front, so the next iteration picks it up
// (it carries the requested offset).
9574 AddToFreeListFront(childrenLevel, rightChild);
9575 AddToFreeListFront(childrenLevel, leftChild);
9580 currNode = m_FreeList[currLevel].front;
9589 VMA_ASSERT(currLevel == targetLevel &&
9590 currNode != VMA_NULL &&
9591 currNode->type == Node::TYPE_FREE);
9592 RemoveFromFreeList(currLevel, currNode);
// Convert the free node to an allocation and update aggregate counters.
9595 currNode->type = Node::TYPE_ALLOCATION;
9596 currNode->allocation.alloc = hAllocation;
9598 ++m_AllocationCount;
9600 m_SumFreeSize -= allocSize;
9603 void VmaBlockMetadata_Buddy::DeleteNode(Node* node)
9605 if(node->type == Node::TYPE_SPLIT)
9607 DeleteNode(node->split.leftChild->buddy);
9608 DeleteNode(node->split.leftChild);
9611 vma_delete(GetAllocationCallbacks(), node);
// Recursive tree validation used by Validate(): checks parent/buddy linkage and,
// per node type, updates the ValidationContext counters or recurses into children.
9614 bool VmaBlockMetadata_Buddy::ValidateNode(ValidationContext& ctx,
const Node* parent,
const Node* curr, uint32_t level, VkDeviceSize levelNodeSize)
const 9616 VMA_VALIDATE(level < m_LevelCount);
9617 VMA_VALIDATE(curr->parent == parent);
// Only the root has no buddy; buddy links must be mutual.
9618 VMA_VALIDATE((curr->buddy == VMA_NULL) == (parent == VMA_NULL));
9619 VMA_VALIDATE(curr->buddy == VMA_NULL || curr->buddy->buddy == curr);
9622 case Node::TYPE_FREE:
9624 ctx.calculatedSumFreeSize += levelNodeSize;
9625 ++ctx.calculatedFreeCount;
9627 case Node::TYPE_ALLOCATION:
9628 ++ctx.calculatedAllocationCount;
// Internal fragmentation (node size minus actual allocation size) counts as free.
9629 ctx.calculatedSumFreeSize += levelNodeSize - curr->allocation.alloc->GetSize();
9630 VMA_VALIDATE(curr->allocation.alloc != VK_NULL_HANDLE);
9632 case Node::TYPE_SPLIT:
9634 const uint32_t childrenLevel = level + 1;
9635 const VkDeviceSize childrenLevelNodeSize = levelNodeSize / 2;
9636 const Node*
const leftChild = curr->split.leftChild;
9637 VMA_VALIDATE(leftChild != VMA_NULL);
9638 VMA_VALIDATE(leftChild->offset == curr->offset);
9639 if(!ValidateNode(ctx, curr, leftChild, childrenLevel, childrenLevelNodeSize))
9641 VMA_VALIDATE(
false &&
"ValidateNode for left child failed.");
9643 const Node*
const rightChild = leftChild->buddy;
// Right child starts exactly half a node past the parent's offset.
9644 VMA_VALIDATE(rightChild->offset == curr->offset + childrenLevelNodeSize);
9645 if(!ValidateNode(ctx, curr, rightChild, childrenLevel, childrenLevelNodeSize))
9647 VMA_VALIDATE(
false &&
"ValidateNode for right child failed.");
// Maps an allocation size to the deepest level whose node size still fits it
// (level declaration/increment and return statement elided in this extraction).
9658 uint32_t VmaBlockMetadata_Buddy::AllocSizeToLevel(VkDeviceSize allocSize)
const 9662 VkDeviceSize currLevelNodeSize = m_UsableSize;
9663 VkDeviceSize nextLevelNodeSize = currLevelNodeSize >> 1;
// Descend while the next (smaller) level still fits the allocation.
9664 while(allocSize <= nextLevelNodeSize && level + 1 < m_LevelCount)
9667 currLevelNodeSize = nextLevelNodeSize;
9668 nextLevelNodeSize = currLevelNodeSize >> 1;
// Frees the allocation at 'offset': walks the tree from the root following the
// offset, marks the leaf free, then merges free buddies upward as far as possible.
9673 void VmaBlockMetadata_Buddy::FreeAtOffset(
VmaAllocation alloc, VkDeviceSize offset)
9676 Node* node = m_Root;
9677 VkDeviceSize nodeOffset = 0;
9679 VkDeviceSize levelNodeSize = LevelToNodeSize(0);
// Descend: at each split, the offset picks the left or right child.
9680 while(node->type == Node::TYPE_SPLIT)
9682 const VkDeviceSize nextLevelSize = levelNodeSize >> 1;
9683 if(offset < nodeOffset + nextLevelSize)
9685 node = node->split.leftChild;
9689 node = node->split.leftChild->buddy;
9690 nodeOffset += nextLevelSize;
9693 levelNodeSize = nextLevelSize;
9696 VMA_ASSERT(node != VMA_NULL && node->type == Node::TYPE_ALLOCATION);
9697 VMA_ASSERT(alloc == VK_NULL_HANDLE || node->allocation.alloc == alloc);
9700 --m_AllocationCount;
// NOTE(review): the assert above permits alloc == VK_NULL_HANDLE, yet this line
// dereferences it — a null-deref if this path is ever taken with a null handle.
// Verify all callers pass a valid allocation, or derive the size from the node.
9701 m_SumFreeSize += alloc->GetSize();
9703 node->type = Node::TYPE_FREE;
// Merge loop: while the buddy is also free, delete both children and mark the
// parent free, moving one level up each iteration ('level' bookkeeping elided).
9706 while(level > 0 && node->buddy->type == Node::TYPE_FREE)
9708 RemoveFromFreeList(level, node->buddy);
9709 Node*
const parent = node->parent;
9711 vma_delete(GetAllocationCallbacks(), node->buddy);
9712 vma_delete(GetAllocationCallbacks(), node);
9713 parent->type = Node::TYPE_FREE;
// Finally put the (possibly merged) free node back on its level's free list.
9721 AddToFreeListFront(level, node);
// Recursive helper for CalcAllocationStatInfo: accumulates per-node statistics.
// Free nodes count as unused ranges; allocations contribute their size plus any
// internal fragmentation; split nodes recurse into both children
// (several accumulation statements elided in this extraction).
9724 void VmaBlockMetadata_Buddy::CalcAllocationStatInfoNode(
VmaStatInfo& outInfo,
const Node* node, VkDeviceSize levelNodeSize)
const 9728 case Node::TYPE_FREE:
9734 case Node::TYPE_ALLOCATION:
9736 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Internal fragmentation within the node is reported as an unused range.
9742 const VkDeviceSize unusedRangeSize = levelNodeSize - allocSize;
9743 if(unusedRangeSize > 0)
9752 case Node::TYPE_SPLIT:
9754 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
9755 const Node*
const leftChild = node->split.leftChild;
9756 CalcAllocationStatInfoNode(outInfo, leftChild, childrenNodeSize);
9757 const Node*
const rightChild = leftChild->buddy;
9758 CalcAllocationStatInfoNode(outInfo, rightChild, childrenNodeSize);
9766 void VmaBlockMetadata_Buddy::AddToFreeListFront(uint32_t level, Node* node)
9768 VMA_ASSERT(node->type == Node::TYPE_FREE);
9771 Node*
const frontNode = m_FreeList[level].front;
9772 if(frontNode == VMA_NULL)
9774 VMA_ASSERT(m_FreeList[level].back == VMA_NULL);
9775 node->free.prev = node->free.next = VMA_NULL;
9776 m_FreeList[level].front = m_FreeList[level].back = node;
9780 VMA_ASSERT(frontNode->free.prev == VMA_NULL);
9781 node->free.prev = VMA_NULL;
9782 node->free.next = frontNode;
9783 frontNode->free.prev = node;
9784 m_FreeList[level].front = node;
9788 void VmaBlockMetadata_Buddy::RemoveFromFreeList(uint32_t level, Node* node)
9790 VMA_ASSERT(m_FreeList[level].front != VMA_NULL);
9793 if(node->free.prev == VMA_NULL)
9795 VMA_ASSERT(m_FreeList[level].front == node);
9796 m_FreeList[level].front = node->free.next;
9800 Node*
const prevFreeNode = node->free.prev;
9801 VMA_ASSERT(prevFreeNode->free.next == node);
9802 prevFreeNode->free.next = node->free.next;
9806 if(node->free.next == VMA_NULL)
9808 VMA_ASSERT(m_FreeList[level].back == node);
9809 m_FreeList[level].back = node->free.prev;
9813 Node*
const nextFreeNode = node->free.next;
9814 VMA_ASSERT(nextFreeNode->free.prev == node);
9815 nextFreeNode->free.prev = node->free.prev;
9819 #if VMA_STATS_STRING_ENABLED 9820 void VmaBlockMetadata_Buddy::PrintDetailedMapNode(
// Recursive JSON emitter for PrintDetailedMap: free nodes become unused ranges,
// allocations are printed with trailing internal fragmentation, split nodes recurse.
class VmaJsonWriter& json,
const Node* node, VkDeviceSize levelNodeSize)
const 9824 case Node::TYPE_FREE:
9825 PrintDetailedMap_UnusedRange(json, node->offset, levelNodeSize);
9827 case Node::TYPE_ALLOCATION:
9829 PrintDetailedMap_Allocation(json, node->offset, node->allocation.alloc);
9830 const VkDeviceSize allocSize = node->allocation.alloc->GetSize();
// Report internal fragmentation after the allocation as unused.
9831 if(allocSize < levelNodeSize)
9833 PrintDetailedMap_UnusedRange(json, node->offset + allocSize, levelNodeSize - allocSize);
9837 case Node::TYPE_SPLIT:
9839 const VkDeviceSize childrenNodeSize = levelNodeSize / 2;
9840 const Node*
const leftChild = node->split.leftChild;
9841 PrintDetailedMapNode(json, leftChild, childrenNodeSize);
9842 const Node*
const rightChild = leftChild->buddy;
9843 PrintDetailedMapNode(json, rightChild, childrenNodeSize);
9850 #endif // #if VMA_STATS_STRING_ENABLED 9856 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
// Constructor leaves the block uninitialized; Init() must be called before use.
VmaAllocator hAllocator) :
9857 m_pMetadata(VMA_NULL),
9858 m_MemoryTypeIndex(UINT32_MAX),
9860 m_hMemory(VK_NULL_HANDLE),
9862 m_pMappedData(VMA_NULL)
// Adopts an already-allocated VkDeviceMemory and creates the metadata object
// matching the requested algorithm (linear / buddy / generic default).
9866 void VmaDeviceMemoryBlock::Init(
9868 uint32_t newMemoryTypeIndex,
9869 VkDeviceMemory newMemory,
9870 VkDeviceSize newSize,
// Init must not be called twice on the same block.
9874 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
9876 m_MemoryTypeIndex = newMemoryTypeIndex;
9878 m_hMemory = newMemory;
// Algorithm selection switch (case labels elided in this extraction).
9883 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Linear)(hAllocator);
9886 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Buddy)(hAllocator);
9892 m_pMetadata = vma_new(hAllocator, VmaBlockMetadata_Generic)(hAllocator);
9894 m_pMetadata->Init(newSize);
// Releases the underlying VkDeviceMemory and the metadata object. The block must
// be empty — freeing Vulkan memory with live suballocations would corrupt them.
9897 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
9901 VMA_ASSERT(m_pMetadata->IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
9903 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
9904 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_pMetadata->GetSize(), m_hMemory);
9905 m_hMemory = VK_NULL_HANDLE;
9907 vma_delete(allocator, m_pMetadata);
9908 m_pMetadata = VMA_NULL;
// Sanity check: block owns memory, has a non-zero size, and its metadata validates.
9911 bool VmaDeviceMemoryBlock::Validate()
const 9913 VMA_VALIDATE((m_hMemory != VK_NULL_HANDLE) &&
9914 (m_pMetadata->GetSize() != 0));
9916 return m_pMetadata->Validate();
// Maps the block, lets the metadata verify the debug margins around every
// allocation, then unmaps. Returns the mapping error if Map fails.
9919 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
9921 void* pData =
nullptr;
9922 VkResult res = Map(hAllocator, 1, &pData);
9923 if(res != VK_SUCCESS)
9928 res = m_pMetadata->CheckCorruption(pData);
9930 Unmap(hAllocator, 1);
// Reference-counted persistent mapping: the first Map calls vkMapMemory, later
// calls just bump m_MapCount and return the cached pointer. Thread-safe via m_Mutex.
9935 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
9942 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
// Already mapped: just increase the reference count.
9945 m_MapCount += count;
9946 VMA_ASSERT(m_pMappedData != VMA_NULL);
9947 if(ppData != VMA_NULL)
9949 *ppData = m_pMappedData;
// First mapping: call into Vulkan (remaining vkMapMemory arguments elided here).
9955 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
9956 hAllocator->m_hDevice,
9962 if(result == VK_SUCCESS)
9964 if(ppData != VMA_NULL)
9966 *ppData = m_pMappedData;
// Decrements the map reference count; unmaps only when it reaches zero.
9974 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
9981 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
9982 if(m_MapCount >= count)
9984 m_MapCount -= count;
// Count dropped to zero (condition elided): release the Vulkan mapping.
9987 m_pMappedData = VMA_NULL;
9988 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
9993 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Corruption detection: writes magic sentinel values into the debug margins just
// before and just after an allocation. Requires VMA_DEBUG_MARGIN/DETECT_CORRUPTION.
9997 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
9999 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
10000 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
10003 VkResult res = Map(hAllocator, 1, &pData);
10004 if(res != VK_SUCCESS)
// Sentinels go into the margin before the allocation and right after its end.
10009 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
10010 VmaWriteMagicValue(pData, allocOffset + allocSize);
10012 Unmap(hAllocator, 1);
// Counterpart check run on free: asserts if either sentinel was overwritten,
// i.e. something wrote outside its allocation's bounds.
10017 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
10019 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
10020 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
10023 VkResult res = Map(hAllocator, 1, &pData);
10024 if(res != VK_SUCCESS)
10029 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
10031 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
10033 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
10035 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
10038 Unmap(hAllocator, 1);
// Binds a buffer to this block's memory at the allocation's offset. The mutex
// serializes against map/unmap on the same VkDeviceMemory.
10043 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
10048 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
10049 hAllocation->GetBlock() ==
this);
10051 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10052 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
10053 hAllocator->m_hDevice,
10056 hAllocation->GetOffset());
// Same as BindBufferMemory, for images.
10059 VkResult VmaDeviceMemoryBlock::BindImageMemory(
10064 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
10065 hAllocation->GetBlock() ==
this);
10067 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
10068 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
10069 hAllocator->m_hDevice,
10072 hAllocation->GetOffset());
// Fragment of a stat-initialization helper (its signature was lost in this
// extraction): zeroes the output VmaStatInfo struct.
10077 memset(&outInfo, 0,
sizeof(outInfo));
// Post-processing of accumulated statistics (body elided; presumably computes
// averages — TODO confirm against the full source).
10096 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool_T constructor: forwards the pool create-info into the embedded
// VmaBlockVector. blockSize == 0 means "use the allocator's preferred size"
// and marks the block size as non-explicit (see the two uses below).
10104 VmaPool_T::VmaPool_T(
10107 VkDeviceSize preferredBlockSize) :
10110 createInfo.memoryTypeIndex,
10111 createInfo.blockSize != 0 ? createInfo.blockSize : preferredBlockSize,
10112 createInfo.minBlockCount,
10113 createInfo.maxBlockCount,
10115 createInfo.frameInUseCount,
10117 createInfo.blockSize != 0,
// Destructor (body elided in this extraction).
10123 VmaPool_T::~VmaPool_T()
10127 #if VMA_STATS_STRING_ENABLED 10129 #endif // #if VMA_STATS_STRING_ENABLED 10131 VmaBlockVector::VmaBlockVector(
// Constructor: stores the configuration for one memory type's sequence of
// VkDeviceMemory blocks. No blocks are created here — see CreateMinBlocks().
10133 uint32_t memoryTypeIndex,
10134 VkDeviceSize preferredBlockSize,
10135 size_t minBlockCount,
10136 size_t maxBlockCount,
10137 VkDeviceSize bufferImageGranularity,
10138 uint32_t frameInUseCount,
10140 bool explicitBlockSize,
10141 uint32_t algorithm) :
10142 m_hAllocator(hAllocator),
10143 m_MemoryTypeIndex(memoryTypeIndex),
10144 m_PreferredBlockSize(preferredBlockSize),
10145 m_MinBlockCount(minBlockCount),
10146 m_MaxBlockCount(maxBlockCount),
10147 m_BufferImageGranularity(bufferImageGranularity),
10148 m_FrameInUseCount(frameInUseCount),
10149 m_IsCustomPool(isCustomPool),
10150 m_ExplicitBlockSize(explicitBlockSize),
10151 m_Algorithm(algorithm),
10152 m_HasEmptyBlock(false),
// The block vector allocates through the user-provided allocation callbacks.
10153 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
10154 m_pDefragmentator(VMA_NULL),
10159 VmaBlockVector::~VmaBlockVector()
10161 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
10163 for(
size_t i = m_Blocks.size(); i--; )
10165 m_Blocks[i]->Destroy(m_hAllocator);
10166 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount blocks of the preferred size; stops and propagates
// the error on the first failure (return statements elided in this extraction).
10170 VkResult VmaBlockVector::CreateMinBlocks()
10172 for(
size_t i = 0; i < m_MinBlockCount; ++i)
10174 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
10175 if(res != VK_SUCCESS)
10183 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
10185 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10187 const size_t blockCount = m_Blocks.size();
10196 for(uint32_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
10198 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10199 VMA_ASSERT(pBlock);
10200 VMA_HEAVY_ASSERT(pBlock->Validate());
10201 pBlock->m_pMetadata->AddPoolStats(*pStats);
// Corruption detection needs compile-time opt-in (margin + detect flags) AND a
// memory type that is both host-visible and host-coherent, so the margins can be
// written/read via a plain mapping without flush/invalidate.
10205 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 10207 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
10208 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
10209 (VMA_DEBUG_MARGIN > 0) &&
10210 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
// Upper bound on retries in Allocate() when allocations may be made lost.
10213 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Core sub-allocation routine for one memory type. Strategy, in order:
//  1. try the last block, then all existing blocks (forward, then backward scan);
//  2. create a new block (with progressive size halving on OOM) and allocate there;
//  3. if allowed, make other (lost-able) allocations lost and retry, up to
//     VMA_ALLOCATION_TRY_COUNT times.
// NOTE(review): many argument lists and closing braces were elided in this
// extraction; leading numbers are original-file line artifacts.
10215 VkResult VmaBlockVector::Allocate(
10217 uint32_t currentFrameIndex,
10219 VkDeviceSize alignment,
10221 VmaSuballocationType suballocType,
10228 const bool canCreateNewBlock =
10230 (m_Blocks.size() < m_MaxBlockCount);
10237 canMakeOtherLost =
false;
// Upper-address allocation is only meaningful for the linear algorithm.
10241 if(isUpperAddress &&
10244 return VK_ERROR_FEATURE_NOT_PRESENT;
10258 return VK_ERROR_FEATURE_NOT_PRESENT;
// Early out: request (plus debug margins) can never fit in a block.
10262 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
10264 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
10267 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10274 if(!canMakeOtherLost || canCreateNewBlock)
// 1a. Try the most recently used (last) block first.
10283 if(!m_Blocks.empty())
10285 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks.back();
10286 VMA_ASSERT(pCurrBlock);
10287 VkResult res = AllocateFromBlock(
10298 if(res == VK_SUCCESS)
10300 VMA_DEBUG_LOG(
" Returned from last block #%u", (uint32_t)(m_Blocks.size() - 1));
// 1b. Forward scan over all existing blocks.
10310 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
10312 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10313 VMA_ASSERT(pCurrBlock);
10314 VkResult res = AllocateFromBlock(
10325 if(res == VK_SUCCESS)
10327 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 1c. Backward scan (used for a different allocation strategy).
10335 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
10337 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10338 VMA_ASSERT(pCurrBlock);
10339 VkResult res = AllocateFromBlock(
10350 if(res == VK_SUCCESS)
10352 VMA_DEBUG_LOG(
" Returned from existing block #%u", (uint32_t)blockIndex);
// 2. No existing block fit: create a new one.
10360 if(canCreateNewBlock)
10363 VkDeviceSize newBlockSize = m_PreferredBlockSize;
10364 uint32_t newBlockSizeShift = 0;
10365 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Heuristic pre-shrink: when the block size is not user-fixed, start with a
// smaller block if it would still dwarf the request and existing blocks.
10367 if(!m_ExplicitBlockSize)
10370 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
10371 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
10373 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
10374 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
10376 newBlockSize = smallerNewBlockSize;
10377 ++newBlockSizeShift;
10386 size_t newBlockIndex = 0;
10387 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On allocation failure keep halving the block size (down to the request size)
// and retrying, up to NEW_BLOCK_SIZE_SHIFT_MAX halvings total.
10389 if(!m_ExplicitBlockSize)
10391 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
10393 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
10394 if(smallerNewBlockSize >= size)
10396 newBlockSize = smallerNewBlockSize;
10397 ++newBlockSizeShift;
10398 res = CreateBlock(newBlockSize, &newBlockIndex);
10407 if(res == VK_SUCCESS)
10409 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
10410 VMA_ASSERT(pBlock->m_pMetadata->GetSize() >= size);
10412 res = AllocateFromBlock(
10423 if(res == VK_SUCCESS)
10425 VMA_DEBUG_LOG(
" Created new block Size=%llu", newBlockSize);
10431 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// 3. Last resort: evict lost-able allocations and retry.
10438 if(canMakeOtherLost)
10440 uint32_t tryIndex = 0;
10441 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
10443 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
10444 VmaAllocationRequest bestRequest = {};
10445 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Pick the block/request with the lowest eviction cost (forward strategy).
10451 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
10453 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10454 VMA_ASSERT(pCurrBlock);
10455 VmaAllocationRequest currRequest = {};
10456 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
10459 m_BufferImageGranularity,
10468 const VkDeviceSize currRequestCost = currRequest.CalcCost();
10469 if(pBestRequestBlock == VMA_NULL ||
10470 currRequestCost < bestRequestCost)
10472 pBestRequestBlock = pCurrBlock;
10473 bestRequest = currRequest;
10474 bestRequestCost = currRequestCost;
// Cost 0 means nothing needs to be evicted — cannot do better.
10476 if(bestRequestCost == 0)
// Same search, backward strategy.
10487 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
10489 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
10490 VMA_ASSERT(pCurrBlock);
10491 VmaAllocationRequest currRequest = {};
10492 if(pCurrBlock->m_pMetadata->CreateAllocationRequest(
10495 m_BufferImageGranularity,
10504 const VkDeviceSize currRequestCost = currRequest.CalcCost();
10505 if(pBestRequestBlock == VMA_NULL ||
10506 currRequestCost < bestRequestCost ||
10509 pBestRequestBlock = pCurrBlock;
10510 bestRequest = currRequest;
10511 bestRequestCost = currRequestCost;
10513 if(bestRequestCost == 0 ||
10523 if(pBestRequestBlock != VMA_NULL)
// Persistently-mapped allocations need the block mapped up front.
10527 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
10528 if(res != VK_SUCCESS)
// The request may have been invalidated concurrently; only commit if the
// required allocations could actually be made lost.
10534 if(pBestRequestBlock->m_pMetadata->MakeRequestedAllocationsLost(
10540 if(pBestRequestBlock->m_pMetadata->IsEmpty())
10542 m_HasEmptyBlock =
false;
// Commit: create the allocation object and register it in the metadata.
10545 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
10546 pBestRequestBlock->m_pMetadata->Alloc(bestRequest, suballocType, size, isUpperAddress, *pAllocation);
10547 (*pAllocation)->InitBlockAllocation(
10550 bestRequest.offset,
10556 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
10557 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
10558 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
10559 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
10561 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
10563 if(IsCorruptionDetectionEnabled())
10565 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
10566 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// All retries spent fighting concurrent lost-allocation races: give up.
10581 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
10583 return VK_ERROR_TOO_MANY_OBJECTS;
10587 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees one allocation back into its block. Keeps at most one empty block alive
// (as a cache); a second empty block is destroyed outside the mutex.
10590 void VmaBlockVector::Free(
10593 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope for the lock (destruction of the candidate block happens after unlock).
10597 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10599 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
// Check debug margins before releasing the range.
10601 if(IsCorruptionDetectionEnabled())
10603 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
10604 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
// Balance the implicit Map done for persistently-mapped allocations.
10607 if(hAllocation->IsPersistentMap())
10609 pBlock->Unmap(m_hAllocator, 1);
10612 pBlock->m_pMetadata->Free(hAllocation);
10613 VMA_HEAVY_ASSERT(pBlock->Validate());
10615 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// Block became empty: keep it only if it is the sole empty block and we are
// above the minimum block count.
10618 if(pBlock->m_pMetadata->IsEmpty())
10621 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
10623 pBlockToDelete = pBlock;
10629 m_HasEmptyBlock =
true;
// Block not empty, but there is an empty one cached: the last block may now be
// redundant — drop it if we can.
10634 else if(m_HasEmptyBlock)
10636 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
10637 if(pLastBlock->m_pMetadata->IsEmpty() && m_Blocks.size() > m_MinBlockCount)
10639 pBlockToDelete = pLastBlock;
10640 m_Blocks.pop_back();
10641 m_HasEmptyBlock =
false;
10645 IncrementallySortBlocks();
// Destruction of VkDeviceMemory is deliberately done outside the lock.
10650 if(pBlockToDelete != VMA_NULL)
10652 VMA_DEBUG_LOG(
" Deleted empty allocation");
10653 pBlockToDelete->Destroy(m_hAllocator);
10654 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the size of the largest existing block, scanning from the end and
// stopping early once the preferred size is reached (early-break and return
// statements elided in this extraction).
10658 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 10660 VkDeviceSize result = 0;
10661 for(
size_t i = m_Blocks.size(); i--; )
10663 result = VMA_MAX(result, m_Blocks[i]->m_pMetadata->GetSize());
10664 if(result >= m_PreferredBlockSize)
10672 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
10674 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10676 if(m_Blocks[blockIndex] == pBlock)
10678 VmaVectorRemove(m_Blocks, blockIndex);
// One incremental bubble-sort pass ordering blocks by ascending free space, so
// allocation scans hit fuller blocks first (loop-exit statement after the swap
// appears elided in this extraction — TODO confirm it stops at the first swap).
10685 void VmaBlockVector::IncrementallySortBlocks()
10690 for(
size_t i = 1; i < m_Blocks.size(); ++i)
10692 if(m_Blocks[i - 1]->m_pMetadata->GetSumFreeSize() > m_Blocks[i]->m_pMetadata->GetSumFreeSize())
10694 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Attempts an allocation in a single given block (no eviction: the request must
// not require making any allocation lost). On success creates and initializes
// the VmaAllocation_T object; otherwise returns VK_ERROR_OUT_OF_DEVICE_MEMORY.
10701 VkResult VmaBlockVector::AllocateFromBlock(
10702 VmaDeviceMemoryBlock* pBlock,
10704 uint32_t currentFrameIndex,
10706 VkDeviceSize alignment,
10709 VmaSuballocationType suballocType,
10718 VmaAllocationRequest currRequest = {};
10719 if(pBlock->m_pMetadata->CreateAllocationRequest(
10722 m_BufferImageGranularity,
// This path never evicts; the metadata must not have asked for lost items.
10732 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistently-mapped allocations keep the whole block mapped.
10736 VkResult res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
10737 if(res != VK_SUCCESS)
10744 if(pBlock->m_pMetadata->IsEmpty())
10746 m_HasEmptyBlock =
false;
// Commit the request into the metadata and wire up the allocation object.
10749 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
10750 pBlock->m_pMetadata->Alloc(currRequest, suballocType, size, isUpperAddress, *pAllocation);
10751 (*pAllocation)->InitBlockAllocation(
10754 currRequest.offset,
10760 VMA_HEAVY_ASSERT(pBlock->Validate());
10761 (*pAllocation)->SetUserData(m_hAllocator, pUserData);
10762 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
10764 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
10766 if(IsCorruptionDetectionEnabled())
10768 VkResult res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
10769 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
10773 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Allocates a new VkDeviceMemory of 'blockSize', wraps it in a
// VmaDeviceMemoryBlock, and appends it to m_Blocks. Optionally reports the new
// block's index (error-propagation and Init argument lines elided here).
10776 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
10778 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
10779 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
10780 allocInfo.allocationSize = blockSize;
10781 VkDeviceMemory mem = VK_NULL_HANDLE;
10782 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
// Wrap the raw memory in a block object with the vector's algorithm.
10791 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
10796 allocInfo.allocationSize,
10800 m_Blocks.push_back(pBlock);
10801 if(pNewBlockIndex != VMA_NULL)
10803 *pNewBlockIndex = m_Blocks.size() - 1;
10809 #if VMA_STATS_STRING_ENABLED 10811 void VmaBlockVector::PrintDetailedMap(
// Emits this vector's configuration and every block's detailed map as JSON.
// Custom pools and default (per-memory-type) vectors print different headers.
class VmaJsonWriter& json)
10813 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10815 json.BeginObject();
// Custom-pool branch: full configuration.
10819 json.WriteString(
"MemoryTypeIndex");
10820 json.WriteNumber(m_MemoryTypeIndex);
10822 json.WriteString(
"BlockSize");
10823 json.WriteNumber(m_PreferredBlockSize);
10825 json.WriteString(
"BlockCount");
10826 json.BeginObject(
true);
10827 if(m_MinBlockCount > 0)
10829 json.WriteString(
"Min");
10830 json.WriteNumber((uint64_t)m_MinBlockCount);
10832 if(m_MaxBlockCount < SIZE_MAX)
10834 json.WriteString(
"Max");
10835 json.WriteNumber((uint64_t)m_MaxBlockCount);
10837 json.WriteString(
"Cur");
10838 json.WriteNumber((uint64_t)m_Blocks.size());
10841 if(m_FrameInUseCount > 0)
10843 json.WriteString(
"FrameInUseCount");
10844 json.WriteNumber(m_FrameInUseCount);
10847 if(m_Algorithm != 0)
10849 json.WriteString(
"Algorithm");
10850 json.WriteString(VmaAlgorithmToStr(m_Algorithm));
// Default-vector branch: only the preferred block size.
10855 json.WriteString(
"PreferredBlockSize");
10856 json.WriteNumber(m_PreferredBlockSize);
// Blocks keyed by their numeric id, each printing its own detailed map.
10859 json.WriteString(
"Blocks");
10860 json.BeginObject();
10861 for(
size_t i = 0; i < m_Blocks.size(); ++i)
10863 json.BeginString();
10864 json.ContinueString(m_Blocks[i]->GetId());
10867 m_Blocks[i]->m_pMetadata->PrintDetailedMap(json);
// Lazily creates the per-vector VmaDefragmentator (constructed with this
// allocator, this block vector — parameter lines partly elided — and the current
// frame index) and returns it; subsequent calls return the cached instance.
10874 #endif // #if VMA_STATS_STRING_ENABLED 10876 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
10878 uint32_t currentFrameIndex)
10880 if(m_pDefragmentator == VMA_NULL)
10882 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
10885 currentFrameIndex);
10888 return m_pDefragmentator;
// Runs the defragmentator (must have been created via EnsureDefragmentator —
// the early-return body for the NULL case is elided here), accumulates moved
// bytes/allocations into pDefragmentationStats, decrements the caller's
// remaining budgets, then destroys now-empty blocks from the back of m_Blocks
// while keeping at least m_MinBlockCount blocks; at most one empty block is
// retained and m_HasEmptyBlock is updated to reflect that.
10891 VkResult VmaBlockVector::Defragment(
10893 VkDeviceSize& maxBytesToMove,
10894 uint32_t& maxAllocationsToMove)
10896 if(m_pDefragmentator == VMA_NULL)
10901 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10904 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
10907 if(pDefragmentationStats != VMA_NULL)
10909 const VkDeviceSize bytesMoved = m_pDefragmentator->GetBytesMoved();
10910 const uint32_t allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
10911 pDefragmentationStats->
bytesMoved += bytesMoved;
// The defragmentator is contractually bounded by the budgets passed in.
10913 VMA_ASSERT(bytesMoved <= maxBytesToMove);
10914 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
10915 maxBytesToMove -= bytesMoved;
10916 maxAllocationsToMove -= allocationsMoved;
10920 m_HasEmptyBlock =
false;
// Iterate backwards so VmaVectorRemove does not invalidate pending indices.
10921 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
10923 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
10924 if(pBlock->m_pMetadata->IsEmpty())
10926 if(m_Blocks.size() > m_MinBlockCount)
10928 if(pDefragmentationStats != VMA_NULL)
10931 pDefragmentationStats->
bytesFreed += pBlock->m_pMetadata->GetSize();
10934 VmaVectorRemove(m_Blocks, blockIndex);
10935 pBlock->Destroy(m_hAllocator);
10936 vma_delete(m_hAllocator, pBlock);
10940 m_HasEmptyBlock =
true;
// Deletes the lazily-created defragmentator (if any) and resets the pointer,
// making a later EnsureDefragmentator create a fresh one.
10948 void VmaBlockVector::DestroyDefragmentator()
10950 if(m_pDefragmentator != VMA_NULL)
10952 vma_delete(m_hAllocator, m_pDefragmentator);
10953 m_pDefragmentator = VMA_NULL;
// Marks as lost every allocation in every block that is eligible given
// currentFrameIndex and m_FrameInUseCount, under the vector mutex. The total
// number of newly-lost allocations is returned through the optional
// pLostAllocationCount out-parameter.
10957 void VmaBlockVector::MakePoolAllocationsLost(
10958 uint32_t currentFrameIndex,
10959 size_t* pLostAllocationCount)
10961 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10962 size_t lostAllocationCount = 0;
10963 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10965 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10966 VMA_ASSERT(pBlock);
10967 lostAllocationCount += pBlock->m_pMetadata->MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
10969 if(pLostAllocationCount != VMA_NULL)
10971 *pLostAllocationCount = lostAllocationCount;
// Validates corruption-detection magic values in every block. Returns
// VK_ERROR_FEATURE_NOT_PRESENT when corruption detection is not enabled for
// this vector. NOTE(review): the early-return-on-failure body after the
// res != VK_SUCCESS check and the final success return are elided in this
// extraction (original lines 10989+ missing).
10975 VkResult VmaBlockVector::CheckCorruption()
10977 if(!IsCorruptionDetectionEnabled())
10979 return VK_ERROR_FEATURE_NOT_PRESENT;
10982 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
10983 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
10985 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
10986 VMA_ASSERT(pBlock);
10987 VkResult res = pBlock->CheckCorruption(m_hAllocator);
10988 if(res != VK_SUCCESS)
// Accumulates this vector's per-block allocation statistics into pStats:
// each block's VmaStatInfo is folded into the global total, the entry for this
// memory type, and the entry for the owning memory heap.
10996 void VmaBlockVector::AddStats(
VmaStats* pStats)
10998 const uint32_t memTypeIndex = m_MemoryTypeIndex;
10999 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
11001 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
11003 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
11005 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
11006 VMA_ASSERT(pBlock);
11007 VMA_HEAVY_ASSERT(pBlock->Validate());
11009 pBlock->m_pMetadata->CalcAllocationStatInfo(allocationStatInfo);
11010 VmaAddStatInfo(pStats->
total, allocationStatInfo);
11011 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
11012 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: stores the allocator/block-vector/frame-index context and
// initializes the per-block and per-allocation work vectors with the
// allocator's callbacks. Defragmentation only supports the default algorithm
// (GetAlgorithm() == 0), asserted here.
11019 VmaDefragmentator::VmaDefragmentator(
11021 VmaBlockVector* pBlockVector,
11022 uint32_t currentFrameIndex) :
11023 m_hAllocator(hAllocator),
11024 m_pBlockVector(pBlockVector),
11025 m_CurrentFrameIndex(currentFrameIndex),
11027 m_AllocationsMoved(0),
11028 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
11029 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
11031 VMA_ASSERT(pBlockVector->GetAlgorithm() == 0);
// Destructor: frees all BlockInfo objects owned by m_Blocks (reverse order).
11034 VmaDefragmentator::~VmaDefragmentator()
11036 for(
size_t i = m_Blocks.size(); i--; )
11038 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers an allocation as a defragmentation candidate. pChanged (may be
// null) will be set to VK_TRUE later if the allocation is actually moved.
11042 void VmaDefragmentator::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
11044 AllocationInfo allocInfo;
11045 allocInfo.m_hAllocation = hAlloc;
11046 allocInfo.m_pChanged = pChanged;
11047 m_Allocations.push_back(allocInfo);
// Returns a CPU pointer to the block's memory via *ppMappedData, mapping the
// block only if needed. Priority: (1) a mapping this defragmentator already
// made, (2) an existing persistent mapping on the block, (3) a fresh Map()
// whose pointer is cached in m_pMappedDataForDefragmentation so Unmap() can
// release it later. NOTE(review): the success-return lines between the
// branches are elided in this extraction.
11050 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData)
11053 if(m_pMappedDataForDefragmentation)
11055 *ppMappedData = m_pMappedDataForDefragmentation;
11060 if(m_pBlock->GetMappedData())
11062 *ppMappedData = m_pBlock->GetMappedData();
11067 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
11068 *ppMappedData = m_pMappedDataForDefragmentation;
// Releases the mapping made by EnsureMapping (case 3 only); mappings that
// pre-existed on the block are left untouched.
11072 void VmaDefragmentator::BlockInfo::Unmap(
VmaAllocator hAllocator)
11074 if(m_pMappedDataForDefragmentation != VMA_NULL)
11076 m_pBlock->Unmap(hAllocator, 1);
// One defragmentation pass: walks allocations from the last block backwards
// (srcAllocIndex starts at SIZE_MAX and underflow-wraps into the per-block
// loop below — intentional) and, for each, searches earlier blocks for a
// destination slot. When MoveMakesSense approves, the allocation's bytes are
// memcpy'd between mapped blocks, metadata is updated (Alloc at destination,
// FreeAtOffset at source), the VmaAllocation is retargeted, and budgets are
// charged. Returns VK_INCOMPLETE when either budget would be exceeded.
// NOTE(review): many interior lines (loop braces, some returns, memcpy call
// line 11168) are elided in this extraction; comments hedge accordingly.
11080 VkResult VmaDefragmentator::DefragmentRound(
11081 VkDeviceSize maxBytesToMove,
11082 uint32_t maxAllocationsToMove)
11084 if(m_Blocks.empty())
11089 size_t srcBlockIndex = m_Blocks.size() - 1;
11090 size_t srcAllocIndex = SIZE_MAX;
// Advance to the next non-empty source block / allocation pair.
11096 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
11098 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
11101 if(srcBlockIndex == 0)
11108 srcAllocIndex = SIZE_MAX;
11113 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
11117 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
11118 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
11120 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
11121 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
11122 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
11123 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try every block up to and including the source block as a destination.
11126 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
11128 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
11129 VmaAllocationRequest dstAllocRequest;
11130 if(pDstBlockInfo->m_pBlock->m_pMetadata->CreateAllocationRequest(
11131 m_CurrentFrameIndex,
11132 m_pBlockVector->GetFrameInUseCount(),
11133 m_pBlockVector->GetBufferImageGranularity(),
11140 &dstAllocRequest) &&
11142 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
11144 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Respect the caller's move budgets before committing anything.
11147 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
11148 (m_BytesMoved + size > maxBytesToMove))
11150 return VK_INCOMPLETE;
11153 void* pDstMappedData = VMA_NULL;
11154 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
11155 if(res != VK_SUCCESS)
11160 void* pSrcMappedData = VMA_NULL;
11161 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
11162 if(res != VK_SUCCESS)
11169 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
11170 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
11171 static_cast<size_t>(size));
11173 if(VMA_DEBUG_MARGIN > 0)
11175 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset - VMA_DEBUG_MARGIN);
11176 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset + size);
11179 pDstBlockInfo->m_pBlock->m_pMetadata->Alloc(
11184 allocInfo.m_hAllocation);
11185 pSrcBlockInfo->m_pBlock->m_pMetadata->FreeAtOffset(srcOffset);
11187 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
11189 if(allocInfo.m_pChanged != VMA_NULL)
11191 *allocInfo.m_pChanged = VK_TRUE;
11194 ++m_AllocationsMoved;
11195 m_BytesMoved += size;
11197 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// No destination found for this allocation: step to the previous candidate.
11205 if(srcAllocIndex > 0)
11211 if(srcBlockIndex > 0)
11214 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation driver: builds BlockInfo entries mirroring the
// block vector, sorts them by block pointer so registered allocations can be
// binary-searched into their owning block, drops lost allocations, sorts
// allocations within blocks by size descending and blocks by move-destination
// preference, then runs up to two DefragmentRound passes and unmaps any
// mappings made along the way. NOTE(review): early-return for the empty case
// and the final return are elided in this extraction.
11224 VkResult VmaDefragmentator::Defragment(
11225 VkDeviceSize maxBytesToMove,
11226 uint32_t maxAllocationsToMove)
11228 if(m_Allocations.empty())
11234 const size_t blockCount = m_pBlockVector->m_Blocks.size();
11235 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11237 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
11238 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
11239 m_Blocks.push_back(pBlockInfo);
11243 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Distribute registered allocations into their owning BlockInfo; allocations
// already lost are skipped (the elided else-branch presumably drops them).
11246 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
11248 AllocationInfo& allocInfo = m_Allocations[blockIndex];
11250 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
11252 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
11253 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
11254 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
11256 (*it)->m_Allocations.push_back(allocInfo);
11264 m_Allocations.clear();
11266 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11268 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
11269 pBlockInfo->CalcHasNonMovableAllocations();
11270 pBlockInfo->SortAllocationsBySizeDescecnding();
11274 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
11277 VkResult result = VK_SUCCESS;
11278 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
11280 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
11284 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
11286 m_Blocks[blockIndex]->Unmap(m_hAllocator);
// Heuristic: a move is worthwhile only if it compacts memory "downwards" —
// to an earlier block, or within the same block to a lower offset. The
// return-value lines for each branch are elided in this extraction, but the
// visible ordering (dst<src, dst>src, dstOffset<srcOffset) matches the
// documented true/false/true pattern — TODO confirm against full source.
11292 bool VmaDefragmentator::MoveMakesSense(
11293 size_t dstBlockIndex, VkDeviceSize dstOffset,
11294 size_t srcBlockIndex, VkDeviceSize srcOffset)
11296 if(dstBlockIndex < srcBlockIndex)
11300 if(dstBlockIndex > srcBlockIndex)
11304 if(dstOffset < srcOffset)
// Recording support (Windows-only; compiled under VMA_RECORDING_ENABLED).
// NOTE(review): this span fuses the default constructor with what appears to
// be VmaRecorder::Init — the Init signature and m_File initialization lines
// are elided. Visible behavior: capture QueryPerformance frequency/start
// counter for timestamps, fopen_s the CSV recording file ("wb"), and write the
// two-line header identifying the format version ("1,3").
11314 #if VMA_RECORDING_ENABLED 11316 VmaRecorder::VmaRecorder() :
11321 m_StartCounter(INT64_MAX)
11327 m_UseMutex = useMutex;
11328 m_Flags = settings.
flags;
11330 QueryPerformanceFrequency((LARGE_INTEGER*)&m_Freq);
11331 QueryPerformanceCounter((LARGE_INTEGER*)&m_StartCounter);
11334 errno_t err = fopen_s(&m_File, settings.
pFilePath,
"wb");
11337 return VK_ERROR_INITIALIZATION_FAILED;
11341 fprintf(m_File,
"%s\n",
"Vulkan Memory Allocator,Calls recording");
11342 fprintf(m_File,
"%s\n",
"1,3");
// Destructor: closes the recording file if one was opened (fclose call elided
// from this extraction — presumably inside the if).
11347 VmaRecorder::~VmaRecorder()
11349 if(m_File != VMA_NULL)
// Appends a vmaCreateAllocator CSV row (threadId,time,frameIndex,call) to the
// recording file under the file mutex.
11355 void VmaRecorder::RecordCreateAllocator(uint32_t frameIndex)
11357 CallParams callParams;
11358 GetBasicParams(callParams);
11360 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11361 fprintf(m_File,
"%u,%.3f,%u,vmaCreateAllocator\n", callParams.threadId, callParams.time, frameIndex);
// Appends a vmaDestroyAllocator CSV row to the recording file.
11365 void VmaRecorder::RecordDestroyAllocator(uint32_t frameIndex)
11367 CallParams callParams;
11368 GetBasicParams(callParams);
11370 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11371 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyAllocator\n", callParams.threadId, callParams.time, frameIndex);
// NOTE(review): the signature line of this function is elided — the format
// string identifies it as VmaRecorder::RecordCreatePool. Logs the pool's
// create parameters (flags, sizes, counts) and the resulting pool handle; the
// argument lines after the format string are also elided here.
11377 CallParams callParams;
11378 GetBasicParams(callParams);
11380 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11381 fprintf(m_File,
"%u,%.3f,%u,vmaCreatePool,%u,%u,%llu,%llu,%llu,%u,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaDestroyPool CSV row; the pool-handle argument line after the
// format string is elided in this extraction.
11392 void VmaRecorder::RecordDestroyPool(uint32_t frameIndex,
VmaPool pool)
11394 CallParams callParams;
11395 GetBasicParams(callParams);
11397 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11398 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyPool,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaAllocateMemory CSV row: memory requirements, allocation-create
// flags/usage (argument lines partly elided), resulting allocation handle and
// a stringified pUserData (quoted via UserDataString).
11403 void VmaRecorder::RecordAllocateMemory(uint32_t frameIndex,
11404 const VkMemoryRequirements& vkMemReq,
11408 CallParams callParams;
11409 GetBasicParams(callParams);
11411 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11412 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11413 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemory,%llu,%llu,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11415 vkMemReq.alignment,
11416 vkMemReq.memoryTypeBits,
11424 userDataStr.GetString());
// Appends a vmaAllocateMemoryForBuffer CSV row; same shape as
// RecordAllocateMemory plus the dedicated-allocation requirement flags.
11428 void VmaRecorder::RecordAllocateMemoryForBuffer(uint32_t frameIndex,
11429 const VkMemoryRequirements& vkMemReq,
11430 bool requiresDedicatedAllocation,
11431 bool prefersDedicatedAllocation,
11435 CallParams callParams;
11436 GetBasicParams(callParams);
11438 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11439 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11440 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForBuffer,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11442 vkMemReq.alignment,
11443 vkMemReq.memoryTypeBits,
11444 requiresDedicatedAllocation ? 1 : 0,
11445 prefersDedicatedAllocation ? 1 : 0,
11453 userDataStr.GetString());
// Appends a vmaAllocateMemoryForImage CSV row; mirrors
// RecordAllocateMemoryForBuffer for the image path.
11457 void VmaRecorder::RecordAllocateMemoryForImage(uint32_t frameIndex,
11458 const VkMemoryRequirements& vkMemReq,
11459 bool requiresDedicatedAllocation,
11460 bool prefersDedicatedAllocation,
11464 CallParams callParams;
11465 GetBasicParams(callParams);
11467 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11468 UserDataString userDataStr(createInfo.
flags, createInfo.
pUserData);
11469 fprintf(m_File,
"%u,%.3f,%u,vmaAllocateMemoryForImage,%llu,%llu,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11471 vkMemReq.alignment,
11472 vkMemReq.memoryTypeBits,
11473 requiresDedicatedAllocation ? 1 : 0,
11474 prefersDedicatedAllocation ? 1 : 0,
11482 userDataStr.GetString());
// Appends a vmaFreeMemory CSV row (allocation-handle argument line elided).
11486 void VmaRecorder::RecordFreeMemory(uint32_t frameIndex,
11489 CallParams callParams;
11490 GetBasicParams(callParams);
11492 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11493 fprintf(m_File,
"%u,%.3f,%u,vmaFreeMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaSetAllocationUserData CSV row with the allocation handle and
// the stringified new user data (UserDataString argument lines elided).
11498 void VmaRecorder::RecordSetAllocationUserData(uint32_t frameIndex,
11500 const void* pUserData)
11502 CallParams callParams;
11503 GetBasicParams(callParams);
11505 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11506 UserDataString userDataStr(
11509 fprintf(m_File,
"%u,%.3f,%u,vmaSetAllocationUserData,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11511 userDataStr.GetString());
// Appends a vmaCreateLostAllocation CSV row.
11515 void VmaRecorder::RecordCreateLostAllocation(uint32_t frameIndex,
11518 CallParams callParams;
11519 GetBasicParams(callParams);
11521 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11522 fprintf(m_File,
"%u,%.3f,%u,vmaCreateLostAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaMapMemory CSV row.
11527 void VmaRecorder::RecordMapMemory(uint32_t frameIndex,
11530 CallParams callParams;
11531 GetBasicParams(callParams);
11533 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11534 fprintf(m_File,
"%u,%.3f,%u,vmaMapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaUnmapMemory CSV row.
11539 void VmaRecorder::RecordUnmapMemory(uint32_t frameIndex,
11542 CallParams callParams;
11543 GetBasicParams(callParams);
11545 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11546 fprintf(m_File,
"%u,%.3f,%u,vmaUnmapMemory,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaFlushAllocation CSV row with allocation handle, offset, size
// (argument lines after the format string are elided in this extraction).
11551 void VmaRecorder::RecordFlushAllocation(uint32_t frameIndex,
11552 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
11554 CallParams callParams;
11555 GetBasicParams(callParams);
11557 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11558 fprintf(m_File,
"%u,%.3f,%u,vmaFlushAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaInvalidateAllocation CSV row; mirrors RecordFlushAllocation.
11565 void VmaRecorder::RecordInvalidateAllocation(uint32_t frameIndex,
11566 VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
11568 CallParams callParams;
11569 GetBasicParams(callParams);
11571 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11572 fprintf(m_File,
"%u,%.3f,%u,vmaInvalidateAllocation,%p,%llu,%llu\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaCreateBuffer CSV row: VkBufferCreateInfo fields, allocation-
// create info (flags/usage/pool — several argument lines elided), resulting
// handles, and stringified user data.
11579 void VmaRecorder::RecordCreateBuffer(uint32_t frameIndex,
11580 const VkBufferCreateInfo& bufCreateInfo,
11584 CallParams callParams;
11585 GetBasicParams(callParams);
11587 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11588 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
11589 fprintf(m_File,
"%u,%.3f,%u,vmaCreateBuffer,%u,%llu,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11590 bufCreateInfo.flags,
11591 bufCreateInfo.size,
11592 bufCreateInfo.usage,
11593 bufCreateInfo.sharingMode,
11594 allocCreateInfo.
flags,
11595 allocCreateInfo.
usage,
11599 allocCreateInfo.
pool,
11601 userDataStr.GetString());
// Appends a vmaCreateImage CSV row: full VkImageCreateInfo field dump plus
// allocation-create info and handles (some argument lines elided).
11605 void VmaRecorder::RecordCreateImage(uint32_t frameIndex,
11606 const VkImageCreateInfo& imageCreateInfo,
11610 CallParams callParams;
11611 GetBasicParams(callParams);
11613 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11614 UserDataString userDataStr(allocCreateInfo.
flags, allocCreateInfo.
pUserData);
11615 fprintf(m_File,
"%u,%.3f,%u,vmaCreateImage,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%u,%p,%p,%s\n", callParams.threadId, callParams.time, frameIndex,
11616 imageCreateInfo.flags,
11617 imageCreateInfo.imageType,
11618 imageCreateInfo.format,
11619 imageCreateInfo.extent.width,
11620 imageCreateInfo.extent.height,
11621 imageCreateInfo.extent.depth,
11622 imageCreateInfo.mipLevels,
11623 imageCreateInfo.arrayLayers,
11624 imageCreateInfo.samples,
11625 imageCreateInfo.tiling,
11626 imageCreateInfo.usage,
11627 imageCreateInfo.sharingMode,
11628 imageCreateInfo.initialLayout,
11629 allocCreateInfo.
flags,
11630 allocCreateInfo.
usage,
11634 allocCreateInfo.
pool,
11636 userDataStr.GetString());
// Appends a vmaDestroyBuffer CSV row.
11640 void VmaRecorder::RecordDestroyBuffer(uint32_t frameIndex,
11643 CallParams callParams;
11644 GetBasicParams(callParams);
11646 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11647 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyBuffer,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaDestroyImage CSV row.
11652 void VmaRecorder::RecordDestroyImage(uint32_t frameIndex,
11655 CallParams callParams;
11656 GetBasicParams(callParams);
11658 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11659 fprintf(m_File,
"%u,%.3f,%u,vmaDestroyImage,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaTouchAllocation CSV row.
11664 void VmaRecorder::RecordTouchAllocation(uint32_t frameIndex,
11667 CallParams callParams;
11668 GetBasicParams(callParams);
11670 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11671 fprintf(m_File,
"%u,%.3f,%u,vmaTouchAllocation,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaGetAllocationInfo CSV row.
11676 void VmaRecorder::RecordGetAllocationInfo(uint32_t frameIndex,
11679 CallParams callParams;
11680 GetBasicParams(callParams);
11682 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11683 fprintf(m_File,
"%u,%.3f,%u,vmaGetAllocationInfo,%p\n", callParams.threadId, callParams.time, frameIndex,
// Appends a vmaMakePoolAllocationsLost CSV row.
11688 void VmaRecorder::RecordMakePoolAllocationsLost(uint32_t frameIndex,
11691 CallParams callParams;
11692 GetBasicParams(callParams);
11694 VmaMutexLock lock(m_FileMutex, m_UseMutex);
11695 fprintf(m_File,
"%u,%.3f,%u,vmaMakePoolAllocationsLost,%p\n", callParams.threadId, callParams.time, frameIndex,
// NOTE(review): fragment of the UserDataString constructor (signature elided).
// Visible logic: when user data is a string (presumably gated on the
// USER_DATA_AS_STRING flag in the elided condition) it is used directly;
// otherwise the pointer value is formatted as "%p" into m_PtrStr.
11702 if(pUserData != VMA_NULL)
11706 m_Str = (
const char*)pUserData;
11710 sprintf_s(m_PtrStr,
"%p", pUserData);
// Dumps a "Config,Begin" ... "Config,End" section into the recording file:
// physical-device identity and limits, every memory heap (size, flags) and
// memory type (heapIndex, propertyFlags), whether VK_KHR_dedicated_allocation
// is enabled, and the compile-time VMA_DEBUG_* / heap-size macro settings.
// Replayers use this to reproduce the original environment.
11720 void VmaRecorder::WriteConfiguration(
11721 const VkPhysicalDeviceProperties& devProps,
11722 const VkPhysicalDeviceMemoryProperties& memProps,
11723 bool dedicatedAllocationExtensionEnabled)
11725 fprintf(m_File,
"Config,Begin\n");
11727 fprintf(m_File,
"PhysicalDevice,apiVersion,%u\n", devProps.apiVersion);
11728 fprintf(m_File,
"PhysicalDevice,driverVersion,%u\n", devProps.driverVersion);
11729 fprintf(m_File,
"PhysicalDevice,vendorID,%u\n", devProps.vendorID);
11730 fprintf(m_File,
"PhysicalDevice,deviceID,%u\n", devProps.deviceID);
11731 fprintf(m_File,
"PhysicalDevice,deviceType,%u\n", devProps.deviceType);
11732 fprintf(m_File,
"PhysicalDevice,deviceName,%s\n", devProps.deviceName);
11734 fprintf(m_File,
"PhysicalDeviceLimits,maxMemoryAllocationCount,%u\n", devProps.limits.maxMemoryAllocationCount);
11735 fprintf(m_File,
"PhysicalDeviceLimits,bufferImageGranularity,%llu\n", devProps.limits.bufferImageGranularity);
11736 fprintf(m_File,
"PhysicalDeviceLimits,nonCoherentAtomSize,%llu\n", devProps.limits.nonCoherentAtomSize);
11738 fprintf(m_File,
"PhysicalDeviceMemory,HeapCount,%u\n", memProps.memoryHeapCount);
11739 for(uint32_t i = 0; i < memProps.memoryHeapCount; ++i)
11741 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,size,%llu\n", i, memProps.memoryHeaps[i].size);
11742 fprintf(m_File,
"PhysicalDeviceMemory,Heap,%u,flags,%u\n", i, memProps.memoryHeaps[i].flags);
11744 fprintf(m_File,
"PhysicalDeviceMemory,TypeCount,%u\n", memProps.memoryTypeCount);
11745 for(uint32_t i = 0; i < memProps.memoryTypeCount; ++i)
11747 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,heapIndex,%u\n", i, memProps.memoryTypes[i].heapIndex);
11748 fprintf(m_File,
"PhysicalDeviceMemory,Type,%u,propertyFlags,%u\n", i, memProps.memoryTypes[i].propertyFlags);
11751 fprintf(m_File,
"Extension,VK_KHR_dedicated_allocation,%u\n", dedicatedAllocationExtensionEnabled ? 1 : 0);
11753 fprintf(m_File,
"Macro,VMA_DEBUG_ALWAYS_DEDICATED_MEMORY,%u\n", VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ? 1 : 0);
11754 fprintf(m_File,
"Macro,VMA_DEBUG_ALIGNMENT,%llu\n", (VkDeviceSize)VMA_DEBUG_ALIGNMENT);
11755 fprintf(m_File,
"Macro,VMA_DEBUG_MARGIN,%llu\n", (VkDeviceSize)VMA_DEBUG_MARGIN);
11756 fprintf(m_File,
"Macro,VMA_DEBUG_INITIALIZE_ALLOCATIONS,%u\n", VMA_DEBUG_INITIALIZE_ALLOCATIONS ? 1 : 0);
11757 fprintf(m_File,
"Macro,VMA_DEBUG_DETECT_CORRUPTION,%u\n", VMA_DEBUG_DETECT_CORRUPTION ? 1 : 0);
11758 fprintf(m_File,
"Macro,VMA_DEBUG_GLOBAL_MUTEX,%u\n", VMA_DEBUG_GLOBAL_MUTEX ? 1 : 0);
11759 fprintf(m_File,
"Macro,VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY,%llu\n", (VkDeviceSize)VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY);
11760 fprintf(m_File,
"Macro,VMA_SMALL_HEAP_MAX_SIZE,%llu\n", (VkDeviceSize)VMA_SMALL_HEAP_MAX_SIZE);
11761 fprintf(m_File,
"Macro,VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE,%llu\n", (VkDeviceSize)VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE);
11763 fprintf(m_File,
"Config,End\n");
// Fills the common CSV fields: Win32 thread id and seconds elapsed since the
// recorder's start counter (QueryPerformanceCounter delta / frequency).
11766 void VmaRecorder::GetBasicParams(CallParams& outParams)
11768 outParams.threadId = GetCurrentThreadId();
11770 LARGE_INTEGER counter;
11771 QueryPerformanceCounter(&counter);
11772 outParams.time = (double)(counter.QuadPart - m_StartCounter) / (double)m_Freq;
// Flush(): body elided in this extraction — presumably flushes m_File to disk.
11775 void VmaRecorder::Flush()
// NOTE(review): VmaAllocator_T constructor — its signature line and parts of
// the initializer list are elided in this extraction; this span starts inside
// the init list. Visible behavior: capture device/callbacks/frame state, zero
// the member arrays, apply per-heap size limits from the create info, build
// one VmaBlockVector and one dedicated-allocations vector per memory type,
// and (when recording is requested) construct a VmaRecorder, write its
// configuration section, and record the creation call — or fail with
// VK_ERROR_FEATURE_NOT_PRESENT when recording support is compiled out.
11783 #endif // #if VMA_RECORDING_ENABLED 11791 m_hDevice(pCreateInfo->device),
11792 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
11793 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
11794 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
11795 m_PreferredLargeHeapBlockSize(0),
11796 m_PhysicalDevice(pCreateInfo->physicalDevice),
11797 m_CurrentFrameIndex(0),
11798 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
11801 ,m_pRecorder(VMA_NULL)
// Corruption detection writes uint32 magic values, so the margin must be
// uint32-aligned.
11804 if(VMA_DEBUG_DETECT_CORRUPTION)
11807 VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
11812 #if !(VMA_DEDICATED_ALLOCATION) 11815 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
11819 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
11820 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
11821 memset(&m_MemProps, 0,
sizeof(m_MemProps));
11823 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
11824 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
11826 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
11828 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
11839 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
11840 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
11842 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_ALIGNMENT));
11843 VMA_ASSERT(VmaIsPow2(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY));
11844 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.bufferImageGranularity));
11845 VMA_ASSERT(VmaIsPow2(m_PhysicalDeviceProperties.limits.nonCoherentAtomSize));
// Clamp reported heap sizes to caller-provided limits (budget emulation).
11852 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
11854 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
11855 if(limit != VK_WHOLE_SIZE)
11857 m_HeapSizeLimit[heapIndex] = limit;
11858 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
11860 m_MemProps.memoryHeaps[heapIndex].size = limit;
11866 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
11868 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
11870 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
11873 preferredBlockSize,
11876 GetBufferImageGranularity(),
11883 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
11890 VkResult res = VK_SUCCESS;
11895 #if VMA_RECORDING_ENABLED 11896 m_pRecorder = vma_new(
this, VmaRecorder)();
11898 if(res != VK_SUCCESS)
11902 m_pRecorder->WriteConfiguration(
11903 m_PhysicalDeviceProperties,
11905 m_UseKhrDedicatedAllocation);
11906 m_pRecorder->RecordCreateAllocator(GetCurrentFrameIndex());
11908 VMA_ASSERT(0 &&
"VmaAllocatorCreateInfo::pRecordSettings used, but not supported due to VMA_RECORDING_ENABLED not defined to 1.");
11909 return VK_ERROR_FEATURE_NOT_PRESENT;
// Destructor: records allocator destruction and deletes the recorder (if
// recording was enabled), asserts all custom pools were destroyed by the user,
// then tears down the per-memory-type dedicated-allocation lists and block
// vectors in reverse order.
11916 VmaAllocator_T::~VmaAllocator_T()
11918 #if VMA_RECORDING_ENABLED 11919 if(m_pRecorder != VMA_NULL)
11921 m_pRecorder->RecordDestroyAllocator(GetCurrentFrameIndex());
11922 vma_delete(
this, m_pRecorder);
11926 VMA_ASSERT(m_Pools.empty());
11928 for(
size_t i = GetMemoryTypeCount(); i--; )
11930 vma_delete(
this, m_pDedicatedAllocations[i]);
11931 vma_delete(
this, m_pBlockVectors[i]);
// Populates m_VulkanFunctions in three stages: (1) statically link the core
// entry points when VMA_STATIC_VULKAN_FUNCTIONS == 1, fetching the
// VK_KHR_dedicated_allocation pair via vkGetDeviceProcAddr when that
// extension is enabled; (2) overlay any non-null pointers the user supplied
// in pVulkanFunctions; (3) assert every required pointer ended up non-null
// (the KHR pair only when dedicated allocation is in use).
11935 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
11937 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 11938 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
11939 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
11940 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
11941 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
11942 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
11943 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
11944 m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
11945 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
11946 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
11947 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
11948 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
11949 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
11950 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
11951 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
11952 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
11953 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
11954 #if VMA_DEDICATED_ALLOCATION 11955 if(m_UseKhrDedicatedAllocation)
11957 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
11958 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
11959 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
11960 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
11962 #endif // #if VMA_DEDICATED_ALLOCATION 11963 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 11965 #define VMA_COPY_IF_NOT_NULL(funcName) \ 11966 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 11968 if(pVulkanFunctions != VMA_NULL)
11970 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
11971 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
11972 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
11973 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
11974 VMA_COPY_IF_NOT_NULL(vkMapMemory);
11975 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
11976 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
11977 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
11978 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
11979 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
11980 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
11981 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
11982 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
11983 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
11984 VMA_COPY_IF_NOT_NULL(vkCreateImage);
11985 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
11986 #if VMA_DEDICATED_ALLOCATION 11987 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
11988 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
11992 #undef VMA_COPY_IF_NOT_NULL 11996 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
11997 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
11998 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
11999 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
12000 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
12001 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
12002 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
12003 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
12004 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
12005 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
12006 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
12007 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
12008 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
12009 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
12010 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
12011 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
12012 #if VMA_DEDICATED_ALLOCATION 12013 if(m_UseKhrDedicatedAllocation)
12015 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
12016 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
// Preferred VkDeviceMemory block size for a memory type: 1/8 of the heap for
// "small" heaps (<= VMA_SMALL_HEAP_MAX_SIZE), otherwise the configured
// m_PreferredLargeHeapBlockSize.
12021 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
12023 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
12024 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
12025 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
12026 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates within a specific memory type. Strategy visible in this extraction:
// prefer a dedicated VkDeviceMemory when forced by debug macro, requested by
// the caller, or the size exceeds half the preferred block size (and no custom
// pool is involved); otherwise allocate from the type's block vector, falling
// back to dedicated memory if that fails. NOTE(review): several condition
// and argument lines are elided (original line numbers jump), including the
// NEVER_ALLOCATE checks implied by the bare VK_ERROR_OUT_OF_DEVICE_MEMORY
// returns — verify against full source before relying on exact flow.
12029 VkResult VmaAllocator_T::AllocateMemoryOfType(
12031 VkDeviceSize alignment,
12032 bool dedicatedAllocation,
12033 VkBuffer dedicatedBuffer,
12034 VkImage dedicatedImage,
12036 uint32_t memTypeIndex,
12037 VmaSuballocationType suballocType,
12040 VMA_ASSERT(pAllocation != VMA_NULL);
12041 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Mapping only makes sense for HOST_VISIBLE types (flag cleared otherwise,
// per the elided branch body).
12047 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
12052 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
12053 VMA_ASSERT(blockVector);
12055 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
12056 bool preferDedicatedMemory =
12057 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
12058 dedicatedAllocation ||
12060 size > preferredBlockSize / 2;
12062 if(preferDedicatedMemory &&
12064 finalCreateInfo.
pool == VK_NULL_HANDLE)
12073 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12077 return AllocateDedicatedMemory(
12091 VkResult res = blockVector->Allocate(
12093 m_CurrentFrameIndex.load(),
12099 if(res == VK_SUCCESS)
12107 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Block-vector allocation failed: fall back to a dedicated allocation.
12111 res = AllocateDedicatedMemory(
12117 finalCreateInfo.pUserData,
12121 if(res == VK_SUCCESS)
12124 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
12130 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Performs a dedicated (non-sub-allocated) vkAllocateMemory for one resource,
// optionally chaining VkMemoryDedicatedAllocateInfoKHR when the
// VK_KHR_dedicated_allocation extension path is enabled, then registers the
// allocation in the per-type dedicated-allocations list.
// NOTE(review): some original lines are elided by the extraction.
12137 VkResult VmaAllocator_T::AllocateDedicatedMemory(
12139 VmaSuballocationType suballocType,
12140 uint32_t memTypeIndex,
12142 bool isUserDataString,
12144 VkBuffer dedicatedBuffer,
12145 VkImage dedicatedImage,
12148 VMA_ASSERT(pAllocation);
12150 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
12151 allocInfo.memoryTypeIndex = memTypeIndex;
12152 allocInfo.allocationSize = size;
// Chain the dedicated-allocation info for exactly one of buffer/image.
#if VMA_DEDICATED_ALLOCATION 12155 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
12156 if(m_UseKhrDedicatedAllocation)
12158 if(dedicatedBuffer != VK_NULL_HANDLE)
12160 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
12161 dedicatedAllocInfo.buffer = dedicatedBuffer;
12162 allocInfo.pNext = &dedicatedAllocInfo;
12164 else if(dedicatedImage != VK_NULL_HANDLE)
12166 dedicatedAllocInfo.image = dedicatedImage;
12167 allocInfo.pNext = &dedicatedAllocInfo;
#endif // #if VMA_DEDICATED_ALLOCATION 12173 VkDeviceMemory hMemory = VK_NULL_HANDLE;
12174 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
12177 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Map persistently if requested (condition elided); free memory on map failure.
12181 void* pMappedData = VMA_NULL;
12184 res = (*m_VulkanFunctions.vkMapMemory)(
12193 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
12194 FreeVulkanMemory(memTypeIndex, size, hMemory);
12199 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
12200 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
12201 (*pAllocation)->SetUserData(
this, pUserData);
12202 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12204 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Register in the sorted per-memory-type dedicated-allocation vector.
12209 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12210 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
12211 VMA_ASSERT(pDedicatedAllocations);
12212 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
12215 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries memory requirements for a buffer. When the KHR dedicated-allocation
// path is active, uses vkGetBufferMemoryRequirements2KHR and also reports
// whether a dedicated allocation is required/preferred; otherwise falls back
// to the core function and reports false for both flags.
12220 void VmaAllocator_T::GetBufferMemoryRequirements(
12222 VkMemoryRequirements& memReq,
12223 bool& requiresDedicatedAllocation,
12224 bool& prefersDedicatedAllocation)
const 12226 #if VMA_DEDICATED_ALLOCATION 12227 if(m_UseKhrDedicatedAllocation)
12229 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
12230 memReqInfo.buffer = hBuffer;
// Chain VkMemoryDedicatedRequirementsKHR to receive the dedicated flags.
12232 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
12234 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
12235 memReq2.pNext = &memDedicatedReq;
12237 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
12239 memReq = memReq2.memoryRequirements;
12240 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
12241 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core Vulkan query, no dedicated-allocation information.
#endif // #if VMA_DEDICATED_ALLOCATION 12246 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
12247 requiresDedicatedAllocation =
false;
12248 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR with VkMemoryDedicatedRequirementsKHR when
// the extension path is active, otherwise the core query with both
// dedicated-allocation flags reported as false.
12252 void VmaAllocator_T::GetImageMemoryRequirements(
12254 VkMemoryRequirements& memReq,
12255 bool& requiresDedicatedAllocation,
12256 bool& prefersDedicatedAllocation)
const 12258 #if VMA_DEDICATED_ALLOCATION 12259 if(m_UseKhrDedicatedAllocation)
12261 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
12262 memReqInfo.image = hImage;
12264 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
12266 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
12267 memReq2.pNext = &memDedicatedReq;
12269 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
12271 memReq = memReq2.memoryRequirements;
12272 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
12273 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core Vulkan query, no dedicated-allocation information.
#endif // #if VMA_DEDICATED_ALLOCATION 12278 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
12279 requiresDedicatedAllocation =
false;
12280 prefersDedicatedAllocation =
false;
// Main allocation entry point: validates mutually exclusive create flags,
// routes pool allocations to the pool's block vector, and otherwise iterates
// candidate memory types (best first), retrying with the next type when one
// fails. NOTE(review): several original lines are elided by the extraction,
// including the flag tests whose asserts remain visible below.
12284 VkResult VmaAllocator_T::AllocateMemory(
12285 const VkMemoryRequirements& vkMemReq,
12286 bool requiresDedicatedAllocation,
12287 bool prefersDedicatedAllocation,
12288 VkBuffer dedicatedBuffer,
12289 VkImage dedicatedImage,
12291 VmaSuballocationType suballocType,
12294 VMA_ASSERT(VmaIsPow2(vkMemReq.alignment));
// Invalid flag combinations are rejected up front.
12299 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
12300 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12305 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
12306 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12308 if(requiresDedicatedAllocation)
12312 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
12313 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12315 if(createInfo.
pool != VK_NULL_HANDLE)
12317 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
12318 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
12321 if((createInfo.
pool != VK_NULL_HANDLE) &&
12324 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
12325 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom-pool path: delegate to the pool's block vector, respecting the
// minimum alignment of its memory type.
12328 if(createInfo.
pool != VK_NULL_HANDLE)
12330 const VkDeviceSize alignmentForPool = VMA_MAX(
12331 vkMemReq.alignment,
12332 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
12333 return createInfo.
pool->m_BlockVector.Allocate(
12335 m_CurrentFrameIndex.load(),
// Default path: try the best memory type first (selection call elided).
12345 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
12346 uint32_t memTypeIndex = UINT32_MAX;
12348 if(res == VK_SUCCESS)
12350 VkDeviceSize alignmentForMemType = VMA_MAX(
12351 vkMemReq.alignment,
12352 GetMemoryTypeMinAlignment(memTypeIndex));
12354 res = AllocateMemoryOfType(
12356 alignmentForMemType,
12357 requiresDedicatedAllocation || prefersDedicatedAllocation,
12365 if(res == VK_SUCCESS)
// On failure, exclude this type and retry with the next candidate.
12375 memoryTypeBits &= ~(1u << memTypeIndex);
12378 if(res == VK_SUCCESS)
12380 alignmentForMemType = VMA_MAX(
12381 vkMemReq.alignment,
12382 GetMemoryTypeMinAlignment(memTypeIndex));
12384 res = AllocateMemoryOfType(
12386 alignmentForMemType,
12387 requiresDedicatedAllocation || prefersDedicatedAllocation,
12395 if(res == VK_SUCCESS)
12405 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation: optionally fills it with the "destroyed" debug pattern,
// returns block suballocations to their owning block vector (custom pool or
// default per-type vector), frees dedicated allocations directly, then
// destroys the VmaAllocation_T object itself.
12416 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
12418 VMA_ASSERT(allocation);
// TouchAllocation guards against operating on a lost allocation.
12420 if(TouchAllocation(allocation))
12422 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
12424 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
12427 switch(allocation->GetType())
12429 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12431 VmaBlockVector* pBlockVector = VMA_NULL;
12432 VmaPool hPool = allocation->GetPool();
12433 if(hPool != VK_NULL_HANDLE)
12435 pBlockVector = &hPool->m_BlockVector;
12439 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
12440 pBlockVector = m_pBlockVectors[memTypeIndex];
12442 pBlockVector->Free(allocation);
12445 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12446 FreeDedicatedMemory(allocation);
// Clear user data before destroying (may free an owned string copy).
12453 allocation->SetUserData(
this, VMA_NULL);
12454 vma_delete(
this, allocation);
// Aggregates statistics over default block vectors, custom pools, and
// dedicated allocations into per-type, per-heap, and total VmaStatInfo, then
// post-processes each (averages etc.).
12457 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Zero-initialize all stat buckets first.
12460 InitStatInfo(pStats->
total);
12461 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
12463 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default per-memory-type block vectors.
12467 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12469 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
12470 VMA_ASSERT(pBlockVector);
12471 pBlockVector->AddStats(pStats);
// Custom pools (guarded by the pools mutex).
12476 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12477 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
12479 m_Pools[poolIndex]->m_BlockVector.AddStats(pStats);
// Dedicated allocations, per memory type (each under its own mutex).
12484 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12486 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
12487 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
12488 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
12489 VMA_ASSERT(pDedicatedAllocVector);
12490 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
12493 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
12494 VmaAddStatInfo(pStats->
total, allocationStatInfo);
12495 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
12496 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Finalize derived values (e.g. averages) for every bucket.
12501 VmaPostprocessCalcStatInfo(pStats->
total);
12502 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
12503 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
12504 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
12505 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// PCI vendor ID of AMD (0x1002 == 4098), used to detect AMD GPUs.
12508 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Defragments eligible allocations: registers each HOST_VISIBLE|HOST_COHERENT
// block allocation with its block vector's defragmentator, runs defragmentation
// over default vectors and custom pools within the caller-provided limits, and
// finally destroys all defragmentators.
// NOTE(review): some original lines are elided by the extraction.
12510 VkResult VmaAllocator_T::Defragment(
12512 size_t allocationCount,
12513 VkBool32* pAllocationsChanged,
12517 if(pAllocationsChanged != VMA_NULL)
12519 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
12521 if(pDefragmentationStats != VMA_NULL)
12523 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
12526 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
12528 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
12530 const size_t poolCount = m_Pools.size();
// Phase 1: collect eligible allocations into defragmentators.
12533 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
12536 VMA_ASSERT(hAlloc);
12537 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
// Only host-visible, host-coherent, non-lost block allocations qualify.
12539 const VkMemoryPropertyFlags requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
12540 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
12542 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags) &&
12544 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
12546 VmaBlockVector* pAllocBlockVector = VMA_NULL;
12548 const VmaPool hAllocPool = hAlloc->GetPool();
12550 if(hAllocPool != VK_NULL_HANDLE)
// Only pools using the default algorithm (0) can be defragmented.
12553 if(hAllocPool->m_BlockVector.GetAlgorithm() == 0)
12555 pAllocBlockVector = &hAllocPool->m_BlockVector;
12561 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
12564 if(pAllocBlockVector != VMA_NULL)
12566 VmaDefragmentator*
const pDefragmentator =
12567 pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
12568 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
12569 &pAllocationsChanged[allocIndex] : VMA_NULL;
12570 pDefragmentator->AddAllocation(hAlloc, pChanged);
// Phase 2: run defragmentation, bounded by the optional info struct.
12575 VkResult result = VK_SUCCESS;
12579 VkDeviceSize maxBytesToMove = SIZE_MAX;
12580 uint32_t maxAllocationsToMove = UINT32_MAX;
12581 if(pDefragmentationInfo != VMA_NULL)
12588 for(uint32_t memTypeIndex = 0;
12589 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
12593 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12595 result = m_pBlockVectors[memTypeIndex]->Defragment(
12596 pDefragmentationStats,
12598 maxAllocationsToMove);
12603 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
12605 result = m_Pools[poolIndex]->m_BlockVector.Defragment(
12606 pDefragmentationStats,
12608 maxAllocationsToMove);
// Phase 3: tear down all defragmentators.
12614 for(
size_t poolIndex = poolCount; poolIndex--; )
12616 m_Pools[poolIndex]->m_BlockVector.DestroyDefragmentator();
12620 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
12622 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
12624 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// Body of VmaAllocator_T::GetAllocationInfo (function header elided by the
// extraction). Fills VmaAllocationInfo for an allocation; for lost-capable
// allocations it uses a compare-exchange loop on the last-use frame index to
// atomically "touch" the allocation, reporting zeroed fields when lost.
12633 if(hAllocation->CanBecomeLost())
12639 const uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12640 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost: report a null-ish info (offset 0, size preserved, user data kept).
12643 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
12647 pAllocationInfo->
offset = 0;
12648 pAllocationInfo->
size = hAllocation->GetSize();
12650 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report real parameters.
12653 else if(localLastUseFrameIndex == localCurrFrameIndex)
12655 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
12656 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
12657 pAllocationInfo->
offset = hAllocation->GetOffset();
12658 pAllocationInfo->
size = hAllocation->GetSize();
12660 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise try to advance the last-use frame index and loop.
12665 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12667 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable path: touch only under VMA_STATS_STRING_ENABLED, then
// report all parameters directly.
#if VMA_STATS_STRING_ENABLED 12675 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12676 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12679 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
12680 if(localLastUseFrameIndex == localCurrFrameIndex)
12686 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12688 localLastUseFrameIndex = localCurrFrameIndex;
12694 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
12695 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
12696 pAllocationInfo->
offset = hAllocation->GetOffset();
12697 pAllocationInfo->
size = hAllocation->GetSize();
12698 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
12699 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks an allocation as used in the current frame. For lost-capable
// allocations, returns false when already lost; otherwise advances the
// last-use frame index via a compare-exchange loop. Non-lost-capable
// allocations only update the frame index under VMA_STATS_STRING_ENABLED.
12703 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
12706 if(hAllocation->CanBecomeLost())
12708 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12709 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12712 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
12716 else if(localLastUseFrameIndex == localCurrFrameIndex)
// CAS loop: retry until we (or another thread) set the current frame.
12722 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12724 localLastUseFrameIndex = localCurrFrameIndex;
#if VMA_STATS_STRING_ENABLED 12732 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
12733 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
12736 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
12737 if(localLastUseFrameIndex == localCurrFrameIndex)
12743 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
12745 localLastUseFrameIndex = localCurrFrameIndex;
// Body of VmaAllocator_T::CreatePool (header and create-info validation lines
// elided by the extraction). Creates a VmaPool_T with a computed preferred
// block size, pre-creates its minimum blocks, assigns a unique id, and
// registers it in the sorted pool list.
12757 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u, flags=%u", pCreateInfo->
memoryTypeIndex, pCreateInfo->
flags);
12767 return VK_ERROR_INITIALIZATION_FAILED;
12770 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(newCreateInfo.
memoryTypeIndex);
12772 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo, preferredBlockSize);
12774 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
12775 if(res != VK_SUCCESS)
// Roll back the partially-created pool on failure.
12777 vma_delete(
this, *pPool);
12784 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12785 (*pPool)->SetId(m_NextPoolId++);
12786 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Removes a pool from the sorted pool list (under the pools mutex) and
// destroys it.
12792 void VmaAllocator_T::DestroyPool(
VmaPool pool)
12796 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12797 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
12798 VMA_ASSERT(success &&
"Pool not found in Allocator.");
12801 vma_delete(
this, pool);
// Body of VmaAllocator_T::GetPoolStats (header elided): delegates to the
// pool's block vector.
12806 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Atomically stores the application's current frame index (used for
// lost-allocation bookkeeping).
12809 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
12811 m_CurrentFrameIndex.store(frameIndex);
// Forwards to the pool's block vector to mark eligible allocations lost as of
// the current frame; optionally returns the number of allocations affected.
12814 void VmaAllocator_T::MakePoolAllocationsLost(
12816 size_t* pLostAllocationCount)
12818 hPool->m_BlockVector.MakePoolAllocationsLost(
12819 m_CurrentFrameIndex.load(),
12820 pLostAllocationCount);
// Runs corruption detection (margin validation) on a single pool's blocks.
12823 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
12825 return hPool->m_BlockVector.CheckCorruption();
// Checks corruption across all default block vectors and custom pools whose
// memory type matches memoryTypeBits. Starts from FEATURE_NOT_PRESENT and
// upgrades to VK_SUCCESS once any vector actually supports the check
// (error handling for other results elided by the extraction).
12828 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
12830 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
// Default per-type block vectors.
12833 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
12835 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
12837 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
12838 VMA_ASSERT(pBlockVector);
12839 VkResult localRes = pBlockVector->CheckCorruption();
12842 case VK_ERROR_FEATURE_NOT_PRESENT:
12845 finalRes = VK_SUCCESS;
// Custom pools (under the pools mutex).
12855 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
12856 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
12858 if(((1u << m_Pools[poolIndex]->m_BlockVector.GetMemoryTypeIndex()) & memoryTypeBits) != 0)
12860 VkResult localRes = m_Pools[poolIndex]->m_BlockVector.CheckCorruption();
12863 case VK_ERROR_FEATURE_NOT_PRESENT:
12866 finalRes = VK_SUCCESS;
// Creates a placeholder allocation object that is permanently in the "lost"
// state (frame index VMA_FRAME_INDEX_LOST, no user-data string).
12878 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
12880 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
12881 (*pAllocation)->InitLost();
// Wraps vkAllocateMemory, enforcing the optional per-heap size limit (treated
// as a budget decremented under m_HeapSizeLimitMutex) and invoking the
// user-supplied device-memory allocate callback on success.
12884 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
12886 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
// VK_WHOLE_SIZE means "no limit configured for this heap".
12889 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
12891 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
12892 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
12894 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
12895 if(res == VK_SUCCESS)
12897 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
// Budget exhausted: fail without calling the driver.
12902 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
12907 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
// Notify the user callback after a successful allocation.
12910 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
12912 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
// Wraps vkFreeMemory: invokes the user free callback first, frees the device
// memory, then returns the freed size to the per-heap budget if a limit is
// configured for that heap.
12918 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
12920 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
12922 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
12925 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
12927 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
12928 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
12930 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
12931 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation for host access. Lost-capable allocations cannot be
// mapped. Block allocations map the whole owning block (ref-counted) and
// offset the returned pointer; dedicated allocations map directly.
12935 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
12937 if(hAllocation->CanBecomeLost())
12939 return VK_ERROR_MEMORY_MAP_FAILED;
12942 switch(hAllocation->GetType())
12944 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12946 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
12947 char *pBytes = VMA_NULL;
12948 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
12949 if(res == VK_SUCCESS)
// Point into the block at this allocation's offset and bump its map count.
12951 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
12952 hAllocation->BlockAllocMap();
12956 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12957 return hAllocation->DedicatedAllocMap(
this, ppData);
12960 return VK_ERROR_MEMORY_MAP_FAILED;
// Body of VmaAllocator_T::Unmap (header elided by the extraction): reverses
// Map() — decrements the block map ref-count and unmaps the owning block for
// block allocations, or unmaps the dedicated allocation directly.
12966 switch(hAllocation->GetType())
12968 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12970 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
12971 hAllocation->BlockAllocUnmap();
12972 pBlock->Unmap(
this, 1);
12975 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12976 hAllocation->DedicatedAllocUnmap(
this);
// Binds a buffer to an allocation's memory: direct vkBindBufferMemory for
// dedicated allocations; for block allocations, delegates to the block so the
// bind is serialized/offset correctly.
12983 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
12985 VkResult res = VK_SUCCESS;
12986 switch(hAllocation->GetType())
12988 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
12989 res = GetVulkanFunctions().vkBindBufferMemory(
12992 hAllocation->GetMemory(),
12995 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
12997 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
12998 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
12999 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
// Image counterpart of BindBufferMemory: direct vkBindImageMemory for
// dedicated allocations, delegated to the owning block for block allocations.
13008 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
13010 VkResult res = VK_SUCCESS;
13011 switch(hAllocation->GetType())
13013 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13014 res = GetVulkanFunctions().vkBindImageMemory(
13017 hAllocation->GetMemory(),
13020 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13022 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
13023 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
13024 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
// Flushes or invalidates a byte range of an allocation on non-coherent memory
// types. Builds a VkMappedMemoryRange aligned to nonCoherentAtomSize (as the
// Vulkan spec requires) and clamps it to the allocation/block size, then calls
// vkFlushMappedMemoryRanges or vkInvalidateMappedMemoryRanges per `op`.
// No-op for coherent memory types or size == 0.
13033 void VmaAllocator_T::FlushOrInvalidateAllocation(
13035 VkDeviceSize offset, VkDeviceSize size,
13036 VMA_CACHE_OPERATION op)
13038 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
13039 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
13041 const VkDeviceSize allocationSize = hAllocation->GetSize();
13042 VMA_ASSERT(offset <= allocationSize);
13044 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
13046 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
13047 memRange.memory = hAllocation->GetMemory();
13049 switch(hAllocation->GetType())
// Dedicated: range is relative to the start of the VkDeviceMemory.
13051 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
13052 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
13053 if(size == VK_WHOLE_SIZE)
13055 memRange.size = allocationSize - memRange.offset;
13059 VMA_ASSERT(offset + size <= allocationSize);
13060 memRange.size = VMA_MIN(
13061 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
13062 allocationSize - memRange.offset);
// Block: compute the range within the allocation, then shift by the
// allocation's offset inside the block and clamp to the block size.
13066 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
13069 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
13070 if(size == VK_WHOLE_SIZE)
13072 size = allocationSize - offset;
13076 VMA_ASSERT(offset + size <= allocationSize);
13078 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
13081 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
13082 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
13083 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_pMetadata->GetSize();
13084 memRange.offset += allocationOffset;
13085 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
13096 case VMA_CACHE_FLUSH:
13097 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
13099 case VMA_CACHE_INVALIDATE:
13100 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
// Frees a dedicated allocation: removes it from the per-type sorted dedicated
// list (under that type's mutex), then releases the underlying VkDeviceMemory
// via FreeVulkanMemory. (Unmap-before-free lines elided by the extraction.)
13109 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
13111 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
13113 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
13115 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
13116 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
13117 VMA_ASSERT(pDedicatedAllocations);
13118 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
13119 VMA_ASSERT(success);
13122 VkDeviceMemory hMemory = allocation->GetMemory();
13134 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
13136 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Debug helper: fills an allocation's memory with a byte pattern (created /
// destroyed markers). Only acts when VMA_DEBUG_INITIALIZE_ALLOCATIONS is on,
// the allocation cannot become lost, and its memory type is HOST_VISIBLE.
// Maps, memsets, flushes (for non-coherent types), and unmaps.
13139 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
13141 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
13142 !hAllocation->CanBecomeLost() &&
13143 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
13145 void* pData = VMA_NULL;
13146 VkResult res = Map(hAllocation, &pData);
13147 if(res == VK_SUCCESS)
13149 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
13150 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
13151 Unmap(hAllocation);
13155 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
// Writes the detailed JSON map of the allocator's state: dedicated
// allocations per memory type, default block vectors ("DefaultPools"),
// and custom pools ("Pools"). Object begin/end calls are partially elided
// by the extraction.
#if VMA_STATS_STRING_ENABLED 13162 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
13164 bool dedicatedAllocationsStarted =
false;
13165 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13167 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
13168 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
13169 VMA_ASSERT(pDedicatedAllocVector);
13170 if(pDedicatedAllocVector->empty() ==
false)
// Open the "DedicatedAllocations" object lazily, on the first non-empty type.
13172 if(dedicatedAllocationsStarted ==
false)
13174 dedicatedAllocationsStarted =
true;
13175 json.WriteString(
"DedicatedAllocations");
13176 json.BeginObject();
13179 json.BeginString(
"Type ");
13180 json.ContinueString(memTypeIndex);
13185 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
13187 json.BeginObject(
true);
13189 hAlloc->PrintParameters(json);
13196 if(dedicatedAllocationsStarted)
// Default per-memory-type block vectors, also opened lazily.
13202 bool allocationsStarted =
false;
13203 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
13205 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
13207 if(allocationsStarted ==
false)
13209 allocationsStarted =
true;
13210 json.WriteString(
"DefaultPools");
13211 json.BeginObject();
13214 json.BeginString(
"Type ");
13215 json.ContinueString(memTypeIndex);
13218 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
13221 if(allocationsStarted)
// Custom pools, keyed by pool id (under the pools mutex).
13229 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
13230 const size_t poolCount = m_Pools.size();
13233 json.WriteString(
"Pools");
13234 json.BeginObject();
13235 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
13237 json.BeginString();
13238 json.ContinueString(m_Pools[poolIndex]->GetId());
13241 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
// Public API body of vmaCreateAllocator (header and object construction lines
// elided by the extraction): validates arguments and initializes the new
// allocator from the create info.
#endif // #if VMA_STATS_STRING_ENABLED 13257 VMA_ASSERT(pCreateInfo && pAllocator);
13258 VMA_DEBUG_LOG(
"vmaCreateAllocator");
13260 return (*pAllocator)->Init(pCreateInfo);
// Public API body of vmaDestroyAllocator (header elided): copies the
// allocation callbacks out before destroying the allocator, since they are
// needed to free the allocator object itself.
13266 if(allocator != VK_NULL_HANDLE)
13268 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
13269 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
13270 vma_delete(&allocationCallbacks, allocator);
// Public API body (header elided): returns a pointer to the allocator's
// cached VkPhysicalDeviceProperties.
13276 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
13278 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
13279 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
// Public API body (header elided): returns a pointer to the allocator's
// cached VkPhysicalDeviceMemoryProperties.
13284 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
13286 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
13287 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
// Public API body (header elided): returns the property flags of one memory
// type after bounds-checking the index.
13292 uint32_t memoryTypeIndex,
13293 VkMemoryPropertyFlags* pFlags)
13295 VMA_ASSERT(allocator && pFlags);
13296 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
13297 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
// Public API body (header elided): forwards the frame index to the allocator;
// VMA_FRAME_INDEX_LOST is reserved and therefore rejected.
13302 uint32_t frameIndex)
13304 VMA_ASSERT(allocator);
13305 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
13307 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13309 allocator->SetCurrentFrameIndex(frameIndex);
// Public API body of vmaCalculateStats (header elided): delegates to the
// allocator's stat aggregation under the optional global debug mutex.
13316 VMA_ASSERT(allocator && pStats);
13317 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13318 allocator->CalculateStats(pStats);
// Builds a JSON statistics string: total stats, then per-heap sections with
// size/flags/stats and nested per-type sections with property flags, plus the
// optional detailed map. The result is returned in a heap buffer the caller
// must release with vmaFreeStatsString.
#if VMA_STATS_STRING_ENABLED 13325 char** ppStatsString,
13326 VkBool32 detailedMap)
13328 VMA_ASSERT(allocator && ppStatsString);
13329 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13331 VmaStringBuilder sb(allocator);
13333 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
13334 json.BeginObject();
13337 allocator->CalculateStats(&stats);
13339 json.WriteString(
"Total");
13340 VmaPrintStatInfo(json, stats.
total);
// One object per memory heap.
13342 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
13344 json.BeginString(
"Heap ");
13345 json.ContinueString(heapIndex);
13347 json.BeginObject();
13349 json.WriteString(
"Size");
13350 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
13352 json.WriteString(
"Flags");
13353 json.BeginArray(
true);
13354 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
13356 json.WriteString(
"DEVICE_LOCAL");
13362 json.WriteString(
"Stats");
13363 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
// Nested: memory types belonging to this heap.
13366 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
13368 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
13370 json.BeginString(
"Type ");
13371 json.ContinueString(typeIndex);
13374 json.BeginObject();
13376 json.WriteString(
"Flags");
13377 json.BeginArray(
true);
13378 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
13379 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
13381 json.WriteString(
"DEVICE_LOCAL");
13383 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
13385 json.WriteString(
"HOST_VISIBLE");
13387 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
13389 json.WriteString(
"HOST_COHERENT");
13391 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
13393 json.WriteString(
"HOST_CACHED");
13395 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
13397 json.WriteString(
"LAZILY_ALLOCATED");
13403 json.WriteString(
"Stats");
13404 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
13413 if(detailedMap == VK_TRUE)
13415 allocator->PrintDetailedMap(json);
// Copy the builder's contents into a NUL-terminated heap buffer.
13421 const size_t len = sb.GetLength();
13422 char*
const pChars = vma_new_array(allocator,
char, len + 1);
13425 memcpy(pChars, sb.GetData(), len);
13427 pChars[len] =
'\0';
13428 *ppStatsString = pChars;
13433 char* pStatsString)
13435 if(pStatsString != VMA_NULL)
13437 VMA_ASSERT(allocator);
13438 size_t len = strlen(pStatsString);
13439 vma_delete_array(allocator, pStatsString, len + 1);
13443 #endif // #if VMA_STATS_STRING_ENABLED 13450 uint32_t memoryTypeBits,
13452 uint32_t* pMemoryTypeIndex)
13454 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13455 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13456 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13463 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
13464 uint32_t preferredFlags = pAllocationCreateInfo->
preferredFlags;
13469 preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13473 switch(pAllocationCreateInfo->
usage)
13478 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
13480 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
13484 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
13487 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13488 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
13490 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
13494 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
13495 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
13501 *pMemoryTypeIndex = UINT32_MAX;
13502 uint32_t minCost = UINT32_MAX;
13503 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
13504 memTypeIndex < allocator->GetMemoryTypeCount();
13505 ++memTypeIndex, memTypeBit <<= 1)
13508 if((memTypeBit & memoryTypeBits) != 0)
13510 const VkMemoryPropertyFlags currFlags =
13511 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
13513 if((requiredFlags & ~currFlags) == 0)
13516 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
13518 if(currCost < minCost)
13520 *pMemoryTypeIndex = memTypeIndex;
13525 minCost = currCost;
13530 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
13535 const VkBufferCreateInfo* pBufferCreateInfo,
13537 uint32_t* pMemoryTypeIndex)
13539 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13540 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
13541 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13542 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13544 const VkDevice hDev = allocator->m_hDevice;
13545 VkBuffer hBuffer = VK_NULL_HANDLE;
13546 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
13547 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
13548 if(res == VK_SUCCESS)
13550 VkMemoryRequirements memReq = {};
13551 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
13552 hDev, hBuffer, &memReq);
13556 memReq.memoryTypeBits,
13557 pAllocationCreateInfo,
13560 allocator->GetVulkanFunctions().vkDestroyBuffer(
13561 hDev, hBuffer, allocator->GetAllocationCallbacks());
13568 const VkImageCreateInfo* pImageCreateInfo,
13570 uint32_t* pMemoryTypeIndex)
13572 VMA_ASSERT(allocator != VK_NULL_HANDLE);
13573 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
13574 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
13575 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
13577 const VkDevice hDev = allocator->m_hDevice;
13578 VkImage hImage = VK_NULL_HANDLE;
13579 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
13580 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
13581 if(res == VK_SUCCESS)
13583 VkMemoryRequirements memReq = {};
13584 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
13585 hDev, hImage, &memReq);
13589 memReq.memoryTypeBits,
13590 pAllocationCreateInfo,
13593 allocator->GetVulkanFunctions().vkDestroyImage(
13594 hDev, hImage, allocator->GetAllocationCallbacks());
13604 VMA_ASSERT(allocator && pCreateInfo && pPool);
13606 VMA_DEBUG_LOG(
"vmaCreatePool");
13608 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13610 VkResult res = allocator->CreatePool(pCreateInfo, pPool);
13612 #if VMA_RECORDING_ENABLED 13613 if(allocator->GetRecorder() != VMA_NULL)
13615 allocator->GetRecorder()->RecordCreatePool(allocator->GetCurrentFrameIndex(), *pCreateInfo, *pPool);
13626 VMA_ASSERT(allocator);
13628 if(pool == VK_NULL_HANDLE)
13633 VMA_DEBUG_LOG(
"vmaDestroyPool");
13635 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13637 #if VMA_RECORDING_ENABLED 13638 if(allocator->GetRecorder() != VMA_NULL)
13640 allocator->GetRecorder()->RecordDestroyPool(allocator->GetCurrentFrameIndex(), pool);
13644 allocator->DestroyPool(pool);
13652 VMA_ASSERT(allocator && pool && pPoolStats);
13654 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13656 allocator->GetPoolStats(pool, pPoolStats);
13662 size_t* pLostAllocationCount)
13664 VMA_ASSERT(allocator && pool);
13666 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13668 #if VMA_RECORDING_ENABLED 13669 if(allocator->GetRecorder() != VMA_NULL)
13671 allocator->GetRecorder()->RecordMakePoolAllocationsLost(allocator->GetCurrentFrameIndex(), pool);
13675 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
13680 VMA_ASSERT(allocator && pool);
13682 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13684 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
13686 return allocator->CheckPoolCorruption(pool);
13691 const VkMemoryRequirements* pVkMemoryRequirements,
13696 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
13698 VMA_DEBUG_LOG(
"vmaAllocateMemory");
13700 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13702 VkResult result = allocator->AllocateMemory(
13703 *pVkMemoryRequirements,
13709 VMA_SUBALLOCATION_TYPE_UNKNOWN,
13712 #if VMA_RECORDING_ENABLED 13713 if(allocator->GetRecorder() != VMA_NULL)
13715 allocator->GetRecorder()->RecordAllocateMemory(
13716 allocator->GetCurrentFrameIndex(),
13717 *pVkMemoryRequirements,
13723 if(pAllocationInfo != VMA_NULL && result == VK_SUCCESS)
13725 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13738 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
13740 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
13742 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13744 VkMemoryRequirements vkMemReq = {};
13745 bool requiresDedicatedAllocation =
false;
13746 bool prefersDedicatedAllocation =
false;
13747 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
13748 requiresDedicatedAllocation,
13749 prefersDedicatedAllocation);
13751 VkResult result = allocator->AllocateMemory(
13753 requiresDedicatedAllocation,
13754 prefersDedicatedAllocation,
13758 VMA_SUBALLOCATION_TYPE_BUFFER,
13761 #if VMA_RECORDING_ENABLED 13762 if(allocator->GetRecorder() != VMA_NULL)
13764 allocator->GetRecorder()->RecordAllocateMemoryForBuffer(
13765 allocator->GetCurrentFrameIndex(),
13767 requiresDedicatedAllocation,
13768 prefersDedicatedAllocation,
13774 if(pAllocationInfo && result == VK_SUCCESS)
13776 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13789 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
13791 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
13793 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13795 VkMemoryRequirements vkMemReq = {};
13796 bool requiresDedicatedAllocation =
false;
13797 bool prefersDedicatedAllocation =
false;
13798 allocator->GetImageMemoryRequirements(image, vkMemReq,
13799 requiresDedicatedAllocation, prefersDedicatedAllocation);
13801 VkResult result = allocator->AllocateMemory(
13803 requiresDedicatedAllocation,
13804 prefersDedicatedAllocation,
13808 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
13811 #if VMA_RECORDING_ENABLED 13812 if(allocator->GetRecorder() != VMA_NULL)
13814 allocator->GetRecorder()->RecordAllocateMemoryForImage(
13815 allocator->GetCurrentFrameIndex(),
13817 requiresDedicatedAllocation,
13818 prefersDedicatedAllocation,
13824 if(pAllocationInfo && result == VK_SUCCESS)
13826 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
13836 VMA_ASSERT(allocator);
13838 if(allocation == VK_NULL_HANDLE)
13843 VMA_DEBUG_LOG(
"vmaFreeMemory");
13845 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13847 #if VMA_RECORDING_ENABLED 13848 if(allocator->GetRecorder() != VMA_NULL)
13850 allocator->GetRecorder()->RecordFreeMemory(
13851 allocator->GetCurrentFrameIndex(),
13856 allocator->FreeMemory(allocation);
13864 VMA_ASSERT(allocator && allocation && pAllocationInfo);
13866 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13868 #if VMA_RECORDING_ENABLED 13869 if(allocator->GetRecorder() != VMA_NULL)
13871 allocator->GetRecorder()->RecordGetAllocationInfo(
13872 allocator->GetCurrentFrameIndex(),
13877 allocator->GetAllocationInfo(allocation, pAllocationInfo);
13884 VMA_ASSERT(allocator && allocation);
13886 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13888 #if VMA_RECORDING_ENABLED 13889 if(allocator->GetRecorder() != VMA_NULL)
13891 allocator->GetRecorder()->RecordTouchAllocation(
13892 allocator->GetCurrentFrameIndex(),
13897 return allocator->TouchAllocation(allocation);
13905 VMA_ASSERT(allocator && allocation);
13907 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13909 allocation->SetUserData(allocator, pUserData);
13911 #if VMA_RECORDING_ENABLED 13912 if(allocator->GetRecorder() != VMA_NULL)
13914 allocator->GetRecorder()->RecordSetAllocationUserData(
13915 allocator->GetCurrentFrameIndex(),
13926 VMA_ASSERT(allocator && pAllocation);
13928 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
13930 allocator->CreateLostAllocation(pAllocation);
13932 #if VMA_RECORDING_ENABLED 13933 if(allocator->GetRecorder() != VMA_NULL)
13935 allocator->GetRecorder()->RecordCreateLostAllocation(
13936 allocator->GetCurrentFrameIndex(),
13947 VMA_ASSERT(allocator && allocation && ppData);
13949 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13951 VkResult res = allocator->Map(allocation, ppData);
13953 #if VMA_RECORDING_ENABLED 13954 if(allocator->GetRecorder() != VMA_NULL)
13956 allocator->GetRecorder()->RecordMapMemory(
13957 allocator->GetCurrentFrameIndex(),
13969 VMA_ASSERT(allocator && allocation);
13971 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13973 #if VMA_RECORDING_ENABLED 13974 if(allocator->GetRecorder() != VMA_NULL)
13976 allocator->GetRecorder()->RecordUnmapMemory(
13977 allocator->GetCurrentFrameIndex(),
13982 allocator->Unmap(allocation);
13987 VMA_ASSERT(allocator && allocation);
13989 VMA_DEBUG_LOG(
"vmaFlushAllocation");
13991 VMA_DEBUG_GLOBAL_MUTEX_LOCK
13993 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
13995 #if VMA_RECORDING_ENABLED 13996 if(allocator->GetRecorder() != VMA_NULL)
13998 allocator->GetRecorder()->RecordFlushAllocation(
13999 allocator->GetCurrentFrameIndex(),
14000 allocation, offset, size);
14007 VMA_ASSERT(allocator && allocation);
14009 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
14011 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14013 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
14015 #if VMA_RECORDING_ENABLED 14016 if(allocator->GetRecorder() != VMA_NULL)
14018 allocator->GetRecorder()->RecordInvalidateAllocation(
14019 allocator->GetCurrentFrameIndex(),
14020 allocation, offset, size);
14027 VMA_ASSERT(allocator);
14029 VMA_DEBUG_LOG(
"vmaCheckCorruption");
14031 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14033 return allocator->CheckCorruption(memoryTypeBits);
14039 size_t allocationCount,
14040 VkBool32* pAllocationsChanged,
14044 VMA_ASSERT(allocator && pAllocations);
14046 VMA_DEBUG_LOG(
"vmaDefragment");
14048 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14050 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
14058 VMA_ASSERT(allocator && allocation && buffer);
14060 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
14062 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14064 return allocator->BindBufferMemory(allocation, buffer);
14072 VMA_ASSERT(allocator && allocation && image);
14074 VMA_DEBUG_LOG(
"vmaBindImageMemory");
14076 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14078 return allocator->BindImageMemory(allocation, image);
14083 const VkBufferCreateInfo* pBufferCreateInfo,
14089 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
14091 VMA_DEBUG_LOG(
"vmaCreateBuffer");
14093 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14095 *pBuffer = VK_NULL_HANDLE;
14096 *pAllocation = VK_NULL_HANDLE;
14099 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
14100 allocator->m_hDevice,
14102 allocator->GetAllocationCallbacks(),
14107 VkMemoryRequirements vkMemReq = {};
14108 bool requiresDedicatedAllocation =
false;
14109 bool prefersDedicatedAllocation =
false;
14110 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
14111 requiresDedicatedAllocation, prefersDedicatedAllocation);
14115 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
14117 VMA_ASSERT(vkMemReq.alignment %
14118 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
14120 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
14122 VMA_ASSERT(vkMemReq.alignment %
14123 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
14125 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
14127 VMA_ASSERT(vkMemReq.alignment %
14128 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
14132 res = allocator->AllocateMemory(
14134 requiresDedicatedAllocation,
14135 prefersDedicatedAllocation,
14138 *pAllocationCreateInfo,
14139 VMA_SUBALLOCATION_TYPE_BUFFER,
14142 #if VMA_RECORDING_ENABLED 14143 if(allocator->GetRecorder() != VMA_NULL)
14145 allocator->GetRecorder()->RecordCreateBuffer(
14146 allocator->GetCurrentFrameIndex(),
14147 *pBufferCreateInfo,
14148 *pAllocationCreateInfo,
14156 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
14160 #if VMA_STATS_STRING_ENABLED 14161 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
14163 if(pAllocationInfo != VMA_NULL)
14165 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14170 allocator->FreeMemory(*pAllocation);
14171 *pAllocation = VK_NULL_HANDLE;
14172 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
14173 *pBuffer = VK_NULL_HANDLE;
14176 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
14177 *pBuffer = VK_NULL_HANDLE;
14188 VMA_ASSERT(allocator);
14190 if(buffer == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
14195 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
14197 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14199 #if VMA_RECORDING_ENABLED 14200 if(allocator->GetRecorder() != VMA_NULL)
14202 allocator->GetRecorder()->RecordDestroyBuffer(
14203 allocator->GetCurrentFrameIndex(),
14208 if(buffer != VK_NULL_HANDLE)
14210 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
14213 if(allocation != VK_NULL_HANDLE)
14215 allocator->FreeMemory(allocation);
14221 const VkImageCreateInfo* pImageCreateInfo,
14227 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
14229 VMA_DEBUG_LOG(
"vmaCreateImage");
14231 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14233 *pImage = VK_NULL_HANDLE;
14234 *pAllocation = VK_NULL_HANDLE;
14237 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
14238 allocator->m_hDevice,
14240 allocator->GetAllocationCallbacks(),
14244 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
14245 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
14246 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
14249 VkMemoryRequirements vkMemReq = {};
14250 bool requiresDedicatedAllocation =
false;
14251 bool prefersDedicatedAllocation =
false;
14252 allocator->GetImageMemoryRequirements(*pImage, vkMemReq,
14253 requiresDedicatedAllocation, prefersDedicatedAllocation);
14255 res = allocator->AllocateMemory(
14257 requiresDedicatedAllocation,
14258 prefersDedicatedAllocation,
14261 *pAllocationCreateInfo,
14265 #if VMA_RECORDING_ENABLED 14266 if(allocator->GetRecorder() != VMA_NULL)
14268 allocator->GetRecorder()->RecordCreateImage(
14269 allocator->GetCurrentFrameIndex(),
14271 *pAllocationCreateInfo,
14279 res = allocator->BindImageMemory(*pAllocation, *pImage);
14283 #if VMA_STATS_STRING_ENABLED 14284 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
14286 if(pAllocationInfo != VMA_NULL)
14288 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
14293 allocator->FreeMemory(*pAllocation);
14294 *pAllocation = VK_NULL_HANDLE;
14295 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
14296 *pImage = VK_NULL_HANDLE;
14299 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
14300 *pImage = VK_NULL_HANDLE;
14311 VMA_ASSERT(allocator);
14313 if(image == VK_NULL_HANDLE && allocation == VK_NULL_HANDLE)
14318 VMA_DEBUG_LOG(
"vmaDestroyImage");
14320 VMA_DEBUG_GLOBAL_MUTEX_LOCK
14322 #if VMA_RECORDING_ENABLED 14323 if(allocator->GetRecorder() != VMA_NULL)
14325 allocator->GetRecorder()->RecordDestroyImage(
14326 allocator->GetCurrentFrameIndex(),
14331 if(image != VK_NULL_HANDLE)
14333 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
14335 if(allocation != VK_NULL_HANDLE)
14337 allocator->FreeMemory(allocation);
14341 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1567
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1868
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1624
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
Definition: vk_mem_alloc.h:1598
size_t blockCount
Number of VkDeviceMemory blocks allocated for this pool.
Definition: vk_mem_alloc.h:2190
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1579
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1825
Definition: vk_mem_alloc.h:1928
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1571
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:2290
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1621
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2535
Use this flag if you always allocate only buffers and linear images or only optimal images out of this pool, so Buffer-Image Granularity can be ignored.
Definition: vk_mem_alloc.h:2079
#define VMA_RECORDING_ENABLED
Definition: vk_mem_alloc.h:1468
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInfo::frameInUseCount back from now.
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:2171
Definition: vk_mem_alloc.h:1905
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1560
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1978
Definition: vk_mem_alloc.h:1852
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1633
Enables alternative, buddy allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2107
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1.
Definition: vk_mem_alloc.h:1686
Description of an Allocator to be created.
Definition: vk_mem_alloc.h:1618
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1856
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current frame.
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1758
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1576
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1757
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2539
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1650
VmaStatInfo total
Definition: vk_mem_alloc.h:1767
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2547
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1962
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2530
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1577
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1502
Represents main object of this library initialized.
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1627
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:2121
Definition: vk_mem_alloc.h:2115
const VmaRecordSettings * pRecordSettings
Parameters for recording of VMA calls. Can be null.
Definition: vk_mem_alloc.h:1693
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:2300
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1572
Enables flush after recording every function call.
Definition: vk_mem_alloc.h:1596
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo::pUserData and changed using vmaSetAllocationUserData().
Definition: vk_mem_alloc.h:1999
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:2141
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost.
Definition: vk_mem_alloc.h:2177
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1558
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:2124
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VmaMemoryUsage
Definition: vk_mem_alloc.h:1803
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2525
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2543
Definition: vk_mem_alloc.h:1842
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1986
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1575
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1763
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1508
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
Definition: vk_mem_alloc.h:1946
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1529
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
VkFlags VmaRecordFlags
Definition: vk_mem_alloc.h:1600
Allocator and all objects created from it will not be synchronized internally, so you must guarantee they are used from only one thread at a time or synchronized externally by you.
Definition: vk_mem_alloc.h:1534
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2545
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1973
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region available for new allocation.
Definition: vk_mem_alloc.h:2187
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1568
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1746
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes. Optional.
Definition: vk_mem_alloc.h:2136
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1521
Definition: vk_mem_alloc.h:2111
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1912
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1759
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1525
Definition: vk_mem_alloc.h:1936
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:2127
Definition: vk_mem_alloc.h:1851
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1574
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1968
Definition: vk_mem_alloc.h:1959
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1749
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1570
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:2149
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1636
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2180
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1957
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1992
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out of particular Vulkan memory heap.
Definition: vk_mem_alloc.h:1674
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1765
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1892
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1758
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1581
VmaRecordFlags flags
Flags for recording. Use VmaRecordFlagBits enum.
Definition: vk_mem_alloc.h:1606
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1523
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1580
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:2163
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1573
Definition: vk_mem_alloc.h:1923
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of an Allocator to be created.
const char * pFilePath
Path to the file that should be written by the recording.
Definition: vk_mem_alloc.h:1614
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:2314
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1630
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1758
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1755
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:2168
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions...
Definition: vk_mem_alloc.h:1932
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:2295
Definition: vk_mem_alloc.h:1943
Definition: vk_mem_alloc.h:1955
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2541
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1566
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1753
Definition: vk_mem_alloc.h:1808
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:2117
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
Definition: vk_mem_alloc.h:1603
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1751
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1578
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1582
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1879
Definition: vk_mem_alloc.h:1950
Definition: vk_mem_alloc.h:1835
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:2309
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1556
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1569
Enables alternative, linear allocation algorithm in this pool.
Definition: vk_mem_alloc.h:2096
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:2276
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1940
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:2061
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1759
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
Definition: vk_mem_alloc.h:1918
VmaRecordFlagBits
Flags to be used in VmaRecordSettings::flags.
Definition: vk_mem_alloc.h:1590
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1766
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:2174
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1759
struct VmaRecordSettings VmaRecordSettings
Parameters for recording calls to VMA functions. To be used in VmaAllocatorCreateInfo::pRecordSetting...
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:2281