23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1184 #include <vulkan/vulkan.h> 1186 #if !defined(VMA_DEDICATED_ALLOCATION) 1187 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1188 #define VMA_DEDICATED_ALLOCATION 1 1190 #define VMA_DEDICATED_ALLOCATION 0 1208 uint32_t memoryType,
1209 VkDeviceMemory memory,
1214 uint32_t memoryType,
1215 VkDeviceMemory memory,
1287 #if VMA_DEDICATED_ALLOCATION 1288 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1289 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1380 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1388 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1398 uint32_t memoryTypeIndex,
1399 VkMemoryPropertyFlags* pFlags);
1411 uint32_t frameIndex);
1444 #define VMA_STATS_STRING_ENABLED 1 1446 #if VMA_STATS_STRING_ENABLED 1453 char** ppStatsString,
1454 VkBool32 detailedMap);
1458 char* pStatsString);
1460 #endif // #if VMA_STATS_STRING_ENABLED 1654 uint32_t memoryTypeBits,
1656 uint32_t* pMemoryTypeIndex);
1672 const VkBufferCreateInfo* pBufferCreateInfo,
1674 uint32_t* pMemoryTypeIndex);
1690 const VkImageCreateInfo* pImageCreateInfo,
1692 uint32_t* pMemoryTypeIndex);
1823 size_t* pLostAllocationCount);
1922 const VkMemoryRequirements* pVkMemoryRequirements,
2228 size_t allocationCount,
2229 VkBool32* pAllocationsChanged,
2295 const VkBufferCreateInfo* pBufferCreateInfo,
2320 const VkImageCreateInfo* pImageCreateInfo,
2346 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 2349 #if defined(__cplusplus) && defined(__INTELLISENSE__) 2350 #define VMA_IMPLEMENTATION 2353 #ifdef VMA_IMPLEMENTATION 2354 #undef VMA_IMPLEMENTATION 2376 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 2377 #define VMA_STATIC_VULKAN_FUNCTIONS 1 2389 #if VMA_USE_STL_CONTAINERS 2390 #define VMA_USE_STL_VECTOR 1 2391 #define VMA_USE_STL_UNORDERED_MAP 1 2392 #define VMA_USE_STL_LIST 1 2395 #if VMA_USE_STL_VECTOR 2399 #if VMA_USE_STL_UNORDERED_MAP 2400 #include <unordered_map> 2403 #if VMA_USE_STL_LIST 2412 #include <algorithm> 2418 #define VMA_NULL nullptr 2421 #if defined(__APPLE__) || defined(__ANDROID__) 2423 void *aligned_alloc(
size_t alignment,
size_t size)
2426 if(alignment <
sizeof(
void*))
2428 alignment =
sizeof(
void*);
2432 if(posix_memalign(&pointer, alignment, size) == 0)
2446 #define VMA_ASSERT(expr) assert(expr) 2448 #define VMA_ASSERT(expr) 2454 #ifndef VMA_HEAVY_ASSERT 2456 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 2458 #define VMA_HEAVY_ASSERT(expr) 2462 #ifndef VMA_ALIGN_OF 2463 #define VMA_ALIGN_OF(type) (__alignof(type)) 2466 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 2468 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 2470 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 2474 #ifndef VMA_SYSTEM_FREE 2476 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 2478 #define VMA_SYSTEM_FREE(ptr) free(ptr) 2483 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 2487 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 2491 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 2495 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 2498 #ifndef VMA_DEBUG_LOG 2499 #define VMA_DEBUG_LOG(format, ...) 2509 #if VMA_STATS_STRING_ENABLED 2510 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
2512 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Formats a 64-bit unsigned integer as decimal text into outStr.
// snprintf NUL-terminates as long as strLen > 0.
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}
// Formats a pointer value into outStr. The exact %p spelling is
// implementation-defined but stable on a given platform.
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
2530 void Lock() { m_Mutex.lock(); }
2531 void Unlock() { m_Mutex.unlock(); }
2535 #define VMA_MUTEX VmaMutex 2546 #ifndef VMA_ATOMIC_UINT32 2547 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 2550 #ifndef VMA_BEST_FIT 2563 #define VMA_BEST_FIT (1) 2566 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 2571 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 2574 #ifndef VMA_DEBUG_ALIGNMENT 2579 #define VMA_DEBUG_ALIGNMENT (1) 2582 #ifndef VMA_DEBUG_MARGIN 2587 #define VMA_DEBUG_MARGIN (0) 2590 #ifndef VMA_DEBUG_INITIALIZE_ALLOCATIONS 2595 #define VMA_DEBUG_INITIALIZE_ALLOCATIONS (0) 2598 #ifndef VMA_DEBUG_DETECT_CORRUPTION 2604 #define VMA_DEBUG_DETECT_CORRUPTION (0) 2607 #ifndef VMA_DEBUG_GLOBAL_MUTEX 2612 #define VMA_DEBUG_GLOBAL_MUTEX (0) 2615 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 2620 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 2623 #ifndef VMA_SMALL_HEAP_MAX_SIZE 2624 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 2628 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 2629 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 2633 #ifndef VMA_CLASS_NO_COPY 2634 #define VMA_CLASS_NO_COPY(className) \ 2636 className(const className&) = delete; \ 2637 className& operator=(const className&) = delete; 2640 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
// Magic word written into the debug margins around allocations and checked
// later to detect out-of-bounds writes (heap corruption).
static const uint32_t VMA_CORRUPTION_DETECTION_MAGIC_VALUE = 0x7F84E666;

// Byte patterns used to fill allocation memory on creation/destruction when
// VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, to expose use of
// uninitialized or freed memory.
static const uint8_t VMA_ALLOCATION_FILL_PATTERN_CREATED   = 0xDC;
static const uint8_t VMA_ALLOCATION_FILL_PATTERN_DESTROYED = 0xEF;
2652 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
2653 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (population count) using the
// classic SWAR reduction: pairs, nibbles, bytes, half-words.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t count = v - ((v >> 1) & 0x55555555);
    count = ((count >> 2) & 0x33333333) + (count & 0x33333333);
    count = ((count >> 4) + count) & 0x0F0F0F0F;
    count = ((count >> 8) + count) & 0x00FF00FF;
    count = ((count >> 16) + count) & 0x0000FFFF;
    return count;
}
// Rounds val up to the nearest multiple of align.
// Division form works for any positive align, not only powers of two.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}
// Rounds val down to the nearest multiple of align.
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    return val / align * align;
}
2682 template <
typename T>
2683 inline T VmaRoundDiv(T x, T y)
2685 return (x + (y / (T)2)) / y;
// Lomuto partition step for VmaQuickSort: uses the last element as pivot,
// moves all elements ordered before it to the front, and returns the
// pivot's final position.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator pivot = end;
    --pivot;
    Iterator insertPos = beg;
    for(Iterator it = beg; it < pivot; ++it)
    {
        if(cmp(*it, *pivot))
        {
            if(insertPos != it)
            {
                VMA_SWAP(*it, *insertPos);
            }
            ++insertPos;
        }
    }
    if(insertPos != pivot)
    {
        VMA_SWAP(*insertPos, *pivot);
    }
    return insertPos;
}
2713 template<
typename Iterator,
typename Compare>
2714 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
2718 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
2719 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
2720 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
2724 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 2726 #endif // #ifndef VMA_SORT 2735 static inline bool VmaBlocksOnSamePage(
2736 VkDeviceSize resourceAOffset,
2737 VkDeviceSize resourceASize,
2738 VkDeviceSize resourceBOffset,
2739 VkDeviceSize pageSize)
2741 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
2742 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
2743 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
2744 VkDeviceSize resourceBStart = resourceBOffset;
2745 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
2746 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation inside a memory block.
// Used to enforce bufferImageGranularity between neighboring resources.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,          // unused region
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,       // allocated, resource kind unknown
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // image with unknown tiling
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
2766 static inline bool VmaIsBufferImageGranularityConflict(
2767 VmaSuballocationType suballocType1,
2768 VmaSuballocationType suballocType2)
2770 if(suballocType1 > suballocType2)
2772 VMA_SWAP(suballocType1, suballocType2);
2775 switch(suballocType1)
2777 case VMA_SUBALLOCATION_TYPE_FREE:
2779 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
2781 case VMA_SUBALLOCATION_TYPE_BUFFER:
2783 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2784 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2785 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
2787 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2788 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
2789 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2790 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
2792 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2793 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
2801 static void VmaWriteMagicValue(
void* pData, VkDeviceSize offset)
2803 uint32_t* pDst = (uint32_t*)((
char*)pData + offset);
2804 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
2805 for(
size_t i = 0; i < numberCount; ++i, ++pDst)
2807 *pDst = VMA_CORRUPTION_DETECTION_MAGIC_VALUE;
2811 static bool VmaValidateMagicValue(
const void* pData, VkDeviceSize offset)
2813 const uint32_t* pSrc = (
const uint32_t*)((
const char*)pData + offset);
2814 const size_t numberCount = VMA_DEBUG_MARGIN /
sizeof(uint32_t);
2815 for(
size_t i = 0; i < numberCount; ++i, ++pSrc)
2817 if(*pSrc != VMA_CORRUPTION_DETECTION_MAGIC_VALUE)
2828 VMA_CLASS_NO_COPY(VmaMutexLock)
2830 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
2831 m_pMutex(useMutex ? &mutex : VMA_NULL)
2848 VMA_MUTEX* m_pMutex;
2851 #if VMA_DEBUG_GLOBAL_MUTEX 2852 static VMA_MUTEX gDebugGlobalMutex;
2853 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 2855 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 2859 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Binary search over sorted range [beg, end): returns the iterator to the
first element NOT ordered before key by cmp, or end if all are.
Equivalent to std::lower_bound but independent of <algorithm> configuration.
*/
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
2892 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
2894 if((pAllocationCallbacks != VMA_NULL) &&
2895 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
2897 return (*pAllocationCallbacks->pfnAllocation)(
2898 pAllocationCallbacks->pUserData,
2901 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2905 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
2909 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
2911 if((pAllocationCallbacks != VMA_NULL) &&
2912 (pAllocationCallbacks->pfnFree != VMA_NULL))
2914 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
2918 VMA_SYSTEM_FREE(ptr);
2922 template<
typename T>
2923 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
2925 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
2928 template<
typename T>
2929 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
2931 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
2934 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 2936 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 2938 template<
typename T>
2939 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
2942 VmaFree(pAllocationCallbacks, ptr);
2945 template<
typename T>
2946 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
2950 for(
size_t i = count; i--; )
2954 VmaFree(pAllocationCallbacks, ptr);
2959 template<
typename T>
2960 class VmaStlAllocator
2963 const VkAllocationCallbacks*
const m_pCallbacks;
2964 typedef T value_type;
2966 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
2967 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
2969 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
2970 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
2972 template<
typename U>
2973 bool operator==(
const VmaStlAllocator<U>& rhs)
const 2975 return m_pCallbacks == rhs.m_pCallbacks;
2977 template<
typename U>
2978 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 2980 return m_pCallbacks != rhs.m_pCallbacks;
2983 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
2986 #if VMA_USE_STL_VECTOR 2988 #define VmaVector std::vector 2990 template<
typename T,
typename allocatorT>
2991 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
2993 vec.insert(vec.begin() + index, item);
2996 template<
typename T,
typename allocatorT>
2997 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
2999 vec.erase(vec.begin() + index);
3002 #else // #if VMA_USE_STL_VECTOR 3007 template<
typename T,
typename AllocatorT>
3011 typedef T value_type;
3013 VmaVector(
const AllocatorT& allocator) :
3014 m_Allocator(allocator),
3021 VmaVector(
size_t count,
const AllocatorT& allocator) :
3022 m_Allocator(allocator),
3023 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
3029 VmaVector(
const VmaVector<T, AllocatorT>& src) :
3030 m_Allocator(src.m_Allocator),
3031 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
3032 m_Count(src.m_Count),
3033 m_Capacity(src.m_Count)
3037 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
3043 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3046 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
3050 resize(rhs.m_Count);
3053 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
3059 bool empty()
const {
return m_Count == 0; }
3060 size_t size()
const {
return m_Count; }
3061 T* data() {
return m_pArray; }
3062 const T* data()
const {
return m_pArray; }
3064 T& operator[](
size_t index)
3066 VMA_HEAVY_ASSERT(index < m_Count);
3067 return m_pArray[index];
3069 const T& operator[](
size_t index)
const 3071 VMA_HEAVY_ASSERT(index < m_Count);
3072 return m_pArray[index];
3077 VMA_HEAVY_ASSERT(m_Count > 0);
3080 const T& front()
const 3082 VMA_HEAVY_ASSERT(m_Count > 0);
3087 VMA_HEAVY_ASSERT(m_Count > 0);
3088 return m_pArray[m_Count - 1];
3090 const T& back()
const 3092 VMA_HEAVY_ASSERT(m_Count > 0);
3093 return m_pArray[m_Count - 1];
3096 void reserve(
size_t newCapacity,
bool freeMemory =
false)
3098 newCapacity = VMA_MAX(newCapacity, m_Count);
3100 if((newCapacity < m_Capacity) && !freeMemory)
3102 newCapacity = m_Capacity;
3105 if(newCapacity != m_Capacity)
3107 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
3110 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
3112 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3113 m_Capacity = newCapacity;
3114 m_pArray = newArray;
3118 void resize(
size_t newCount,
bool freeMemory =
false)
3120 size_t newCapacity = m_Capacity;
3121 if(newCount > m_Capacity)
3123 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
3127 newCapacity = newCount;
3130 if(newCapacity != m_Capacity)
3132 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
3133 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
3134 if(elementsToCopy != 0)
3136 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
3138 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
3139 m_Capacity = newCapacity;
3140 m_pArray = newArray;
3146 void clear(
bool freeMemory =
false)
3148 resize(0, freeMemory);
3151 void insert(
size_t index,
const T& src)
3153 VMA_HEAVY_ASSERT(index <= m_Count);
3154 const size_t oldCount = size();
3155 resize(oldCount + 1);
3156 if(index < oldCount)
3158 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
3160 m_pArray[index] = src;
3163 void remove(
size_t index)
3165 VMA_HEAVY_ASSERT(index < m_Count);
3166 const size_t oldCount = size();
3167 if(index < oldCount - 1)
3169 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
3171 resize(oldCount - 1);
3174 void push_back(
const T& src)
3176 const size_t newIndex = size();
3177 resize(newIndex + 1);
3178 m_pArray[newIndex] = src;
3183 VMA_HEAVY_ASSERT(m_Count > 0);
3187 void push_front(
const T& src)
3194 VMA_HEAVY_ASSERT(m_Count > 0);
3198 typedef T* iterator;
3200 iterator begin() {
return m_pArray; }
3201 iterator end() {
return m_pArray + m_Count; }
3204 AllocatorT m_Allocator;
3210 template<
typename T,
typename allocatorT>
3211 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
3213 vec.insert(index, item);
3216 template<
typename T,
typename allocatorT>
3217 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
3222 #endif // #if VMA_USE_STL_VECTOR 3224 template<
typename CmpLess,
typename VectorT>
3225 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
3227 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3229 vector.data() + vector.size(),
3231 CmpLess()) - vector.data();
3232 VmaVectorInsert(vector, indexToInsert, value);
3233 return indexToInsert;
// Removes the first element equivalent to value (per CmpLess) from a
// sorted vector. Returns true if an element was found and removed.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    // Equivalence test: neither orders before the other.
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
// Finds the first element equivalent to value (per CmpLess) in a sorted
// vector; returns its index, or vector.size() if absent.
// Fixes: the hit test compared the iterator against vector.size() (a
// pointer vs. a count), and a non-const iterator was formed from a const
// vector — both replaced with a const element pointer compared against
// the true end of the data range.
template<typename CmpLess, typename VectorT>
size_t VmaVectorFindSorted(const VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    const typename VectorT::value_type* it = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        comparator);
    if((it != vector.data() + vector.size()) &&
        !comparator(*it, value) && !comparator(value, *it))
    {
        return it - vector.data();
    }
    return vector.size();
}
3281 template<
typename T>
3282 class VmaPoolAllocator
3284 VMA_CLASS_NO_COPY(VmaPoolAllocator)
3286 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
3287 ~VmaPoolAllocator();
3295 uint32_t NextFreeIndex;
3302 uint32_t FirstFreeIndex;
3305 const VkAllocationCallbacks* m_pAllocationCallbacks;
3306 size_t m_ItemsPerBlock;
3307 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
3309 ItemBlock& CreateNewBlock();
3312 template<
typename T>
3313 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
3314 m_pAllocationCallbacks(pAllocationCallbacks),
3315 m_ItemsPerBlock(itemsPerBlock),
3316 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
3318 VMA_ASSERT(itemsPerBlock > 0);
3321 template<
typename T>
3322 VmaPoolAllocator<T>::~VmaPoolAllocator()
3327 template<
typename T>
3328 void VmaPoolAllocator<T>::Clear()
3330 for(
size_t i = m_ItemBlocks.size(); i--; )
3331 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
3332 m_ItemBlocks.clear();
3335 template<
typename T>
3336 T* VmaPoolAllocator<T>::Alloc()
3338 for(
size_t i = m_ItemBlocks.size(); i--; )
3340 ItemBlock& block = m_ItemBlocks[i];
3342 if(block.FirstFreeIndex != UINT32_MAX)
3344 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
3345 block.FirstFreeIndex = pItem->NextFreeIndex;
3346 return &pItem->Value;
3351 ItemBlock& newBlock = CreateNewBlock();
3352 Item*
const pItem = &newBlock.pItems[0];
3353 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
3354 return &pItem->Value;
3357 template<
typename T>
3358 void VmaPoolAllocator<T>::Free(T* ptr)
3361 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
3363 ItemBlock& block = m_ItemBlocks[i];
3367 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
3370 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
3372 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
3373 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
3374 block.FirstFreeIndex = index;
3378 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
3381 template<
typename T>
3382 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
3384 ItemBlock newBlock = {
3385 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
3387 m_ItemBlocks.push_back(newBlock);
3390 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
3391 newBlock.pItems[i].NextFreeIndex = i + 1;
3392 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
3393 return m_ItemBlocks.back();
3399 #if VMA_USE_STL_LIST 3401 #define VmaList std::list 3403 #else // #if VMA_USE_STL_LIST 3405 template<
typename T>
3414 template<
typename T>
3417 VMA_CLASS_NO_COPY(VmaRawList)
3419 typedef VmaListItem<T> ItemType;
3421 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
3425 size_t GetCount()
const {
return m_Count; }
3426 bool IsEmpty()
const {
return m_Count == 0; }
3428 ItemType* Front() {
return m_pFront; }
3429 const ItemType* Front()
const {
return m_pFront; }
3430 ItemType* Back() {
return m_pBack; }
3431 const ItemType* Back()
const {
return m_pBack; }
3433 ItemType* PushBack();
3434 ItemType* PushFront();
3435 ItemType* PushBack(
const T& value);
3436 ItemType* PushFront(
const T& value);
3441 ItemType* InsertBefore(ItemType* pItem);
3443 ItemType* InsertAfter(ItemType* pItem);
3445 ItemType* InsertBefore(ItemType* pItem,
const T& value);
3446 ItemType* InsertAfter(ItemType* pItem,
const T& value);
3448 void Remove(ItemType* pItem);
3451 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
3452 VmaPoolAllocator<ItemType> m_ItemAllocator;
3458 template<
typename T>
3459 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
3460 m_pAllocationCallbacks(pAllocationCallbacks),
3461 m_ItemAllocator(pAllocationCallbacks, 128),
3468 template<
typename T>
3469 VmaRawList<T>::~VmaRawList()
3475 template<
typename T>
3476 void VmaRawList<T>::Clear()
3478 if(IsEmpty() ==
false)
3480 ItemType* pItem = m_pBack;
3481 while(pItem != VMA_NULL)
3483 ItemType*
const pPrevItem = pItem->pPrev;
3484 m_ItemAllocator.Free(pItem);
3487 m_pFront = VMA_NULL;
3493 template<
typename T>
3494 VmaListItem<T>* VmaRawList<T>::PushBack()
3496 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3497 pNewItem->pNext = VMA_NULL;
3500 pNewItem->pPrev = VMA_NULL;
3501 m_pFront = pNewItem;
3507 pNewItem->pPrev = m_pBack;
3508 m_pBack->pNext = pNewItem;
3515 template<
typename T>
3516 VmaListItem<T>* VmaRawList<T>::PushFront()
3518 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3519 pNewItem->pPrev = VMA_NULL;
3522 pNewItem->pNext = VMA_NULL;
3523 m_pFront = pNewItem;
3529 pNewItem->pNext = m_pFront;
3530 m_pFront->pPrev = pNewItem;
3531 m_pFront = pNewItem;
3537 template<
typename T>
3538 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
3540 ItemType*
const pNewItem = PushBack();
3541 pNewItem->Value = value;
3545 template<
typename T>
3546 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
3548 ItemType*
const pNewItem = PushFront();
3549 pNewItem->Value = value;
3553 template<
typename T>
3554 void VmaRawList<T>::PopBack()
3556 VMA_HEAVY_ASSERT(m_Count > 0);
3557 ItemType*
const pBackItem = m_pBack;
3558 ItemType*
const pPrevItem = pBackItem->pPrev;
3559 if(pPrevItem != VMA_NULL)
3561 pPrevItem->pNext = VMA_NULL;
3563 m_pBack = pPrevItem;
3564 m_ItemAllocator.Free(pBackItem);
3568 template<
typename T>
3569 void VmaRawList<T>::PopFront()
3571 VMA_HEAVY_ASSERT(m_Count > 0);
3572 ItemType*
const pFrontItem = m_pFront;
3573 ItemType*
const pNextItem = pFrontItem->pNext;
3574 if(pNextItem != VMA_NULL)
3576 pNextItem->pPrev = VMA_NULL;
3578 m_pFront = pNextItem;
3579 m_ItemAllocator.Free(pFrontItem);
3583 template<
typename T>
3584 void VmaRawList<T>::Remove(ItemType* pItem)
3586 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
3587 VMA_HEAVY_ASSERT(m_Count > 0);
3589 if(pItem->pPrev != VMA_NULL)
3591 pItem->pPrev->pNext = pItem->pNext;
3595 VMA_HEAVY_ASSERT(m_pFront == pItem);
3596 m_pFront = pItem->pNext;
3599 if(pItem->pNext != VMA_NULL)
3601 pItem->pNext->pPrev = pItem->pPrev;
3605 VMA_HEAVY_ASSERT(m_pBack == pItem);
3606 m_pBack = pItem->pPrev;
3609 m_ItemAllocator.Free(pItem);
3613 template<
typename T>
3614 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
3616 if(pItem != VMA_NULL)
3618 ItemType*
const prevItem = pItem->pPrev;
3619 ItemType*
const newItem = m_ItemAllocator.Alloc();
3620 newItem->pPrev = prevItem;
3621 newItem->pNext = pItem;
3622 pItem->pPrev = newItem;
3623 if(prevItem != VMA_NULL)
3625 prevItem->pNext = newItem;
3629 VMA_HEAVY_ASSERT(m_pFront == pItem);
3639 template<
typename T>
3640 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
3642 if(pItem != VMA_NULL)
3644 ItemType*
const nextItem = pItem->pNext;
3645 ItemType*
const newItem = m_ItemAllocator.Alloc();
3646 newItem->pNext = nextItem;
3647 newItem->pPrev = pItem;
3648 pItem->pNext = newItem;
3649 if(nextItem != VMA_NULL)
3651 nextItem->pPrev = newItem;
3655 VMA_HEAVY_ASSERT(m_pBack == pItem);
3665 template<
typename T>
3666 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
3668 ItemType*
const newItem = InsertBefore(pItem);
3669 newItem->Value = value;
3673 template<
typename T>
3674 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
3676 ItemType*
const newItem = InsertAfter(pItem);
3677 newItem->Value = value;
3681 template<
typename T,
typename AllocatorT>
3684 VMA_CLASS_NO_COPY(VmaList)
3695 T& operator*()
const 3697 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3698 return m_pItem->Value;
3700 T* operator->()
const 3702 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3703 return &m_pItem->Value;
3706 iterator& operator++()
3708 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3709 m_pItem = m_pItem->pNext;
3712 iterator& operator--()
3714 if(m_pItem != VMA_NULL)
3716 m_pItem = m_pItem->pPrev;
3720 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3721 m_pItem = m_pList->Back();
3726 iterator operator++(
int)
3728 iterator result = *
this;
3732 iterator operator--(
int)
3734 iterator result = *
this;
3739 bool operator==(
const iterator& rhs)
const 3741 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3742 return m_pItem == rhs.m_pItem;
3744 bool operator!=(
const iterator& rhs)
const 3746 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3747 return m_pItem != rhs.m_pItem;
3751 VmaRawList<T>* m_pList;
3752 VmaListItem<T>* m_pItem;
3754 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
3760 friend class VmaList<T, AllocatorT>;
3763 class const_iterator
3772 const_iterator(
const iterator& src) :
3773 m_pList(src.m_pList),
3774 m_pItem(src.m_pItem)
3778 const T& operator*()
const 3780 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3781 return m_pItem->Value;
3783 const T* operator->()
const 3785 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3786 return &m_pItem->Value;
3789 const_iterator& operator++()
3791 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3792 m_pItem = m_pItem->pNext;
3795 const_iterator& operator--()
3797 if(m_pItem != VMA_NULL)
3799 m_pItem = m_pItem->pPrev;
3803 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3804 m_pItem = m_pList->Back();
3809 const_iterator operator++(
int)
3811 const_iterator result = *
this;
3815 const_iterator operator--(
int)
3817 const_iterator result = *
this;
3822 bool operator==(
const const_iterator& rhs)
const 3824 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3825 return m_pItem == rhs.m_pItem;
3827 bool operator!=(
const const_iterator& rhs)
const 3829 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3830 return m_pItem != rhs.m_pItem;
3834 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
3840 const VmaRawList<T>* m_pList;
3841 const VmaListItem<T>* m_pItem;
3843 friend class VmaList<T, AllocatorT>;
3846 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
3848 bool empty()
const {
return m_RawList.IsEmpty(); }
3849 size_t size()
const {
return m_RawList.GetCount(); }
3851 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
3852 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
3854 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
3855 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
3857 void clear() { m_RawList.Clear(); }
3858 void push_back(
const T& value) { m_RawList.PushBack(value); }
3859 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
3860 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
3863 VmaRawList<T> m_RawList;
3866 #endif // #if VMA_USE_STL_LIST 3874 #if VMA_USE_STL_UNORDERED_MAP 3876 #define VmaPair std::pair 3878 #define VMA_MAP_TYPE(KeyT, ValueT) \ 3879 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 3881 #else // #if VMA_USE_STL_UNORDERED_MAP 3883 template<
typename T1,
typename T2>
3889 VmaPair() : first(), second() { }
3890 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
3896 template<
typename KeyT,
typename ValueT>
3900 typedef VmaPair<KeyT, ValueT> PairType;
3901 typedef PairType* iterator;
3903 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
3905 iterator begin() {
return m_Vector.begin(); }
3906 iterator end() {
return m_Vector.end(); }
3908 void insert(
const PairType& pair);
3909 iterator find(
const KeyT& key);
3910 void erase(iterator it);
3913 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
3916 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 3918 template<
typename FirstT,
typename SecondT>
3919 struct VmaPairFirstLess
3921 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 3923 return lhs.first < rhs.first;
3925 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 3927 return lhs.first < rhsFirst;
3931 template<
typename KeyT,
typename ValueT>
3932 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
3934 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3936 m_Vector.data() + m_Vector.size(),
3938 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
3939 VmaVectorInsert(m_Vector, indexToInsert, pair);
3942 template<
typename KeyT,
typename ValueT>
3943 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
3945 PairType* it = VmaBinaryFindFirstNotLess(
3947 m_Vector.data() + m_Vector.size(),
3949 VmaPairFirstLess<KeyT, ValueT>());
3950 if((it != m_Vector.end()) && (it->first == key))
3956 return m_Vector.end();
3960 template<
typename KeyT,
typename ValueT>
3961 void VmaMap<KeyT, ValueT>::erase(iterator it)
3963 VmaVectorRemove(m_Vector, it - m_Vector.begin());
3966 #endif // #if VMA_USE_STL_UNORDERED_MAP 3972 class VmaDeviceMemoryBlock;
// Direction of a CPU cache maintenance operation on mapped memory.
enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
3976 struct VmaAllocation_T
3978 VMA_CLASS_NO_COPY(VmaAllocation_T)
3980 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
3984 FLAG_USER_DATA_STRING = 0x01,
3988 enum ALLOCATION_TYPE
3990 ALLOCATION_TYPE_NONE,
3991 ALLOCATION_TYPE_BLOCK,
3992 ALLOCATION_TYPE_DEDICATED,
3995 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
3998 m_pUserData(VMA_NULL),
3999 m_LastUseFrameIndex(currentFrameIndex),
4000 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
4001 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
4003 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
4005 #if VMA_STATS_STRING_ENABLED 4006 m_CreationFrameIndex = currentFrameIndex;
4007 m_BufferImageUsage = 0;
4013 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
4016 VMA_ASSERT(m_pUserData == VMA_NULL);
4019 void InitBlockAllocation(
4021 VmaDeviceMemoryBlock* block,
4022 VkDeviceSize offset,
4023 VkDeviceSize alignment,
4025 VmaSuballocationType suballocationType,
4029 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4030 VMA_ASSERT(block != VMA_NULL);
4031 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4032 m_Alignment = alignment;
4034 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4035 m_SuballocationType = (uint8_t)suballocationType;
4036 m_BlockAllocation.m_hPool = hPool;
4037 m_BlockAllocation.m_Block = block;
4038 m_BlockAllocation.m_Offset = offset;
4039 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
4044 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4045 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
4046 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
4047 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
4048 m_BlockAllocation.m_Block = VMA_NULL;
4049 m_BlockAllocation.m_Offset = 0;
4050 m_BlockAllocation.m_CanBecomeLost =
true;
4053 void ChangeBlockAllocation(
4055 VmaDeviceMemoryBlock* block,
4056 VkDeviceSize offset);
4059 void InitDedicatedAllocation(
4060 uint32_t memoryTypeIndex,
4061 VkDeviceMemory hMemory,
4062 VmaSuballocationType suballocationType,
4066 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
4067 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
4068 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
4071 m_SuballocationType = (uint8_t)suballocationType;
4072 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
4073 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
4074 m_DedicatedAllocation.m_hMemory = hMemory;
4075 m_DedicatedAllocation.m_pMappedData = pMappedData;
4078 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
4079 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
4080 VkDeviceSize GetSize()
const {
return m_Size; }
4081 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
4082 void* GetUserData()
const {
return m_pUserData; }
4083 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
4084 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
4086 VmaDeviceMemoryBlock* GetBlock()
const 4088 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4089 return m_BlockAllocation.m_Block;
4091 VkDeviceSize GetOffset()
const;
4092 VkDeviceMemory GetMemory()
const;
4093 uint32_t GetMemoryTypeIndex()
const;
4094 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
4095 void* GetMappedData()
const;
4096 bool CanBecomeLost()
const;
4099 uint32_t GetLastUseFrameIndex()
const 4101 return m_LastUseFrameIndex.load();
4103 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
4105 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
4115 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4117 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
4119 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
4130 void BlockAllocMap();
4131 void BlockAllocUnmap();
4132 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
4135 #if VMA_STATS_STRING_ENABLED 4136 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
4137 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
4139 void InitBufferImageUsage(uint32_t bufferImageUsage)
4141 VMA_ASSERT(m_BufferImageUsage == 0);
4142 m_BufferImageUsage = bufferImageUsage;
4145 void PrintParameters(
class VmaJsonWriter& json)
const;
4149 VkDeviceSize m_Alignment;
4150 VkDeviceSize m_Size;
4152 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
4154 uint8_t m_SuballocationType;
4161 struct BlockAllocation
4164 VmaDeviceMemoryBlock* m_Block;
4165 VkDeviceSize m_Offset;
4166 bool m_CanBecomeLost;
4170 struct DedicatedAllocation
4172 uint32_t m_MemoryTypeIndex;
4173 VkDeviceMemory m_hMemory;
4174 void* m_pMappedData;
4180 BlockAllocation m_BlockAllocation;
4182 DedicatedAllocation m_DedicatedAllocation;
4185 #if VMA_STATS_STRING_ENABLED 4186 uint32_t m_CreationFrameIndex;
4187 uint32_t m_BufferImageUsage;
4197 struct VmaSuballocation
4199 VkDeviceSize offset;
4202 VmaSuballocationType type;
4205 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
4208 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
4223 struct VmaAllocationRequest
4225 VkDeviceSize offset;
4226 VkDeviceSize sumFreeSize;
4227 VkDeviceSize sumItemSize;
4228 VmaSuballocationList::iterator item;
4229 size_t itemsToMakeLostCount;
4231 VkDeviceSize CalcCost()
const 4233 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
4241 class VmaBlockMetadata
4243 VMA_CLASS_NO_COPY(VmaBlockMetadata)
4246 ~VmaBlockMetadata();
4247 void Init(VkDeviceSize size);
4250 bool Validate()
const;
4251 VkDeviceSize GetSize()
const {
return m_Size; }
4252 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
4253 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4254 VkDeviceSize GetUnusedRangeSizeMax()
const;
4256 bool IsEmpty()
const;
4258 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4261 #if VMA_STATS_STRING_ENABLED 4262 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
4268 bool CreateAllocationRequest(
4269 uint32_t currentFrameIndex,
4270 uint32_t frameInUseCount,
4271 VkDeviceSize bufferImageGranularity,
4272 VkDeviceSize allocSize,
4273 VkDeviceSize allocAlignment,
4274 VmaSuballocationType allocType,
4275 bool canMakeOtherLost,
4276 VmaAllocationRequest* pAllocationRequest);
4278 bool MakeRequestedAllocationsLost(
4279 uint32_t currentFrameIndex,
4280 uint32_t frameInUseCount,
4281 VmaAllocationRequest* pAllocationRequest);
4283 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4285 VkResult CheckCorruption(
const void* pBlockData);
4289 const VmaAllocationRequest& request,
4290 VmaSuballocationType type,
4291 VkDeviceSize allocSize,
4296 void FreeAtOffset(VkDeviceSize offset);
4299 VkDeviceSize m_Size;
4300 uint32_t m_FreeCount;
4301 VkDeviceSize m_SumFreeSize;
4302 VmaSuballocationList m_Suballocations;
4305 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
4307 bool ValidateFreeSuballocationList()
const;
4311 bool CheckAllocation(
4312 uint32_t currentFrameIndex,
4313 uint32_t frameInUseCount,
4314 VkDeviceSize bufferImageGranularity,
4315 VkDeviceSize allocSize,
4316 VkDeviceSize allocAlignment,
4317 VmaSuballocationType allocType,
4318 VmaSuballocationList::const_iterator suballocItem,
4319 bool canMakeOtherLost,
4320 VkDeviceSize* pOffset,
4321 size_t* itemsToMakeLostCount,
4322 VkDeviceSize* pSumFreeSize,
4323 VkDeviceSize* pSumItemSize)
const;
4325 void MergeFreeWithNext(VmaSuballocationList::iterator item);
4329 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
4332 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
4335 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
4344 class VmaDeviceMemoryBlock
4346 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
4348 VmaBlockMetadata m_Metadata;
4352 ~VmaDeviceMemoryBlock()
4354 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
4355 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
4360 uint32_t newMemoryTypeIndex,
4361 VkDeviceMemory newMemory,
4362 VkDeviceSize newSize,
4367 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
4368 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
4369 uint32_t GetId()
const {
return m_Id; }
4370 void* GetMappedData()
const {
return m_pMappedData; }
4373 bool Validate()
const;
4378 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
4381 VkResult WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
4382 VkResult ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize);
4384 VkResult BindBufferMemory(
4388 VkResult BindImageMemory(
4394 uint32_t m_MemoryTypeIndex;
4396 VkDeviceMemory m_hMemory;
4401 uint32_t m_MapCount;
4402 void* m_pMappedData;
4405 struct VmaPointerLess
4407 bool operator()(
const void* lhs,
const void* rhs)
const 4413 class VmaDefragmentator;
4421 struct VmaBlockVector
4423 VMA_CLASS_NO_COPY(VmaBlockVector)
4427 uint32_t memoryTypeIndex,
4428 VkDeviceSize preferredBlockSize,
4429 size_t minBlockCount,
4430 size_t maxBlockCount,
4431 VkDeviceSize bufferImageGranularity,
4432 uint32_t frameInUseCount,
4436 VkResult CreateMinBlocks();
4438 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
4439 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
4440 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
4441 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
4445 bool IsEmpty()
const {
return m_Blocks.empty(); }
4446 bool IsCorruptionDetectionEnabled()
const;
4450 uint32_t currentFrameIndex,
4452 VkDeviceSize alignment,
4454 VmaSuballocationType suballocType,
4463 #if VMA_STATS_STRING_ENABLED 4464 void PrintDetailedMap(
class VmaJsonWriter& json);
4467 void MakePoolAllocationsLost(
4468 uint32_t currentFrameIndex,
4469 size_t* pLostAllocationCount);
4470 VkResult CheckCorruption();
4472 VmaDefragmentator* EnsureDefragmentator(
4474 uint32_t currentFrameIndex);
4476 VkResult Defragment(
4478 VkDeviceSize& maxBytesToMove,
4479 uint32_t& maxAllocationsToMove);
4481 void DestroyDefragmentator();
4484 friend class VmaDefragmentator;
4487 const uint32_t m_MemoryTypeIndex;
4488 const VkDeviceSize m_PreferredBlockSize;
4489 const size_t m_MinBlockCount;
4490 const size_t m_MaxBlockCount;
4491 const VkDeviceSize m_BufferImageGranularity;
4492 const uint32_t m_FrameInUseCount;
4493 const bool m_IsCustomPool;
4496 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
4500 bool m_HasEmptyBlock;
4501 VmaDefragmentator* m_pDefragmentator;
4502 uint32_t m_NextBlockId;
4504 VkDeviceSize CalcMaxBlockSize()
const;
4507 void Remove(VmaDeviceMemoryBlock* pBlock);
4511 void IncrementallySortBlocks();
4513 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
4518 VMA_CLASS_NO_COPY(VmaPool_T)
4520 VmaBlockVector m_BlockVector;
4527 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
4528 uint32_t GetId()
const {
return m_Id; }
4529 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
4531 #if VMA_STATS_STRING_ENABLED 4539 class VmaDefragmentator
4541 VMA_CLASS_NO_COPY(VmaDefragmentator)
4544 VmaBlockVector*
const m_pBlockVector;
4545 uint32_t m_CurrentFrameIndex;
4546 VkDeviceSize m_BytesMoved;
4547 uint32_t m_AllocationsMoved;
4549 struct AllocationInfo
4552 VkBool32* m_pChanged;
4555 m_hAllocation(VK_NULL_HANDLE),
4556 m_pChanged(VMA_NULL)
4561 struct AllocationInfoSizeGreater
4563 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 4565 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
4570 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
4574 VmaDeviceMemoryBlock* m_pBlock;
4575 bool m_HasNonMovableAllocations;
4576 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
4578 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
4580 m_HasNonMovableAllocations(true),
4581 m_Allocations(pAllocationCallbacks),
4582 m_pMappedDataForDefragmentation(VMA_NULL)
4586 void CalcHasNonMovableAllocations()
4588 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
4589 const size_t defragmentAllocCount = m_Allocations.size();
4590 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
4593 void SortAllocationsBySizeDescecnding()
4595 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
4598 VkResult EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData);
4603 void* m_pMappedDataForDefragmentation;
4606 struct BlockPointerLess
4608 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 4610 return pLhsBlockInfo->m_pBlock < pRhsBlock;
4612 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 4614 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
4620 struct BlockInfoCompareMoveDestination
4622 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 4624 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
4628 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
4632 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
4640 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
4641 BlockInfoVector m_Blocks;
4643 VkResult DefragmentRound(
4644 VkDeviceSize maxBytesToMove,
4645 uint32_t maxAllocationsToMove);
4647 static bool MoveMakesSense(
4648 size_t dstBlockIndex, VkDeviceSize dstOffset,
4649 size_t srcBlockIndex, VkDeviceSize srcOffset);
4654 VmaBlockVector* pBlockVector,
4655 uint32_t currentFrameIndex);
4657 ~VmaDefragmentator();
4659 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
4660 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
4662 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
4664 VkResult Defragment(
4665 VkDeviceSize maxBytesToMove,
4666 uint32_t maxAllocationsToMove);
4670 struct VmaAllocator_T
4672 VMA_CLASS_NO_COPY(VmaAllocator_T)
4675 bool m_UseKhrDedicatedAllocation;
4677 bool m_AllocationCallbacksSpecified;
4678 VkAllocationCallbacks m_AllocationCallbacks;
4682 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
4683 VMA_MUTEX m_HeapSizeLimitMutex;
4685 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
4686 VkPhysicalDeviceMemoryProperties m_MemProps;
4689 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
4692 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
4693 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
4694 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
4699 const VkAllocationCallbacks* GetAllocationCallbacks()
const 4701 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
4705 return m_VulkanFunctions;
4708 VkDeviceSize GetBufferImageGranularity()
const 4711 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
4712 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
4715 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
4716 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
4718 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 4720 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
4721 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
4724 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 4726 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
4727 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
4730 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 4732 return IsMemoryTypeNonCoherent(memTypeIndex) ?
4733 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
4734 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
4737 bool IsIntegratedGpu()
const 4739 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
4742 void GetBufferMemoryRequirements(
4744 VkMemoryRequirements& memReq,
4745 bool& requiresDedicatedAllocation,
4746 bool& prefersDedicatedAllocation)
const;
4747 void GetImageMemoryRequirements(
4749 VkMemoryRequirements& memReq,
4750 bool& requiresDedicatedAllocation,
4751 bool& prefersDedicatedAllocation)
const;
4754 VkResult AllocateMemory(
4755 const VkMemoryRequirements& vkMemReq,
4756 bool requiresDedicatedAllocation,
4757 bool prefersDedicatedAllocation,
4758 VkBuffer dedicatedBuffer,
4759 VkImage dedicatedImage,
4761 VmaSuballocationType suballocType,
4767 void CalculateStats(
VmaStats* pStats);
4769 #if VMA_STATS_STRING_ENABLED 4770 void PrintDetailedMap(
class VmaJsonWriter& json);
4773 VkResult Defragment(
4775 size_t allocationCount,
4776 VkBool32* pAllocationsChanged,
4784 void DestroyPool(
VmaPool pool);
4787 void SetCurrentFrameIndex(uint32_t frameIndex);
4789 void MakePoolAllocationsLost(
4791 size_t* pLostAllocationCount);
4792 VkResult CheckPoolCorruption(
VmaPool hPool);
4793 VkResult CheckCorruption(uint32_t memoryTypeBits);
4797 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
4798 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
4803 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
4804 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
4806 void FlushOrInvalidateAllocation(
4808 VkDeviceSize offset, VkDeviceSize size,
4809 VMA_CACHE_OPERATION op);
4811 void FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern);
4814 VkDeviceSize m_PreferredLargeHeapBlockSize;
4816 VkPhysicalDevice m_PhysicalDevice;
4817 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
4819 VMA_MUTEX m_PoolsMutex;
4821 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
4822 uint32_t m_NextPoolId;
4828 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
4830 VkResult AllocateMemoryOfType(
4832 VkDeviceSize alignment,
4833 bool dedicatedAllocation,
4834 VkBuffer dedicatedBuffer,
4835 VkImage dedicatedImage,
4837 uint32_t memTypeIndex,
4838 VmaSuballocationType suballocType,
4842 VkResult AllocateDedicatedMemory(
4844 VmaSuballocationType suballocType,
4845 uint32_t memTypeIndex,
4847 bool isUserDataString,
4849 VkBuffer dedicatedBuffer,
4850 VkImage dedicatedImage,
4860 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
4862 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
4865 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
4867 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
4870 template<
typename T>
4873 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
4876 template<
typename T>
4877 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
4879 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
4882 template<
typename T>
4883 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
4888 VmaFree(hAllocator, ptr);
4892 template<
typename T>
4893 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
4897 for(
size_t i = count; i--; )
4899 VmaFree(hAllocator, ptr);
4906 #if VMA_STATS_STRING_ENABLED 4908 class VmaStringBuilder
4911 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
4912 size_t GetLength()
const {
return m_Data.size(); }
4913 const char* GetData()
const {
return m_Data.data(); }
4915 void Add(
char ch) { m_Data.push_back(ch); }
4916 void Add(
const char* pStr);
4917 void AddNewLine() { Add(
'\n'); }
4918 void AddNumber(uint32_t num);
4919 void AddNumber(uint64_t num);
4920 void AddPointer(
const void* ptr);
4923 VmaVector< char, VmaStlAllocator<char> > m_Data;
4926 void VmaStringBuilder::Add(
const char* pStr)
4928 const size_t strLen = strlen(pStr);
4931 const size_t oldCount = m_Data.size();
4932 m_Data.resize(oldCount + strLen);
4933 memcpy(m_Data.data() + oldCount, pStr, strLen);
4937 void VmaStringBuilder::AddNumber(uint32_t num)
4940 VmaUint32ToStr(buf,
sizeof(buf), num);
4944 void VmaStringBuilder::AddNumber(uint64_t num)
4947 VmaUint64ToStr(buf,
sizeof(buf), num);
4951 void VmaStringBuilder::AddPointer(
const void* ptr)
4954 VmaPtrToStr(buf,
sizeof(buf), ptr);
4958 #endif // #if VMA_STATS_STRING_ENABLED 4963 #if VMA_STATS_STRING_ENABLED 4967 VMA_CLASS_NO_COPY(VmaJsonWriter)
4969 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
4972 void BeginObject(
bool singleLine =
false);
4975 void BeginArray(
bool singleLine =
false);
4978 void WriteString(
const char* pStr);
4979 void BeginString(
const char* pStr = VMA_NULL);
4980 void ContinueString(
const char* pStr);
4981 void ContinueString(uint32_t n);
4982 void ContinueString(uint64_t n);
4983 void ContinueString_Pointer(
const void* ptr);
4984 void EndString(
const char* pStr = VMA_NULL);
4986 void WriteNumber(uint32_t n);
4987 void WriteNumber(uint64_t n);
4988 void WriteBool(
bool b);
4992 static const char*
const INDENT;
4994 enum COLLECTION_TYPE
4996 COLLECTION_TYPE_OBJECT,
4997 COLLECTION_TYPE_ARRAY,
5001 COLLECTION_TYPE type;
5002 uint32_t valueCount;
5003 bool singleLineMode;
5006 VmaStringBuilder& m_SB;
5007 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
5008 bool m_InsideString;
5010 void BeginValue(
bool isString);
5011 void WriteIndent(
bool oneLess =
false);
5014 const char*
const VmaJsonWriter::INDENT =
" ";
5016 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
5018 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
5019 m_InsideString(false)
5023 VmaJsonWriter::~VmaJsonWriter()
5025 VMA_ASSERT(!m_InsideString);
5026 VMA_ASSERT(m_Stack.empty());
5029 void VmaJsonWriter::BeginObject(
bool singleLine)
5031 VMA_ASSERT(!m_InsideString);
5037 item.type = COLLECTION_TYPE_OBJECT;
5038 item.valueCount = 0;
5039 item.singleLineMode = singleLine;
5040 m_Stack.push_back(item);
5043 void VmaJsonWriter::EndObject()
5045 VMA_ASSERT(!m_InsideString);
5050 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
5054 void VmaJsonWriter::BeginArray(
bool singleLine)
5056 VMA_ASSERT(!m_InsideString);
5062 item.type = COLLECTION_TYPE_ARRAY;
5063 item.valueCount = 0;
5064 item.singleLineMode = singleLine;
5065 m_Stack.push_back(item);
5068 void VmaJsonWriter::EndArray()
5070 VMA_ASSERT(!m_InsideString);
5075 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
5079 void VmaJsonWriter::WriteString(
const char* pStr)
5085 void VmaJsonWriter::BeginString(
const char* pStr)
5087 VMA_ASSERT(!m_InsideString);
5091 m_InsideString =
true;
5092 if(pStr != VMA_NULL && pStr[0] !=
'\0')
5094 ContinueString(pStr);
5098 void VmaJsonWriter::ContinueString(
const char* pStr)
5100 VMA_ASSERT(m_InsideString);
5102 const size_t strLen = strlen(pStr);
5103 for(
size_t i = 0; i < strLen; ++i)
5136 VMA_ASSERT(0 &&
"Character not currently supported.");
5142 void VmaJsonWriter::ContinueString(uint32_t n)
5144 VMA_ASSERT(m_InsideString);
5148 void VmaJsonWriter::ContinueString(uint64_t n)
5150 VMA_ASSERT(m_InsideString);
5154 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
5156 VMA_ASSERT(m_InsideString);
5157 m_SB.AddPointer(ptr);
5160 void VmaJsonWriter::EndString(
const char* pStr)
5162 VMA_ASSERT(m_InsideString);
5163 if(pStr != VMA_NULL && pStr[0] !=
'\0')
5165 ContinueString(pStr);
5168 m_InsideString =
false;
5171 void VmaJsonWriter::WriteNumber(uint32_t n)
5173 VMA_ASSERT(!m_InsideString);
5178 void VmaJsonWriter::WriteNumber(uint64_t n)
5180 VMA_ASSERT(!m_InsideString);
5185 void VmaJsonWriter::WriteBool(
bool b)
5187 VMA_ASSERT(!m_InsideString);
5189 m_SB.Add(b ?
"true" :
"false");
5192 void VmaJsonWriter::WriteNull()
5194 VMA_ASSERT(!m_InsideString);
5199 void VmaJsonWriter::BeginValue(
bool isString)
5201 if(!m_Stack.empty())
5203 StackItem& currItem = m_Stack.back();
5204 if(currItem.type == COLLECTION_TYPE_OBJECT &&
5205 currItem.valueCount % 2 == 0)
5207 VMA_ASSERT(isString);
5210 if(currItem.type == COLLECTION_TYPE_OBJECT &&
5211 currItem.valueCount % 2 != 0)
5215 else if(currItem.valueCount > 0)
5224 ++currItem.valueCount;
5228 void VmaJsonWriter::WriteIndent(
bool oneLess)
5230 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
5234 size_t count = m_Stack.size();
5235 if(count > 0 && oneLess)
5239 for(
size_t i = 0; i < count; ++i)
5246 #endif // #if VMA_STATS_STRING_ENABLED 5250 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
5252 if(IsUserDataString())
5254 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
5256 FreeUserDataString(hAllocator);
5258 if(pUserData != VMA_NULL)
5260 const char*
const newStrSrc = (
char*)pUserData;
5261 const size_t newStrLen = strlen(newStrSrc);
5262 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
5263 memcpy(newStrDst, newStrSrc, newStrLen + 1);
5264 m_pUserData = newStrDst;
5269 m_pUserData = pUserData;
5273 void VmaAllocation_T::ChangeBlockAllocation(
5275 VmaDeviceMemoryBlock* block,
5276 VkDeviceSize offset)
5278 VMA_ASSERT(block != VMA_NULL);
5279 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5282 if(block != m_BlockAllocation.m_Block)
5284 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
5285 if(IsPersistentMap())
5287 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
5288 block->Map(hAllocator, mapRefCount, VMA_NULL);
5291 m_BlockAllocation.m_Block = block;
5292 m_BlockAllocation.m_Offset = offset;
5295 VkDeviceSize VmaAllocation_T::GetOffset()
const 5299 case ALLOCATION_TYPE_BLOCK:
5300 return m_BlockAllocation.m_Offset;
5301 case ALLOCATION_TYPE_DEDICATED:
5309 VkDeviceMemory VmaAllocation_T::GetMemory()
const 5313 case ALLOCATION_TYPE_BLOCK:
5314 return m_BlockAllocation.m_Block->GetDeviceMemory();
5315 case ALLOCATION_TYPE_DEDICATED:
5316 return m_DedicatedAllocation.m_hMemory;
5319 return VK_NULL_HANDLE;
5323 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 5327 case ALLOCATION_TYPE_BLOCK:
5328 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
5329 case ALLOCATION_TYPE_DEDICATED:
5330 return m_DedicatedAllocation.m_MemoryTypeIndex;
5337 void* VmaAllocation_T::GetMappedData()
const 5341 case ALLOCATION_TYPE_BLOCK:
5344 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
5345 VMA_ASSERT(pBlockData != VMA_NULL);
5346 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
5353 case ALLOCATION_TYPE_DEDICATED:
5354 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
5355 return m_DedicatedAllocation.m_pMappedData;
5362 bool VmaAllocation_T::CanBecomeLost()
const 5366 case ALLOCATION_TYPE_BLOCK:
5367 return m_BlockAllocation.m_CanBecomeLost;
5368 case ALLOCATION_TYPE_DEDICATED:
5376 VmaPool VmaAllocation_T::GetPool()
const 5378 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5379 return m_BlockAllocation.m_hPool;
5382 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5384 VMA_ASSERT(CanBecomeLost());
5390 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
5393 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
5398 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
5404 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
5414 #if VMA_STATS_STRING_ENABLED 5417 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
5426 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 5428 json.WriteString(
"Type");
5429 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
5431 json.WriteString(
"Size");
5432 json.WriteNumber(m_Size);
5434 if(m_pUserData != VMA_NULL)
5436 json.WriteString(
"UserData");
5437 if(IsUserDataString())
5439 json.WriteString((
const char*)m_pUserData);
5444 json.ContinueString_Pointer(m_pUserData);
5449 json.WriteString(
"CreationFrameIndex");
5450 json.WriteNumber(m_CreationFrameIndex);
5452 json.WriteString(
"LastUseFrameIndex");
5453 json.WriteNumber(GetLastUseFrameIndex());
5455 if(m_BufferImageUsage != 0)
5457 json.WriteString(
"Usage");
5458 json.WriteNumber(m_BufferImageUsage);
5464 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
5466 VMA_ASSERT(IsUserDataString());
5467 if(m_pUserData != VMA_NULL)
5469 char*
const oldStr = (
char*)m_pUserData;
5470 const size_t oldStrLen = strlen(oldStr);
5471 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
5472 m_pUserData = VMA_NULL;
5476 void VmaAllocation_T::BlockAllocMap()
5478 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
5480 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
5486 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
5490 void VmaAllocation_T::BlockAllocUnmap()
5492 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
5494 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
5500 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
5504 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
5506 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
5510 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
5512 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
5513 *ppData = m_DedicatedAllocation.m_pMappedData;
5519 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
5520 return VK_ERROR_MEMORY_MAP_FAILED;
5525 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5526 hAllocator->m_hDevice,
5527 m_DedicatedAllocation.m_hMemory,
5532 if(result == VK_SUCCESS)
5534 m_DedicatedAllocation.m_pMappedData = *ppData;
5541 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
5543 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
5545 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
5550 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
5551 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
5552 hAllocator->m_hDevice,
5553 m_DedicatedAllocation.m_hMemory);
5558 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
5562 #if VMA_STATS_STRING_ENABLED 5564 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
5568 json.WriteString(
"Blocks");
5571 json.WriteString(
"Allocations");
5574 json.WriteString(
"UnusedRanges");
5577 json.WriteString(
"UsedBytes");
5580 json.WriteString(
"UnusedBytes");
5585 json.WriteString(
"AllocationSize");
5586 json.BeginObject(
true);
5587 json.WriteString(
"Min");
5589 json.WriteString(
"Avg");
5591 json.WriteString(
"Max");
5598 json.WriteString(
"UnusedRangeSize");
5599 json.BeginObject(
true);
5600 json.WriteString(
"Min");
5602 json.WriteString(
"Avg");
5604 json.WriteString(
"Max");
5612 #endif // #if VMA_STATS_STRING_ENABLED 5614 struct VmaSuballocationItemSizeLess
5617 const VmaSuballocationList::iterator lhs,
5618 const VmaSuballocationList::iterator rhs)
const 5620 return lhs->size < rhs->size;
5623 const VmaSuballocationList::iterator lhs,
5624 VkDeviceSize rhsSize)
const 5626 return lhs->size < rhsSize;
5633 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
5637 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
5638 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
5642 VmaBlockMetadata::~VmaBlockMetadata()
5646 void VmaBlockMetadata::Init(VkDeviceSize size)
5650 m_SumFreeSize = size;
5652 VmaSuballocation suballoc = {};
5653 suballoc.offset = 0;
5654 suballoc.size = size;
5655 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5656 suballoc.hAllocation = VK_NULL_HANDLE;
5658 m_Suballocations.push_back(suballoc);
5659 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
5661 m_FreeSuballocationsBySize.push_back(suballocItem);
// Full consistency check of the suballocation list and the sorted free-list:
// offsets must be contiguous, no two adjacent free ranges, hAllocation null
// iff range is free, and m_FreeSuballocationsBySize must be sorted and match
// the recomputed counters. Returns false (with asserts) on any violation.
// NOTE(review): interior lines (early returns, ++suballocItem, braces) are
// missing from this extract — do not compile as-is.
5664 bool VmaBlockMetadata::Validate()
const 5666 if(m_Suballocations.empty())
// Running counters recomputed from scratch and compared to cached members at the end.
5672 VkDeviceSize calculatedOffset = 0;
5674 uint32_t calculatedFreeCount = 0;
5676 VkDeviceSize calculatedSumFreeSize = 0;
// Number of free ranges large enough to appear in m_FreeSuballocationsBySize.
5679 size_t freeSuballocationsToRegister = 0;
// Tracks whether the previous range was free, to detect un-merged neighbors.
5681 bool prevFree =
false;
5683 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5684 suballocItem != m_Suballocations.cend();
5687 const VmaSuballocation& subAlloc = *suballocItem;
// Ranges must be back-to-back with no gaps.
5690 if(subAlloc.offset != calculatedOffset)
5695 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Two adjacent free ranges should have been merged.
5697 if(prevFree && currFree)
// A range is free exactly when it has no allocation handle.
5702 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
5709 calculatedSumFreeSize += subAlloc.size;
5710 ++calculatedFreeCount;
5711 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5713 ++freeSuballocationsToRegister;
// Margin must fit inside every free range when debug margins are enabled.
5717 if(subAlloc.size < VMA_DEBUG_MARGIN)
// Used range: the allocation object must agree with the metadata.
5724 if(subAlloc.hAllocation->GetOffset() != subAlloc.offset)
5728 if(subAlloc.hAllocation->GetSize() != subAlloc.size)
// With debug margin every used range must be preceded by a free one.
5734 if(VMA_DEBUG_MARGIN > 0 && !prevFree)
5740 calculatedOffset += subAlloc.size;
5741 prevFree = currFree;
// Sorted free-list must register exactly the large-enough free ranges...
5746 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
5751 VkDeviceSize lastSize = 0;
5752 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
5754 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
// ...pointing only at free ranges...
5757 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
// ...in nondecreasing size order.
5762 if(suballocItem->size < lastSize)
5767 lastSize = suballocItem->size;
// Final cross-check of recomputed totals against cached members.
5771 if(!ValidateFreeSuballocationList() ||
5772 (calculatedOffset != m_Size) ||
5773 (calculatedSumFreeSize != m_SumFreeSize) ||
5774 (calculatedFreeCount != m_FreeCount))
5782 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 5784 if(!m_FreeSuballocationsBySize.empty())
5786 return m_FreeSuballocationsBySize.back()->size;
5794 bool VmaBlockMetadata::IsEmpty()
const 5796 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Fills a VmaStatInfo with per-block statistics by walking all suballocations,
// classifying each as used or unused (free). NOTE(review): the per-range
// accumulation statements (sizes, min/max updates) are missing from this extract.
5799 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 5803 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5815 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5816 suballocItem != m_Suballocations.cend();
5819 const VmaSuballocation& suballoc = *suballocItem;
// Branch on used vs free range; accumulation lines omitted in this extract.
5820 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Accumulates this block's totals into a VmaPoolStats (sizes, counts, max
// unused range). Only the size accumulation is visible here.
5833 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 5835 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5837 inoutStats.
size += m_Size;
// Serializes this block's metadata as JSON: totals first, then one object per
// suballocation ("Offset", "Type"/"Size" for free ranges, or the allocation's
// own parameters for used ranges). Statistics-string builds only.
// NOTE(review): Begin/EndObject and Begin/EndArray calls are partially missing
// from this extract.
5844 #if VMA_STATS_STRING_ENABLED 5846 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 5850 json.WriteString(
"TotalBytes");
5851 json.WriteNumber(m_Size);
5853 json.WriteString(
"UnusedBytes");
5854 json.WriteNumber(m_SumFreeSize);
5856 json.WriteString(
"Allocations");
// Used ranges = all suballocations minus the free ones.
5857 json.WriteNumber((uint64_t)m_Suballocations.size() - m_FreeCount);
5859 json.WriteString(
"UnusedRanges");
5860 json.WriteNumber(m_FreeCount);
5862 json.WriteString(
"Suballocations");
5865 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5866 suballocItem != m_Suballocations.cend();
5867 ++suballocItem, ++i)
5869 json.BeginObject(
true);
5871 json.WriteString(
"Offset");
5872 json.WriteNumber(suballocItem->offset);
// Free ranges are printed inline; used ranges delegate to the allocation object.
5874 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5876 json.WriteString(
"Type");
5877 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE])
5879 json.WriteString(
"Size");
5880 json.WriteNumber(suballocItem->size);
5884 suballocItem->hAllocation->PrintParameters(json);
// Tries to find space for a new allocation of allocSize/allocAlignment in this
// block. Two strategies: (1) best-fit binary search over the sorted free list
// when losing other allocations is not allowed; (2) when canMakeOtherLost, a
// brute-force scan that also considers evicting "lost-able" allocations,
// choosing the candidate with the lowest CalcCost(). Returns true and fills
// *pAllocationRequest on success.
// NOTE(review): many interior lines (early returns, CheckAllocation argument
// lists, loop increments) are missing from this extract.
5894 #endif // #if VMA_STATS_STRING_ENABLED 5906 bool VmaBlockMetadata::CreateAllocationRequest(
5907 uint32_t currentFrameIndex,
5908 uint32_t frameInUseCount,
5909 VkDeviceSize bufferImageGranularity,
5910 VkDeviceSize allocSize,
5911 VkDeviceSize allocAlignment,
5912 VmaSuballocationType allocType,
5913 bool canMakeOtherLost,
5914 VmaAllocationRequest* pAllocationRequest)
5916 VMA_ASSERT(allocSize > 0);
5917 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5918 VMA_ASSERT(pAllocationRequest != VMA_NULL);
5919 VMA_HEAVY_ASSERT(Validate());
// Fast reject: without eviction, total free space must at least fit the
// request plus debug margins on both sides.
5922 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize + 2 * VMA_DEBUG_MARGIN)
5928 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
5929 if(freeSuballocCount > 0)
// Best-fit: binary-search the sorted free list for the first range that can
// hold the padded size, then try candidates in increasing size order.
5934 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5935 m_FreeSuballocationsBySize.data(),
5936 m_FreeSuballocationsBySize.data() + freeSuballocCount,
5937 allocSize + 2 * VMA_DEBUG_MARGIN,
5938 VmaSuballocationItemSizeLess());
5939 size_t index = it - m_FreeSuballocationsBySize.data();
5940 for(; index < freeSuballocCount; ++index)
5945 bufferImageGranularity,
5949 m_FreeSuballocationsBySize[index],
5951 &pAllocationRequest->offset,
5952 &pAllocationRequest->itemsToMakeLostCount,
5953 &pAllocationRequest->sumFreeSize,
5954 &pAllocationRequest->sumItemSize))
5956 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Alternative (worst-fit) path: walk candidates from largest downward.
5964 for(
size_t index = freeSuballocCount; index--; )
5969 bufferImageGranularity,
5973 m_FreeSuballocationsBySize[index],
5975 &pAllocationRequest->offset,
5976 &pAllocationRequest->itemsToMakeLostCount,
5977 &pAllocationRequest->sumFreeSize,
5978 &pAllocationRequest->sumItemSize))
5980 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
5987 if(canMakeOtherLost)
// Eviction path: scan every suballocation, allowing lost-able allocations to
// count as reclaimable space; keep the cheapest viable request.
5991 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
5992 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
5994 VmaAllocationRequest tmpAllocRequest = {};
5995 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
5996 suballocIt != m_Suballocations.end();
5999 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
6000 suballocIt->hAllocation->CanBecomeLost())
6005 bufferImageGranularity,
6011 &tmpAllocRequest.offset,
6012 &tmpAllocRequest.itemsToMakeLostCount,
6013 &tmpAllocRequest.sumFreeSize,
6014 &tmpAllocRequest.sumItemSize))
6016 tmpAllocRequest.item = suballocIt;
6018 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
6020 *pAllocationRequest = tmpAllocRequest;
// Success if at least one candidate was recorded.
6026 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
6035 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
6036 uint32_t currentFrameIndex,
6037 uint32_t frameInUseCount,
6038 VmaAllocationRequest* pAllocationRequest)
6040 while(pAllocationRequest->itemsToMakeLostCount > 0)
6042 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
6044 ++pAllocationRequest->item;
6046 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
6047 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
6048 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
6049 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
6051 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
6052 --pAllocationRequest->itemsToMakeLostCount;
6060 VMA_HEAVY_ASSERT(Validate());
6061 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
6062 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
6067 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
6069 uint32_t lostAllocationCount = 0;
6070 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
6071 it != m_Suballocations.end();
6074 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
6075 it->hAllocation->CanBecomeLost() &&
6076 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
6078 it = FreeSuballocation(it);
6079 ++lostAllocationCount;
6082 return lostAllocationCount;
6085 VkResult VmaBlockMetadata::CheckCorruption(
const void* pBlockData)
6087 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
6088 it != m_Suballocations.end();
6091 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
6093 if(!VmaValidateMagicValue(pBlockData, it->offset - VMA_DEBUG_MARGIN))
6095 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE VALIDATED ALLOCATION!");
6096 return VK_ERROR_VALIDATION_FAILED_EXT;
6098 if(!VmaValidateMagicValue(pBlockData, it->offset + it->size))
6100 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER VALIDATED ALLOCATION!");
6101 return VK_ERROR_VALIDATION_FAILED_EXT;
// Commits a previously computed VmaAllocationRequest: converts the chosen free
// range into a used suballocation, and splits off new free ranges for any
// padding before/after the allocation. Updates m_FreeCount and m_SumFreeSize.
// NOTE(review): the hAllocation parameter line, ++next, and the paddingEnd/
// paddingBegin branch headers are missing from this extract.
6109 void VmaBlockMetadata::Alloc(
6110 const VmaAllocationRequest& request,
6111 VmaSuballocationType type,
6112 VkDeviceSize allocSize,
6115 VMA_ASSERT(request.item != m_Suballocations.end());
6116 VmaSuballocation& suballoc = *request.item;
6118 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
6120 VMA_ASSERT(request.offset >= suballoc.offset);
// Leftover space before and after the allocation within the chosen free range.
6121 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
6122 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
6123 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// Remove from the free list before mutating size (it is keyed by size).
6127 UnregisterFreeSuballocation(request.item);
// Repurpose the free range as the new used suballocation.
6129 suballoc.offset = request.offset;
6130 suballoc.size = allocSize;
6131 suballoc.type = type;
6132 suballoc.hAllocation = hAllocation;
// Trailing padding becomes a new free range inserted after the allocation.
6137 VmaSuballocation paddingSuballoc = {};
6138 paddingSuballoc.offset = request.offset + allocSize;
6139 paddingSuballoc.size = paddingEnd;
6140 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6141 VmaSuballocationList::iterator next = request.item;
6143 const VmaSuballocationList::iterator paddingEndItem =
6144 m_Suballocations.insert(next, paddingSuballoc);
6145 RegisterFreeSuballocation(paddingEndItem);
// Leading padding becomes a new free range inserted before the allocation.
6151 VmaSuballocation paddingSuballoc = {};
6152 paddingSuballoc.offset = request.offset - paddingBegin;
6153 paddingSuballoc.size = paddingBegin;
6154 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6155 const VmaSuballocationList::iterator paddingBeginItem =
6156 m_Suballocations.insert(request.item, paddingSuballoc);
6157 RegisterFreeSuballocation(paddingBeginItem);
// One free range consumed; each padding split adds one back (increments elided
// in this extract).
6161 m_FreeCount = m_FreeCount - 1;
6162 if(paddingBegin > 0)
6170 m_SumFreeSize -= allocSize;
// Body fragment of VmaBlockMetadata::Free(const VmaAllocation allocation) —
// the function header line is missing from this extract. Linear-searches the
// suballocation list for the entry owning `allocation` and frees it; asserts
// if not found.
6175 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
6176 suballocItem != m_Suballocations.end();
6179 VmaSuballocation& suballoc = *suballocItem;
6180 if(suballoc.hAllocation == allocation)
6182 FreeSuballocation(suballocItem);
6183 VMA_HEAVY_ASSERT(Validate());
// Reached only when the allocation does not belong to this block.
6187 VMA_ASSERT(0 &&
"Not found!");
6190 void VmaBlockMetadata::FreeAtOffset(VkDeviceSize offset)
6192 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
6193 suballocItem != m_Suballocations.end();
6196 VmaSuballocation& suballoc = *suballocItem;
6197 if(suballoc.offset == offset)
6199 FreeSuballocation(suballocItem);
6203 VMA_ASSERT(0 &&
"Not found!");
6206 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 6208 VkDeviceSize lastSize = 0;
6209 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
6211 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
6213 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
6218 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6223 if(it->size < lastSize)
6229 lastSize = it->size;
// Core placement test: given a candidate suballocation, decides whether an
// allocation of allocSize/allocAlignment/allocType can be placed there,
// computing the final *pOffset (aligned, margin-adjusted, granularity-
// adjusted). Two major branches: canMakeOtherLost (candidate may span several
// suballocations, evicting lost-able ones; tracks itemsToMakeLostCount and
// cost sums) vs. the simple single-free-range case. bufferImageGranularity
// conflicts with neighboring ranges of a different type force extra alignment
// or rejection (see Vulkan spec on bufferImageGranularity).
// NOTE(review): many interior lines (returns, --prevSuballocItem/
// ++nextSuballocItem, braces) are missing from this extract.
6234 bool VmaBlockMetadata::CheckAllocation(
6235 uint32_t currentFrameIndex,
6236 uint32_t frameInUseCount,
6237 VkDeviceSize bufferImageGranularity,
6238 VkDeviceSize allocSize,
6239 VkDeviceSize allocAlignment,
6240 VmaSuballocationType allocType,
6241 VmaSuballocationList::const_iterator suballocItem,
6242 bool canMakeOtherLost,
6243 VkDeviceSize* pOffset,
6244 size_t* itemsToMakeLostCount,
6245 VkDeviceSize* pSumFreeSize,
6246 VkDeviceSize* pSumItemSize)
const 6248 VMA_ASSERT(allocSize > 0);
6249 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
6250 VMA_ASSERT(suballocItem != m_Suballocations.cend());
6251 VMA_ASSERT(pOffset != VMA_NULL);
6253 *itemsToMakeLostCount = 0;
6257 if(canMakeOtherLost)
// Eviction-aware path: the starting item may be free or an evictable allocation.
6259 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6261 *pSumFreeSize = suballocItem->size;
6265 if(suballocItem->hAllocation->CanBecomeLost() &&
6266 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
6268 ++*itemsToMakeLostCount;
6269 *pSumItemSize = suballocItem->size;
// Quick reject: not enough room from this offset to end of block.
6278 if(m_Size - suballocItem->offset < allocSize)
// Start at the item's offset, then push forward for margin and alignment.
6284 *pOffset = suballocItem->offset;
6287 if(VMA_DEBUG_MARGIN > 0)
6289 *pOffset += VMA_DEBUG_MARGIN;
6293 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Check previous suballocations on the same "page" for a granularity conflict.
6297 if(bufferImageGranularity > 1)
6299 bool bufferImageGranularityConflict =
false;
6300 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
6301 while(prevSuballocItem != m_Suballocations.cbegin())
6304 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
6305 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
6307 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
6309 bufferImageGranularityConflict =
true;
6317 if(bufferImageGranularityConflict)
6319 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
6325 if(*pOffset >= suballocItem->offset + suballocItem->size)
// Required total span = leading padding + allocation + trailing margin.
6331 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
6334 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
6336 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
6338 if(suballocItem->offset + totalSize > m_Size)
// Walk forward over following suballocations, accumulating free space and
// evictable allocations until the span is covered.
6345 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
6346 if(totalSize > suballocItem->size)
6348 VkDeviceSize remainingSize = totalSize - suballocItem->size;
6349 while(remainingSize > 0)
6352 if(lastSuballocItem == m_Suballocations.cend())
6356 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6358 *pSumFreeSize += lastSuballocItem->size;
6362 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
6363 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
6364 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
6366 ++*itemsToMakeLostCount;
6367 *pSumItemSize += lastSuballocItem->size;
6374 remainingSize = (lastSuballocItem->size < remainingSize) ?
6375 remainingSize - lastSuballocItem->size : 0;
// Check following suballocations on the same page; a conflicting one must
// itself be evictable or the placement fails.
6381 if(bufferImageGranularity > 1)
6383 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
6385 while(nextSuballocItem != m_Suballocations.cend())
6387 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
6388 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
6390 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
6392 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
6393 if(nextSuballoc.hAllocation->CanBecomeLost() &&
6394 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
6396 ++*itemsToMakeLostCount;
// Simple path: the candidate must be a single FREE range that fits everything.
6415 const VmaSuballocation& suballoc = *suballocItem;
6416 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
6418 *pSumFreeSize = suballoc.size;
6421 if(suballoc.size < allocSize)
6427 *pOffset = suballoc.offset;
6430 if(VMA_DEBUG_MARGIN > 0)
6432 *pOffset += VMA_DEBUG_MARGIN;
6436 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Granularity conflict with preceding ranges forces coarser alignment.
6440 if(bufferImageGranularity > 1)
6442 bool bufferImageGranularityConflict =
false;
6443 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
6444 while(prevSuballocItem != m_Suballocations.cbegin())
6447 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
6448 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
6450 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
6452 bufferImageGranularityConflict =
true;
6460 if(bufferImageGranularityConflict)
6462 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
// Padded request must still fit inside this single free range.
6467 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
6470 const VkDeviceSize requiredEndMargin = VMA_DEBUG_MARGIN;
6473 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Any granularity conflict with a following range rejects this placement.
6480 if(bufferImageGranularity > 1)
6482 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
6484 while(nextSuballocItem != m_Suballocations.cend())
6486 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
6487 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
6489 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
6508 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
6510 VMA_ASSERT(item != m_Suballocations.end());
6511 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6513 VmaSuballocationList::iterator nextItem = item;
6515 VMA_ASSERT(nextItem != m_Suballocations.end());
6516 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
6518 item->size += nextItem->size;
6520 m_Suballocations.erase(nextItem);
// Converts a used suballocation to FREE, updates counters, merges it with free
// neighbors (next and/or previous), and registers the resulting range in the
// by-size free list. Returns the iterator of the final (possibly merged) free
// range. NOTE(review): the ++m_FreeCount line, ++nextItem/--prevItem
// statements and the branch that returns prevItem after a previous-merge are
// missing from this extract.
6523 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
6526 VmaSuballocation& suballoc = *suballocItem;
6527 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6528 suballoc.hAllocation = VK_NULL_HANDLE;
6532 m_SumFreeSize += suballoc.size;
// Decide whether the new free range touches free neighbors.
6535 bool mergeWithNext =
false;
6536 bool mergeWithPrev =
false;
6538 VmaSuballocationList::iterator nextItem = suballocItem;
6540 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
6542 mergeWithNext =
true;
6545 VmaSuballocationList::iterator prevItem = suballocItem;
6546 if(suballocItem != m_Suballocations.begin())
6549 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6551 mergeWithPrev =
true;
// Neighbors must be unregistered from the size-keyed list before their size changes.
6557 UnregisterFreeSuballocation(nextItem);
6558 MergeFreeWithNext(suballocItem);
6563 UnregisterFreeSuballocation(prevItem);
6564 MergeFreeWithNext(prevItem);
6565 RegisterFreeSuballocation(prevItem);
// No previous merge: register and return the range itself.
6570 RegisterFreeSuballocation(suballocItem);
6571 return suballocItem;
6575 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
6577 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6578 VMA_ASSERT(item->size > 0);
6582 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
6584 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6586 if(m_FreeSuballocationsBySize.empty())
6588 m_FreeSuballocationsBySize.push_back(item);
6592 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
6600 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
6602 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6603 VMA_ASSERT(item->size > 0);
6607 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
6609 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6611 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
6612 m_FreeSuballocationsBySize.data(),
6613 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
6615 VmaSuballocationItemSizeLess());
6616 for(
size_t index = it - m_FreeSuballocationsBySize.data();
6617 index < m_FreeSuballocationsBySize.size();
6620 if(m_FreeSuballocationsBySize[index] == item)
6622 VmaVectorRemove(m_FreeSuballocationsBySize, index);
6625 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
6627 VMA_ASSERT(0 &&
"Not found.");
// VmaDeviceMemoryBlock constructor: starts with no VkDeviceMemory attached
// and nothing mapped. NOTE(review): an initializer (presumably m_MapCount(0),
// original line 6641) is missing from this extract — confirm against upstream.
6636 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
6637 m_Metadata(hAllocator),
6638 m_MemoryTypeIndex(UINT32_MAX),
6640 m_hMemory(VK_NULL_HANDLE),
6642 m_pMappedData(VMA_NULL)
// Attaches freshly allocated VkDeviceMemory to this block and initializes the
// metadata to one whole-block free range. Must only be called once
// (asserted by m_hMemory == VK_NULL_HANDLE).
6646 void VmaDeviceMemoryBlock::Init(
6647 uint32_t newMemoryTypeIndex,
6648 VkDeviceMemory newMemory,
6649 VkDeviceSize newSize,
6652 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
6654 m_MemoryTypeIndex = newMemoryTypeIndex;
6656 m_hMemory = newMemory;
6658 m_Metadata.Init(newSize);
6661 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
6665 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
6667 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
6668 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
6669 m_hMemory = VK_NULL_HANDLE;
6672 bool VmaDeviceMemoryBlock::Validate()
const 6674 if((m_hMemory == VK_NULL_HANDLE) ||
6675 (m_Metadata.GetSize() == 0))
6680 return m_Metadata.Validate();
6683 VkResult VmaDeviceMemoryBlock::CheckCorruption(
VmaAllocator hAllocator)
6685 void* pData =
nullptr;
6686 VkResult res = Map(hAllocator, 1, &pData);
6687 if(res != VK_SUCCESS)
6692 res = m_Metadata.CheckCorruption(pData);
6694 Unmap(hAllocator, 1);
// Reference-counted persistent mapping of the whole block. Under the block
// mutex: if already mapped, just bumps m_MapCount and returns the cached
// pointer; otherwise calls vkMapMemory once and caches the result.
// NOTE(review): the `if(m_MapCount != 0)` header, the vkMapMemory argument
// list (offset/size/flags/&m_pMappedData), the m_MapCount = count assignment
// and the final return are missing from this extract.
6699 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
// Serializes vkMapMemory/vkUnmapMemory on this VkDeviceMemory across threads.
6706 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
// Already mapped: just add references and hand out the cached pointer.
6709 m_MapCount += count;
6710 VMA_ASSERT(m_pMappedData != VMA_NULL);
6711 if(ppData != VMA_NULL)
6713 *ppData = m_pMappedData;
// First mapping: perform the actual vkMapMemory call.
6719 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6720 hAllocator->m_hDevice,
6726 if(result == VK_SUCCESS)
6728 if(ppData != VMA_NULL)
6730 *ppData = m_pMappedData;
6738 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
6745 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6746 if(m_MapCount >= count)
6748 m_MapCount -= count;
6751 m_pMappedData = VMA_NULL;
6752 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
6757 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
6761 VkResult VmaDeviceMemoryBlock::WriteMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
6763 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
6764 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
6767 VkResult res = Map(hAllocator, 1, &pData);
6768 if(res != VK_SUCCESS)
6773 VmaWriteMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN);
6774 VmaWriteMagicValue(pData, allocOffset + allocSize);
6776 Unmap(hAllocator, 1);
6781 VkResult VmaDeviceMemoryBlock::ValidateMagicValueAroundAllocation(
VmaAllocator hAllocator, VkDeviceSize allocOffset, VkDeviceSize allocSize)
6783 VMA_ASSERT(VMA_DEBUG_MARGIN > 0 && VMA_DEBUG_MARGIN % 4 == 0 && VMA_DEBUG_DETECT_CORRUPTION);
6784 VMA_ASSERT(allocOffset >= VMA_DEBUG_MARGIN);
6787 VkResult res = Map(hAllocator, 1, &pData);
6788 if(res != VK_SUCCESS)
6793 if(!VmaValidateMagicValue(pData, allocOffset - VMA_DEBUG_MARGIN))
6795 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED BEFORE FREED ALLOCATION!");
6797 else if(!VmaValidateMagicValue(pData, allocOffset + allocSize))
6799 VMA_ASSERT(0 &&
"MEMORY CORRUPTION DETECTED AFTER FREED ALLOCATION!");
6802 Unmap(hAllocator, 1);
6807 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
6812 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
6813 hAllocation->GetBlock() ==
this);
6815 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6816 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
6817 hAllocator->m_hDevice,
6820 hAllocation->GetOffset());
6823 VkResult VmaDeviceMemoryBlock::BindImageMemory(
6828 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
6829 hAllocation->GetBlock() ==
this);
6831 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6832 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
6833 hAllocator->m_hDevice,
6836 hAllocation->GetOffset());
// Fragments of three definitions whose headers/bodies are partially missing
// from this extract:
// (1) body line of a stat-info init helper — zeroes a VmaStatInfo struct;
6841 memset(&outInfo, 0,
sizeof(outInfo));
// (2) VmaPostprocessCalcStatInfo — finalizes averages in a VmaStatInfo
//     (body lines missing here);
6860 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// (3) VmaPool_T constructor — forwards the VmaPoolCreateInfo fields into the
//     pool's internal m_BlockVector (constructor argument list truncated).
6868 VmaPool_T::VmaPool_T(
6873 createInfo.memoryTypeIndex,
6874 createInfo.blockSize,
6875 createInfo.minBlockCount,
6876 createInfo.maxBlockCount,
6878 createInfo.frameInUseCount,
// VmaPool_T destructor (body missing in this extract).
6884 VmaPool_T::~VmaPool_T()
// VmaBlockVector: a growable set of VmaDeviceMemoryBlocks of one memory type,
// used both for default per-type heaps and for custom pools (isCustomPool).
// Constructor only stores configuration; blocks are created lazily.
// NOTE(review): the VmaAllocator parameter line and a trailing initializer
// (original line 6912) are missing from this extract.
6888 #if VMA_STATS_STRING_ENABLED 6890 #endif // #if VMA_STATS_STRING_ENABLED 6892 VmaBlockVector::VmaBlockVector(
6894 uint32_t memoryTypeIndex,
6895 VkDeviceSize preferredBlockSize,
6896 size_t minBlockCount,
6897 size_t maxBlockCount,
6898 VkDeviceSize bufferImageGranularity,
6899 uint32_t frameInUseCount,
6900 bool isCustomPool) :
6901 m_hAllocator(hAllocator),
6902 m_MemoryTypeIndex(memoryTypeIndex),
6903 m_PreferredBlockSize(preferredBlockSize),
6904 m_MinBlockCount(minBlockCount),
6905 m_MaxBlockCount(maxBlockCount),
6906 m_BufferImageGranularity(bufferImageGranularity),
6907 m_FrameInUseCount(frameInUseCount),
6908 m_IsCustomPool(isCustomPool),
// Block pointers use the allocator's own allocation callbacks.
6909 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
6910 m_HasEmptyBlock(false),
6911 m_pDefragmentator(VMA_NULL),
6916 VmaBlockVector::~VmaBlockVector()
6918 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
6920 for(
size_t i = m_Blocks.size(); i--; )
6922 m_Blocks[i]->Destroy(m_hAllocator);
6923 vma_delete(m_hAllocator, m_Blocks[i]);
6927 VkResult VmaBlockVector::CreateMinBlocks()
6929 for(
size_t i = 0; i < m_MinBlockCount; ++i)
6931 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
6932 if(res != VK_SUCCESS)
// Accumulates pool statistics across all blocks under the vector mutex.
// NOTE(review): the lines zero-initializing *pStats before the loop are
// missing from this extract — confirm against upstream before relying on it.
6940 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
6948 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6950 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6952 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6954 VMA_HEAVY_ASSERT(pBlock->Validate());
// Each block folds its own totals into *pStats.
6955 pBlock->m_Metadata.AddPoolStats(*pStats);
6959 bool VmaBlockVector::IsCorruptionDetectionEnabled()
const 6961 const uint32_t requiredMemFlags = VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
6962 return (VMA_DEBUG_DETECT_CORRUPTION != 0) &&
6963 (VMA_DEBUG_MARGIN > 0) &&
6964 (m_hAllocator->m_MemProps.memoryTypes[m_MemoryTypeIndex].propertyFlags & requiredMemFlags) == requiredMemFlags;
6967 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Main block-vector allocation routine. Strategy, under the vector mutex:
//  1. try to place the request in an existing block (best block first);
//  2. otherwise create a new block (for default pools, progressively halving
//     the block size up to NEW_BLOCK_SIZE_SHIFT_MAX times on oversize/failure);
//  3. otherwise, if allowed, retry up to VMA_ALLOCATION_TRY_COUNT times while
//     making other (lost-able) allocations lost, picking the cheapest request.
// Persistent-map requests Map() the block; corruption detection writes guard
// bytes around the new allocation.
// NOTE(review): numerous interior lines (parameter list, returns, argument
// lists of CreateAllocationRequest/InitBlockAllocation, canMakeOtherLost
// definition) are missing from this extract.
6969 VkResult VmaBlockVector::Allocate(
6971 uint32_t currentFrameIndex,
6973 VkDeviceSize alignment,
6975 VmaSuballocationType suballocType,
// A request larger than a whole preferred block can never fit.
6979 if(size + 2 * VMA_DEBUG_MARGIN > m_PreferredBlockSize)
6981 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6987 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Pass 1: search existing blocks for space without making anything lost.
6991 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6993 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6994 VMA_ASSERT(pCurrBlock);
6995 VmaAllocationRequest currRequest = {};
6996 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6999 m_BufferImageGranularity,
7007 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistent mapping requested: keep the block mapped for this allocation.
7011 VkResult res = pCurrBlock->Map(m_hAllocator, 1, VMA_NULL);
7012 if(res != VK_SUCCESS)
// Block is about to gain an allocation, so it is no longer the empty one.
7019 if(pCurrBlock->m_Metadata.IsEmpty())
7021 m_HasEmptyBlock =
false;
7024 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
7025 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, size, *pAllocation);
7026 (*pAllocation)->InitBlockAllocation(
7035 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
7036 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
7037 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
7038 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
7040 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
7042 if(IsCorruptionDetectionEnabled())
7044 VkResult res = pCurrBlock->WriteMagicValueAroundAllocation(m_hAllocator, currRequest.offset, size);
7045 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// Pass 2: no existing block fits — try to create a new one (capacity permitting).
7051 const bool canCreateNewBlock =
7053 (m_Blocks.size() < m_MaxBlockCount);
7056 if(canCreateNewBlock)
7059 VkDeviceSize newBlockSize = m_PreferredBlockSize;
7060 uint32_t newBlockSizeShift = 0;
7061 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Default pools heuristically start smaller when existing blocks are small.
7065 if(m_IsCustomPool ==
false)
7068 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
7069 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
7071 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
7072 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
7074 newBlockSize = smallerNewBlockSize;
7075 ++newBlockSizeShift;
7084 size_t newBlockIndex = 0;
7085 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On device-memory failure, retry with progressively halved block sizes.
7087 if(m_IsCustomPool ==
false)
7089 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
7091 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
7092 if(smallerNewBlockSize >= size)
7094 newBlockSize = smallerNewBlockSize;
7095 ++newBlockSizeShift;
7096 res = CreateBlock(newBlockSize, &newBlockIndex);
7105 if(res == VK_SUCCESS)
7107 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
7108 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= size);
7112 res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
7113 if(res != VK_SUCCESS)
// A brand-new block: the request is expected to succeed trivially.
7120 VmaAllocationRequest allocRequest;
7121 if(pBlock->m_Metadata.CreateAllocationRequest(
7124 m_BufferImageGranularity,
7131 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
7132 pBlock->m_Metadata.Alloc(allocRequest, suballocType, size, *pAllocation);
7133 (*pAllocation)->InitBlockAllocation(
7136 allocRequest.offset,
7142 VMA_HEAVY_ASSERT(pBlock->Validate());
7143 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
7144 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
7145 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
7147 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
7149 if(IsCorruptionDetectionEnabled())
7151 res = pBlock->WriteMagicValueAroundAllocation(m_hAllocator, allocRequest.offset, size);
7152 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
7159 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Pass 3: evict lost-able allocations and retry, bounded by try count.
7167 if(canMakeOtherLost)
7169 uint32_t tryIndex = 0;
7170 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
7172 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
7173 VmaAllocationRequest bestRequest = {};
7174 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Evaluate an eviction request in every block; keep the cheapest one.
7178 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
7180 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
7181 VMA_ASSERT(pCurrBlock);
7182 VmaAllocationRequest currRequest = {};
7183 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
7186 m_BufferImageGranularity,
7193 const VkDeviceSize currRequestCost = currRequest.CalcCost();
7194 if(pBestRequestBlock == VMA_NULL ||
7195 currRequestCost < bestRequestCost)
7197 pBestRequestBlock = pCurrBlock;
7198 bestRequest = currRequest;
7199 bestRequestCost = currRequestCost;
// Cost 0 means no allocation has to be lost — cannot do better.
7201 if(bestRequestCost == 0)
7209 if(pBestRequestBlock != VMA_NULL)
7213 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
7214 if(res != VK_SUCCESS)
// Eviction may fail if another thread revived an allocation — then retry.
7220 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
7226 if(pBestRequestBlock->m_Metadata.IsEmpty())
7228 m_HasEmptyBlock =
false;
7231 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
7232 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, size, *pAllocation);
7233 (*pAllocation)->InitBlockAllocation(
7242 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
7243 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
7244 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
7245 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
7247 m_hAllocator->FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
7249 if(IsCorruptionDetectionEnabled())
7251 VkResult res = pBestRequestBlock->WriteMagicValueAroundAllocation(m_hAllocator, bestRequest.offset, size);
7252 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to write magic value.");
// All retries exhausted: too many concurrent revivals.
7267 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
7269 return VK_ERROR_TOO_MANY_OBJECTS;
7273 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Returns an allocation's range to its block. Under the mutex: validates guard
// bytes (if enabled), drops a persistent mapping reference, frees the range in
// the block's metadata, then applies the empty-block heuristic: keep at most
// one empty block (beyond m_MinBlockCount) and destroy any second one. The
// actual block destruction happens after the mutex is released.
// NOTE(review): the allocation parameter line and several braces are missing
// from this extract.
7276 void VmaBlockVector::Free(
7279 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope of the lock; deletion is deferred until after unlock.
7283 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7285 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
7287 if(IsCorruptionDetectionEnabled())
7289 VkResult res = pBlock->ValidateMagicValueAroundAllocation(m_hAllocator, hAllocation->GetOffset(), hAllocation->GetSize());
7290 VMA_ASSERT(res == VK_SUCCESS &&
"Couldn't map block memory to validate magic value.");
// Persistently mapped allocations hold one Map() reference on the block.
7293 if(hAllocation->IsPersistentMap())
7295 pBlock->Unmap(m_hAllocator, 1);
7298 pBlock->m_Metadata.Free(hAllocation);
7299 VMA_HEAVY_ASSERT(pBlock->Validate());
7301 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// Block became empty: already have an empty block? Destroy this one.
7304 if(pBlock->m_Metadata.IsEmpty())
7307 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
7309 pBlockToDelete = pBlock;
// Otherwise keep it as the single cached empty block.
7315 m_HasEmptyBlock =
true;
// Block not empty, but an empty block exists: destroy the last block if empty.
7320 else if(m_HasEmptyBlock)
7322 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
7323 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
7325 pBlockToDelete = pLastBlock;
7326 m_Blocks.pop_back();
7327 m_HasEmptyBlock =
false;
7331 IncrementallySortBlocks();
// Destruction of VkDeviceMemory is done outside the mutex (it can be slow).
7336 if(pBlockToDelete != VMA_NULL)
7338 VMA_DEBUG_LOG(
" Deleted empty allocation");
7339 pBlockToDelete->Destroy(m_hAllocator);
7340 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the size of the largest existing block, scanning from the back and
// stopping early once a block of at least m_PreferredBlockSize is found
// (the early-exit body is lost in this extraction — presumably a break).
7344 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 7346 VkDeviceSize result = 0;
7347 for(
size_t i = m_Blocks.size(); i--; )
7349 result = VMA_MAX(result, m_Blocks[i]->m_Metadata.GetSize());
7350 if(result >= m_PreferredBlockSize)
// Removes pBlock from m_Blocks by linear search. Does not destroy the block.
7358 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
7360 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7362 if(m_Blocks[blockIndex] == pBlock)
7364 VmaVectorRemove(m_Blocks, blockIndex);
// One bubble-sort pass over m_Blocks ordered by ascending sum of free bytes,
// so blocks with less free space come first. A single pass amortizes sorting
// across many calls instead of fully sorting each time.
7371 void VmaBlockVector::IncrementallySortBlocks()
7374 for(
size_t i = 1; i < m_Blocks.size(); ++i)
7376 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
7378 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Allocates a new VkDeviceMemory of blockSize for this vector's memory type,
// wraps it in a VmaDeviceMemoryBlock, appends it to m_Blocks, and optionally
// reports its index via pNewBlockIndex. (Error-return path after
// AllocateVulkanMemory is missing from this extraction.)
7384 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
7386 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
7387 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
7388 allocInfo.allocationSize = blockSize;
7389 VkDeviceMemory mem = VK_NULL_HANDLE;
7390 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
7399 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
// Block Init arguments other than the size are missing from this extraction.
7403 allocInfo.allocationSize,
7406 m_Blocks.push_back(pBlock);
7407 if(pNewBlockIndex != VMA_NULL)
7409 *pNewBlockIndex = m_Blocks.size() - 1;
// Serializes this block vector's state as JSON: memory type, block size/count
// limits, frame-in-use count, and a per-block detailed map keyed by block id.
// Compiled only when VMA_STATS_STRING_ENABLED.
#if VMA_STATS_STRING_ENABLED 7417 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
7419 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Branch structure (custom pool vs. default pool) is partially lost in this
// extraction; both variants write into the same JSON writer.
7425 json.WriteString(
"MemoryTypeIndex");
7426 json.WriteNumber(m_MemoryTypeIndex);
7428 json.WriteString(
"BlockSize");
7429 json.WriteNumber(m_PreferredBlockSize);
7431 json.WriteString(
"BlockCount");
7432 json.BeginObject(
true);
7433 if(m_MinBlockCount > 0)
7435 json.WriteString(
"Min");
7436 json.WriteNumber((uint64_t)m_MinBlockCount);
7438 if(m_MaxBlockCount < SIZE_MAX)
7440 json.WriteString(
"Max");
7441 json.WriteNumber((uint64_t)m_MaxBlockCount);
7443 json.WriteString(
"Cur");
7444 json.WriteNumber((uint64_t)m_Blocks.size());
7447 if(m_FrameInUseCount > 0)
7449 json.WriteString(
"FrameInUseCount");
7450 json.WriteNumber(m_FrameInUseCount);
7455 json.WriteString(
"PreferredBlockSize");
7456 json.WriteNumber(m_PreferredBlockSize);
7459 json.WriteString(
"Blocks");
// One JSON entry per block, keyed by the block's id.
7461 for(
size_t i = 0; i < m_Blocks.size(); ++i)
7464 json.ContinueString(m_Blocks[i]->GetId());
7467 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
// Lazily creates the per-vector defragmentator (constructor arguments partly
// missing from this extraction) and returns it.
#endif // #if VMA_STATS_STRING_ENABLED 7476 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
7478 uint32_t currentFrameIndex)
7480 if(m_pDefragmentator == VMA_NULL)
7482 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
7488 return m_pDefragmentator;
// Runs the previously-populated defragmentator under the vector mutex,
// accumulates moved-bytes/moved-allocations statistics, and frees all blocks
// that became empty (down to m_MinBlockCount), recomputing m_HasEmptyBlock.
7491 VkResult VmaBlockVector::Defragment(
7493 VkDeviceSize& maxBytesToMove,
7494 uint32_t& maxAllocationsToMove)
7496 if(m_pDefragmentator == VMA_NULL)
7501 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7504 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
// Report statistics; the subtraction of moved amounts from the budgets is
// missing from this extraction — presumably maxBytesToMove/-Allocations are
// decremented here. TODO confirm against upstream.
7507 if(pDefragmentationStats != VMA_NULL)
7509 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
7510 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
7513 VMA_ASSERT(bytesMoved <= maxBytesToMove);
7514 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Free empty blocks produced by the defragmentation. Iterates backwards so
// VmaVectorRemove does not disturb the remaining indices.
7520 m_HasEmptyBlock =
false;
7521 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
7523 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
7524 if(pBlock->m_Metadata.IsEmpty())
7526 if(m_Blocks.size() > m_MinBlockCount)
7528 if(pDefragmentationStats != VMA_NULL)
7531 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
7534 VmaVectorRemove(m_Blocks, blockIndex);
7535 pBlock->Destroy(m_hAllocator);
7536 vma_delete(m_hAllocator, pBlock);
// Empty block retained because of m_MinBlockCount.
7540 m_HasEmptyBlock =
true;
// Deletes the lazily-created defragmentator, if any, and resets the pointer.
7548 void VmaBlockVector::DestroyDefragmentator()
7550 if(m_pDefragmentator != VMA_NULL)
7552 vma_delete(m_hAllocator, m_pDefragmentator);
7553 m_pDefragmentator = VMA_NULL;
// Marks as lost every allocation in every block that has not been used within
// m_FrameInUseCount frames of currentFrameIndex; optionally reports the total
// number of allocations made lost.
7557 void VmaBlockVector::MakePoolAllocationsLost(
7558 uint32_t currentFrameIndex,
7559 size_t* pLostAllocationCount)
7561 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7562 size_t lostAllocationCount = 0;
7563 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7565 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
7567 lostAllocationCount += pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
7569 if(pLostAllocationCount != VMA_NULL)
7571 *pLostAllocationCount = lostAllocationCount;
// Validates the magic values around all allocations in all blocks. Returns
// VK_ERROR_FEATURE_NOT_PRESENT when corruption detection is disabled; the
// error-propagation line after the per-block check is missing from this
// extraction (presumably `return res;`).
7575 VkResult VmaBlockVector::CheckCorruption()
7577 if(!IsCorruptionDetectionEnabled())
7579 return VK_ERROR_FEATURE_NOT_PRESENT;
7582 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7583 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7585 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
7587 VkResult res = pBlock->CheckCorruption(m_hAllocator);
7588 if(res != VK_SUCCESS)
// Accumulates per-block statistics into pStats, attributed to this vector's
// memory type and its heap (total, per-type, per-heap buckets).
7596 void VmaBlockVector::AddStats(
VmaStats* pStats)
7598 const uint32_t memTypeIndex = m_MemoryTypeIndex;
7599 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
7601 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7603 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7605 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
7607 VMA_HEAVY_ASSERT(pBlock->Validate());
// allocationStatInfo's declaration is missing from this extraction.
7609 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
7610 VmaAddStatInfo(pStats->
total, allocationStatInfo);
7611 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
7612 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: binds the defragmentator to one block vector and the current
// frame index; containers use the allocator's VkAllocationCallbacks.
7619 VmaDefragmentator::VmaDefragmentator(
7621 VmaBlockVector* pBlockVector,
7622 uint32_t currentFrameIndex) :
7623 m_hAllocator(hAllocator),
7624 m_pBlockVector(pBlockVector),
7625 m_CurrentFrameIndex(currentFrameIndex),
7627 m_AllocationsMoved(0),
7628 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
7629 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
// Destructor: releases the per-block BlockInfo objects owned by m_Blocks.
7633 VmaDefragmentator::~VmaDefragmentator()
7635 for(
size_t i = m_Blocks.size(); i--; )
7637 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers an allocation as a defragmentation candidate; pChanged (optional)
// will be set to VK_TRUE if the allocation is actually moved.
7641 void VmaDefragmentator::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
7643 AllocationInfo allocInfo;
7644 allocInfo.m_hAllocation = hAlloc;
7645 allocInfo.m_pChanged = pChanged;
7646 m_Allocations.push_back(allocInfo);
// Returns a CPU pointer to the block's memory, mapping it on demand. Prefers
// (1) a mapping already made for defragmentation, then (2) any existing
// persistent mapping, and only then (3) maps the block itself, remembering
// the pointer so Unmap() below knows to release it.
7649 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData)
7652 if(m_pMappedDataForDefragmentation)
7654 *ppMappedData = m_pMappedDataForDefragmentation;
7659 if(m_pBlock->GetMappedData())
7661 *ppMappedData = m_pBlock->GetMappedData();
7666 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
7667 *ppMappedData = m_pMappedDataForDefragmentation;
// Releases the mapping only if EnsureMapping() created one for
// defragmentation; pre-existing persistent mappings are left untouched.
7671 void VmaDefragmentator::BlockInfo::Unmap(
VmaAllocator hAllocator)
7673 if(m_pMappedDataForDefragmentation != VMA_NULL)
7675 m_pBlock->Unmap(hAllocator, 1);
// One round of moves: walks allocations from the last block backwards and
// tries to re-place each into an earlier block (or earlier offset). Stops
// with VK_INCOMPLETE when either budget (bytes or allocation count) would be
// exceeded. Several loop-control and early-return lines are missing from
// this extraction; code kept byte-identical.
7679 VkResult VmaDefragmentator::DefragmentRound(
7680 VkDeviceSize maxBytesToMove,
7681 uint32_t maxAllocationsToMove)
7683 if(m_Blocks.empty())
// Start from the last (most movable-from) block and its last allocation.
7688 size_t srcBlockIndex = m_Blocks.size() - 1;
7689 size_t srcAllocIndex = SIZE_MAX;
// Advance to the previous non-empty source block when the current one is
// exhausted. srcAllocIndex == SIZE_MAX means "not yet positioned".
7695 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
7697 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
7700 if(srcBlockIndex == 0)
7707 srcAllocIndex = SIZE_MAX;
7712 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
7716 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
7717 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
7719 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
7720 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
7721 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
7722 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to (and including) the source.
7725 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
7727 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
7728 VmaAllocationRequest dstAllocRequest;
7729 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
7730 m_CurrentFrameIndex,
7731 m_pBlockVector->GetFrameInUseCount(),
7732 m_pBlockVector->GetBufferImageGranularity(),
7737 &dstAllocRequest) &&
// MoveMakesSense filters out moves that don't reduce fragmentation.
7739 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
7741 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Respect both budgets before committing the move.
7744 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
7745 (m_BytesMoved + size > maxBytesToMove))
7747 return VK_INCOMPLETE;
7750 void* pDstMappedData = VMA_NULL;
7751 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
7752 if(res != VK_SUCCESS)
7757 void* pSrcMappedData = VMA_NULL;
7758 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
7759 if(res != VK_SUCCESS)
// Copy the allocation's bytes to its new offset (memcpy call header lost
// in this extraction).
7766 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
7767 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
7768 static_cast<size_t>(size));
// Re-write the corruption-detection magic values around the new location.
7770 if(VMA_DEBUG_MARGIN > 0)
7772 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset - VMA_DEBUG_MARGIN);
7773 VmaWriteMagicValue(pDstMappedData, dstAllocRequest.offset + size);
// Commit: register at destination, free at source, repoint the allocation.
7776 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
7777 pSrcBlockInfo->m_pBlock->m_Metadata.FreeAtOffset(srcOffset);
7779 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
7781 if(allocInfo.m_pChanged != VMA_NULL)
7783 *allocInfo.m_pChanged = VK_TRUE;
7786 ++m_AllocationsMoved;
7787 m_BytesMoved += size;
7789 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Step to the next candidate (decrement logic partially lost in extraction).
7797 if(srcAllocIndex > 0)
7803 if(srcBlockIndex > 0)
7806 srcAllocIndex = SIZE_MAX;
// Drives defragmentation: builds per-block info, distributes the registered
// allocations into their blocks (binary search over block pointers), sorts
// blocks/allocations into move-friendly order, runs up to two rounds of
// DefragmentRound, then unmaps everything that was mapped for the operation.
7816 VkResult VmaDefragmentator::Defragment(
7817 VkDeviceSize maxBytesToMove,
7818 uint32_t maxAllocationsToMove)
7820 if(m_Allocations.empty())
// One BlockInfo per block in the owning vector.
7826 const size_t blockCount = m_pBlockVector->m_Blocks.size();
7827 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7829 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
7830 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
7831 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so allocations can be matched by binary search.
7835 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Move each registered (non-lost) allocation into its block's list.
7838 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
7840 AllocationInfo& allocInfo = m_Allocations[blockIndex];
7842 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7844 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
7845 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
7846 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
7848 (*it)->m_Allocations.push_back(allocInfo);
7856 m_Allocations.clear();
7858 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7860 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
7861 pBlockInfo->CalcHasNonMovableAllocations();
// (Sic: "Descecnding" is the upstream identifier's spelling.)
7862 pBlockInfo->SortAllocationsBySizeDescecnding();
// Prefer destinations with non-movable allocations / less free space first.
7866 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Two rounds maximum; a round returning VK_INCOMPLETE ends the loop.
7869 VkResult result = VK_SUCCESS;
7870 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
7872 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
7876 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7878 m_Blocks[blockIndex]->Unmap(m_hAllocator);
// A move is worthwhile only if it lands in an earlier block, or at a lower
// offset within the same block (return statements lost in this extraction:
// presumably true / false / true respectively).
7884 bool VmaDefragmentator::MoveMakesSense(
7885 size_t dstBlockIndex, VkDeviceSize dstOffset,
7886 size_t srcBlockIndex, VkDeviceSize srcOffset)
7888 if(dstBlockIndex < srcBlockIndex)
7892 if(dstBlockIndex > srcBlockIndex)
7896 if(dstOffset < srcOffset)
// NOTE(review): this appears to be the VmaAllocator_T constructor body — its
// signature line is missing from this extraction. It copies creation
// parameters, zero-initializes cached device data, imports Vulkan function
// pointers, queries device/memory properties, applies heap size limits, and
// creates one block vector plus one dedicated-allocation list per memory type.
m_hDevice(pCreateInfo->device),
m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
*pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
m_PreferredLargeHeapBlockSize(0),
m_PhysicalDevice(pCreateInfo->physicalDevice),
m_CurrentFrameIndex(0),
m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
// Corruption detection writes uint32_t magic values, so the margin must be a
// multiple of 4 bytes.
if(VMA_DEBUG_DETECT_CORRUPTION)
VMA_ASSERT(VMA_DEBUG_MARGIN %
sizeof(uint32_t) == 0);
// Requesting KHR dedicated allocation while the extension support was
// compiled out is a usage error.
#if !(VMA_DEDICATED_ALLOCATION) 7930 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
memset(&m_MemProps, 0,
sizeof(m_MemProps));
memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// Default: no per-heap size limit.
for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
(*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
(*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Apply optional user-provided heap limits; also clamp the reported heap
// sizes so block-size heuristics see the limited size.
for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
if(limit != VK_WHOLE_SIZE)
m_HeapSizeLimit[heapIndex] = limit;
if(limit < m_MemProps.memoryHeaps[heapIndex].size)
m_MemProps.memoryHeaps[heapIndex].size = limit;
for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
GetBufferImageGranularity(),
m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
// Destructor: all user pools must already be destroyed; frees the per-type
// dedicated-allocation lists and block vectors in reverse order.
7996 VmaAllocator_T::~VmaAllocator_T()
7998 VMA_ASSERT(m_Pools.empty());
8000 for(
size_t i = GetMemoryTypeCount(); i--; )
8002 vma_delete(
this, m_pDedicatedAllocations[i]);
8003 vma_delete(
this, m_pBlockVectors[i]);
// Fills m_VulkanFunctions: statically-linked entry points first (when
// VMA_STATIC_VULKAN_FUNCTIONS == 1, with KHR dedicated-allocation functions
// fetched via vkGetDeviceProcAddr), then overrides from the user-supplied
// table, and finally asserts every required pointer is set.
8007 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
8009 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 8010 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
8011 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
8012 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
8013 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
8014 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
8015 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
8016 m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
8017 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
8018 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
8019 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
8020 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
8021 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
8022 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
8023 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
8024 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
8025 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// KHR extension functions have no static prototypes; load them dynamically.
8026 #if VMA_DEDICATED_ALLOCATION 8027 if(m_UseKhrDedicatedAllocation)
8029 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
8030 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
8031 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
8032 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// User-provided pointers override any statically-imported ones.
8034 #endif // #if VMA_DEDICATED_ALLOCATION 8035 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 8037 #define VMA_COPY_IF_NOT_NULL(funcName) \ 8038 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 8040 if(pVulkanFunctions != VMA_NULL)
8042 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
8043 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
8044 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
8045 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
8046 VMA_COPY_IF_NOT_NULL(vkMapMemory);
8047 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
8048 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
8049 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
8050 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
8051 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
8052 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
8053 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
8054 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
8055 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
8056 VMA_COPY_IF_NOT_NULL(vkCreateImage);
8057 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
8058 #if VMA_DEDICATED_ALLOCATION 8059 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
8060 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// After all sources, every required entry point must be non-null.
8064 #undef VMA_COPY_IF_NOT_NULL 8068 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
8069 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
8070 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
8071 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
8072 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
8073 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
8074 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
8075 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
8076 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
8077 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
8078 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
8079 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
8080 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
8081 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
8082 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
8083 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
8084 #if VMA_DEDICATED_ALLOCATION 8085 if(m_UseKhrDedicatedAllocation)
8087 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
8088 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
// Preferred VkDeviceMemory block size for a memory type: 1/8 of the heap for
// "small" heaps (<= VMA_SMALL_HEAP_MAX_SIZE), otherwise the configured
// large-heap block size.
8093 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
8095 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
8096 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
8097 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
8098 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates within a specific memory type: dedicated memory when preferred or
// required, otherwise the type's block vector, with a dedicated-memory
// fallback when block allocation fails. Several condition and argument lines
// are missing from this extraction; code kept byte-identical.
8101 VkResult VmaAllocator_T::AllocateMemoryOfType(
8103 VkDeviceSize alignment,
8104 bool dedicatedAllocation,
8105 VkBuffer dedicatedBuffer,
8106 VkImage dedicatedImage,
8108 uint32_t memTypeIndex,
8109 VmaSuballocationType suballocType,
8112 VMA_ASSERT(pAllocation != VMA_NULL);
8113 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Guard (condition head lost in extraction): mapped allocations need a
// HOST_VISIBLE memory type.
8119 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
8124 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
8125 VMA_ASSERT(blockVector);
// Heuristic: allocations larger than half the preferred block size go
// dedicated (also when explicitly requested or forced by debug flag).
8127 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
8128 bool preferDedicatedMemory =
8129 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
8130 dedicatedAllocation ||
8132 size > preferredBlockSize / 2;
8134 if(preferDedicatedMemory &&
8136 finalCreateInfo.
pool == VK_NULL_HANDLE)
8145 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
8149 return AllocateDedicatedMemory(
// Primary path: sub-allocate from the block vector.
8163 VkResult res = blockVector->Allocate(
8165 m_CurrentFrameIndex.load(),
8171 if(res == VK_SUCCESS)
8179 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: block allocation failed — try a dedicated allocation.
8183 res = AllocateDedicatedMemory(
8189 finalCreateInfo.pUserData,
8193 if(res == VK_SUCCESS)
8196 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
8202 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Creates a dedicated (non-sub-allocated) VkDeviceMemory for one resource,
// optionally chaining VkMemoryDedicatedAllocateInfoKHR for the given buffer
// or image, optionally persistently mapping it, and registering the result in
// the per-type sorted dedicated-allocations list.
8209 VkResult VmaAllocator_T::AllocateDedicatedMemory(
8211 VmaSuballocationType suballocType,
8212 uint32_t memTypeIndex,
8214 bool isUserDataString,
8216 VkBuffer dedicatedBuffer,
8217 VkImage dedicatedImage,
8220 VMA_ASSERT(pAllocation);
8222 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
8223 allocInfo.memoryTypeIndex = memTypeIndex;
8224 allocInfo.allocationSize = size;
// At most one of dedicatedBuffer / dedicatedImage may be set.
8226 #if VMA_DEDICATED_ALLOCATION 8227 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
8228 if(m_UseKhrDedicatedAllocation)
8230 if(dedicatedBuffer != VK_NULL_HANDLE)
8232 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
8233 dedicatedAllocInfo.buffer = dedicatedBuffer;
8234 allocInfo.pNext = &dedicatedAllocInfo;
8236 else if(dedicatedImage != VK_NULL_HANDLE)
8238 dedicatedAllocInfo.image = dedicatedImage;
8239 allocInfo.pNext = &dedicatedAllocInfo;
8242 #endif // #if VMA_DEDICATED_ALLOCATION 8245 VkDeviceMemory hMemory = VK_NULL_HANDLE;
8246 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
8249 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Optional persistent map (enclosing condition lost in extraction); on map
// failure the fresh memory is released before returning.
8253 void* pMappedData = VMA_NULL;
8256 res = (*m_VulkanFunctions.vkMapMemory)(
8265 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
8266 FreeVulkanMemory(memTypeIndex, size, hMemory);
8271 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
8272 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
8273 (*pAllocation)->SetUserData(
this, pUserData);
8274 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
8276 FillAllocation(*pAllocation, VMA_ALLOCATION_FILL_PATTERN_CREATED);
// Register so the allocation is discoverable for stats and FreeDedicatedMemory.
8281 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8282 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
8283 VMA_ASSERT(pDedicatedAllocations);
8284 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
8287 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries a buffer's memory requirements. With VK_KHR_dedicated_allocation
// enabled it uses vkGetBufferMemoryRequirements2KHR and also reports the
// requires/prefers dedicated-allocation hints; otherwise it falls back to the
// core query and reports both hints as false.
8292 void VmaAllocator_T::GetBufferMemoryRequirements(
8294 VkMemoryRequirements& memReq,
8295 bool& requiresDedicatedAllocation,
8296 bool& prefersDedicatedAllocation)
const 8298 #if VMA_DEDICATED_ALLOCATION 8299 if(m_UseKhrDedicatedAllocation)
8301 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
8302 memReqInfo.buffer = hBuffer;
8304 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
8306 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
8307 memReq2.pNext = &memDedicatedReq;
8309 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
8311 memReq = memReq2.memoryRequirements;
8312 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
8313 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core-1.0 query, no dedicated-allocation information available.
8316 #endif // #if VMA_DEDICATED_ALLOCATION 8318 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
8319 requiresDedicatedAllocation =
false;
8320 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR (with dedicated-allocation hints) when the
// extension is enabled, otherwise the core query with both hints false.
8324 void VmaAllocator_T::GetImageMemoryRequirements(
8326 VkMemoryRequirements& memReq,
8327 bool& requiresDedicatedAllocation,
8328 bool& prefersDedicatedAllocation)
const 8330 #if VMA_DEDICATED_ALLOCATION 8331 if(m_UseKhrDedicatedAllocation)
8333 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
8334 memReqInfo.image = hImage;
8336 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
8338 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
8339 memReq2.pNext = &memDedicatedReq;
8341 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
8343 memReq = memReq2.memoryRequirements;
8344 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
8345 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core-1.0 query, no dedicated-allocation information available.
8348 #endif // #if VMA_DEDICATED_ALLOCATION 8350 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
8351 requiresDedicatedAllocation =
false;
8352 prefersDedicatedAllocation =
false;
// Top-level allocation entry: validates mutually exclusive create flags,
// routes pool allocations to the pool's block vector, and otherwise iterates
// candidate memory types (best first, each masked out on failure) calling
// AllocateMemoryOfType. Several flag-test and argument lines are missing from
// this extraction; code kept byte-identical.
8356 VkResult VmaAllocator_T::AllocateMemory(
8357 const VkMemoryRequirements& vkMemReq,
8358 bool requiresDedicatedAllocation,
8359 bool prefersDedicatedAllocation,
8360 VkBuffer dedicatedBuffer,
8361 VkImage dedicatedImage,
8363 VmaSuballocationType suballocType,
// Invalid flag combinations (the if-conditions are lost in this extraction).
8369 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
8370 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
8375 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
8376 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Requirements that conflict with a mandatory dedicated allocation.
8378 if(requiresDedicatedAllocation)
8382 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
8383 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
8385 if(createInfo.
pool != VK_NULL_HANDLE)
8387 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
8388 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
8391 if((createInfo.
pool != VK_NULL_HANDLE) &&
8394 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
8395 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Pool path: allocate straight from the pool's block vector, honoring the
// memory type's minimum alignment.
8398 if(createInfo.
pool != VK_NULL_HANDLE)
8400 const VkDeviceSize alignmentForPool = VMA_MAX(
8402 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
8403 return createInfo.
pool->m_BlockVector.Allocate(
8405 m_CurrentFrameIndex.load(),
// Default path: find the best memory type, try it, and on failure mask it
// out of memoryTypeBits and retry with the next-best type.
8415 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
8416 uint32_t memTypeIndex = UINT32_MAX;
8418 if(res == VK_SUCCESS)
8420 VkDeviceSize alignmentForMemType = VMA_MAX(
8422 GetMemoryTypeMinAlignment(memTypeIndex));
8424 res = AllocateMemoryOfType(
8426 alignmentForMemType,
8427 requiresDedicatedAllocation || prefersDedicatedAllocation,
8435 if(res == VK_SUCCESS)
// Exclude the failed type and search again.
8445 memoryTypeBits &= ~(1u << memTypeIndex);
8448 if(res == VK_SUCCESS)
8450 alignmentForMemType = VMA_MAX(
8452 GetMemoryTypeMinAlignment(memTypeIndex));
8454 res = AllocateMemoryOfType(
8456 alignmentForMemType,
8457 requiresDedicatedAllocation || prefersDedicatedAllocation,
8465 if(res == VK_SUCCESS)
8475 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation: unless it is already lost, dispatches by allocation
// type (block suballocation → owning block vector; dedicated →
// FreeDedicatedMemory), then clears user data and deletes the handle object.
8486 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
8488 VMA_ASSERT(allocation);
// A lost allocation's backing memory was already reclaimed; skip the free.
8490 if(allocation->CanBecomeLost() ==
false ||
8491 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
8493 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS)
8495 FillAllocation(allocation, VMA_ALLOCATION_FILL_PATTERN_DESTROYED);
8498 switch(allocation->GetType())
8500 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// Custom-pool allocations free into the pool's vector; default allocations
// into the per-memory-type vector.
8502 VmaBlockVector* pBlockVector = VMA_NULL;
8503 VmaPool hPool = allocation->GetPool();
8504 if(hPool != VK_NULL_HANDLE)
8506 pBlockVector = &hPool->m_BlockVector;
8510 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
8511 pBlockVector = m_pBlockVectors[memTypeIndex];
8513 pBlockVector->Free(allocation);
8516 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8517 FreeDedicatedMemory(allocation);
8524 allocation->SetUserData(
this, VMA_NULL);
8525 vma_delete(
this, allocation);
// Aggregates statistics across default block vectors, custom pools, and
// dedicated allocations into pStats (total / per-type / per-heap), then
// post-processes each StatInfo (averages etc.).
8528 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
8531 InitStatInfo(pStats->
total);
// Per-type and per-heap buckets are initialized too (InitStatInfo calls for
// the loop bodies are missing from this extraction).
8532 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
8534 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default pools.
8538 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8540 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
8541 VMA_ASSERT(pBlockVector);
8542 pBlockVector->AddStats(pStats);
// Custom pools.
8547 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8548 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
8550 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
// Dedicated allocations, per memory type under each type's mutex.
8555 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8557 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
8558 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8559 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
8560 VMA_ASSERT(pDedicatedAllocVector);
8561 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
8564 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
8565 VmaAddStatInfo(pStats->
total, allocationStatInfo);
8566 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
8567 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Finalize derived fields in every bucket.
8572 VmaPostprocessCalcStatInfo(pStats->
total);
8573 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
8574 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
8575 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
8576 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// AMD's PCI vendor ID: 4098 == 0x1002.
8579 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Allocator-wide defragmentation: dispatches each movable, HOST_VISIBLE,
// non-lost block allocation into its block vector's defragmentator, runs
// Defragment over every default-pool vector and custom pool sharing one
// bytes/allocations budget, then tears all defragmentators down.
8581 VkResult VmaAllocator_T::Defragment(
8583 size_t allocationCount,
8584 VkBool32* pAllocationsChanged,
// NOTE(review): this memset zeroes sizeof(*pAllocationsChanged) bytes — one
// element, not allocationCount of them. Looks like a bug; compare upstream
// VMA, which multiplies by allocationCount. Left as-is in this doc pass.
8588 if(pAllocationsChanged != VMA_NULL)
8590 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
8592 if(pDefragmentationStats != VMA_NULL)
8594 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
8597 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
8599 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
8601 const size_t poolCount = m_Pools.size();
// Phase 1: route each candidate allocation to its vector's defragmentator.
8604 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
8608 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
// Only block (non-dedicated), HOST_VISIBLE, non-lost allocations can move.
8610 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
8612 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
8614 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
8616 VmaBlockVector* pAllocBlockVector = VMA_NULL;
8618 const VmaPool hAllocPool = hAlloc->GetPool();
8620 if(hAllocPool != VK_NULL_HANDLE)
8622 pAllocBlockVector = &hAllocPool->GetBlockVector();
8627 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
8630 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
8632 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
8633 &pAllocationsChanged[allocIndex] : VMA_NULL;
8634 pDefragmentator->AddAllocation(hAlloc, pChanged);
// Phase 2: run defragmentation with a shared budget (unlimited by default;
// overridden from pDefragmentationInfo when provided — those lines are
// missing from this extraction).
8638 VkResult result = VK_SUCCESS;
8642 VkDeviceSize maxBytesToMove = SIZE_MAX;
8643 uint32_t maxAllocationsToMove = UINT32_MAX;
8644 if(pDefragmentationInfo != VMA_NULL)
8651 for(uint32_t memTypeIndex = 0;
8652 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
8656 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8658 result = m_pBlockVectors[memTypeIndex]->Defragment(
8659 pDefragmentationStats,
8661 maxAllocationsToMove);
8666 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
8668 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
8669 pDefragmentationStats,
8671 maxAllocationsToMove);
// Phase 3: destroy every defragmentator created in phase 1.
8677 for(
size_t poolIndex = poolCount; poolIndex--; )
8679 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
8683 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
8685 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8687 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
8696 if(hAllocation->CanBecomeLost())
8702 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
8703 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
8706 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
8710 pAllocationInfo->
offset = 0;
8711 pAllocationInfo->
size = hAllocation->GetSize();
8713 pAllocationInfo->
pUserData = hAllocation->GetUserData();
8716 else if(localLastUseFrameIndex == localCurrFrameIndex)
8718 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
8719 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
8720 pAllocationInfo->
offset = hAllocation->GetOffset();
8721 pAllocationInfo->
size = hAllocation->GetSize();
8723 pAllocationInfo->
pUserData = hAllocation->GetUserData();
8728 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
8730 localLastUseFrameIndex = localCurrFrameIndex;
8737 #if VMA_STATS_STRING_ENABLED 8738 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
8739 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
8742 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
8743 if(localLastUseFrameIndex == localCurrFrameIndex)
8749 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
8751 localLastUseFrameIndex = localCurrFrameIndex;
8757 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
8758 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
8759 pAllocationInfo->
offset = hAllocation->GetOffset();
8760 pAllocationInfo->
size = hAllocation->GetSize();
8761 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
8762 pAllocationInfo->
pUserData = hAllocation->GetUserData();
8766 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
8769 if(hAllocation->CanBecomeLost())
8771 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
8772 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
8775 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
8779 else if(localLastUseFrameIndex == localCurrFrameIndex)
8785 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
8787 localLastUseFrameIndex = localCurrFrameIndex;
8794 #if VMA_STATS_STRING_ENABLED 8795 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
8796 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
8799 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
8800 if(localLastUseFrameIndex == localCurrFrameIndex)
8806 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
8808 localLastUseFrameIndex = localCurrFrameIndex;
8820 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
8833 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
8835 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
8836 if(res != VK_SUCCESS)
8838 vma_delete(
this, *pPool);
8845 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8846 (*pPool)->SetId(m_NextPoolId++);
8847 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
8853 void VmaAllocator_T::DestroyPool(
VmaPool pool)
8857 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8858 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
8859 VMA_ASSERT(success &&
"Pool not found in Allocator.");
8862 vma_delete(
this, pool);
8867 pool->m_BlockVector.GetPoolStats(pPoolStats);
8870 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
8872 m_CurrentFrameIndex.store(frameIndex);
8875 void VmaAllocator_T::MakePoolAllocationsLost(
8877 size_t* pLostAllocationCount)
8879 hPool->m_BlockVector.MakePoolAllocationsLost(
8880 m_CurrentFrameIndex.load(),
8881 pLostAllocationCount);
8884 VkResult VmaAllocator_T::CheckPoolCorruption(
VmaPool hPool)
8886 return hPool->m_BlockVector.CheckCorruption();
8889 VkResult VmaAllocator_T::CheckCorruption(uint32_t memoryTypeBits)
8891 VkResult finalRes = VK_ERROR_FEATURE_NOT_PRESENT;
8894 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8896 if(((1u << memTypeIndex) & memoryTypeBits) != 0)
8898 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
8899 VMA_ASSERT(pBlockVector);
8900 VkResult localRes = pBlockVector->CheckCorruption();
8903 case VK_ERROR_FEATURE_NOT_PRESENT:
8906 finalRes = VK_SUCCESS;
8916 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8917 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
8919 if(((1u << m_Pools[poolIndex]->GetBlockVector().GetMemoryTypeIndex()) & memoryTypeBits) != 0)
8921 VkResult localRes = m_Pools[poolIndex]->GetBlockVector().CheckCorruption();
8924 case VK_ERROR_FEATURE_NOT_PRESENT:
8927 finalRes = VK_SUCCESS;
8939 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
8941 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
8942 (*pAllocation)->InitLost();
8945 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
8947 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
8950 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
8952 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
8953 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
8955 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
8956 if(res == VK_SUCCESS)
8958 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
8963 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
8968 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
8971 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
8973 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
8979 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
8981 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
8983 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
8986 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
8988 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
8989 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
8991 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
8992 m_HeapSizeLimit[heapIndex] += size;
8996 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
8998 if(hAllocation->CanBecomeLost())
9000 return VK_ERROR_MEMORY_MAP_FAILED;
9003 switch(hAllocation->GetType())
9005 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
9007 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
9008 char *pBytes = VMA_NULL;
9009 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
9010 if(res == VK_SUCCESS)
9012 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
9013 hAllocation->BlockAllocMap();
9017 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
9018 return hAllocation->DedicatedAllocMap(
this, ppData);
9021 return VK_ERROR_MEMORY_MAP_FAILED;
9027 switch(hAllocation->GetType())
9029 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
9031 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
9032 hAllocation->BlockAllocUnmap();
9033 pBlock->Unmap(
this, 1);
9036 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
9037 hAllocation->DedicatedAllocUnmap(
this);
9044 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
9046 VkResult res = VK_SUCCESS;
9047 switch(hAllocation->GetType())
9049 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
9050 res = GetVulkanFunctions().vkBindBufferMemory(
9053 hAllocation->GetMemory(),
9056 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
9058 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
9059 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
9060 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
9069 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
9071 VkResult res = VK_SUCCESS;
9072 switch(hAllocation->GetType())
9074 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
9075 res = GetVulkanFunctions().vkBindImageMemory(
9078 hAllocation->GetMemory(),
9081 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
9083 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
9084 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
9085 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
9094 void VmaAllocator_T::FlushOrInvalidateAllocation(
9096 VkDeviceSize offset, VkDeviceSize size,
9097 VMA_CACHE_OPERATION op)
9099 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
9100 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
9102 const VkDeviceSize allocationSize = hAllocation->GetSize();
9103 VMA_ASSERT(offset <= allocationSize);
9105 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
9107 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
9108 memRange.memory = hAllocation->GetMemory();
9110 switch(hAllocation->GetType())
9112 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
9113 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
9114 if(size == VK_WHOLE_SIZE)
9116 memRange.size = allocationSize - memRange.offset;
9120 VMA_ASSERT(offset + size <= allocationSize);
9121 memRange.size = VMA_MIN(
9122 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
9123 allocationSize - memRange.offset);
9127 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
9130 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
9131 if(size == VK_WHOLE_SIZE)
9133 size = allocationSize - offset;
9137 VMA_ASSERT(offset + size <= allocationSize);
9139 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
9142 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
9143 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
9144 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_Metadata.GetSize();
9145 memRange.offset += allocationOffset;
9146 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
9157 case VMA_CACHE_FLUSH:
9158 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
9160 case VMA_CACHE_INVALIDATE:
9161 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
9170 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
9172 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
9174 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
9176 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
9177 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
9178 VMA_ASSERT(pDedicatedAllocations);
9179 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
9180 VMA_ASSERT(success);
9183 VkDeviceMemory hMemory = allocation->GetMemory();
9185 if(allocation->GetMappedData() != VMA_NULL)
9187 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
9190 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
9192 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
9195 void VmaAllocator_T::FillAllocation(
const VmaAllocation hAllocation, uint8_t pattern)
9197 if(VMA_DEBUG_INITIALIZE_ALLOCATIONS &&
9198 !hAllocation->CanBecomeLost() &&
9199 (m_MemProps.memoryTypes[hAllocation->GetMemoryTypeIndex()].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
9201 void* pData = VMA_NULL;
9202 VkResult res = Map(hAllocation, &pData);
9203 if(res == VK_SUCCESS)
9205 memset(pData, (
int)pattern, (
size_t)hAllocation->GetSize());
9206 FlushOrInvalidateAllocation(hAllocation, 0, VK_WHOLE_SIZE, VMA_CACHE_FLUSH);
9211 VMA_ASSERT(0 &&
"VMA_DEBUG_INITIALIZE_ALLOCATIONS is enabled, but couldn't map memory to fill allocation.");
// NOTE(review): this region is a mangled extraction of
// VmaAllocator_T::PrintDetailedMap(); many interior lines (json
// BeginObject/EndObject pairs, loop braces, hAlloc declaration) were lost,
// so the text is preserved verbatim rather than reconstructed.
// Purpose (from visible calls): writes three JSON sections into the
// VmaJsonWriter - "DedicatedAllocations" per memory type, "DefaultPools"
// (the per-memory-type block vectors), and custom "Pools".
9216 #if VMA_STATS_STRING_ENABLED 9218 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
// Section 1: dedicated allocations, grouped by memory type.
9220 bool dedicatedAllocationsStarted =
false;
9221 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
9223 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
9224 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
9225 VMA_ASSERT(pDedicatedAllocVector);
9226 if(pDedicatedAllocVector->empty() ==
false)
// The "DedicatedAllocations" header is emitted lazily, on first non-empty type.
9228 if(dedicatedAllocationsStarted ==
false)
9230 dedicatedAllocationsStarted =
true;
9231 json.WriteString(
"DedicatedAllocations");
9235 json.BeginString(
"Type ");
9236 json.ContinueString(memTypeIndex);
9241 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
9243 json.BeginObject(
true);
9245 hAlloc->PrintParameters(json);
9252 if(dedicatedAllocationsStarted)
// Section 2: default per-memory-type block vectors.
9258 bool allocationsStarted =
false;
9259 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
9261 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
9263 if(allocationsStarted ==
false)
9265 allocationsStarted =
true;
9266 json.WriteString(
"DefaultPools");
9270 json.BeginString(
"Type ");
9271 json.ContinueString(memTypeIndex);
9274 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
9277 if(allocationsStarted)
// Section 3: custom pools (under the pools mutex).
9284 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
9285 const size_t poolCount = m_Pools.size();
9288 json.WriteString(
"Pools");
9290 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
9293 json.ContinueString(m_Pools[poolIndex]->GetId());
9296 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
9303 #endif // #if VMA_STATS_STRING_ENABLED 9305 static VkResult AllocateMemoryForImage(
9309 VmaSuballocationType suballocType,
9312 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
9314 VkMemoryRequirements vkMemReq = {};
9315 bool requiresDedicatedAllocation =
false;
9316 bool prefersDedicatedAllocation =
false;
9317 allocator->GetImageMemoryRequirements(image, vkMemReq,
9318 requiresDedicatedAllocation, prefersDedicatedAllocation);
9320 return allocator->AllocateMemory(
9322 requiresDedicatedAllocation,
9323 prefersDedicatedAllocation,
9326 *pAllocationCreateInfo,
9338 VMA_ASSERT(pCreateInfo && pAllocator);
9339 VMA_DEBUG_LOG(
"vmaCreateAllocator");
9347 if(allocator != VK_NULL_HANDLE)
9349 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
9350 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
9351 vma_delete(&allocationCallbacks, allocator);
9357 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
9359 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
9360 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
9365 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
9367 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
9368 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
9373 uint32_t memoryTypeIndex,
9374 VkMemoryPropertyFlags* pFlags)
9376 VMA_ASSERT(allocator && pFlags);
9377 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
9378 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
9383 uint32_t frameIndex)
9385 VMA_ASSERT(allocator);
9386 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
9388 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9390 allocator->SetCurrentFrameIndex(frameIndex);
9397 VMA_ASSERT(allocator && pStats);
9398 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9399 allocator->CalculateStats(pStats);
// NOTE(review): mangled extraction of vmaBuildStatsString(); many interior
// lines (signature head, json Begin/End pairs, "General"/"Usage" fields) were
// lost, so the text is preserved verbatim rather than reconstructed.
// Purpose (from visible calls): builds a heap-allocated, NUL-terminated JSON
// string describing totals, per-heap and per-type statistics, and (when
// detailedMap == VK_TRUE) the detailed allocation map. The string must be
// released with vmaFreeStatsString().
9402 #if VMA_STATS_STRING_ENABLED 9406 char** ppStatsString,
9407 VkBool32 detailedMap)
9409 VMA_ASSERT(allocator && ppStatsString);
9410 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9412 VmaStringBuilder sb(allocator);
9414 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
9418 allocator->CalculateStats(&stats);
// "Total": aggregate statistics over all heaps and types.
9420 json.WriteString(
"Total");
9421 VmaPrintStatInfo(json, stats.
total);
// Per-heap section: size, flags, stats, then the nested per-type entries.
9423 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
9425 json.BeginString(
"Heap ");
9426 json.ContinueString(heapIndex);
9430 json.WriteString(
"Size");
9431 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
9433 json.WriteString(
"Flags");
9434 json.BeginArray(
true);
9435 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
9437 json.WriteString(
"DEVICE_LOCAL");
9443 json.WriteString(
"Stats");
9444 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
// Nested per-type entries for the types that live in this heap.
9447 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
9449 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
9451 json.BeginString(
"Type ");
9452 json.ContinueString(typeIndex);
9457 json.WriteString(
"Flags");
9458 json.BeginArray(
true);
9459 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
9460 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
9462 json.WriteString(
"DEVICE_LOCAL");
9464 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
9466 json.WriteString(
"HOST_VISIBLE");
9468 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
9470 json.WriteString(
"HOST_COHERENT");
9472 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
9474 json.WriteString(
"HOST_CACHED");
9476 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
9478 json.WriteString(
"LAZILY_ALLOCATED");
9484 json.WriteString(
"Stats");
9485 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
// Optional detailed allocation map.
9494 if(detailedMap == VK_TRUE)
9496 allocator->PrintDetailedMap(json);
// Copy the built JSON into a vma_new_array'd NUL-terminated string.
9502 const size_t len = sb.GetLength();
9503 char*
const pChars = vma_new_array(allocator,
char, len + 1);
9506 memcpy(pChars, sb.GetData(), len);
9509 *ppStatsString = pChars;
9516 if(pStatsString != VMA_NULL)
9518 VMA_ASSERT(allocator);
9519 size_t len = strlen(pStatsString);
9520 vma_delete_array(allocator, pStatsString, len + 1);
9524 #endif // #if VMA_STATS_STRING_ENABLED 9531 uint32_t memoryTypeBits,
9533 uint32_t* pMemoryTypeIndex)
9535 VMA_ASSERT(allocator != VK_NULL_HANDLE);
9536 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
9537 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
9544 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
9550 preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
9554 switch(pAllocationCreateInfo->
usage)
9559 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
9561 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
9565 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
9568 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
9569 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
9571 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
9575 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
9576 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
9582 *pMemoryTypeIndex = UINT32_MAX;
9583 uint32_t minCost = UINT32_MAX;
9584 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
9585 memTypeIndex < allocator->GetMemoryTypeCount();
9586 ++memTypeIndex, memTypeBit <<= 1)
9589 if((memTypeBit & memoryTypeBits) != 0)
9591 const VkMemoryPropertyFlags currFlags =
9592 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
9594 if((requiredFlags & ~currFlags) == 0)
9597 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
9599 if(currCost < minCost)
9601 *pMemoryTypeIndex = memTypeIndex;
9611 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
9616 const VkBufferCreateInfo* pBufferCreateInfo,
9618 uint32_t* pMemoryTypeIndex)
9620 VMA_ASSERT(allocator != VK_NULL_HANDLE);
9621 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
9622 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
9623 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
9625 const VkDevice hDev = allocator->m_hDevice;
9626 VkBuffer hBuffer = VK_NULL_HANDLE;
9627 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
9628 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
9629 if(res == VK_SUCCESS)
9631 VkMemoryRequirements memReq = {};
9632 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
9633 hDev, hBuffer, &memReq);
9637 memReq.memoryTypeBits,
9638 pAllocationCreateInfo,
9641 allocator->GetVulkanFunctions().vkDestroyBuffer(
9642 hDev, hBuffer, allocator->GetAllocationCallbacks());
9649 const VkImageCreateInfo* pImageCreateInfo,
9651 uint32_t* pMemoryTypeIndex)
9653 VMA_ASSERT(allocator != VK_NULL_HANDLE);
9654 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
9655 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
9656 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
9658 const VkDevice hDev = allocator->m_hDevice;
9659 VkImage hImage = VK_NULL_HANDLE;
9660 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
9661 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
9662 if(res == VK_SUCCESS)
9664 VkMemoryRequirements memReq = {};
9665 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
9666 hDev, hImage, &memReq);
9670 memReq.memoryTypeBits,
9671 pAllocationCreateInfo,
9674 allocator->GetVulkanFunctions().vkDestroyImage(
9675 hDev, hImage, allocator->GetAllocationCallbacks());
9685 VMA_ASSERT(allocator && pCreateInfo && pPool);
9687 VMA_DEBUG_LOG(
"vmaCreatePool");
9689 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9691 return allocator->CreatePool(pCreateInfo, pPool);
9698 VMA_ASSERT(allocator);
9700 if(pool == VK_NULL_HANDLE)
9705 VMA_DEBUG_LOG(
"vmaDestroyPool");
9707 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9709 allocator->DestroyPool(pool);
9717 VMA_ASSERT(allocator && pool && pPoolStats);
9719 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9721 allocator->GetPoolStats(pool, pPoolStats);
9727 size_t* pLostAllocationCount)
9729 VMA_ASSERT(allocator && pool);
9731 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9733 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
9738 VMA_ASSERT(allocator && pool);
9740 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9742 VMA_DEBUG_LOG(
"vmaCheckPoolCorruption");
9744 return allocator->CheckPoolCorruption(pool);
9749 const VkMemoryRequirements* pVkMemoryRequirements,
9754 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
9756 VMA_DEBUG_LOG(
"vmaAllocateMemory");
9758 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9760 VkResult result = allocator->AllocateMemory(
9761 *pVkMemoryRequirements,
9767 VMA_SUBALLOCATION_TYPE_UNKNOWN,
9770 if(pAllocationInfo && result == VK_SUCCESS)
9772 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9785 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
9787 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
9789 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9791 VkMemoryRequirements vkMemReq = {};
9792 bool requiresDedicatedAllocation =
false;
9793 bool prefersDedicatedAllocation =
false;
9794 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
9795 requiresDedicatedAllocation,
9796 prefersDedicatedAllocation);
9798 VkResult result = allocator->AllocateMemory(
9800 requiresDedicatedAllocation,
9801 prefersDedicatedAllocation,
9805 VMA_SUBALLOCATION_TYPE_BUFFER,
9808 if(pAllocationInfo && result == VK_SUCCESS)
9810 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9823 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
9825 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
9827 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9829 VkResult result = AllocateMemoryForImage(
9833 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
9836 if(pAllocationInfo && result == VK_SUCCESS)
9838 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9848 VMA_ASSERT(allocator);
9849 VMA_DEBUG_LOG(
"vmaFreeMemory");
9850 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9851 if(allocation != VK_NULL_HANDLE)
9853 allocator->FreeMemory(allocation);
9862 VMA_ASSERT(allocator && allocation && pAllocationInfo);
9864 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9866 allocator->GetAllocationInfo(allocation, pAllocationInfo);
9873 VMA_ASSERT(allocator && allocation);
9875 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9877 return allocator->TouchAllocation(allocation);
9885 VMA_ASSERT(allocator && allocation);
9887 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9889 allocation->SetUserData(allocator, pUserData);
9896 VMA_ASSERT(allocator && pAllocation);
9898 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
9900 allocator->CreateLostAllocation(pAllocation);
9908 VMA_ASSERT(allocator && allocation && ppData);
9910 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9912 return allocator->Map(allocation, ppData);
9919 VMA_ASSERT(allocator && allocation);
9921 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9923 allocator->Unmap(allocation);
9928 VMA_ASSERT(allocator && allocation);
9930 VMA_DEBUG_LOG(
"vmaFlushAllocation");
9932 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9934 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
9939 VMA_ASSERT(allocator && allocation);
9941 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
9943 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9945 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
9950 VMA_ASSERT(allocator);
9952 VMA_DEBUG_LOG(
"vmaCheckCorruption");
9954 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9956 return allocator->CheckCorruption(memoryTypeBits);
9962 size_t allocationCount,
9963 VkBool32* pAllocationsChanged,
9967 VMA_ASSERT(allocator && pAllocations);
9969 VMA_DEBUG_LOG(
"vmaDefragment");
9971 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9973 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
9981 VMA_ASSERT(allocator && allocation && buffer);
9983 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
9985 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9987 return allocator->BindBufferMemory(allocation, buffer);
9995 VMA_ASSERT(allocator && allocation && image);
9997 VMA_DEBUG_LOG(
"vmaBindImageMemory");
9999 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10001 return allocator->BindImageMemory(allocation, image);
10006 const VkBufferCreateInfo* pBufferCreateInfo,
10012 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
10014 VMA_DEBUG_LOG(
"vmaCreateBuffer");
10016 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10018 *pBuffer = VK_NULL_HANDLE;
10019 *pAllocation = VK_NULL_HANDLE;
10022 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
10023 allocator->m_hDevice,
10025 allocator->GetAllocationCallbacks(),
10030 VkMemoryRequirements vkMemReq = {};
10031 bool requiresDedicatedAllocation =
false;
10032 bool prefersDedicatedAllocation =
false;
10033 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
10034 requiresDedicatedAllocation, prefersDedicatedAllocation);
10038 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
10040 VMA_ASSERT(vkMemReq.alignment %
10041 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
10043 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
10045 VMA_ASSERT(vkMemReq.alignment %
10046 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
10048 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
10050 VMA_ASSERT(vkMemReq.alignment %
10051 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
10055 res = allocator->AllocateMemory(
10057 requiresDedicatedAllocation,
10058 prefersDedicatedAllocation,
10061 *pAllocationCreateInfo,
10062 VMA_SUBALLOCATION_TYPE_BUFFER,
10067 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
10071 #if VMA_STATS_STRING_ENABLED 10072 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
10074 if(pAllocationInfo != VMA_NULL)
10076 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
10080 allocator->FreeMemory(*pAllocation);
10081 *pAllocation = VK_NULL_HANDLE;
10082 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
10083 *pBuffer = VK_NULL_HANDLE;
10086 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
10087 *pBuffer = VK_NULL_HANDLE;
10098 VMA_ASSERT(allocator);
10099 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
10100 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10101 if(buffer != VK_NULL_HANDLE)
10103 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
10105 if(allocation != VK_NULL_HANDLE)
10107 allocator->FreeMemory(allocation);
10113 const VkImageCreateInfo* pImageCreateInfo,
10119 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
10121 VMA_DEBUG_LOG(
"vmaCreateImage");
10123 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10125 *pImage = VK_NULL_HANDLE;
10126 *pAllocation = VK_NULL_HANDLE;
10129 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
10130 allocator->m_hDevice,
10132 allocator->GetAllocationCallbacks(),
10136 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
10137 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
10138 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
10141 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
10145 res = allocator->BindImageMemory(*pAllocation, *pImage);
10149 #if VMA_STATS_STRING_ENABLED 10150 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
10152 if(pAllocationInfo != VMA_NULL)
10154 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
10158 allocator->FreeMemory(*pAllocation);
10159 *pAllocation = VK_NULL_HANDLE;
10160 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
10161 *pImage = VK_NULL_HANDLE;
10164 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
10165 *pImage = VK_NULL_HANDLE;
10176 VMA_ASSERT(allocator);
10177 VMA_DEBUG_LOG(
"vmaDestroyImage");
10178 VMA_DEBUG_GLOBAL_MUTEX_LOCK
10179 if(image != VK_NULL_HANDLE)
10181 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
10183 if(allocation != VK_NULL_HANDLE)
10185 allocator->FreeMemory(allocation);
#endif // #ifdef VMA_IMPLEMENTATION
Definition: vk_mem_alloc.h:1271
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1537
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1300
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1283
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1494
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1275
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1883
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1297
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:2128
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1713
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1767
Definition: vk_mem_alloc.h:1574
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1264
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1612
Definition: vk_mem_alloc.h:1521
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1309
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1362
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1294
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1525
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1427
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1280
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1426
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:2132
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1326
VmaStatInfo total
Definition: vk_mem_alloc.h:1436
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2140
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1596
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2123
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1281
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1206
Represents main object of this library initialized.
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1303
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1721
Definition: vk_mem_alloc.h:1715
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1893
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1276
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1633
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1737
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1773
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1262
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1724
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VmaMemoryUsage
Definition: vk_mem_alloc.h:1472
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:2118
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2136
Definition: vk_mem_alloc.h:1511
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1620
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1279
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1432
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1212
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1233
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1238
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2138
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1607
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1783
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1272
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1415
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1732
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1225
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1581
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1428
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1229
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1727
Definition: vk_mem_alloc.h:1520
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1278
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1602
Definition: vk_mem_alloc.h:1593
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1418
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1274
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:1745
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1312
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1776
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1591
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1626
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1350
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1434
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1561
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1427
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1285
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1227
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1284
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1759
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1277
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1907
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1306
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1427
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1424
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
VkResult vmaCheckCorruption(VmaAllocator allocator, uint32_t memoryTypeBits)
Checks magic number in margins around all allocations in given memory types (in both default and cust...
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1764
VkResult vmaCheckPoolCorruption(VmaAllocator allocator, VmaPool pool)
Checks magic number in margins around all allocations in given memory pool in search for corruptions...
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1888
Definition: vk_mem_alloc.h:1589
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2134
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1270
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1422
Definition: vk_mem_alloc.h:1477
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1717
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1420
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1282
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1286
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1548
Definition: vk_mem_alloc.h:1504
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1902
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1260
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1273
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1869
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1695
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1428
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
Definition: vk_mem_alloc.h:1587
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1435
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1770
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1428
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1874