23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1092 #include <vulkan/vulkan.h> 1094 #if !defined(VMA_DEDICATED_ALLOCATION) 1095 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1096 #define VMA_DEDICATED_ALLOCATION 1 1098 #define VMA_DEDICATED_ALLOCATION 0 1116 uint32_t memoryType,
1117 VkDeviceMemory memory,
1122 uint32_t memoryType,
1123 VkDeviceMemory memory,
1193 #if VMA_DEDICATED_ALLOCATION 1194 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1195 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1286 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1294 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1304 uint32_t memoryTypeIndex,
1305 VkMemoryPropertyFlags* pFlags);
1317 uint32_t frameIndex);
1350 #define VMA_STATS_STRING_ENABLED 1 1352 #if VMA_STATS_STRING_ENABLED 1359 char** ppStatsString,
1360 VkBool32 detailedMap);
1364 char* pStatsString);
1366 #endif // #if VMA_STATS_STRING_ENABLED 1560 uint32_t memoryTypeBits,
1562 uint32_t* pMemoryTypeIndex);
1578 const VkBufferCreateInfo* pBufferCreateInfo,
1580 uint32_t* pMemoryTypeIndex);
1596 const VkImageCreateInfo* pImageCreateInfo,
1598 uint32_t* pMemoryTypeIndex);
1729 size_t* pLostAllocationCount);
1812 const VkMemoryRequirements* pVkMemoryRequirements,
2072 size_t allocationCount,
2073 VkBool32* pAllocationsChanged,
2139 const VkBufferCreateInfo* pBufferCreateInfo,
2164 const VkImageCreateInfo* pImageCreateInfo,
2190 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 2193 #if defined(__cplusplus) && defined(__INTELLISENSE__) 2194 #define VMA_IMPLEMENTATION 2197 #ifdef VMA_IMPLEMENTATION 2198 #undef VMA_IMPLEMENTATION 2220 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 2221 #define VMA_STATIC_VULKAN_FUNCTIONS 1 2233 #if VMA_USE_STL_CONTAINERS 2234 #define VMA_USE_STL_VECTOR 1 2235 #define VMA_USE_STL_UNORDERED_MAP 1 2236 #define VMA_USE_STL_LIST 1 2239 #if VMA_USE_STL_VECTOR 2243 #if VMA_USE_STL_UNORDERED_MAP 2244 #include <unordered_map> 2247 #if VMA_USE_STL_LIST 2256 #include <algorithm> 2260 #if !defined(_WIN32) && !defined(__APPLE__) 2266 #define VMA_NULL nullptr 2269 #if defined(__APPLE__) || defined(__ANDROID__) 2271 void *aligned_alloc(
size_t alignment,
size_t size)
2274 if(alignment <
sizeof(
void*))
2276 alignment =
sizeof(
void*);
2280 if(posix_memalign(&pointer, alignment, size) == 0)
2289 #define VMA_ASSERT(expr) assert(expr) 2291 #define VMA_ASSERT(expr) 2297 #ifndef VMA_HEAVY_ASSERT 2299 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 2301 #define VMA_HEAVY_ASSERT(expr) 2305 #ifndef VMA_ALIGN_OF 2306 #define VMA_ALIGN_OF(type) (__alignof(type)) 2309 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 2311 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 2313 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 2317 #ifndef VMA_SYSTEM_FREE 2319 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 2321 #define VMA_SYSTEM_FREE(ptr) free(ptr) 2326 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 2330 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 2334 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 2338 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 2341 #ifndef VMA_DEBUG_LOG 2342 #define VMA_DEBUG_LOG(format, ...) 2352 #if VMA_STATS_STRING_ENABLED 2353 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
2355 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Formats a 64-bit unsigned integer as decimal text into outStr.
// outStr has capacity strLen; snprintf guarantees NUL-termination when strLen > 0.
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    // Cast to unsigned long long so the "%llu" specifier matches exactly.
    const unsigned long long value = static_cast<unsigned long long>(num);
    snprintf(outStr, strLen, "%llu", value);
}
// Formats a pointer value into outStr using the implementation-defined "%p"
// representation. outStr has capacity strLen (NUL-terminated when strLen > 0).
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
// Thin wrapper over std::mutex used as the default VMA_MUTEX implementation.
// NOTE(review): the class opener and member declaration are elided in this
// listing; reconstructed from the visible m_Mutex.lock()/unlock() usage.
class VmaMutex
{
public:
    VmaMutex() { }
    ~VmaMutex() { }
    void Lock() { m_Mutex.lock(); }
    void Unlock() { m_Mutex.unlock(); }
private:
    std::mutex m_Mutex;
};
2378 #define VMA_MUTEX VmaMutex 2389 #ifndef VMA_ATOMIC_UINT32 2390 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 2393 #ifndef VMA_BEST_FIT 2406 #define VMA_BEST_FIT (1) 2409 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 2414 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 2417 #ifndef VMA_DEBUG_ALIGNMENT 2422 #define VMA_DEBUG_ALIGNMENT (1) 2425 #ifndef VMA_DEBUG_MARGIN 2430 #define VMA_DEBUG_MARGIN (0) 2433 #ifndef VMA_DEBUG_GLOBAL_MUTEX 2438 #define VMA_DEBUG_GLOBAL_MUTEX (0) 2441 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 2446 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 2449 #ifndef VMA_SMALL_HEAP_MAX_SIZE 2450 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 2454 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 2455 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 2459 #ifndef VMA_CLASS_NO_COPY 2460 #define VMA_CLASS_NO_COPY(className) \ 2462 className(const className&) = delete; \ 2463 className& operator=(const className&) = delete; 2466 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
2472 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
2473 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (population count).
// Uses Kernighan's method: each iteration clears the lowest set bit,
// so the loop runs once per set bit.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t count = 0;
    while(v != 0)
    {
        v &= v - 1; // Clear lowest set bit.
        ++count;
    }
    return count;
}
// Rounds val up to the nearest multiple of align.
// Works for any positive align (not only powers of 2) on unsigned types.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T blocks = (val + align - 1) / align;
    return blocks * align;
}
// Division of x by y with rounding to the nearest integer
// (halves round up for nonnegative operands).
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
// Lomuto-style partition using the last element of [beg, end) as the pivot.
// Elements for which cmp(elem, pivot) is true are moved before the pivot;
// returns the iterator at which the pivot ends up.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}

// Recursive quicksort over [beg, end) using the partition helper above.
// Used as the default implementation behind the VMA_SORT macro.
template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}
2537 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 2539 #endif // #ifndef VMA_SORT 2548 static inline bool VmaBlocksOnSamePage(
2549 VkDeviceSize resourceAOffset,
2550 VkDeviceSize resourceASize,
2551 VkDeviceSize resourceBOffset,
2552 VkDeviceSize pageSize)
2554 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
2555 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
2556 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
2557 VkDeviceSize resourceBStart = resourceBOffset;
2558 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
2559 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation inside a memory block.
// The distinction between linear and optimal image layouts is what drives the
// bufferImageGranularity conflict checks elsewhere in this file.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
2579 static inline bool VmaIsBufferImageGranularityConflict(
2580 VmaSuballocationType suballocType1,
2581 VmaSuballocationType suballocType2)
2583 if(suballocType1 > suballocType2)
2585 VMA_SWAP(suballocType1, suballocType2);
2588 switch(suballocType1)
2590 case VMA_SUBALLOCATION_TYPE_FREE:
2592 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
2594 case VMA_SUBALLOCATION_TYPE_BUFFER:
2596 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2597 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2598 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
2600 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2601 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
2602 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2603 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
2605 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2606 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
2617 VMA_CLASS_NO_COPY(VmaMutexLock)
2619 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
2620 m_pMutex(useMutex ? &mutex : VMA_NULL)
2637 VMA_MUTEX* m_pMutex;
2640 #if VMA_DEBUG_GLOBAL_MUTEX 2641 static VMA_MUTEX gDebugGlobalMutex;
2642 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 2644 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 2648 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Binary search over a sorted [beg, end) range. Returns an iterator to the
first element for which cmp(element, key) is false - i.e. the first element
not less than key (same contract as std::lower_bound). Returns end when all
elements compare less than key.
*/
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t lo = 0, hi = (end - beg);
    while(lo < hi)
    {
        const size_t mid = (lo + hi) / 2;
        if(cmp(*(beg + mid), key))
        {
            lo = mid + 1; // *mid < key: answer lies strictly to the right.
        }
        else
        {
            hi = mid;     // *mid >= key: mid itself may be the answer.
        }
    }
    return beg + lo;
}
2681 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
2683 if((pAllocationCallbacks != VMA_NULL) &&
2684 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
2686 return (*pAllocationCallbacks->pfnAllocation)(
2687 pAllocationCallbacks->pUserData,
2690 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2694 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
2698 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
2700 if((pAllocationCallbacks != VMA_NULL) &&
2701 (pAllocationCallbacks->pfnFree != VMA_NULL))
2703 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
2707 VMA_SYSTEM_FREE(ptr);
2711 template<
typename T>
2712 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
2714 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
2717 template<
typename T>
2718 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
2720 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
2723 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 2725 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 2727 template<
typename T>
2728 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
2731 VmaFree(pAllocationCallbacks, ptr);
2734 template<
typename T>
2735 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
2739 for(
size_t i = count; i--; )
2743 VmaFree(pAllocationCallbacks, ptr);
2748 template<
typename T>
2749 class VmaStlAllocator
2752 const VkAllocationCallbacks*
const m_pCallbacks;
2753 typedef T value_type;
2755 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
2756 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
2758 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
2759 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
2761 template<
typename U>
2762 bool operator==(
const VmaStlAllocator<U>& rhs)
const 2764 return m_pCallbacks == rhs.m_pCallbacks;
2766 template<
typename U>
2767 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 2769 return m_pCallbacks != rhs.m_pCallbacks;
2772 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
2775 #if VMA_USE_STL_VECTOR 2777 #define VmaVector std::vector 2779 template<
typename T,
typename allocatorT>
2780 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
2782 vec.insert(vec.begin() + index, item);
2785 template<
typename T,
typename allocatorT>
2786 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
2788 vec.erase(vec.begin() + index);
2791 #else // #if VMA_USE_STL_VECTOR 2796 template<
typename T,
typename AllocatorT>
2800 typedef T value_type;
2802 VmaVector(
const AllocatorT& allocator) :
2803 m_Allocator(allocator),
2810 VmaVector(
size_t count,
const AllocatorT& allocator) :
2811 m_Allocator(allocator),
2812 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
2818 VmaVector(
const VmaVector<T, AllocatorT>& src) :
2819 m_Allocator(src.m_Allocator),
2820 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
2821 m_Count(src.m_Count),
2822 m_Capacity(src.m_Count)
2826 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
2832 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2835 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
2839 resize(rhs.m_Count);
2842 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
2848 bool empty()
const {
return m_Count == 0; }
2849 size_t size()
const {
return m_Count; }
2850 T* data() {
return m_pArray; }
2851 const T* data()
const {
return m_pArray; }
2853 T& operator[](
size_t index)
2855 VMA_HEAVY_ASSERT(index < m_Count);
2856 return m_pArray[index];
2858 const T& operator[](
size_t index)
const 2860 VMA_HEAVY_ASSERT(index < m_Count);
2861 return m_pArray[index];
2866 VMA_HEAVY_ASSERT(m_Count > 0);
2869 const T& front()
const 2871 VMA_HEAVY_ASSERT(m_Count > 0);
2876 VMA_HEAVY_ASSERT(m_Count > 0);
2877 return m_pArray[m_Count - 1];
2879 const T& back()
const 2881 VMA_HEAVY_ASSERT(m_Count > 0);
2882 return m_pArray[m_Count - 1];
2885 void reserve(
size_t newCapacity,
bool freeMemory =
false)
2887 newCapacity = VMA_MAX(newCapacity, m_Count);
2889 if((newCapacity < m_Capacity) && !freeMemory)
2891 newCapacity = m_Capacity;
2894 if(newCapacity != m_Capacity)
2896 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
2899 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
2901 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2902 m_Capacity = newCapacity;
2903 m_pArray = newArray;
2907 void resize(
size_t newCount,
bool freeMemory =
false)
2909 size_t newCapacity = m_Capacity;
2910 if(newCount > m_Capacity)
2912 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
2916 newCapacity = newCount;
2919 if(newCapacity != m_Capacity)
2921 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
2922 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
2923 if(elementsToCopy != 0)
2925 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
2927 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2928 m_Capacity = newCapacity;
2929 m_pArray = newArray;
2935 void clear(
bool freeMemory =
false)
2937 resize(0, freeMemory);
2940 void insert(
size_t index,
const T& src)
2942 VMA_HEAVY_ASSERT(index <= m_Count);
2943 const size_t oldCount = size();
2944 resize(oldCount + 1);
2945 if(index < oldCount)
2947 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
2949 m_pArray[index] = src;
2952 void remove(
size_t index)
2954 VMA_HEAVY_ASSERT(index < m_Count);
2955 const size_t oldCount = size();
2956 if(index < oldCount - 1)
2958 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
2960 resize(oldCount - 1);
2963 void push_back(
const T& src)
2965 const size_t newIndex = size();
2966 resize(newIndex + 1);
2967 m_pArray[newIndex] = src;
2972 VMA_HEAVY_ASSERT(m_Count > 0);
2976 void push_front(
const T& src)
2983 VMA_HEAVY_ASSERT(m_Count > 0);
2987 typedef T* iterator;
2989 iterator begin() {
return m_pArray; }
2990 iterator end() {
return m_pArray + m_Count; }
2993 AllocatorT m_Allocator;
2999 template<
typename T,
typename allocatorT>
3000 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
3002 vec.insert(index, item);
3005 template<
typename T,
typename allocatorT>
3006 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
3011 #endif // #if VMA_USE_STL_VECTOR 3013 template<
typename CmpLess,
typename VectorT>
3014 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
3016 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3018 vector.data() + vector.size(),
3020 CmpLess()) - vector.data();
3021 VmaVectorInsert(vector, indexToInsert, value);
3022 return indexToInsert;
3025 template<
typename CmpLess,
typename VectorT>
3026 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
3029 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
3034 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
3036 size_t indexToRemove = it - vector.begin();
3037 VmaVectorRemove(vector, indexToRemove);
/*
Binary-searches a CmpLess-sorted vector for an element equivalent to value.
Returns its index, or vector.size() when not found.

Fix: the original compared the result iterator against vector.size()
(pointer vs. integer - ill-formed when instantiated) and bound a non-const
iterator to a const vector. The end-of-range pointer is now computed once
with the correct const-qualified type and compared properly.
*/
template<typename CmpLess, typename VectorT>
size_t VmaVectorFindSorted(const VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    const typename VectorT::value_type* const dataBeg = vector.data();
    const typename VectorT::value_type* const dataEnd = dataBeg + vector.size();
    const typename VectorT::value_type* const it = VmaBinaryFindFirstNotLess(
        dataBeg,
        dataEnd,
        value,
        comparator);
    // Found only if in range and equivalent under the comparator.
    if((it != dataEnd) && !comparator(*it, value) && !comparator(value, *it))
    {
        return it - dataBeg;
    }
    return vector.size();
}
3070 template<
typename T>
3071 class VmaPoolAllocator
3073 VMA_CLASS_NO_COPY(VmaPoolAllocator)
3075 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
3076 ~VmaPoolAllocator();
3084 uint32_t NextFreeIndex;
3091 uint32_t FirstFreeIndex;
3094 const VkAllocationCallbacks* m_pAllocationCallbacks;
3095 size_t m_ItemsPerBlock;
3096 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
3098 ItemBlock& CreateNewBlock();
3101 template<
typename T>
3102 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
3103 m_pAllocationCallbacks(pAllocationCallbacks),
3104 m_ItemsPerBlock(itemsPerBlock),
3105 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
3107 VMA_ASSERT(itemsPerBlock > 0);
3110 template<
typename T>
3111 VmaPoolAllocator<T>::~VmaPoolAllocator()
3116 template<
typename T>
3117 void VmaPoolAllocator<T>::Clear()
3119 for(
size_t i = m_ItemBlocks.size(); i--; )
3120 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
3121 m_ItemBlocks.clear();
3124 template<
typename T>
3125 T* VmaPoolAllocator<T>::Alloc()
3127 for(
size_t i = m_ItemBlocks.size(); i--; )
3129 ItemBlock& block = m_ItemBlocks[i];
3131 if(block.FirstFreeIndex != UINT32_MAX)
3133 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
3134 block.FirstFreeIndex = pItem->NextFreeIndex;
3135 return &pItem->Value;
3140 ItemBlock& newBlock = CreateNewBlock();
3141 Item*
const pItem = &newBlock.pItems[0];
3142 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
3143 return &pItem->Value;
3146 template<
typename T>
3147 void VmaPoolAllocator<T>::Free(T* ptr)
3150 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
3152 ItemBlock& block = m_ItemBlocks[i];
3156 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
3159 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
3161 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
3162 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
3163 block.FirstFreeIndex = index;
3167 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
3170 template<
typename T>
3171 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
3173 ItemBlock newBlock = {
3174 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
3176 m_ItemBlocks.push_back(newBlock);
3179 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
3180 newBlock.pItems[i].NextFreeIndex = i + 1;
3181 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
3182 return m_ItemBlocks.back();
3188 #if VMA_USE_STL_LIST 3190 #define VmaList std::list 3192 #else // #if VMA_USE_STL_LIST 3194 template<
typename T>
3203 template<
typename T>
3206 VMA_CLASS_NO_COPY(VmaRawList)
3208 typedef VmaListItem<T> ItemType;
3210 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
3214 size_t GetCount()
const {
return m_Count; }
3215 bool IsEmpty()
const {
return m_Count == 0; }
3217 ItemType* Front() {
return m_pFront; }
3218 const ItemType* Front()
const {
return m_pFront; }
3219 ItemType* Back() {
return m_pBack; }
3220 const ItemType* Back()
const {
return m_pBack; }
3222 ItemType* PushBack();
3223 ItemType* PushFront();
3224 ItemType* PushBack(
const T& value);
3225 ItemType* PushFront(
const T& value);
3230 ItemType* InsertBefore(ItemType* pItem);
3232 ItemType* InsertAfter(ItemType* pItem);
3234 ItemType* InsertBefore(ItemType* pItem,
const T& value);
3235 ItemType* InsertAfter(ItemType* pItem,
const T& value);
3237 void Remove(ItemType* pItem);
3240 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
3241 VmaPoolAllocator<ItemType> m_ItemAllocator;
3247 template<
typename T>
3248 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
3249 m_pAllocationCallbacks(pAllocationCallbacks),
3250 m_ItemAllocator(pAllocationCallbacks, 128),
3257 template<
typename T>
3258 VmaRawList<T>::~VmaRawList()
3264 template<
typename T>
3265 void VmaRawList<T>::Clear()
3267 if(IsEmpty() ==
false)
3269 ItemType* pItem = m_pBack;
3270 while(pItem != VMA_NULL)
3272 ItemType*
const pPrevItem = pItem->pPrev;
3273 m_ItemAllocator.Free(pItem);
3276 m_pFront = VMA_NULL;
3282 template<
typename T>
3283 VmaListItem<T>* VmaRawList<T>::PushBack()
3285 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3286 pNewItem->pNext = VMA_NULL;
3289 pNewItem->pPrev = VMA_NULL;
3290 m_pFront = pNewItem;
3296 pNewItem->pPrev = m_pBack;
3297 m_pBack->pNext = pNewItem;
3304 template<
typename T>
3305 VmaListItem<T>* VmaRawList<T>::PushFront()
3307 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3308 pNewItem->pPrev = VMA_NULL;
3311 pNewItem->pNext = VMA_NULL;
3312 m_pFront = pNewItem;
3318 pNewItem->pNext = m_pFront;
3319 m_pFront->pPrev = pNewItem;
3320 m_pFront = pNewItem;
3326 template<
typename T>
3327 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
3329 ItemType*
const pNewItem = PushBack();
3330 pNewItem->Value = value;
3334 template<
typename T>
3335 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
3337 ItemType*
const pNewItem = PushFront();
3338 pNewItem->Value = value;
3342 template<
typename T>
3343 void VmaRawList<T>::PopBack()
3345 VMA_HEAVY_ASSERT(m_Count > 0);
3346 ItemType*
const pBackItem = m_pBack;
3347 ItemType*
const pPrevItem = pBackItem->pPrev;
3348 if(pPrevItem != VMA_NULL)
3350 pPrevItem->pNext = VMA_NULL;
3352 m_pBack = pPrevItem;
3353 m_ItemAllocator.Free(pBackItem);
3357 template<
typename T>
3358 void VmaRawList<T>::PopFront()
3360 VMA_HEAVY_ASSERT(m_Count > 0);
3361 ItemType*
const pFrontItem = m_pFront;
3362 ItemType*
const pNextItem = pFrontItem->pNext;
3363 if(pNextItem != VMA_NULL)
3365 pNextItem->pPrev = VMA_NULL;
3367 m_pFront = pNextItem;
3368 m_ItemAllocator.Free(pFrontItem);
3372 template<
typename T>
3373 void VmaRawList<T>::Remove(ItemType* pItem)
3375 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
3376 VMA_HEAVY_ASSERT(m_Count > 0);
3378 if(pItem->pPrev != VMA_NULL)
3380 pItem->pPrev->pNext = pItem->pNext;
3384 VMA_HEAVY_ASSERT(m_pFront == pItem);
3385 m_pFront = pItem->pNext;
3388 if(pItem->pNext != VMA_NULL)
3390 pItem->pNext->pPrev = pItem->pPrev;
3394 VMA_HEAVY_ASSERT(m_pBack == pItem);
3395 m_pBack = pItem->pPrev;
3398 m_ItemAllocator.Free(pItem);
3402 template<
typename T>
3403 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
3405 if(pItem != VMA_NULL)
3407 ItemType*
const prevItem = pItem->pPrev;
3408 ItemType*
const newItem = m_ItemAllocator.Alloc();
3409 newItem->pPrev = prevItem;
3410 newItem->pNext = pItem;
3411 pItem->pPrev = newItem;
3412 if(prevItem != VMA_NULL)
3414 prevItem->pNext = newItem;
3418 VMA_HEAVY_ASSERT(m_pFront == pItem);
3428 template<
typename T>
3429 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
3431 if(pItem != VMA_NULL)
3433 ItemType*
const nextItem = pItem->pNext;
3434 ItemType*
const newItem = m_ItemAllocator.Alloc();
3435 newItem->pNext = nextItem;
3436 newItem->pPrev = pItem;
3437 pItem->pNext = newItem;
3438 if(nextItem != VMA_NULL)
3440 nextItem->pPrev = newItem;
3444 VMA_HEAVY_ASSERT(m_pBack == pItem);
3454 template<
typename T>
3455 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
3457 ItemType*
const newItem = InsertBefore(pItem);
3458 newItem->Value = value;
3462 template<
typename T>
3463 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
3465 ItemType*
const newItem = InsertAfter(pItem);
3466 newItem->Value = value;
3470 template<
typename T,
typename AllocatorT>
3473 VMA_CLASS_NO_COPY(VmaList)
3484 T& operator*()
const 3486 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3487 return m_pItem->Value;
3489 T* operator->()
const 3491 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3492 return &m_pItem->Value;
3495 iterator& operator++()
3497 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3498 m_pItem = m_pItem->pNext;
3501 iterator& operator--()
3503 if(m_pItem != VMA_NULL)
3505 m_pItem = m_pItem->pPrev;
3509 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3510 m_pItem = m_pList->Back();
3515 iterator operator++(
int)
3517 iterator result = *
this;
3521 iterator operator--(
int)
3523 iterator result = *
this;
3528 bool operator==(
const iterator& rhs)
const 3530 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3531 return m_pItem == rhs.m_pItem;
3533 bool operator!=(
const iterator& rhs)
const 3535 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3536 return m_pItem != rhs.m_pItem;
3540 VmaRawList<T>* m_pList;
3541 VmaListItem<T>* m_pItem;
3543 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
3549 friend class VmaList<T, AllocatorT>;
3552 class const_iterator
3561 const_iterator(
const iterator& src) :
3562 m_pList(src.m_pList),
3563 m_pItem(src.m_pItem)
3567 const T& operator*()
const 3569 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3570 return m_pItem->Value;
3572 const T* operator->()
const 3574 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3575 return &m_pItem->Value;
3578 const_iterator& operator++()
3580 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3581 m_pItem = m_pItem->pNext;
3584 const_iterator& operator--()
3586 if(m_pItem != VMA_NULL)
3588 m_pItem = m_pItem->pPrev;
3592 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3593 m_pItem = m_pList->Back();
3598 const_iterator operator++(
int)
3600 const_iterator result = *
this;
3604 const_iterator operator--(
int)
3606 const_iterator result = *
this;
3611 bool operator==(
const const_iterator& rhs)
const 3613 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3614 return m_pItem == rhs.m_pItem;
3616 bool operator!=(
const const_iterator& rhs)
const 3618 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3619 return m_pItem != rhs.m_pItem;
3623 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
3629 const VmaRawList<T>* m_pList;
3630 const VmaListItem<T>* m_pItem;
3632 friend class VmaList<T, AllocatorT>;
3635 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
3637 bool empty()
const {
return m_RawList.IsEmpty(); }
3638 size_t size()
const {
return m_RawList.GetCount(); }
3640 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
3641 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
3643 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
3644 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
3646 void clear() { m_RawList.Clear(); }
3647 void push_back(
const T& value) { m_RawList.PushBack(value); }
3648 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
3649 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
3652 VmaRawList<T> m_RawList;
3655 #endif // #if VMA_USE_STL_LIST 3663 #if VMA_USE_STL_UNORDERED_MAP 3665 #define VmaPair std::pair 3667 #define VMA_MAP_TYPE(KeyT, ValueT) \ 3668 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 3670 #else // #if VMA_USE_STL_UNORDERED_MAP 3672 template<
typename T1,
typename T2>
3678 VmaPair() : first(), second() { }
3679 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
3685 template<
typename KeyT,
typename ValueT>
3689 typedef VmaPair<KeyT, ValueT> PairType;
3690 typedef PairType* iterator;
3692 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
3694 iterator begin() {
return m_Vector.begin(); }
3695 iterator end() {
return m_Vector.end(); }
3697 void insert(
const PairType& pair);
3698 iterator find(
const KeyT& key);
3699 void erase(iterator it);
3702 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
3705 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 3707 template<
typename FirstT,
typename SecondT>
3708 struct VmaPairFirstLess
3710 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 3712 return lhs.first < rhs.first;
3714 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 3716 return lhs.first < rhsFirst;
3720 template<
typename KeyT,
typename ValueT>
3721 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
3723 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3725 m_Vector.data() + m_Vector.size(),
3727 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
3728 VmaVectorInsert(m_Vector, indexToInsert, pair);
3731 template<
typename KeyT,
typename ValueT>
3732 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
3734 PairType* it = VmaBinaryFindFirstNotLess(
3736 m_Vector.data() + m_Vector.size(),
3738 VmaPairFirstLess<KeyT, ValueT>());
3739 if((it != m_Vector.end()) && (it->first == key))
3745 return m_Vector.end();
3749 template<
typename KeyT,
typename ValueT>
3750 void VmaMap<KeyT, ValueT>::erase(iterator it)
3752 VmaVectorRemove(m_Vector, it - m_Vector.begin());
3755 #endif // #if VMA_USE_STL_UNORDERED_MAP 3761 class VmaDeviceMemoryBlock;
3763 struct VmaAllocation_T
3765 VMA_CLASS_NO_COPY(VmaAllocation_T)
3767 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
3771 FLAG_USER_DATA_STRING = 0x01,
3775 enum ALLOCATION_TYPE
3777 ALLOCATION_TYPE_NONE,
3778 ALLOCATION_TYPE_BLOCK,
3779 ALLOCATION_TYPE_DEDICATED,
3782 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
3785 m_pUserData(VMA_NULL),
3786 m_LastUseFrameIndex(currentFrameIndex),
3787 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
3788 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
3790 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
3792 #if VMA_STATS_STRING_ENABLED 3793 m_CreationFrameIndex = currentFrameIndex;
3794 m_BufferImageUsage = 0;
3800 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
3803 VMA_ASSERT(m_pUserData == VMA_NULL);
3806 void InitBlockAllocation(
3808 VmaDeviceMemoryBlock* block,
3809 VkDeviceSize offset,
3810 VkDeviceSize alignment,
3812 VmaSuballocationType suballocationType,
3816 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3817 VMA_ASSERT(block != VMA_NULL);
3818 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3819 m_Alignment = alignment;
3821 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3822 m_SuballocationType = (uint8_t)suballocationType;
3823 m_BlockAllocation.m_hPool = hPool;
3824 m_BlockAllocation.m_Block = block;
3825 m_BlockAllocation.m_Offset = offset;
3826 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
3831 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3832 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
3833 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3834 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
3835 m_BlockAllocation.m_Block = VMA_NULL;
3836 m_BlockAllocation.m_Offset = 0;
3837 m_BlockAllocation.m_CanBecomeLost =
true;
3840 void ChangeBlockAllocation(
3842 VmaDeviceMemoryBlock* block,
3843 VkDeviceSize offset);
3846 void InitDedicatedAllocation(
3847 uint32_t memoryTypeIndex,
3848 VkDeviceMemory hMemory,
3849 VmaSuballocationType suballocationType,
3853 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3854 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
3855 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
3858 m_SuballocationType = (uint8_t)suballocationType;
3859 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3860 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
3861 m_DedicatedAllocation.m_hMemory = hMemory;
3862 m_DedicatedAllocation.m_pMappedData = pMappedData;
// --- Simple inline accessors of VmaAllocation_T (mangled extraction; code
// left byte-identical, comments only). ---
3865 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
3866 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
3867 VkDeviceSize GetSize()
const {
return m_Size; }
// True when m_pUserData points to an owned, heap-copied C string rather than
// an opaque user pointer (see FLAG_USER_DATA_STRING).
3868 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
3869 void* GetUserData()
const {
return m_pUserData; }
3870 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
3871 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
// Only valid for block allocations (asserted).
3873 VmaDeviceMemoryBlock* GetBlock()
const 3875 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3876 return m_BlockAllocation.m_Block;
3878 VkDeviceSize GetOffset()
const;
3879 VkDeviceMemory GetMemory()
const;
3880 uint32_t GetMemoryTypeIndex()
const;
// Persistent mapping is tracked as a flag bit inside m_MapCount.
3881 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
3882 void* GetMappedData()
const;
3883 bool CanBecomeLost()
const;
3886 uint32_t GetLastUseFrameIndex()
const 3888 return m_LastUseFrameIndex.load();
// Atomic CAS on the last-use frame index (used by the lost-allocation logic).
3890 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
3892 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
3902 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3904 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
3906 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
// Map/unmap helpers, split per allocation type (defined out-of-line below).
3917 void BlockAllocMap();
3918 void BlockAllocUnmap();
3919 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
// Statistics-only bookkeeping, compiled in only with VMA_STATS_STRING_ENABLED.
3922 #if VMA_STATS_STRING_ENABLED 3923 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
3924 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
// May be set only once (asserted zero beforehand).
3926 void InitBufferImageUsage(uint32_t bufferImageUsage)
3928 VMA_ASSERT(m_BufferImageUsage == 0);
3929 m_BufferImageUsage = bufferImageUsage;
3932 void PrintParameters(
class VmaJsonWriter& json)
const;
// --- Data members. m_BlockAllocation and m_DedicatedAllocation are
// alternative representations selected by m_Type (presumably members of an
// anonymous union in the full source — confirm; the union keyword is not
// visible in this extraction). ---
3936 VkDeviceSize m_Alignment;
3937 VkDeviceSize m_Size;
3939 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
3941 uint8_t m_SuballocationType;
3948 struct BlockAllocation
3951 VmaDeviceMemoryBlock* m_Block;
3952 VkDeviceSize m_Offset;
3953 bool m_CanBecomeLost;
3957 struct DedicatedAllocation
3959 uint32_t m_MemoryTypeIndex;
3960 VkDeviceMemory m_hMemory;
3961 void* m_pMappedData;
3967 BlockAllocation m_BlockAllocation;
3969 DedicatedAllocation m_DedicatedAllocation;
3972 #if VMA_STATS_STRING_ENABLED 3973 uint32_t m_CreationFrameIndex;
3974 uint32_t m_BufferImageUsage;
// One region (used or free) within a device memory block's metadata.
// NOTE(review): the size and hAllocation member lines appear to be missing
// from this extraction — they are referenced elsewhere (lhs->size,
// suballoc.hAllocation); confirm against the full header.
3984 struct VmaSuballocation
3986 VkDeviceSize offset;
3989 VmaSuballocationType type;
3992 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost weight charged per allocation that would have to be made lost.
3995 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// Result of searching block metadata for a place to put a new allocation.
4010 struct VmaAllocationRequest
4012 VkDeviceSize offset;
4013 VkDeviceSize sumFreeSize;
4014 VkDeviceSize sumItemSize;
4015 VmaSuballocationList::iterator item;
4016 size_t itemsToMakeLostCount;
// Cost of this request: bytes of non-free items consumed plus a fixed penalty
// per allocation that must be made lost. Lower is better.
4018 VkDeviceSize CalcCost()
const 4020 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// Bookkeeping for the suballocations inside a single VkDeviceMemory block:
// a list of used/free ranges plus an index of free ranges sorted by size.
// (Mangled extraction; code left byte-identical, comments only.)
4028 class VmaBlockMetadata
4030 VMA_CLASS_NO_COPY(VmaBlockMetadata)
4033 ~VmaBlockMetadata();
4034 void Init(VkDeviceSize size);
// Consistency check over all internal structures (debug use).
4037 bool Validate()
const;
4038 VkDeviceSize GetSize()
const {
return m_Size; }
// Used-allocation count = total suballocations minus free ones.
4039 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
4040 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4041 VkDeviceSize GetUnusedRangeSizeMax()
const;
4043 bool IsEmpty()
const;
4045 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4048 #if VMA_STATS_STRING_ENABLED 4049 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// Trivial request covering the whole (empty) block.
4053 void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
// Searches for space for a new allocation; may propose making other
// allocations lost when canMakeOtherLost is set. Returns true on success.
4058 bool CreateAllocationRequest(
4059 uint32_t currentFrameIndex,
4060 uint32_t frameInUseCount,
4061 VkDeviceSize bufferImageGranularity,
4062 VkDeviceSize allocSize,
4063 VkDeviceSize allocAlignment,
4064 VmaSuballocationType allocType,
4065 bool canMakeOtherLost,
4066 VmaAllocationRequest* pAllocationRequest);
4068 bool MakeRequestedAllocationsLost(
4069 uint32_t currentFrameIndex,
4070 uint32_t frameInUseCount,
4071 VmaAllocationRequest* pAllocationRequest);
4073 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4077 const VmaAllocationRequest& request,
4078 VmaSuballocationType type,
4079 VkDeviceSize allocSize,
4084 void FreeAtOffset(VkDeviceSize offset);
// --- Data members ---
4087 VkDeviceSize m_Size;
4088 uint32_t m_FreeCount;
4089 VkDeviceSize m_SumFreeSize;
4090 VmaSuballocationList m_Suballocations;
// Iterators to free suballocations, kept sorted by ascending size (only those
// large enough to be registered — see VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER).
4093 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
4095 bool ValidateFreeSuballocationList()
const;
// Checks whether an allocation fits at/after suballocItem; outputs offset,
// cost data, and how many existing allocations would need to be made lost.
4099 bool CheckAllocation(
4100 uint32_t currentFrameIndex,
4101 uint32_t frameInUseCount,
4102 VkDeviceSize bufferImageGranularity,
4103 VkDeviceSize allocSize,
4104 VkDeviceSize allocAlignment,
4105 VmaSuballocationType allocType,
4106 VmaSuballocationList::const_iterator suballocItem,
4107 bool canMakeOtherLost,
4108 VkDeviceSize* pOffset,
4109 size_t* itemsToMakeLostCount,
4110 VkDeviceSize* pSumFreeSize,
4111 VkDeviceSize* pSumItemSize)
const;
// Free-list maintenance helpers.
4113 void MergeFreeWithNext(VmaSuballocationList::iterator item);
4117 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
4120 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
4123 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// Wraps one VkDeviceMemory handle plus its suballocation metadata and a
// ref-counted CPU mapping. (Mangled extraction; comments only.)
4132 class VmaDeviceMemoryBlock
4134 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
4136 VmaBlockMetadata m_Metadata;
// Destructor checks the block was unmapped and its memory already released
// (m_hMemory reset to VK_NULL_HANDLE by whoever destroys the block).
4140 ~VmaDeviceMemoryBlock()
4142 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
4143 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
4148 uint32_t newMemoryTypeIndex,
4149 VkDeviceMemory newMemory,
4150 VkDeviceSize newSize);
4154 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
4155 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
4156 void* GetMappedData()
const {
return m_pMappedData; }
4159 bool Validate()
const;
// Maps the block's memory `count` times (ref-counted via m_MapCount).
4162 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
4165 VkResult BindBufferMemory(
4169 VkResult BindImageMemory(
4175 uint32_t m_MemoryTypeIndex;
4176 VkDeviceMemory m_hMemory;
// Number of outstanding Map() calls; m_pMappedData is valid while > 0.
4181 uint32_t m_MapCount;
4182 void* m_pMappedData;
// Strict-weak ordering of raw pointers, used for sorted pointer containers.
4185 struct VmaPointerLess
4187 bool operator()(
const void* lhs,
const void* rhs)
const 4193 class VmaDefragmentator;
// Sequence of VmaDeviceMemoryBlock for one memory type — the core pool
// structure. Owns block creation/destruction and defragmentation state.
// (Mangled extraction; comments only.)
4201 struct VmaBlockVector
4203 VMA_CLASS_NO_COPY(VmaBlockVector)
4207 uint32_t memoryTypeIndex,
4208 VkDeviceSize preferredBlockSize,
4209 size_t minBlockCount,
4210 size_t maxBlockCount,
4211 VkDeviceSize bufferImageGranularity,
4212 uint32_t frameInUseCount,
4216 VkResult CreateMinBlocks();
4218 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
4219 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
4220 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
4221 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
4225 bool IsEmpty()
const {
return m_Blocks.empty(); }
4229 uint32_t currentFrameIndex,
4230 const VkMemoryRequirements& vkMemReq,
4232 VmaSuballocationType suballocType,
4241 #if VMA_STATS_STRING_ENABLED 4242 void PrintDetailedMap(
class VmaJsonWriter& json);
// Marks lost every allocation in this vector that can become lost (counted
// into *pLostAllocationCount when non-null).
4245 void MakePoolAllocationsLost(
4246 uint32_t currentFrameIndex,
4247 size_t* pLostAllocationCount);
4249 VmaDefragmentator* EnsureDefragmentator(
4251 uint32_t currentFrameIndex);
4253 VkResult Defragment(
4255 VkDeviceSize& maxBytesToMove,
4256 uint32_t& maxAllocationsToMove);
4258 void DestroyDefragmentator();
4261 friend class VmaDefragmentator;
// Immutable configuration captured at construction.
4264 const uint32_t m_MemoryTypeIndex;
4265 const VkDeviceSize m_PreferredBlockSize;
4266 const size_t m_MinBlockCount;
4267 const size_t m_MaxBlockCount;
4268 const VkDeviceSize m_BufferImageGranularity;
4269 const uint32_t m_FrameInUseCount;
4270 const bool m_IsCustomPool;
4273 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
// True while at least one block is completely empty (candidate for release).
4277 bool m_HasEmptyBlock;
4278 VmaDefragmentator* m_pDefragmentator;
4280 VkDeviceSize CalcMaxBlockSize()
const;
4283 void Remove(VmaDeviceMemoryBlock* pBlock);
// Keeps m_Blocks approximately sorted, one swap per call.
4287 void IncrementallySortBlocks();
4289 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// --- VmaPool_T: a custom pool is a thin wrapper around one VmaBlockVector.
// (Class header line missing from this extraction.) ---
4294 VMA_CLASS_NO_COPY(VmaPool_T)
4296 VmaBlockVector m_BlockVector;
4304 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
// Moves allocations between blocks of one VmaBlockVector to reduce
// fragmentation, within byte/count budgets. (Mangled extraction; comments only.)
4306 #if VMA_STATS_STRING_ENABLED 4311 class VmaDefragmentator
4313 VMA_CLASS_NO_COPY(VmaDefragmentator)
4316 VmaBlockVector*
const m_pBlockVector;
4317 uint32_t m_CurrentFrameIndex;
// Running totals reported via GetBytesMoved()/GetAllocationsMoved().
4318 VkDeviceSize m_BytesMoved;
4319 uint32_t m_AllocationsMoved;
// One allocation registered for defragmentation; *m_pChanged is set when the
// allocation is actually relocated.
4321 struct AllocationInfo
4324 VkBool32* m_pChanged;
4327 m_hAllocation(VK_NULL_HANDLE),
4328 m_pChanged(VMA_NULL)
// Orders AllocationInfo by descending allocation size.
4333 struct AllocationInfoSizeGreater
4335 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 4337 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
4342 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
// Per-block working state: the block, whether it holds allocations that were
// not registered for defragmentation, and a cached mapping for data copies.
4346 VmaDeviceMemoryBlock* m_pBlock;
4347 bool m_HasNonMovableAllocations;
4348 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
4350 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
4352 m_HasNonMovableAllocations(true),
4353 m_Allocations(pAllocationCallbacks),
4354 m_pMappedDataForDefragmentation(VMA_NULL)
// Non-movable iff the block has more allocations than were registered here.
4358 void CalcHasNonMovableAllocations()
4360 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
4361 const size_t defragmentAllocCount = m_Allocations.size();
4362 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
// NOTE(review): "Descecnding" is a typo carried from the original source.
4365 void SortAllocationsBySizeDescecnding()
4367 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
4370 VkResult EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData);
4375 void* m_pMappedDataForDefragmentation;
// Orders BlockInfo* by underlying block pointer (heterogeneous overloads for
// binary search against a raw block pointer).
4378 struct BlockPointerLess
4380 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 4382 return pLhsBlockInfo->m_pBlock < pRhsBlock;
4384 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 4386 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Preference order for destination blocks: movable-only blocks first, then by
// smaller free size (pack into fuller blocks first).
4392 struct BlockInfoCompareMoveDestination
4394 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 4396 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
4400 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
4404 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
4412 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
4413 BlockInfoVector m_Blocks;
// One pass of moving allocations, bounded by the given budgets.
4415 VkResult DefragmentRound(
4416 VkDeviceSize maxBytesToMove,
4417 uint32_t maxAllocationsToMove);
4419 static bool MoveMakesSense(
4420 size_t dstBlockIndex, VkDeviceSize dstOffset,
4421 size_t srcBlockIndex, VkDeviceSize srcOffset);
4426 VmaBlockVector* pBlockVector,
4427 uint32_t currentFrameIndex);
4429 ~VmaDefragmentator();
4431 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
4432 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
4434 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
4436 VkResult Defragment(
4437 VkDeviceSize maxBytesToMove,
4438 uint32_t maxAllocationsToMove);
// The allocator implementation object behind the public VmaAllocator handle:
// holds device properties, per-memory-type block vectors, dedicated
// allocations, and custom pools. (Mangled extraction; comments only.)
4442 struct VmaAllocator_T
4444 VMA_CLASS_NO_COPY(VmaAllocator_T)
4447 bool m_UseKhrDedicatedAllocation;
4449 bool m_AllocationCallbacksSpecified;
4450 VkAllocationCallbacks m_AllocationCallbacks;
// Per-heap byte limits, guarded by their own mutex.
4454 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
4455 VMA_MUTEX m_HeapSizeLimitMutex;
4457 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
4458 VkPhysicalDeviceMemoryProperties m_MemProps;
// One default block vector per memory type.
4461 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
// Dedicated (own VkDeviceMemory) allocations, per memory type, each list
// protected by its own mutex.
4464 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
4465 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
4466 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns user callbacks when they were specified, else null (Vulkan default).
4471 const VkAllocationCallbacks* GetAllocationCallbacks()
const 4473 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
4477 return m_VulkanFunctions;
// Effective granularity: device limit, floored by the debug override constant.
4480 VkDeviceSize GetBufferImageGranularity()
const 4483 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
4484 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
4487 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
4488 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
4490 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 4492 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
4493 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
4496 bool IsIntegratedGpu()
const 4498 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
// Queries memory requirements including the dedicated-allocation preference
// (see VK_KHR_dedicated_allocation handling elsewhere in the file).
4501 void GetBufferMemoryRequirements(
4503 VkMemoryRequirements& memReq,
4504 bool& requiresDedicatedAllocation,
4505 bool& prefersDedicatedAllocation)
const;
4506 void GetImageMemoryRequirements(
4508 VkMemoryRequirements& memReq,
4509 bool& requiresDedicatedAllocation,
4510 bool& prefersDedicatedAllocation)
const;
// Main allocation entry point used by vmaAllocateMemory* functions.
4513 VkResult AllocateMemory(
4514 const VkMemoryRequirements& vkMemReq,
4515 bool requiresDedicatedAllocation,
4516 bool prefersDedicatedAllocation,
4517 VkBuffer dedicatedBuffer,
4518 VkImage dedicatedImage,
4520 VmaSuballocationType suballocType,
4526 void CalculateStats(
VmaStats* pStats);
4528 #if VMA_STATS_STRING_ENABLED 4529 void PrintDetailedMap(
class VmaJsonWriter& json);
4532 VkResult Defragment(
4534 size_t allocationCount,
4535 VkBool32* pAllocationsChanged,
4543 void DestroyPool(
VmaPool pool);
4546 void SetCurrentFrameIndex(uint32_t frameIndex);
4548 void MakePoolAllocationsLost(
4550 size_t* pLostAllocationCount);
// Raw vkAllocateMemory/vkFreeMemory wrappers (apply heap limits, callbacks).
4554 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
4555 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
4560 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
4561 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
4564 VkDeviceSize m_PreferredLargeHeapBlockSize;
4566 VkPhysicalDevice m_PhysicalDevice;
4567 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
// Custom pools created via vmaCreatePool, guarded by m_PoolsMutex.
4569 VMA_MUTEX m_PoolsMutex;
4571 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
4577 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
// Allocation for a single, already-chosen memory type.
4579 VkResult AllocateMemoryOfType(
4580 const VkMemoryRequirements& vkMemReq,
4581 bool dedicatedAllocation,
4582 VkBuffer dedicatedBuffer,
4583 VkImage dedicatedImage,
4585 uint32_t memTypeIndex,
4586 VmaSuballocationType suballocType,
4590 VkResult AllocateDedicatedMemory(
4592 VmaSuballocationType suballocType,
4593 uint32_t memTypeIndex,
4595 bool isUserDataString,
4597 VkBuffer dedicatedBuffer,
4598 VkImage dedicatedImage,
// CPU-side allocation helpers that forward to the allocator's
// VkAllocationCallbacks. (Mangled extraction; comments only.)
4608 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
4610 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
4613 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
4615 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Typed single-object allocation (raw memory; presumably paired with
// placement-new in the full source — the new-expression line is not visible
// in this extraction).
4618 template<
typename T>
4621 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
// Typed array allocation.
4624 template<
typename T>
4625 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
4627 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// Typed delete: destroy then free.
4630 template<
typename T>
4631 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
4636 VmaFree(hAllocator, ptr);
// Typed array delete: destroys elements in reverse order, then frees.
// NOTE(review): the per-element destructor call line is missing from this
// extraction; only the loop header and final VmaFree are visible.
4640 template<
typename T>
4641 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
4645 for(
size_t i = count; i--; )
4647 VmaFree(hAllocator, ptr);
// Minimal append-only string builder over VmaVector<char>, used to build the
// JSON stats string. Only compiled with VMA_STATS_STRING_ENABLED.
// (Mangled extraction; comments only.)
4654 #if VMA_STATS_STRING_ENABLED 4656 class VmaStringBuilder
4659 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
4660 size_t GetLength()
const {
return m_Data.size(); }
// NOTE: the buffer is not NUL-terminated; pair GetData() with GetLength().
4661 const char* GetData()
const {
return m_Data.data(); }
4663 void Add(
char ch) { m_Data.push_back(ch); }
4664 void Add(
const char* pStr);
4665 void AddNewLine() { Add(
'\n'); }
4666 void AddNumber(uint32_t num);
4667 void AddNumber(uint64_t num);
4668 void AddPointer(
const void* ptr);
4671 VmaVector< char, VmaStlAllocator<char> > m_Data;
// Appends a C string by resizing the buffer and memcpy-ing the bytes
// (terminator excluded).
4674 void VmaStringBuilder::Add(
const char* pStr)
4676 const size_t strLen = strlen(pStr);
4679 const size_t oldCount = m_Data.size();
4680 m_Data.resize(oldCount + strLen);
4681 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Numeric/pointer formatting into a stack buffer via Vma*ToStr helpers.
// NOTE(review): the buffer declaration and final Add(buf) lines are missing
// from this extraction.
4685 void VmaStringBuilder::AddNumber(uint32_t num)
4688 VmaUint32ToStr(buf,
sizeof(buf), num);
4692 void VmaStringBuilder::AddNumber(uint64_t num)
4695 VmaUint64ToStr(buf,
sizeof(buf), num);
4699 void VmaStringBuilder::AddPointer(
const void* ptr)
4702 VmaPtrToStr(buf,
sizeof(buf), ptr);
// Streaming JSON writer over a VmaStringBuilder: objects/arrays tracked on an
// explicit stack, strings built incrementally. (Mangled extraction; comments only.)
4706 #endif // #if VMA_STATS_STRING_ENABLED 4711 #if VMA_STATS_STRING_ENABLED 4715 VMA_CLASS_NO_COPY(VmaJsonWriter)
4717 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
4720 void BeginObject(
bool singleLine =
false);
4723 void BeginArray(
bool singleLine =
false);
// WriteString = BeginString + EndString in one call; the Continue* overloads
// append to a string opened with BeginString.
4726 void WriteString(
const char* pStr);
4727 void BeginString(
const char* pStr = VMA_NULL);
4728 void ContinueString(
const char* pStr);
4729 void ContinueString(uint32_t n);
4730 void ContinueString(uint64_t n);
4731 void ContinueString_Pointer(
const void* ptr);
4732 void EndString(
const char* pStr = VMA_NULL);
4734 void WriteNumber(uint32_t n);
4735 void WriteNumber(uint64_t n);
4736 void WriteBool(
bool b);
4740 static const char*
const INDENT;
// Stack entry: collection kind, number of values written so far (objects
// alternate key/value, so parity of valueCount distinguishes them), and
// whether the collection is emitted on a single line.
4742 enum COLLECTION_TYPE
4744 COLLECTION_TYPE_OBJECT,
4745 COLLECTION_TYPE_ARRAY,
4749 COLLECTION_TYPE type;
4750 uint32_t valueCount;
4751 bool singleLineMode;
4754 VmaStringBuilder& m_SB;
4755 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
4756 bool m_InsideString;
4758 void BeginValue(
bool isString);
4759 void WriteIndent(
bool oneLess =
false);
// Two-space indentation unit.
4762 const char*
const VmaJsonWriter::INDENT =
"  ";
4764 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
4766 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
4767 m_InsideString(false)
// Destructor checks all strings and collections were properly closed.
4771 VmaJsonWriter::~VmaJsonWriter()
4773 VMA_ASSERT(!m_InsideString);
4774 VMA_ASSERT(m_Stack.empty());
// --- VmaJsonWriter method bodies. Many brace/emit lines are missing from this
// extraction; code left byte-identical, comments only. ---
// Opens a JSON object and pushes a stack entry for it.
4777 void VmaJsonWriter::BeginObject(
bool singleLine)
4779 VMA_ASSERT(!m_InsideString);
4785 item.type = COLLECTION_TYPE_OBJECT;
4786 item.valueCount = 0;
4787 item.singleLineMode = singleLine;
4788 m_Stack.push_back(item);
// Closes the current object; asserts the stack top is an object.
4791 void VmaJsonWriter::EndObject()
4793 VMA_ASSERT(!m_InsideString);
4798 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
4802 void VmaJsonWriter::BeginArray(
bool singleLine)
4804 VMA_ASSERT(!m_InsideString);
4810 item.type = COLLECTION_TYPE_ARRAY;
4811 item.valueCount = 0;
4812 item.singleLineMode = singleLine;
4813 m_Stack.push_back(item);
4816 void VmaJsonWriter::EndArray()
4818 VMA_ASSERT(!m_InsideString);
4823 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
4827 void VmaJsonWriter::WriteString(
const char* pStr)
// Opens a quoted string; optional pStr is written immediately.
4833 void VmaJsonWriter::BeginString(
const char* pStr)
4835 VMA_ASSERT(!m_InsideString);
4839 m_InsideString =
true;
4840 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4842 ContinueString(pStr);
// Appends characters to the open string, escaping as needed; unsupported
// characters trip the assert below.
4846 void VmaJsonWriter::ContinueString(
const char* pStr)
4848 VMA_ASSERT(m_InsideString);
4850 const size_t strLen = strlen(pStr);
4851 for(
size_t i = 0; i < strLen; ++i)
4884 VMA_ASSERT(0 &&
"Character not currently supported.");
4890 void VmaJsonWriter::ContinueString(uint32_t n)
4892 VMA_ASSERT(m_InsideString);
4896 void VmaJsonWriter::ContinueString(uint64_t n)
4898 VMA_ASSERT(m_InsideString);
4902 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
4904 VMA_ASSERT(m_InsideString);
4905 m_SB.AddPointer(ptr);
// Writes the optional tail and closes the quoted string.
4908 void VmaJsonWriter::EndString(
const char* pStr)
4910 VMA_ASSERT(m_InsideString);
4911 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4913 ContinueString(pStr);
4916 m_InsideString =
false;
4919 void VmaJsonWriter::WriteNumber(uint32_t n)
4921 VMA_ASSERT(!m_InsideString);
4926 void VmaJsonWriter::WriteNumber(uint64_t n)
4928 VMA_ASSERT(!m_InsideString);
4933 void VmaJsonWriter::WriteBool(
bool b)
4935 VMA_ASSERT(!m_InsideString);
4937 m_SB.Add(b ?
"true" :
"false");
4940 void VmaJsonWriter::WriteNull()
4942 VMA_ASSERT(!m_InsideString);
// Emits the separator/indent that must precede a new value. Inside an object,
// even valueCount means a key is expected (must be a string); odd means the
// value following a key.
4947 void VmaJsonWriter::BeginValue(
bool isString)
4949 if(!m_Stack.empty())
4951 StackItem& currItem = m_Stack.back();
4952 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4953 currItem.valueCount % 2 == 0)
4955 VMA_ASSERT(isString);
4958 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4959 currItem.valueCount % 2 != 0)
4963 else if(currItem.valueCount > 0)
4972 ++currItem.valueCount;
// Newline + one INDENT per stack level (oneLess drops one level, used when
// closing a collection); suppressed in single-line mode.
4976 void VmaJsonWriter::WriteIndent(
bool oneLess)
4978 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
4982 size_t count = m_Stack.size();
4983 if(count > 0 && oneLess)
4987 for(
size_t i = 0; i < count; ++i)
// Sets the allocation's user data. In string mode the previous string is
// freed and the new one (if any) deep-copied; otherwise the raw pointer is
// stored as-is. (Mangled extraction; comments only.)
4994 #endif // #if VMA_STATS_STRING_ENABLED 4998 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
5000 if(IsUserDataString())
// Setting the same string pointer it already owns would be use-after-free.
5002 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
5004 FreeUserDataString(hAllocator);
5006 if(pUserData != VMA_NULL)
5008 const char*
const newStrSrc = (
char*)pUserData;
5009 const size_t newStrLen = strlen(newStrSrc);
// +1 copies the NUL terminator too.
5010 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
5011 memcpy(newStrDst, newStrSrc, newStrLen + 1);
5012 m_pUserData = newStrDst;
5017 m_pUserData = pUserData;
// Moves this block allocation to a different block/offset (used by the
// defragmentator). When moving across blocks, a persistent/outstanding
// mapping is transferred: old block unmapped, new block mapped with the same
// reference count.
// NOTE(review): the hAllocator parameter line is missing from this
// extraction; it is used in the body below.
5021 void VmaAllocation_T::ChangeBlockAllocation(
5023 VmaDeviceMemoryBlock* block,
5024 VkDeviceSize offset)
5026 VMA_ASSERT(block != VMA_NULL);
5027 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5030 if(block != m_BlockAllocation.m_Block)
5032 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
5033 if(IsPersistentMap())
5035 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
5036 block->Map(hAllocator, mapRefCount, VMA_NULL);
5039 m_BlockAllocation.m_Block = block;
5040 m_BlockAllocation.m_Offset = offset;
// --- Type-dispatching accessors of VmaAllocation_T. Each switches on m_Type;
// switch/case and default lines are partially missing from this extraction.
// Code left byte-identical, comments only. ---
// Offset within the block for block allocations; dedicated allocations start
// at offset 0 of their own VkDeviceMemory (case body not visible here).
5043 VkDeviceSize VmaAllocation_T::GetOffset()
const 5047 case ALLOCATION_TYPE_BLOCK:
5048 return m_BlockAllocation.m_Offset;
5049 case ALLOCATION_TYPE_DEDICATED:
5057 VkDeviceMemory VmaAllocation_T::GetMemory()
const 5061 case ALLOCATION_TYPE_BLOCK:
5062 return m_BlockAllocation.m_Block->GetDeviceMemory();
5063 case ALLOCATION_TYPE_DEDICATED:
5064 return m_DedicatedAllocation.m_hMemory;
5067 return VK_NULL_HANDLE;
5071 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 5075 case ALLOCATION_TYPE_BLOCK:
5076 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
5077 case ALLOCATION_TYPE_DEDICATED:
5078 return m_DedicatedAllocation.m_MemoryTypeIndex;
// Mapped pointer: block allocations add their offset to the block's mapping;
// dedicated allocations return their own cached pointer.
5085 void* VmaAllocation_T::GetMappedData()
const 5089 case ALLOCATION_TYPE_BLOCK:
5092 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
5093 VMA_ASSERT(pBlockData != VMA_NULL);
5094 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
5101 case ALLOCATION_TYPE_DEDICATED:
5102 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
5103 return m_DedicatedAllocation.m_pMappedData;
// Only block allocations can become lost; dedicated never can.
5110 bool VmaAllocation_T::CanBecomeLost()
const 5114 case ALLOCATION_TYPE_BLOCK:
5115 return m_BlockAllocation.m_CanBecomeLost;
5116 case ALLOCATION_TYPE_DEDICATED:
5124 VmaPool VmaAllocation_T::GetPool()
const 5126 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5127 return m_BlockAllocation.m_hPool;
// Attempts to mark this allocation lost via CAS on the last-use frame index.
// Fails when it is already lost, or was used within the last frameInUseCount
// frames. Retries (loop lines not visible here) when the CAS loses a race.
5130 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5132 VMA_ASSERT(CanBecomeLost());
5138 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
5141 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
5146 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
5152 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Writes this allocation's key/value pairs into an already-open JSON object:
// Type, Size, optional UserData, creation/last-use frame indices, and Usage
// when nonzero. Only compiled with VMA_STATS_STRING_ENABLED.
// (Mangled extraction; comments only.)
5162 #if VMA_STATS_STRING_ENABLED 5165 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
5174 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 5176 json.WriteString(
"Type");
5177 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
5179 json.WriteString(
"Size");
5180 json.WriteNumber(m_Size);
5182 if(m_pUserData != VMA_NULL)
5184 json.WriteString(
"UserData");
// String user data is emitted as a JSON string; raw pointers as a
// pointer-formatted string (BeginString/EndString lines not visible here).
5185 if(IsUserDataString())
5187 json.WriteString((
const char*)m_pUserData);
5192 json.ContinueString_Pointer(m_pUserData);
5197 json.WriteString(
"CreationFrameIndex");
5198 json.WriteNumber(m_CreationFrameIndex);
5200 json.WriteString(
"LastUseFrameIndex");
5201 json.WriteNumber(GetLastUseFrameIndex());
5203 if(m_BufferImageUsage != 0)
5205 json.WriteString(
"Usage");
5206 json.WriteNumber(m_BufferImageUsage);
5212 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
5214 VMA_ASSERT(IsUserDataString());
5215 if(m_pUserData != VMA_NULL)
5217 char*
const oldStr = (
char*)m_pUserData;
5218 const size_t oldStrLen = strlen(oldStr);
5219 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
5220 m_pUserData = VMA_NULL;
// --- Per-allocation map reference counting. The low 7 bits of m_MapCount are
// the reference count; the high flag bit marks persistent mapping. Increment/
// decrement lines are missing from this extraction; code left byte-identical,
// comments only. ---
// Bumps the map refcount of a block allocation, capped at 0x7F.
5224 void VmaAllocation_T::BlockAllocMap()
5226 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
5228 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
5234 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
// Drops one map reference of a block allocation; asserts on underflow.
5238 void VmaAllocation_T::BlockAllocUnmap()
5240 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
5242 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
5248 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// Maps a dedicated allocation. If already mapped, reuses the cached pointer
// and bumps the refcount (cap 0x7F); otherwise calls vkMapMemory through the
// allocator's dispatch table and caches the result.
5252 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
5254 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
5258 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
5260 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
5261 *ppData = m_DedicatedAllocation.m_pMappedData;
5267 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
5268 return VK_ERROR_MEMORY_MAP_FAILED;
5273 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5274 hAllocator->m_hDevice,
5275 m_DedicatedAllocation.m_hMemory,
5280 if(result == VK_SUCCESS)
5282 m_DedicatedAllocation.m_pMappedData = *ppData;
// Unmaps a dedicated allocation; when the last reference is released the
// cached pointer is cleared and vkUnmapMemory is called.
5289 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
5291 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
5293 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
5298 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
5299 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
5300 hAllocator->m_hDevice,
5301 m_DedicatedAllocation.m_hMemory);
5306 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// Serializes one VmaStatInfo as a JSON object: counts, byte totals, and
// min/avg/max sub-objects for allocation and unused-range sizes. The
// WriteNumber argument lines are missing from this extraction; code left
// byte-identical, comments only.
5310 #if VMA_STATS_STRING_ENABLED 5312 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
5316 json.WriteString(
"Blocks");
5319 json.WriteString(
"Allocations");
5322 json.WriteString(
"UnusedRanges");
5325 json.WriteString(
"UsedBytes");
5328 json.WriteString(
"UnusedBytes");
5333 json.WriteString(
"AllocationSize");
5334 json.BeginObject(
true);
5335 json.WriteString(
"Min");
5337 json.WriteString(
"Avg");
5339 json.WriteString(
"Max");
5346 json.WriteString(
"UnusedRangeSize");
5347 json.BeginObject(
true);
5348 json.WriteString(
"Min");
5350 json.WriteString(
"Avg");
5352 json.WriteString(
"Max");
// Comparator used for the size-sorted free-suballocation index: orders
// iterators by the pointed-to suballocation size, with a heterogeneous
// overload against a bare VkDeviceSize for binary search.
5360 #endif // #if VMA_STATS_STRING_ENABLED 5362 struct VmaSuballocationItemSizeLess
5365 const VmaSuballocationList::iterator lhs,
5366 const VmaSuballocationList::iterator rhs)
const 5368 return lhs->size < rhs->size;
5371 const VmaSuballocationList::iterator lhs,
5372 VkDeviceSize rhsSize)
const 5374 return lhs->size < rhsSize;
// Constructor: containers use the allocator's CPU allocation callbacks.
// (Mangled extraction; comments only.)
5381 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
5385 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
5386 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
5390 VmaBlockMetadata::~VmaBlockMetadata()
// Initializes metadata for a block of `size` bytes: one single free
// suballocation spanning the whole block, registered in the by-size index.
// NOTE(review): the decrement of the end() iterator before registering it
// is not visible in this extraction — confirm against the full source.
5394 void VmaBlockMetadata::Init(VkDeviceSize size)
5398 m_SumFreeSize = size;
5400 VmaSuballocation suballoc = {};
5401 suballoc.offset = 0;
5402 suballoc.size = size;
5403 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5404 suballoc.hAllocation = VK_NULL_HANDLE;
5406 m_Suballocations.push_back(suballoc);
5407 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
5409 m_FreeSuballocationsBySize.push_back(suballocItem);
// Full consistency check of the metadata (debug): walks the suballocation
// list verifying contiguous offsets, no two adjacent free ranges, matching
// hAllocation/offset/size, and that the by-size index is sorted, free-only,
// and complete; finally cross-checks aggregate counters. (Mangled extraction;
// return-false lines are missing; code left byte-identical, comments only.)
5412 bool VmaBlockMetadata::Validate()
const 5414 if(m_Suballocations.empty())
// Running expected offset of the next suballocation.
5420 VkDeviceSize calculatedOffset = 0;
5422 uint32_t calculatedFreeCount = 0;
5424 VkDeviceSize calculatedSumFreeSize = 0;
// Free ranges large enough to appear in m_FreeSuballocationsBySize.
5427 size_t freeSuballocationsToRegister = 0;
5429 bool prevFree =
false;
5431 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5432 suballocItem != m_Suballocations.cend();
5435 const VmaSuballocation& subAlloc = *suballocItem;
// Suballocations must be contiguous.
5438 if(subAlloc.offset != calculatedOffset)
5443 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Adjacent free ranges should have been merged.
5445 if(prevFree && currFree)
// Free <=> null allocation handle.
5450 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
5457 calculatedSumFreeSize += subAlloc.size;
5458 ++calculatedFreeCount;
5459 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5461 ++freeSuballocationsToRegister;
// Used ranges: the allocation object must agree on offset and size.
5466 if(subAlloc.hAllocation->GetOffset() != subAlloc.offset)
5470 if(subAlloc.hAllocation->GetSize() != subAlloc.size)
5476 calculatedOffset += subAlloc.size;
5477 prevFree = currFree;
5482 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
// The by-size index must reference only free ranges, sorted ascending.
5487 VkDeviceSize lastSize = 0;
5488 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
5490 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
5493 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
5498 if(suballocItem->size < lastSize)
5503 lastSize = suballocItem->size;
// Aggregate counters must match what the walk computed.
5507 if(!ValidateFreeSuballocationList() ||
5508 (calculatedOffset != m_Size) ||
5509 (calculatedSumFreeSize != m_SumFreeSize) ||
5510 (calculatedFreeCount != m_FreeCount))
// Largest free range = last entry of the ascending by-size index.
5518 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 5520 if(!m_FreeSuballocationsBySize.empty())
5522 return m_FreeSuballocationsBySize.back()->size;
// Empty block = exactly one suballocation and it is free.
5530 bool VmaBlockMetadata::IsEmpty()
const 5532 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Fills a VmaStatInfo from this block's suballocations (per-range accumulation
// lines are missing from this extraction; code left byte-identical, comments only).
5535 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 5539 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5551 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5552 suballocItem != m_Suballocations.cend();
5555 const VmaSuballocation& suballoc = *suballocItem;
5556 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Accumulates this block's totals into a VmaPoolStats.
5569 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 5571 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5573 inoutStats.
size += m_Size;
// Serializes the block's layout as JSON: totals plus one object per
// suballocation (offset, then either the FREE type+size or the owning
// allocation's parameters). Only with VMA_STATS_STRING_ENABLED.
5580 #if VMA_STATS_STRING_ENABLED 5582 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 5586 json.WriteString(
"TotalBytes");
5587 json.WriteNumber(m_Size);
5589 json.WriteString(
"UnusedBytes");
5590 json.WriteNumber(m_SumFreeSize);
5592 json.WriteString(
"Allocations");
5593 json.WriteNumber((uint64_t)m_Suballocations.size() - m_FreeCount);
5595 json.WriteString(
"UnusedRanges");
5596 json.WriteNumber(m_FreeCount);
5598 json.WriteString(
"Suballocations");
5601 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5602 suballocItem != m_Suballocations.cend();
5603 ++suballocItem, ++i)
5605 json.BeginObject(
true);
5607 json.WriteString(
"Offset");
5608 json.WriteNumber(suballocItem->offset);
5610 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5612 json.WriteString(
"Type");
5613 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
5615 json.WriteString(
"Size");
5616 json.WriteNumber(suballocItem->size);
5620 suballocItem->hAllocation->PrintParameters(json);
// Builds the trivial request for an empty block: the whole block starting at
// offset 0, no items to make lost.
5630 #endif // #if VMA_STATS_STRING_ENABLED 5642 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
5644 VMA_ASSERT(IsEmpty());
5645 pAllocationRequest->offset = 0;
5646 pAllocationRequest->sumFreeSize = m_SumFreeSize;
5647 pAllocationRequest->sumItemSize = 0;
5648 pAllocationRequest->item = m_Suballocations.begin();
5649 pAllocationRequest->itemsToMakeLostCount = 0;
// Searches this block for space for a new allocation.
// Strategy: (1) without making others lost — binary-search the by-size free
// list for the first sufficiently large range, then probe candidates either
// best-fit (ascending) or worst-fit (descending; the VMA_BEST_FIT branch lines
// are missing from this extraction). (2) With canMakeOtherLost — scan every
// suballocation that is free or can become lost and keep the request with the
// lowest CalcCost(). Returns true when a request was produced.
// (Mangled extraction; several argument/brace lines missing; code left
// byte-identical, comments only.)
5652 bool VmaBlockMetadata::CreateAllocationRequest(
5653 uint32_t currentFrameIndex,
5654 uint32_t frameInUseCount,
5655 VkDeviceSize bufferImageGranularity,
5656 VkDeviceSize allocSize,
5657 VkDeviceSize allocAlignment,
5658 VmaSuballocationType allocType,
5659 bool canMakeOtherLost,
5660 VmaAllocationRequest* pAllocationRequest)
5662 VMA_ASSERT(allocSize > 0);
5663 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5664 VMA_ASSERT(pAllocationRequest != VMA_NULL);
5665 VMA_HEAVY_ASSERT(Validate());
// Early out: cannot possibly fit and not allowed to evict.
5668 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
5674 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
5675 if(freeSuballocCount > 0)
// First free range whose size is >= the needed size (lower bound).
5680 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5681 m_FreeSuballocationsBySize.data(),
5682 m_FreeSuballocationsBySize.data() + freeSuballocCount,
5684 VmaSuballocationItemSizeLess());
5685 size_t index = it - m_FreeSuballocationsBySize.data();
// Ascending probe from the lower bound (best-fit direction).
5686 for(; index < freeSuballocCount; ++index)
5691 bufferImageGranularity,
5695 m_FreeSuballocationsBySize[index],
5697 &pAllocationRequest->offset,
5698 &pAllocationRequest->itemsToMakeLostCount,
5699 &pAllocationRequest->sumFreeSize,
5700 &pAllocationRequest->sumItemSize))
5702 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Descending probe from the largest free range (worst-fit direction).
5710 for(
size_t index = freeSuballocCount; index--; )
5715 bufferImageGranularity,
5719 m_FreeSuballocationsBySize[index],
5721 &pAllocationRequest->offset,
5722 &pAllocationRequest->itemsToMakeLostCount,
5723 &pAllocationRequest->sumFreeSize,
5724 &pAllocationRequest->sumItemSize))
5726 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Eviction path: brute-force over all candidates, keep the cheapest request.
5733 if(canMakeOtherLost)
// VK_WHOLE_SIZE marks "no request found yet" for the cost comparison below.
5737 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
5738 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
5740 VmaAllocationRequest tmpAllocRequest = {};
5741 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
5742 suballocIt != m_Suballocations.end();
5745 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
5746 suballocIt->hAllocation->CanBecomeLost())
5751 bufferImageGranularity,
5757 &tmpAllocRequest.offset,
5758 &tmpAllocRequest.itemsToMakeLostCount,
5759 &tmpAllocRequest.sumFreeSize,
5760 &tmpAllocRequest.sumItemSize))
5762 tmpAllocRequest.item = suballocIt;
5764 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
5766 *pAllocationRequest = tmpAllocRequest;
// Success iff some candidate replaced the VK_WHOLE_SIZE sentinel.
5772 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Evicts ("makes lost") the allocations counted in
// pAllocationRequest->itemsToMakeLostCount, starting at the request's item.
// Each successfully lost allocation is turned into a free suballocation via
// FreeSuballocation(), which may merge neighbors; the returned iterator
// keeps the request's item valid across that mutation.
5781 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
5782 uint32_t currentFrameIndex,
5783 uint32_t frameInUseCount,
5784 VmaAllocationRequest* pAllocationRequest)
5786 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Skip over suballocations that are already free.
5788 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
5790 ++pAllocationRequest->item;
5792 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5793 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
5794 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
5795 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
5797 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
5798 --pAllocationRequest->itemsToMakeLostCount;
// Postconditions: the request now points at a free suballocation.
5806 VMA_HEAVY_ASSERT(Validate());
5807 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5808 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Walks every suballocation and makes lost all allocations that can become
// lost for the given frame window. Returns how many were evicted.
// FreeSuballocation() returns a valid iterator after merging, keeping the
// traversal safe.
5813 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5815 uint32_t lostAllocationCount = 0;
5816 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
5817 it != m_Suballocations.end();
5820 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
5821 it->hAllocation->CanBecomeLost() &&
5822 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
5824 it = FreeSuballocation(it);
5825 ++lostAllocationCount;
5828 return lostAllocationCount;
// Commits a previously computed allocation request: converts the targeted
// free suballocation into an occupied one, and re-inserts any leftover
// space before/after the allocation as new free suballocations.
5831 void VmaBlockMetadata::Alloc(
5832 const VmaAllocationRequest& request,
5833 VmaSuballocationType type,
5834 VkDeviceSize allocSize,
5837 VMA_ASSERT(request.item != m_Suballocations.end());
5838 VmaSuballocation& suballoc = *request.item;
5840 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// paddingBegin/paddingEnd are the unused slices of the free range left
// around the (possibly aligned) allocation.
5842 VMA_ASSERT(request.offset >= suballoc.offset);
5843 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
5844 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
5845 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The item leaves the free-by-size index before being repurposed.
5849 UnregisterFreeSuballocation(request.item);
5851 suballoc.offset = request.offset;
5852 suballoc.size = allocSize;
5853 suballoc.type = type;
5854 suballoc.hAllocation = hAllocation;
// Tail padding becomes a new free suballocation inserted after the item.
5859 VmaSuballocation paddingSuballoc = {};
5860 paddingSuballoc.offset = request.offset + allocSize;
5861 paddingSuballoc.size = paddingEnd;
5862 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5863 VmaSuballocationList::iterator next = request.item;
5865 const VmaSuballocationList::iterator paddingEndItem =
5866 m_Suballocations.insert(next, paddingSuballoc);
5867 RegisterFreeSuballocation(paddingEndItem);
// Head padding becomes a new free suballocation inserted before the item.
5873 VmaSuballocation paddingSuballoc = {};
5874 paddingSuballoc.offset = request.offset - paddingBegin;
5875 paddingSuballoc.size = paddingBegin;
5876 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5877 const VmaSuballocationList::iterator paddingBeginItem =
5878 m_Suballocations.insert(request.item, paddingSuballoc);
5879 RegisterFreeSuballocation(paddingBeginItem);
// Update aggregate counters: one free range consumed, padding ranges
// (added above) are accounted for in the lines around this one.
5883 m_FreeCount = m_FreeCount - 1;
5884 if(paddingBegin > 0)
5892 m_SumFreeSize -= allocSize;
// Body of VmaBlockMetadata::Free(allocation): linear search for the
// suballocation owning `allocation`, then release it via FreeSuballocation.
// Asserts if the allocation does not belong to this block.
5897 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5898 suballocItem != m_Suballocations.end();
5901 VmaSuballocation& suballoc = *suballocItem;
5902 if(suballoc.hAllocation == allocation)
5904 FreeSuballocation(suballocItem);
5905 VMA_HEAVY_ASSERT(Validate());
// Reaching this point means the allocation was not in this block's list.
5909 VMA_ASSERT(0 &&
"Not found!");
// Frees the suballocation that starts exactly at `offset`.
// Linear search, mirroring Free(allocation) above; asserts when no
// suballocation has that offset.
5912 void VmaBlockMetadata::FreeAtOffset(VkDeviceSize offset)
5914 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5915 suballocItem != m_Suballocations.end();
5918 VmaSuballocation& suballoc = *suballocItem;
5919 if(suballoc.offset == offset)
5921 FreeSuballocation(suballocItem);
5925 VMA_ASSERT(0 &&
"Not found!");
// Sanity-checks m_FreeSuballocationsBySize: every entry must reference a
// FREE suballocation, be at least the minimum registrable size, and the
// vector must be sorted by ascending size (lastSize tracks the previous
// entry for the ordering check).
5928 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 5930 VkDeviceSize lastSize = 0;
5931 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5933 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5935 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
5940 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5945 if(it->size < lastSize)
5951 lastSize = it->size;
// Checks whether an allocation of allocSize/allocAlignment/allocType can be
// placed starting at suballocItem. Computes the final aligned *pOffset and,
// when canMakeOtherLost, counts the allocations that would have to be made
// lost (*itemsToMakeLostCount) plus the byte sums used for cost ranking.
// The two top-level branches handle the "may evict" and "free range only"
// cases; their alignment and bufferImageGranularity logic is parallel.
5956 bool VmaBlockMetadata::CheckAllocation(
5957 uint32_t currentFrameIndex,
5958 uint32_t frameInUseCount,
5959 VkDeviceSize bufferImageGranularity,
5960 VkDeviceSize allocSize,
5961 VkDeviceSize allocAlignment,
5962 VmaSuballocationType allocType,
5963 VmaSuballocationList::const_iterator suballocItem,
5964 bool canMakeOtherLost,
5965 VkDeviceSize* pOffset,
5966 size_t* itemsToMakeLostCount,
5967 VkDeviceSize* pSumFreeSize,
5968 VkDeviceSize* pSumItemSize)
const 5970 VMA_ASSERT(allocSize > 0);
5971 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5972 VMA_ASSERT(suballocItem != m_Suballocations.cend());
5973 VMA_ASSERT(pOffset != VMA_NULL);
5975 *itemsToMakeLostCount = 0;
// ---- Branch 1: evictions allowed ----
5979 if(canMakeOtherLost)
5981 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5983 *pSumFreeSize = suballocItem->size;
// Occupied item: only usable if its allocation can be made lost and is
// old enough relative to the frame-in-use window.
5987 if(suballocItem->hAllocation->CanBecomeLost() &&
5988 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5990 ++*itemsToMakeLostCount;
5991 *pSumItemSize = suballocItem->size;
// Remaining space in the whole block from this offset must fit the request.
6000 if(m_Size - suballocItem->offset < allocSize)
6006 *pOffset = suballocItem->offset;
// Leave a debug margin before the allocation (except at block start).
6009 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
6011 *pOffset += VMA_DEBUG_MARGIN;
// Apply the stricter of requested alignment and debug alignment.
6015 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
6016 *pOffset = VmaAlignUp(*pOffset, alignment);
// Respect bufferImageGranularity against the PREVIOUS suballocations: if a
// conflicting resource type shares the same "page", bump alignment up.
6020 if(bufferImageGranularity > 1)
6022 bool bufferImageGranularityConflict =
false;
6023 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
6024 while(prevSuballocItem != m_Suballocations.cbegin())
6027 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
6028 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
6030 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
6032 bufferImageGranularityConflict =
true;
6040 if(bufferImageGranularityConflict)
6042 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Alignment pushed the offset past this suballocation entirely.
6048 if(*pOffset >= suballocItem->offset + suballocItem->size)
6054 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
6057 VmaSuballocationList::const_iterator next = suballocItem;
// End margin is only required when something follows the allocation.
6059 const VkDeviceSize requiredEndMargin =
6060 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
6062 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
6064 if(suballocItem->offset + totalSize > m_Size)
// The request may span several successive suballocations; walk forward
// accumulating free space and lost-able allocations until covered.
6071 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
6072 if(totalSize > suballocItem->size)
6074 VkDeviceSize remainingSize = totalSize - suballocItem->size;
6075 while(remainingSize > 0)
6078 if(lastSuballocItem == m_Suballocations.cend())
6082 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6084 *pSumFreeSize += lastSuballocItem->size;
6088 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
6089 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
6090 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
6092 ++*itemsToMakeLostCount;
6093 *pSumItemSize += lastSuballocItem->size;
6100 remainingSize = (lastSuballocItem->size < remainingSize) ?
6101 remainingSize - lastSuballocItem->size : 0;
// Granularity check against FOLLOWING suballocations: conflicting neighbors
// on the same page must also be lost-able, else the placement fails.
6107 if(bufferImageGranularity > 1)
6109 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
6111 while(nextSuballocItem != m_Suballocations.cend())
6113 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
6114 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
6116 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
6118 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
6119 if(nextSuballoc.hAllocation->CanBecomeLost() &&
6120 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
6122 ++*itemsToMakeLostCount;
// ---- Branch 2: no evictions, suballocItem must itself be FREE ----
6141 const VmaSuballocation& suballoc = *suballocItem;
6142 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
6144 *pSumFreeSize = suballoc.size;
6147 if(suballoc.size < allocSize)
6153 *pOffset = suballoc.offset;
6156 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
6158 *pOffset += VMA_DEBUG_MARGIN;
6162 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
6163 *pOffset = VmaAlignUp(*pOffset, alignment);
// Same previous-neighbor granularity handling as in branch 1.
6167 if(bufferImageGranularity > 1)
6169 bool bufferImageGranularityConflict =
false;
6170 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
6171 while(prevSuballocItem != m_Suballocations.cbegin())
6174 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
6175 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
6177 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
6179 bufferImageGranularityConflict =
true;
6187 if(bufferImageGranularityConflict)
6189 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
6194 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
6197 VmaSuballocationList::const_iterator next = suballocItem;
6199 const VkDeviceSize requiredEndMargin =
6200 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Single free range must fit padding + allocation + end margin.
6203 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Next-neighbor granularity conflict makes this placement unusable here
// (no evictions available in this branch).
6210 if(bufferImageGranularity > 1)
6212 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
6214 while(nextSuballocItem != m_Suballocations.cend())
6216 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
6217 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
6219 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges the FREE suballocation at `item` with its (also FREE) successor:
// the successor's size is folded into `item` and the successor is erased
// from the list. Both must be free on entry.
6238 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
6240 VMA_ASSERT(item != m_Suballocations.end());
6241 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6243 VmaSuballocationList::iterator nextItem = item;
6245 VMA_ASSERT(nextItem != m_Suballocations.end());
6246 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
6248 item->size += nextItem->size;
6250 m_Suballocations.erase(nextItem);
// Turns an occupied suballocation into a FREE one, merges it with adjacent
// free neighbors (prev and/or next), and registers the resulting free range
// in m_FreeSuballocationsBySize. Returns an iterator to the merged range.
6253 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
6256 VmaSuballocation& suballoc = *suballocItem;
6257 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6258 suballoc.hAllocation = VK_NULL_HANDLE;
6262 m_SumFreeSize += suballoc.size;
// Determine which neighbors are free so we know what to merge with.
6265 bool mergeWithNext =
false;
6266 bool mergeWithPrev =
false;
6268 VmaSuballocationList::iterator nextItem = suballocItem;
6270 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
6272 mergeWithNext =
true;
6275 VmaSuballocationList::iterator prevItem = suballocItem;
6276 if(suballocItem != m_Suballocations.begin())
6279 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6281 mergeWithPrev =
true;
// Merged neighbors must leave the free-by-size index before they are
// absorbed, then the surviving range is re-registered once.
6287 UnregisterFreeSuballocation(nextItem);
6288 MergeFreeWithNext(suballocItem);
6293 UnregisterFreeSuballocation(prevItem);
6294 MergeFreeWithNext(prevItem);
6295 RegisterFreeSuballocation(prevItem);
6300 RegisterFreeSuballocation(suballocItem);
6301 return suballocItem;
// Inserts `item` into m_FreeSuballocationsBySize, keeping the vector sorted
// by size. Ranges smaller than VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER
// are deliberately not indexed (they are still in m_Suballocations).
6305 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
6307 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6308 VMA_ASSERT(item->size > 0);
6312 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
6314 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6316 if(m_FreeSuballocationsBySize.empty())
6318 m_FreeSuballocationsBySize.push_back(item);
6322 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes `item` from m_FreeSuballocationsBySize. Binary-searches for the
// first entry of equal size, then scans forward through the run of
// same-sized entries until the exact iterator is found; asserts when the
// item is not present (it must have been registered).
6330 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
6332 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6333 VMA_ASSERT(item->size > 0);
6337 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Small ranges were never registered; nothing to remove for them.
6339 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6341 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
6342 m_FreeSuballocationsBySize.data(),
6343 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
6345 VmaSuballocationItemSizeLess());
6346 for(
size_t index = it - m_FreeSuballocationsBySize.data();
6347 index < m_FreeSuballocationsBySize.size();
6350 if(m_FreeSuballocationsBySize[index] == item)
6352 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Still inside the run of equal-sized entries, keep scanning.
6355 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
6357 VMA_ASSERT(0 &&
"Not found.");
// Constructor: members start in an "uninitialized" state; real setup
// (memory handle, metadata) happens in Init().
6366 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
6367 m_Metadata(hAllocator),
6368 m_MemoryTypeIndex(UINT32_MAX),
6369 m_hMemory(VK_NULL_HANDLE),
6371 m_pMappedData(VMA_NULL)
6375 void VmaDeviceMemoryBlock::Init(
6376 uint32_t newMemoryTypeIndex,
6377 VkDeviceMemory newMemory,
6378 VkDeviceSize newSize)
6380 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
6382 m_MemoryTypeIndex = newMemoryTypeIndex;
6383 m_hMemory = newMemory;
6385 m_Metadata.Init(newSize);
// Returns the block's VkDeviceMemory to the allocator. All suballocations
// must already be freed — the first assert catches user leaks.
6388 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
6392 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
6394 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
6395 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
6396 m_hMemory = VK_NULL_HANDLE;
// Basic invariant check: block must own memory of nonzero size, then defer
// to the metadata's own Validate().
6399 bool VmaDeviceMemoryBlock::Validate()
const 6401 if((m_hMemory == VK_NULL_HANDLE) ||
6402 (m_Metadata.GetSize() == 0))
6407 return m_Metadata.Validate();
// Reference-counted mapping of the block's memory. If already mapped,
// just bumps m_MapCount and returns the cached pointer; otherwise calls
// vkMapMemory. Protected by the block mutex (when mutexes are enabled).
6410 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
6417 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
// Already mapped: reuse existing mapping.
6420 m_MapCount += count;
6421 VMA_ASSERT(m_pMappedData != VMA_NULL);
6422 if(ppData != VMA_NULL)
6424 *ppData = m_pMappedData;
// First mapping: go through the dispatched Vulkan function pointer.
6430 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6431 hAllocator->m_hDevice,
6437 if(result == VK_SUCCESS)
6439 if(ppData != VMA_NULL)
6441 *ppData = m_pMappedData;
// Decrements the mapping reference count; unmaps via vkUnmapMemory only
// when the count reaches zero. Asserts on unbalanced unmap.
6449 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
6456 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6457 if(m_MapCount >= count)
6459 m_MapCount -= count;
6462 m_pMappedData = VMA_NULL;
6463 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
6468 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Binds a buffer to this block's memory at the allocation's offset,
// serialized on the block mutex so it does not race with a concurrent
// vkMapMemory/vkUnmapMemory on the same VkDeviceMemory.
6472 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
6477 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
6478 hAllocation->GetBlock() ==
this);
6480 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6481 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
6482 hAllocator->m_hDevice,
6485 hAllocation->GetOffset());
// Image counterpart of BindBufferMemory: binds an image to this block's
// memory at the allocation's offset under the block mutex.
6488 VkResult VmaDeviceMemoryBlock::BindImageMemory(
6493 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
6494 hAllocation->GetBlock() ==
this);
6496 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6497 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
6498 hAllocator->m_hDevice,
6501 hAllocation->GetOffset());
// Fragment of a stat-initialization helper (its signature is not visible in
// this excerpt): zeroes the output VmaStatInfo structure.
6506 memset(&outInfo, 0,
sizeof(outInfo));
// Post-processing step for an accumulated VmaStatInfo (body not visible in
// this excerpt — presumably derives averages from sums; verify in full file).
6525 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// Pool constructor: forwards the VmaPoolCreateInfo fields to the embedded
// VmaBlockVector (member-initializer list; some arguments are not visible
// in this excerpt).
6533 VmaPool_T::VmaPool_T(
6538 createInfo.memoryTypeIndex,
6539 createInfo.blockSize,
6540 createInfo.minBlockCount,
6541 createInfo.maxBlockCount,
6543 createInfo.frameInUseCount,
6548 VmaPool_T::~VmaPool_T()
// VmaBlockVector constructor: stores the configuration for one memory type
// (or one custom pool) and starts with an empty block list, no empty-block
// cached flag and no defragmentator.
6552 #if VMA_STATS_STRING_ENABLED 6554 #endif // #if VMA_STATS_STRING_ENABLED 6556 VmaBlockVector::VmaBlockVector(
6558 uint32_t memoryTypeIndex,
6559 VkDeviceSize preferredBlockSize,
6560 size_t minBlockCount,
6561 size_t maxBlockCount,
6562 VkDeviceSize bufferImageGranularity,
6563 uint32_t frameInUseCount,
6564 bool isCustomPool) :
6565 m_hAllocator(hAllocator),
6566 m_MemoryTypeIndex(memoryTypeIndex),
6567 m_PreferredBlockSize(preferredBlockSize),
6568 m_MinBlockCount(minBlockCount),
6569 m_MaxBlockCount(maxBlockCount),
6570 m_BufferImageGranularity(bufferImageGranularity),
6571 m_FrameInUseCount(frameInUseCount),
6572 m_IsCustomPool(isCustomPool),
6573 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
6574 m_HasEmptyBlock(false),
6575 m_pDefragmentator(VMA_NULL)
6579 VmaBlockVector::~VmaBlockVector()
6581 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
6583 for(
size_t i = m_Blocks.size(); i--; )
6585 m_Blocks[i]->Destroy(m_hAllocator);
6586 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount blocks of the preferred size, stopping at the
// first failure.
6590 VkResult VmaBlockVector::CreateMinBlocks()
6592 for(
size_t i = 0; i < m_MinBlockCount; ++i)
6594 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
6595 if(res != VK_SUCCESS)
// Accumulates pool statistics over all blocks, under the vector mutex.
6603 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
6611 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6613 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6615 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6617 VMA_HEAVY_ASSERT(pBlock->Validate());
6618 pBlock->m_Metadata.AddPoolStats(*pStats);
6622 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Main allocation routine for one block vector. Strategy, in order:
//  1) Try every existing block (CreateAllocationRequest with no evictions).
//  2) If allowed, create a new block — for default (non-custom) pools the
//     block size is heuristically halved up to NEW_BLOCK_SIZE_SHIFT_MAX
//     times, and halved again on out-of-memory retries.
//  3) If the caller allows making other allocations lost, repeatedly pick
//     the cheapest eviction candidate across all blocks, up to
//     VMA_ALLOCATION_TRY_COUNT rounds.
// Whole procedure runs under the vector mutex.
6624 VkResult VmaBlockVector::Allocate(
6626 uint32_t currentFrameIndex,
6627 const VkMemoryRequirements& vkMemReq,
6629 VmaSuballocationType suballocType,
6635 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// --- 1) Search existing blocks for free space (no evictions) ---
6639 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6641 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6642 VMA_ASSERT(pCurrBlock);
6643 VmaAllocationRequest currRequest = {};
6644 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6647 m_BufferImageGranularity,
6655 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistently-mapped allocations bump the block's map refcount up front.
6659 VkResult res = pCurrBlock->Map(m_hAllocator, 1, VMA_NULL);
6660 if(res != VK_SUCCESS)
6667 if(pCurrBlock->m_Metadata.IsEmpty())
6669 m_HasEmptyBlock =
false;
6672 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6673 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
6674 (*pAllocation)->InitBlockAllocation(
6683 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
6684 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6685 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// --- 2) Optionally create a new block ---
6690 const bool canCreateNewBlock =
6692 (m_Blocks.size() < m_MaxBlockCount);
6695 if(canCreateNewBlock)
6698 VkDeviceSize newBlockSize = m_PreferredBlockSize;
6699 uint32_t newBlockSizeShift = 0;
6700 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Heuristic for default pools: start with a smaller block when existing
// blocks are small and the request comfortably fits in half the size.
6704 if(m_IsCustomPool ==
false)
6707 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
6708 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
6710 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6711 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= vkMemReq.size * 2)
6713 newBlockSize = smallerNewBlockSize;
6714 ++newBlockSizeShift;
6723 size_t newBlockIndex = 0;
6724 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On allocation failure, keep halving the block size while it still fits
// the request (default pools only).
6726 if(m_IsCustomPool ==
false)
6728 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
6730 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6731 if(smallerNewBlockSize >= vkMemReq.size)
6733 newBlockSize = smallerNewBlockSize;
6734 ++newBlockSizeShift;
6735 res = CreateBlock(newBlockSize, &newBlockIndex);
6744 if(res == VK_SUCCESS)
6746 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
6747 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
6751 res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
6752 if(res != VK_SUCCESS)
// Fresh block: allocate at its beginning.
6759 VmaAllocationRequest allocRequest;
6760 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
6761 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6762 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
6763 (*pAllocation)->InitBlockAllocation(
6766 allocRequest.offset,
6772 VMA_HEAVY_ASSERT(pBlock->Validate());
6773 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
6774 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// --- 3) Allocate by evicting lost-able allocations ---
6782 if(canMakeOtherLost)
6784 uint32_t tryIndex = 0;
6785 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
6787 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
6788 VmaAllocationRequest bestRequest = {};
6789 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Rank candidate requests across all blocks by CalcCost(); cost 0 means a
// perfect spot (nothing to evict) and ends the search early.
6793 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6795 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6796 VMA_ASSERT(pCurrBlock);
6797 VmaAllocationRequest currRequest = {};
6798 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6801 m_BufferImageGranularity,
6808 const VkDeviceSize currRequestCost = currRequest.CalcCost();
6809 if(pBestRequestBlock == VMA_NULL ||
6810 currRequestCost < bestRequestCost)
6812 pBestRequestBlock = pCurrBlock;
6813 bestRequest = currRequest;
6814 bestRequestCost = currRequestCost;
6816 if(bestRequestCost == 0)
6824 if(pBestRequestBlock != VMA_NULL)
6828 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
6829 if(res != VK_SUCCESS)
// The evictions may fail if other threads touched the allocations in the
// meantime — hence the retry loop around this whole section.
6835 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
6841 if(pBestRequestBlock->m_Metadata.IsEmpty())
6843 m_HasEmptyBlock =
false;
6846 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6847 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
6848 (*pAllocation)->InitBlockAllocation(
6857 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
6858 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6859 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
6873 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
6875 return VK_ERROR_TOO_MANY_OBJECTS;
6879 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees a block-suballocated allocation. Under the mutex: drops the
// persistent mapping refcount if needed, releases the suballocation, and
// applies the "keep at most one empty block" policy — the actual
// Destroy/delete of a retired block happens after the lock is released.
6882 void VmaBlockVector::Free(
6885 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
6889 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6891 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
6893 if(hAllocation->IsPersistentMap())
6895 pBlock->Unmap(m_hAllocator, 1);
6898 pBlock->m_Metadata.Free(hAllocation);
6899 VMA_HEAVY_ASSERT(pBlock->Validate());
6901 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// Block became empty: keep one empty block around as a cache, retire any
// additional ones (while respecting m_MinBlockCount).
6904 if(pBlock->m_Metadata.IsEmpty())
6907 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
6909 pBlockToDelete = pBlock;
6915 m_HasEmptyBlock =
true;
// Block not empty, but an empty one exists at the end: it can now go.
6920 else if(m_HasEmptyBlock)
6922 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
6923 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
6925 pBlockToDelete = pLastBlock;
6926 m_Blocks.pop_back();
6927 m_HasEmptyBlock =
false;
6931 IncrementallySortBlocks();
// Destruction is deliberately performed outside the lock — it calls
// vkFreeMemory, which must not be done while holding the mutex.
6936 if(pBlockToDelete != VMA_NULL)
6938 VMA_DEBUG_LOG(
" Deleted empty allocation");
6939 pBlockToDelete->Destroy(m_hAllocator);
6940 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the largest block size currently in the vector; scans backwards
// and can stop early once the preferred size is reached.
6944 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 6946 VkDeviceSize result = 0;
6947 for(
size_t i = m_Blocks.size(); i--; )
6949 result = VMA_MAX(result, m_Blocks[i]->m_Metadata.GetSize());
6950 if(result >= m_PreferredBlockSize)
// Removes pBlock from m_Blocks (it must be present). Does not destroy the
// block object itself — ownership passes to the caller.
6958 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
6960 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6962 if(m_Blocks[blockIndex] == pBlock)
6964 VmaVectorRemove(m_Blocks, blockIndex);
// One bubble-sort pass: swaps a single adjacent pair so that blocks drift
// toward ascending order of free space. Called after each Free; repeated
// calls converge without paying for a full sort at once.
6971 void VmaBlockVector::IncrementallySortBlocks()
6974 for(
size_t i = 1; i < m_Blocks.size(); ++i)
6976 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
6978 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Allocates new VkDeviceMemory of blockSize for this memory type, wraps it
// in a VmaDeviceMemoryBlock, appends it to m_Blocks, and optionally reports
// the new block's index via pNewBlockIndex.
6984 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
6986 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6987 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
6988 allocInfo.allocationSize = blockSize;
6989 VkDeviceMemory mem = VK_NULL_HANDLE;
6990 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
6999 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
7003 allocInfo.allocationSize);
7005 m_Blocks.push_back(pBlock);
7006 if(pNewBlockIndex != VMA_NULL)
7008 *pNewBlockIndex = m_Blocks.size() - 1;
// Writes this block vector's configuration and all of its blocks as JSON.
// Custom pools emit pool parameters (MemoryTypeIndex, BlockSize, block
// count limits, FrameInUseCount); default pools emit PreferredBlockSize.
// Runs under the vector mutex.
7014 #if VMA_STATS_STRING_ENABLED 7016 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
7018 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7024 json.WriteString(
"MemoryTypeIndex");
7025 json.WriteNumber(m_MemoryTypeIndex);
7027 json.WriteString(
"BlockSize");
7028 json.WriteNumber(m_PreferredBlockSize);
7030 json.WriteString(
"BlockCount");
7031 json.BeginObject(
true);
7032 if(m_MinBlockCount > 0)
7034 json.WriteString(
"Min");
7035 json.WriteNumber((uint64_t)m_MinBlockCount);
7037 if(m_MaxBlockCount < SIZE_MAX)
7039 json.WriteString(
"Max");
7040 json.WriteNumber((uint64_t)m_MaxBlockCount);
7042 json.WriteString(
"Cur");
7043 json.WriteNumber((uint64_t)m_Blocks.size());
7046 if(m_FrameInUseCount > 0)
7048 json.WriteString(
"FrameInUseCount");
7049 json.WriteNumber(m_FrameInUseCount);
7054 json.WriteString(
"PreferredBlockSize");
7055 json.WriteNumber(m_PreferredBlockSize);
// Per-block detailed maps, delegated to each block's metadata.
7058 json.WriteString(
"Blocks");
7060 for(
size_t i = 0; i < m_Blocks.size(); ++i)
7062 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
// Lazily creates (and caches) the defragmentator for this block vector.
7069 #endif // #if VMA_STATS_STRING_ENABLED 7071 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
7073 uint32_t currentFrameIndex)
7075 if(m_pDefragmentator == VMA_NULL)
7077 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
7083 return m_pDefragmentator;
// Runs the defragmentator within the given byte/allocation move budgets,
// accumulates statistics, and destroys blocks that became empty (down to
// m_MinBlockCount, keeping at most one cached empty block). The budget
// references are decremented by the amounts consumed (visible via the
// asserts below). Requires EnsureDefragmentator() to have been called.
7086 VkResult VmaBlockVector::Defragment(
7088 VkDeviceSize& maxBytesToMove,
7089 uint32_t& maxAllocationsToMove)
7091 if(m_pDefragmentator == VMA_NULL)
7096 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7099 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
7102 if(pDefragmentationStats != VMA_NULL)
7104 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
7105 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
// Moves must never exceed the caller-provided budgets.
7108 VMA_ASSERT(bytesMoved <= maxBytesToMove);
7109 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Reap blocks emptied by the defragmentation, back-to-front so that
// VmaVectorRemove does not invalidate the remaining indices.
7115 m_HasEmptyBlock =
false;
7116 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
7118 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
7119 if(pBlock->m_Metadata.IsEmpty())
7121 if(m_Blocks.size() > m_MinBlockCount)
7123 if(pDefragmentationStats != VMA_NULL)
7126 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
7129 VmaVectorRemove(m_Blocks, blockIndex);
7130 pBlock->Destroy(m_hAllocator);
7131 vma_delete(m_hAllocator, pBlock);
7135 m_HasEmptyBlock =
true;
7143 void VmaBlockVector::DestroyDefragmentator()
7145 if(m_pDefragmentator != VMA_NULL)
7147 vma_delete(m_hAllocator, m_pDefragmentator);
7148 m_pDefragmentator = VMA_NULL;
// Makes lost every lost-able allocation in every block of this pool, under
// the vector mutex, and optionally reports the total count.
7152 void VmaBlockVector::MakePoolAllocationsLost(
7153 uint32_t currentFrameIndex,
7154 size_t* pLostAllocationCount)
7156 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7157 size_t lostAllocationCount = 0;
7158 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7160 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
7162 lostAllocationCount += pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
7164 if(pLostAllocationCount != VMA_NULL)
7166 *pLostAllocationCount = lostAllocationCount;
// Folds every block's statistics into the global totals, the per-memory-type
// bucket and the per-heap bucket of *pStats. Runs under the vector mutex.
7170 void VmaBlockVector::AddStats(
VmaStats* pStats)
7172 const uint32_t memTypeIndex = m_MemoryTypeIndex;
7173 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
7175 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7177 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7179 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
7181 VMA_HEAVY_ASSERT(pBlock->Validate());
7183 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
7184 VmaAddStatInfo(pStats->
total, allocationStatInfo);
7185 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
7186 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
7193 VmaDefragmentator::VmaDefragmentator(
7195 VmaBlockVector* pBlockVector,
7196 uint32_t currentFrameIndex) :
7197 m_hAllocator(hAllocator),
7198 m_pBlockVector(pBlockVector),
7199 m_CurrentFrameIndex(currentFrameIndex),
7201 m_AllocationsMoved(0),
7202 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
7203 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
7207 VmaDefragmentator::~VmaDefragmentator()
7209 for(
size_t i = m_Blocks.size(); i--; )
7211 vma_delete(m_hAllocator, m_Blocks[i]);
7215 void VmaDefragmentator::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
7217 AllocationInfo allocInfo;
7218 allocInfo.m_hAllocation = hAlloc;
7219 allocInfo.m_pChanged = pChanged;
7220 m_Allocations.push_back(allocInfo);
7223 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData)
7226 if(m_pMappedDataForDefragmentation)
7228 *ppMappedData = m_pMappedDataForDefragmentation;
7233 if(m_pBlock->GetMappedData())
7235 *ppMappedData = m_pBlock->GetMappedData();
7240 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
7241 *ppMappedData = m_pMappedDataForDefragmentation;
7245 void VmaDefragmentator::BlockInfo::Unmap(
VmaAllocator hAllocator)
7247 if(m_pMappedDataForDefragmentation != VMA_NULL)
7249 m_pBlock->Unmap(hAllocator, 1);
// One round of defragmentation: repeatedly takes the last allocation of the
// highest-indexed block and tries to relocate it to an earlier block / lower
// offset, memcpy'ing the data through mapped pointers, until the byte or
// move-count budget is exhausted or no further move makes sense.
// NOTE(review): this chunk is garbled by extraction -- braces, the outer
// `while` loop, several `break`/`return` statements and some argument lines
// are missing. Code kept byte-identical; comments only.
7253 VkResult VmaDefragmentator::DefragmentRound(
7254 VkDeviceSize maxBytesToMove,
7255 uint32_t maxAllocationsToMove)
// Nothing registered: nothing to move.
7257 if(m_Blocks.empty())
// Start from the last block; SIZE_MAX means "pick the last allocation".
7262 size_t srcBlockIndex = m_Blocks.size() - 1;
7263 size_t srcAllocIndex = SIZE_MAX;
// Walk backwards to the last block that still has movable allocations.
7269 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
7271 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
7274 if(srcBlockIndex == 0)
7281 srcAllocIndex = SIZE_MAX;
7286 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
7290 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
7291 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
// Properties of the allocation being considered for a move.
7293 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
7294 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
7295 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
7296 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try every destination block up to and including the source block.
7299 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
7301 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
7302 VmaAllocationRequest dstAllocRequest;
7303 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
7304 m_CurrentFrameIndex,
7305 m_pBlockVector->GetFrameInUseCount(),
7306 m_pBlockVector->GetBufferImageGranularity(),
7311 &dstAllocRequest) &&
7313 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
// Defragmentation must not make other allocations lost.
7315 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Stop (partially done) when either budget would be exceeded.
7318 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
7319 (m_BytesMoved + size > maxBytesToMove))
7321 return VK_INCOMPLETE;
// Map both source and destination blocks for the CPU copy.
7324 void* pDstMappedData = VMA_NULL;
7325 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
7326 if(res != VK_SUCCESS)
7331 void* pSrcMappedData = VMA_NULL;
7332 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
7333 if(res != VK_SUCCESS)
// Copy the allocation's bytes to its new home (memcpy call garbled above).
7340 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
7341 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
7342 static_cast<size_t>(size));
// Commit the move in metadata, then retarget the allocation object.
7344 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
7345 pSrcBlockInfo->m_pBlock->m_Metadata.FreeAtOffset(srcOffset);
7347 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
// Report the move to the caller if a per-allocation flag was provided.
7349 if(allocInfo.m_pChanged != VMA_NULL)
7351 *allocInfo.m_pChanged = VK_TRUE;
7354 ++m_AllocationsMoved;
7355 m_BytesMoved += size;
7357 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the next candidate: previous allocation, or previous block.
7365 if(srcAllocIndex > 0)
7371 if(srcBlockIndex > 0)
7374 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation driver: builds per-block info, distributes the
// registered allocations to their blocks, sorts blocks by suitability as a
// move destination, then runs up to 2 rounds of DefragmentRound(), finally
// unmapping any blocks mapped only for defragmentation.
// NOTE(review): extraction dropped braces and several lines; code kept as-is.
7384 VkResult VmaDefragmentator::Defragment(
7385 VkDeviceSize maxBytesToMove,
7386 uint32_t maxAllocationsToMove)
// Nothing was registered via AddAllocation().
7388 if(m_Allocations.empty())
// Create one BlockInfo per device memory block of the owning vector.
7394 const size_t blockCount = m_pBlockVector->m_Blocks.size();
7395 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7397 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
7398 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
7399 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so allocations can be binary-searched to their block.
7403 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Move each registered (non-lost) allocation into its block's list.
7406 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
7408 AllocationInfo& allocInfo = m_Allocations[blockIndex];
7410 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7412 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
7413 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
7414 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
7416 (*it)->m_Allocations.push_back(allocInfo);
7424 m_Allocations.clear();
// Precompute per-block ordering hints.
7426 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7428 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
7429 pBlockInfo->CalcHasNonMovableAllocations();
7430 pBlockInfo->SortAllocationsBySizeDescecnding();
// Prefer blocks that make good move destinations at the front.
7434 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Execute up to 2 rounds, stopping early on VK_INCOMPLETE or error.
7437 VkResult result = VK_SUCCESS;
7438 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
7440 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Release mappings created only for defragmentation.
7444 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7446 m_Blocks[blockIndex]->Unmap(m_hAllocator);
7452 bool VmaDefragmentator::MoveMakesSense(
7453 size_t dstBlockIndex, VkDeviceSize dstOffset,
7454 size_t srcBlockIndex, VkDeviceSize srcOffset)
7456 if(dstBlockIndex < srcBlockIndex)
7460 if(dstBlockIndex > srcBlockIndex)
7464 if(dstOffset < srcOffset)
// Tail of the VmaAllocator_T constructor (its signature and first initializers
// were lost to extraction garbling). Initializer list, zero-init of member
// aggregates, heap size limits, and creation of per-memory-type containers.
7477 m_hDevice(pCreateInfo->device),
7478 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
// Use caller-supplied CPU allocation callbacks, or the empty defaults.
7479 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
7480 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
7481 m_PreferredLargeHeapBlockSize(0),
7482 m_PhysicalDevice(pCreateInfo->physicalDevice),
7483 m_CurrentFrameIndex(0),
7484 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks()))
// Reject the dedicated-allocation flag when the extension support was
// compiled out (fused #if line from garbling).
7488 #if !(VMA_DEDICATED_ALLOCATION) 7491 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
// Zero-initialize member aggregates before filling them in.
7495 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
7496 memset(&m_MemProps, 0,
sizeof(m_MemProps));
7497 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
7499 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
7500 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// VK_WHOLE_SIZE means "no limit" for a heap.
7502 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
7504 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
// Query device properties and memory properties through imported pointers.
7515 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
7516 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Apply caller-provided per-heap size limits, clamping reported heap sizes.
7523 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
7525 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
7526 if(limit != VK_WHOLE_SIZE)
7528 m_HeapSizeLimit[heapIndex] = limit;
7529 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
7531 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One block vector + one dedicated-allocation list per memory type.
7537 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7539 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
7541 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
7547 GetBufferImageGranularity(),
7552 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
7556 VmaAllocator_T::~VmaAllocator_T()
7558 VMA_ASSERT(m_Pools.empty());
7560 for(
size_t i = GetMemoryTypeCount(); i--; )
7562 vma_delete(
this, m_pDedicatedAllocations[i]);
7563 vma_delete(
this, m_pBlockVectors[i]);
// Fills m_VulkanFunctions: statically-linked Vulkan entry points first (when
// VMA_STATIC_VULKAN_FUNCTIONS == 1), then any caller-provided overrides, and
// finally asserts that every required pointer is set.
// NOTE(review): several #if/#endif lines are fused with code by extraction
// garbling; code kept byte-identical, comments only.
7567 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
// Step 1: take addresses of the statically linked Vulkan functions.
7569 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 7570 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
7571 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
7572 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
7573 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
7574 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
7575 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
7576 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
7577 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
7578 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
7579 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
7580 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
7581 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
7582 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
7583 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// KHR_dedicated_allocation entry points must be fetched at runtime.
7584 #if VMA_DEDICATED_ALLOCATION 7585 if(m_UseKhrDedicatedAllocation)
7587 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
7588 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
7589 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
7590 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// Step 2: caller-supplied pointers override the static ones.
7592 #endif // #if VMA_DEDICATED_ALLOCATION 7593 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 7595 #define VMA_COPY_IF_NOT_NULL(funcName) \ 7596 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 7598 if(pVulkanFunctions != VMA_NULL)
7600 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
7601 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
7602 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
7603 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
7604 VMA_COPY_IF_NOT_NULL(vkMapMemory);
7605 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
7606 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
7607 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
7608 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
7609 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
7610 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
7611 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
7612 VMA_COPY_IF_NOT_NULL(vkCreateImage);
7613 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
7614 #if VMA_DEDICATED_ALLOCATION 7615 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
7616 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Step 3: validate that every required function pointer ended up non-null.
7620 #undef VMA_COPY_IF_NOT_NULL 7624 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
7625 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
7626 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
7627 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
7628 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
7629 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
7630 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
7631 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
7632 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
7633 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
7634 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
7635 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
7636 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
7637 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
// 2KHR pointers are required only when dedicated allocation is in use.
7638 #if VMA_DEDICATED_ALLOCATION 7639 if(m_UseKhrDedicatedAllocation)
7641 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
7642 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
7647 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
7649 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7650 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
7651 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
7652 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates from a specific memory type: tries dedicated memory first when
// preferred (large request, or explicitly requested), otherwise allocates
// from the type's block vector, falling back to dedicated memory on failure.
// NOTE(review): extraction dropped several parameter/argument lines (e.g. the
// createInfo parameter and most call arguments); code kept byte-identical.
7655 VkResult VmaAllocator_T::AllocateMemoryOfType(
7656 const VkMemoryRequirements& vkMemReq,
7657 bool dedicatedAllocation,
7658 VkBuffer dedicatedBuffer,
7659 VkImage dedicatedImage,
7661 uint32_t memTypeIndex,
7662 VmaSuballocationType suballocType,
7665 VMA_ASSERT(pAllocation != VMA_NULL);
7666 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Mapped-bit requests only make sense on HOST_VISIBLE memory types.
7672 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
7677 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
7678 VMA_ASSERT(blockVector);
// Heuristic: requests larger than half a block go to dedicated memory.
7680 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
7681 bool preferDedicatedMemory =
7682 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
7683 dedicatedAllocation ||
7685 vkMemReq.size > preferredBlockSize / 2;
// Dedicated path is only taken for the default pool.
7687 if(preferDedicatedMemory &&
7689 finalCreateInfo.
pool == VK_NULL_HANDLE)
7698 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7702 return AllocateDedicatedMemory(
// Normal path: sub-allocate from the block vector.
7716 VkResult res = blockVector->Allocate(
7718 m_CurrentFrameIndex.load(),
7723 if(res == VK_SUCCESS)
7731 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: block allocation failed, try a dedicated allocation.
7735 res = AllocateDedicatedMemory(
7741 finalCreateInfo.pUserData,
7745 if(res == VK_SUCCESS)
7748 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
7754 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Creates a dedicated VkDeviceMemory for a single allocation: optionally
// chains VkMemoryDedicatedAllocateInfoKHR for a specific buffer/image, maps
// it when requested, then registers the allocation in the per-type sorted
// dedicated-allocations list.
// NOTE(review): extraction dropped several lines (size/pUserData parameters,
// map condition, some call arguments); code kept byte-identical.
7761 VkResult VmaAllocator_T::AllocateDedicatedMemory(
7763 VmaSuballocationType suballocType,
7764 uint32_t memTypeIndex,
7766 bool isUserDataString,
7768 VkBuffer dedicatedBuffer,
7769 VkImage dedicatedImage,
7772 VMA_ASSERT(pAllocation);
7774 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
7775 allocInfo.memoryTypeIndex = memTypeIndex;
7776 allocInfo.allocationSize = size;
// Chain dedicated-allocation info for exactly one of buffer/image.
7778 #if VMA_DEDICATED_ALLOCATION 7779 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
7780 if(m_UseKhrDedicatedAllocation)
7782 if(dedicatedBuffer != VK_NULL_HANDLE)
7784 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
7785 dedicatedAllocInfo.buffer = dedicatedBuffer;
7786 allocInfo.pNext = &dedicatedAllocInfo;
7788 else if(dedicatedImage != VK_NULL_HANDLE)
7790 dedicatedAllocInfo.image = dedicatedImage;
7791 allocInfo.pNext = &dedicatedAllocInfo;
7794 #endif // #if VMA_DEDICATED_ALLOCATION 7797 VkDeviceMemory hMemory = VK_NULL_HANDLE;
7798 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
7801 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Optionally map the new memory; on failure the memory is freed again.
7805 void* pMappedData = VMA_NULL;
7808 res = (*m_VulkanFunctions.vkMapMemory)(
7817 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
7818 FreeVulkanMemory(memTypeIndex, size, hMemory);
// Wrap the raw memory in a VmaAllocation_T object.
7823 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
7824 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
7825 (*pAllocation)->SetUserData(
this, pUserData);
// Register in the sorted per-type list under its mutex.
7829 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7830 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7831 VMA_ASSERT(pDedicatedAllocations);
7832 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
7835 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries memory requirements for a buffer. With KHR_dedicated_allocation
// enabled it uses vkGetBufferMemoryRequirements2KHR and also reports whether
// a dedicated allocation is required/preferred; otherwise it falls back to
// the core function and reports false for both flags.
// NOTE(review): #if/#else/#endif structure fused by garbling; code kept as-is.
7840 void VmaAllocator_T::GetBufferMemoryRequirements(
7842 VkMemoryRequirements& memReq,
7843 bool& requiresDedicatedAllocation,
7844 bool& prefersDedicatedAllocation)
const 7846 #if VMA_DEDICATED_ALLOCATION 7847 if(m_UseKhrDedicatedAllocation)
7849 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
7850 memReqInfo.buffer = hBuffer;
// Chain VkMemoryDedicatedRequirementsKHR to receive the dedicated flags.
7852 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7854 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7855 memReq2.pNext = &memDedicatedReq;
7857 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7859 memReq = memReq2.memoryRequirements;
7860 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7861 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback path: core query, no dedicated-allocation information.
7864 #endif // #if VMA_DEDICATED_ALLOCATION 7866 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
7867 requiresDedicatedAllocation =
false;
7868 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR with VkMemoryDedicatedRequirementsKHR when
// available, otherwise the core query with both dedicated flags set to false.
// NOTE(review): #if/#else/#endif structure fused by garbling; code kept as-is.
7872 void VmaAllocator_T::GetImageMemoryRequirements(
7874 VkMemoryRequirements& memReq,
7875 bool& requiresDedicatedAllocation,
7876 bool& prefersDedicatedAllocation)
const 7878 #if VMA_DEDICATED_ALLOCATION 7879 if(m_UseKhrDedicatedAllocation)
7881 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
7882 memReqInfo.image = hImage;
7884 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7886 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7887 memReq2.pNext = &memDedicatedReq;
7889 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7891 memReq = memReq2.memoryRequirements;
7892 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7893 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback path: core query, no dedicated-allocation information.
7896 #endif // #if VMA_DEDICATED_ALLOCATION 7898 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
7899 requiresDedicatedAllocation =
false;
7900 prefersDedicatedAllocation =
false;
// Top-level allocation entry: validates mutually exclusive create flags,
// routes pool allocations to the pool's block vector, and otherwise walks the
// acceptable memory types (vkMemReq.memoryTypeBits), masking out each failed
// type and retrying until success or exhaustion.
// NOTE(review): many condition/argument lines were dropped by extraction
// (createInfo parameter, flag tests, FindMemoryTypeIndex calls); code kept
// byte-identical, comments only.
7904 VkResult VmaAllocator_T::AllocateMemory(
7905 const VkMemoryRequirements& vkMemReq,
7906 bool requiresDedicatedAllocation,
7907 bool prefersDedicatedAllocation,
7908 VkBuffer dedicatedBuffer,
7909 VkImage dedicatedImage,
7911 VmaSuballocationType suballocType,
// Invalid flag combinations are rejected up front.
7917 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
7918 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7923 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
7924 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Required dedicated allocation conflicts with never-allocate and pools.
7926 if(requiresDedicatedAllocation)
7930 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
7931 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7933 if(createInfo.
pool != VK_NULL_HANDLE)
7935 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
7936 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7939 if((createInfo.
pool != VK_NULL_HANDLE) &&
7942 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
7943 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Pool allocations go straight to the pool's block vector.
7946 if(createInfo.
pool != VK_NULL_HANDLE)
7948 return createInfo.
pool->m_BlockVector.Allocate(
7950 m_CurrentFrameIndex.load(),
// Default-pool path: iterate candidate memory types.
7959 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
7960 uint32_t memTypeIndex = UINT32_MAX;
7962 if(res == VK_SUCCESS)
7964 res = AllocateMemoryOfType(
7966 requiresDedicatedAllocation || prefersDedicatedAllocation,
7974 if(res == VK_SUCCESS)
// Remove this memory type from candidates and try the next best one.
7984 memoryTypeBits &= ~(1u << memTypeIndex);
7987 if(res == VK_SUCCESS)
7989 res = AllocateMemoryOfType(
7991 requiresDedicatedAllocation || prefersDedicatedAllocation,
7999 if(res == VK_SUCCESS)
// All candidate memory types failed.
8009 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation: block allocations are returned to the owning block
// vector (pool's or the default one for the memory type), dedicated
// allocations release their VkDeviceMemory. Lost allocations skip the actual
// free. Finally the VmaAllocation_T object itself is destroyed.
// NOTE(review): braces/else/default-case lines dropped by extraction garbling;
// code kept byte-identical.
8020 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
8022 VMA_ASSERT(allocation);
// Only release backing memory when the allocation is not lost.
8024 if(allocation->CanBecomeLost() ==
false ||
8025 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
8027 switch(allocation->GetType())
8029 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8031 VmaBlockVector* pBlockVector = VMA_NULL;
8032 VmaPool hPool = allocation->GetPool();
// Pool allocations free through the pool's vector; otherwise use the
// default block vector of the allocation's memory type.
8033 if(hPool != VK_NULL_HANDLE)
8035 pBlockVector = &hPool->m_BlockVector;
8039 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
8040 pBlockVector = m_pBlockVectors[memTypeIndex];
8042 pBlockVector->Free(allocation);
8045 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8046 FreeDedicatedMemory(allocation);
// Destroy the handle object itself (clears user data first).
8053 allocation->SetUserData(
this, VMA_NULL);
8054 vma_delete(
this, allocation);
// Computes global statistics: initializes all buckets, accumulates stats from
// default block vectors, custom pools, and dedicated allocations, then
// post-processes (averages etc.) every bucket.
// NOTE(review): InitStatInfo loop bodies and the local VmaStatInfo declaration
// were dropped by extraction garbling; code kept byte-identical.
8057 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Zero all stat buckets first.
8060 InitStatInfo(pStats->
total);
8061 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
8063 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default block vectors, one per memory type.
8067 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8069 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
8070 VMA_ASSERT(pBlockVector);
8071 pBlockVector->AddStats(pStats);
// Custom pools, under the pools mutex.
8076 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8077 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
8079 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
// Dedicated allocations, per memory type, under that type's mutex.
8084 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8086 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
8087 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8088 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
8089 VMA_ASSERT(pDedicatedAllocVector);
8090 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
8093 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
8094 VmaAddStatInfo(pStats->
total, allocationStatInfo);
8095 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
8096 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Finalize derived values (averages etc.) in every bucket.
8101 VmaPostprocessCalcStatInfo(pStats->
total);
8102 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
8103 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
8104 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
8105 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// 4098 == 0x1002, the PCI vendor ID assigned to AMD.
8108 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Allocator-level defragmentation: dispatches each eligible allocation
// (block-type, HOST_VISIBLE, not lost) to a per-block-vector defragmentator,
// runs defragmentation on default vectors and pools within the optional
// byte/move limits, then destroys all defragmentators.
// NOTE(review): extraction dropped parameter and argument lines (pAllocations,
// pDefragmentationInfo body, currentFrameIndex arguments); code kept as-is.
8110 VkResult VmaAllocator_T::Defragment(
8112 size_t allocationCount,
8113 VkBool32* pAllocationsChanged,
// Clear the output arrays/stats up front.
8117 if(pAllocationsChanged != VMA_NULL)
8119 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
8121 if(pDefragmentationStats != VMA_NULL)
8123 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
8126 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
// Pools list is locked for the whole operation.
8128 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
8130 const size_t poolCount = m_Pools.size();
// Register each eligible allocation with its block vector's defragmentator.
8133 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
8137 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
8139 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
8141 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
8143 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
8145 VmaBlockVector* pAllocBlockVector = VMA_NULL;
8147 const VmaPool hAllocPool = hAlloc->GetPool();
// Pool allocation vs. default block vector of the memory type.
8149 if(hAllocPool != VK_NULL_HANDLE)
8151 pAllocBlockVector = &hAllocPool->GetBlockVector();
8156 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
8159 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
8161 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
8162 &pAllocationsChanged[allocIndex] : VMA_NULL;
8163 pDefragmentator->AddAllocation(hAlloc, pChanged);
8167 VkResult result = VK_SUCCESS;
// Default limits: effectively unbounded unless pDefragmentationInfo says so.
8171 VkDeviceSize maxBytesToMove = SIZE_MAX;
8172 uint32_t maxAllocationsToMove = UINT32_MAX;
8173 if(pDefragmentationInfo != VMA_NULL)
// Defragment default vectors of all HOST_VISIBLE memory types...
8180 for(uint32_t memTypeIndex = 0;
8181 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
8185 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8187 result = m_pBlockVectors[memTypeIndex]->Defragment(
8188 pDefragmentationStats,
8190 maxAllocationsToMove);
// ...then every custom pool.
8195 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
8197 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
8198 pDefragmentationStats,
8200 maxAllocationsToMove);
// Cleanup: destroy defragmentators in reverse registration order.
8206 for(
size_t poolIndex = poolCount; poolIndex--; )
8208 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
8212 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
8214 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8216 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// Body of VmaAllocator_T::GetAllocationInfo (signature lost to extraction
// garbling): fills *pAllocationInfo. For lost-capable allocations it uses a
// compare-exchange loop to atomically bump the last-use frame index, reporting
// zeroed/lost info when the allocation is lost; otherwise it reports the real
// memory type, memory handle, offset, size, mapped pointer and user data.
8225 if(hAllocation->CanBecomeLost())
// Lock-free touch: retry CAS until we observe lost or publish current frame.
8231 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
8232 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
8235 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Allocation is lost: report empty/defaulted info.
8239 pAllocationInfo->
offset = 0;
8240 pAllocationInfo->
size = hAllocation->GetSize();
8242 pAllocationInfo->
pUserData = hAllocation->GetUserData();
8245 else if(localLastUseFrameIndex == localCurrFrameIndex)
// Already touched this frame: report live info.
8247 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
8248 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
8249 pAllocationInfo->
offset = hAllocation->GetOffset();
8250 pAllocationInfo->
size = hAllocation->GetSize();
8252 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Try to claim the current frame index; loop on contention.
8257 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
8259 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable path (stats-string builds also touch the frame index).
8266 #if VMA_STATS_STRING_ENABLED 8267 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
8268 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
8271 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
8272 if(localLastUseFrameIndex == localCurrFrameIndex)
8278 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
8280 localLastUseFrameIndex = localCurrFrameIndex;
// Report full live info.
8286 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
8287 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
8288 pAllocationInfo->
offset = hAllocation->GetOffset();
8289 pAllocationInfo->
size = hAllocation->GetSize();
8290 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
8291 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks the allocation as used in the current frame without returning its
// info. Same CAS loop as GetAllocationInfo; returns whether the allocation is
// still valid (not lost).
// NOTE(review): return statements and loop braces dropped by extraction
// garbling; code kept byte-identical.
8295 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
8298 if(hAllocation->CanBecomeLost())
8300 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
8301 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost: caller is told the allocation is no longer usable.
8304 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
8308 else if(localLastUseFrameIndex == localCurrFrameIndex)
// Publish the current frame index; retry on CAS failure.
8314 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
8316 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable path (frame index still maintained for stats builds).
8323 #if VMA_STATS_STRING_ENABLED 8324 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
8325 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
8328 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
8329 if(localLastUseFrameIndex == localCurrFrameIndex)
8335 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
8337 localLastUseFrameIndex = localCurrFrameIndex;
// Body of VmaAllocator_T::CreatePool (signature and createInfo normalization
// lost to extraction garbling): creates a VmaPool_T, pre-creates its minimum
// blocks, and on success registers it in the sorted m_Pools list.
8349 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
8362 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
// Pre-allocate minBlockCount blocks; destroy the pool if that fails.
8364 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
8365 if(res != VK_SUCCESS)
8367 vma_delete(
this, *pPool);
// Register the new pool under the pools mutex.
8374 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8375 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
8381 void VmaAllocator_T::DestroyPool(
VmaPool pool)
8385 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8386 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
8387 VMA_ASSERT(success &&
"Pool not found in Allocator.");
8390 vma_delete(
this, pool);
// Remnant of VmaAllocator_T::GetPoolStats (signature lost to extraction
// garbling): delegates to the pool's block vector.
8395 pool->m_BlockVector.GetPoolStats(pPoolStats);
8398 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
8400 m_CurrentFrameIndex.store(frameIndex);
8403 void VmaAllocator_T::MakePoolAllocationsLost(
8405 size_t* pLostAllocationCount)
8407 hPool->m_BlockVector.MakePoolAllocationsLost(
8408 m_CurrentFrameIndex.load(),
8409 pLostAllocationCount);
8412 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
8414 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
8415 (*pAllocation)->InitLost();
8418 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
8420 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
8423 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
8425 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
8426 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
8428 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
8429 if(res == VK_SUCCESS)
8431 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
8436 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
8441 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
8444 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
8446 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
8452 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
8454 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
8456 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
8459 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
8461 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
8462 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
8464 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
8465 m_HeapSizeLimit[heapIndex] += size;
8469 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
8471 if(hAllocation->CanBecomeLost())
8473 return VK_ERROR_MEMORY_MAP_FAILED;
8476 switch(hAllocation->GetType())
8478 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8480 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
8481 char *pBytes = VMA_NULL;
8482 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
8483 if(res == VK_SUCCESS)
8485 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
8486 hAllocation->BlockAllocMap();
8490 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8491 return hAllocation->DedicatedAllocMap(
this, ppData);
8494 return VK_ERROR_MEMORY_MAP_FAILED;
8500 switch(hAllocation->GetType())
8502 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8504 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
8505 hAllocation->BlockAllocUnmap();
8506 pBlock->Unmap(
this, 1);
8509 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8510 hAllocation->DedicatedAllocUnmap(
this);
// Binds hBuffer to the device memory backing hAllocation.
// Dedicated allocations call vkBindBufferMemory directly on their own
// VkDeviceMemory; block sub-allocations delegate to the owning block so the
// correct offset is applied (and block-level synchronization — presumably;
// the block implementation is outside this view).
// NOTE(review): extraction dropped brace/structure lines; code kept byte-identical.
8517 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
8519 VkResult res = VK_SUCCESS;
8520 switch(hAllocation->GetType())
8522 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8523 res = GetVulkanFunctions().vkBindBufferMemory(
8526 hAllocation->GetMemory(),
8529 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// A null block here means the allocation was lost — asserted, not handled.
8531 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
8532 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
8533 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
// Binds hImage to the device memory backing hAllocation.
// Mirrors BindBufferMemory: dedicated allocations call vkBindImageMemory on
// their own VkDeviceMemory; block sub-allocations delegate to the owning
// block, which applies the allocation's offset.
// NOTE(review): extraction dropped brace/structure lines; code kept byte-identical.
8542 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
8544 VkResult res = VK_SUCCESS;
8545 switch(hAllocation->GetType())
8547 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8548 res = GetVulkanFunctions().vkBindImageMemory(
8551 hAllocation->GetMemory(),
8554 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// A null block here means the allocation was lost — asserted, not handled.
8556 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
8557 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
8558 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
// Destroys a dedicated (non-block) allocation: removes it from the
// per-memory-type registry of dedicated allocations (under that type's
// mutex), unmaps it if still mapped, and releases the VkDeviceMemory via
// FreeVulkanMemory (which handles the user callback and heap budget).
// NOTE(review): extraction dropped brace/structure lines; code kept byte-identical.
8567 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
8569 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
8571 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
// Registry removal is the only part needing the per-type mutex.
8573 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8574 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
8575 VMA_ASSERT(pDedicatedAllocations);
// The vector is kept sorted by pointer value; failure means a double free
// or foreign allocation — asserted, not handled.
8576 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
8577 VMA_ASSERT(success);
8580 VkDeviceMemory hMemory = allocation->GetMemory();
// Persistently-mapped dedicated memory must be unmapped before freeing.
8582 if(allocation->GetMappedData() != VMA_NULL)
8584 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
8587 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
8589 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
8592 #if VMA_STATS_STRING_ENABLED 8594 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
8596 bool dedicatedAllocationsStarted =
false;
8597 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8599 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8600 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
8601 VMA_ASSERT(pDedicatedAllocVector);
8602 if(pDedicatedAllocVector->empty() ==
false)
8604 if(dedicatedAllocationsStarted ==
false)
8606 dedicatedAllocationsStarted =
true;
8607 json.WriteString(
"DedicatedAllocations");
8611 json.BeginString(
"Type ");
8612 json.ContinueString(memTypeIndex);
8617 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
8619 json.BeginObject(
true);
8621 hAlloc->PrintParameters(json);
8628 if(dedicatedAllocationsStarted)
8634 bool allocationsStarted =
false;
8635 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8637 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
8639 if(allocationsStarted ==
false)
8641 allocationsStarted =
true;
8642 json.WriteString(
"DefaultPools");
8646 json.BeginString(
"Type ");
8647 json.ContinueString(memTypeIndex);
8650 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
8653 if(allocationsStarted)
8660 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8661 const size_t poolCount = m_Pools.size();
8664 json.WriteString(
"Pools");
8666 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
8668 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
8675 #endif // #if VMA_STATS_STRING_ENABLED 8677 static VkResult AllocateMemoryForImage(
8681 VmaSuballocationType suballocType,
8684 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
8686 VkMemoryRequirements vkMemReq = {};
8687 bool requiresDedicatedAllocation =
false;
8688 bool prefersDedicatedAllocation =
false;
8689 allocator->GetImageMemoryRequirements(image, vkMemReq,
8690 requiresDedicatedAllocation, prefersDedicatedAllocation);
8692 return allocator->AllocateMemory(
8694 requiresDedicatedAllocation,
8695 prefersDedicatedAllocation,
8698 *pAllocationCreateInfo,
8710 VMA_ASSERT(pCreateInfo && pAllocator);
8711 VMA_DEBUG_LOG(
"vmaCreateAllocator");
8719 if(allocator != VK_NULL_HANDLE)
8721 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
8722 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
8723 vma_delete(&allocationCallbacks, allocator);
8729 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
8731 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
8732 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
8737 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
8739 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
8740 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
8745 uint32_t memoryTypeIndex,
8746 VkMemoryPropertyFlags* pFlags)
8748 VMA_ASSERT(allocator && pFlags);
8749 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
8750 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
8755 uint32_t frameIndex)
8757 VMA_ASSERT(allocator);
8758 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
8760 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8762 allocator->SetCurrentFrameIndex(frameIndex);
8769 VMA_ASSERT(allocator && pStats);
8770 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8771 allocator->CalculateStats(pStats);
8774 #if VMA_STATS_STRING_ENABLED 8778 char** ppStatsString,
8779 VkBool32 detailedMap)
8781 VMA_ASSERT(allocator && ppStatsString);
8782 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8784 VmaStringBuilder sb(allocator);
8786 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
8790 allocator->CalculateStats(&stats);
8792 json.WriteString(
"Total");
8793 VmaPrintStatInfo(json, stats.
total);
8795 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
8797 json.BeginString(
"Heap ");
8798 json.ContinueString(heapIndex);
8802 json.WriteString(
"Size");
8803 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
8805 json.WriteString(
"Flags");
8806 json.BeginArray(
true);
8807 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
8809 json.WriteString(
"DEVICE_LOCAL");
8815 json.WriteString(
"Stats");
8816 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
8819 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
8821 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
8823 json.BeginString(
"Type ");
8824 json.ContinueString(typeIndex);
8829 json.WriteString(
"Flags");
8830 json.BeginArray(
true);
8831 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
8832 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
8834 json.WriteString(
"DEVICE_LOCAL");
8836 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8838 json.WriteString(
"HOST_VISIBLE");
8840 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
8842 json.WriteString(
"HOST_COHERENT");
8844 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
8846 json.WriteString(
"HOST_CACHED");
8848 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
8850 json.WriteString(
"LAZILY_ALLOCATED");
8856 json.WriteString(
"Stats");
8857 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
8866 if(detailedMap == VK_TRUE)
8868 allocator->PrintDetailedMap(json);
8874 const size_t len = sb.GetLength();
8875 char*
const pChars = vma_new_array(allocator,
char, len + 1);
8878 memcpy(pChars, sb.GetData(), len);
8881 *ppStatsString = pChars;
8888 if(pStatsString != VMA_NULL)
8890 VMA_ASSERT(allocator);
8891 size_t len = strlen(pStatsString);
8892 vma_delete_array(allocator, pStatsString, len + 1);
8896 #endif // #if VMA_STATS_STRING_ENABLED 8903 uint32_t memoryTypeBits,
8905 uint32_t* pMemoryTypeIndex)
8907 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8908 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8909 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8916 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
8922 preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8926 switch(pAllocationCreateInfo->
usage)
8931 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
8933 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8937 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
8940 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8941 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
8943 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8947 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8948 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
8954 *pMemoryTypeIndex = UINT32_MAX;
8955 uint32_t minCost = UINT32_MAX;
8956 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
8957 memTypeIndex < allocator->GetMemoryTypeCount();
8958 ++memTypeIndex, memTypeBit <<= 1)
8961 if((memTypeBit & memoryTypeBits) != 0)
8963 const VkMemoryPropertyFlags currFlags =
8964 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
8966 if((requiredFlags & ~currFlags) == 0)
8969 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
8971 if(currCost < minCost)
8973 *pMemoryTypeIndex = memTypeIndex;
8983 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
8988 const VkBufferCreateInfo* pBufferCreateInfo,
8990 uint32_t* pMemoryTypeIndex)
8992 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8993 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
8994 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8995 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8997 const VkDevice hDev = allocator->m_hDevice;
8998 VkBuffer hBuffer = VK_NULL_HANDLE;
8999 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
9000 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
9001 if(res == VK_SUCCESS)
9003 VkMemoryRequirements memReq = {};
9004 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
9005 hDev, hBuffer, &memReq);
9009 memReq.memoryTypeBits,
9010 pAllocationCreateInfo,
9013 allocator->GetVulkanFunctions().vkDestroyBuffer(
9014 hDev, hBuffer, allocator->GetAllocationCallbacks());
9021 const VkImageCreateInfo* pImageCreateInfo,
9023 uint32_t* pMemoryTypeIndex)
9025 VMA_ASSERT(allocator != VK_NULL_HANDLE);
9026 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
9027 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
9028 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
9030 const VkDevice hDev = allocator->m_hDevice;
9031 VkImage hImage = VK_NULL_HANDLE;
9032 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
9033 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
9034 if(res == VK_SUCCESS)
9036 VkMemoryRequirements memReq = {};
9037 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
9038 hDev, hImage, &memReq);
9042 memReq.memoryTypeBits,
9043 pAllocationCreateInfo,
9046 allocator->GetVulkanFunctions().vkDestroyImage(
9047 hDev, hImage, allocator->GetAllocationCallbacks());
9057 VMA_ASSERT(allocator && pCreateInfo && pPool);
9059 VMA_DEBUG_LOG(
"vmaCreatePool");
9061 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9063 return allocator->CreatePool(pCreateInfo, pPool);
9070 VMA_ASSERT(allocator);
9072 if(pool == VK_NULL_HANDLE)
9077 VMA_DEBUG_LOG(
"vmaDestroyPool");
9079 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9081 allocator->DestroyPool(pool);
9089 VMA_ASSERT(allocator && pool && pPoolStats);
9091 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9093 allocator->GetPoolStats(pool, pPoolStats);
9099 size_t* pLostAllocationCount)
9101 VMA_ASSERT(allocator && pool);
9103 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9105 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
9110 const VkMemoryRequirements* pVkMemoryRequirements,
9115 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
9117 VMA_DEBUG_LOG(
"vmaAllocateMemory");
9119 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9121 VkResult result = allocator->AllocateMemory(
9122 *pVkMemoryRequirements,
9128 VMA_SUBALLOCATION_TYPE_UNKNOWN,
9131 if(pAllocationInfo && result == VK_SUCCESS)
9133 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9146 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
9148 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
9150 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9152 VkMemoryRequirements vkMemReq = {};
9153 bool requiresDedicatedAllocation =
false;
9154 bool prefersDedicatedAllocation =
false;
9155 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
9156 requiresDedicatedAllocation,
9157 prefersDedicatedAllocation);
9159 VkResult result = allocator->AllocateMemory(
9161 requiresDedicatedAllocation,
9162 prefersDedicatedAllocation,
9166 VMA_SUBALLOCATION_TYPE_BUFFER,
9169 if(pAllocationInfo && result == VK_SUCCESS)
9171 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9184 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
9186 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
9188 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9190 VkResult result = AllocateMemoryForImage(
9194 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
9197 if(pAllocationInfo && result == VK_SUCCESS)
9199 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9209 VMA_ASSERT(allocator);
9210 VMA_DEBUG_LOG(
"vmaFreeMemory");
9211 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9212 if(allocation != VK_NULL_HANDLE)
9214 allocator->FreeMemory(allocation);
9223 VMA_ASSERT(allocator && allocation && pAllocationInfo);
9225 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9227 allocator->GetAllocationInfo(allocation, pAllocationInfo);
9234 VMA_ASSERT(allocator && allocation);
9236 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9238 return allocator->TouchAllocation(allocation);
9246 VMA_ASSERT(allocator && allocation);
9248 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9250 allocation->SetUserData(allocator, pUserData);
9257 VMA_ASSERT(allocator && pAllocation);
9259 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
9261 allocator->CreateLostAllocation(pAllocation);
9269 VMA_ASSERT(allocator && allocation && ppData);
9271 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9273 return allocator->Map(allocation, ppData);
9280 VMA_ASSERT(allocator && allocation);
9282 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9284 allocator->Unmap(allocation);
9290 size_t allocationCount,
9291 VkBool32* pAllocationsChanged,
9295 VMA_ASSERT(allocator && pAllocations);
9297 VMA_DEBUG_LOG(
"vmaDefragment");
9299 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9301 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
9309 VMA_ASSERT(allocator && allocation && buffer);
9311 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
9313 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9315 return allocator->BindBufferMemory(allocation, buffer);
9323 VMA_ASSERT(allocator && allocation && image);
9325 VMA_DEBUG_LOG(
"vmaBindImageMemory");
9327 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9329 return allocator->BindImageMemory(allocation, image);
9334 const VkBufferCreateInfo* pBufferCreateInfo,
9340 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
9342 VMA_DEBUG_LOG(
"vmaCreateBuffer");
9344 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9346 *pBuffer = VK_NULL_HANDLE;
9347 *pAllocation = VK_NULL_HANDLE;
9350 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
9351 allocator->m_hDevice,
9353 allocator->GetAllocationCallbacks(),
9358 VkMemoryRequirements vkMemReq = {};
9359 bool requiresDedicatedAllocation =
false;
9360 bool prefersDedicatedAllocation =
false;
9361 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
9362 requiresDedicatedAllocation, prefersDedicatedAllocation);
9366 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
9368 VMA_ASSERT(vkMemReq.alignment %
9369 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
9371 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
9373 VMA_ASSERT(vkMemReq.alignment %
9374 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
9376 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
9378 VMA_ASSERT(vkMemReq.alignment %
9379 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
9383 res = allocator->AllocateMemory(
9385 requiresDedicatedAllocation,
9386 prefersDedicatedAllocation,
9389 *pAllocationCreateInfo,
9390 VMA_SUBALLOCATION_TYPE_BUFFER,
9395 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
9399 #if VMA_STATS_STRING_ENABLED 9400 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
9402 if(pAllocationInfo != VMA_NULL)
9404 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9408 allocator->FreeMemory(*pAllocation);
9409 *pAllocation = VK_NULL_HANDLE;
9410 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
9411 *pBuffer = VK_NULL_HANDLE;
9414 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
9415 *pBuffer = VK_NULL_HANDLE;
9426 VMA_ASSERT(allocator);
9427 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
9428 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9429 if(buffer != VK_NULL_HANDLE)
9431 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
9433 if(allocation != VK_NULL_HANDLE)
9435 allocator->FreeMemory(allocation);
9441 const VkImageCreateInfo* pImageCreateInfo,
9447 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
9449 VMA_DEBUG_LOG(
"vmaCreateImage");
9451 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9453 *pImage = VK_NULL_HANDLE;
9454 *pAllocation = VK_NULL_HANDLE;
9457 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
9458 allocator->m_hDevice,
9460 allocator->GetAllocationCallbacks(),
9464 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
9465 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
9466 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
9469 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
9473 res = allocator->BindImageMemory(*pAllocation, *pImage);
9477 #if VMA_STATS_STRING_ENABLED 9478 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
9480 if(pAllocationInfo != VMA_NULL)
9482 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9486 allocator->FreeMemory(*pAllocation);
9487 *pAllocation = VK_NULL_HANDLE;
9488 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
9489 *pImage = VK_NULL_HANDLE;
9492 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
9493 *pImage = VK_NULL_HANDLE;
9504 VMA_ASSERT(allocator);
9505 VMA_DEBUG_LOG(
"vmaDestroyImage");
9506 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9507 if(image != VK_NULL_HANDLE)
9509 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
9511 if(allocation != VK_NULL_HANDLE)
9513 allocator->FreeMemory(allocation);
9517 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1179
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1443
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1206
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
Represents single memory allocation.
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1189
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1400
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1183
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1773
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1203
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1972
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1619
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1673
Definition: vk_mem_alloc.h:1480
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1172
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1518
Definition: vk_mem_alloc.h:1427
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1215
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1268
Description of an Allocator to be created.
Definition: vk_mem_alloc.h:1200
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1431
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1333
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1186
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1332
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1976
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1232
VmaStatInfo total
Definition: vk_mem_alloc.h:1342
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1984
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1502
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1967
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1187
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1114
Represents main object of this library initialized.
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1209
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1627
Definition: vk_mem_alloc.h:1621
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1783
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1184
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1539
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1643
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1679
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1170
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1630
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VmaMemoryUsage
Definition: vk_mem_alloc.h:1378
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1962
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1980
Definition: vk_mem_alloc.h:1417
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1526
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1185
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1338
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1120
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1141
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1146
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1982
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1513
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1689
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1180
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1321
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1638
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1133
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1487
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1334
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1137
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1633
Definition: vk_mem_alloc.h:1426
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1508
Definition: vk_mem_alloc.h:1499
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1324
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1182
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:1651
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1218
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1682
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1497
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1532
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1256
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1340
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1467
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1333
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1191
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1135
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1190
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1665
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of an Allocator to be created.
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1797
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1212
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1333
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1330
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Describes parameters of an existing VmaPool.
Definition: vk_mem_alloc.h:1670
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1778
Definition: vk_mem_alloc.h:1495
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1978
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1178
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1328
Definition: vk_mem_alloc.h:1383
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1623
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1326
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1188
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1192
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1454
Definition: vk_mem_alloc.h:1410
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1792
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1168
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1181
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1759
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1601
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1334
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
Definition: vk_mem_alloc.h:1493
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1341
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1676
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1334
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1764