23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1078 #include <vulkan/vulkan.h> 1094 uint32_t memoryType,
1095 VkDeviceMemory memory,
1100 uint32_t memoryType,
1101 VkDeviceMemory memory,
1262 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1270 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1280 uint32_t memoryTypeIndex,
1281 VkMemoryPropertyFlags* pFlags);
1293 uint32_t frameIndex);
1326 #define VMA_STATS_STRING_ENABLED 1 1328 #if VMA_STATS_STRING_ENABLED 1335 char** ppStatsString,
1336 VkBool32 detailedMap);
1340 char* pStatsString);
1342 #endif // #if VMA_STATS_STRING_ENABLED 1536 uint32_t memoryTypeBits,
1538 uint32_t* pMemoryTypeIndex);
1554 const VkBufferCreateInfo* pBufferCreateInfo,
1556 uint32_t* pMemoryTypeIndex);
1572 const VkImageCreateInfo* pImageCreateInfo,
1574 uint32_t* pMemoryTypeIndex);
1705 size_t* pLostAllocationCount);
1788 const VkMemoryRequirements* pVkMemoryRequirements,
2048 size_t allocationCount,
2049 VkBool32* pAllocationsChanged,
2115 const VkBufferCreateInfo* pBufferCreateInfo,
2140 const VkImageCreateInfo* pImageCreateInfo,
2166 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 2169 #ifdef __INTELLISENSE__ 2170 #define VMA_IMPLEMENTATION 2173 #ifdef VMA_IMPLEMENTATION 2174 #undef VMA_IMPLEMENTATION 2196 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 2197 #define VMA_STATIC_VULKAN_FUNCTIONS 1 2209 #if VMA_USE_STL_CONTAINERS 2210 #define VMA_USE_STL_VECTOR 1 2211 #define VMA_USE_STL_UNORDERED_MAP 1 2212 #define VMA_USE_STL_LIST 1 2215 #if VMA_USE_STL_VECTOR 2219 #if VMA_USE_STL_UNORDERED_MAP 2220 #include <unordered_map> 2223 #if VMA_USE_STL_LIST 2232 #include <algorithm> 2236 #if !defined(_WIN32) && !defined(__APPLE__) 2242 #define VMA_NULL nullptr 2245 #if defined(__APPLE__) || defined(__ANDROID__) 2247 void *aligned_alloc(
size_t alignment,
size_t size)
2250 if(alignment <
sizeof(
void*))
2252 alignment =
sizeof(
void*);
2256 if(posix_memalign(&pointer, alignment, size) == 0)
2265 #define VMA_ASSERT(expr) assert(expr) 2267 #define VMA_ASSERT(expr) 2273 #ifndef VMA_HEAVY_ASSERT 2275 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 2277 #define VMA_HEAVY_ASSERT(expr) 2281 #ifndef VMA_ALIGN_OF 2282 #define VMA_ALIGN_OF(type) (__alignof(type)) 2285 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 2287 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 2289 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 2293 #ifndef VMA_SYSTEM_FREE 2295 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 2297 #define VMA_SYSTEM_FREE(ptr) free(ptr) 2302 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 2306 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 2310 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 2314 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 2317 #ifndef VMA_DEBUG_LOG 2318 #define VMA_DEBUG_LOG(format, ...) 2328 #if VMA_STATS_STRING_ENABLED 2329 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
2331 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Writes the decimal representation of num into outStr
// (at most strLen bytes, always NUL-terminated by snprintf).
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}
// Writes the implementation-defined "%p" representation of ptr into outStr
// (at most strLen bytes, always NUL-terminated by snprintf).
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
// Fragment of class VmaMutex: thin wrapper over a std::mutex-like member.
// NOTE(review): the enclosing class declaration, member, and surrounding guard
// lines are not visible in this chunk — fragment kept byte-identical.
2349 void Lock() { m_Mutex.lock(); }
2350 void Unlock() { m_Mutex.unlock(); }
// Run of configuration macros with defaults (debug margins, alignment, heap
// block sizes); their #ifndef guards allow user override before inclusion.
2354 #define VMA_MUTEX VmaMutex 2365 #ifndef VMA_ATOMIC_UINT32 2366 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 2369 #ifndef VMA_BEST_FIT 2382 #define VMA_BEST_FIT (1) 2385 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 2390 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 2393 #ifndef VMA_DEBUG_ALIGNMENT 2398 #define VMA_DEBUG_ALIGNMENT (1) 2401 #ifndef VMA_DEBUG_MARGIN 2406 #define VMA_DEBUG_MARGIN (0) 2409 #ifndef VMA_DEBUG_GLOBAL_MUTEX 2414 #define VMA_DEBUG_GLOBAL_MUTEX (0) 2417 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 2422 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 2425 #ifndef VMA_SMALL_HEAP_MAX_SIZE 2426 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 2430 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 2431 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 2435 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
// All-null VkAllocationCallbacks used when the caller supplies none.
2441 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
2442 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (population count).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t count = 0;
    while(v != 0)
    {
        v &= v - 1; // clears the lowest set bit
        ++count;
    }
    return count;
}
// Aligns val up to the nearest multiple of align. align must be > 0;
// works for any align (not only powers of two).
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return ((val + align - 1) / align) * align;
}
// Integer division with mathematical rounding to the nearest value.
template<typename T>
inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
2472 template<
typename Iterator,
typename Compare>
2473 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
2475 Iterator centerValue = end; --centerValue;
2476 Iterator insertIndex = beg;
2477 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
2479 if(cmp(*memTypeIndex, *centerValue))
2481 if(insertIndex != memTypeIndex)
2483 VMA_SWAP(*memTypeIndex, *insertIndex);
2488 if(insertIndex != centerValue)
2490 VMA_SWAP(*insertIndex, *centerValue);
2495 template<
typename Iterator,
typename Compare>
2496 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
2500 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
2501 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
2502 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
2506 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 2508 #endif // #ifndef VMA_SORT 2517 static inline bool VmaBlocksOnSamePage(
2518 VkDeviceSize resourceAOffset,
2519 VkDeviceSize resourceASize,
2520 VkDeviceSize resourceBOffset,
2521 VkDeviceSize pageSize)
2523 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
2524 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
2525 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
2526 VkDeviceSize resourceBStart = resourceBOffset;
2527 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
2528 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation; order matters — it is used by
// VmaIsBufferImageGranularityConflict() to decide aliasing conflicts.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,            // unused region
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,         // unknown resource kind
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,   // image with unknown tiling
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF // force 32-bit underlying type
};
2548 static inline bool VmaIsBufferImageGranularityConflict(
2549 VmaSuballocationType suballocType1,
2550 VmaSuballocationType suballocType2)
2552 if(suballocType1 > suballocType2)
2554 VMA_SWAP(suballocType1, suballocType2);
2557 switch(suballocType1)
2559 case VMA_SUBALLOCATION_TYPE_FREE:
2561 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
2563 case VMA_SUBALLOCATION_TYPE_BUFFER:
2565 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2566 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2567 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
2569 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2570 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
2571 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2572 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
2574 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2575 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
// Scoped lock over VMA_MUTEX; locks only when useMutex is true (null mutex
// pointer means "no locking"). NOTE(review): the enclosing struct declaration
// and the constructor/destructor bodies are not visible in this chunk —
// fragment kept byte-identical.
2587 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
2588 m_pMutex(useMutex ? &mutex : VMA_NULL)
2605 VMA_MUTEX* m_pMutex;
// Optional global mutex serializing every public entry point (debug feature),
// plus the minimum free-suballocation size worth registering in free lists.
2608 #if VMA_DEBUG_GLOBAL_MUTEX 2609 static VMA_MUTEX gDebugGlobalMutex;
2610 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 2612 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 2616 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Binary search over a sorted [beg, end) range. Returns an iterator to the first
element that is NOT less than key (equivalent to std::lower_bound).
cmp(a, b) must return true when a < b.
*/
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
2649 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
2651 if((pAllocationCallbacks != VMA_NULL) &&
2652 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
2654 return (*pAllocationCallbacks->pfnAllocation)(
2655 pAllocationCallbacks->pUserData,
2658 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2662 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
2666 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
2668 if((pAllocationCallbacks != VMA_NULL) &&
2669 (pAllocationCallbacks->pfnFree != VMA_NULL))
2671 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
2675 VMA_SYSTEM_FREE(ptr);
2679 template<
typename T>
2680 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
2682 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
2685 template<
typename T>
2686 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
2688 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
2691 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 2693 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 2695 template<
typename T>
2696 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
2699 VmaFree(pAllocationCallbacks, ptr);
2702 template<
typename T>
2703 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
2707 for(
size_t i = count; i--; )
2711 VmaFree(pAllocationCallbacks, ptr);
2716 template<
typename T>
2717 class VmaStlAllocator
2720 const VkAllocationCallbacks*
const m_pCallbacks;
2721 typedef T value_type;
2723 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
2724 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
2726 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
2727 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
2729 template<
typename U>
2730 bool operator==(
const VmaStlAllocator<U>& rhs)
const 2732 return m_pCallbacks == rhs.m_pCallbacks;
2734 template<
typename U>
2735 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 2737 return m_pCallbacks != rhs.m_pCallbacks;
2740 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
2743 #if VMA_USE_STL_VECTOR 2745 #define VmaVector std::vector 2747 template<
typename T,
typename allocatorT>
2748 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
2750 vec.insert(vec.begin() + index, item);
// Removes the element at the given index of a std::vector.
template<typename T, typename allocatorT>
static void VmaVectorRemove(std::vector<T, allocatorT>& vec, size_t index)
{
    vec.erase(vec.begin() + index);
}
2759 #else // #if VMA_USE_STL_VECTOR 2764 template<
typename T,
typename AllocatorT>
2768 typedef T value_type;
2770 VmaVector(
const AllocatorT& allocator) :
2771 m_Allocator(allocator),
2778 VmaVector(
size_t count,
const AllocatorT& allocator) :
2779 m_Allocator(allocator),
2780 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
2786 VmaVector(
const VmaVector<T, AllocatorT>& src) :
2787 m_Allocator(src.m_Allocator),
2788 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
2789 m_Count(src.m_Count),
2790 m_Capacity(src.m_Count)
2794 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
2800 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2803 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
2807 resize(rhs.m_Count);
2810 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
2816 bool empty()
const {
return m_Count == 0; }
2817 size_t size()
const {
return m_Count; }
2818 T* data() {
return m_pArray; }
2819 const T* data()
const {
return m_pArray; }
2821 T& operator[](
size_t index)
2823 VMA_HEAVY_ASSERT(index < m_Count);
2824 return m_pArray[index];
2826 const T& operator[](
size_t index)
const 2828 VMA_HEAVY_ASSERT(index < m_Count);
2829 return m_pArray[index];
2834 VMA_HEAVY_ASSERT(m_Count > 0);
2837 const T& front()
const 2839 VMA_HEAVY_ASSERT(m_Count > 0);
2844 VMA_HEAVY_ASSERT(m_Count > 0);
2845 return m_pArray[m_Count - 1];
2847 const T& back()
const 2849 VMA_HEAVY_ASSERT(m_Count > 0);
2850 return m_pArray[m_Count - 1];
2853 void reserve(
size_t newCapacity,
bool freeMemory =
false)
2855 newCapacity = VMA_MAX(newCapacity, m_Count);
2857 if((newCapacity < m_Capacity) && !freeMemory)
2859 newCapacity = m_Capacity;
2862 if(newCapacity != m_Capacity)
2864 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
2867 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
2869 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2870 m_Capacity = newCapacity;
2871 m_pArray = newArray;
2875 void resize(
size_t newCount,
bool freeMemory =
false)
2877 size_t newCapacity = m_Capacity;
2878 if(newCount > m_Capacity)
2880 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
2884 newCapacity = newCount;
2887 if(newCapacity != m_Capacity)
2889 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
2890 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
2891 if(elementsToCopy != 0)
2893 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
2895 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2896 m_Capacity = newCapacity;
2897 m_pArray = newArray;
2903 void clear(
bool freeMemory =
false)
2905 resize(0, freeMemory);
2908 void insert(
size_t index,
const T& src)
2910 VMA_HEAVY_ASSERT(index <= m_Count);
2911 const size_t oldCount = size();
2912 resize(oldCount + 1);
2913 if(index < oldCount)
2915 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
2917 m_pArray[index] = src;
2920 void remove(
size_t index)
2922 VMA_HEAVY_ASSERT(index < m_Count);
2923 const size_t oldCount = size();
2924 if(index < oldCount - 1)
2926 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
2928 resize(oldCount - 1);
2931 void push_back(
const T& src)
2933 const size_t newIndex = size();
2934 resize(newIndex + 1);
2935 m_pArray[newIndex] = src;
2940 VMA_HEAVY_ASSERT(m_Count > 0);
2944 void push_front(
const T& src)
2951 VMA_HEAVY_ASSERT(m_Count > 0);
2955 typedef T* iterator;
2957 iterator begin() {
return m_pArray; }
2958 iterator end() {
return m_pArray + m_Count; }
2961 AllocatorT m_Allocator;
2967 template<
typename T,
typename allocatorT>
2968 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2970 vec.insert(index, item);
2973 template<
typename T,
typename allocatorT>
2974 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2979 #endif // #if VMA_USE_STL_VECTOR 2981 template<
typename CmpLess,
typename VectorT>
2982 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2984 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2986 vector.data() + vector.size(),
2988 CmpLess()) - vector.data();
2989 VmaVectorInsert(vector, indexToInsert, value);
2990 return indexToInsert;
2993 template<
typename CmpLess,
typename VectorT>
2994 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
2997 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
3002 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
3004 size_t indexToRemove = it - vector.begin();
3005 VmaVectorRemove(vector, indexToRemove);
3011 template<
typename CmpLess,
typename VectorT>
3012 size_t VmaVectorFindSorted(
const VectorT& vector,
const typename VectorT::value_type& value)
3015 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
3017 vector.data() + vector.size(),
3020 if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
3022 return it - vector.begin();
3026 return vector.size();
3038 template<
typename T>
3039 class VmaPoolAllocator
3042 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
3043 ~VmaPoolAllocator();
3051 uint32_t NextFreeIndex;
3058 uint32_t FirstFreeIndex;
3061 const VkAllocationCallbacks* m_pAllocationCallbacks;
3062 size_t m_ItemsPerBlock;
3063 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
3065 ItemBlock& CreateNewBlock();
3068 template<
typename T>
3069 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
3070 m_pAllocationCallbacks(pAllocationCallbacks),
3071 m_ItemsPerBlock(itemsPerBlock),
3072 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
3074 VMA_ASSERT(itemsPerBlock > 0);
3077 template<
typename T>
3078 VmaPoolAllocator<T>::~VmaPoolAllocator()
3083 template<
typename T>
3084 void VmaPoolAllocator<T>::Clear()
3086 for(
size_t i = m_ItemBlocks.size(); i--; )
3087 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
3088 m_ItemBlocks.clear();
3091 template<
typename T>
3092 T* VmaPoolAllocator<T>::Alloc()
3094 for(
size_t i = m_ItemBlocks.size(); i--; )
3096 ItemBlock& block = m_ItemBlocks[i];
3098 if(block.FirstFreeIndex != UINT32_MAX)
3100 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
3101 block.FirstFreeIndex = pItem->NextFreeIndex;
3102 return &pItem->Value;
3107 ItemBlock& newBlock = CreateNewBlock();
3108 Item*
const pItem = &newBlock.pItems[0];
3109 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
3110 return &pItem->Value;
3113 template<
typename T>
3114 void VmaPoolAllocator<T>::Free(T* ptr)
3117 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
3119 ItemBlock& block = m_ItemBlocks[i];
3123 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
3126 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
3128 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
3129 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
3130 block.FirstFreeIndex = index;
3134 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
3137 template<
typename T>
3138 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
3140 ItemBlock newBlock = {
3141 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
3143 m_ItemBlocks.push_back(newBlock);
3146 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
3147 newBlock.pItems[i].NextFreeIndex = i + 1;
3148 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
3149 return m_ItemBlocks.back();
3155 #if VMA_USE_STL_LIST 3157 #define VmaList std::list 3159 #else // #if VMA_USE_STL_LIST 3161 template<
typename T>
3170 template<
typename T>
3174 typedef VmaListItem<T> ItemType;
3176 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
3180 size_t GetCount()
const {
return m_Count; }
3181 bool IsEmpty()
const {
return m_Count == 0; }
3183 ItemType* Front() {
return m_pFront; }
3184 const ItemType* Front()
const {
return m_pFront; }
3185 ItemType* Back() {
return m_pBack; }
3186 const ItemType* Back()
const {
return m_pBack; }
3188 ItemType* PushBack();
3189 ItemType* PushFront();
3190 ItemType* PushBack(
const T& value);
3191 ItemType* PushFront(
const T& value);
3196 ItemType* InsertBefore(ItemType* pItem);
3198 ItemType* InsertAfter(ItemType* pItem);
3200 ItemType* InsertBefore(ItemType* pItem,
const T& value);
3201 ItemType* InsertAfter(ItemType* pItem,
const T& value);
3203 void Remove(ItemType* pItem);
3206 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
3207 VmaPoolAllocator<ItemType> m_ItemAllocator;
3213 VmaRawList(
const VmaRawList<T>& src);
3214 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
3217 template<
typename T>
3218 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
3219 m_pAllocationCallbacks(pAllocationCallbacks),
3220 m_ItemAllocator(pAllocationCallbacks, 128),
3227 template<
typename T>
3228 VmaRawList<T>::~VmaRawList()
3234 template<
typename T>
3235 void VmaRawList<T>::Clear()
3237 if(IsEmpty() ==
false)
3239 ItemType* pItem = m_pBack;
3240 while(pItem != VMA_NULL)
3242 ItemType*
const pPrevItem = pItem->pPrev;
3243 m_ItemAllocator.Free(pItem);
3246 m_pFront = VMA_NULL;
3252 template<
typename T>
3253 VmaListItem<T>* VmaRawList<T>::PushBack()
3255 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3256 pNewItem->pNext = VMA_NULL;
3259 pNewItem->pPrev = VMA_NULL;
3260 m_pFront = pNewItem;
3266 pNewItem->pPrev = m_pBack;
3267 m_pBack->pNext = pNewItem;
3274 template<
typename T>
3275 VmaListItem<T>* VmaRawList<T>::PushFront()
3277 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3278 pNewItem->pPrev = VMA_NULL;
3281 pNewItem->pNext = VMA_NULL;
3282 m_pFront = pNewItem;
3288 pNewItem->pNext = m_pFront;
3289 m_pFront->pPrev = pNewItem;
3290 m_pFront = pNewItem;
3296 template<
typename T>
3297 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
3299 ItemType*
const pNewItem = PushBack();
3300 pNewItem->Value = value;
3304 template<
typename T>
3305 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
3307 ItemType*
const pNewItem = PushFront();
3308 pNewItem->Value = value;
3312 template<
typename T>
3313 void VmaRawList<T>::PopBack()
3315 VMA_HEAVY_ASSERT(m_Count > 0);
3316 ItemType*
const pBackItem = m_pBack;
3317 ItemType*
const pPrevItem = pBackItem->pPrev;
3318 if(pPrevItem != VMA_NULL)
3320 pPrevItem->pNext = VMA_NULL;
3322 m_pBack = pPrevItem;
3323 m_ItemAllocator.Free(pBackItem);
3327 template<
typename T>
3328 void VmaRawList<T>::PopFront()
3330 VMA_HEAVY_ASSERT(m_Count > 0);
3331 ItemType*
const pFrontItem = m_pFront;
3332 ItemType*
const pNextItem = pFrontItem->pNext;
3333 if(pNextItem != VMA_NULL)
3335 pNextItem->pPrev = VMA_NULL;
3337 m_pFront = pNextItem;
3338 m_ItemAllocator.Free(pFrontItem);
3342 template<
typename T>
3343 void VmaRawList<T>::Remove(ItemType* pItem)
3345 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
3346 VMA_HEAVY_ASSERT(m_Count > 0);
3348 if(pItem->pPrev != VMA_NULL)
3350 pItem->pPrev->pNext = pItem->pNext;
3354 VMA_HEAVY_ASSERT(m_pFront == pItem);
3355 m_pFront = pItem->pNext;
3358 if(pItem->pNext != VMA_NULL)
3360 pItem->pNext->pPrev = pItem->pPrev;
3364 VMA_HEAVY_ASSERT(m_pBack == pItem);
3365 m_pBack = pItem->pPrev;
3368 m_ItemAllocator.Free(pItem);
3372 template<
typename T>
3373 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
3375 if(pItem != VMA_NULL)
3377 ItemType*
const prevItem = pItem->pPrev;
3378 ItemType*
const newItem = m_ItemAllocator.Alloc();
3379 newItem->pPrev = prevItem;
3380 newItem->pNext = pItem;
3381 pItem->pPrev = newItem;
3382 if(prevItem != VMA_NULL)
3384 prevItem->pNext = newItem;
3388 VMA_HEAVY_ASSERT(m_pFront == pItem);
3398 template<
typename T>
3399 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
3401 if(pItem != VMA_NULL)
3403 ItemType*
const nextItem = pItem->pNext;
3404 ItemType*
const newItem = m_ItemAllocator.Alloc();
3405 newItem->pNext = nextItem;
3406 newItem->pPrev = pItem;
3407 pItem->pNext = newItem;
3408 if(nextItem != VMA_NULL)
3410 nextItem->pPrev = newItem;
3414 VMA_HEAVY_ASSERT(m_pBack == pItem);
3424 template<
typename T>
3425 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
3427 ItemType*
const newItem = InsertBefore(pItem);
3428 newItem->Value = value;
3432 template<
typename T>
3433 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
3435 ItemType*
const newItem = InsertAfter(pItem);
3436 newItem->Value = value;
3440 template<
typename T,
typename AllocatorT>
3453 T& operator*()
const 3455 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3456 return m_pItem->Value;
3458 T* operator->()
const 3460 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3461 return &m_pItem->Value;
3464 iterator& operator++()
3466 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3467 m_pItem = m_pItem->pNext;
3470 iterator& operator--()
3472 if(m_pItem != VMA_NULL)
3474 m_pItem = m_pItem->pPrev;
3478 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3479 m_pItem = m_pList->Back();
3484 iterator operator++(
int)
3486 iterator result = *
this;
3490 iterator operator--(
int)
3492 iterator result = *
this;
3497 bool operator==(
const iterator& rhs)
const 3499 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3500 return m_pItem == rhs.m_pItem;
3502 bool operator!=(
const iterator& rhs)
const 3504 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3505 return m_pItem != rhs.m_pItem;
3509 VmaRawList<T>* m_pList;
3510 VmaListItem<T>* m_pItem;
3512 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
3518 friend class VmaList<T, AllocatorT>;
3521 class const_iterator
3530 const_iterator(
const iterator& src) :
3531 m_pList(src.m_pList),
3532 m_pItem(src.m_pItem)
3536 const T& operator*()
const 3538 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3539 return m_pItem->Value;
3541 const T* operator->()
const 3543 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3544 return &m_pItem->Value;
3547 const_iterator& operator++()
3549 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3550 m_pItem = m_pItem->pNext;
3553 const_iterator& operator--()
3555 if(m_pItem != VMA_NULL)
3557 m_pItem = m_pItem->pPrev;
3561 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3562 m_pItem = m_pList->Back();
3567 const_iterator operator++(
int)
3569 const_iterator result = *
this;
3573 const_iterator operator--(
int)
3575 const_iterator result = *
this;
3580 bool operator==(
const const_iterator& rhs)
const 3582 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3583 return m_pItem == rhs.m_pItem;
3585 bool operator!=(
const const_iterator& rhs)
const 3587 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3588 return m_pItem != rhs.m_pItem;
3592 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
3598 const VmaRawList<T>* m_pList;
3599 const VmaListItem<T>* m_pItem;
3601 friend class VmaList<T, AllocatorT>;
3604 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
3606 bool empty()
const {
return m_RawList.IsEmpty(); }
3607 size_t size()
const {
return m_RawList.GetCount(); }
3609 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
3610 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
3612 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
3613 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
3615 void clear() { m_RawList.Clear(); }
3616 void push_back(
const T& value) { m_RawList.PushBack(value); }
3617 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
3618 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
3621 VmaRawList<T> m_RawList;
3624 #endif // #if VMA_USE_STL_LIST 3632 #if VMA_USE_STL_UNORDERED_MAP 3634 #define VmaPair std::pair 3636 #define VMA_MAP_TYPE(KeyT, ValueT) \ 3637 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 3639 #else // #if VMA_USE_STL_UNORDERED_MAP 3641 template<
typename T1,
typename T2>
3647 VmaPair() : first(), second() { }
3648 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
3654 template<
typename KeyT,
typename ValueT>
3658 typedef VmaPair<KeyT, ValueT> PairType;
3659 typedef PairType* iterator;
3661 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
3663 iterator begin() {
return m_Vector.begin(); }
3664 iterator end() {
return m_Vector.end(); }
3666 void insert(
const PairType& pair);
3667 iterator find(
const KeyT& key);
3668 void erase(iterator it);
3671 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
3674 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 3676 template<
typename FirstT,
typename SecondT>
3677 struct VmaPairFirstLess
3679 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 3681 return lhs.first < rhs.first;
3683 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 3685 return lhs.first < rhsFirst;
3689 template<
typename KeyT,
typename ValueT>
3690 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
3692 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3694 m_Vector.data() + m_Vector.size(),
3696 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
3697 VmaVectorInsert(m_Vector, indexToInsert, pair);
3700 template<
typename KeyT,
typename ValueT>
3701 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
3703 PairType* it = VmaBinaryFindFirstNotLess(
3705 m_Vector.data() + m_Vector.size(),
3707 VmaPairFirstLess<KeyT, ValueT>());
3708 if((it != m_Vector.end()) && (it->first == key))
3714 return m_Vector.end();
3718 template<
typename KeyT,
typename ValueT>
3719 void VmaMap<KeyT, ValueT>::erase(iterator it)
3721 VmaVectorRemove(m_Vector, it - m_Vector.begin());
3724 #endif // #if VMA_USE_STL_UNORDERED_MAP 3730 class VmaDeviceMemoryBlock;
3732 struct VmaAllocation_T
3735 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
3739 FLAG_USER_DATA_STRING = 0x01,
3743 enum ALLOCATION_TYPE
3745 ALLOCATION_TYPE_NONE,
3746 ALLOCATION_TYPE_BLOCK,
3747 ALLOCATION_TYPE_DEDICATED,
3750 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
3753 m_pUserData(VMA_NULL),
3754 m_LastUseFrameIndex(currentFrameIndex),
3755 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
3756 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
3758 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
3764 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
3767 VMA_ASSERT(m_pUserData == VMA_NULL);
3770 void InitBlockAllocation(
3772 VmaDeviceMemoryBlock* block,
3773 VkDeviceSize offset,
3774 VkDeviceSize alignment,
3776 VmaSuballocationType suballocationType,
3780 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3781 VMA_ASSERT(block != VMA_NULL);
3782 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3783 m_Alignment = alignment;
3785 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3786 m_SuballocationType = (uint8_t)suballocationType;
3787 m_BlockAllocation.m_hPool = hPool;
3788 m_BlockAllocation.m_Block = block;
3789 m_BlockAllocation.m_Offset = offset;
3790 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
3795 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3796 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
3797 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3798 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
3799 m_BlockAllocation.m_Block = VMA_NULL;
3800 m_BlockAllocation.m_Offset = 0;
3801 m_BlockAllocation.m_CanBecomeLost =
true;
3804 void ChangeBlockAllocation(
3806 VmaDeviceMemoryBlock* block,
3807 VkDeviceSize offset);
3810 void InitDedicatedAllocation(
3811 uint32_t memoryTypeIndex,
3812 VkDeviceMemory hMemory,
3813 VmaSuballocationType suballocationType,
3817 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3818 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
3819 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
3822 m_SuballocationType = (uint8_t)suballocationType;
3823 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3824 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
3825 m_DedicatedAllocation.m_hMemory = hMemory;
3826 m_DedicatedAllocation.m_pMappedData = pMappedData;
// Trivial accessors of VmaAllocation_T. Getter bodies below were split across
// lines by the extraction; all tokens are preserved verbatim.
3829 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
3830 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
3831 VkDeviceSize GetSize()
const {
return m_Size; }
// True when pUserData is treated as an owned, heap-copied C string rather than
// an opaque pointer (see SetUserData / FreeUserDataString later in this file).
3832 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
3833 void* GetUserData()
const {
return m_pUserData; }
3834 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
3835 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
// Valid only for block allocations (asserted).
3837 VmaDeviceMemoryBlock* GetBlock()
const 3839 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3840 return m_BlockAllocation.m_Block;
3842 VkDeviceSize GetOffset()
const;
3843 VkDeviceMemory GetMemory()
const;
3844 uint32_t GetMemoryTypeIndex()
const;
3845 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
3846 void* GetMappedData()
const;
3847 bool CanBecomeLost()
const;
// Lost-allocation bookkeeping is done through an atomic last-use frame index so
// it can be updated lock-free; compare_exchange_weak may fail spuriously, so
// callers are expected to retry in a loop (see MakeLost below).
3850 uint32_t GetLastUseFrameIndex()
const 3852 return m_LastUseFrameIndex.load();
3854 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
3856 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
3866 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3868 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
3870 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
3881 void BlockAllocMap();
3882 void BlockAllocUnmap();
3883 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
// Data members of VmaAllocation_T. The BlockAllocation / DedicatedAllocation
// structs presumably live in an anonymous union selected by m_Type -- the union
// keyword itself is not visible in this extraction (TODO confirm).
3887 VkDeviceSize m_Alignment;
3888 VkDeviceSize m_Size;
// Atomic: see GetLastUseFrameIndex/CompareExchangeLastUseFrameIndex above.
3890 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
3892 uint8_t m_SuballocationType;
// State used when the allocation is a suballocation of a memory block.
3899 struct BlockAllocation
3902 VmaDeviceMemoryBlock* m_Block;
3903 VkDeviceSize m_Offset;
3904 bool m_CanBecomeLost;
// State used when the allocation owns its own VkDeviceMemory.
3908 struct DedicatedAllocation
3910 uint32_t m_MemoryTypeIndex;
3911 VkDeviceMemory m_hMemory;
3912 void* m_pMappedData;
3918 BlockAllocation m_BlockAllocation;
3920 DedicatedAllocation m_DedicatedAllocation;
// One entry in a block's suballocation list: a used or free range of the block.
// NOTE(review): the size and hAllocation members are elided by the extraction
// (they are referenced throughout the code below).
3930 struct VmaSuballocation
3932 VkDeviceSize offset;
3935 VmaSuballocationType type;
3938 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Artificial cost (in bytes) charged per allocation that would have to be made
// lost to satisfy a request; used to compare candidate placements.
3941 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// Parameters of a planned allocation inside a block, produced by
// VmaBlockMetadata::CreateAllocationRequest and consumed by Alloc.
3956 struct VmaAllocationRequest
3958 VkDeviceSize offset;
3959 VkDeviceSize sumFreeSize;
3960 VkDeviceSize sumItemSize;
3961 VmaSuballocationList::iterator item;
3962 size_t itemsToMakeLostCount;
// Cost heuristic: bytes of existing allocations sacrificed plus a fixed
// penalty per lost allocation. Lower is better.
3964 VkDeviceSize CalcCost()
const 3966 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// VmaBlockMetadata: bookkeeping for a single VkDeviceMemory block -- a list of
// suballocations (used + free ranges) plus a size-sorted index of free ranges.
// Does not own the VkDeviceMemory itself (that is VmaDeviceMemoryBlock's job).
3974 class VmaBlockMetadata
3978 ~VmaBlockMetadata();
// Starts the metadata as one single free range covering [0, size).
3979 void Init(VkDeviceSize size);
// Consistency check of all internal invariants; used under VMA_HEAVY_ASSERT.
3982 bool Validate()
const;
3983 VkDeviceSize GetSize()
const {
return m_Size; }
3984 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
3985 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
3986 VkDeviceSize GetUnusedRangeSizeMax()
const;
3988 bool IsEmpty()
const;
3990 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
3993 #if VMA_STATS_STRING_ENABLED 3994 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// Shortcut for an empty block: the request is trivially the whole block.
3998 void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
// Tries to find a place for an allocation of given size/alignment/type,
// optionally planning to make other (lost-able) allocations lost.
// Returns true and fills *pAllocationRequest on success.
4003 bool CreateAllocationRequest(
4004 uint32_t currentFrameIndex,
4005 uint32_t frameInUseCount,
4006 VkDeviceSize bufferImageGranularity,
4007 VkDeviceSize allocSize,
4008 VkDeviceSize allocAlignment,
4009 VmaSuballocationType allocType,
4010 bool canMakeOtherLost,
4011 VmaAllocationRequest* pAllocationRequest);
4013 bool MakeRequestedAllocationsLost(
4014 uint32_t currentFrameIndex,
4015 uint32_t frameInUseCount,
4016 VmaAllocationRequest* pAllocationRequest);
4018 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4022 const VmaAllocationRequest& request,
4023 VmaSuballocationType type,
4024 VkDeviceSize allocSize,
4029 void FreeAtOffset(VkDeviceSize offset);
4032 VkDeviceSize m_Size;
4033 uint32_t m_FreeCount;
4034 VkDeviceSize m_SumFreeSize;
4035 VmaSuballocationList m_Suballocations;
// Free suballocations sorted ascending by size, enabling binary search for
// best-fit (only ranges >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER).
4038 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
4040 bool ValidateFreeSuballocationList()
const;
// Core fit test starting at suballocItem; outputs the offset and the cost of
// any allocations that would need to become lost.
4044 bool CheckAllocation(
4045 uint32_t currentFrameIndex,
4046 uint32_t frameInUseCount,
4047 VkDeviceSize bufferImageGranularity,
4048 VkDeviceSize allocSize,
4049 VkDeviceSize allocAlignment,
4050 VmaSuballocationType allocType,
4051 VmaSuballocationList::const_iterator suballocItem,
4052 bool canMakeOtherLost,
4053 VkDeviceSize* pOffset,
4054 size_t* itemsToMakeLostCount,
4055 VkDeviceSize* pSumFreeSize,
4056 VkDeviceSize* pSumItemSize)
const;
// Free-list maintenance helpers: coalescing and keeping the by-size index.
4058 void MergeFreeWithNext(VmaSuballocationList::iterator item);
4062 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
4065 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
4068 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// VmaDeviceMemoryBlock: owns one VkDeviceMemory handle plus its metadata and a
// ref-counted CPU mapping shared by all suballocations in the block.
4077 class VmaDeviceMemoryBlock
4080 VmaBlockMetadata m_Metadata;
// Destructor only asserts; actual vkFreeMemory happens in Destroy() (not
// visible in this extraction), which must run first.
4084 ~VmaDeviceMemoryBlock()
4086 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
4087 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
4092 uint32_t newMemoryTypeIndex,
4093 VkDeviceMemory newMemory,
4094 VkDeviceSize newSize);
4098 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
4099 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
4100 void* GetMappedData()
const {
return m_pMappedData; }
4103 bool Validate()
const;
// Map increments the shared map refcount by 'count'; the matching Unmap
// declaration is elided by the extraction.
4106 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
4109 VkResult BindBufferMemory(
4113 VkResult BindImageMemory(
4119 uint32_t m_MemoryTypeIndex;
4120 VkDeviceMemory m_hMemory;
// m_MapCount > 0 means m_pMappedData is valid for the whole block.
4125 uint32_t m_MapCount;
4126 void* m_pMappedData;
// Ordering functor comparing raw pointer values; used for sorted pointer
// vectors (e.g. the block list). Body is elided by the extraction -- presumably
// 'return lhs < rhs;' (TODO confirm against upstream).
4129 struct VmaPointerLess
4131 bool operator()(
const void* lhs,
const void* rhs)
const 4137 class VmaDefragmentator;
// VmaBlockVector: a growable sequence of VmaDeviceMemoryBlock for one memory
// type -- either the allocator's default vector for that type or the backing
// store of a custom VmaPool (m_IsCustomPool).
4145 struct VmaBlockVector
4149 uint32_t memoryTypeIndex,
4150 VkDeviceSize preferredBlockSize,
4151 size_t minBlockCount,
4152 size_t maxBlockCount,
4153 VkDeviceSize bufferImageGranularity,
4154 uint32_t frameInUseCount,
// Pre-creates m_MinBlockCount empty blocks.
4158 VkResult CreateMinBlocks();
4160 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
4161 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
4162 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
4163 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
4167 bool IsEmpty()
const {
return m_Blocks.empty(); }
// Allocate() parameters (the function name line is elided by the extraction).
4171 uint32_t currentFrameIndex,
4172 const VkMemoryRequirements& vkMemReq,
4174 VmaSuballocationType suballocType,
4183 #if VMA_STATS_STRING_ENABLED 4184 void PrintDetailedMap(
class VmaJsonWriter& json);
4187 void MakePoolAllocationsLost(
4188 uint32_t currentFrameIndex,
4189 size_t* pLostAllocationCount);
// Lazily creates the per-vector defragmentator; Defragment runs it and
// DestroyDefragmentator tears it down.
4191 VmaDefragmentator* EnsureDefragmentator(
4193 uint32_t currentFrameIndex);
4195 VkResult Defragment(
4197 VkDeviceSize& maxBytesToMove,
4198 uint32_t& maxAllocationsToMove);
4200 void DestroyDefragmentator();
4203 friend class VmaDefragmentator;
4206 const uint32_t m_MemoryTypeIndex;
4207 const VkDeviceSize m_PreferredBlockSize;
4208 const size_t m_MinBlockCount;
4209 const size_t m_MaxBlockCount;
4210 const VkDeviceSize m_BufferImageGranularity;
4211 const uint32_t m_FrameInUseCount;
4212 const bool m_IsCustomPool;
// Incrementally sorted by free size (see IncrementallySortBlocks).
4215 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
// True when exactly one fully-free block is kept around as a cache.
4219 bool m_HasEmptyBlock;
4220 VmaDefragmentator* m_pDefragmentator;
4222 size_t CalcMaxBlockSize()
const;
4225 void Remove(VmaDeviceMemoryBlock* pBlock);
4229 void IncrementallySortBlocks();
4231 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// NOTE(review): interior fragment of VmaPool_T (its class header, original line
// ~4235, is elided by the extraction). A custom pool is a thin wrapper around
// one VmaBlockVector.
4237 VmaBlockVector m_BlockVector;
4245 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
// VmaDefragmentator: compacts one VmaBlockVector by moving allocations from
// sparsely-used blocks into fuller ones, bounded by maxBytesToMove /
// maxAllocationsToMove.
4247 #if VMA_STATS_STRING_ENABLED 4252 class VmaDefragmentator
4255 VmaBlockVector*
const m_pBlockVector;
4256 uint32_t m_CurrentFrameIndex;
4257 VkDeviceSize m_BytesMoved;
4258 uint32_t m_AllocationsMoved;
// Per-allocation record: the allocation to (maybe) move and an optional
// caller-visible "was moved" flag.
4260 struct AllocationInfo
4263 VkBool32* m_pChanged;
4266 m_hAllocation(VK_NULL_HANDLE),
4267 m_pChanged(VMA_NULL)
// Sort comparator: biggest allocations first, so large moves happen early.
4272 struct AllocationInfoSizeGreater
4274 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 4276 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
4281 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
// Per-block working state during a defragmentation pass.
4285 VmaDeviceMemoryBlock* m_pBlock;
4286 bool m_HasNonMovableAllocations;
4287 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
4289 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
4291 m_HasNonMovableAllocations(true),
4292 m_Allocations(pAllocationCallbacks),
4293 m_pMappedDataForDefragmentation(VMA_NULL)
// A block has non-movable allocations when the caller registered fewer
// allocations for defragmentation than the block actually contains.
4297 void CalcHasNonMovableAllocations()
4299 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
4300 const size_t defragmentAllocCount = m_Allocations.size();
4301 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
// NOTE(review): "Descecnding" typo is upstream's; left untouched here since a
// doc-only edit must not rename identifiers.
4304 void SortAllocationsBySizeDescecnding()
4306 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
4309 VkResult EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData);
4314 void* m_pMappedDataForDefragmentation;
// Heterogeneous comparator so a BlockInfo* vector can be binary-searched by a
// raw VmaDeviceMemoryBlock* key.
4317 struct BlockPointerLess
4319 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 4321 return pLhsBlockInfo->m_pBlock < pRhsBlock;
4323 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 4325 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Move-destination ordering: blocks with only movable allocations first, then
// by free size (comparison tail elided by the extraction).
4331 struct BlockInfoCompareMoveDestination
4333 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 4335 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
4339 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
4343 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
4351 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
4352 BlockInfoVector m_Blocks;
4354 VkResult DefragmentRound(
4355 VkDeviceSize maxBytesToMove,
4356 uint32_t maxAllocationsToMove);
4358 static bool MoveMakesSense(
4359 size_t dstBlockIndex, VkDeviceSize dstOffset,
4360 size_t srcBlockIndex, VkDeviceSize srcOffset);
4365 VmaBlockVector* pBlockVector,
4366 uint32_t currentFrameIndex);
4368 ~VmaDefragmentator();
4370 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
4371 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
4373 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
4375 VkResult Defragment(
4376 VkDeviceSize maxBytesToMove,
4377 uint32_t maxAllocationsToMove);
// VmaAllocator_T: the allocator object behind the opaque VmaAllocator handle.
// Holds device/physical-device state, one VmaBlockVector per memory type, the
// dedicated-allocation registries, and custom pools.
4381 struct VmaAllocator_T
4384 bool m_UseKhrDedicatedAllocation;
4386 bool m_AllocationCallbacksSpecified;
4387 VkAllocationCallbacks m_AllocationCallbacks;
// Per-heap soft limits, guarded by their own mutex.
4391 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
4392 VMA_MUTEX m_HeapSizeLimitMutex;
4394 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
4395 VkPhysicalDeviceMemoryProperties m_MemProps;
// Default block vectors, indexed by memory type.
4398 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
// Dedicated allocations tracked per memory type, each list with its own lock.
4401 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
4402 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
4403 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns user callbacks if provided, else null so Vulkan uses its defaults.
4408 const VkAllocationCallbacks* GetAllocationCallbacks()
const 4410 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
4414 return m_VulkanFunctions;
// Effective granularity: at least the debug minimum, and at least the
// device-reported bufferImageGranularity limit.
4417 VkDeviceSize GetBufferImageGranularity()
const 4420 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
4421 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
4424 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
4425 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
4427 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 4429 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
4430 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// Query memory requirements, including VK_KHR_dedicated_allocation hints when
// the extension is enabled.
4433 void GetBufferMemoryRequirements(
4435 VkMemoryRequirements& memReq,
4436 bool& requiresDedicatedAllocation,
4437 bool& prefersDedicatedAllocation)
const;
4438 void GetImageMemoryRequirements(
4440 VkMemoryRequirements& memReq,
4441 bool& requiresDedicatedAllocation,
4442 bool& prefersDedicatedAllocation)
const;
// Main allocation entry point used by vmaAllocateMemory*/vmaCreateBuffer/Image.
4445 VkResult AllocateMemory(
4446 const VkMemoryRequirements& vkMemReq,
4447 bool requiresDedicatedAllocation,
4448 bool prefersDedicatedAllocation,
4449 VkBuffer dedicatedBuffer,
4450 VkImage dedicatedImage,
4452 VmaSuballocationType suballocType,
4458 void CalculateStats(
VmaStats* pStats);
4460 #if VMA_STATS_STRING_ENABLED 4461 void PrintDetailedMap(
class VmaJsonWriter& json);
4464 VkResult Defragment(
4466 size_t allocationCount,
4467 VkBool32* pAllocationsChanged,
4475 void DestroyPool(
VmaPool pool);
4478 void SetCurrentFrameIndex(uint32_t frameIndex);
4480 void MakePoolAllocationsLost(
4482 size_t* pLostAllocationCount);
// Thin wrappers over vkAllocateMemory/vkFreeMemory that also enforce heap
// size limits and invoke user device-memory callbacks.
4486 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
4487 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
4492 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
4493 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
4496 VkDeviceSize m_PreferredLargeHeapBlockSize;
4498 VkPhysicalDevice m_PhysicalDevice;
4499 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
4501 VMA_MUTEX m_PoolsMutex;
4503 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
4509 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
4511 VkResult AllocateMemoryOfType(
4512 const VkMemoryRequirements& vkMemReq,
4513 bool dedicatedAllocation,
4514 VkBuffer dedicatedBuffer,
4515 VkImage dedicatedImage,
4517 uint32_t memTypeIndex,
4518 VmaSuballocationType suballocType,
4522 VkResult AllocateDedicatedMemory(
4524 VmaSuballocationType suballocType,
4525 uint32_t memTypeIndex,
4527 bool isUserDataString,
4529 VkBuffer dedicatedBuffer,
4530 VkImage dedicatedImage,
// Allocator-aware new/delete helpers: route all internal CPU allocations
// through the user-supplied VkAllocationCallbacks stored in the allocator.
4540 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
4542 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
4545 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
4547 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// vma_new-style helper (its name line is elided): allocates one T with
// correct alignment; the placement-new call is also elided by the extraction.
4550 template<
typename T>
4553 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
4556 template<
typename T>
4557 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
4559 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// vma_delete: destructor call precedes the free (dtor line elided here).
4562 template<
typename T>
4563 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
4568 VmaFree(hAllocator, ptr);
// vma_delete_array: destroys elements in reverse order, then frees the block.
4572 template<
typename T>
4573 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
4577 for(
size_t i = count; i--; )
4579 VmaFree(hAllocator, ptr);
// VmaStringBuilder: minimal append-only string buffer (NOT null-terminated;
// callers use GetLength()+GetData()). Backing storage uses the allocator's
// CPU allocation callbacks. Only compiled when stats strings are enabled.
4586 #if VMA_STATS_STRING_ENABLED 4588 class VmaStringBuilder
4591 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
4592 size_t GetLength()
const {
return m_Data.size(); }
4593 const char* GetData()
const {
return m_Data.data(); }
4595 void Add(
char ch) { m_Data.push_back(ch); }
4596 void Add(
const char* pStr);
4597 void AddNewLine() { Add(
'\n'); }
4598 void AddNumber(uint32_t num);
4599 void AddNumber(uint64_t num);
4600 void AddPointer(
const void* ptr);
4603 VmaVector< char, VmaStlAllocator<char> > m_Data;
// Appends a C string by resize+memcpy (the empty-string guard, original lines
// 4609-4610, is elided by the extraction).
4606 void VmaStringBuilder::Add(
const char* pStr)
4608 const size_t strLen = strlen(pStr);
4611 const size_t oldCount = m_Data.size();
4612 m_Data.resize(oldCount + strLen);
4613 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Number/pointer formatting goes through fixed local buffers via the
// VmaUint32ToStr/VmaUint64ToStr/VmaPtrToStr helpers (buffer declarations and
// the trailing Add(buf) calls are elided by the extraction).
4617 void VmaStringBuilder::AddNumber(uint32_t num)
4620 VmaUint32ToStr(buf,
sizeof(buf), num);
4624 void VmaStringBuilder::AddNumber(uint64_t num)
4627 VmaUint64ToStr(buf,
sizeof(buf), num);
4631 void VmaStringBuilder::AddPointer(
const void* ptr)
4634 VmaPtrToStr(buf,
sizeof(buf), ptr);
// VmaJsonWriter: tiny streaming JSON emitter used by vmaBuildStatsString.
// Maintains a stack of open objects/arrays; asserts enforce that strings are
// begun/ended correctly and that object members alternate name/value.
4638 #endif // #if VMA_STATS_STRING_ENABLED 4643 #if VMA_STATS_STRING_ENABLED 4648 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
4651 void BeginObject(
bool singleLine =
false);
4654 void BeginArray(
bool singleLine =
false);
4657 void WriteString(
const char* pStr);
// BeginString/ContinueString/EndString allow composing one JSON string value
// from multiple pieces (text, numbers, pointers).
4658 void BeginString(
const char* pStr = VMA_NULL);
4659 void ContinueString(
const char* pStr);
4660 void ContinueString(uint32_t n);
4661 void ContinueString(uint64_t n);
4662 void ContinueString_Pointer(
const void* ptr);
4663 void EndString(
const char* pStr = VMA_NULL);
4665 void WriteNumber(uint32_t n);
4666 void WriteNumber(uint64_t n);
4667 void WriteBool(
bool b);
4671 static const char*
const INDENT;
4673 enum COLLECTION_TYPE
4675 COLLECTION_TYPE_OBJECT,
4676 COLLECTION_TYPE_ARRAY,
// Stack frame per open collection; valueCount parity distinguishes a name
// from a value inside an object.
4680 COLLECTION_TYPE type;
4681 uint32_t valueCount;
4682 bool singleLineMode;
4685 VmaStringBuilder& m_SB;
4686 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
4687 bool m_InsideString;
4689 void BeginValue(
bool isString);
4690 void WriteIndent(
bool oneLess =
false);
4693 const char*
const VmaJsonWriter::INDENT =
" ";
4695 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
4697 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
4698 m_InsideString(false)
// Destructor asserts all strings/collections were properly closed.
4702 VmaJsonWriter::~VmaJsonWriter()
4704 VMA_ASSERT(!m_InsideString);
4705 VMA_ASSERT(m_Stack.empty());
4708 void VmaJsonWriter::BeginObject(
bool singleLine)
4710 VMA_ASSERT(!m_InsideString);
4716 item.type = COLLECTION_TYPE_OBJECT;
4717 item.valueCount = 0;
4718 item.singleLineMode = singleLine;
4719 m_Stack.push_back(item);
4722 void VmaJsonWriter::EndObject()
4724 VMA_ASSERT(!m_InsideString);
// Closing an object pops its stack frame; the assert guards against
// mismatched Begin/End pairs.
4729 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
4733 void VmaJsonWriter::BeginArray(
bool singleLine)
4735 VMA_ASSERT(!m_InsideString);
4741 item.type = COLLECTION_TYPE_ARRAY;
4742 item.valueCount = 0;
4743 item.singleLineMode = singleLine;
4744 m_Stack.push_back(item);
4747 void VmaJsonWriter::EndArray()
4749 VMA_ASSERT(!m_InsideString);
4754 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
4758 void VmaJsonWriter::WriteString(
const char* pStr)
4764 void VmaJsonWriter::BeginString(
const char* pStr)
4766 VMA_ASSERT(!m_InsideString);
4770 m_InsideString =
true;
4771 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4773 ContinueString(pStr);
// Appends pStr with JSON escaping; the per-character switch (original lines
// 4783-4814) is elided by the extraction, leaving only the default assert.
4777 void VmaJsonWriter::ContinueString(
const char* pStr)
4779 VMA_ASSERT(m_InsideString);
4781 const size_t strLen = strlen(pStr);
4782 for(
size_t i = 0; i < strLen; ++i)
4815 VMA_ASSERT(0 &&
"Character not currently supported.");
4821 void VmaJsonWriter::ContinueString(uint32_t n)
4823 VMA_ASSERT(m_InsideString);
4827 void VmaJsonWriter::ContinueString(uint64_t n)
4829 VMA_ASSERT(m_InsideString);
4833 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
4835 VMA_ASSERT(m_InsideString);
4836 m_SB.AddPointer(ptr);
4839 void VmaJsonWriter::EndString(
const char* pStr)
4841 VMA_ASSERT(m_InsideString);
4842 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4844 ContinueString(pStr);
4847 m_InsideString =
false;
4850 void VmaJsonWriter::WriteNumber(uint32_t n)
4852 VMA_ASSERT(!m_InsideString);
4857 void VmaJsonWriter::WriteNumber(uint64_t n)
4859 VMA_ASSERT(!m_InsideString);
4864 void VmaJsonWriter::WriteBool(
bool b)
4866 VMA_ASSERT(!m_InsideString);
4868 m_SB.Add(b ?
"true" :
"false");
4871 void VmaJsonWriter::WriteNull()
4873 VMA_ASSERT(!m_InsideString);
// BeginValue: emits separators/indentation before a value, and asserts that
// inside an object every even-positioned item is a string (the member name).
4878 void VmaJsonWriter::BeginValue(
bool isString)
4880 if(!m_Stack.empty())
4882 StackItem& currItem = m_Stack.back();
4883 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4884 currItem.valueCount % 2 == 0)
4886 VMA_ASSERT(isString);
4889 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4890 currItem.valueCount % 2 != 0)
4894 else if(currItem.valueCount > 0)
4903 ++currItem.valueCount;
4907 void VmaJsonWriter::WriteIndent(
bool oneLess)
4909 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
4913 size_t count = m_Stack.size();
4914 if(count > 0 && oneLess)
4918 for(
size_t i = 0; i < count; ++i)
// SetUserData: when the allocation was created with the user-data-string flag,
// the previous owned string is freed and pUserData (treated as a C string) is
// deep-copied; otherwise the raw pointer is simply stored.
4925 #endif // #if VMA_STATS_STRING_ENABLED 4929 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
4931 if(IsUserDataString())
4933 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
4935 FreeUserDataString(hAllocator);
4937 if(pUserData != VMA_NULL)
4939 const char*
const newStrSrc = (
char*)pUserData;
4940 const size_t newStrLen = strlen(newStrSrc);
// +1 copies the terminating NUL as well.
4941 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
4942 memcpy(newStrDst, newStrSrc, newStrLen + 1);
4943 m_pUserData = newStrDst;
4948 m_pUserData = pUserData;
// ChangeBlockAllocation (defragmentation): when moving to a different block, a
// persistently-mapped allocation transfers its map refcount from the old block
// to the new one before the pointers are updated.
4952 void VmaAllocation_T::ChangeBlockAllocation(
4954 VmaDeviceMemoryBlock* block,
4955 VkDeviceSize offset)
4957 VMA_ASSERT(block != VMA_NULL);
4958 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4961 if(block != m_BlockAllocation.m_Block)
4963 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
4964 if(IsPersistentMap())
4966 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
4967 block->Map(hAllocator, mapRefCount, VMA_NULL);
4970 m_BlockAllocation.m_Block = block;
4971 m_BlockAllocation.m_Offset = offset;
// The accessors below dispatch on m_Type; the switch headers and default
// branches are elided by the extraction.
4974 VkDeviceSize VmaAllocation_T::GetOffset()
const 4978 case ALLOCATION_TYPE_BLOCK:
4979 return m_BlockAllocation.m_Offset;
4980 case ALLOCATION_TYPE_DEDICATED:
4988 VkDeviceMemory VmaAllocation_T::GetMemory()
const 4992 case ALLOCATION_TYPE_BLOCK:
4993 return m_BlockAllocation.m_Block->GetDeviceMemory();
4994 case ALLOCATION_TYPE_DEDICATED:
4995 return m_DedicatedAllocation.m_hMemory;
4998 return VK_NULL_HANDLE;
5002 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 5006 case ALLOCATION_TYPE_BLOCK:
5007 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
5008 case ALLOCATION_TYPE_DEDICATED:
5009 return m_DedicatedAllocation.m_MemoryTypeIndex;
// For a block allocation the mapped pointer is the block's base mapping plus
// this allocation's offset.
5016 void* VmaAllocation_T::GetMappedData()
const 5020 case ALLOCATION_TYPE_BLOCK:
5023 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
5024 VMA_ASSERT(pBlockData != VMA_NULL);
5025 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
5032 case ALLOCATION_TYPE_DEDICATED:
5033 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
5034 return m_DedicatedAllocation.m_pMappedData;
5041 bool VmaAllocation_T::CanBecomeLost()
const 5045 case ALLOCATION_TYPE_BLOCK:
5046 return m_BlockAllocation.m_CanBecomeLost;
5047 case ALLOCATION_TYPE_DEDICATED:
5055 VmaPool VmaAllocation_T::GetPool()
const 5057 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5058 return m_BlockAllocation.m_hPool;
// MakeLost: CAS loop retiring the allocation -- fails if it is already lost or
// was used within the last frameInUseCount frames. compare_exchange_weak may
// fail spuriously, hence the enclosing retry loop (loop header elided).
5061 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5063 VMA_ASSERT(CanBecomeLost());
5069 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
5072 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
5077 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
5083 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Releases the owned copy of the user-data string (length+1 to match the
// NUL-inclusive allocation made in SetUserData).
5093 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
5095 VMA_ASSERT(IsUserDataString());
5096 if(m_pUserData != VMA_NULL)
5098 char*
const oldStr = (
char*)m_pUserData;
5099 const size_t oldStrLen = strlen(oldStr);
5100 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
5101 m_pUserData = VMA_NULL;
// Map/unmap refcounting. The low 7 bits of m_MapCount are the refcount; bit
// 0x80 is the persistent-map flag, masked off before range checks. 0x7F is
// therefore the maximum simultaneous map count.
5105 void VmaAllocation_T::BlockAllocMap()
5107 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
5109 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
5115 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
5119 void VmaAllocation_T::BlockAllocUnmap()
5121 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
5123 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
5129 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// DedicatedAllocMap: if already mapped, return the cached pointer; otherwise
// call vkMapMemory through the dispatch table and cache the result.
5133 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
5135 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
5139 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
5141 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
5142 *ppData = m_DedicatedAllocation.m_pMappedData;
5148 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
5149 return VK_ERROR_MEMORY_MAP_FAILED;
// First map: offset/size/flags arguments (original lines 5157-5160) are elided
// by the extraction; presumably 0, VK_WHOLE_SIZE, 0 -- TODO confirm upstream.
5154 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5155 hAllocator->m_hDevice,
5156 m_DedicatedAllocation.m_hMemory,
5161 if(result == VK_SUCCESS)
5163 m_DedicatedAllocation.m_pMappedData = *ppData;
// DedicatedAllocUnmap: when the refcount drops to zero, clears the cached
// pointer and calls vkUnmapMemory.
5170 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
5172 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
5174 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
5179 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
5180 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
5181 hAllocator->m_hDevice,
5182 m_DedicatedAllocation.m_hMemory);
5187 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// Stats-string helpers (compiled only with VMA_STATS_STRING_ENABLED).
// VmaPrintStatInfo serializes one VmaStatInfo as a JSON object; the
// json.WriteNumber(...) value lines are elided by the extraction, leaving the
// key names.
5191 #if VMA_STATS_STRING_ENABLED 5194 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
5203 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
5207 json.WriteString(
"Blocks");
5210 json.WriteString(
"Allocations");
5213 json.WriteString(
"UnusedRanges");
5216 json.WriteString(
"UsedBytes");
5219 json.WriteString(
"UnusedBytes");
// Nested single-line objects for min/avg/max of allocation and unused-range
// sizes.
5224 json.WriteString(
"AllocationSize");
5225 json.BeginObject(
true);
5226 json.WriteString(
"Min");
5228 json.WriteString(
"Avg");
5230 json.WriteString(
"Max");
5237 json.WriteString(
"UnusedRangeSize");
5238 json.BeginObject(
true);
5239 json.WriteString(
"Min");
5241 json.WriteString(
"Avg");
5243 json.WriteString(
"Max");
// Comparator for the size-sorted free list: supports both iterator/iterator
// and iterator/size comparisons (heterogeneous lookup for binary search).
5251 #endif // #if VMA_STATS_STRING_ENABLED 5253 struct VmaSuballocationItemSizeLess
5256 const VmaSuballocationList::iterator lhs,
5257 const VmaSuballocationList::iterator rhs)
const 5259 return lhs->size < rhs->size;
5262 const VmaSuballocationList::iterator lhs,
5263 VkDeviceSize rhsSize)
const 5265 return lhs->size < rhsSize;
// VmaBlockMetadata construction: containers use the allocator's CPU callbacks.
5272 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
5276 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
5277 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
5281 VmaBlockMetadata::~VmaBlockMetadata()
// Init: the whole block starts as one free suballocation, registered in the
// by-size index.
5285 void VmaBlockMetadata::Init(VkDeviceSize size)
5289 m_SumFreeSize = size;
5291 VmaSuballocation suballoc = {};
5292 suballoc.offset = 0;
5293 suballoc.size = size;
5294 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5295 suballoc.hAllocation = VK_NULL_HANDLE;
5297 m_Suballocations.push_back(suballoc);
5298 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
5300 m_FreeSuballocationsBySize.push_back(suballocItem);
// Validate: walks all suballocations recomputing offsets/free counts/sums and
// cross-checks them against the cached members and the by-size index. The
// early 'return false' bodies are elided by the extraction.
5303 bool VmaBlockMetadata::Validate()
const 5305 if(m_Suballocations.empty())
5311 VkDeviceSize calculatedOffset = 0;
5313 uint32_t calculatedFreeCount = 0;
5315 VkDeviceSize calculatedSumFreeSize = 0;
5318 size_t freeSuballocationsToRegister = 0;
// Two free ranges must never be adjacent (they should have been merged).
5320 bool prevFree =
false;
5322 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5323 suballocItem != m_Suballocations.cend();
5326 const VmaSuballocation& subAlloc = *suballocItem;
5329 if(subAlloc.offset != calculatedOffset)
5334 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
5336 if(prevFree && currFree)
// A range is free exactly when it has no VmaAllocation handle.
5341 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
5348 calculatedSumFreeSize += subAlloc.size;
5349 ++calculatedFreeCount;
5350 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5352 ++freeSuballocationsToRegister;
5357 if(subAlloc.hAllocation->GetOffset() != subAlloc.offset)
5361 if(subAlloc.hAllocation->GetSize() != subAlloc.size)
5367 calculatedOffset += subAlloc.size;
5368 prevFree = currFree;
5373 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
// The by-size index must contain only free ranges, sorted non-descending.
5378 VkDeviceSize lastSize = 0;
5379 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
5381 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
5384 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
5389 if(suballocItem->size < lastSize)
5394 lastSize = suballocItem->size;
5398 if(!ValidateFreeSuballocationList() ||
5399 (calculatedOffset != m_Size) ||
5400 (calculatedSumFreeSize != m_SumFreeSize) ||
5401 (calculatedFreeCount != m_FreeCount))
// Largest free range = last element of the ascending by-size index.
5409 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 5411 if(!m_FreeSuballocationsBySize.empty())
5413 return m_FreeSuballocationsBySize.back()->size;
// Empty block = exactly one suballocation and it is free.
5421 bool VmaBlockMetadata::IsEmpty()
const 5423 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// CalcAllocationStatInfo: aggregates per-block statistics into outInfo; the
// accumulation statements inside the loop are elided by the extraction.
5426 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 5430 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5442 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5443 suballocItem != m_Suballocations.cend();
5446 const VmaSuballocation& suballoc = *suballocItem;
5447 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// AddPoolStats: folds this block's totals into a running VmaPoolStats.
// NOTE(review): 'inoutStats.' and 'size += m_Size;' are one statement split by
// the extraction.
5460 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 5462 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5464 inoutStats.
size += m_Size;
// PrintDetailedMap: dumps the block as JSON -- totals first, then one object
// per suballocation with type/size/offset and optional user data.
5471 #if VMA_STATS_STRING_ENABLED 5473 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 5477 json.WriteString(
"TotalBytes");
5478 json.WriteNumber(m_Size);
5480 json.WriteString(
"UnusedBytes");
5481 json.WriteNumber(m_SumFreeSize);
5483 json.WriteString(
"Allocations");
5484 json.WriteNumber((uint64_t)m_Suballocations.size() - m_FreeCount);
5486 json.WriteString(
"UnusedRanges");
5487 json.WriteNumber(m_FreeCount);
5489 json.WriteString(
"Suballocations");
5492 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5493 suballocItem != m_Suballocations.cend();
5494 ++suballocItem, ++i)
5496 json.BeginObject(
true);
5498 json.WriteString(
"Type");
5499 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
5501 json.WriteString(
"Size");
5502 json.WriteNumber(suballocItem->size);
5504 json.WriteString(
"Offset");
5505 json.WriteNumber(suballocItem->offset);
// User data is printed as a string when the allocation owns a string copy,
// otherwise as a raw pointer value.
5507 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
5509 const void* pUserData = suballocItem->hAllocation->GetUserData();
5510 if(pUserData != VMA_NULL)
5512 json.WriteString(
"UserData");
5513 if(suballocItem->hAllocation->IsUserDataString())
5515 json.WriteString((
const char*)pUserData);
5520 json.ContinueString_Pointer(pUserData);
// CreateFirstAllocationRequest: trivial request for an empty block -- place at
// offset 0 in the single free range covering the whole block.
5533 #endif // #if VMA_STATS_STRING_ENABLED 5545 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
5547 VMA_ASSERT(IsEmpty());
5548 pAllocationRequest->offset = 0;
5549 pAllocationRequest->sumFreeSize = m_SumFreeSize;
5550 pAllocationRequest->sumItemSize = 0;
5551 pAllocationRequest->item = m_Suballocations.begin();
5552 pAllocationRequest->itemsToMakeLostCount = 0;
// CreateAllocationRequest: finds where to place a new allocation in this block.
// Strategy: (1) quick reject if total free space is insufficient and losing
// other allocations is not allowed; (2) best-fit via binary search of the
// size-sorted free list (worst-fit fallback loop follows, per the
// VMA_BEST_FIT branch elided by the extraction); (3) if allowed, scan all
// suballocations considering making lost-able allocations lost, keeping the
// cheapest candidate by CalcCost().
5555 bool VmaBlockMetadata::CreateAllocationRequest(
5556 uint32_t currentFrameIndex,
5557 uint32_t frameInUseCount,
5558 VkDeviceSize bufferImageGranularity,
5559 VkDeviceSize allocSize,
5560 VkDeviceSize allocAlignment,
5561 VmaSuballocationType allocType,
5562 bool canMakeOtherLost,
5563 VmaAllocationRequest* pAllocationRequest)
5565 VMA_ASSERT(allocSize > 0);
5566 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5567 VMA_ASSERT(pAllocationRequest != VMA_NULL);
5568 VMA_HEAVY_ASSERT(Validate());
// Early out: cannot possibly fit without evictions.
5571 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
5577 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
5578 if(freeSuballocCount > 0)
// Best fit: first free range whose size is >= the request.
5583 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5584 m_FreeSuballocationsBySize.data(),
5585 m_FreeSuballocationsBySize.data() + freeSuballocCount,
5587 VmaSuballocationItemSizeLess());
5588 size_t index = it - m_FreeSuballocationsBySize.data();
5589 for(; index < freeSuballocCount; ++index)
5594 bufferImageGranularity,
5598 m_FreeSuballocationsBySize[index],
5600 &pAllocationRequest->offset,
5601 &pAllocationRequest->itemsToMakeLostCount,
5602 &pAllocationRequest->sumFreeSize,
5603 &pAllocationRequest->sumItemSize))
5605 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Worst fit: iterate free ranges from largest to smallest.
5613 for(
size_t index = freeSuballocCount; index--; )
5618 bufferImageGranularity,
5622 m_FreeSuballocationsBySize[index],
5624 &pAllocationRequest->offset,
5625 &pAllocationRequest->itemsToMakeLostCount,
5626 &pAllocationRequest->sumFreeSize,
5627 &pAllocationRequest->sumItemSize))
5629 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Brute-force search treating lost-able allocations as potential free space;
// VK_WHOLE_SIZE acts as "infinite cost" sentinel until a candidate is found.
5636 if(canMakeOtherLost)
5640 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
5641 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
5643 VmaAllocationRequest tmpAllocRequest = {};
5644 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
5645 suballocIt != m_Suballocations.end();
5648 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
5649 suballocIt->hAllocation->CanBecomeLost())
5654 bufferImageGranularity,
5660 &tmpAllocRequest.offset,
5661 &tmpAllocRequest.itemsToMakeLostCount,
5662 &tmpAllocRequest.sumFreeSize,
5663 &tmpAllocRequest.sumItemSize))
5665 tmpAllocRequest.item = suballocIt;
// Keep the cheapest plan seen so far.
5667 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
5669 *pAllocationRequest = tmpAllocRequest;
// Success iff some candidate replaced the VK_WHOLE_SIZE sentinel.
5675 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Walks forward from pAllocationRequest->item, making lost exactly
// itemsToMakeLostCount non-free allocations (skipping free suballocations).
// Each successfully lost allocation is freed via FreeSuballocation, which
// may merge neighbors — the returned iterator replaces ->item.
// Postcondition (asserted): ->item points at a FREE suballocation.
5684 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
5685 uint32_t currentFrameIndex,
5686 uint32_t frameInUseCount,
5687 VmaAllocationRequest* pAllocationRequest)
5689 while(pAllocationRequest->itemsToMakeLostCount > 0)
5691 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
5693 ++pAllocationRequest->item;
5695 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5696 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
5697 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
// MakeLost can fail (e.g. the allocation was used too recently); the
// failure branch here was elided by extraction.
5698 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
5700 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
5701 --pAllocationRequest->itemsToMakeLostCount;
5709 VMA_HEAVY_ASSERT(Validate());
5710 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5711 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Makes lost every allocation in this block that CanBecomeLost() and whose
// MakeLost(currentFrameIndex, frameInUseCount) succeeds. Returns how many
// allocations were actually lost. FreeSuballocation returns a valid
// iterator after merging, which continues the traversal.
5716 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5718 uint32_t lostAllocationCount = 0;
5719 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
5720 it != m_Suballocations.end();
5723 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
5724 it->hAllocation->CanBecomeLost() &&
5725 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
5727 it = FreeSuballocation(it);
5728 ++lostAllocationCount;
5731 return lostAllocationCount;
// Commits a previously computed allocation request: carves the allocation
// out of the free suballocation at request.item, possibly splitting off
// free "padding" suballocations before and after it, and updates the
// free-count / free-size bookkeeping.
// NOTE(review): the hAllocation parameter and the padding>0 guards around
// the two split branches were elided by extraction.
5734 void VmaBlockMetadata::Alloc(
5735 const VmaAllocationRequest& request,
5736 VmaSuballocationType type,
5737 VkDeviceSize allocSize,
5740 VMA_ASSERT(request.item != m_Suballocations.end());
5741 VmaSuballocation& suballoc = *request.item;
5743 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5745 VMA_ASSERT(request.offset >= suballoc.offset);
// paddingBegin/paddingEnd: unused space left in the free suballocation
// before and after the aligned allocation.
5746 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
5747 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
5748 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The item stops being free, so remove it from the size-sorted free list
// before mutating it in place into the new allocation.
5752 UnregisterFreeSuballocation(request.item);
5754 suballoc.offset = request.offset;
5755 suballoc.size = allocSize;
5756 suballoc.type = type;
5757 suballoc.hAllocation = hAllocation;
// Trailing padding becomes a new FREE suballocation inserted after the item.
5762 VmaSuballocation paddingSuballoc = {};
5763 paddingSuballoc.offset = request.offset + allocSize;
5764 paddingSuballoc.size = paddingEnd;
5765 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5766 VmaSuballocationList::iterator next = request.item;
5768 const VmaSuballocationList::iterator paddingEndItem =
5769 m_Suballocations.insert(next, paddingSuballoc);
5770 RegisterFreeSuballocation(paddingEndItem);
// Leading padding likewise becomes a FREE suballocation before the item.
5776 VmaSuballocation paddingSuballoc = {};
5777 paddingSuballoc.offset = request.offset - paddingBegin;
5778 paddingSuballoc.size = paddingBegin;
5779 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5780 const VmaSuballocationList::iterator paddingBeginItem =
5781 m_Suballocations.insert(request.item, paddingSuballoc);
5782 RegisterFreeSuballocation(paddingBeginItem);
// One free suballocation consumed; increments for the padding pieces were
// elided by extraction.
5786 m_FreeCount = m_FreeCount - 1;
5787 if(paddingBegin > 0)
5795 m_SumFreeSize -= allocSize;
// NOTE(review): the signature line of VmaBlockMetadata::Free(VmaAllocation)
// was lost in extraction. The visible body scans the suballocation list for
// the entry holding `allocation`, frees it (merging with free neighbors via
// FreeSuballocation), and asserts if the allocation is not found.
5800 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5801 suballocItem != m_Suballocations.end();
5804 VmaSuballocation& suballoc = *suballocItem;
5805 if(suballoc.hAllocation == allocation)
5807 FreeSuballocation(suballocItem);
5808 VMA_HEAVY_ASSERT(Validate());
5812 VMA_ASSERT(0 &&
"Not found!");
// Frees the suballocation that starts exactly at `offset` (linear scan).
// Asserts if no suballocation with that offset exists.
5815 void VmaBlockMetadata::FreeAtOffset(VkDeviceSize offset)
5817 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5818 suballocItem != m_Suballocations.end();
5821 VmaSuballocation& suballoc = *suballocItem;
5822 if(suballoc.offset == offset)
5824 FreeSuballocation(suballocItem);
5828 VMA_ASSERT(0 &&
"Not found!");
// Debug validation of m_FreeSuballocationsBySize: every entry must be FREE,
// at least VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER bytes, and the vector
// must be sorted by ascending size (lastSize tracks the previous entry).
// NOTE(review): the failure `return false;` branches were elided by extraction.
5831 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 5833 VkDeviceSize lastSize = 0;
5834 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5836 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5838 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
5843 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5848 if(it->size < lastSize)
5854 lastSize = it->size;
// Checks whether an allocation of allocSize/allocAlignment/allocType can be
// placed starting at suballocItem. Outputs the aligned offset, how many
// existing allocations would have to be made lost, and the free/lost byte
// sums used for cost comparison. Two major paths: canMakeOtherLost (may span
// several suballocations, counting losable ones) and the plain path (must
// fit entirely inside one free suballocation).
// NOTE(review): many guard branches and `return false;` lines were elided
// by extraction; comments describe the visible statements only.
5859 bool VmaBlockMetadata::CheckAllocation(
5860 uint32_t currentFrameIndex,
5861 uint32_t frameInUseCount,
5862 VkDeviceSize bufferImageGranularity,
5863 VkDeviceSize allocSize,
5864 VkDeviceSize allocAlignment,
5865 VmaSuballocationType allocType,
5866 VmaSuballocationList::const_iterator suballocItem,
5867 bool canMakeOtherLost,
5868 VkDeviceSize* pOffset,
5869 size_t* itemsToMakeLostCount,
5870 VkDeviceSize* pSumFreeSize,
5871 VkDeviceSize* pSumItemSize)
const 5873 VMA_ASSERT(allocSize > 0);
5874 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5875 VMA_ASSERT(suballocItem != m_Suballocations.cend());
5876 VMA_ASSERT(pOffset != VMA_NULL);
5878 *itemsToMakeLostCount = 0;
// --- Path 1: may make other allocations lost. The starting item may be free
// (counts toward pSumFreeSize) or a losable allocation old enough to evict
// (counts toward pSumItemSize and itemsToMakeLostCount).
5882 if(canMakeOtherLost)
5884 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5886 *pSumFreeSize = suballocItem->size;
5890 if(suballocItem->hAllocation->CanBecomeLost() &&
5891 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5893 ++*itemsToMakeLostCount;
5894 *pSumItemSize = suballocItem->size;
// Remaining space from this offset to the end of the block must cover allocSize.
5903 if(m_Size - suballocItem->offset < allocSize)
5909 *pOffset = suballocItem->offset;
// Reserve a debug margin before the allocation except at block start.
5912 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5914 *pOffset += VMA_DEBUG_MARGIN;
// Align up to the stricter of the requested and debug alignments.
5918 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5919 *pOffset = VmaAlignUp(*pOffset, alignment);
// bufferImageGranularity: if a previous suballocation of a conflicting type
// shares a "page" with the proposed offset, bump the offset up to the
// granularity boundary.
5923 if(bufferImageGranularity > 1)
5925 bool bufferImageGranularityConflict =
false;
5926 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5927 while(prevSuballocItem != m_Suballocations.cbegin())
5930 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5931 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5933 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5935 bufferImageGranularityConflict =
true;
5943 if(bufferImageGranularityConflict)
5945 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Alignment may have pushed the offset past this suballocation entirely.
5951 if(*pOffset >= suballocItem->offset + suballocItem->size)
5957 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
5960 VmaSuballocationList::const_iterator next = suballocItem;
5962 const VkDeviceSize requiredEndMargin =
5963 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5965 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
5967 if(suballocItem->offset + totalSize > m_Size)
// If the request overflows this suballocation, walk forward consuming
// following items: free ones add to pSumFreeSize, losable stale ones add to
// pSumItemSize/itemsToMakeLostCount, anything else fails (branch elided).
5974 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
5975 if(totalSize > suballocItem->size)
5977 VkDeviceSize remainingSize = totalSize - suballocItem->size;
5978 while(remainingSize > 0)
5981 if(lastSuballocItem == m_Suballocations.cend())
5985 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5987 *pSumFreeSize += lastSuballocItem->size;
5991 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
5992 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
5993 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5995 ++*itemsToMakeLostCount;
5996 *pSumItemSize += lastSuballocItem->size;
6003 remainingSize = (lastSuballocItem->size < remainingSize) ?
6004 remainingSize - lastSuballocItem->size : 0;
// Forward granularity check: a following suballocation of a conflicting
// type on the same page must itself be losable, else placement fails.
6010 if(bufferImageGranularity > 1)
6012 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
6014 while(nextSuballocItem != m_Suballocations.cend())
6016 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
6017 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
6019 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
6021 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
6022 if(nextSuballoc.hAllocation->CanBecomeLost() &&
6023 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
6025 ++*itemsToMakeLostCount;
// --- Path 2: plain placement. The item must already be FREE and large
// enough after alignment and margins; same granularity adjustments as above.
6044 const VmaSuballocation& suballoc = *suballocItem;
6045 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
6047 *pSumFreeSize = suballoc.size;
6050 if(suballoc.size < allocSize)
6056 *pOffset = suballoc.offset;
6059 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
6061 *pOffset += VMA_DEBUG_MARGIN;
6065 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
6066 *pOffset = VmaAlignUp(*pOffset, alignment);
6070 if(bufferImageGranularity > 1)
6072 bool bufferImageGranularityConflict =
false;
6073 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
6074 while(prevSuballocItem != m_Suballocations.cbegin())
6077 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
6078 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
6080 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
6082 bufferImageGranularityConflict =
true;
6090 if(bufferImageGranularityConflict)
6092 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
6097 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
6100 VmaSuballocationList::const_iterator next = suballocItem;
6102 const VkDeviceSize requiredEndMargin =
6103 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Fail if padding + allocation + end margin exceed this free suballocation.
6106 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward granularity check on the plain path: any conflicting following
// suballocation on the same page makes this placement invalid (the failing
// return was elided by extraction).
6113 if(bufferImageGranularity > 1)
6115 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
6117 while(nextSuballocItem != m_Suballocations.cend())
6119 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
6120 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
6122 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges the FREE suballocation at `item` with its (also FREE) successor:
// grows item by the successor's size and erases the successor. The
// iterator increment between lines 6146 and 6148 was elided by extraction.
6141 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
6143 VMA_ASSERT(item != m_Suballocations.end());
6144 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6146 VmaSuballocationList::iterator nextItem = item;
6148 VMA_ASSERT(nextItem != m_Suballocations.end());
6149 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
6151 item->size += nextItem->size;
6153 m_Suballocations.erase(nextItem);
// Turns the suballocation at suballocItem into a FREE one, updates the free
// byte sum, coalesces with free neighbors on both sides, and registers the
// resulting free suballocation in the size-sorted vector. Returns an
// iterator to the (possibly merged) free suballocation.
// NOTE(review): iterator increments/decrements and the if(mergeWith...)
// guards around lines 6190-6199 were elided by extraction.
6156 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
6159 VmaSuballocation& suballoc = *suballocItem;
6160 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6161 suballoc.hAllocation = VK_NULL_HANDLE;
6165 m_SumFreeSize += suballoc.size;
// Detect free neighbors to merge with.
6168 bool mergeWithNext =
false;
6169 bool mergeWithPrev =
false;
6171 VmaSuballocationList::iterator nextItem = suballocItem;
6173 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
6175 mergeWithNext =
true;
6178 VmaSuballocationList::iterator prevItem = suballocItem;
6179 if(suballocItem != m_Suballocations.begin())
6182 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6184 mergeWithPrev =
true;
// Merging invalidates the neighbor's registration, so unregister first,
// merge, then (for the prev case) re-register the grown suballocation.
6190 UnregisterFreeSuballocation(nextItem);
6191 MergeFreeWithNext(suballocItem);
6196 UnregisterFreeSuballocation(prevItem);
6197 MergeFreeWithNext(prevItem);
6198 RegisterFreeSuballocation(prevItem);
6203 RegisterFreeSuballocation(suballocItem);
6204 return suballocItem;
// Inserts `item` into m_FreeSuballocationsBySize, keeping it sorted by size.
// Suballocations smaller than VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER
// are deliberately not tracked in the sorted vector.
6208 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
6210 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6211 VMA_ASSERT(item->size > 0);
6215 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
6217 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6219 if(m_FreeSuballocationsBySize.empty())
6221 m_FreeSuballocationsBySize.push_back(item);
6225 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes `item` from the size-sorted vector: binary-search the first entry
// of equal-or-greater size, then scan forward through the run of same-size
// entries until the exact iterator is found. Asserts if it is missing.
// (Sizes below the registration threshold were never inserted, so they are
// skipped here too.)
6233 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
6235 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6236 VMA_ASSERT(item->size > 0);
6240 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
6242 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6244 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
6245 m_FreeSuballocationsBySize.data(),
6246 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
6248 VmaSuballocationItemSizeLess());
6249 for(
size_t index = it - m_FreeSuballocationsBySize.data();
6250 index < m_FreeSuballocationsBySize.size();
6253 if(m_FreeSuballocationsBySize[index] == item)
6255 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Still inside the run of entries with the same size, otherwise the item
// cannot be present at all.
6258 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
6260 VMA_ASSERT(0 &&
"Not found.");
// Constructor: creates an uninitialized block (no VkDeviceMemory yet, no
// mapping). Real setup happens in Init().
6269 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
6270 m_Metadata(hAllocator),
6271 m_MemoryTypeIndex(UINT32_MAX),
6272 m_hMemory(VK_NULL_HANDLE),
6274 m_pMappedData(VMA_NULL)
// Binds this block object to an already-allocated VkDeviceMemory of the
// given memory type and size, and initializes the suballocation metadata.
// Must be called at most once (m_hMemory asserted null).
6278 void VmaDeviceMemoryBlock::Init(
6279 uint32_t newMemoryTypeIndex,
6280 VkDeviceMemory newMemory,
6281 VkDeviceSize newSize)
6283 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
6285 m_MemoryTypeIndex = newMemoryTypeIndex;
6286 m_hMemory = newMemory;
6288 m_Metadata.Init(newSize);
// Releases the block's VkDeviceMemory back through the allocator. The block
// must be empty — destroying it with live suballocations is a caller bug.
6291 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
6295 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
6297 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
6298 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
6299 m_hMemory = VK_NULL_HANDLE;
// Sanity check: the block must own device memory of non-zero size; defers
// the detailed check to the metadata. (The early-failure return between the
// condition and line 6310 was elided by extraction.)
6302 bool VmaDeviceMemoryBlock::Validate()
const 6304 if((m_hMemory == VK_NULL_HANDLE) ||
6305 (m_Metadata.GetSize() == 0))
6310 return m_Metadata.Validate();
// Reference-counted persistent mapping of the whole block, guarded by the
// block mutex (when the allocator uses mutexes). If already mapped, bumps
// m_MapCount by `count` and returns the cached pointer; otherwise calls
// vkMapMemory (some arguments of that call were elided by extraction) and,
// on success, hands out the fresh mapping. ppData may be null when the
// caller only wants the map count raised.
6313 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
6320 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6323 m_MapCount += count;
6324 VMA_ASSERT(m_pMappedData != VMA_NULL);
6325 if(ppData != VMA_NULL)
6327 *ppData = m_pMappedData;
6333 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6334 hAllocator->m_hDevice,
6340 if(result == VK_SUCCESS)
6342 if(ppData != VMA_NULL)
6344 *ppData = m_pMappedData;
// Decrements the mapping reference count by `count`; when it reaches zero
// (check elided by extraction) the block is actually vkUnmapMemory'ed.
// Unmapping an unmapped block is a caller bug (asserted).
6352 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
6359 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6360 if(m_MapCount >= count)
6362 m_MapCount -= count;
6365 m_pMappedData = VMA_NULL;
6366 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
6371 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Binds a caller's VkBuffer to this block's memory at the allocation's
// offset, under the block mutex (Vulkan requires external sync for binds to
// the same VkDeviceMemory). The allocation must live in this block.
// NOTE(review): the hAllocator/hAllocation/buffer parameters and the memory
// handle argument were elided by extraction.
6375 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
6380 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
6381 hAllocation->GetBlock() ==
this);
6383 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6384 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
6385 hAllocator->m_hDevice,
6388 hAllocation->GetOffset());
// Image counterpart of BindBufferMemory: binds a VkImage to this block's
// memory at the allocation's offset, under the block mutex.
// NOTE(review): parameters and the memory handle argument were elided by
// extraction.
6391 VkResult VmaDeviceMemoryBlock::BindImageMemory(
6396 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
6397 hAllocation->GetBlock() ==
this);
6399 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6400 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
6401 hAllocator->m_hDevice,
6404 hAllocation->GetOffset());
// NOTE(review): fragments only. The memset belongs to a stat-info init
// helper (zeroes outInfo); the following line is just the signature of
// static VmaPostprocessCalcStatInfo. Their surrounding lines were lost in
// extraction — do not infer behavior beyond what is visible here.
6409 memset(&outInfo, 0,
sizeof(outInfo));
6428 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool_T constructor: forwards the pool's VmaPoolCreateInfo fields to the
// embedded block vector (member name and several initializer entries were
// elided by extraction). The destructor body is likewise not visible.
6436 VmaPool_T::VmaPool_T(
6441 createInfo.memoryTypeIndex,
6442 createInfo.blockSize,
6443 createInfo.minBlockCount,
6444 createInfo.maxBlockCount,
6446 createInfo.frameInUseCount,
6451 VmaPool_T::~VmaPool_T()
6455 #if VMA_STATS_STRING_ENABLED 6457 #endif // #if VMA_STATS_STRING_ENABLED 6459 VmaBlockVector::VmaBlockVector(
// Constructor: stores the configuration for a vector of VkDeviceMemory
// blocks of one memory type (preferred block size, min/max block counts,
// granularity, lost-allocation frame window, whether this backs a custom
// pool). Starts with no blocks and no defragmentator.
6461 uint32_t memoryTypeIndex,
6462 VkDeviceSize preferredBlockSize,
6463 size_t minBlockCount,
6464 size_t maxBlockCount,
6465 VkDeviceSize bufferImageGranularity,
6466 uint32_t frameInUseCount,
6467 bool isCustomPool) :
6468 m_hAllocator(hAllocator),
6469 m_MemoryTypeIndex(memoryTypeIndex),
6470 m_PreferredBlockSize(preferredBlockSize),
6471 m_MinBlockCount(minBlockCount),
6472 m_MaxBlockCount(maxBlockCount),
6473 m_BufferImageGranularity(bufferImageGranularity),
6474 m_FrameInUseCount(frameInUseCount),
6475 m_IsCustomPool(isCustomPool),
6476 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
6477 m_HasEmptyBlock(false),
6478 m_pDefragmentator(VMA_NULL)
// Destructor: the defragmentator must already be destroyed; frees every
// block's device memory and deletes the block objects.
6482 VmaBlockVector::~VmaBlockVector()
6484 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
6486 for(
size_t i = m_Blocks.size(); i--; )
6488 m_Blocks[i]->Destroy(m_hAllocator);
6489 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-creates m_MinBlockCount blocks of the preferred size. Stops on the
// first failure (the error return and final success return were elided by
// extraction).
6493 VkResult VmaBlockVector::CreateMinBlocks()
6495 for(
size_t i = 0; i < m_MinBlockCount; ++i)
6497 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
6498 if(res != VK_SUCCESS)
// Accumulates pool statistics over all blocks into *pStats, under the
// vector mutex. (The initial zeroing of *pStats was elided by extraction.)
6506 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
6514 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6516 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6518 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6520 VMA_HEAVY_ASSERT(pBlock->Validate());
6521 pBlock->m_Metadata.AddPoolStats(*pStats);
// Upper bound on retry rounds for the make-other-lost allocation strategy.
6525 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates from this block vector in three stages:
//   1. try every existing block without making anything lost;
//   2. if allowed, create a new block (shrinking its size up to 3 halvings
//      on OOM) and allocate from it;
//   3. if canMakeOtherLost, repeatedly pick the cheapest block/request
//      (lowest CalcCost()), evict, and retry up to VMA_ALLOCATION_TRY_COUNT.
// Returns VK_ERROR_OUT_OF_DEVICE_MEMORY when nothing works.
// NOTE(review): several parameters, branch guards and intermediate returns
// were elided by extraction; comments follow the visible statements.
6527 VkResult VmaBlockVector::Allocate(
6529 uint32_t currentFrameIndex,
6530 const VkMemoryRequirements& vkMemReq,
6532 VmaSuballocationType suballocType,
6538 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Stage 1: search existing blocks for a request that needs no evictions.
6542 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6544 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6545 VMA_ASSERT(pCurrBlock);
6546 VmaAllocationRequest currRequest = {};
6547 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6550 m_BufferImageGranularity,
6558 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistently-mapped allocations bump the block's map refcount up front.
6562 VkResult res = pCurrBlock->Map(m_hAllocator, 1, VMA_NULL);
6563 if(res != VK_SUCCESS)
6570 if(pCurrBlock->m_Metadata.IsEmpty())
6572 m_HasEmptyBlock =
false;
6575 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6576 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
6577 (*pAllocation)->InitBlockAllocation(
6586 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
6587 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6588 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// Stage 2: create a new block if the block count limit permits.
6593 const bool canCreateNewBlock =
6595 (m_Blocks.size() < m_MaxBlockCount);
6598 if(canCreateNewBlock)
6601 VkDeviceSize newBlockSize = m_PreferredBlockSize;
6602 uint32_t newBlockSizeShift = 0;
6603 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// For default (non-custom) pools, start smaller than the preferred size
// while the halved size still exceeds every existing block and twice the
// request — avoids over-allocating for small first requests.
6607 if(m_IsCustomPool ==
false)
6610 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
6611 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
6613 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6614 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= vkMemReq.size * 2)
6616 newBlockSize = smallerNewBlockSize;
6617 ++newBlockSizeShift;
6626 size_t newBlockIndex = 0;
6627 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On allocation failure, keep halving (still covering vkMemReq.size) and
// retrying, up to the shift limit.
6629 if(m_IsCustomPool ==
false)
6631 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
6633 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6634 if(smallerNewBlockSize >= vkMemReq.size)
6636 newBlockSize = smallerNewBlockSize;
6637 ++newBlockSizeShift;
6638 res = CreateBlock(newBlockSize, &newBlockIndex);
6647 if(res == VK_SUCCESS)
6649 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
6650 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
6654 res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
6655 if(res != VK_SUCCESS)
// A fresh block is empty, so the first-allocation fast path applies.
6662 VmaAllocationRequest allocRequest;
6663 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
6664 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6665 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
6666 (*pAllocation)->InitBlockAllocation(
6669 allocRequest.offset,
6675 VMA_HEAVY_ASSERT(pBlock->Validate());
6676 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
6677 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// Stage 3: evict losable allocations. Each round finds the globally
// cheapest request across blocks, evicts, and retries; bounded rounds
// guard against livelock.
6685 if(canMakeOtherLost)
6687 uint32_t tryIndex = 0;
6688 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
6690 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
6691 VmaAllocationRequest bestRequest = {};
6692 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
6696 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6698 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6699 VMA_ASSERT(pCurrBlock);
6700 VmaAllocationRequest currRequest = {};
6701 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6704 m_BufferImageGranularity,
6711 const VkDeviceSize currRequestCost = currRequest.CalcCost();
6712 if(pBestRequestBlock == VMA_NULL ||
6713 currRequestCost < bestRequestCost)
6715 pBestRequestBlock = pCurrBlock;
6716 bestRequest = currRequest;
6717 bestRequestCost = currRequestCost;
// Cost 0 means nothing has to be made lost — cannot do better.
6719 if(bestRequestCost == 0)
6727 if(pBestRequestBlock != VMA_NULL)
6731 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
6732 if(res != VK_SUCCESS)
// Eviction can fail if a targeted allocation became in-use again; then
// this round is wasted and the loop retries.
6738 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
6744 if(pBestRequestBlock->m_Metadata.IsEmpty())
6746 m_HasEmptyBlock =
false;
6749 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6750 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
6751 (*pAllocation)->InitBlockAllocation(
6760 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
6761 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6762 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
6776 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
6778 return VK_ERROR_TOO_MANY_OBJECTS;
6782 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Returns an allocation to its block. Under the mutex: undoes the
// persistent mapping refcount if needed, frees the suballocation, and
// applies the empty-block retention policy — at most one empty block is
// kept (beyond m_MinBlockCount); a second empty block is scheduled for
// deletion. The actual VkDeviceMemory free happens after the lock is
// released, since it can be slow.
6785 void VmaBlockVector::Free(
6788 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
6792 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6794 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
6796 if(hAllocation->IsPersistentMap())
6798 pBlock->Unmap(m_hAllocator, 1);
6801 pBlock->m_Metadata.Free(hAllocation);
6802 VMA_HEAVY_ASSERT(pBlock->Validate());
6804 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// Block just became empty: delete it if we already keep an empty one,
// otherwise remember that we now have one.
6807 if(pBlock->m_Metadata.IsEmpty())
6810 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
6812 pBlockToDelete = pBlock;
6818 m_HasEmptyBlock =
true;
// Block was not empty, but an empty block exists: if sorting moved the
// empty block to the end, it can be dropped now.
6823 else if(m_HasEmptyBlock)
6825 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
6826 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
6828 pBlockToDelete = pLastBlock;
6829 m_Blocks.pop_back();
6830 m_HasEmptyBlock =
false;
6834 IncrementallySortBlocks();
// Outside the lock: release the device memory of the retired block.
6839 if(pBlockToDelete != VMA_NULL)
6841 VMA_DEBUG_LOG(
" Deleted empty allocation");
6842 pBlockToDelete->Destroy(m_hAllocator);
6843 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the size of the largest existing block (early-exits once a block
// reaches the preferred size, since no block should exceed it). The
// `result` initialization and returns were elided by extraction.
6847 size_t VmaBlockVector::CalcMaxBlockSize()
const 6850 for(
size_t i = m_Blocks.size(); i--; )
6852 result = VMA_MAX((uint64_t)result, (uint64_t)m_Blocks[i]->m_Metadata.GetSize());
6853 if(result >= m_PreferredBlockSize)
// Removes pBlock from m_Blocks (does not destroy it). Linear search; the
// early return after removal and the not-found assert were elided by
// extraction.
6861 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
6863 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6865 if(m_Blocks[blockIndex] == pBlock)
6867 VmaVectorRemove(m_Blocks, blockIndex);
// One bubble-sort pass ordering blocks by ascending sum of free space, so
// allocation tries fuller blocks first. A single pass per call amortizes
// the sorting cost across Free() calls.
6874 void VmaBlockVector::IncrementallySortBlocks()
6877 for(
size_t i = 1; i < m_Blocks.size(); ++i)
6879 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
6881 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Allocates a new VkDeviceMemory of blockSize for this vector's memory
// type, wraps it in a VmaDeviceMemoryBlock, appends it to m_Blocks, and
// optionally reports its index. (The allocation-failure early return and
// the pBlock->Init memory-handle argument were elided by extraction.)
6887 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
6889 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6890 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
6891 allocInfo.allocationSize = blockSize;
6892 VkDeviceMemory mem = VK_NULL_HANDLE;
6893 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
6902 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
6906 allocInfo.allocationSize);
6908 m_Blocks.push_back(pBlock);
6909 if(pNewBlockIndex != VMA_NULL)
6911 *pNewBlockIndex = m_Blocks.size() - 1;
6917 #if VMA_STATS_STRING_ENABLED 6919 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
// Emits this block vector as JSON under the mutex. The custom-pool branch
// writes MemoryTypeIndex / BlockSize / BlockCount{Min,Max,Cur} /
// FrameInUseCount; the default branch writes only PreferredBlockSize
// (the if/else and Begin/EndObject calls were elided by extraction).
// Finally each block's detailed map is emitted under "Blocks".
6921 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6927 json.WriteString(
"MemoryTypeIndex");
6928 json.WriteNumber(m_MemoryTypeIndex);
6930 json.WriteString(
"BlockSize");
6931 json.WriteNumber(m_PreferredBlockSize);
6933 json.WriteString(
"BlockCount");
6934 json.BeginObject(
true);
6935 if(m_MinBlockCount > 0)
6937 json.WriteString(
"Min");
6938 json.WriteNumber((uint64_t)m_MinBlockCount);
6940 if(m_MaxBlockCount < SIZE_MAX)
6942 json.WriteString(
"Max");
6943 json.WriteNumber((uint64_t)m_MaxBlockCount);
6945 json.WriteString(
"Cur");
6946 json.WriteNumber((uint64_t)m_Blocks.size());
6949 if(m_FrameInUseCount > 0)
6951 json.WriteString(
"FrameInUseCount");
6952 json.WriteNumber(m_FrameInUseCount);
6957 json.WriteString(
"PreferredBlockSize");
6958 json.WriteNumber(m_PreferredBlockSize);
6961 json.WriteString(
"Blocks");
6963 for(
size_t i = 0; i < m_Blocks.size(); ++i)
6965 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
6972 #endif // #if VMA_STATS_STRING_ENABLED 6974 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
// Lazily creates the per-vector defragmentator (constructor arguments were
// elided by extraction) and returns it; subsequent calls reuse the same one.
6976 uint32_t currentFrameIndex)
6978 if(m_pDefragmentator == VMA_NULL)
6980 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
6986 return m_pDefragmentator;
// Runs the defragmentator under the mutex, then accounts moved bytes/
// allocations against the caller's budgets (maxBytesToMove /
// maxAllocationsToMove — the decrements were elided by extraction) and
// reports them in pDefragmentationStats. Finally frees every empty block
// above m_MinBlockCount, keeping at most one (m_HasEmptyBlock).
6989 VkResult VmaBlockVector::Defragment(
6991 VkDeviceSize& maxBytesToMove,
6992 uint32_t& maxAllocationsToMove)
6994 if(m_pDefragmentator == VMA_NULL)
6999 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7002 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
7005 if(pDefragmentationStats != VMA_NULL)
7007 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
7008 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
7011 VMA_ASSERT(bytesMoved <= maxBytesToMove);
7012 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Reverse iteration so VmaVectorRemove does not disturb unvisited indices.
7018 m_HasEmptyBlock =
false;
7019 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
7021 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
7022 if(pBlock->m_Metadata.IsEmpty())
7024 if(m_Blocks.size() > m_MinBlockCount)
7026 if(pDefragmentationStats != VMA_NULL)
7029 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
7032 VmaVectorRemove(m_Blocks, blockIndex);
7033 pBlock->Destroy(m_hAllocator);
7034 vma_delete(m_hAllocator, pBlock);
7038 m_HasEmptyBlock =
true;
// Deletes the defragmentator if one exists and resets the pointer
// (required before ~VmaBlockVector, which asserts it is null).
7046 void VmaBlockVector::DestroyDefragmentator()
7048 if(m_pDefragmentator != VMA_NULL)
7050 vma_delete(m_hAllocator, m_pDefragmentator);
7051 m_pDefragmentator = VMA_NULL;
// Makes lost every eligible allocation in every block of this vector,
// under the mutex; optionally reports the total count via
// pLostAllocationCount.
7055 void VmaBlockVector::MakePoolAllocationsLost(
7056 uint32_t currentFrameIndex,
7057 size_t* pLostAllocationCount)
7059 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7060 size_t lostAllocationCount = 0;
7061 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7063 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
7065 lostAllocationCount += pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
7067 if(pLostAllocationCount != VMA_NULL)
7069 *pLostAllocationCount = lostAllocationCount;
// Folds per-block statistics into the global VmaStats: each block's stat
// info is added to the grand total, to this memory type's bucket, and to
// the owning heap's bucket. Runs under the vector mutex.
7073 void VmaBlockVector::AddStats(
VmaStats* pStats)
7075 const uint32_t memTypeIndex = m_MemoryTypeIndex;
7076 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
7078 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7080 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7082 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
7084 VMA_HEAVY_ASSERT(pBlock->Validate());
7086 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
7087 VmaAddStatInfo(pStats->
total, allocationStatInfo);
7088 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
7089 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
7096 VmaDefragmentator::VmaDefragmentator(
7098 VmaBlockVector* pBlockVector,
7099 uint32_t currentFrameIndex) :
7100 m_hAllocator(hAllocator),
7101 m_pBlockVector(pBlockVector),
7102 m_CurrentFrameIndex(currentFrameIndex),
7104 m_AllocationsMoved(0),
7105 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
7106 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
7110 VmaDefragmentator::~VmaDefragmentator()
7112 for(
size_t i = m_Blocks.size(); i--; )
7114 vma_delete(m_hAllocator, m_Blocks[i]);
7118 void VmaDefragmentator::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
7120 AllocationInfo allocInfo;
7121 allocInfo.m_hAllocation = hAlloc;
7122 allocInfo.m_pChanged = pChanged;
7123 m_Allocations.push_back(allocInfo);
7126 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData)
7129 if(m_pMappedDataForDefragmentation)
7131 *ppMappedData = m_pMappedDataForDefragmentation;
7136 if(m_pBlock->GetMappedData())
7138 *ppMappedData = m_pBlock->GetMappedData();
7143 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
7144 *ppMappedData = m_pMappedDataForDefragmentation;
7148 void VmaDefragmentator::BlockInfo::Unmap(
VmaAllocator hAllocator)
7150 if(m_pMappedDataForDefragmentation != VMA_NULL)
7152 m_pBlock->Unmap(hAllocator, 1);
// One pass of defragmentation: walks candidate allocations from the last
// block backwards and tries to re-home each into an earlier block (lower
// index / lower offset), memcpy-ing the data through mapped pointers.
// Returns VK_INCOMPLETE when the maxBytesToMove / maxAllocationsToMove
// budget would be exceeded.
// NOTE(review): this chunk is an incomplete extraction — several original
// lines (braces, returns, loop headers) are missing between the numbered
// fragments below.
7156 VkResult VmaDefragmentator::DefragmentRound(
7157 VkDeviceSize maxBytesToMove,
7158 uint32_t maxAllocationsToMove)
// Nothing to do when no blocks were collected.
7160 if(m_Blocks.empty())
// Cursor starts at the last block / last allocation and moves backwards.
7165 size_t srcBlockIndex = m_Blocks.size() - 1;
7166 size_t srcAllocIndex = SIZE_MAX;
// Advance cursor to the next existing source allocation (SIZE_MAX means
// "start at the end of the current block").
7172 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
7174 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
7177 if(srcBlockIndex == 0)
7184 srcAllocIndex = SIZE_MAX;
7189 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
7193 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
7194 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
// Cache the source allocation's placement parameters.
7196 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
7197 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
7198 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
7199 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front; only indices <= srcBlockIndex make
// sense as "earlier" homes.
7202 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
7204 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
7205 VmaAllocationRequest dstAllocRequest;
7206 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
7207 m_CurrentFrameIndex,
7208 m_pBlockVector->GetFrameInUseCount(),
7209 m_pBlockVector->GetBufferImageGranularity(),
7214 &dstAllocRequest) &&
// MoveMakesSense() rejects moves that would not lower block index/offset.
7216 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
7218 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Stop (VK_INCOMPLETE) if this move would blow the caller's budget.
7221 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
7222 (m_BytesMoved + size > maxBytesToMove))
7224 return VK_INCOMPLETE;
// Both blocks must be host-mapped to copy the payload.
7227 void* pDstMappedData = VMA_NULL;
7228 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
7229 if(res != VK_SUCCESS)
7234 void* pSrcMappedData = VMA_NULL;
7235 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
7236 if(res != VK_SUCCESS)
// memcpy of the allocation payload to its new offset.
7243 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
7244 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
7245 static_cast<size_t>(size));
// Commit: register at destination, free at source, retarget the handle.
7247 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
7248 pSrcBlockInfo->m_pBlock->m_Metadata.FreeAtOffset(srcOffset);
7250 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
7252 if(allocInfo.m_pChanged != VMA_NULL)
7254 *allocInfo.m_pChanged = VK_TRUE;
7257 ++m_AllocationsMoved;
7258 m_BytesMoved += size;
7260 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Move the cursor to the previous allocation / previous block.
7268 if(srcAllocIndex > 0)
7274 if(srcBlockIndex > 0)
7277 srcAllocIndex = SIZE_MAX;
// Top-level driver: builds per-block info from the registered allocations,
// sorts blocks into preferred destination order, then runs DefragmentRound
// up to two rounds within the byte/allocation budget, finally unmapping any
// blocks mapped for the copy.
// NOTE(review): incomplete extraction — some braces/early returns between the
// numbered fragments are missing.
7287 VkResult VmaDefragmentator::Defragment(
7288 VkDeviceSize maxBytesToMove,
7289 uint32_t maxAllocationsToMove)
// Early out when AddAllocation was never called.
7291 if(m_Allocations.empty())
// Wrap every block of the vector in a BlockInfo.
7297 const size_t blockCount = m_pBlockVector->m_Blocks.size();
7298 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7300 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
7301 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
7302 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so allocations can be binary-searched to their block.
7306 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Distribute registered (non-lost) allocations into their owning BlockInfo.
7309 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
7311 AllocationInfo& allocInfo = m_Allocations[blockIndex];
// Lost allocations are skipped — nothing to move.
7313 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7315 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
7316 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
7317 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
7319 (*it)->m_Allocations.push_back(allocInfo);
7327 m_Allocations.clear();
// Precompute per-block flags and order allocations largest-first.
7329 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7331 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
7332 pBlockInfo->CalcHasNonMovableAllocations();
7333 pBlockInfo->SortAllocationsBySizeDescecnding();
// Prefer destination blocks per BlockInfoCompareMoveDestination.
7337 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// Execute up to 2 rounds while previous round fully succeeded.
7340 VkResult result = VK_SUCCESS;
7341 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
7343 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Unmap blocks that EnsureMapping mapped during the copy phase.
7347 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7349 m_Blocks[blockIndex]->Unmap(m_hAllocator);
7355 bool VmaDefragmentator::MoveMakesSense(
7356 size_t dstBlockIndex, VkDeviceSize dstOffset,
7357 size_t srcBlockIndex, VkDeviceSize srcOffset)
7359 if(dstBlockIndex < srcBlockIndex)
7363 if(dstBlockIndex > srcBlockIndex)
7367 if(dstOffset < srcOffset)
// VmaAllocator_T constructor (tail of initializer list + body).
// NOTE(review): the constructor signature and the first initializer-list
// entries were lost in this extraction, as were several body sections
// (e.g. the ImportVulkanFunctions call around 7401-7410 and block-vector
// creation arguments around 7438-7447) — reconstruct from upstream before
// relying on this text.
7380 m_hDevice(pCreateInfo->device),
7381 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
// Fall back to empty callbacks when the user supplied none.
7382 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
7383 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
7384 m_PreferredLargeHeapBlockSize(0),
7385 m_PhysicalDevice(pCreateInfo->physicalDevice),
7386 m_CurrentFrameIndex(0),
7387 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks()))
// Zero out all cached tables before querying the device.
7391 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
7392 memset(&m_MemProps, 0,
sizeof(m_MemProps));
7393 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
7395 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
7396 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// Default: no per-heap size limit.
7398 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
7400 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
// Cache device and memory properties once, up front.
7411 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
7412 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Apply user-provided heap size limits, clamping reported heap sizes.
7419 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
7421 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
7422 if(limit != VK_WHOLE_SIZE)
7424 m_HeapSizeLimit[heapIndex] = limit;
7425 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
7427 m_MemProps.memoryHeaps[heapIndex].size = limit;
// Create one block vector + dedicated-allocation list per memory type.
7433 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7435 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
7437 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
7443 GetBufferImageGranularity(),
7448 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
7452 VmaAllocator_T::~VmaAllocator_T()
7454 VMA_ASSERT(m_Pools.empty());
7456 for(
size_t i = GetMemoryTypeCount(); i--; )
7458 vma_delete(
this, m_pDedicatedAllocations[i]);
7459 vma_delete(
this, m_pBlockVectors[i]);
7463 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
7465 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 7466 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
7467 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
7468 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
7469 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
7470 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
7471 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
7472 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
7473 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
7474 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
7475 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
7476 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
7477 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
7478 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
7479 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
7480 if(m_UseKhrDedicatedAllocation)
7482 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
7483 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
7484 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
7485 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
7487 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 7489 #define VMA_COPY_IF_NOT_NULL(funcName) \ 7490 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 7492 if(pVulkanFunctions != VMA_NULL)
7494 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
7495 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
7496 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
7497 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
7498 VMA_COPY_IF_NOT_NULL(vkMapMemory);
7499 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
7500 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
7501 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
7502 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
7503 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
7504 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
7505 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
7506 VMA_COPY_IF_NOT_NULL(vkCreateImage);
7507 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
7508 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
7509 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
7512 #undef VMA_COPY_IF_NOT_NULL 7516 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
7517 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
7518 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
7519 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
7520 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
7521 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
7522 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
7523 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
7524 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
7525 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
7526 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
7527 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
7528 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
7529 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
7530 if(m_UseKhrDedicatedAllocation)
7532 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
7533 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
7537 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
7539 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7540 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
7541 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
7542 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates memory of one specific memory type: either as a dedicated
// VkDeviceMemory (when required/preferred and allowed) or as a suballocation
// from the type's block vector, with dedicated allocation as a fallback.
// NOTE(review): incomplete extraction — parameter list entries, the
// finalCreateInfo setup and several branch bodies are missing between the
// numbered fragments.
7545 VkResult VmaAllocator_T::AllocateMemoryOfType(
7546 const VkMemoryRequirements& vkMemReq,
7547 bool dedicatedAllocation,
7548 VkBuffer dedicatedBuffer,
7549 VkImage dedicatedImage,
7551 uint32_t memTypeIndex,
7552 VmaSuballocationType suballocType,
7555 VMA_ASSERT(pAllocation != VMA_NULL);
7556 VMA_DEBUG_LOG(
"  AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Mapped-bit is meaningless on non-HOST_VISIBLE memory (flag dropped here).
7562 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
7567 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
7568 VMA_ASSERT(blockVector);
// Heuristic: prefer dedicated memory for large requests (> half block size)
// or when the caller/driver asked for it.
7570 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
7571 bool preferDedicatedMemory =
7572 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
7573 dedicatedAllocation ||
7575 vkMemReq.size > preferredBlockSize / 2;
7577 if(preferDedicatedMemory &&
7579 finalCreateInfo.
pool == VK_NULL_HANDLE)
// NEVER_ALLOCATE forbids creating new device memory here.
7588 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7592 return AllocateDedicatedMemory(
// Primary path: suballocate from the block vector.
7606 VkResult res = blockVector->Allocate(
7608 m_CurrentFrameIndex.load(),
7613 if(res == VK_SUCCESS)
// Block allocation failed and new blocks are forbidden.
7621 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: try a dedicated allocation.
7625 res = AllocateDedicatedMemory(
7631 finalCreateInfo.pUserData,
7635 if(res == VK_SUCCESS)
7638 VMA_DEBUG_LOG(
"    Allocated as DedicatedMemory");
// Both paths failed.
7644 VMA_DEBUG_LOG(
"    vkAllocateMemory FAILED");
// Creates a single dedicated VkDeviceMemory for one allocation, optionally
// chaining VkMemoryDedicatedAllocateInfoKHR (VK_KHR_dedicated_allocation)
// and optionally persistently mapping it, then registers the allocation in
// the sorted per-memory-type dedicated list.
// NOTE(review): incomplete extraction — some parameters and branch bodies
// (e.g. the map-requested condition around 7695) are missing.
7651 VkResult VmaAllocator_T::AllocateDedicatedMemory(
7653 VmaSuballocationType suballocType,
7654 uint32_t memTypeIndex,
7656 bool isUserDataString,
7658 VkBuffer dedicatedBuffer,
7659 VkImage dedicatedImage,
7662 VMA_ASSERT(pAllocation);
7664 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
7665 allocInfo.memoryTypeIndex = memTypeIndex;
7666 allocInfo.allocationSize = size;
// Chain dedicated-allocation info when the extension is in use; buffer and
// image are mutually exclusive.
7668 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
7669 if(m_UseKhrDedicatedAllocation)
7671 if(dedicatedBuffer != VK_NULL_HANDLE)
7673 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
7674 dedicatedAllocInfo.buffer = dedicatedBuffer;
7675 allocInfo.pNext = &dedicatedAllocInfo;
7677 else if(dedicatedImage != VK_NULL_HANDLE)
7679 dedicatedAllocInfo.image = dedicatedImage;
7680 allocInfo.pNext = &dedicatedAllocInfo;
// Allocate through the heap-limit-aware wrapper.
7685 VkDeviceMemory hMemory = VK_NULL_HANDLE;
7686 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
7689 VMA_DEBUG_LOG(
"    vkAllocateMemory FAILED");
// Optional persistent mapping; on failure the memory is released again.
7693 void* pMappedData = VMA_NULL;
7696 res = (*m_VulkanFunctions.vkMapMemory)(
7705 VMA_DEBUG_LOG(
"    vkMapMemory FAILED");
7706 FreeVulkanMemory(memTypeIndex, size, hMemory);
// Wrap the raw memory in a VmaAllocation_T and register it (sorted by
// pointer) so FreeDedicatedMemory can find it later.
7711 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
7712 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
7713 (*pAllocation)->SetUserData(
this, pUserData);
7717 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7718 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7719 VMA_ASSERT(pDedicatedAllocations);
7720 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
7723 VMA_DEBUG_LOG(
"    Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
7728 void VmaAllocator_T::GetBufferMemoryRequirements(
7730 VkMemoryRequirements& memReq,
7731 bool& requiresDedicatedAllocation,
7732 bool& prefersDedicatedAllocation)
const 7734 if(m_UseKhrDedicatedAllocation)
7736 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
7737 memReqInfo.buffer = hBuffer;
7739 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7741 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7742 memReq2.pNext = &memDedicatedReq;
7744 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7746 memReq = memReq2.memoryRequirements;
7747 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7748 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
7752 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
7753 requiresDedicatedAllocation =
false;
7754 prefersDedicatedAllocation =
false;
7758 void VmaAllocator_T::GetImageMemoryRequirements(
7760 VkMemoryRequirements& memReq,
7761 bool& requiresDedicatedAllocation,
7762 bool& prefersDedicatedAllocation)
const 7764 if(m_UseKhrDedicatedAllocation)
7766 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
7767 memReqInfo.image = hImage;
7769 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7771 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7772 memReq2.pNext = &memDedicatedReq;
7774 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7776 memReq = memReq2.memoryRequirements;
7777 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7778 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
7782 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
7783 requiresDedicatedAllocation =
false;
7784 prefersDedicatedAllocation =
false;
// Entry point for all allocations: validates flag combinations, routes
// pool allocations to the pool's block vector, otherwise iterates candidate
// memory types (best first) calling AllocateMemoryOfType, masking out each
// failed type and retrying with the next best.
// NOTE(review): incomplete extraction — flag tests, memory-type-index lookup
// calls and several argument lists are missing between the fragments.
7788 VkResult VmaAllocator_T::AllocateMemory(
7789 const VkMemoryRequirements& vkMemReq,
7790 bool requiresDedicatedAllocation,
7791 bool prefersDedicatedAllocation,
7792 VkBuffer dedicatedBuffer,
7793 VkImage dedicatedImage,
7795 VmaSuballocationType suballocType,
// Invalid flag combinations are rejected up front.
7801 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
7802 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7807 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
7808 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// A driver-required dedicated allocation conflicts with NEVER_ALLOCATE and
// with custom pools.
7810 if(requiresDedicatedAllocation)
7814 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
7815 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7817 if(createInfo.
pool != VK_NULL_HANDLE)
7819 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
7820 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7823 if((createInfo.
pool != VK_NULL_HANDLE) &&
7826 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
7827 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom pool: allocate directly from the pool's block vector.
7830 if(createInfo.
pool != VK_NULL_HANDLE)
7832 return createInfo.
pool->m_BlockVector.Allocate(
7834 m_CurrentFrameIndex.load(),
// Default pools: try memory types in order of suitability.
7843 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
7844 uint32_t memTypeIndex = UINT32_MAX;
7846 if(res == VK_SUCCESS)
7848 res = AllocateMemoryOfType(
7850 requiresDedicatedAllocation || prefersDedicatedAllocation,
7858 if(res == VK_SUCCESS)
// This type failed: exclude it and look for the next best candidate.
7868 memoryTypeBits &= ~(1u << memTypeIndex);
7871 if(res == VK_SUCCESS)
7873 res = AllocateMemoryOfType(
7875 requiresDedicatedAllocation || prefersDedicatedAllocation,
7883 if(res == VK_SUCCESS)
// No memory type could satisfy the request.
7893 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7904 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
7906 VMA_ASSERT(allocation);
7908 if(allocation->CanBecomeLost() ==
false ||
7909 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7911 switch(allocation->GetType())
7913 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7915 VmaBlockVector* pBlockVector = VMA_NULL;
7916 VmaPool hPool = allocation->GetPool();
7917 if(hPool != VK_NULL_HANDLE)
7919 pBlockVector = &hPool->m_BlockVector;
7923 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7924 pBlockVector = m_pBlockVectors[memTypeIndex];
7926 pBlockVector->Free(allocation);
7929 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7930 FreeDedicatedMemory(allocation);
7937 allocation->SetUserData(
this, VMA_NULL);
7938 vma_delete(
this, allocation);
// Computes global statistics: initializes all buckets, then accumulates the
// default block vectors, all custom pools, and all dedicated allocations,
// and finally post-processes (averages etc.) every bucket.
// NOTE(review): incomplete extraction — InitStatInfo calls inside the first
// two loops and the VmaStatInfo local declaration are missing.
7941 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Reset all output buckets.
7944 InitStatInfo(pStats->
total);
7945 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
7947 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default per-memory-type block vectors.
7951 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7953 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
7954 VMA_ASSERT(pBlockVector);
7955 pBlockVector->AddStats(pStats);
// Custom pools (m_Pools guarded by its own mutex).
7960 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7961 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
7963 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
// Dedicated allocations, per memory type.
7968 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7970 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7971 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7972 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7973 VMA_ASSERT(pDedicatedAllocVector);
7974 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
7977 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
7978 VmaAddStatInfo(pStats->
total, allocationStatInfo);
7979 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
7980 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages/mins/maxes for every bucket.
7985 VmaPostprocessCalcStatInfo(pStats->
total);
7986 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
7987 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
7988 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
7989 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// PCI vendor ID of AMD (0x1002), used to detect AMD GPUs from
// VkPhysicalDeviceProperties::vendorID.
7992 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Allocator-level defragmentation driver: collects movable HOST_VISIBLE
// block allocations into per-block-vector defragmentators, runs them for all
// default vectors and all pools, then destroys the defragmentators.
// NOTE(review): incomplete extraction — parameter entries, validity checks
// and budget extraction from pDefragmentationInfo are missing between
// fragments.
7994 VkResult VmaAllocator_T::Defragment(
7996 size_t allocationCount,
7997 VkBool32* pAllocationsChanged,
// Clear the caller's output arrays/structs first.
8001 if(pAllocationsChanged != VMA_NULL)
8003 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
8005 if(pDefragmentationStats != VMA_NULL)
8007 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
8010 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
8012 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
8014 const size_t poolCount = m_Pools.size();
// Dispatch each candidate allocation to its block vector's defragmentator.
8017 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
8021 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
// Only block-suballocated, HOST_VISIBLE, not-lost allocations are movable.
8023 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
8025 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
8027 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
8029 VmaBlockVector* pAllocBlockVector = VMA_NULL;
8031 const VmaPool hAllocPool = hAlloc->GetPool();
8033 if(hAllocPool != VK_NULL_HANDLE)
8035 pAllocBlockVector = &hAllocPool->GetBlockVector();
8040 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
8043 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
8045 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
8046 &pAllocationsChanged[allocIndex] : VMA_NULL;
8047 pDefragmentator->AddAllocation(hAlloc, pChanged);
8051 VkResult result = VK_SUCCESS;
// Budget defaults to "unlimited" unless pDefragmentationInfo narrows it.
// NOTE(review): SIZE_MAX initializes a VkDeviceSize here — on 32-bit
// targets that is smaller than the full 64-bit range; upstream later
// changed this to VK_WHOLE_SIZE. Worth confirming/fixing.
8055 VkDeviceSize maxBytesToMove = SIZE_MAX;
8056 uint32_t maxAllocationsToMove = UINT32_MAX;
8057 if(pDefragmentationInfo != VMA_NULL)
// Run defragmentation for default vectors (HOST_VISIBLE types only)...
8064 for(uint32_t memTypeIndex = 0;
8065 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
8069 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8071 result = m_pBlockVectors[memTypeIndex]->Defragment(
8072 pDefragmentationStats,
8074 maxAllocationsToMove);
// ...and for every custom pool.
8079 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
8081 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
8082 pDefragmentationStats,
8084 maxAllocationsToMove);
// Tear down all defragmentators created above.
8090 for(
size_t poolIndex = poolCount; poolIndex--; )
8092 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
8096 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
8098 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8100 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// Body of VmaAllocator_T::GetAllocationInfo (the function header was lost in
// this extraction). Fills pAllocationInfo from the allocation. For lost-able
// allocations it also "touches" the allocation: atomically advances its
// last-use frame index via compare-exchange, reporting zeroed memory fields
// when the allocation is already lost.
8109 if(hAllocation->CanBecomeLost())
// Loop with CAS: another thread may update the frame index concurrently.
8115 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
8116 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
8119 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Lost: report null memory/offset but keep size and user data.
8123 pAllocationInfo->
offset = 0;
8124 pAllocationInfo->
size = hAllocation->GetSize();
8126 pAllocationInfo->
pUserData = hAllocation->GetUserData();
8129 else if(localLastUseFrameIndex == localCurrFrameIndex)
// Already touched this frame: report real location.
8131 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
8132 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
8133 pAllocationInfo->
offset = hAllocation->GetOffset();
8134 pAllocationInfo->
size = hAllocation->GetSize();
8136 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise: try to advance last-use frame index, retry on contention.
8141 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
8143 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-able allocation: report everything directly.
8150 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
8151 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
8152 pAllocationInfo->
offset = hAllocation->GetOffset();
8153 pAllocationInfo->
size = hAllocation->GetSize();
8154 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
8155 pAllocationInfo->
pUserData = hAllocation->GetUserData();
8159 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
8162 if(hAllocation->CanBecomeLost())
8164 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
8165 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
8168 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
8172 else if(localLastUseFrameIndex == localCurrFrameIndex)
8178 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
8180 localLastUseFrameIndex = localCurrFrameIndex;
// Tail of VmaAllocator_T::CreatePool (the signature and newCreateInfo
// normalization were lost in this extraction). Creates a VmaPool_T, builds
// its minimum block count, and registers it in the sorted m_Pools list.
8193 VMA_DEBUG_LOG(
"  CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
8206 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
// Pre-create minBlockCount blocks; roll back the pool object on failure.
8208 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
8209 if(res != VK_SUCCESS)
8211 vma_delete(
this, *pPool);
// Register the pool (sorted by pointer) under the pools mutex.
8218 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8219 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
8225 void VmaAllocator_T::DestroyPool(
VmaPool pool)
8229 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8230 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
8231 VMA_ASSERT(success &&
"Pool not found in Allocator.");
8234 vma_delete(
this, pool);
8239 pool->m_BlockVector.GetPoolStats(pPoolStats);
8242 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
8244 m_CurrentFrameIndex.store(frameIndex);
8247 void VmaAllocator_T::MakePoolAllocationsLost(
8249 size_t* pLostAllocationCount)
8251 hPool->m_BlockVector.MakePoolAllocationsLost(
8252 m_CurrentFrameIndex.load(),
8253 pLostAllocationCount);
8256 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
8258 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
8259 (*pAllocation)->InitLost();
8262 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
8264 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
8267 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
8269 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
8270 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
8272 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
8273 if(res == VK_SUCCESS)
8275 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
8280 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
8285 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
8288 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
8290 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
8296 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
8298 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
8300 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
8303 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
8305 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
8306 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
8308 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
8309 m_HeapSizeLimit[heapIndex] += size;
8313 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
8315 if(hAllocation->CanBecomeLost())
8317 return VK_ERROR_MEMORY_MAP_FAILED;
8320 switch(hAllocation->GetType())
8322 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8324 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
8325 char *pBytes = VMA_NULL;
8326 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
8327 if(res == VK_SUCCESS)
8329 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
8330 hAllocation->BlockAllocMap();
8334 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8335 return hAllocation->DedicatedAllocMap(
this, ppData);
8338 return VK_ERROR_MEMORY_MAP_FAILED;
8344 switch(hAllocation->GetType())
8346 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8348 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
8349 hAllocation->BlockAllocUnmap();
8350 pBlock->Unmap(
this, 1);
8353 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8354 hAllocation->DedicatedAllocUnmap(
this);
8361 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
8363 VkResult res = VK_SUCCESS;
8364 switch(hAllocation->GetType())
8366 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8367 res = GetVulkanFunctions().vkBindBufferMemory(
8370 hAllocation->GetMemory(),
8373 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8375 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
8376 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
8377 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
8386 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
8388 VkResult res = VK_SUCCESS;
8389 switch(hAllocation->GetType())
8391 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8392 res = GetVulkanFunctions().vkBindImageMemory(
8395 hAllocation->GetMemory(),
8398 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8400 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
8401 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
8402 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
8411 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
8413 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
8415 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
8417 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8418 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
8419 VMA_ASSERT(pDedicatedAllocations);
8420 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
8421 VMA_ASSERT(success);
8424 VkDeviceMemory hMemory = allocation->GetMemory();
8426 if(allocation->GetMappedData() != VMA_NULL)
8428 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
8431 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
8433 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
8436 #if VMA_STATS_STRING_ENABLED 8438 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
8440 bool dedicatedAllocationsStarted =
false;
8441 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8443 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8444 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
8445 VMA_ASSERT(pDedicatedAllocVector);
8446 if(pDedicatedAllocVector->empty() ==
false)
8448 if(dedicatedAllocationsStarted ==
false)
8450 dedicatedAllocationsStarted =
true;
8451 json.WriteString(
"DedicatedAllocations");
8455 json.BeginString(
"Type ");
8456 json.ContinueString(memTypeIndex);
8461 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
8464 json.BeginObject(
true);
8466 json.WriteString(
"Type");
8467 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
8469 json.WriteString(
"Size");
8470 json.WriteNumber(hAlloc->GetSize());
8472 const void* pUserData = hAlloc->GetUserData();
8473 if(pUserData != VMA_NULL)
8475 json.WriteString(
"UserData");
8476 if(hAlloc->IsUserDataString())
8478 json.WriteString((
const char*)pUserData);
8483 json.ContinueString_Pointer(pUserData);
8494 if(dedicatedAllocationsStarted)
8500 bool allocationsStarted =
false;
8501 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8503 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
8505 if(allocationsStarted ==
false)
8507 allocationsStarted =
true;
8508 json.WriteString(
"DefaultPools");
8512 json.BeginString(
"Type ");
8513 json.ContinueString(memTypeIndex);
8516 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
8519 if(allocationsStarted)
8526 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8527 const size_t poolCount = m_Pools.size();
8530 json.WriteString(
"Pools");
8532 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
8534 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
8541 #endif // #if VMA_STATS_STRING_ENABLED 8543 static VkResult AllocateMemoryForImage(
8547 VmaSuballocationType suballocType,
8550 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
8552 VkMemoryRequirements vkMemReq = {};
8553 bool requiresDedicatedAllocation =
false;
8554 bool prefersDedicatedAllocation =
false;
8555 allocator->GetImageMemoryRequirements(image, vkMemReq,
8556 requiresDedicatedAllocation, prefersDedicatedAllocation);
8558 return allocator->AllocateMemory(
8560 requiresDedicatedAllocation,
8561 prefersDedicatedAllocation,
8564 *pAllocationCreateInfo,
8576 VMA_ASSERT(pCreateInfo && pAllocator);
8577 VMA_DEBUG_LOG(
"vmaCreateAllocator");
8585 if(allocator != VK_NULL_HANDLE)
8587 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
8588 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
8589 vma_delete(&allocationCallbacks, allocator);
8595 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
8597 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
8598 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
8603 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
8605 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
8606 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
8611 uint32_t memoryTypeIndex,
8612 VkMemoryPropertyFlags* pFlags)
8614 VMA_ASSERT(allocator && pFlags);
8615 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
8616 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
8621 uint32_t frameIndex)
8623 VMA_ASSERT(allocator);
8624 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
8626 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8628 allocator->SetCurrentFrameIndex(frameIndex);
8635 VMA_ASSERT(allocator && pStats);
8636 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8637 allocator->CalculateStats(pStats);
8640 #if VMA_STATS_STRING_ENABLED 8644 char** ppStatsString,
8645 VkBool32 detailedMap)
8647 VMA_ASSERT(allocator && ppStatsString);
8648 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8650 VmaStringBuilder sb(allocator);
8652 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
8656 allocator->CalculateStats(&stats);
8658 json.WriteString(
"Total");
8659 VmaPrintStatInfo(json, stats.
total);
8661 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
8663 json.BeginString(
"Heap ");
8664 json.ContinueString(heapIndex);
8668 json.WriteString(
"Size");
8669 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
8671 json.WriteString(
"Flags");
8672 json.BeginArray(
true);
8673 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
8675 json.WriteString(
"DEVICE_LOCAL");
8681 json.WriteString(
"Stats");
8682 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
8685 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
8687 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
8689 json.BeginString(
"Type ");
8690 json.ContinueString(typeIndex);
8695 json.WriteString(
"Flags");
8696 json.BeginArray(
true);
8697 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
8698 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
8700 json.WriteString(
"DEVICE_LOCAL");
8702 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8704 json.WriteString(
"HOST_VISIBLE");
8706 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
8708 json.WriteString(
"HOST_COHERENT");
8710 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
8712 json.WriteString(
"HOST_CACHED");
8714 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
8716 json.WriteString(
"LAZILY_ALLOCATED");
8722 json.WriteString(
"Stats");
8723 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
8732 if(detailedMap == VK_TRUE)
8734 allocator->PrintDetailedMap(json);
8740 const size_t len = sb.GetLength();
8741 char*
const pChars = vma_new_array(allocator,
char, len + 1);
8744 memcpy(pChars, sb.GetData(), len);
8747 *ppStatsString = pChars;
8754 if(pStatsString != VMA_NULL)
8756 VMA_ASSERT(allocator);
8757 size_t len = strlen(pStatsString);
8758 vma_delete_array(allocator, pStatsString, len + 1);
8762 #endif // #if VMA_STATS_STRING_ENABLED 8769 uint32_t memoryTypeBits,
8771 uint32_t* pMemoryTypeIndex)
8773 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8774 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8775 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8782 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
8786 switch(pAllocationCreateInfo->
usage)
8791 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8794 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
8797 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8798 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8801 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8802 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
8808 *pMemoryTypeIndex = UINT32_MAX;
8809 uint32_t minCost = UINT32_MAX;
8810 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
8811 memTypeIndex < allocator->GetMemoryTypeCount();
8812 ++memTypeIndex, memTypeBit <<= 1)
8815 if((memTypeBit & memoryTypeBits) != 0)
8817 const VkMemoryPropertyFlags currFlags =
8818 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
8820 if((requiredFlags & ~currFlags) == 0)
8823 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
8825 if(currCost < minCost)
8827 *pMemoryTypeIndex = memTypeIndex;
8837 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
8842 const VkBufferCreateInfo* pBufferCreateInfo,
8844 uint32_t* pMemoryTypeIndex)
8846 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8847 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
8848 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8849 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8851 const VkDevice hDev = allocator->m_hDevice;
8852 VkBuffer hBuffer = VK_NULL_HANDLE;
8853 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
8854 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
8855 if(res == VK_SUCCESS)
8857 VkMemoryRequirements memReq = {};
8858 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
8859 hDev, hBuffer, &memReq);
8863 memReq.memoryTypeBits,
8864 pAllocationCreateInfo,
8867 allocator->GetVulkanFunctions().vkDestroyBuffer(
8868 hDev, hBuffer, allocator->GetAllocationCallbacks());
8875 const VkImageCreateInfo* pImageCreateInfo,
8877 uint32_t* pMemoryTypeIndex)
8879 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8880 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
8881 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8882 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8884 const VkDevice hDev = allocator->m_hDevice;
8885 VkImage hImage = VK_NULL_HANDLE;
8886 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
8887 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
8888 if(res == VK_SUCCESS)
8890 VkMemoryRequirements memReq = {};
8891 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
8892 hDev, hImage, &memReq);
8896 memReq.memoryTypeBits,
8897 pAllocationCreateInfo,
8900 allocator->GetVulkanFunctions().vkDestroyImage(
8901 hDev, hImage, allocator->GetAllocationCallbacks());
8911 VMA_ASSERT(allocator && pCreateInfo && pPool);
8913 VMA_DEBUG_LOG(
"vmaCreatePool");
8915 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8917 return allocator->CreatePool(pCreateInfo, pPool);
8924 VMA_ASSERT(allocator);
8926 if(pool == VK_NULL_HANDLE)
8931 VMA_DEBUG_LOG(
"vmaDestroyPool");
8933 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8935 allocator->DestroyPool(pool);
8943 VMA_ASSERT(allocator && pool && pPoolStats);
8945 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8947 allocator->GetPoolStats(pool, pPoolStats);
8953 size_t* pLostAllocationCount)
8955 VMA_ASSERT(allocator && pool);
8957 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8959 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
8964 const VkMemoryRequirements* pVkMemoryRequirements,
8969 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
8971 VMA_DEBUG_LOG(
"vmaAllocateMemory");
8973 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8975 VkResult result = allocator->AllocateMemory(
8976 *pVkMemoryRequirements,
8982 VMA_SUBALLOCATION_TYPE_UNKNOWN,
8985 if(pAllocationInfo && result == VK_SUCCESS)
8987 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9000 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
9002 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
9004 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9006 VkMemoryRequirements vkMemReq = {};
9007 bool requiresDedicatedAllocation =
false;
9008 bool prefersDedicatedAllocation =
false;
9009 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
9010 requiresDedicatedAllocation,
9011 prefersDedicatedAllocation);
9013 VkResult result = allocator->AllocateMemory(
9015 requiresDedicatedAllocation,
9016 prefersDedicatedAllocation,
9020 VMA_SUBALLOCATION_TYPE_BUFFER,
9023 if(pAllocationInfo && result == VK_SUCCESS)
9025 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9038 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
9040 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
9042 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9044 VkResult result = AllocateMemoryForImage(
9048 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
9051 if(pAllocationInfo && result == VK_SUCCESS)
9053 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9063 VMA_ASSERT(allocator && allocation);
9065 VMA_DEBUG_LOG(
"vmaFreeMemory");
9067 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9069 allocator->FreeMemory(allocation);
9077 VMA_ASSERT(allocator && allocation && pAllocationInfo);
9079 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9081 allocator->GetAllocationInfo(allocation, pAllocationInfo);
9088 VMA_ASSERT(allocator && allocation);
9090 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9092 return allocator->TouchAllocation(allocation);
9100 VMA_ASSERT(allocator && allocation);
9102 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9104 allocation->SetUserData(allocator, pUserData);
9111 VMA_ASSERT(allocator && pAllocation);
9113 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
9115 allocator->CreateLostAllocation(pAllocation);
9123 VMA_ASSERT(allocator && allocation && ppData);
9125 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9127 return allocator->Map(allocation, ppData);
9134 VMA_ASSERT(allocator && allocation);
9136 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9138 allocator->Unmap(allocation);
9144 size_t allocationCount,
9145 VkBool32* pAllocationsChanged,
9149 VMA_ASSERT(allocator && pAllocations);
9151 VMA_DEBUG_LOG(
"vmaDefragment");
9153 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9155 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
9163 VMA_ASSERT(allocator && allocation && buffer);
9165 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
9167 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9169 return allocator->BindBufferMemory(allocation, buffer);
9177 VMA_ASSERT(allocator && allocation && image);
9179 VMA_DEBUG_LOG(
"vmaBindImageMemory");
9181 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9183 return allocator->BindImageMemory(allocation, image);
9188 const VkBufferCreateInfo* pBufferCreateInfo,
9194 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
9196 VMA_DEBUG_LOG(
"vmaCreateBuffer");
9198 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9200 *pBuffer = VK_NULL_HANDLE;
9201 *pAllocation = VK_NULL_HANDLE;
9204 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
9205 allocator->m_hDevice,
9207 allocator->GetAllocationCallbacks(),
9212 VkMemoryRequirements vkMemReq = {};
9213 bool requiresDedicatedAllocation =
false;
9214 bool prefersDedicatedAllocation =
false;
9215 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
9216 requiresDedicatedAllocation, prefersDedicatedAllocation);
9220 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
9222 VMA_ASSERT(vkMemReq.alignment %
9223 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
9225 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
9227 VMA_ASSERT(vkMemReq.alignment %
9228 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
9230 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
9232 VMA_ASSERT(vkMemReq.alignment %
9233 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
9237 res = allocator->AllocateMemory(
9239 requiresDedicatedAllocation,
9240 prefersDedicatedAllocation,
9243 *pAllocationCreateInfo,
9244 VMA_SUBALLOCATION_TYPE_BUFFER,
9249 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
9253 if(pAllocationInfo != VMA_NULL)
9255 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9259 allocator->FreeMemory(*pAllocation);
9260 *pAllocation = VK_NULL_HANDLE;
9261 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
9262 *pBuffer = VK_NULL_HANDLE;
9265 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
9266 *pBuffer = VK_NULL_HANDLE;
9277 if(buffer != VK_NULL_HANDLE)
9279 VMA_ASSERT(allocator);
9281 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
9283 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9285 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
9287 allocator->FreeMemory(allocation);
9293 const VkImageCreateInfo* pImageCreateInfo,
9299 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
9301 VMA_DEBUG_LOG(
"vmaCreateImage");
9303 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9305 *pImage = VK_NULL_HANDLE;
9306 *pAllocation = VK_NULL_HANDLE;
9309 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
9310 allocator->m_hDevice,
9312 allocator->GetAllocationCallbacks(),
9316 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
9317 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
9318 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
9321 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
9325 res = allocator->BindImageMemory(*pAllocation, *pImage);
9329 if(pAllocationInfo != VMA_NULL)
9331 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9335 allocator->FreeMemory(*pAllocation);
9336 *pAllocation = VK_NULL_HANDLE;
9337 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
9338 *pImage = VK_NULL_HANDLE;
9341 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
9342 *pImage = VK_NULL_HANDLE;
9353 if(image != VK_NULL_HANDLE)
9355 VMA_ASSERT(allocator);
9357 VMA_DEBUG_LOG(
"vmaDestroyImage");
9359 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9361 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
9363 allocator->FreeMemory(allocation);
9367 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1157
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1419
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1182
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
Represents single memory allocation.
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1167
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1376
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1161
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1749
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1179
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1948
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1595
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1649
Definition: vk_mem_alloc.h:1456
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1150
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1494
Definition: vk_mem_alloc.h:1403
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1191
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1244
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1176
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1407
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1309
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1164
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1308
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:1172
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1952
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1208
VmaStatInfo total
Definition: vk_mem_alloc.h:1318
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1960
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1478
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1943
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1165
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1092
Represents main object of this library initialized.
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1185
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1603
Definition: vk_mem_alloc.h:1597
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1759
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1162
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1515
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1619
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1655
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1148
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1606
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VmaMemoryUsage
Definition: vk_mem_alloc.h:1354
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1938
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1956
Definition: vk_mem_alloc.h:1393
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1502
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1163
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1314
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1098
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1119
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1124
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1958
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1489
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1665
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1158
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1297
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1614
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1111
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1463
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1310
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1115
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1609
Definition: vk_mem_alloc.h:1402
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1484
Definition: vk_mem_alloc.h:1475
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1300
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1160
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:1627
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1194
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1658
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1473
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1508
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1232
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1316
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1443
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1309
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1169
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1113
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1168
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1641
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1773
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1188
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1309
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1306
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1646
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1754
Definition: vk_mem_alloc.h:1471
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1954
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1156
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:1171
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1304
Definition: vk_mem_alloc.h:1359
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1599
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1302
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1166
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1170
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1430
Definition: vk_mem_alloc.h:1386
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1768
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1146
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1159
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1735
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1577
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1310
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
Definition: vk_mem_alloc.h:1469
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1317
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1652
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1310
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1740