23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1074 #include <vulkan/vulkan.h> 1090 uint32_t memoryType,
1091 VkDeviceMemory memory,
1096 uint32_t memoryType,
1097 VkDeviceMemory memory,
1258 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1266 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1276 uint32_t memoryTypeIndex,
1277 VkMemoryPropertyFlags* pFlags);
1289 uint32_t frameIndex);
1322 #define VMA_STATS_STRING_ENABLED 1 1324 #if VMA_STATS_STRING_ENABLED 1331 char** ppStatsString,
1332 VkBool32 detailedMap);
1336 char* pStatsString);
1338 #endif // #if VMA_STATS_STRING_ENABLED 1532 uint32_t memoryTypeBits,
1534 uint32_t* pMemoryTypeIndex);
1550 const VkBufferCreateInfo* pBufferCreateInfo,
1552 uint32_t* pMemoryTypeIndex);
1568 const VkImageCreateInfo* pImageCreateInfo,
1570 uint32_t* pMemoryTypeIndex);
1701 size_t* pLostAllocationCount);
1784 const VkMemoryRequirements* pVkMemoryRequirements,
2044 size_t allocationCount,
2045 VkBool32* pAllocationsChanged,
2111 const VkBufferCreateInfo* pBufferCreateInfo,
2136 const VkImageCreateInfo* pImageCreateInfo,
2162 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 2165 #ifdef __INTELLISENSE__ 2166 #define VMA_IMPLEMENTATION 2169 #ifdef VMA_IMPLEMENTATION 2170 #undef VMA_IMPLEMENTATION 2192 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 2193 #define VMA_STATIC_VULKAN_FUNCTIONS 1 2205 #if VMA_USE_STL_CONTAINERS 2206 #define VMA_USE_STL_VECTOR 1 2207 #define VMA_USE_STL_UNORDERED_MAP 1 2208 #define VMA_USE_STL_LIST 1 2211 #if VMA_USE_STL_VECTOR 2215 #if VMA_USE_STL_UNORDERED_MAP 2216 #include <unordered_map> 2219 #if VMA_USE_STL_LIST 2228 #include <algorithm> 2232 #if !defined(_WIN32) && !defined(__APPLE__) 2238 #define VMA_NULL nullptr 2241 #if defined(__APPLE__) || defined(__ANDROID__) 2243 void *aligned_alloc(
size_t alignment,
size_t size)
2246 if(alignment <
sizeof(
void*))
2248 alignment =
sizeof(
void*);
2252 if(posix_memalign(&pointer, alignment, size) == 0)
// Normal assert to check for programmer's errors, especially in Debug configuration.
#ifndef VMA_ASSERT
   #ifdef _DEBUG
       #define VMA_ASSERT(expr)         assert(expr)
   #else
       #define VMA_ASSERT(expr)
   #endif
#endif

// Assert that will be called very often, like inside data structures e.g.
// operator[]. Making it non-empty can make the program slow.
#ifndef VMA_HEAVY_ASSERT
   #ifdef _DEBUG
       #define VMA_HEAVY_ASSERT(expr)   //VMA_ASSERT(expr)
   #else
       #define VMA_HEAVY_ASSERT(expr)
   #endif
#endif

#ifndef VMA_ALIGN_OF
   #define VMA_ALIGN_OF(type)       (__alignof(type))
#endif

#ifndef VMA_SYSTEM_ALIGNED_MALLOC
   #if defined(_WIN32)
       #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment)   (_aligned_malloc((size), (alignment)))
   #else
       // Note reversed argument order: aligned_alloc(alignment, size).
       #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment)   (aligned_alloc((alignment), (size) ))
   #endif
#endif

#ifndef VMA_SYSTEM_FREE
   #if defined(_WIN32)
       #define VMA_SYSTEM_FREE(ptr)   _aligned_free(ptr)
   #else
       #define VMA_SYSTEM_FREE(ptr)   free(ptr)
   #endif
#endif

#ifndef VMA_MIN
   #define VMA_MIN(v1, v2)    (std::min((v1), (v2)))
#endif

#ifndef VMA_MAX
   #define VMA_MAX(v1, v2)    (std::max((v1), (v2)))
#endif

#ifndef VMA_SWAP
   #define VMA_SWAP(v1, v2)   std::swap((v1), (v2))
#endif

#ifndef VMA_SORT
   #define VMA_SORT(beg, end, cmp)  std::sort(beg, end, cmp)
#endif

#ifndef VMA_DEBUG_LOG
   #define VMA_DEBUG_LOG(format, ...)
#endif

// Define this macro to 1 to enable functions: vmaBuildStatsString, vmaFreeStatsString.
#if VMA_STATS_STRING_ENABLED

// Writes decimal representation of num into outStr (buffer of strLen bytes).
static inline void VmaUint32ToStr(char* outStr, size_t strLen, uint32_t num)
{
    snprintf(outStr, strLen, "%u", static_cast<unsigned int>(num));
}
// Writes decimal representation of num into outStr (buffer of strLen bytes).
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}
// Writes implementation-defined (typically hex) representation of ptr into outStr.
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}

#endif // #if VMA_STATS_STRING_ENABLED
#ifndef VMA_MUTEX
    // Minimal mutex wrapper around std::mutex; user may redefine VMA_MUTEX to
    // plug in a custom synchronization primitive.
    class VmaMutex
    {
    public:
        VmaMutex() { }
        ~VmaMutex() { }
        void Lock() { m_Mutex.lock(); }
        void Unlock() { m_Mutex.unlock(); }
    private:
        std::mutex m_Mutex;
    };
    #define VMA_MUTEX VmaMutex
#endif

// Atomic 32-bit unsigned integer type; replaceable by the user.
#ifndef VMA_ATOMIC_UINT32
    #define VMA_ATOMIC_UINT32 std::atomic<uint32_t>
#endif

#ifndef VMA_BEST_FIT
    /**
    Main parameter for function assessing how good is a free suballocation for a
    new allocation request.

    - Set to 1 to use Best-Fit algorithm - prefer smaller blocks, as close to the
      size of requested allocations as possible.
    - Set to 0 to use Worst-Fit algorithm - prefer larger blocks, as large as possible.
    */
    #define VMA_BEST_FIT (1)
#endif

#ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY
    /// Every allocation will have its own memory block. Define to 1 for debugging purposes only.
    #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0)
#endif

#ifndef VMA_DEBUG_ALIGNMENT
    /// Minimum alignment of all suballocations, in bytes. Set to more than 1 for debugging only. Must be power of two.
    #define VMA_DEBUG_ALIGNMENT (1)
#endif

#ifndef VMA_DEBUG_MARGIN
    /// Minimum margin between suballocations, in bytes. Set nonzero for debugging only.
    #define VMA_DEBUG_MARGIN (0)
#endif

#ifndef VMA_DEBUG_GLOBAL_MUTEX
    /// Set to 1 to enable a single mutex protecting all entry calls to the library (debugging only).
    #define VMA_DEBUG_GLOBAL_MUTEX (0)
#endif

#ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY
    /// Minimum value for VkPhysicalDeviceLimits::bufferImageGranularity. More than 1 for debugging only. Must be power of two.
    #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1)
#endif

#ifndef VMA_SMALL_HEAP_MAX_SIZE
   /// Maximum size of a memory heap in Vulkan to consider it "small" (1 GiB).
   #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024)
#endif

#ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE
   /// Default size of a block allocated as single VkDeviceMemory from a "large" heap (256 MiB).
   #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024)
#endif

// Sentinel frame index stored in an allocation that has become lost.
static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
2437 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
2438 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (population count, SWAR algorithm).
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
// Aligns given value up to the nearest multiple of align.
// For example: VmaAlignUp(11, 8) = 16. Use types like uint32_t, uint64_t as T.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}
// Division with mathematical rounding to the nearest number.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
// Lomuto-style partition used by VmaQuickSort: the last element is the pivot.
// Returns iterator to the pivot's final position.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
2491 template<
typename Iterator,
typename Compare>
2492 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
2496 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
2497 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
2498 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
2502 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 2504 #endif // #ifndef VMA_SORT 2513 static inline bool VmaBlocksOnSamePage(
2514 VkDeviceSize resourceAOffset,
2515 VkDeviceSize resourceASize,
2516 VkDeviceSize resourceBOffset,
2517 VkDeviceSize pageSize)
2519 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
2520 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
2521 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
2522 VkDeviceSize resourceBStart = resourceBOffset;
2523 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
2524 return resourceAEndPage == resourceBStartPage;
// Type of a suballocation inside a memory block; used to detect
// bufferImageGranularity conflicts between neighboring suballocations.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
2544 static inline bool VmaIsBufferImageGranularityConflict(
2545 VmaSuballocationType suballocType1,
2546 VmaSuballocationType suballocType2)
2548 if(suballocType1 > suballocType2)
2550 VMA_SWAP(suballocType1, suballocType2);
2553 switch(suballocType1)
2555 case VMA_SUBALLOCATION_TYPE_FREE:
2557 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
2559 case VMA_SUBALLOCATION_TYPE_BUFFER:
2561 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2562 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2563 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
2565 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2566 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
2567 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2568 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
2570 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2571 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
2583 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
2584 m_pMutex(useMutex ? &mutex : VMA_NULL)
2601 VMA_MUTEX* m_pMutex;
2604 #if VMA_DEBUG_GLOBAL_MUTEX 2605 static VMA_MUTEX gDebugGlobalMutex;
2606 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 2608 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 2612 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Performs binary search over [beg, end) and returns iterator to the first
element that is NOT less than key, according to comparison cmp (i.e. the
lower bound). cmp(a, b) must return true if a is less than b.
Returns end if all elements are less than key.
*/
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
2645 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
2647 if((pAllocationCallbacks != VMA_NULL) &&
2648 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
2650 return (*pAllocationCallbacks->pfnAllocation)(
2651 pAllocationCallbacks->pUserData,
2654 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2658 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
2662 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
2664 if((pAllocationCallbacks != VMA_NULL) &&
2665 (pAllocationCallbacks->pfnFree != VMA_NULL))
2667 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
2671 VMA_SYSTEM_FREE(ptr);
2675 template<
typename T>
2676 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
2678 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
2681 template<
typename T>
2682 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
2684 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
2687 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 2689 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 2691 template<
typename T>
2692 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
2695 VmaFree(pAllocationCallbacks, ptr);
2698 template<
typename T>
2699 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
2703 for(
size_t i = count; i--; )
2707 VmaFree(pAllocationCallbacks, ptr);
2712 template<
typename T>
2713 class VmaStlAllocator
2716 const VkAllocationCallbacks*
const m_pCallbacks;
2717 typedef T value_type;
2719 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
2720 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
2722 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
2723 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
2725 template<
typename U>
2726 bool operator==(
const VmaStlAllocator<U>& rhs)
const 2728 return m_pCallbacks == rhs.m_pCallbacks;
2730 template<
typename U>
2731 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 2733 return m_pCallbacks != rhs.m_pCallbacks;
2736 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
2739 #if VMA_USE_STL_VECTOR 2741 #define VmaVector std::vector 2743 template<
typename T,
typename allocatorT>
2744 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
2746 vec.insert(vec.begin() + index, item);
2749 template<
typename T,
typename allocatorT>
2750 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
2752 vec.erase(vec.begin() + index);
2755 #else // #if VMA_USE_STL_VECTOR 2760 template<
typename T,
typename AllocatorT>
2764 typedef T value_type;
2766 VmaVector(
const AllocatorT& allocator) :
2767 m_Allocator(allocator),
2774 VmaVector(
size_t count,
const AllocatorT& allocator) :
2775 m_Allocator(allocator),
2776 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
2782 VmaVector(
const VmaVector<T, AllocatorT>& src) :
2783 m_Allocator(src.m_Allocator),
2784 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
2785 m_Count(src.m_Count),
2786 m_Capacity(src.m_Count)
2790 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
2796 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2799 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
2803 resize(rhs.m_Count);
2806 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
2812 bool empty()
const {
return m_Count == 0; }
2813 size_t size()
const {
return m_Count; }
2814 T* data() {
return m_pArray; }
2815 const T* data()
const {
return m_pArray; }
2817 T& operator[](
size_t index)
2819 VMA_HEAVY_ASSERT(index < m_Count);
2820 return m_pArray[index];
2822 const T& operator[](
size_t index)
const 2824 VMA_HEAVY_ASSERT(index < m_Count);
2825 return m_pArray[index];
2830 VMA_HEAVY_ASSERT(m_Count > 0);
2833 const T& front()
const 2835 VMA_HEAVY_ASSERT(m_Count > 0);
2840 VMA_HEAVY_ASSERT(m_Count > 0);
2841 return m_pArray[m_Count - 1];
2843 const T& back()
const 2845 VMA_HEAVY_ASSERT(m_Count > 0);
2846 return m_pArray[m_Count - 1];
2849 void reserve(
size_t newCapacity,
bool freeMemory =
false)
2851 newCapacity = VMA_MAX(newCapacity, m_Count);
2853 if((newCapacity < m_Capacity) && !freeMemory)
2855 newCapacity = m_Capacity;
2858 if(newCapacity != m_Capacity)
2860 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
2863 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
2865 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2866 m_Capacity = newCapacity;
2867 m_pArray = newArray;
2871 void resize(
size_t newCount,
bool freeMemory =
false)
2873 size_t newCapacity = m_Capacity;
2874 if(newCount > m_Capacity)
2876 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
2880 newCapacity = newCount;
2883 if(newCapacity != m_Capacity)
2885 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
2886 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
2887 if(elementsToCopy != 0)
2889 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
2891 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2892 m_Capacity = newCapacity;
2893 m_pArray = newArray;
2899 void clear(
bool freeMemory =
false)
2901 resize(0, freeMemory);
2904 void insert(
size_t index,
const T& src)
2906 VMA_HEAVY_ASSERT(index <= m_Count);
2907 const size_t oldCount = size();
2908 resize(oldCount + 1);
2909 if(index < oldCount)
2911 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
2913 m_pArray[index] = src;
2916 void remove(
size_t index)
2918 VMA_HEAVY_ASSERT(index < m_Count);
2919 const size_t oldCount = size();
2920 if(index < oldCount - 1)
2922 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
2924 resize(oldCount - 1);
2927 void push_back(
const T& src)
2929 const size_t newIndex = size();
2930 resize(newIndex + 1);
2931 m_pArray[newIndex] = src;
2936 VMA_HEAVY_ASSERT(m_Count > 0);
2940 void push_front(
const T& src)
2947 VMA_HEAVY_ASSERT(m_Count > 0);
2951 typedef T* iterator;
2953 iterator begin() {
return m_pArray; }
2954 iterator end() {
return m_pArray + m_Count; }
2957 AllocatorT m_Allocator;
2963 template<
typename T,
typename allocatorT>
2964 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2966 vec.insert(index, item);
2969 template<
typename T,
typename allocatorT>
2970 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2975 #endif // #if VMA_USE_STL_VECTOR 2977 template<
typename CmpLess,
typename VectorT>
2978 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2980 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2982 vector.data() + vector.size(),
2984 CmpLess()) - vector.data();
2985 VmaVectorInsert(vector, indexToInsert, value);
2986 return indexToInsert;
// Removes the first element equal to value (per CmpLess equivalence) from a
// sorted vector. Returns true if an element was found and removed.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    // Equivalent iff neither element compares less than the other.
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
// Finds the index of the first element equal to value (per CmpLess
// equivalence) in a sorted vector; returns vector.size() if absent.
// NOTE(review): original compared the iterator against vector.size() (iterator
// vs size_t) - fixed to compare against the end iterator.
template<typename CmpLess, typename VectorT>
size_t VmaVectorFindSorted(const VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.data(),
        vector.data() + vector.size(),
        value,
        comparator);
    if(it != vector.data() + vector.size() && !comparator(*it, value) && !comparator(value, *it))
    {
        return it - vector.begin();
    }
    else
    {
        return vector.size();
    }
}
3034 template<
typename T>
3035 class VmaPoolAllocator
3038 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
3039 ~VmaPoolAllocator();
3047 uint32_t NextFreeIndex;
3054 uint32_t FirstFreeIndex;
3057 const VkAllocationCallbacks* m_pAllocationCallbacks;
3058 size_t m_ItemsPerBlock;
3059 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
3061 ItemBlock& CreateNewBlock();
3064 template<
typename T>
3065 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
3066 m_pAllocationCallbacks(pAllocationCallbacks),
3067 m_ItemsPerBlock(itemsPerBlock),
3068 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
3070 VMA_ASSERT(itemsPerBlock > 0);
3073 template<
typename T>
3074 VmaPoolAllocator<T>::~VmaPoolAllocator()
3079 template<
typename T>
3080 void VmaPoolAllocator<T>::Clear()
3082 for(
size_t i = m_ItemBlocks.size(); i--; )
3083 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
3084 m_ItemBlocks.clear();
3087 template<
typename T>
3088 T* VmaPoolAllocator<T>::Alloc()
3090 for(
size_t i = m_ItemBlocks.size(); i--; )
3092 ItemBlock& block = m_ItemBlocks[i];
3094 if(block.FirstFreeIndex != UINT32_MAX)
3096 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
3097 block.FirstFreeIndex = pItem->NextFreeIndex;
3098 return &pItem->Value;
3103 ItemBlock& newBlock = CreateNewBlock();
3104 Item*
const pItem = &newBlock.pItems[0];
3105 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
3106 return &pItem->Value;
3109 template<
typename T>
3110 void VmaPoolAllocator<T>::Free(T* ptr)
3113 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
3115 ItemBlock& block = m_ItemBlocks[i];
3119 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
3122 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
3124 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
3125 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
3126 block.FirstFreeIndex = index;
3130 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
3133 template<
typename T>
3134 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
3136 ItemBlock newBlock = {
3137 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
3139 m_ItemBlocks.push_back(newBlock);
3142 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
3143 newBlock.pItems[i].NextFreeIndex = i + 1;
3144 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
3145 return m_ItemBlocks.back();
3151 #if VMA_USE_STL_LIST 3153 #define VmaList std::list 3155 #else // #if VMA_USE_STL_LIST 3157 template<
typename T>
3166 template<
typename T>
3170 typedef VmaListItem<T> ItemType;
3172 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
3176 size_t GetCount()
const {
return m_Count; }
3177 bool IsEmpty()
const {
return m_Count == 0; }
3179 ItemType* Front() {
return m_pFront; }
3180 const ItemType* Front()
const {
return m_pFront; }
3181 ItemType* Back() {
return m_pBack; }
3182 const ItemType* Back()
const {
return m_pBack; }
3184 ItemType* PushBack();
3185 ItemType* PushFront();
3186 ItemType* PushBack(
const T& value);
3187 ItemType* PushFront(
const T& value);
3192 ItemType* InsertBefore(ItemType* pItem);
3194 ItemType* InsertAfter(ItemType* pItem);
3196 ItemType* InsertBefore(ItemType* pItem,
const T& value);
3197 ItemType* InsertAfter(ItemType* pItem,
const T& value);
3199 void Remove(ItemType* pItem);
3202 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
3203 VmaPoolAllocator<ItemType> m_ItemAllocator;
3209 VmaRawList(
const VmaRawList<T>& src);
3210 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
3213 template<
typename T>
3214 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
3215 m_pAllocationCallbacks(pAllocationCallbacks),
3216 m_ItemAllocator(pAllocationCallbacks, 128),
3223 template<
typename T>
3224 VmaRawList<T>::~VmaRawList()
3230 template<
typename T>
3231 void VmaRawList<T>::Clear()
3233 if(IsEmpty() ==
false)
3235 ItemType* pItem = m_pBack;
3236 while(pItem != VMA_NULL)
3238 ItemType*
const pPrevItem = pItem->pPrev;
3239 m_ItemAllocator.Free(pItem);
3242 m_pFront = VMA_NULL;
3248 template<
typename T>
3249 VmaListItem<T>* VmaRawList<T>::PushBack()
3251 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3252 pNewItem->pNext = VMA_NULL;
3255 pNewItem->pPrev = VMA_NULL;
3256 m_pFront = pNewItem;
3262 pNewItem->pPrev = m_pBack;
3263 m_pBack->pNext = pNewItem;
3270 template<
typename T>
3271 VmaListItem<T>* VmaRawList<T>::PushFront()
3273 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3274 pNewItem->pPrev = VMA_NULL;
3277 pNewItem->pNext = VMA_NULL;
3278 m_pFront = pNewItem;
3284 pNewItem->pNext = m_pFront;
3285 m_pFront->pPrev = pNewItem;
3286 m_pFront = pNewItem;
3292 template<
typename T>
3293 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
3295 ItemType*
const pNewItem = PushBack();
3296 pNewItem->Value = value;
3300 template<
typename T>
3301 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
3303 ItemType*
const pNewItem = PushFront();
3304 pNewItem->Value = value;
3308 template<
typename T>
3309 void VmaRawList<T>::PopBack()
3311 VMA_HEAVY_ASSERT(m_Count > 0);
3312 ItemType*
const pBackItem = m_pBack;
3313 ItemType*
const pPrevItem = pBackItem->pPrev;
3314 if(pPrevItem != VMA_NULL)
3316 pPrevItem->pNext = VMA_NULL;
3318 m_pBack = pPrevItem;
3319 m_ItemAllocator.Free(pBackItem);
3323 template<
typename T>
3324 void VmaRawList<T>::PopFront()
3326 VMA_HEAVY_ASSERT(m_Count > 0);
3327 ItemType*
const pFrontItem = m_pFront;
3328 ItemType*
const pNextItem = pFrontItem->pNext;
3329 if(pNextItem != VMA_NULL)
3331 pNextItem->pPrev = VMA_NULL;
3333 m_pFront = pNextItem;
3334 m_ItemAllocator.Free(pFrontItem);
3338 template<
typename T>
3339 void VmaRawList<T>::Remove(ItemType* pItem)
3341 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
3342 VMA_HEAVY_ASSERT(m_Count > 0);
3344 if(pItem->pPrev != VMA_NULL)
3346 pItem->pPrev->pNext = pItem->pNext;
3350 VMA_HEAVY_ASSERT(m_pFront == pItem);
3351 m_pFront = pItem->pNext;
3354 if(pItem->pNext != VMA_NULL)
3356 pItem->pNext->pPrev = pItem->pPrev;
3360 VMA_HEAVY_ASSERT(m_pBack == pItem);
3361 m_pBack = pItem->pPrev;
3364 m_ItemAllocator.Free(pItem);
3368 template<
typename T>
3369 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
3371 if(pItem != VMA_NULL)
3373 ItemType*
const prevItem = pItem->pPrev;
3374 ItemType*
const newItem = m_ItemAllocator.Alloc();
3375 newItem->pPrev = prevItem;
3376 newItem->pNext = pItem;
3377 pItem->pPrev = newItem;
3378 if(prevItem != VMA_NULL)
3380 prevItem->pNext = newItem;
3384 VMA_HEAVY_ASSERT(m_pFront == pItem);
3394 template<
typename T>
3395 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
3397 if(pItem != VMA_NULL)
3399 ItemType*
const nextItem = pItem->pNext;
3400 ItemType*
const newItem = m_ItemAllocator.Alloc();
3401 newItem->pNext = nextItem;
3402 newItem->pPrev = pItem;
3403 pItem->pNext = newItem;
3404 if(nextItem != VMA_NULL)
3406 nextItem->pPrev = newItem;
3410 VMA_HEAVY_ASSERT(m_pBack == pItem);
3420 template<
typename T>
3421 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
3423 ItemType*
const newItem = InsertBefore(pItem);
3424 newItem->Value = value;
3428 template<
typename T>
3429 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
3431 ItemType*
const newItem = InsertAfter(pItem);
3432 newItem->Value = value;
3436 template<
typename T,
typename AllocatorT>
3449 T& operator*()
const 3451 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3452 return m_pItem->Value;
3454 T* operator->()
const 3456 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3457 return &m_pItem->Value;
3460 iterator& operator++()
3462 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3463 m_pItem = m_pItem->pNext;
3466 iterator& operator--()
3468 if(m_pItem != VMA_NULL)
3470 m_pItem = m_pItem->pPrev;
3474 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3475 m_pItem = m_pList->Back();
3480 iterator operator++(
int)
3482 iterator result = *
this;
3486 iterator operator--(
int)
3488 iterator result = *
this;
3493 bool operator==(
const iterator& rhs)
const 3495 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3496 return m_pItem == rhs.m_pItem;
3498 bool operator!=(
const iterator& rhs)
const 3500 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3501 return m_pItem != rhs.m_pItem;
3505 VmaRawList<T>* m_pList;
3506 VmaListItem<T>* m_pItem;
3508 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
3514 friend class VmaList<T, AllocatorT>;
3517 class const_iterator
3526 const_iterator(
const iterator& src) :
3527 m_pList(src.m_pList),
3528 m_pItem(src.m_pItem)
3532 const T& operator*()
const 3534 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3535 return m_pItem->Value;
3537 const T* operator->()
const 3539 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3540 return &m_pItem->Value;
3543 const_iterator& operator++()
3545 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3546 m_pItem = m_pItem->pNext;
3549 const_iterator& operator--()
3551 if(m_pItem != VMA_NULL)
3553 m_pItem = m_pItem->pPrev;
3557 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3558 m_pItem = m_pList->Back();
3563 const_iterator operator++(
int)
3565 const_iterator result = *
this;
3569 const_iterator operator--(
int)
3571 const_iterator result = *
this;
3576 bool operator==(
const const_iterator& rhs)
const 3578 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3579 return m_pItem == rhs.m_pItem;
3581 bool operator!=(
const const_iterator& rhs)
const 3583 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3584 return m_pItem != rhs.m_pItem;
3588 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
3594 const VmaRawList<T>* m_pList;
3595 const VmaListItem<T>* m_pItem;
3597 friend class VmaList<T, AllocatorT>;
3600 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
3602 bool empty()
const {
return m_RawList.IsEmpty(); }
3603 size_t size()
const {
return m_RawList.GetCount(); }
3605 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
3606 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
3608 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
3609 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
3611 void clear() { m_RawList.Clear(); }
3612 void push_back(
const T& value) { m_RawList.PushBack(value); }
3613 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
3614 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
3617 VmaRawList<T> m_RawList;
3620 #endif // #if VMA_USE_STL_LIST 3628 #if VMA_USE_STL_UNORDERED_MAP 3630 #define VmaPair std::pair 3632 #define VMA_MAP_TYPE(KeyT, ValueT) \ 3633 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 3635 #else // #if VMA_USE_STL_UNORDERED_MAP 3637 template<
typename T1,
typename T2>
3643 VmaPair() : first(), second() { }
3644 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
3650 template<
typename KeyT,
typename ValueT>
3654 typedef VmaPair<KeyT, ValueT> PairType;
3655 typedef PairType* iterator;
3657 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
3659 iterator begin() {
return m_Vector.begin(); }
3660 iterator end() {
return m_Vector.end(); }
3662 void insert(
const PairType& pair);
3663 iterator find(
const KeyT& key);
3664 void erase(iterator it);
3667 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
3670 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 3672 template<
typename FirstT,
typename SecondT>
3673 struct VmaPairFirstLess
3675 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 3677 return lhs.first < rhs.first;
3679 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 3681 return lhs.first < rhsFirst;
3685 template<
typename KeyT,
typename ValueT>
3686 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
3688 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3690 m_Vector.data() + m_Vector.size(),
3692 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
3693 VmaVectorInsert(m_Vector, indexToInsert, pair);
3696 template<
typename KeyT,
typename ValueT>
3697 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
3699 PairType* it = VmaBinaryFindFirstNotLess(
3701 m_Vector.data() + m_Vector.size(),
3703 VmaPairFirstLess<KeyT, ValueT>());
3704 if((it != m_Vector.end()) && (it->first == key))
3710 return m_Vector.end();
3714 template<
typename KeyT,
typename ValueT>
3715 void VmaMap<KeyT, ValueT>::erase(iterator it)
3717 VmaVectorRemove(m_Vector, it - m_Vector.begin());
3720 #endif // #if VMA_USE_STL_UNORDERED_MAP 3726 class VmaDeviceMemoryBlock;
3728 struct VmaAllocation_T
3731 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
3735 FLAG_USER_DATA_STRING = 0x01,
3739 enum ALLOCATION_TYPE
3741 ALLOCATION_TYPE_NONE,
3742 ALLOCATION_TYPE_BLOCK,
3743 ALLOCATION_TYPE_DEDICATED,
3746 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
3749 m_pUserData(VMA_NULL),
3750 m_LastUseFrameIndex(currentFrameIndex),
3751 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
3752 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
3754 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
3760 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
3763 VMA_ASSERT(m_pUserData == VMA_NULL);
3766 void InitBlockAllocation(
3768 VmaDeviceMemoryBlock* block,
3769 VkDeviceSize offset,
3770 VkDeviceSize alignment,
3772 VmaSuballocationType suballocationType,
3776 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3777 VMA_ASSERT(block != VMA_NULL);
3778 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3779 m_Alignment = alignment;
3781 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3782 m_SuballocationType = (uint8_t)suballocationType;
3783 m_BlockAllocation.m_hPool = hPool;
3784 m_BlockAllocation.m_Block = block;
3785 m_BlockAllocation.m_Offset = offset;
3786 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
3791 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3792 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
3793 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3794 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
3795 m_BlockAllocation.m_Block = VMA_NULL;
3796 m_BlockAllocation.m_Offset = 0;
3797 m_BlockAllocation.m_CanBecomeLost =
true;
3800 void ChangeBlockAllocation(
3802 VmaDeviceMemoryBlock* block,
3803 VkDeviceSize offset);
3806 void InitDedicatedAllocation(
3807 uint32_t memoryTypeIndex,
3808 VkDeviceMemory hMemory,
3809 VmaSuballocationType suballocationType,
3813 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3814 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
3815 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
3818 m_SuballocationType = (uint8_t)suballocationType;
3819 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3820 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
3821 m_DedicatedAllocation.m_hMemory = hMemory;
3822 m_DedicatedAllocation.m_pMappedData = pMappedData;
// --- VmaAllocation_T trivial accessors and method declarations ---
// All getters are const and read plain fields; GetLastUseFrameIndex /
// CompareExchangeLastUseFrameIndex go through the atomic frame-index member.
3825 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
3826 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
3827 VkDeviceSize GetSize()
const {
return m_Size; }
3828 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
3829 void* GetUserData()
const {
return m_pUserData; }
3830 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
3831 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
// Only valid for block allocations (asserted).
3833 VmaDeviceMemoryBlock* GetBlock()
const 3835 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3836 return m_BlockAllocation.m_Block;
3838 VkDeviceSize GetOffset()
const;
3839 VkDeviceMemory GetMemory()
const;
3840 uint32_t GetMemoryTypeIndex()
const;
3841 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
3842 void* GetMappedData()
const;
3843 bool CanBecomeLost()
const;
3846 uint32_t GetLastUseFrameIndex()
const 3848 return m_LastUseFrameIndex.load();
// compare_exchange_weak: callers are expected to retry in a loop.
3850 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
3852 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
3862 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Fills outInfo for a dedicated allocation; body largely elided in this view.
3864 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
3866 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
3877 void BlockAllocMap();
3878 void BlockAllocUnmap();
3879 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
// --- VmaAllocation_T data members ---
// BlockAllocation and DedicatedAllocation presumably live in a union
// discriminated by m_Type — TODO confirm; the union keyword is elided here.
3883 VkDeviceSize m_Alignment;
3884 VkDeviceSize m_Size;
3886 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
3888 uint8_t m_SuballocationType;
// State for an allocation placed inside a larger VkDeviceMemory block.
3895 struct BlockAllocation
3898 VmaDeviceMemoryBlock* m_Block;
3899 VkDeviceSize m_Offset;
3900 bool m_CanBecomeLost;
// State for an allocation that owns its own VkDeviceMemory.
3904 struct DedicatedAllocation
3906 uint32_t m_MemoryTypeIndex;
3907 VkDeviceMemory m_hMemory;
3908 void* m_pMappedData;
3914 BlockAllocation m_BlockAllocation;
3916 DedicatedAllocation m_DedicatedAllocation;
// One contiguous range inside a memory block: either FREE or owned by an
// allocation. Blocks keep these in an ordered VmaSuballocationList.
3926 struct VmaSuballocation
3928 VkDeviceSize offset;
3931 VmaSuballocationType type;
3934 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Heuristic penalty (1 MiB) added per allocation that would have to be
// made "lost" to satisfy a request — see VmaAllocationRequest::CalcCost.
3937 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// Candidate placement for a new allocation, plus the cost of making room.
3952 struct VmaAllocationRequest
3954 VkDeviceSize offset;
3955 VkDeviceSize sumFreeSize;
3956 VkDeviceSize sumItemSize;
3957 VmaSuballocationList::iterator item;
3958 size_t itemsToMakeLostCount;
// Lower cost = better candidate; lost allocations are heavily penalized.
3960 VkDeviceSize CalcCost()
const 3962 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// Bookkeeping for a single VkDeviceMemory block: an ordered list of
// suballocations plus a by-size index of free ranges for best-fit search.
3970 class VmaBlockMetadata
3974 ~VmaBlockMetadata();
3975 void Init(VkDeviceSize size);
// Validates all internal invariants; returns false on corruption.
3978 bool Validate()
const;
3979 VkDeviceSize GetSize()
const {
return m_Size; }
3980 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
3981 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
3982 VkDeviceSize GetUnusedRangeSizeMax()
const;
3984 bool IsEmpty()
const;
3986 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
3989 #if VMA_STATS_STRING_ENABLED 3990 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// Shortcut used when the block is known empty.
3994 void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
// Searches for space; may propose making other allocations lost when
// canMakeOtherLost is true. Returns true if a request was produced.
3999 bool CreateAllocationRequest(
4000 uint32_t currentFrameIndex,
4001 uint32_t frameInUseCount,
4002 VkDeviceSize bufferImageGranularity,
4003 VkDeviceSize allocSize,
4004 VkDeviceSize allocAlignment,
4005 VmaSuballocationType allocType,
4006 bool canMakeOtherLost,
4007 VmaAllocationRequest* pAllocationRequest);
4009 bool MakeRequestedAllocationsLost(
4010 uint32_t currentFrameIndex,
4011 uint32_t frameInUseCount,
4012 VmaAllocationRequest* pAllocationRequest);
4014 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
4018 const VmaAllocationRequest& request,
4019 VmaSuballocationType type,
4020 VkDeviceSize allocSize,
4025 void FreeAtOffset(VkDeviceSize offset);
4028 VkDeviceSize m_Size;
4029 uint32_t m_FreeCount;
4030 VkDeviceSize m_SumFreeSize;
4031 VmaSuballocationList m_Suballocations;
// Free ranges sorted ascending by size (only those above a minimum size
// threshold are registered — see VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER).
4034 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
4036 bool ValidateFreeSuballocationList()
const;
// Core fit test for one candidate suballocation; outputs offset and the
// cost (items to make lost) via pointer parameters.
4040 bool CheckAllocation(
4041 uint32_t currentFrameIndex,
4042 uint32_t frameInUseCount,
4043 VkDeviceSize bufferImageGranularity,
4044 VkDeviceSize allocSize,
4045 VkDeviceSize allocAlignment,
4046 VmaSuballocationType allocType,
4047 VmaSuballocationList::const_iterator suballocItem,
4048 bool canMakeOtherLost,
4049 VkDeviceSize* pOffset,
4050 size_t* itemsToMakeLostCount,
4051 VkDeviceSize* pSumFreeSize,
4052 VkDeviceSize* pSumItemSize)
const;
4054 void MergeFreeWithNext(VmaSuballocationList::iterator item);
4058 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
4061 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
4064 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// Wraps one VkDeviceMemory handle plus its VmaBlockMetadata and a
// ref-counted CPU mapping shared by all suballocations in the block.
4073 class VmaDeviceMemoryBlock
4076 VmaBlockMetadata m_Metadata;
// Destruction requires the block to be unmapped and already freed.
4080 ~VmaDeviceMemoryBlock()
4082 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
4083 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
4088 uint32_t newMemoryTypeIndex,
4089 VkDeviceMemory newMemory,
4090 VkDeviceSize newSize);
4094 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
4095 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
4096 void* GetMappedData()
const {
return m_pMappedData; }
4099 bool Validate()
const;
// Map increments m_MapCount by `count`; actual vkMapMemory happens only
// on the 0 -> nonzero transition (implementation elided in this view).
4102 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
4105 VkResult BindBufferMemory(
4109 VkResult BindImageMemory(
4115 uint32_t m_MemoryTypeIndex;
4116 VkDeviceMemory m_hMemory;
4121 uint32_t m_MapCount;
4122 void* m_pMappedData;
// Orders raw pointers; used for sorted pointer containers.
4125 struct VmaPointerLess
4127 bool operator()(
const void* lhs,
const void* rhs)
const 4133 class VmaDefragmentator;
// A growable sequence of VmaDeviceMemoryBlock for one memory type —
// the backing store for both default pools and custom VmaPool objects.
4141 struct VmaBlockVector
4145 uint32_t memoryTypeIndex,
4146 VkDeviceSize preferredBlockSize,
4147 size_t minBlockCount,
4148 size_t maxBlockCount,
4149 VkDeviceSize bufferImageGranularity,
4150 uint32_t frameInUseCount,
4154 VkResult CreateMinBlocks();
4156 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
4157 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
4158 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
4159 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
4163 bool IsEmpty()
const {
return m_Blocks.empty(); }
// Allocate/Free entry points (signatures partially elided in this view).
4167 uint32_t currentFrameIndex,
4168 const VkMemoryRequirements& vkMemReq,
4170 VmaSuballocationType suballocType,
4179 #if VMA_STATS_STRING_ENABLED 4180 void PrintDetailedMap(
class VmaJsonWriter& json);
4183 void MakePoolAllocationsLost(
4184 uint32_t currentFrameIndex,
4185 size_t* pLostAllocationCount);
4187 VmaDefragmentator* EnsureDefragmentator(
4189 uint32_t currentFrameIndex);
4191 VkResult Defragment(
4193 VkDeviceSize& maxBytesToMove,
4194 uint32_t& maxAllocationsToMove);
4196 void DestroyDefragmentator();
4199 friend class VmaDefragmentator;
4202 const uint32_t m_MemoryTypeIndex;
4203 const VkDeviceSize m_PreferredBlockSize;
4204 const size_t m_MinBlockCount;
4205 const size_t m_MaxBlockCount;
4206 const VkDeviceSize m_BufferImageGranularity;
4207 const uint32_t m_FrameInUseCount;
4208 const bool m_IsCustomPool;
4211 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
// True while at least one block is fully empty — avoids keeping more
// than one empty block alive at a time.
4215 bool m_HasEmptyBlock;
4216 VmaDefragmentator* m_pDefragmentator;
4218 size_t CalcMaxBlockSize()
const;
4221 void Remove(VmaDeviceMemoryBlock* pBlock);
// Keeps m_Blocks roughly sorted without a full sort on every change.
4225 void IncrementallySortBlocks();
4227 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// NOTE(review): the following two members belong to VmaPool_T; its class
// header is elided from this extraction.
4233 VmaBlockVector m_BlockVector;
4241 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
// Performs compaction over one VmaBlockVector: moves allocations from
// sparsely-used blocks into denser ones, within byte/count budgets.
4243 #if VMA_STATS_STRING_ENABLED 4248 class VmaDefragmentator
4251 VmaBlockVector*
const m_pBlockVector;
4252 uint32_t m_CurrentFrameIndex;
4253 VkDeviceSize m_BytesMoved;
4254 uint32_t m_AllocationsMoved;
// One candidate allocation plus the caller's "was it moved?" flag.
4256 struct AllocationInfo
4259 VkBool32* m_pChanged;
4262 m_hAllocation(VK_NULL_HANDLE),
4263 m_pChanged(VMA_NULL)
// Sort comparator: biggest allocations first.
4268 struct AllocationInfoSizeGreater
4270 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 4272 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
4277 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
// Per-block working state during a defragmentation pass.
4281 VmaDeviceMemoryBlock* m_pBlock;
4282 bool m_HasNonMovableAllocations;
4283 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
4285 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
4287 m_HasNonMovableAllocations(true),
4288 m_Allocations(pAllocationCallbacks),
4289 m_pMappedDataForDefragmentation(VMA_NULL)
// A block has non-movable allocations iff it holds allocations that were
// not registered with AddAllocation.
4293 void CalcHasNonMovableAllocations()
4295 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
4296 const size_t defragmentAllocCount = m_Allocations.size();
4297 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
// (sic) "Descecnding" — upstream typo preserved; renaming would be ABI-safe
// but is out of scope for a documentation pass.
4300 void SortAllocationsBySizeDescecnding()
4302 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
4305 VkResult EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData);
4310 void* m_pMappedDataForDefragmentation;
// Heterogeneous comparators for binary search over BlockInfo* by block ptr.
4313 struct BlockPointerLess
4315 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 4317 return pLhsBlockInfo->m_pBlock < pRhsBlock;
4319 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 4321 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Move-destination ordering: blocks with only movable allocations and
// less free space are preferred destinations.
4327 struct BlockInfoCompareMoveDestination
4329 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 4331 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
4335 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
4339 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
4347 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
4348 BlockInfoVector m_Blocks;
4350 VkResult DefragmentRound(
4351 VkDeviceSize maxBytesToMove,
4352 uint32_t maxAllocationsToMove);
4354 static bool MoveMakesSense(
4355 size_t dstBlockIndex, VkDeviceSize dstOffset,
4356 size_t srcBlockIndex, VkDeviceSize srcOffset);
4361 VmaBlockVector* pBlockVector,
4362 uint32_t currentFrameIndex);
4364 ~VmaDefragmentator();
4366 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
4367 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
4369 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
4371 VkResult Defragment(
4372 VkDeviceSize maxBytesToMove,
4373 uint32_t maxAllocationsToMove);
// The allocator object behind the public VmaAllocator handle: owns per-
// memory-type block vectors, dedicated-allocation registries, and pools.
4377 struct VmaAllocator_T
4380 bool m_UseKhrDedicatedAllocation;
4382 bool m_AllocationCallbacksSpecified;
4383 VkAllocationCallbacks m_AllocationCallbacks;
// Optional per-heap budget caps, guarded by its own mutex.
4387 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
4388 VMA_MUTEX m_HeapSizeLimitMutex;
4390 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
4391 VkPhysicalDeviceMemoryProperties m_MemProps;
// One default block vector per memory type.
4394 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
// Dedicated allocations tracked per memory type, each list with its mutex.
4397 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
4398 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
4399 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns user callbacks only if the user actually supplied them.
4404 const VkAllocationCallbacks* GetAllocationCallbacks()
const 4406 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
4410 return m_VulkanFunctions;
// Effective granularity: max of the debug minimum and the device limit
// (the VMA_MAX call is elided between these lines).
4413 VkDeviceSize GetBufferImageGranularity()
const 4416 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
4417 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
4420 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
4421 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
4423 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 4425 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
4426 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// Queries requirements, including VK_KHR_dedicated_allocation hints.
4429 void GetBufferMemoryRequirements(
4431 VkMemoryRequirements& memReq,
4432 bool& requiresDedicatedAllocation,
4433 bool& prefersDedicatedAllocation)
const;
4434 void GetImageMemoryRequirements(
4436 VkMemoryRequirements& memReq,
4437 bool& requiresDedicatedAllocation,
4438 bool& prefersDedicatedAllocation)
const;
// Main allocation entry point; dispatches to pool / block / dedicated paths.
4441 VkResult AllocateMemory(
4442 const VkMemoryRequirements& vkMemReq,
4443 bool requiresDedicatedAllocation,
4444 bool prefersDedicatedAllocation,
4445 VkBuffer dedicatedBuffer,
4446 VkImage dedicatedImage,
4448 VmaSuballocationType suballocType,
4454 void CalculateStats(
VmaStats* pStats);
4456 #if VMA_STATS_STRING_ENABLED 4457 void PrintDetailedMap(
class VmaJsonWriter& json);
4460 VkResult Defragment(
4462 size_t allocationCount,
4463 VkBool32* pAllocationsChanged,
4471 void DestroyPool(
VmaPool pool);
4474 void SetCurrentFrameIndex(uint32_t frameIndex);
4476 void MakePoolAllocationsLost(
4478 size_t* pLostAllocationCount);
// Raw vkAllocateMemory/vkFreeMemory wrappers that also enforce heap limits
// and invoke user device-memory callbacks.
4482 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
4483 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
4488 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
4489 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
4492 VkDeviceSize m_PreferredLargeHeapBlockSize;
4494 VkPhysicalDevice m_PhysicalDevice;
4495 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
4497 VMA_MUTEX m_PoolsMutex;
4499 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
4505 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
4507 VkResult AllocateMemoryOfType(
4508 const VkMemoryRequirements& vkMemReq,
4509 bool dedicatedAllocation,
4510 VkBuffer dedicatedBuffer,
4511 VkImage dedicatedImage,
4513 uint32_t memTypeIndex,
4514 VmaSuballocationType suballocType,
4518 VkResult AllocateDedicatedMemory(
4520 VmaSuballocationType suballocType,
4521 uint32_t memTypeIndex,
4523 bool isUserDataString,
4525 VkBuffer dedicatedBuffer,
4526 VkImage dedicatedImage,
// Convenience overloads that route allocation through the allocator's
// VkAllocationCallbacks (forwarding to the callback-based VmaMalloc/VmaFree).
4536 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
4538 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
4541 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
4543 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Typed single-object allocation (vma_new-style helper).
4546 template<
typename T>
4549 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
// Typed array allocation.
// NOTE(review): sizeof(T) * count has no overflow check upstream.
4552 template<
typename T>
4553 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
4555 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// Destroys *ptr then frees it (destructor call elided in this view).
4558 template<
typename T>
4559 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
4564 VmaFree(hAllocator, ptr);
// Destroys count elements in reverse order, then frees the array.
4568 template<
typename T>
4569 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
4573 for(
size_t i = count; i--; )
4575 VmaFree(hAllocator, ptr);
// Minimal append-only string builder over a VmaVector<char>; the buffer is
// NOT NUL-terminated — pair GetData() with GetLength().
4582 #if VMA_STATS_STRING_ENABLED 4584 class VmaStringBuilder
4587 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
4588 size_t GetLength()
const {
return m_Data.size(); }
4589 const char* GetData()
const {
return m_Data.data(); }
4591 void Add(
char ch) { m_Data.push_back(ch); }
4592 void Add(
const char* pStr);
4593 void AddNewLine() { Add(
'\n'); }
4594 void AddNumber(uint32_t num);
4595 void AddNumber(uint64_t num);
4596 void AddPointer(
const void* ptr);
4599 VmaVector< char, VmaStlAllocator<char> > m_Data;
// Appends a C string by resize + memcpy (no terminator copied).
4602 void VmaStringBuilder::Add(
const char* pStr)
4604 const size_t strLen = strlen(pStr);
4607 const size_t oldCount = m_Data.size();
4608 m_Data.resize(oldCount + strLen);
4609 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Number/pointer formatting into a local buffer (buffer declarations and
// the trailing Add(buf) calls are elided in this view).
4613 void VmaStringBuilder::AddNumber(uint32_t num)
4616 VmaUint32ToStr(buf,
sizeof(buf), num);
4620 void VmaStringBuilder::AddNumber(uint64_t num)
4623 VmaUint64ToStr(buf,
sizeof(buf), num);
4627 void VmaStringBuilder::AddPointer(
const void* ptr)
4630 VmaPtrToStr(buf,
sizeof(buf), ptr);
// Streaming JSON writer used by vmaBuildStatsString: a stack of open
// collections tracks nesting, value counts, and single-line mode.
4634 #endif // #if VMA_STATS_STRING_ENABLED 4639 #if VMA_STATS_STRING_ENABLED 4644 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
4647 void BeginObject(
bool singleLine =
false);
4650 void BeginArray(
bool singleLine =
false);
// WriteString writes a complete quoted string; Begin/Continue/EndString
// allow piecewise composition of one string value.
4653 void WriteString(
const char* pStr);
4654 void BeginString(
const char* pStr = VMA_NULL);
4655 void ContinueString(
const char* pStr);
4656 void ContinueString(uint32_t n);
4657 void ContinueString(uint64_t n);
4658 void ContinueString_Pointer(
const void* ptr);
4659 void EndString(
const char* pStr = VMA_NULL);
4661 void WriteNumber(uint32_t n);
4662 void WriteNumber(uint64_t n);
4663 void WriteBool(
bool b);
4667 static const char*
const INDENT;
// One stack entry per currently-open object/array.
4669 enum COLLECTION_TYPE
4671 COLLECTION_TYPE_OBJECT,
4672 COLLECTION_TYPE_ARRAY,
4676 COLLECTION_TYPE type;
4677 uint32_t valueCount;
4678 bool singleLineMode;
4681 VmaStringBuilder& m_SB;
4682 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
4683 bool m_InsideString;
4685 void BeginValue(
bool isString);
4686 void WriteIndent(
bool oneLess =
false);
// Two-space indentation unit.
4689 const char*
const VmaJsonWriter::INDENT =
"  ";
// --- VmaJsonWriter method definitions ---
// Invariant enforced throughout: structural writes assert !m_InsideString,
// string-continuation writes assert m_InsideString.
4691 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
4693 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
4694 m_InsideString(false)
// Destructor checks the document was fully closed.
4698 VmaJsonWriter::~VmaJsonWriter()
4700 VMA_ASSERT(!m_InsideString);
4701 VMA_ASSERT(m_Stack.empty());
// Pushes an OBJECT frame ('{' emission elided in this view).
4704 void VmaJsonWriter::BeginObject(
bool singleLine)
4706 VMA_ASSERT(!m_InsideString);
4712 item.type = COLLECTION_TYPE_OBJECT;
4713 item.valueCount = 0;
4714 item.singleLineMode = singleLine;
4715 m_Stack.push_back(item);
4718 void VmaJsonWriter::EndObject()
4720 VMA_ASSERT(!m_InsideString);
4725 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
// Pushes an ARRAY frame ('[' emission elided in this view).
4729 void VmaJsonWriter::BeginArray(
bool singleLine)
4731 VMA_ASSERT(!m_InsideString);
4737 item.type = COLLECTION_TYPE_ARRAY;
4738 item.valueCount = 0;
4739 item.singleLineMode = singleLine;
4740 m_Stack.push_back(item);
4743 void VmaJsonWriter::EndArray()
4745 VMA_ASSERT(!m_InsideString);
4750 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// Convenience: BeginString + EndString in one call (body elided here).
4754 void VmaJsonWriter::WriteString(
const char* pStr)
4760 void VmaJsonWriter::BeginString(
const char* pStr)
4762 VMA_ASSERT(!m_InsideString);
4766 m_InsideString =
true;
4767 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4769 ContinueString(pStr);
// Escapes and appends characters one by one; the per-character escape
// switch (\" \\ \n \t ...) is elided between these lines.
4773 void VmaJsonWriter::ContinueString(
const char* pStr)
4775 VMA_ASSERT(m_InsideString);
4777 const size_t strLen = strlen(pStr);
4778 for(
size_t i = 0; i < strLen; ++i)
4811 VMA_ASSERT(0 &&
"Character not currently supported.");
4817 void VmaJsonWriter::ContinueString(uint32_t n)
4819 VMA_ASSERT(m_InsideString);
4823 void VmaJsonWriter::ContinueString(uint64_t n)
4825 VMA_ASSERT(m_InsideString);
4829 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
4831 VMA_ASSERT(m_InsideString);
4832 m_SB.AddPointer(ptr);
4835 void VmaJsonWriter::EndString(
const char* pStr)
4837 VMA_ASSERT(m_InsideString);
4838 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4840 ContinueString(pStr);
4843 m_InsideString =
false;
4846 void VmaJsonWriter::WriteNumber(uint32_t n)
4848 VMA_ASSERT(!m_InsideString);
4853 void VmaJsonWriter::WriteNumber(uint64_t n)
4855 VMA_ASSERT(!m_InsideString);
4860 void VmaJsonWriter::WriteBool(
bool b)
4862 VMA_ASSERT(!m_InsideString);
4864 m_SB.Add(b ?
"true" :
"false");
4867 void VmaJsonWriter::WriteNull()
4869 VMA_ASSERT(!m_InsideString);
// Emits the key/value separator or comma depending on position: object
// keys must be strings (asserted), values alternate with keys.
4874 void VmaJsonWriter::BeginValue(
bool isString)
4876 if(!m_Stack.empty())
4878 StackItem& currItem = m_Stack.back();
4879 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4880 currItem.valueCount % 2 == 0)
4882 VMA_ASSERT(isString);
4885 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4886 currItem.valueCount % 2 != 0)
4890 else if(currItem.valueCount > 0)
4899 ++currItem.valueCount;
// Writes newline + one INDENT per stack level (minus one if oneLess).
4903 void VmaJsonWriter::WriteIndent(
bool oneLess)
4905 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
4909 size_t count = m_Stack.size();
4910 if(count > 0 && oneLess)
4914 for(
size_t i = 0; i < count; ++i)
// SetUserData: for string-mode allocations, replaces the owned copy of the
// user string; otherwise just stores the raw pointer.
4921 #endif // #if VMA_STATS_STRING_ENABLED 4925 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
4927 if(IsUserDataString())
4929 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
4931 FreeUserDataString(hAllocator);
4933 if(pUserData != VMA_NULL)
// Deep-copy the string, including the NUL terminator.
4935 const char*
const newStrSrc = (
char*)pUserData;
4936 const size_t newStrLen = strlen(newStrSrc);
4937 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
4938 memcpy(newStrDst, newStrSrc, newStrLen + 1);
4939 m_pUserData = newStrDst;
4944 m_pUserData = pUserData;
// Re-points a block allocation at a new block during defragmentation; a
// mapped allocation carries its map refcount over to the new block.
4948 void VmaAllocation_T::ChangeBlockAllocation(
4950 VmaDeviceMemoryBlock* block,
4951 VkDeviceSize offset)
4953 VMA_ASSERT(block != VMA_NULL);
4954 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4957 if(block != m_BlockAllocation.m_Block)
4959 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
4960 if(IsPersistentMap())
// NOTE(review): the ++mapRefCount for the persistent-map case appears to
// be elided between these lines — compare with upstream before editing.
4962 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
4963 block->Map(hAllocator, mapRefCount, VMA_NULL);
4966 m_BlockAllocation.m_Block = block;
4967 m_BlockAllocation.m_Offset = offset;
// Type-dispatched getters (switch headers/defaults elided in this view).
4970 VkDeviceSize VmaAllocation_T::GetOffset()
const 4974 case ALLOCATION_TYPE_BLOCK:
4975 return m_BlockAllocation.m_Offset;
4976 case ALLOCATION_TYPE_DEDICATED:
4984 VkDeviceMemory VmaAllocation_T::GetMemory()
const 4988 case ALLOCATION_TYPE_BLOCK:
4989 return m_BlockAllocation.m_Block->GetDeviceMemory();
4990 case ALLOCATION_TYPE_DEDICATED:
4991 return m_DedicatedAllocation.m_hMemory;
4994 return VK_NULL_HANDLE;
4998 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 5002 case ALLOCATION_TYPE_BLOCK:
5003 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
5004 case ALLOCATION_TYPE_DEDICATED:
5005 return m_DedicatedAllocation.m_MemoryTypeIndex;
// For a block allocation the mapped pointer is the block's mapping plus
// this allocation's offset.
5012 void* VmaAllocation_T::GetMappedData()
const 5016 case ALLOCATION_TYPE_BLOCK:
5019 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
5020 VMA_ASSERT(pBlockData != VMA_NULL);
5021 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
5028 case ALLOCATION_TYPE_DEDICATED:
5029 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
5030 return m_DedicatedAllocation.m_pMappedData;
5037 bool VmaAllocation_T::CanBecomeLost()
const 5041 case ALLOCATION_TYPE_BLOCK:
5042 return m_BlockAllocation.m_CanBecomeLost;
5043 case ALLOCATION_TYPE_DEDICATED:
5051 VmaPool VmaAllocation_T::GetPool()
const 5053 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5054 return m_BlockAllocation.m_hPool;
// MakeLost: CAS loop that retires the allocation unless it was used within
// the last frameInUseCount frames (retry bodies elided in this view).
5057 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5059 VMA_ASSERT(CanBecomeLost());
5065 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
5068 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
5073 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
5079 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Releases the owned copy of the user-data string, if any.
5089 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
5091 VMA_ASSERT(IsUserDataString());
5092 if(m_pUserData != VMA_NULL)
5094 char*
const oldStr = (
char*)m_pUserData;
5095 const size_t oldStrLen = strlen(oldStr);
5096 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
5097 m_pUserData = VMA_NULL;
// Map refcounting for block allocations: the low 7 bits of m_MapCount are
// the refcount, the top bit is the persistent-map flag; 0x7F is the cap.
5101 void VmaAllocation_T::BlockAllocMap()
5103 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
5105 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
5111 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
5115 void VmaAllocation_T::BlockAllocUnmap()
5117 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
5119 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
5125 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// Dedicated map: if already mapped, bump the refcount and return the cached
// pointer; otherwise vkMapMemory the whole range and cache the result.
5129 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
5131 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
5135 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
5137 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
5138 *ppData = m_DedicatedAllocation.m_pMappedData;
5144 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
5145 return VK_ERROR_MEMORY_MAP_FAILED;
// First mapping: call through the function-pointer table (offset/size/flags
// arguments are elided between these lines; presumably 0, VK_WHOLE_SIZE, 0).
5150 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5151 hAllocator->m_hDevice,
5152 m_DedicatedAllocation.m_hMemory,
5157 if(result == VK_SUCCESS)
5159 m_DedicatedAllocation.m_pMappedData = *ppData;
// Dedicated unmap: on the last reference, clear the cache and vkUnmapMemory.
5166 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
5168 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
5170 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
5175 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
5176 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
5177 hAllocator->m_hDevice,
5178 m_DedicatedAllocation.m_hMemory);
5183 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// Serializes one VmaStatInfo as a JSON object; the WriteNumber(...) call
// following each WriteString key is elided in this view.
5187 #if VMA_STATS_STRING_ENABLED 5190 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
5199 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
5203 json.WriteString(
"Blocks");
5206 json.WriteString(
"Allocations");
5209 json.WriteString(
"UnusedRanges");
5212 json.WriteString(
"UsedBytes");
5215 json.WriteString(
"UnusedBytes");
// Min/Avg/Max triplets are emitted as compact single-line objects.
5220 json.WriteString(
"AllocationSize");
5221 json.BeginObject(
true);
5222 json.WriteString(
"Min");
5224 json.WriteString(
"Avg");
5226 json.WriteString(
"Max");
5233 json.WriteString(
"UnusedRangeSize");
5234 json.BeginObject(
true);
5235 json.WriteString(
"Min");
5237 json.WriteString(
"Avg");
5239 json.WriteString(
"Max");
// Comparator over suballocation-list iterators, ordered by range size;
// the second overload enables heterogeneous binary search against a size.
5247 #endif // #if VMA_STATS_STRING_ENABLED 5249 struct VmaSuballocationItemSizeLess
5252 const VmaSuballocationList::iterator lhs,
5253 const VmaSuballocationList::iterator rhs)
const 5255 return lhs->size < rhs->size;
5258 const VmaSuballocationList::iterator lhs,
5259 VkDeviceSize rhsSize)
const 5261 return lhs->size < rhsSize;
// --- VmaBlockMetadata construction and validation ---
5268 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
5272 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
5273 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
5277 VmaBlockMetadata::~VmaBlockMetadata()
// Init: the whole block starts as a single FREE suballocation covering
// [0, size), registered in the by-size index.
5281 void VmaBlockMetadata::Init(VkDeviceSize size)
5285 m_SumFreeSize = size;
5287 VmaSuballocation suballoc = {};
5288 suballoc.offset = 0;
5289 suballoc.size = size;
5290 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5291 suballoc.hAllocation = VK_NULL_HANDLE;
5293 m_Suballocations.push_back(suballoc);
// NOTE(review): end() is decremented to the new last element between
// these lines (--suballocItem elided) before being registered.
5294 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
5296 m_FreeSuballocationsBySize.push_back(suballocItem);
// Validate: walks the suballocation list recomputing offsets, free count,
// free bytes, and registered-free count, then cross-checks the cached
// values and the sorted by-size index. Early `return false` lines are
// elided in this view.
5299 bool VmaBlockMetadata::Validate()
const 5301 if(m_Suballocations.empty())
5307 VkDeviceSize calculatedOffset = 0;
5309 uint32_t calculatedFreeCount = 0;
5311 VkDeviceSize calculatedSumFreeSize = 0;
5314 size_t freeSuballocationsToRegister = 0;
// Two adjacent free ranges are an invariant violation (must be merged).
5316 bool prevFree =
false;
5318 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5319 suballocItem != m_Suballocations.cend();
5322 const VmaSuballocation& subAlloc = *suballocItem;
5325 if(subAlloc.offset != calculatedOffset)
5330 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
5332 if(prevFree && currFree)
5337 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
5344 calculatedSumFreeSize += subAlloc.size;
5345 ++calculatedFreeCount;
5346 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5348 ++freeSuballocationsToRegister;
5353 if(subAlloc.hAllocation->GetOffset() != subAlloc.offset)
5357 if(subAlloc.hAllocation->GetSize() != subAlloc.size)
5363 calculatedOffset += subAlloc.size;
5364 prevFree = currFree;
5369 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
// The by-size index must be sorted ascending and contain only FREE items.
5374 VkDeviceSize lastSize = 0;
5375 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
5377 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
5380 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
5385 if(suballocItem->size < lastSize)
5390 lastSize = suballocItem->size;
5394 if(!ValidateFreeSuballocationList() ||
5395 (calculatedOffset != m_Size) ||
5396 (calculatedSumFreeSize != m_SumFreeSize) ||
5397 (calculatedFreeCount != m_FreeCount))
// The by-size index is ascending, so the largest free range is last.
5405 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 5407 if(!m_FreeSuballocationsBySize.empty())
5409 return m_FreeSuballocationsBySize.back()->size;
// Empty = exactly one suballocation and it is free.
5417 bool VmaBlockMetadata::IsEmpty()
const 5419 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Aggregates per-suballocation sizes into a VmaStatInfo (accumulation
// statements are elided between these lines).
5422 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 5426 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5438 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5439 suballocItem != m_Suballocations.cend();
5442 const VmaSuballocation& suballoc = *suballocItem;
5443 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Adds this block's totals into an in-progress VmaPoolStats.
5456 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 5458 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5460 inoutStats.
size += m_Size;
// Dumps the block's full suballocation map as JSON (for vmaBuildStatsString
// with detailedMap enabled).
5467 #if VMA_STATS_STRING_ENABLED 5469 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 5473 json.WriteString(
"TotalBytes");
5474 json.WriteNumber(m_Size);
5476 json.WriteString(
"UnusedBytes");
5477 json.WriteNumber(m_SumFreeSize);
5479 json.WriteString(
"Allocations");
5480 json.WriteNumber((uint64_t)m_Suballocations.size() - m_FreeCount);
5482 json.WriteString(
"UnusedRanges");
5483 json.WriteNumber(m_FreeCount);
5485 json.WriteString(
"Suballocations");
5488 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5489 suballocItem != m_Suballocations.cend();
5490 ++suballocItem, ++i)
5492 json.BeginObject(
true);
5494 json.WriteString(
"Type");
5495 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
5497 json.WriteString(
"Size");
5498 json.WriteNumber(suballocItem->size);
5500 json.WriteString(
"Offset");
5501 json.WriteNumber(suballocItem->offset);
// User data is printed as a string for string-mode allocations, otherwise
// as a pointer value.
5503 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
5505 const void* pUserData = suballocItem->hAllocation->GetUserData();
5506 if(pUserData != VMA_NULL)
5508 json.WriteString(
"UserData");
5509 if(suballocItem->hAllocation->IsUserDataString())
5511 json.WriteString((
const char*)pUserData);
5516 json.ContinueString_Pointer(pUserData);
5543 VMA_ASSERT(IsEmpty());
5544 pAllocationRequest->offset = 0;
5545 pAllocationRequest->sumFreeSize = m_SumFreeSize;
5546 pAllocationRequest->sumItemSize = 0;
5547 pAllocationRequest->item = m_Suballocations.begin();
5548 pAllocationRequest->itemsToMakeLostCount = 0;
5551 bool VmaBlockMetadata::CreateAllocationRequest(
5552 uint32_t currentFrameIndex,
5553 uint32_t frameInUseCount,
5554 VkDeviceSize bufferImageGranularity,
5555 VkDeviceSize allocSize,
5556 VkDeviceSize allocAlignment,
5557 VmaSuballocationType allocType,
5558 bool canMakeOtherLost,
5559 VmaAllocationRequest* pAllocationRequest)
5561 VMA_ASSERT(allocSize > 0);
5562 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5563 VMA_ASSERT(pAllocationRequest != VMA_NULL);
5564 VMA_HEAVY_ASSERT(Validate());
5567 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
5573 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
5574 if(freeSuballocCount > 0)
5579 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5580 m_FreeSuballocationsBySize.data(),
5581 m_FreeSuballocationsBySize.data() + freeSuballocCount,
5583 VmaSuballocationItemSizeLess());
5584 size_t index = it - m_FreeSuballocationsBySize.data();
5585 for(; index < freeSuballocCount; ++index)
5590 bufferImageGranularity,
5594 m_FreeSuballocationsBySize[index],
5596 &pAllocationRequest->offset,
5597 &pAllocationRequest->itemsToMakeLostCount,
5598 &pAllocationRequest->sumFreeSize,
5599 &pAllocationRequest->sumItemSize))
5601 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
5609 for(
size_t index = freeSuballocCount; index--; )
5614 bufferImageGranularity,
5618 m_FreeSuballocationsBySize[index],
5620 &pAllocationRequest->offset,
5621 &pAllocationRequest->itemsToMakeLostCount,
5622 &pAllocationRequest->sumFreeSize,
5623 &pAllocationRequest->sumItemSize))
5625 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
5632 if(canMakeOtherLost)
5636 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
5637 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
5639 VmaAllocationRequest tmpAllocRequest = {};
5640 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
5641 suballocIt != m_Suballocations.end();
5644 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
5645 suballocIt->hAllocation->CanBecomeLost())
5650 bufferImageGranularity,
5656 &tmpAllocRequest.offset,
5657 &tmpAllocRequest.itemsToMakeLostCount,
5658 &tmpAllocRequest.sumFreeSize,
5659 &tmpAllocRequest.sumItemSize))
5661 tmpAllocRequest.item = suballocIt;
5663 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
5665 *pAllocationRequest = tmpAllocRequest;
5671 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// MakeRequestedAllocationsLost: walks forward from pAllocationRequest->item,
// making the counted number of existing allocations "lost" (evicted) so the
// requested region becomes free. Frees each lost suballocation in place.
// Returns true-ish on success (return statements elided by extraction).
5680 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
5681 uint32_t currentFrameIndex,
5682 uint32_t frameInUseCount,
5683 VmaAllocationRequest* pAllocationRequest)
5685 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Skip over already-free suballocations; only occupied ones count.
5687 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
5689 ++pAllocationRequest->item;
5691 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5692 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
5693 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
5694 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation merges with free neighbors and returns the merged item.
5696 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
5697 --pAllocationRequest->itemsToMakeLostCount;
5705 VMA_HEAVY_ASSERT(Validate());
5706 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5707 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// MakeAllocationsLost: scans every suballocation and makes lost all
// allocations that are eligible (CanBecomeLost and old enough per
// frameInUseCount). Returns how many were lost.
5712 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5714 uint32_t lostAllocationCount = 0;
5715 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
5716 it != m_Suballocations.end();
5719 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
5720 it->hAllocation->CanBecomeLost() &&
5721 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; continue from the returned item.
5723 it = FreeSuballocation(it);
5724 ++lostAllocationCount;
5727 return lostAllocationCount;
// Alloc: commits a previously computed VmaAllocationRequest. The chosen free
// suballocation is shrunk to exactly [offset, offset+allocSize) and marked
// as used; leftover space before/after becomes new FREE suballocations that
// are (re)registered in the size-sorted free list.
5730 void VmaBlockMetadata::Alloc(
5731 const VmaAllocationRequest& request,
5732 VmaSuballocationType type,
5733 VkDeviceSize allocSize,
5736 VMA_ASSERT(request.item != m_Suballocations.end());
5737 VmaSuballocation& suballoc = *request.item;
5739 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5741 VMA_ASSERT(request.offset >= suballoc.offset);
// Space lost to alignment before the allocation…
5742 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
5743 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
// …and leftover space after it.
5744 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// Remove from the free-by-size index before mutating size/type.
5748 UnregisterFreeSuballocation(request.item);
5750 suballoc.offset = request.offset;
5751 suballoc.size = allocSize;
5752 suballoc.type = type;
5753 suballoc.hAllocation = hAllocation;
// Insert a FREE suballocation for the tail padding, if any.
5758 VmaSuballocation paddingSuballoc = {};
5759 paddingSuballoc.offset = request.offset + allocSize;
5760 paddingSuballoc.size = paddingEnd;
5761 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5762 VmaSuballocationList::iterator next = request.item;
5764 const VmaSuballocationList::iterator paddingEndItem =
5765 m_Suballocations.insert(next, paddingSuballoc);
5766 RegisterFreeSuballocation(paddingEndItem);
// Insert a FREE suballocation for the head padding, if any.
5772 VmaSuballocation paddingSuballoc = {};
5773 paddingSuballoc.offset = request.offset - paddingBegin;
5774 paddingSuballoc.size = paddingBegin;
5775 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5776 const VmaSuballocationList::iterator paddingBeginItem =
5777 m_Suballocations.insert(request.item, paddingSuballoc);
5778 RegisterFreeSuballocation(paddingBeginItem);
// Bookkeeping: one free range consumed; paddings add back (increments for
// paddingBegin/paddingEnd were dropped by the extraction).
5782 m_FreeCount = m_FreeCount - 1;
5783 if(paddingBegin > 0)
5791 m_SumFreeSize -= allocSize;
// Body of VmaBlockMetadata::Free(allocation) — the signature line was lost
// in extraction. Linear-searches the suballocation list for the entry that
// owns `allocation`, frees it (merging with free neighbors), and asserts if
// the allocation is not found in this block.
5796 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5797 suballocItem != m_Suballocations.end();
5800 VmaSuballocation& suballoc = *suballocItem;
5801 if(suballoc.hAllocation == allocation)
5803 FreeSuballocation(suballocItem);
5804 VMA_HEAVY_ASSERT(Validate());
// Reaching here means the allocation does not belong to this block.
5808 VMA_ASSERT(0 &&
"Not found!");
// FreeAtOffset: like Free(), but identifies the suballocation by its byte
// offset within the block instead of by allocation handle.
5811 void VmaBlockMetadata::FreeAtOffset(VkDeviceSize offset)
5813 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5814 suballocItem != m_Suballocations.end();
5817 VmaSuballocation& suballoc = *suballocItem;
5818 if(suballoc.offset == offset)
5820 FreeSuballocation(suballocItem);
// No suballocation starts at the given offset — caller bug.
5824 VMA_ASSERT(0 &&
"Not found!");
// ValidateFreeSuballocationList: debug check of m_FreeSuballocationsBySize.
// Every entry must be FREE, at least the registration threshold in size, and
// the array must be sorted ascending by size. (The `return false` bodies of
// the failing checks were elided by extraction.)
5827 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 5829 VkDeviceSize lastSize = 0;
5830 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5832 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5834 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
5839 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
// Sorted-order invariant: sizes must be non-decreasing.
5844 if(it->size < lastSize)
5850 lastSize = it->size;
// CheckAllocation: tests whether an allocation of allocSize/allocAlignment
// can start at (or after) suballocItem, honoring VMA_DEBUG_MARGIN and the
// Vulkan bufferImageGranularity rule (no linear/optimal resources sharing a
// granularity page). Outputs the final *pOffset plus, in the
// canMakeOtherLost path, how many existing allocations must be made lost
// and the free/item byte sums used for cost comparison. Two largely
// parallel paths follow: with and without eviction.
// NOTE(review): many `return false` branches and `++`/`--` iterator
// statements were dropped by the extraction; structure per upstream VMA.
5855 bool VmaBlockMetadata::CheckAllocation(
5856 uint32_t currentFrameIndex,
5857 uint32_t frameInUseCount,
5858 VkDeviceSize bufferImageGranularity,
5859 VkDeviceSize allocSize,
5860 VkDeviceSize allocAlignment,
5861 VmaSuballocationType allocType,
5862 VmaSuballocationList::const_iterator suballocItem,
5863 bool canMakeOtherLost,
5864 VkDeviceSize* pOffset,
5865 size_t* itemsToMakeLostCount,
5866 VkDeviceSize* pSumFreeSize,
5867 VkDeviceSize* pSumItemSize)
const 5869 VMA_ASSERT(allocSize > 0);
5870 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5871 VMA_ASSERT(suballocItem != m_Suballocations.cend());
5872 VMA_ASSERT(pOffset != VMA_NULL);
5874 *itemsToMakeLostCount = 0;
// ---- Path 1: allocation may displace ("make lost") existing ones. ----
5878 if(canMakeOtherLost)
5880 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5882 *pSumFreeSize = suballocItem->size;
// Occupied item: usable only if it can become lost and is old enough.
5886 if(suballocItem->hAllocation->CanBecomeLost() &&
5887 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5889 ++*itemsToMakeLostCount;
5890 *pSumItemSize = suballocItem->size;
// Remaining block space from this item's offset must fit the allocation.
5899 if(m_Size - suballocItem->offset < allocSize)
5905 *pOffset = suballocItem->offset;
// Reserve debug margin before the allocation (except at block start).
5908 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5910 *pOffset += VMA_DEBUG_MARGIN;
// Align up to the stricter of requested and debug alignment.
5914 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5915 *pOffset = VmaAlignUp(*pOffset, alignment);
// bufferImageGranularity: if a conflicting-type neighbor ends on the same
// granularity page before us, bump the offset to the next page.
5919 if(bufferImageGranularity > 1)
5921 bool bufferImageGranularityConflict =
false;
5922 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5923 while(prevSuballocItem != m_Suballocations.cbegin())
5926 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5927 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5929 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5931 bufferImageGranularityConflict =
true;
5939 if(bufferImageGranularityConflict)
5941 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Alignment pushed us past the end of this suballocation — fail.
5947 if(*pOffset >= suballocItem->offset + suballocItem->size)
5953 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
5956 VmaSuballocationList::const_iterator next = suballocItem;
// Need a trailing debug margin unless we are the last suballocation.
5958 const VkDeviceSize requiredEndMargin =
5959 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5961 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
5963 if(suballocItem->offset + totalSize > m_Size)
// The request may span several successive suballocations; walk forward
// accumulating free space and evictable items until it fits.
5970 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
5971 if(totalSize > suballocItem->size)
5973 VkDeviceSize remainingSize = totalSize - suballocItem->size;
5974 while(remainingSize > 0)
5977 if(lastSuballocItem == m_Suballocations.cend())
5981 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5983 *pSumFreeSize += lastSuballocItem->size;
5987 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
5988 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
5989 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5991 ++*itemsToMakeLostCount;
5992 *pSumItemSize += lastSuballocItem->size;
5999 remainingSize = (lastSuballocItem->size < remainingSize) ?
6000 remainingSize - lastSuballocItem->size : 0;
// Granularity check against following items: any conflicting-type
// allocation on the same page after us must also be made lost.
6006 if(bufferImageGranularity > 1)
6008 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
6010 while(nextSuballocItem != m_Suballocations.cend())
6012 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
6013 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
6015 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
6017 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
6018 if(nextSuballoc.hAllocation->CanBecomeLost() &&
6019 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
6021 ++*itemsToMakeLostCount;
// ---- Path 2: plain placement inside one FREE suballocation. ----
6040 const VmaSuballocation& suballoc = *suballocItem;
6041 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
6043 *pSumFreeSize = suballoc.size;
6046 if(suballoc.size < allocSize)
6052 *pOffset = suballoc.offset;
6055 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
6057 *pOffset += VMA_DEBUG_MARGIN;
6061 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
6062 *pOffset = VmaAlignUp(*pOffset, alignment);
// Same granularity-vs-previous-neighbor adjustment as in path 1.
6066 if(bufferImageGranularity > 1)
6068 bool bufferImageGranularityConflict =
false;
6069 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
6070 while(prevSuballocItem != m_Suballocations.cbegin())
6073 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
6074 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
6076 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
6078 bufferImageGranularityConflict =
true;
6086 if(bufferImageGranularityConflict)
6088 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
6093 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
6096 VmaSuballocationList::const_iterator next = suballocItem;
6098 const VkDeviceSize requiredEndMargin =
6099 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Must fit entirely inside this single free suballocation.
6102 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Granularity conflict with any following allocation ⇒ placement fails
// (no eviction allowed on this path).
6109 if(bufferImageGranularity > 1)
6111 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
6113 while(nextSuballocItem != m_Suballocations.cend())
6115 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
6116 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
6118 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// MergeFreeWithNext: coalesces a FREE suballocation with its FREE successor,
// growing `item` and erasing the successor from the list.
// NOTE(review): the original has `++nextItem;` between its declaration and
// the asserts (embedded line 6143 is missing here) — without it this text
// would self-merge; confirm against upstream vk_mem_alloc.h.
6137 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
6139 VMA_ASSERT(item != m_Suballocations.end());
6140 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6142 VmaSuballocationList::iterator nextItem = item;
6144 VMA_ASSERT(nextItem != m_Suballocations.end());
6145 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
6147 item->size += nextItem->size;
6149 m_Suballocations.erase(nextItem);
// FreeSuballocation: marks a suballocation FREE, updates totals, merges it
// with any FREE neighbor (next first, then previous), keeps the free-by-size
// index consistent, and returns the iterator of the resulting free item.
// NOTE(review): `++nextItem;` / `--prevItem;` advance/retreat statements were
// dropped by the extraction (numbering gaps at 6168/6176-6177).
6152 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
6155 VmaSuballocation& suballoc = *suballocItem;
6156 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6157 suballoc.hAllocation = VK_NULL_HANDLE;
// Freed bytes return to the free-space total (m_FreeCount update elided).
6161 m_SumFreeSize += suballoc.size;
6164 bool mergeWithNext =
false;
6165 bool mergeWithPrev =
false;
6167 VmaSuballocationList::iterator nextItem = suballocItem;
6169 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
6171 mergeWithNext =
true;
6174 VmaSuballocationList::iterator prevItem = suballocItem;
6175 if(suballocItem != m_Suballocations.begin())
6178 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6180 mergeWithPrev =
true;
// Merge forward: the neighbor leaves the size index before it is absorbed.
6186 UnregisterFreeSuballocation(nextItem);
6187 MergeFreeWithNext(suballocItem);
// Merge backward: prev absorbs us, then re-registers with its new size.
6192 UnregisterFreeSuballocation(prevItem);
6193 MergeFreeWithNext(prevItem);
6194 RegisterFreeSuballocation(prevItem);
// No backward merge: register the (possibly forward-merged) item itself.
6199 RegisterFreeSuballocation(suballocItem);
6200 return suballocItem;
// RegisterFreeSuballocation: adds a FREE suballocation to the size-sorted
// m_FreeSuballocationsBySize index. Items below the registration threshold
// are tracked only in the main list, not in the index.
6204 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
6206 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6207 VMA_ASSERT(item->size > 0);
6211 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
6213 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6215 if(m_FreeSuballocationsBySize.empty())
6217 m_FreeSuballocationsBySize.push_back(item);
// Keep the vector sorted by suballocation size.
6221 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// UnregisterFreeSuballocation: removes a FREE suballocation from the
// size-sorted index. Binary-searches to the first entry of equal size, then
// scans the run of equal-size entries for the exact iterator; asserts if the
// item should be registered but is not found.
6229 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
6231 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6232 VMA_ASSERT(item->size > 0);
6236 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
6238 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6240 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
6241 m_FreeSuballocationsBySize.data(),
6242 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
6244 VmaSuballocationItemSizeLess());
6245 for(
size_t index = it - m_FreeSuballocationsBySize.data();
6246 index < m_FreeSuballocationsBySize.size();
6249 if(m_FreeSuballocationsBySize[index] == item)
6251 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Still inside the equal-size run, otherwise the item is missing.
6254 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
6256 VMA_ASSERT(0 &&
"Not found.");
// VmaDeviceMemoryBlock ctor: constructs an uninitialized block — no
// VkDeviceMemory attached yet (see Init()), nothing mapped.
6265 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
6266 m_Metadata(hAllocator),
6267 m_MemoryTypeIndex(UINT32_MAX),
6268 m_hMemory(VK_NULL_HANDLE),
6270 m_pMappedData(VMA_NULL)
// Init: attaches freshly allocated VkDeviceMemory to this block and
// initializes metadata to one big free range of newSize bytes.
// May only be called once (asserted).
6274 void VmaDeviceMemoryBlock::Init(
6275 uint32_t newMemoryTypeIndex,
6276 VkDeviceMemory newMemory,
6277 VkDeviceSize newSize)
6279 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
6281 m_MemoryTypeIndex = newMemoryTypeIndex;
6282 m_hMemory = newMemory;
6284 m_Metadata.Init(newSize);
// Destroy: releases the underlying VkDeviceMemory back through the
// allocator. The block must be empty — leaking suballocations here would
// orphan live VmaAllocations.
6287 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
6291 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
6293 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
6294 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
6295 m_hMemory = VK_NULL_HANDLE;
// Validate: sanity-check — block must have memory and nonzero size, then
// defer to the metadata's own validation.
6298 bool VmaDeviceMemoryBlock::Validate()
const 6300 if((m_hMemory == VK_NULL_HANDLE) ||
6301 (m_Metadata.GetSize() == 0))
6306 return m_Metadata.Validate();
// Map: reference-counted persistent mapping of the whole block. If already
// mapped, bumps m_MapCount and returns the cached pointer; otherwise calls
// vkMapMemory. Thread-safe via m_Mutex when the allocator uses mutexes.
// NOTE(review): the vkMapMemory argument list (offset/size/flags and the
// &m_pMappedData out-param) was elided by the extraction.
6309 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
6316 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
// Already mapped: just add references and hand out the cached pointer.
6319 m_MapCount += count;
6320 VMA_ASSERT(m_pMappedData != VMA_NULL);
6321 if(ppData != VMA_NULL)
6323 *ppData = m_pMappedData;
// First mapping: go through the (possibly user-supplied) Vulkan function.
6329 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6330 hAllocator->m_hDevice,
6336 if(result == VK_SUCCESS)
6338 if(ppData != VMA_NULL)
6340 *ppData = m_pMappedData;
// Unmap: decrements the mapping reference count by `count`; when it reaches
// zero, actually calls vkUnmapMemory. Unbalanced unmaps trigger the assert.
6348 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
6355 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6356 if(m_MapCount >= count)
6358 m_MapCount -= count;
// Last reference released (condition line elided): drop the real mapping.
6361 m_pMappedData = VMA_NULL;
6362 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
6367 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// BindBufferMemory: binds a VkBuffer to this block's memory at the
// allocation's offset. Serialized with the block mutex because vkBindBuffer-
// Memory on the same VkDeviceMemory must not race mapping operations.
// (hAllocator/hAllocation/hBuffer parameter lines elided by extraction.)
6371 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
6376 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
6377 hAllocation->GetBlock() ==
this);
6379 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6380 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
6381 hAllocator->m_hDevice,
6384 hAllocation->GetOffset());
// BindImageMemory: image counterpart of BindBufferMemory — binds a VkImage
// at the allocation's offset within this block, under the block mutex.
6387 VkResult VmaDeviceMemoryBlock::BindImageMemory(
6392 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
6393 hAllocation->GetBlock() ==
this);
6395 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6396 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
6397 hAllocator->m_hDevice,
6400 hAllocation->GetOffset());
// Fragments of the VmaStatInfo helpers (signatures largely elided by the
// extraction): zero-initialization of an out-struct, and the declaration of
// VmaPostprocessCalcStatInfo which finalizes averaged statistics.
6405 memset(&outInfo, 0,
sizeof(outInfo));
6424 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool_T ctor/dtor fragment: the pool forwards its creation parameters to
// an internal VmaBlockVector (member-initializer head and dtor body elided
// by the extraction).
6432 VmaPool_T::VmaPool_T(
6437 createInfo.memoryTypeIndex,
6438 createInfo.blockSize,
6439 createInfo.minBlockCount,
6440 createInfo.maxBlockCount,
6442 createInfo.frameInUseCount,
6447 VmaPool_T::~VmaPool_T()
// VmaBlockVector ctor: a vector of VkDeviceMemory blocks of one memory type.
// Stores sizing policy (preferred block size, min/max block count), the
// device's bufferImageGranularity, lost-allocation frame window, and whether
// this vector backs a user-created custom pool (affects block sizing).
#if VMA_STATS_STRING_ENABLED 6453 #endif // #if VMA_STATS_STRING_ENABLED 6455 VmaBlockVector::VmaBlockVector(
6457 uint32_t memoryTypeIndex,
6458 VkDeviceSize preferredBlockSize,
6459 size_t minBlockCount,
6460 size_t maxBlockCount,
6461 VkDeviceSize bufferImageGranularity,
6462 uint32_t frameInUseCount,
6463 bool isCustomPool) :
6464 m_hAllocator(hAllocator),
6465 m_MemoryTypeIndex(memoryTypeIndex),
6466 m_PreferredBlockSize(preferredBlockSize),
6467 m_MinBlockCount(minBlockCount),
6468 m_MaxBlockCount(maxBlockCount),
6469 m_BufferImageGranularity(bufferImageGranularity),
6470 m_FrameInUseCount(frameInUseCount),
6471 m_IsCustomPool(isCustomPool),
6472 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
6473 m_HasEmptyBlock(false),
6474 m_pDefragmentator(VMA_NULL)
// ~VmaBlockVector: destroys every remaining block (releasing its device
// memory) and deletes the block objects. Defragmentator must already be
// destroyed via DestroyDefragmentator().
6478 VmaBlockVector::~VmaBlockVector()
6480 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
6482 for(
size_t i = m_Blocks.size(); i--; )
6484 m_Blocks[i]->Destroy(m_hAllocator);
6485 vma_delete(m_hAllocator, m_Blocks[i]);
// CreateMinBlocks: eagerly pre-allocates m_MinBlockCount blocks of the
// preferred size; stops (and presumably propagates the error) on the first
// failed block creation.
6489 VkResult VmaBlockVector::CreateMinBlocks()
6491 for(
size_t i = 0; i < m_MinBlockCount; ++i)
6493 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
6494 if(res != VK_SUCCESS)
// GetPoolStats: accumulates per-block statistics into *pStats under the
// vector mutex (zeroing of *pStats before the loop elided by extraction).
6502 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
6510 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6512 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6514 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6516 VMA_HEAVY_ASSERT(pBlock->Validate());
6517 pBlock->m_Metadata.AddPoolStats(*pStats);
// VMA_ALLOCATION_TRY_COUNT bounds the make-lost retry loop in Allocate().
//
// Allocate: three-stage strategy under the vector mutex —
//   1) try to place the allocation in an existing block;
//   2) if allowed and under m_MaxBlockCount, create a new block (for default
//      pools, sized heuristically: start below preferred size and shrink on
//      OOM down to NEW_BLOCK_SIZE_SHIFT_MAX halvings);
//   3) if canMakeOtherLost, repeatedly pick the cheapest "make lost" request
//      across blocks, evict, and retry up to VMA_ALLOCATION_TRY_COUNT times.
// Returns VK_SUCCESS with *pAllocation set, VK_ERROR_TOO_MANY_OBJECTS when
// retries are exhausted, or VK_ERROR_OUT_OF_DEVICE_MEMORY.
// NOTE(review): many condition/argument lines were dropped by extraction.
6521 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
6523 VkResult VmaBlockVector::Allocate(
6525 uint32_t currentFrameIndex,
6526 const VkMemoryRequirements& vkMemReq,
6528 VmaSuballocationType suballocType,
6534 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// -- Stage 1: existing blocks ------------------------------------------
6538 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6540 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6541 VMA_ASSERT(pCurrBlock);
6542 VmaAllocationRequest currRequest = {};
6543 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6546 m_BufferImageGranularity,
6554 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistently-mapped allocations keep the block mapped.
6558 VkResult res = pCurrBlock->Map(m_hAllocator, 1, VMA_NULL);
6559 if(res != VK_SUCCESS)
// Block is about to gain an allocation, so it is no longer "empty".
6566 if(pCurrBlock->m_Metadata.IsEmpty())
6568 m_HasEmptyBlock =
false;
6571 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6572 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
6573 (*pAllocation)->InitBlockAllocation(
6582 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
6583 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6584 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// -- Stage 2: create a new block ---------------------------------------
6589 const bool canCreateNewBlock =
6591 (m_Blocks.size() < m_MaxBlockCount);
6594 if(canCreateNewBlock)
6597 VkDeviceSize newBlockSize = m_PreferredBlockSize;
6598 uint32_t newBlockSizeShift = 0;
6599 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Heuristic pre-shrink (default pools only): don't jump straight to the
// full preferred size while smaller blocks would still fit the request.
6603 if(m_IsCustomPool ==
false)
6606 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
6607 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
6609 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6610 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= vkMemReq.size * 2)
6612 newBlockSize = smallerNewBlockSize;
6613 ++newBlockSizeShift;
6622 size_t newBlockIndex = 0;
6623 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On OOM, retry with progressively halved block sizes (default pools).
6625 if(m_IsCustomPool ==
false)
6627 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
6629 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6630 if(smallerNewBlockSize >= vkMemReq.size)
6632 newBlockSize = smallerNewBlockSize;
6633 ++newBlockSizeShift;
6634 res = CreateBlock(newBlockSize, &newBlockIndex);
6643 if(res == VK_SUCCESS)
6645 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
6646 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
6650 res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
6651 if(res != VK_SUCCESS)
// Fresh block: allocate at offset 0 via the first-allocation request.
6658 VmaAllocationRequest allocRequest;
6659 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
6660 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6661 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
6662 (*pAllocation)->InitBlockAllocation(
6665 allocRequest.offset,
6671 VMA_HEAVY_ASSERT(pBlock->Validate());
6672 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
6673 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// -- Stage 3: evict lost-able allocations and retry --------------------
6681 if(canMakeOtherLost)
6683 uint32_t tryIndex = 0;
6684 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
6686 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
6687 VmaAllocationRequest bestRequest = {};
6688 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Find the cheapest eviction plan across all blocks.
6692 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6694 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6695 VMA_ASSERT(pCurrBlock);
6696 VmaAllocationRequest currRequest = {};
6697 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6700 m_BufferImageGranularity,
6707 const VkDeviceSize currRequestCost = currRequest.CalcCost();
6708 if(pBestRequestBlock == VMA_NULL ||
6709 currRequestCost < bestRequestCost)
6711 pBestRequestBlock = pCurrBlock;
6712 bestRequest = currRequest;
6713 bestRequestCost = currRequestCost;
// Cost 0 means nothing needs to be destroyed — can't do better.
6715 if(bestRequestCost == 0)
6723 if(pBestRequestBlock != VMA_NULL)
6727 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
6728 if(res != VK_SUCCESS)
// Eviction may fail if frame indices moved on; then loop and retry.
6734 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
6740 if(pBestRequestBlock->m_Metadata.IsEmpty())
6742 m_HasEmptyBlock =
false;
6745 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6746 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
6747 (*pAllocation)->InitBlockAllocation(
6756 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
6757 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6758 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
6772 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
6774 return VK_ERROR_TOO_MANY_OBJECTS;
6778 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Free: releases one allocation back to its block. Keeps at most ONE empty
// block alive (as a cache); a second empty block — or the last block when
// above m_MinBlockCount — is destroyed. The actual VkDeviceMemory release
// happens outside the mutex via pBlockToDelete.
6781 void VmaBlockVector::Free(
6784 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope of the lock: everything except the final block destruction.
6788 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6790 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
// Persistently mapped allocations hold one map reference on the block.
6792 if(hAllocation->IsPersistentMap())
6794 pBlock->Unmap(m_hAllocator, 1);
6797 pBlock->m_Metadata.Free(hAllocation);
6798 VMA_HEAVY_ASSERT(pBlock->Validate());
6800 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// Block just became empty:
6803 if(pBlock->m_Metadata.IsEmpty())
// Already have an empty block cached — this one can go.
6806 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
6808 pBlockToDelete = pBlock;
6814 m_HasEmptyBlock =
true;
// Block not empty, but an empty one exists: retire the trailing empty
// block if we are above the minimum count.
6819 else if(m_HasEmptyBlock)
6821 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
6822 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
6824 pBlockToDelete = pLastBlock;
6825 m_Blocks.pop_back();
6826 m_HasEmptyBlock =
false;
6830 IncrementallySortBlocks();
// Destruction of VkDeviceMemory done outside the lock (can be slow).
6835 if(pBlockToDelete != VMA_NULL)
6837 VMA_DEBUG_LOG(
" Deleted empty allocation");
6838 pBlockToDelete->Destroy(m_hAllocator);
6839 vma_delete(m_hAllocator, pBlockToDelete);
// CalcMaxBlockSize: largest existing block size, early-exiting once the
// preferred size is reached (result declaration/return elided by extraction).
6843 size_t VmaBlockVector::CalcMaxBlockSize()
const 6846 for(
size_t i = m_Blocks.size(); i--; )
6848 result = VMA_MAX((uint64_t)result, (uint64_t)m_Blocks[i]->m_Metadata.GetSize());
6849 if(result >= m_PreferredBlockSize)
// Remove: unlinks the given block from m_Blocks (does not destroy it).
6857 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
6859 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6861 if(m_Blocks[blockIndex] == pBlock)
6863 VmaVectorRemove(m_Blocks, blockIndex);
// IncrementallySortBlocks: one bubble-sort pass keeping blocks ordered by
// ascending free space, so allocation tries fuller blocks first. Amortized
// over many calls instead of a full sort each time.
6870 void VmaBlockVector::IncrementallySortBlocks()
6873 for(
size_t i = 1; i < m_Blocks.size(); ++i)
6875 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
6877 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// CreateBlock: allocates VkDeviceMemory of blockSize for this memory type,
// wraps it in a VmaDeviceMemoryBlock appended to m_Blocks, and optionally
// reports the new block's index. (Error-return on failed AllocateVulkanMemory
// and pBlock->Init call head elided by extraction.)
6883 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
6885 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6886 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
6887 allocInfo.allocationSize = blockSize;
6888 VkDeviceMemory mem = VK_NULL_HANDLE;
6889 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
6898 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
6902 allocInfo.allocationSize);
6904 m_Blocks.push_back(pBlock);
6905 if(pNewBlockIndex != VMA_NULL)
6907 *pNewBlockIndex = m_Blocks.size() - 1;
// PrintDetailedMap: serializes this block vector as JSON — pool parameters
// (for custom pools: MemoryTypeIndex/BlockSize/BlockCount/FrameInUseCount,
// otherwise PreferredBlockSize) followed by every block's detailed map.
// The custom-pool/default-pool branch lines were elided by extraction.
#if VMA_STATS_STRING_ENABLED 6915 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
6917 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6923 json.WriteString(
"MemoryTypeIndex");
6924 json.WriteNumber(m_MemoryTypeIndex);
6926 json.WriteString(
"BlockSize");
6927 json.WriteNumber(m_PreferredBlockSize);
6929 json.WriteString(
"BlockCount");
6930 json.BeginObject(
true);
// Only emit configured min/max bounds when they constrain anything.
6931 if(m_MinBlockCount > 0)
6933 json.WriteString(
"Min");
6934 json.WriteNumber((uint64_t)m_MinBlockCount);
6936 if(m_MaxBlockCount < SIZE_MAX)
6938 json.WriteString(
"Max");
6939 json.WriteNumber((uint64_t)m_MaxBlockCount);
6941 json.WriteString(
"Cur");
6942 json.WriteNumber((uint64_t)m_Blocks.size());
6945 if(m_FrameInUseCount > 0)
6947 json.WriteString(
"FrameInUseCount");
6948 json.WriteNumber(m_FrameInUseCount);
// Default-pool variant of the header.
6953 json.WriteString(
"PreferredBlockSize");
6954 json.WriteNumber(m_PreferredBlockSize);
6957 json.WriteString(
"Blocks");
6959 for(
size_t i = 0; i < m_Blocks.size(); ++i)
6961 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
// EnsureDefragmentator: lazily creates the per-vector defragmentator for the
// given frame index; returns the existing one on subsequent calls.
#endif // #if VMA_STATS_STRING_ENABLED 6970 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
6972 uint32_t currentFrameIndex)
6974 if(m_pDefragmentator == VMA_NULL)
6976 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
6982 return m_pDefragmentator;
// Defragment: runs the defragmentator under the vector mutex, accounts moved
// bytes/allocations into *pDefragmentationStats, and destroys blocks that
// became empty (keeping m_MinBlockCount and one cached empty block).
// NOTE(review): budget-decrement lines (maxBytesToMove/maxAllocationsToMove)
// and several stat updates were elided by extraction.
6985 VkResult VmaBlockVector::Defragment(
6987 VkDeviceSize& maxBytesToMove,
6988 uint32_t& maxAllocationsToMove)
6990 if(m_pDefragmentator == VMA_NULL)
6995 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6998 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
7001 if(pDefragmentationStats != VMA_NULL)
7003 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
7004 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
// The defragmentator must respect the caller-imposed budgets.
7007 VMA_ASSERT(bytesMoved <= maxBytesToMove);
7008 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Sweep now-empty blocks from the end; recompute m_HasEmptyBlock.
7014 m_HasEmptyBlock =
false;
7015 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
7017 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
7018 if(pBlock->m_Metadata.IsEmpty())
7020 if(m_Blocks.size() > m_MinBlockCount)
7022 if(pDefragmentationStats != VMA_NULL)
7025 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
7028 VmaVectorRemove(m_Blocks, blockIndex);
7029 pBlock->Destroy(m_hAllocator);
7030 vma_delete(m_hAllocator, pBlock);
// Empty block retained (at/below min count) — remember we have one.
7034 m_HasEmptyBlock =
true;
// DestroyDefragmentator: deletes the lazily created defragmentator, if any.
// Must be called before ~VmaBlockVector (asserted there).
7042 void VmaBlockVector::DestroyDefragmentator()
7044 if(m_pDefragmentator != VMA_NULL)
7046 vma_delete(m_hAllocator, m_pDefragmentator);
7047 m_pDefragmentator = VMA_NULL;
// MakePoolAllocationsLost: makes lost every eligible allocation in every
// block of this pool; optionally reports the total count to the caller.
7051 void VmaBlockVector::MakePoolAllocationsLost(
7052 uint32_t currentFrameIndex,
7053 size_t* pLostAllocationCount)
7055 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7056 size_t lostAllocationCount = 0;
7057 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7059 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
7061 lostAllocationCount += pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
7063 if(pLostAllocationCount != VMA_NULL)
7065 *pLostAllocationCount = lostAllocationCount;
// AddStats: folds every block's allocation statistics into the global
// VmaStats — total, per-memory-type, and per-heap buckets.
7069 void VmaBlockVector::AddStats(
VmaStats* pStats)
7071 const uint32_t memTypeIndex = m_MemoryTypeIndex;
7072 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
7074 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7076 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7078 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
7080 VMA_HEAVY_ASSERT(pBlock->Validate());
7082 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
7083 VmaAddStatInfo(pStats->
total, allocationStatInfo);
7084 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
7085 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: binds the defragmentator to one block vector and records the
// frame index at which defragmentation runs; move counters start at zero.
// NOTE(review): the hAllocator parameter line and the m_BytesMoved(0)
// initializer (original lines ~7093/7099) were lost in extraction.
7092 VmaDefragmentator::VmaDefragmentator(
7094 VmaBlockVector* pBlockVector,
7095 uint32_t currentFrameIndex) :
7096 m_hAllocator(hAllocator),
7097 m_pBlockVector(pBlockVector),
7098 m_CurrentFrameIndex(currentFrameIndex),
7100 m_AllocationsMoved(0),
// Both containers use the allocator's custom allocation callbacks.
7101 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
7102 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
// Destructor: frees the owned BlockInfo objects (iterating backwards so the
// index stays valid as it counts down).
7106 VmaDefragmentator::~VmaDefragmentator()
7108 for(
size_t i = m_Blocks.size(); i--; )
7110 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers one allocation as a candidate to be moved during defragmentation.
// pChanged (optional) will be set to VK_TRUE later if the allocation is moved.
7114 void VmaDefragmentator::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
7116 AllocationInfo allocInfo;
7117 allocInfo.m_hAllocation = hAlloc;
7118 allocInfo.m_pChanged = pChanged;
7119 m_Allocations.push_back(allocInfo);
// Returns a CPU pointer to this block's memory: reuses a mapping made earlier
// by this defragmentator, then a persistent mapping of the block if one
// exists, and only as a last resort maps the block itself (to be undone by
// Unmap()). NOTE(review): the early "return VK_SUCCESS;" lines and the final
// return of res (original lines between the visible ones) were lost in extraction.
7122 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData)
7125 if(m_pMappedDataForDefragmentation)
7127 *ppMappedData = m_pMappedDataForDefragmentation;
7132 if(m_pBlock->GetMappedData())
7134 *ppMappedData = m_pBlock->GetMappedData();
// Map with reference count 1 for the duration of defragmentation.
7139 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
7140 *ppMappedData = m_pMappedDataForDefragmentation;
// Undoes the mapping created by EnsureMapping(); a no-op when the block was
// already persistently mapped (m_pMappedDataForDefragmentation stays null then).
7144 void VmaDefragmentator::BlockInfo::Unmap(
VmaAllocator hAllocator)
7146 if(m_pMappedDataForDefragmentation != VMA_NULL)
7148 m_pBlock->Unmap(hAllocator, 1);
// One pass of defragmentation: walks allocations from the last block backwards
// and tries to relocate each into an earlier block, respecting the byte and
// allocation-count budgets. Returns VK_INCOMPLETE when a budget would be
// exceeded. NOTE(review): several interior lines (loop-control decrements,
// the memory-copy call, early returns) were lost in extraction; comments below
// describe only what is visible.
7152 VkResult VmaDefragmentator::DefragmentRound(
7153 VkDeviceSize maxBytesToMove,
7154 uint32_t maxAllocationsToMove)
7156 if(m_Blocks.empty())
// Start from the last block; SIZE_MAX is a sentinel meaning "index not chosen
// yet" (it forces the while-loop below to pick the last allocation).
7161 size_t srcBlockIndex = m_Blocks.size() - 1;
7162 size_t srcAllocIndex = SIZE_MAX;
7168 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
7170 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
7173 if(srcBlockIndex == 0)
7180 srcAllocIndex = SIZE_MAX;
7185 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
7189 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
7190 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
7192 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
7193 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
7194 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
7195 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front up to (and including) the source block.
7198 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
7200 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
7201 VmaAllocationRequest dstAllocRequest;
7202 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
7203 m_CurrentFrameIndex,
7204 m_pBlockVector->GetFrameInUseCount(),
7205 m_pBlockVector->GetBufferImageGranularity(),
7210 &dstAllocRequest) &&
// Second condition (MoveMakesSense call header lost in extraction) filters
// out moves that would not improve placement.
7212 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
7214 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Enforce the caller-supplied budgets before committing the move.
7217 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
7218 (m_BytesMoved + size > maxBytesToMove))
7220 return VK_INCOMPLETE;
7223 void* pDstMappedData = VMA_NULL;
7224 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
7225 if(res != VK_SUCCESS)
7230 void* pSrcMappedData = VMA_NULL;
7231 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
7232 if(res != VK_SUCCESS)
// Arguments of the memory-copy call (call line itself lost in extraction):
// copy 'size' bytes from the source offset to the newly reserved offset.
7239 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
7240 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
7241 static_cast<size_t>(size));
// Commit: register the allocation in the destination metadata, free the old
// spot, and repoint the allocation handle at its new block/offset.
7243 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
7244 pSrcBlockInfo->m_pBlock->m_Metadata.FreeAtOffset(srcOffset);
7246 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
7248 if(allocInfo.m_pChanged != VMA_NULL)
7250 *allocInfo.m_pChanged = VK_TRUE;
7253 ++m_AllocationsMoved;
7254 m_BytesMoved += size;
7256 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the previous allocation / previous block (decrement statements
// lost in extraction); SIZE_MAX re-arms the index-selection loop above.
7264 if(srcAllocIndex > 0)
7270 if(srcBlockIndex > 0)
7273 srcAllocIndex = SIZE_MAX;
// Top-level defragmentation driver: builds a BlockInfo per device-memory
// block, distributes the registered allocations into their blocks (binary
// search over block pointers), sorts blocks by move-destination preference,
// runs up to two DefragmentRound passes within the budgets, then unmaps any
// blocks that were mapped only for defragmentation.
7283 VkResult VmaDefragmentator::Defragment(
7284 VkDeviceSize maxBytesToMove,
7285 uint32_t maxAllocationsToMove)
7287 if(m_Allocations.empty())
7293 const size_t blockCount = m_pBlockVector->m_Blocks.size();
7294 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7296 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
7297 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
7298 m_Blocks.push_back(pBlockInfo);
// Sort by raw block pointer so allocations can be matched by binary search.
7302 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
7305 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
7307 AllocationInfo& allocInfo = m_Allocations[blockIndex];
// Allocations already lost are simply dropped from consideration.
7309 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7311 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
7312 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
7313 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
7315 (*it)->m_Allocations.push_back(allocInfo);
7323 m_Allocations.clear();
7325 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7327 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
7328 pBlockInfo->CalcHasNonMovableAllocations();
// (Sic: "Descecnding" is the original's spelling of this method name.)
7329 pBlockInfo->SortAllocationsBySizeDescecnding();
7333 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// At most two rounds; stops early on VK_INCOMPLETE or error.
7336 VkResult result = VK_SUCCESS;
7337 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
7339 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
7343 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7345 m_Blocks[blockIndex]->Unmap(m_hAllocator);
// Heuristic deciding whether relocating an allocation from (srcBlockIndex,
// srcOffset) to (dstBlockIndex, dstOffset) improves packing. The comparisons
// visible here order by block index first, then by offset within the same
// block. NOTE(review): the return statements of each branch were lost in
// extraction — presumably "move towards earlier block / lower offset" returns
// true; confirm against the full source.
7351 bool VmaDefragmentator::MoveMakesSense(
7352 size_t dstBlockIndex, VkDeviceSize dstOffset,
7353 size_t srcBlockIndex, VkDeviceSize srcOffset)
7355 if(dstBlockIndex < srcBlockIndex)
7359 if(dstBlockIndex > srcBlockIndex)
7363 if(dstOffset < srcOffset)
// NOTE(review): this is the member-initializer list and body of the
// VmaAllocator_T constructor; the constructor signature (original lines
// ~7370-7375) was lost in extraction. It caches device/physical-device
// handles and callbacks, zeroes all cached state, applies optional per-heap
// size limits, and creates one default block vector plus one dedicated-
// allocation list per memory type.
7376 m_hDevice(pCreateInfo->device),
7377 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
// Fall back to empty callbacks when the user supplied none.
7378 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
7379 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
7380 m_PreferredLargeHeapBlockSize(0),
7381 m_PhysicalDevice(pCreateInfo->physicalDevice),
7382 m_CurrentFrameIndex(0),
7383 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks()))
7387 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
7388 memset(&m_MemProps, 0,
sizeof(m_MemProps));
7389 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
7391 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
7392 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// VK_WHOLE_SIZE means "no limit" for a heap until pHeapSizeLimit says otherwise.
7394 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
7396 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
7407 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
7408 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Apply user heap limits; also clamp the reported heap size so block-size
// heuristics respect the limit.
7415 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
7417 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
7418 if(limit != VK_WHOLE_SIZE)
7420 m_HeapSizeLimit[heapIndex] = limit;
7421 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
7423 m_MemProps.memoryHeaps[heapIndex].size = limit;
7429 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7431 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
7433 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
7439 GetBufferImageGranularity(),
7444 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
// Destructor: all pools must have been destroyed by the user beforehand;
// frees the per-memory-type dedicated-allocation lists and block vectors.
7448 VmaAllocator_T::~VmaAllocator_T()
7450 VMA_ASSERT(m_Pools.empty());
7452 for(
size_t i = GetMemoryTypeCount(); i--; )
7454 vma_delete(
this, m_pDedicatedAllocations[i]);
7455 vma_delete(
this, m_pBlockVectors[i]);
// Fills m_VulkanFunctions: first from statically linked Vulkan prototypes
// (when VMA_STATIC_VULKAN_FUNCTIONS==1), then overrides any entry the user
// supplied in pVulkanFunctions, and finally asserts that every required
// pointer is set. Dedicated-allocation entry points are fetched via
// vkGetDeviceProcAddr only when the KHR_dedicated_allocation path is enabled.
7459 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
7461 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 7462 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
7463 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
7464 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
7465 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
7466 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
7467 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
7468 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
7469 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
7470 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
7471 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
7472 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
7473 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
7474 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
7475 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
7476 if(m_UseKhrDedicatedAllocation)
7478 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
7479 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
7480 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
7481 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// User-supplied overrides win over the statically imported defaults.
7483 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 7485 #define VMA_COPY_IF_NOT_NULL(funcName) \ 7486 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 7488 if(pVulkanFunctions != VMA_NULL)
7490 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
7491 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
7492 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
7493 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
7494 VMA_COPY_IF_NOT_NULL(vkMapMemory);
7495 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
7496 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
7497 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
7498 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
7499 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
7500 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
7501 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
7502 VMA_COPY_IF_NOT_NULL(vkCreateImage);
7503 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
7504 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
7505 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Validate the final function table; missing entries are programmer errors.
7508 #undef VMA_COPY_IF_NOT_NULL 7512 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
7513 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
7514 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
7515 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
7516 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
7517 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
7518 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
7519 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
7520 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
7521 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
7522 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
7523 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
7524 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
7525 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
7526 if(m_UseKhrDedicatedAllocation)
7528 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
7529 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
7533 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
7535 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7536 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
7537 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
7538 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates memory of one specific memory type: prefers a dedicated Vulkan
// allocation when forced/requested or when the request exceeds half the
// preferred block size; otherwise sub-allocates from the type's block vector,
// falling back to dedicated memory on failure. NOTE(review): many interior
// lines (finalCreateInfo setup, most call arguments, returns) were lost in
// extraction; comments describe only the visible flow.
7541 VkResult VmaAllocator_T::AllocateMemoryOfType(
7542 const VkMemoryRequirements& vkMemReq,
7543 bool dedicatedAllocation,
7544 VkBuffer dedicatedBuffer,
7545 VkImage dedicatedImage,
7547 uint32_t memTypeIndex,
7548 VmaSuballocationType suballocType,
7551 VMA_ASSERT(pAllocation != VMA_NULL);
7552 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Mapping-related flags only make sense for HOST_VISIBLE memory types.
7558 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
7563 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
7564 VMA_ASSERT(blockVector);
7566 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
// Heuristic: requests larger than half a block go straight to dedicated memory.
7567 bool preferDedicatedMemory =
7568 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
7569 dedicatedAllocation ||
7571 vkMemReq.size > preferredBlockSize / 2;
7573 if(preferDedicatedMemory &&
7575 finalCreateInfo.
pool == VK_NULL_HANDLE)
7584 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7588 return AllocateDedicatedMemory(
// Normal path: sub-allocate from the block vector for this memory type.
7602 VkResult res = blockVector->Allocate(
7604 m_CurrentFrameIndex.load(),
7609 if(res == VK_SUCCESS)
// NEVER_ALLOCATE forbids the dedicated-memory fallback (check lost in extraction).
7617 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7621 res = AllocateDedicatedMemory(
7627 finalCreateInfo.pUserData,
7631 if(res == VK_SUCCESS)
7634 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
7640 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates a whole VkDeviceMemory object for a single allocation. When
// KHR_dedicated_allocation is enabled, chains VkMemoryDedicatedAllocateInfoKHR
// for the given buffer or image. Optionally persistently maps the memory,
// wraps it in a VmaAllocation_T, and records it in the sorted per-type
// dedicated-allocation list. NOTE(review): some parameter lines and the
// map-flag condition were lost in extraction.
7647 VkResult VmaAllocator_T::AllocateDedicatedMemory(
7649 VmaSuballocationType suballocType,
7650 uint32_t memTypeIndex,
7652 bool isUserDataString,
7654 VkBuffer dedicatedBuffer,
7655 VkImage dedicatedImage,
7658 VMA_ASSERT(pAllocation);
7660 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
7661 allocInfo.memoryTypeIndex = memTypeIndex;
7662 allocInfo.allocationSize = size;
7664 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
7665 if(m_UseKhrDedicatedAllocation)
// At most one of dedicatedBuffer/dedicatedImage may be set.
7667 if(dedicatedBuffer != VK_NULL_HANDLE)
7669 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
7670 dedicatedAllocInfo.buffer = dedicatedBuffer;
7671 allocInfo.pNext = &dedicatedAllocInfo;
7673 else if(dedicatedImage != VK_NULL_HANDLE)
7675 dedicatedAllocInfo.image = dedicatedImage;
7676 allocInfo.pNext = &dedicatedAllocInfo;
7681 VkDeviceMemory hMemory = VK_NULL_HANDLE;
7682 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
7685 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
7689 void* pMappedData = VMA_NULL;
// Persistent-map path (the flag check on original line ~7690 lost in extraction).
7692 res = (*m_VulkanFunctions.vkMapMemory)(
7701 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
// Roll back the device-memory allocation if mapping failed.
7702 FreeVulkanMemory(memTypeIndex, size, hMemory);
7707 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
7708 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
7709 (*pAllocation)->SetUserData(
this, pUserData);
// Register under the per-type mutex so FreeDedicatedMemory can find it later.
7713 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7714 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7715 VMA_ASSERT(pDedicatedAllocations);
7716 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
7719 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries memory requirements for a buffer. With KHR_dedicated_allocation it
// uses vkGetBufferMemoryRequirements2KHR and also reports whether a dedicated
// allocation is required/preferred; otherwise it falls back to the core query
// and reports false for both.
7724 void VmaAllocator_T::GetBufferMemoryRequirements(
7726 VkMemoryRequirements& memReq,
7727 bool& requiresDedicatedAllocation,
7728 bool& prefersDedicatedAllocation)
const 7730 if(m_UseKhrDedicatedAllocation)
7732 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
7733 memReqInfo.buffer = hBuffer;
7735 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7737 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
// Chain the dedicated-requirements struct so the driver can fill it.
7738 memReq2.pNext = &memDedicatedReq;
7740 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7742 memReq = memReq2.memoryRequirements;
7743 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7744 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
7748 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
7749 requiresDedicatedAllocation =
false;
7750 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR with a chained
// VkMemoryDedicatedRequirementsKHR when the extension path is enabled,
// otherwise the core query with both dedicated flags reported false.
7754 void VmaAllocator_T::GetImageMemoryRequirements(
7756 VkMemoryRequirements& memReq,
7757 bool& requiresDedicatedAllocation,
7758 bool& prefersDedicatedAllocation)
const 7760 if(m_UseKhrDedicatedAllocation)
7762 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
7763 memReqInfo.image = hImage;
7765 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7767 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7768 memReq2.pNext = &memDedicatedReq;
7770 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7772 memReq = memReq2.memoryRequirements;
7773 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7774 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
7778 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
7779 requiresDedicatedAllocation =
false;
7780 prefersDedicatedAllocation =
false;
// Main allocation entry: validates mutually exclusive create flags, routes
// pool allocations to the pool's block vector, otherwise iterates candidate
// memory types (best first) until AllocateMemoryOfType succeeds, masking out
// each failed type from memoryTypeBits. NOTE(review): several flag checks,
// FindMemoryTypeIndex calls and returns were lost in extraction.
7784 VkResult VmaAllocator_T::AllocateMemory(
7785 const VkMemoryRequirements& vkMemReq,
7786 bool requiresDedicatedAllocation,
7787 bool prefersDedicatedAllocation,
7788 VkBuffer dedicatedBuffer,
7789 VkImage dedicatedImage,
7791 VmaSuballocationType suballocType,
// Invalid flag combinations are programmer errors -> assert + error code.
7797 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
7798 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7803 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
7804 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7806 if(requiresDedicatedAllocation)
7810 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
7811 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Dedicated allocation cannot come from a custom pool.
7813 if(createInfo.
pool != VK_NULL_HANDLE)
7815 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
7816 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7819 if((createInfo.
pool != VK_NULL_HANDLE) &&
7822 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
7823 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Pool path: delegate directly to the pool's block vector.
7826 if(createInfo.
pool != VK_NULL_HANDLE)
7828 return createInfo.
pool->m_BlockVector.Allocate(
7830 m_CurrentFrameIndex.load(),
// Default path: try memory types from best match, masking failures out.
7839 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
7840 uint32_t memTypeIndex = UINT32_MAX;
7842 if(res == VK_SUCCESS)
7844 res = AllocateMemoryOfType(
7846 requiresDedicatedAllocation || prefersDedicatedAllocation,
7854 if(res == VK_SUCCESS)
// Remove the failed type and search for the next candidate.
7864 memoryTypeBits &= ~(1u << memTypeIndex);
7867 if(res == VK_SUCCESS)
7869 res = AllocateMemoryOfType(
7871 requiresDedicatedAllocation || prefersDedicatedAllocation,
7879 if(res == VK_SUCCESS)
7889 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation: block sub-allocations are returned to the owning
// block vector (custom pool's or the default per-type one); dedicated
// allocations release their whole VkDeviceMemory. Lost allocations skip the
// release step. Finally clears user data and destroys the handle object.
7900 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
7902 VMA_ASSERT(allocation);
// Only release backing memory if the allocation is not already lost.
7904 if(allocation->CanBecomeLost() ==
false ||
7905 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7907 switch(allocation->GetType())
7909 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7911 VmaBlockVector* pBlockVector = VMA_NULL;
7912 VmaPool hPool = allocation->GetPool();
7913 if(hPool != VK_NULL_HANDLE)
7915 pBlockVector = &hPool->m_BlockVector;
7919 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7920 pBlockVector = m_pBlockVectors[memTypeIndex];
7922 pBlockVector->Free(allocation);
7925 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7926 FreeDedicatedMemory(allocation);
7933 allocation->SetUserData(
this, VMA_NULL);
7934 vma_delete(
this, allocation);
// Builds global statistics: zero-initializes all stat buckets, accumulates
// the default block vectors, all custom pools (under the pools mutex), and
// every dedicated allocation (under each per-type mutex), then post-processes
// the totals. NOTE(review): the InitStatInfo calls inside the first two loops
// and the allocationStatInfo declaration were lost in extraction.
7937 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
7940 InitStatInfo(pStats->
total);
7941 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
7943 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default block vectors, one per memory type.
7947 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7949 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
7950 VMA_ASSERT(pBlockVector);
7951 pBlockVector->AddStats(pStats);
// Custom pools.
7956 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7957 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
7959 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
// Dedicated allocations.
7964 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7966 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7967 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7968 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7969 VMA_ASSERT(pDedicatedAllocVector);
7970 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
7973 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
7974 VmaAddStatInfo(pStats->
total, allocationStatInfo);
7975 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
7976 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Final averages/derived values per bucket.
7981 VmaPostprocessCalcStatInfo(pStats->
total);
7982 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
7983 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
7984 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
7985 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// PCI vendor ID of AMD (4098 == 0x1002).
// NOTE(review): its usage is not visible in this chunk — presumably for
// vendor-specific behavior; confirm against the full source.
7988 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Allocator-level defragmentation: zeroes the out-arrays, collects eligible
// allocations (block type, HOST_VISIBLE memory, not lost) into per-block-
// vector defragmentators, runs defragmentation for every default vector and
// every pool within the optional budgets, then destroys all defragmentators.
// NOTE(review): the validity checks on hAlloc, budget extraction from
// pDefragmentationInfo, and some loop headers were lost in extraction.
7990 VkResult VmaAllocator_T::Defragment(
7992 size_t allocationCount,
7993 VkBool32* pAllocationsChanged,
7997 if(pAllocationsChanged != VMA_NULL)
// NOTE(review): sizeof(*pAllocationsChanged) zeroes only ONE element, not
// allocationCount of them — looks like a bug (compare upstream VMA, which
// multiplies by allocationCount); confirm against the full source.
7999 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
8001 if(pDefragmentationStats != VMA_NULL)
8003 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
8006 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
8008 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
8010 const size_t poolCount = m_Pools.size();
// Dispatch each eligible allocation to its block vector's defragmentator.
8013 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
8017 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
8019 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
8021 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
8023 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
8025 VmaBlockVector* pAllocBlockVector = VMA_NULL;
8027 const VmaPool hAllocPool = hAlloc->GetPool();
8029 if(hAllocPool != VK_NULL_HANDLE)
8031 pAllocBlockVector = &hAllocPool->GetBlockVector();
8036 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
8039 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
8041 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
8042 &pAllocationsChanged[allocIndex] : VMA_NULL;
8043 pDefragmentator->AddAllocation(hAlloc, pChanged);
8047 VkResult result = VK_SUCCESS;
// Default budgets: unlimited, unless pDefragmentationInfo overrides them
// (the override lines were lost in extraction).
8051 VkDeviceSize maxBytesToMove = SIZE_MAX;
8052 uint32_t maxAllocationsToMove = UINT32_MAX;
8053 if(pDefragmentationInfo != VMA_NULL)
8060 for(uint32_t memTypeIndex = 0;
8061 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
8065 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8067 result = m_pBlockVectors[memTypeIndex]->Defragment(
8068 pDefragmentationStats,
8070 maxAllocationsToMove);
8075 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
8077 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
8078 pDefragmentationStats,
8080 maxAllocationsToMove);
// Cleanup: destroy all defragmentators, in reverse order.
8086 for(
size_t poolIndex = poolCount; poolIndex--; )
8088 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
8092 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
8094 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8096 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// NOTE(review): this is the body of VmaAllocator_T::GetAllocationInfo; its
// signature (original lines ~8099-8104) was lost in extraction. For
// allocations that can become lost it retries a compare-exchange on the
// last-use frame index (touching the allocation) before reporting; lost
// allocations report zeroed memory fields. Otherwise it fills
// pAllocationInfo straight from the allocation.
8105 if(hAllocation->CanBecomeLost())
8111 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
8112 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost: report size/userdata but no memory/offset.
8115 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
8119 pAllocationInfo->
offset = 0;
8120 pAllocationInfo->
size = hAllocation->GetSize();
8122 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report as-is.
8125 else if(localLastUseFrameIndex == localCurrFrameIndex)
8127 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
8128 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
8129 pAllocationInfo->
offset = hAllocation->GetOffset();
8130 pAllocationInfo->
size = hAllocation->GetSize();
8132 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise try to bump last-use to the current frame (lock-free CAS loop).
8137 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
8139 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost path: fill all fields, including the mapped pointer.
8146 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
8147 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
8148 pAllocationInfo->
offset = hAllocation->GetOffset();
8149 pAllocationInfo->
size = hAllocation->GetSize();
8150 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
8151 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Marks a lost-capable allocation as used in the current frame via the same
// compare-exchange loop as GetAllocationInfo, returning whether it is still
// valid. NOTE(review): the return statements for the lost / already-current /
// non-lost cases were lost in extraction.
8155 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
8158 if(hAllocation->CanBecomeLost())
8160 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
8161 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
8164 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
8168 else if(localLastUseFrameIndex == localCurrFrameIndex)
8174 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
8176 localLastUseFrameIndex = localCurrFrameIndex;
// NOTE(review): this is the body of VmaAllocator_T::CreatePool; its signature
// and the newCreateInfo normalization (original lines before 8189) were lost
// in extraction. Creates the pool object, pre-creates its minimum blocks
// (destroying the pool on failure), and registers it in the sorted pool list.
8189 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
8202 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
8204 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
8205 if(res != VK_SUCCESS)
// Roll back pool creation when the minimum blocks cannot be allocated.
8207 vma_delete(
this, *pPool);
8214 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8215 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Unregisters a pool from the sorted pool list (under the pools mutex) and
// destroys it; asserts if the pool was never registered.
8221 void VmaAllocator_T::DestroyPool(
VmaPool pool)
8225 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8226 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
8227 VMA_ASSERT(success &&
"Pool not found in Allocator.");
8230 vma_delete(
this, pool);
// NOTE(review): body of VmaAllocator_T::GetPoolStats — the surrounding
// signature was lost in extraction; it simply delegates to the pool's
// block vector.
8235 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Atomically publishes the application's current frame index, used by the
// lost-allocation logic.
8238 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
8240 m_CurrentFrameIndex.store(frameIndex);
// Delegates lost-allocation processing for a pool to its block vector, using
// the current frame index. (The hPool parameter line was lost in extraction.)
8243 void VmaAllocator_T::MakePoolAllocationsLost(
8245 size_t* pLostAllocationCount)
8247 hPool->m_BlockVector.MakePoolAllocationsLost(
8248 m_CurrentFrameIndex.load(),
8249 pLostAllocationCount);
// Creates a dummy allocation handle that is permanently in the "lost" state
// (frame index VMA_FRAME_INDEX_LOST, no user-data string).
8252 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
8254 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
8255 (*pAllocation)->InitLost();
// Wraps vkAllocateMemory with per-heap budget accounting: when a heap has a
// user-imposed size limit, the allocation is charged against the remaining
// budget under the limit mutex (or rejected with OUT_OF_DEVICE_MEMORY);
// afterwards the optional user pfnAllocate callback is invoked.
8258 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
8260 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
8263 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
8265 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
8266 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
8268 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
8269 if(res == VK_SUCCESS)
// Charge the heap budget only on success.
8271 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
8276 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// No limit on this heap: allocate directly.
8281 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
8284 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
8286 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
// Counterpart of AllocateVulkanMemory: invokes the optional user pfnFree
// callback, frees the device memory, and refunds the heap-size budget when a
// limit is active for the memory type's heap.
8292 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
8294 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
8296 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
8299 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
8301 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
8302 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
8304 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
8305 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation for CPU access. Lost-capable allocations cannot be
// mapped. Block sub-allocations map the whole block (ref-counted) and offset
// the returned pointer; dedicated allocations delegate to their own map.
8309 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
8311 if(hAllocation->CanBecomeLost())
8313 return VK_ERROR_MEMORY_MAP_FAILED;
8316 switch(hAllocation->GetType())
8318 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8320 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
8321 char *pBytes = VMA_NULL;
8322 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
8323 if(res == VK_SUCCESS)
// Return a pointer into the block at this allocation's offset and bump
// the allocation's own map counter.
8325 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
8326 hAllocation->BlockAllocMap();
8330 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8331 return hAllocation->DedicatedAllocMap(
this, ppData);
8334 return VK_ERROR_MEMORY_MAP_FAILED;
// NOTE(review): body of VmaAllocator_T::Unmap — its signature (original lines
// ~8337-8339) was lost in extraction. Reverses Map(): decrements the
// allocation map counter and unmaps the owning block (ref-counted), or
// delegates to the dedicated allocation's unmap.
8340 switch(hAllocation->GetType())
8342 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8344 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
8345 hAllocation->BlockAllocUnmap();
8346 pBlock->Unmap(
this, 1);
8349 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8350 hAllocation->DedicatedAllocUnmap(
this);
// Binds a buffer to the allocation's memory: dedicated allocations call
// vkBindBufferMemory directly on their VkDeviceMemory; block sub-allocations
// delegate to the block (which applies the sub-allocation offset).
8357 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
8359 VkResult res = VK_SUCCESS;
8360 switch(hAllocation->GetType())
8362 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8363 res = GetVulkanFunctions().vkBindBufferMemory(
8366 hAllocation->GetMemory(),
8369 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8371 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
8372 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
8373 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
// Image counterpart of BindBufferMemory: dedicated allocations call
// vkBindImageMemory directly; block sub-allocations delegate to the block.
8382 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
8384 VkResult res = VK_SUCCESS;
8385 switch(hAllocation->GetType())
8387 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8388 res = GetVulkanFunctions().vkBindImageMemory(
8391 hAllocation->GetMemory(),
8394 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8396 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
8397 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
8398 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
// Releases a dedicated allocation: removes it from the per-type sorted list
// (under that type's mutex), unmaps the memory if it was persistently mapped,
// and frees the VkDeviceMemory (refunding any heap budget via FreeVulkanMemory).
8407 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
8409 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
8411 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
8413 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8414 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
8415 VMA_ASSERT(pDedicatedAllocations);
8416 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
8417 VMA_ASSERT(success);
8420 VkDeviceMemory hMemory = allocation->GetMemory();
8422 if(allocation->GetMappedData() != VMA_NULL)
8424 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
8427 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
8429 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
8432 #if VMA_STATS_STRING_ENABLED 8434 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
8436 bool dedicatedAllocationsStarted =
false;
8437 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8439 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8440 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
8441 VMA_ASSERT(pDedicatedAllocVector);
8442 if(pDedicatedAllocVector->empty() ==
false)
8444 if(dedicatedAllocationsStarted ==
false)
8446 dedicatedAllocationsStarted =
true;
8447 json.WriteString(
"DedicatedAllocations");
8451 json.BeginString(
"Type ");
8452 json.ContinueString(memTypeIndex);
8457 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
8460 json.BeginObject(
true);
8462 json.WriteString(
"Type");
8463 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
8465 json.WriteString(
"Size");
8466 json.WriteNumber(hAlloc->GetSize());
8468 const void* pUserData = hAlloc->GetUserData();
8469 if(pUserData != VMA_NULL)
8471 json.WriteString(
"UserData");
8472 if(hAlloc->IsUserDataString())
8474 json.WriteString((
const char*)pUserData);
8479 json.ContinueString_Pointer(pUserData);
8490 if(dedicatedAllocationsStarted)
8496 bool allocationsStarted =
false;
8497 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8499 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
8501 if(allocationsStarted ==
false)
8503 allocationsStarted =
true;
8504 json.WriteString(
"DefaultPools");
8508 json.BeginString(
"Type ");
8509 json.ContinueString(memTypeIndex);
8512 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
8515 if(allocationsStarted)
8522 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8523 const size_t poolCount = m_Pools.size();
8526 json.WriteString(
"Pools");
8528 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
8530 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
8537 #endif // #if VMA_STATS_STRING_ENABLED 8539 static VkResult AllocateMemoryForImage(
8543 VmaSuballocationType suballocType,
8546 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
8548 VkMemoryRequirements vkMemReq = {};
8549 bool requiresDedicatedAllocation =
false;
8550 bool prefersDedicatedAllocation =
false;
8551 allocator->GetImageMemoryRequirements(image, vkMemReq,
8552 requiresDedicatedAllocation, prefersDedicatedAllocation);
8554 return allocator->AllocateMemory(
8556 requiresDedicatedAllocation,
8557 prefersDedicatedAllocation,
8560 *pAllocationCreateInfo,
8572 VMA_ASSERT(pCreateInfo && pAllocator);
8573 VMA_DEBUG_LOG(
"vmaCreateAllocator");
8581 if(allocator != VK_NULL_HANDLE)
8583 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
8584 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
8585 vma_delete(&allocationCallbacks, allocator);
8591 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
8593 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
8594 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
8599 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
8601 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
8602 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
8607 uint32_t memoryTypeIndex,
8608 VkMemoryPropertyFlags* pFlags)
8610 VMA_ASSERT(allocator && pFlags);
8611 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
8612 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
8617 uint32_t frameIndex)
8619 VMA_ASSERT(allocator);
8620 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
8622 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8624 allocator->SetCurrentFrameIndex(frameIndex);
8631 VMA_ASSERT(allocator && pStats);
8632 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8633 allocator->CalculateStats(pStats);
8636 #if VMA_STATS_STRING_ENABLED 8640 char** ppStatsString,
8641 VkBool32 detailedMap)
8643 VMA_ASSERT(allocator && ppStatsString);
8644 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8646 VmaStringBuilder sb(allocator);
8648 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
8652 allocator->CalculateStats(&stats);
8654 json.WriteString(
"Total");
8655 VmaPrintStatInfo(json, stats.
total);
8657 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
8659 json.BeginString(
"Heap ");
8660 json.ContinueString(heapIndex);
8664 json.WriteString(
"Size");
8665 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
8667 json.WriteString(
"Flags");
8668 json.BeginArray(
true);
8669 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
8671 json.WriteString(
"DEVICE_LOCAL");
8677 json.WriteString(
"Stats");
8678 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
8681 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
8683 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
8685 json.BeginString(
"Type ");
8686 json.ContinueString(typeIndex);
8691 json.WriteString(
"Flags");
8692 json.BeginArray(
true);
8693 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
8694 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
8696 json.WriteString(
"DEVICE_LOCAL");
8698 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8700 json.WriteString(
"HOST_VISIBLE");
8702 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
8704 json.WriteString(
"HOST_COHERENT");
8706 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
8708 json.WriteString(
"HOST_CACHED");
8710 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
8712 json.WriteString(
"LAZILY_ALLOCATED");
8718 json.WriteString(
"Stats");
8719 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
8728 if(detailedMap == VK_TRUE)
8730 allocator->PrintDetailedMap(json);
8736 const size_t len = sb.GetLength();
8737 char*
const pChars = vma_new_array(allocator,
char, len + 1);
8740 memcpy(pChars, sb.GetData(), len);
8743 *ppStatsString = pChars;
8750 if(pStatsString != VMA_NULL)
8752 VMA_ASSERT(allocator);
8753 size_t len = strlen(pStatsString);
8754 vma_delete_array(allocator, pStatsString, len + 1);
8758 #endif // #if VMA_STATS_STRING_ENABLED 8765 uint32_t memoryTypeBits,
8767 uint32_t* pMemoryTypeIndex)
8769 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8770 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8771 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8778 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
8782 switch(pAllocationCreateInfo->
usage)
8787 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8790 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
8793 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8794 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8797 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8798 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
8804 *pMemoryTypeIndex = UINT32_MAX;
8805 uint32_t minCost = UINT32_MAX;
8806 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
8807 memTypeIndex < allocator->GetMemoryTypeCount();
8808 ++memTypeIndex, memTypeBit <<= 1)
8811 if((memTypeBit & memoryTypeBits) != 0)
8813 const VkMemoryPropertyFlags currFlags =
8814 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
8816 if((requiredFlags & ~currFlags) == 0)
8819 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
8821 if(currCost < minCost)
8823 *pMemoryTypeIndex = memTypeIndex;
8833 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
8838 const VkBufferCreateInfo* pBufferCreateInfo,
8840 uint32_t* pMemoryTypeIndex)
8842 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8843 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
8844 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8845 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8847 const VkDevice hDev = allocator->m_hDevice;
8848 VkBuffer hBuffer = VK_NULL_HANDLE;
8849 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
8850 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
8851 if(res == VK_SUCCESS)
8853 VkMemoryRequirements memReq = {};
8854 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
8855 hDev, hBuffer, &memReq);
8859 memReq.memoryTypeBits,
8860 pAllocationCreateInfo,
8863 allocator->GetVulkanFunctions().vkDestroyBuffer(
8864 hDev, hBuffer, allocator->GetAllocationCallbacks());
8871 const VkImageCreateInfo* pImageCreateInfo,
8873 uint32_t* pMemoryTypeIndex)
8875 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8876 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
8877 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8878 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8880 const VkDevice hDev = allocator->m_hDevice;
8881 VkImage hImage = VK_NULL_HANDLE;
8882 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
8883 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
8884 if(res == VK_SUCCESS)
8886 VkMemoryRequirements memReq = {};
8887 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
8888 hDev, hImage, &memReq);
8892 memReq.memoryTypeBits,
8893 pAllocationCreateInfo,
8896 allocator->GetVulkanFunctions().vkDestroyImage(
8897 hDev, hImage, allocator->GetAllocationCallbacks());
8907 VMA_ASSERT(allocator && pCreateInfo && pPool);
8909 VMA_DEBUG_LOG(
"vmaCreatePool");
8911 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8913 return allocator->CreatePool(pCreateInfo, pPool);
8920 VMA_ASSERT(allocator);
8922 if(pool == VK_NULL_HANDLE)
8927 VMA_DEBUG_LOG(
"vmaDestroyPool");
8929 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8931 allocator->DestroyPool(pool);
8939 VMA_ASSERT(allocator && pool && pPoolStats);
8941 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8943 allocator->GetPoolStats(pool, pPoolStats);
8949 size_t* pLostAllocationCount)
8951 VMA_ASSERT(allocator && pool);
8953 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8955 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
8960 const VkMemoryRequirements* pVkMemoryRequirements,
8965 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
8967 VMA_DEBUG_LOG(
"vmaAllocateMemory");
8969 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8971 VkResult result = allocator->AllocateMemory(
8972 *pVkMemoryRequirements,
8978 VMA_SUBALLOCATION_TYPE_UNKNOWN,
8981 if(pAllocationInfo && result == VK_SUCCESS)
8983 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8996 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8998 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
9000 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9002 VkMemoryRequirements vkMemReq = {};
9003 bool requiresDedicatedAllocation =
false;
9004 bool prefersDedicatedAllocation =
false;
9005 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
9006 requiresDedicatedAllocation,
9007 prefersDedicatedAllocation);
9009 VkResult result = allocator->AllocateMemory(
9011 requiresDedicatedAllocation,
9012 prefersDedicatedAllocation,
9016 VMA_SUBALLOCATION_TYPE_BUFFER,
9019 if(pAllocationInfo && result == VK_SUCCESS)
9021 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9034 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
9036 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
9038 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9040 VkResult result = AllocateMemoryForImage(
9044 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
9047 if(pAllocationInfo && result == VK_SUCCESS)
9049 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9059 VMA_ASSERT(allocator && allocation);
9061 VMA_DEBUG_LOG(
"vmaFreeMemory");
9063 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9065 allocator->FreeMemory(allocation);
9073 VMA_ASSERT(allocator && allocation && pAllocationInfo);
9075 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9077 allocator->GetAllocationInfo(allocation, pAllocationInfo);
9084 VMA_ASSERT(allocator && allocation);
9086 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9088 return allocator->TouchAllocation(allocation);
9096 VMA_ASSERT(allocator && allocation);
9098 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9100 allocation->SetUserData(allocator, pUserData);
9107 VMA_ASSERT(allocator && pAllocation);
9109 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
9111 allocator->CreateLostAllocation(pAllocation);
9119 VMA_ASSERT(allocator && allocation && ppData);
9121 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9123 return allocator->Map(allocation, ppData);
9130 VMA_ASSERT(allocator && allocation);
9132 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9134 allocator->Unmap(allocation);
9140 size_t allocationCount,
9141 VkBool32* pAllocationsChanged,
9145 VMA_ASSERT(allocator && pAllocations);
9147 VMA_DEBUG_LOG(
"vmaDefragment");
9149 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9151 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
9159 VMA_ASSERT(allocator && allocation && buffer);
9161 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
9163 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9165 return allocator->BindBufferMemory(allocation, buffer);
9173 VMA_ASSERT(allocator && allocation && image);
9175 VMA_DEBUG_LOG(
"vmaBindImageMemory");
9177 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9179 return allocator->BindImageMemory(allocation, image);
9184 const VkBufferCreateInfo* pBufferCreateInfo,
9190 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
9192 VMA_DEBUG_LOG(
"vmaCreateBuffer");
9194 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9196 *pBuffer = VK_NULL_HANDLE;
9197 *pAllocation = VK_NULL_HANDLE;
9200 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
9201 allocator->m_hDevice,
9203 allocator->GetAllocationCallbacks(),
9208 VkMemoryRequirements vkMemReq = {};
9209 bool requiresDedicatedAllocation =
false;
9210 bool prefersDedicatedAllocation =
false;
9211 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
9212 requiresDedicatedAllocation, prefersDedicatedAllocation);
9216 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
9218 VMA_ASSERT(vkMemReq.alignment %
9219 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
9221 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
9223 VMA_ASSERT(vkMemReq.alignment %
9224 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
9226 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
9228 VMA_ASSERT(vkMemReq.alignment %
9229 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
9233 res = allocator->AllocateMemory(
9235 requiresDedicatedAllocation,
9236 prefersDedicatedAllocation,
9239 *pAllocationCreateInfo,
9240 VMA_SUBALLOCATION_TYPE_BUFFER,
9245 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
9249 if(pAllocationInfo != VMA_NULL)
9251 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9255 allocator->FreeMemory(*pAllocation);
9256 *pAllocation = VK_NULL_HANDLE;
9257 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
9258 *pBuffer = VK_NULL_HANDLE;
9261 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
9262 *pBuffer = VK_NULL_HANDLE;
9273 if(buffer != VK_NULL_HANDLE)
9275 VMA_ASSERT(allocator);
9277 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
9279 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9281 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
9283 allocator->FreeMemory(allocation);
9289 const VkImageCreateInfo* pImageCreateInfo,
9295 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
9297 VMA_DEBUG_LOG(
"vmaCreateImage");
9299 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9301 *pImage = VK_NULL_HANDLE;
9302 *pAllocation = VK_NULL_HANDLE;
9305 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
9306 allocator->m_hDevice,
9308 allocator->GetAllocationCallbacks(),
9312 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
9313 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
9314 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
9317 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
9321 res = allocator->BindImageMemory(*pAllocation, *pImage);
9325 if(pAllocationInfo != VMA_NULL)
9327 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9331 allocator->FreeMemory(*pAllocation);
9332 *pAllocation = VK_NULL_HANDLE;
9333 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
9334 *pImage = VK_NULL_HANDLE;
9337 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
9338 *pImage = VK_NULL_HANDLE;
9349 if(image != VK_NULL_HANDLE)
9351 VMA_ASSERT(allocator);
9353 VMA_DEBUG_LOG(
"vmaDestroyImage");
9355 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9357 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
9359 allocator->FreeMemory(allocation);
9363 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1153
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1415
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1178
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
Represents single memory allocation.
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1163
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1372
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1157
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1745
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1175
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1944
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1591
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1645
Definition: vk_mem_alloc.h:1452
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1146
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1490
Definition: vk_mem_alloc.h:1399
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1187
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1240
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1172
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1403
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1305
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1160
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1304
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:1168
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1948
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1204
VmaStatInfo total
Definition: vk_mem_alloc.h:1314
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1956
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1474
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1939
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1161
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1088
Represents main object of this library initialized.
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1181
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1599
Definition: vk_mem_alloc.h:1593
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1755
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1158
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1511
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1615
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1651
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1144
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1602
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VmaMemoryUsage
Definition: vk_mem_alloc.h:1350
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1934
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1952
Definition: vk_mem_alloc.h:1389
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1498
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1159
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1310
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1094
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1115
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1120
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1954
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1485
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1661
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1154
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1293
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1610
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1107
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1459
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1306
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1111
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1605
Definition: vk_mem_alloc.h:1398
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1480
Definition: vk_mem_alloc.h:1471
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1296
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1156
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:1623
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1190
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1654
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1469
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1504
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1228
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1312
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1439
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1305
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1165
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1109
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1164
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1637
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1769
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1184
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1305
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1302
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1642
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1750
Definition: vk_mem_alloc.h:1467
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1950
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1152
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:1167
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1300
Definition: vk_mem_alloc.h:1355
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1595
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1298
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1162
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1166
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1426
Definition: vk_mem_alloc.h:1382
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1764
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1142
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1155
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1731
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1573
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1306
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
Definition: vk_mem_alloc.h:1465
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1313
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1648
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1306
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1736