23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 1084 #include <vulkan/vulkan.h> 1086 #if !defined(VMA_DEDICATED_ALLOCATION) 1087 #if VK_KHR_get_memory_requirements2 && VK_KHR_dedicated_allocation 1088 #define VMA_DEDICATED_ALLOCATION 1 1090 #define VMA_DEDICATED_ALLOCATION 0 1108 uint32_t memoryType,
1109 VkDeviceMemory memory,
1114 uint32_t memoryType,
1115 VkDeviceMemory memory,
1187 #if VMA_DEDICATED_ALLOCATION 1188 PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR;
1189 PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR;
1280 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1288 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1298 uint32_t memoryTypeIndex,
1299 VkMemoryPropertyFlags* pFlags);
1311 uint32_t frameIndex);
1344 #define VMA_STATS_STRING_ENABLED 1 1346 #if VMA_STATS_STRING_ENABLED 1353 char** ppStatsString,
1354 VkBool32 detailedMap);
1358 char* pStatsString);
1360 #endif // #if VMA_STATS_STRING_ENABLED 1554 uint32_t memoryTypeBits,
1556 uint32_t* pMemoryTypeIndex);
1572 const VkBufferCreateInfo* pBufferCreateInfo,
1574 uint32_t* pMemoryTypeIndex);
1590 const VkImageCreateInfo* pImageCreateInfo,
1592 uint32_t* pMemoryTypeIndex);
1723 size_t* pLostAllocationCount);
1806 const VkMemoryRequirements* pVkMemoryRequirements,
2094 size_t allocationCount,
2095 VkBool32* pAllocationsChanged,
2161 const VkBufferCreateInfo* pBufferCreateInfo,
2186 const VkImageCreateInfo* pImageCreateInfo,
2212 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 2215 #if defined(__cplusplus) && defined(__INTELLISENSE__) 2216 #define VMA_IMPLEMENTATION 2219 #ifdef VMA_IMPLEMENTATION 2220 #undef VMA_IMPLEMENTATION 2242 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 2243 #define VMA_STATIC_VULKAN_FUNCTIONS 1 2255 #if VMA_USE_STL_CONTAINERS 2256 #define VMA_USE_STL_VECTOR 1 2257 #define VMA_USE_STL_UNORDERED_MAP 1 2258 #define VMA_USE_STL_LIST 1 2261 #if VMA_USE_STL_VECTOR 2265 #if VMA_USE_STL_UNORDERED_MAP 2266 #include <unordered_map> 2269 #if VMA_USE_STL_LIST 2278 #include <algorithm> 2284 #define VMA_NULL nullptr 2287 #if defined(__APPLE__) || defined(__ANDROID__) 2289 void *aligned_alloc(
// Fallback aligned_alloc for Apple/Android, which lack the C11 function:
// implemented on top of posix_memalign.
size_t alignment,
size_t size)
// posix_memalign requires alignment to be at least sizeof(void*) (and a
// power of two); round tiny alignments up so the call does not fail.
2292 if(alignment <
sizeof(
void*))
2294 alignment =
sizeof(
void*);
// On success posix_memalign stores the allocation into `pointer`; the
// success/failure return paths are outside the visible lines here.
2298 if(posix_memalign(&pointer, alignment, size) == 0)
// Configuration macros below are all overridable by the user before including
// this header: assertions, alignment query, system aligned malloc/free,
// min/max/swap/sort helpers and debug logging.
2312 #define VMA_ASSERT(expr) assert(expr) 2314 #define VMA_ASSERT(expr) 2320 #ifndef VMA_HEAVY_ASSERT 2322 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 2324 #define VMA_HEAVY_ASSERT(expr) 2328 #ifndef VMA_ALIGN_OF 2329 #define VMA_ALIGN_OF(type) (__alignof(type)) 2332 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 2334 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 2336 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 2340 #ifndef VMA_SYSTEM_FREE 2342 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 2344 #define VMA_SYSTEM_FREE(ptr) free(ptr) 2349 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 2353 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 2357 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 2361 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 2364 #ifndef VMA_DEBUG_LOG 2365 #define VMA_DEBUG_LOG(format, ...) 2375 #if VMA_STATS_STRING_ENABLED 2376 static inline void VmaUint32ToStr(
// Formats `num` as decimal text into outStr, truncating per snprintf semantics.
char* outStr,
size_t strLen, uint32_t num)
2378 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Formats `num` as decimal text into outStr; output is truncated to strLen
// (snprintf semantics, always NUL-terminated when strLen > 0).
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}

// Formats a pointer value into outStr using the platform's "%p" representation.
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
// Blocking acquire of the wrapped std::mutex.
2396 void Lock() { m_Mutex.lock(); }
// Release; the calling thread must currently hold the mutex.
2397 void Unlock() { m_Mutex.unlock(); }
2401 #define VMA_MUTEX VmaMutex 2412 #ifndef VMA_ATOMIC_UINT32 2413 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 2416 #ifndef VMA_BEST_FIT 2429 #define VMA_BEST_FIT (1) 2432 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 2437 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 2440 #ifndef VMA_DEBUG_ALIGNMENT 2445 #define VMA_DEBUG_ALIGNMENT (1) 2448 #ifndef VMA_DEBUG_MARGIN 2453 #define VMA_DEBUG_MARGIN (0) 2456 #ifndef VMA_DEBUG_GLOBAL_MUTEX 2461 #define VMA_DEBUG_GLOBAL_MUTEX (0) 2464 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 2469 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 2472 #ifndef VMA_SMALL_HEAP_MAX_SIZE 2473 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 2477 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 2478 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 2482 #ifndef VMA_CLASS_NO_COPY 2483 #define VMA_CLASS_NO_COPY(className) \ 2485 className(const className&) = delete; \ 2486 className& operator=(const className&) = delete; 2489 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
// All-null VkAllocationCallbacks used where a callbacks struct is needed but the
// user supplied none; the six members (pUserData plus five callback pointers)
// are all VMA_NULL.
2495 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
2496 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of set bits in v (population count), branch-free.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    // Classic SWAR reduction: pairwise 2-bit sums, then 4-bit, then 8-bit
    // per-byte counts; the multiply accumulates all four byte counts into
    // the top byte.
    uint32_t c = v - ((v >> 1) & 0x55555555u);
    c = (c & 0x33333333u) + ((c >> 2) & 0x33333333u);
    c = (c + (c >> 4)) & 0x0F0F0F0Fu;
    return (c * 0x01010101u) >> 24;
}
// Rounds `val` up to the nearest multiple of `align` (align > 0).
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T numBlocks = (val + align - T(1)) / align;
    return numBlocks * align;
}

// Rounds `val` down to the nearest multiple of `align` (align > 0).
template <typename T>
static inline T VmaAlignDown(T val, T align)
{
    const T numBlocks = val / align;
    return numBlocks * align;
}

// Integer division with mathematical rounding to the nearest value.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T halfY = y / (T)2;
    return (x + halfY) / y;
}
// Lomuto-style partition using the last element as pivot. After the call,
// everything left of the returned iterator compares less than the pivot,
// which now sits at the returned position.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}

// Plain recursive quicksort over [beg, end); used as the default VMA_SORT.
template<typename Iterator, typename Compare>
void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
{
    if(beg < end)
    {
        Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
        VmaQuickSort<Iterator, Compare>(beg, it, cmp);
        VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
    }
}
2567 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 2569 #endif // #ifndef VMA_SORT 2578 static inline bool VmaBlocksOnSamePage(
2579 VkDeviceSize resourceAOffset,
2580 VkDeviceSize resourceASize,
2581 VkDeviceSize resourceBOffset,
2582 VkDeviceSize pageSize)
2584 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
2585 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
2586 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
2587 VkDeviceSize resourceBStart = resourceBOffset;
2588 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
2589 return resourceAEndPage == resourceBStartPage;
// Kind of resource occupying a suballocation; ordering matters: the values are
// compared/swapped in VmaIsBufferImageGranularityConflict.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,           // unused region
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,        // allocated, contents unknown
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,  // image, tiling not known
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
2609 static inline bool VmaIsBufferImageGranularityConflict(
2610 VmaSuballocationType suballocType1,
2611 VmaSuballocationType suballocType2)
2613 if(suballocType1 > suballocType2)
2615 VMA_SWAP(suballocType1, suballocType2);
2618 switch(suballocType1)
2620 case VMA_SUBALLOCATION_TYPE_FREE:
2622 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
2624 case VMA_SUBALLOCATION_TYPE_BUFFER:
2626 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2627 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2628 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
2630 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2631 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
2632 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2633 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
2635 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2636 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
// RAII scoped lock for VMA_MUTEX: locks in the constructor, unlocks in the
// destructor. Locking is optional via `useMutex`.
2647 VMA_CLASS_NO_COPY(VmaMutexLock)
// When useMutex is false, no mutex pointer is stored and the lock is a no-op.
2649 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
2650 m_pMutex(useMutex ? &mutex : VMA_NULL)
// Non-null only when this object actually owns/locks the mutex.
2667 VMA_MUTEX* m_pMutex;
2670 #if VMA_DEBUG_GLOBAL_MUTEX 2671 static VMA_MUTEX gDebugGlobalMutex;
2672 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 2674 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 2678 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Binary search over the sorted range [beg, end): returns an iterator to the
first element that is NOT less than `key` (i.e. std::lower_bound semantics),
or `end` if every element is less.
*/
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t lo = 0, hi = (end - beg);
    while(lo < hi)
    {
        const size_t center = (lo + hi) / 2;
        if(cmp(*(beg + center), key))
        {
            // Element is less than key: answer lies strictly to the right.
            lo = center + 1;
        }
        else
        {
            hi = center;
        }
    }
    return beg + lo;
}
2711 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
2713 if((pAllocationCallbacks != VMA_NULL) &&
2714 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
2716 return (*pAllocationCallbacks->pfnAllocation)(
2717 pAllocationCallbacks->pUserData,
2720 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2724 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
2728 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
2730 if((pAllocationCallbacks != VMA_NULL) &&
2731 (pAllocationCallbacks->pfnFree != VMA_NULL))
2733 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
2737 VMA_SYSTEM_FREE(ptr);
2741 template<
typename T>
2742 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
2744 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
2747 template<
typename T>
2748 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
2750 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
2753 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 2755 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 2757 template<
typename T>
2758 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
2761 VmaFree(pAllocationCallbacks, ptr);
2764 template<
typename T>
2765 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
2769 for(
size_t i = count; i--; )
2773 VmaFree(pAllocationCallbacks, ptr);
2778 template<
typename T>
2779 class VmaStlAllocator
2782 const VkAllocationCallbacks*
const m_pCallbacks;
2783 typedef T value_type;
2785 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
2786 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
2788 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
2789 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
2791 template<
typename U>
2792 bool operator==(
const VmaStlAllocator<U>& rhs)
const 2794 return m_pCallbacks == rhs.m_pCallbacks;
2796 template<
typename U>
2797 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 2799 return m_pCallbacks != rhs.m_pCallbacks;
2802 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
2805 #if VMA_USE_STL_VECTOR 2807 #define VmaVector std::vector 2809 template<
typename T,
typename allocatorT>
2810 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
2812 vec.insert(vec.begin() + index, item);
2815 template<
typename T,
typename allocatorT>
2816 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
2818 vec.erase(vec.begin() + index);
2821 #else // #if VMA_USE_STL_VECTOR 2826 template<
typename T,
typename AllocatorT>
2830 typedef T value_type;
2832 VmaVector(
const AllocatorT& allocator) :
2833 m_Allocator(allocator),
2840 VmaVector(
size_t count,
const AllocatorT& allocator) :
2841 m_Allocator(allocator),
2842 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
2848 VmaVector(
const VmaVector<T, AllocatorT>& src) :
2849 m_Allocator(src.m_Allocator),
2850 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
2851 m_Count(src.m_Count),
2852 m_Capacity(src.m_Count)
2856 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
2862 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2865 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
2869 resize(rhs.m_Count);
2872 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
2878 bool empty()
const {
return m_Count == 0; }
2879 size_t size()
const {
return m_Count; }
2880 T* data() {
return m_pArray; }
2881 const T* data()
const {
return m_pArray; }
2883 T& operator[](
size_t index)
2885 VMA_HEAVY_ASSERT(index < m_Count);
2886 return m_pArray[index];
2888 const T& operator[](
size_t index)
const 2890 VMA_HEAVY_ASSERT(index < m_Count);
2891 return m_pArray[index];
2896 VMA_HEAVY_ASSERT(m_Count > 0);
2899 const T& front()
const 2901 VMA_HEAVY_ASSERT(m_Count > 0);
2906 VMA_HEAVY_ASSERT(m_Count > 0);
2907 return m_pArray[m_Count - 1];
2909 const T& back()
const 2911 VMA_HEAVY_ASSERT(m_Count > 0);
2912 return m_pArray[m_Count - 1];
2915 void reserve(
size_t newCapacity,
bool freeMemory =
false)
2917 newCapacity = VMA_MAX(newCapacity, m_Count);
2919 if((newCapacity < m_Capacity) && !freeMemory)
2921 newCapacity = m_Capacity;
2924 if(newCapacity != m_Capacity)
2926 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
2929 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
2931 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2932 m_Capacity = newCapacity;
2933 m_pArray = newArray;
2937 void resize(
size_t newCount,
bool freeMemory =
false)
2939 size_t newCapacity = m_Capacity;
2940 if(newCount > m_Capacity)
2942 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
2946 newCapacity = newCount;
2949 if(newCapacity != m_Capacity)
2951 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
2952 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
2953 if(elementsToCopy != 0)
2955 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
2957 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2958 m_Capacity = newCapacity;
2959 m_pArray = newArray;
2965 void clear(
bool freeMemory =
false)
2967 resize(0, freeMemory);
2970 void insert(
size_t index,
const T& src)
2972 VMA_HEAVY_ASSERT(index <= m_Count);
2973 const size_t oldCount = size();
2974 resize(oldCount + 1);
2975 if(index < oldCount)
2977 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
2979 m_pArray[index] = src;
2982 void remove(
size_t index)
2984 VMA_HEAVY_ASSERT(index < m_Count);
2985 const size_t oldCount = size();
2986 if(index < oldCount - 1)
2988 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
2990 resize(oldCount - 1);
2993 void push_back(
const T& src)
2995 const size_t newIndex = size();
2996 resize(newIndex + 1);
2997 m_pArray[newIndex] = src;
3002 VMA_HEAVY_ASSERT(m_Count > 0);
3006 void push_front(
const T& src)
3013 VMA_HEAVY_ASSERT(m_Count > 0);
3017 typedef T* iterator;
3019 iterator begin() {
return m_pArray; }
3020 iterator end() {
return m_pArray + m_Count; }
3023 AllocatorT m_Allocator;
3029 template<
typename T,
typename allocatorT>
3030 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
3032 vec.insert(index, item);
3035 template<
typename T,
typename allocatorT>
3036 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
3041 #endif // #if VMA_USE_STL_VECTOR 3043 template<
typename CmpLess,
typename VectorT>
3044 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
3046 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3048 vector.data() + vector.size(),
3050 CmpLess()) - vector.data();
3051 VmaVectorInsert(vector, indexToInsert, value);
3052 return indexToInsert;
3055 template<
typename CmpLess,
typename VectorT>
3056 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
3059 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
3064 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
3066 size_t indexToRemove = it - vector.begin();
3067 VmaVectorRemove(vector, indexToRemove);
3073 template<
typename CmpLess,
typename VectorT>
3074 size_t VmaVectorFindSorted(
const VectorT& vector,
const typename VectorT::value_type& value)
3077 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
3079 vector.data() + vector.size(),
3082 if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
3084 return it - vector.begin();
3088 return vector.size();
3100 template<
typename T>
3101 class VmaPoolAllocator
3103 VMA_CLASS_NO_COPY(VmaPoolAllocator)
3105 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
3106 ~VmaPoolAllocator();
3114 uint32_t NextFreeIndex;
3121 uint32_t FirstFreeIndex;
3124 const VkAllocationCallbacks* m_pAllocationCallbacks;
3125 size_t m_ItemsPerBlock;
3126 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
3128 ItemBlock& CreateNewBlock();
3131 template<
typename T>
3132 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
3133 m_pAllocationCallbacks(pAllocationCallbacks),
3134 m_ItemsPerBlock(itemsPerBlock),
3135 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
3137 VMA_ASSERT(itemsPerBlock > 0);
3140 template<
typename T>
3141 VmaPoolAllocator<T>::~VmaPoolAllocator()
3146 template<
typename T>
3147 void VmaPoolAllocator<T>::Clear()
3149 for(
size_t i = m_ItemBlocks.size(); i--; )
3150 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
3151 m_ItemBlocks.clear();
3154 template<
typename T>
3155 T* VmaPoolAllocator<T>::Alloc()
3157 for(
size_t i = m_ItemBlocks.size(); i--; )
3159 ItemBlock& block = m_ItemBlocks[i];
3161 if(block.FirstFreeIndex != UINT32_MAX)
3163 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
3164 block.FirstFreeIndex = pItem->NextFreeIndex;
3165 return &pItem->Value;
3170 ItemBlock& newBlock = CreateNewBlock();
3171 Item*
const pItem = &newBlock.pItems[0];
3172 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
3173 return &pItem->Value;
3176 template<
typename T>
3177 void VmaPoolAllocator<T>::Free(T* ptr)
3180 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
3182 ItemBlock& block = m_ItemBlocks[i];
3186 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
3189 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
3191 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
3192 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
3193 block.FirstFreeIndex = index;
3197 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
3200 template<
typename T>
3201 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
3203 ItemBlock newBlock = {
3204 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
3206 m_ItemBlocks.push_back(newBlock);
3209 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
3210 newBlock.pItems[i].NextFreeIndex = i + 1;
3211 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
3212 return m_ItemBlocks.back();
3218 #if VMA_USE_STL_LIST 3220 #define VmaList std::list 3222 #else // #if VMA_USE_STL_LIST 3224 template<
typename T>
3233 template<
typename T>
3236 VMA_CLASS_NO_COPY(VmaRawList)
3238 typedef VmaListItem<T> ItemType;
3240 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
3244 size_t GetCount()
const {
return m_Count; }
3245 bool IsEmpty()
const {
return m_Count == 0; }
3247 ItemType* Front() {
return m_pFront; }
3248 const ItemType* Front()
const {
return m_pFront; }
3249 ItemType* Back() {
return m_pBack; }
3250 const ItemType* Back()
const {
return m_pBack; }
3252 ItemType* PushBack();
3253 ItemType* PushFront();
3254 ItemType* PushBack(
const T& value);
3255 ItemType* PushFront(
const T& value);
3260 ItemType* InsertBefore(ItemType* pItem);
3262 ItemType* InsertAfter(ItemType* pItem);
3264 ItemType* InsertBefore(ItemType* pItem,
const T& value);
3265 ItemType* InsertAfter(ItemType* pItem,
const T& value);
3267 void Remove(ItemType* pItem);
3270 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
3271 VmaPoolAllocator<ItemType> m_ItemAllocator;
3277 template<
typename T>
3278 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
3279 m_pAllocationCallbacks(pAllocationCallbacks),
3280 m_ItemAllocator(pAllocationCallbacks, 128),
3287 template<
typename T>
3288 VmaRawList<T>::~VmaRawList()
3294 template<
typename T>
3295 void VmaRawList<T>::Clear()
3297 if(IsEmpty() ==
false)
3299 ItemType* pItem = m_pBack;
3300 while(pItem != VMA_NULL)
3302 ItemType*
const pPrevItem = pItem->pPrev;
3303 m_ItemAllocator.Free(pItem);
3306 m_pFront = VMA_NULL;
3312 template<
typename T>
3313 VmaListItem<T>* VmaRawList<T>::PushBack()
3315 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3316 pNewItem->pNext = VMA_NULL;
3319 pNewItem->pPrev = VMA_NULL;
3320 m_pFront = pNewItem;
3326 pNewItem->pPrev = m_pBack;
3327 m_pBack->pNext = pNewItem;
3334 template<
typename T>
3335 VmaListItem<T>* VmaRawList<T>::PushFront()
3337 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3338 pNewItem->pPrev = VMA_NULL;
3341 pNewItem->pNext = VMA_NULL;
3342 m_pFront = pNewItem;
3348 pNewItem->pNext = m_pFront;
3349 m_pFront->pPrev = pNewItem;
3350 m_pFront = pNewItem;
3356 template<
typename T>
3357 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
3359 ItemType*
const pNewItem = PushBack();
3360 pNewItem->Value = value;
3364 template<
typename T>
3365 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
3367 ItemType*
const pNewItem = PushFront();
3368 pNewItem->Value = value;
3372 template<
typename T>
3373 void VmaRawList<T>::PopBack()
3375 VMA_HEAVY_ASSERT(m_Count > 0);
3376 ItemType*
const pBackItem = m_pBack;
3377 ItemType*
const pPrevItem = pBackItem->pPrev;
3378 if(pPrevItem != VMA_NULL)
3380 pPrevItem->pNext = VMA_NULL;
3382 m_pBack = pPrevItem;
3383 m_ItemAllocator.Free(pBackItem);
3387 template<
typename T>
3388 void VmaRawList<T>::PopFront()
3390 VMA_HEAVY_ASSERT(m_Count > 0);
3391 ItemType*
const pFrontItem = m_pFront;
3392 ItemType*
const pNextItem = pFrontItem->pNext;
3393 if(pNextItem != VMA_NULL)
3395 pNextItem->pPrev = VMA_NULL;
3397 m_pFront = pNextItem;
3398 m_ItemAllocator.Free(pFrontItem);
3402 template<
typename T>
3403 void VmaRawList<T>::Remove(ItemType* pItem)
3405 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
3406 VMA_HEAVY_ASSERT(m_Count > 0);
3408 if(pItem->pPrev != VMA_NULL)
3410 pItem->pPrev->pNext = pItem->pNext;
3414 VMA_HEAVY_ASSERT(m_pFront == pItem);
3415 m_pFront = pItem->pNext;
3418 if(pItem->pNext != VMA_NULL)
3420 pItem->pNext->pPrev = pItem->pPrev;
3424 VMA_HEAVY_ASSERT(m_pBack == pItem);
3425 m_pBack = pItem->pPrev;
3428 m_ItemAllocator.Free(pItem);
3432 template<
typename T>
3433 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
3435 if(pItem != VMA_NULL)
3437 ItemType*
const prevItem = pItem->pPrev;
3438 ItemType*
const newItem = m_ItemAllocator.Alloc();
3439 newItem->pPrev = prevItem;
3440 newItem->pNext = pItem;
3441 pItem->pPrev = newItem;
3442 if(prevItem != VMA_NULL)
3444 prevItem->pNext = newItem;
3448 VMA_HEAVY_ASSERT(m_pFront == pItem);
3458 template<
typename T>
3459 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
3461 if(pItem != VMA_NULL)
3463 ItemType*
const nextItem = pItem->pNext;
3464 ItemType*
const newItem = m_ItemAllocator.Alloc();
3465 newItem->pNext = nextItem;
3466 newItem->pPrev = pItem;
3467 pItem->pNext = newItem;
3468 if(nextItem != VMA_NULL)
3470 nextItem->pPrev = newItem;
3474 VMA_HEAVY_ASSERT(m_pBack == pItem);
3484 template<
typename T>
3485 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
3487 ItemType*
const newItem = InsertBefore(pItem);
3488 newItem->Value = value;
3492 template<
typename T>
3493 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
3495 ItemType*
const newItem = InsertAfter(pItem);
3496 newItem->Value = value;
3500 template<
typename T,
typename AllocatorT>
3503 VMA_CLASS_NO_COPY(VmaList)
3514 T& operator*()
const 3516 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3517 return m_pItem->Value;
3519 T* operator->()
const 3521 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3522 return &m_pItem->Value;
3525 iterator& operator++()
3527 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3528 m_pItem = m_pItem->pNext;
3531 iterator& operator--()
3533 if(m_pItem != VMA_NULL)
3535 m_pItem = m_pItem->pPrev;
3539 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3540 m_pItem = m_pList->Back();
3545 iterator operator++(
int)
3547 iterator result = *
this;
3551 iterator operator--(
int)
3553 iterator result = *
this;
3558 bool operator==(
const iterator& rhs)
const 3560 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3561 return m_pItem == rhs.m_pItem;
3563 bool operator!=(
const iterator& rhs)
const 3565 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3566 return m_pItem != rhs.m_pItem;
3570 VmaRawList<T>* m_pList;
3571 VmaListItem<T>* m_pItem;
3573 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
3579 friend class VmaList<T, AllocatorT>;
3582 class const_iterator
3591 const_iterator(
const iterator& src) :
3592 m_pList(src.m_pList),
3593 m_pItem(src.m_pItem)
3597 const T& operator*()
const 3599 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3600 return m_pItem->Value;
3602 const T* operator->()
const 3604 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3605 return &m_pItem->Value;
3608 const_iterator& operator++()
3610 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3611 m_pItem = m_pItem->pNext;
3614 const_iterator& operator--()
3616 if(m_pItem != VMA_NULL)
3618 m_pItem = m_pItem->pPrev;
3622 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3623 m_pItem = m_pList->Back();
3628 const_iterator operator++(
int)
3630 const_iterator result = *
this;
3634 const_iterator operator--(
int)
3636 const_iterator result = *
this;
3641 bool operator==(
const const_iterator& rhs)
const 3643 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3644 return m_pItem == rhs.m_pItem;
3646 bool operator!=(
const const_iterator& rhs)
const 3648 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3649 return m_pItem != rhs.m_pItem;
3653 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
3659 const VmaRawList<T>* m_pList;
3660 const VmaListItem<T>* m_pItem;
3662 friend class VmaList<T, AllocatorT>;
3665 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
3667 bool empty()
const {
return m_RawList.IsEmpty(); }
3668 size_t size()
const {
return m_RawList.GetCount(); }
3670 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
3671 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
3673 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
3674 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
3676 void clear() { m_RawList.Clear(); }
3677 void push_back(
const T& value) { m_RawList.PushBack(value); }
3678 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
3679 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
3682 VmaRawList<T> m_RawList;
3685 #endif // #if VMA_USE_STL_LIST 3693 #if VMA_USE_STL_UNORDERED_MAP 3695 #define VmaPair std::pair 3697 #define VMA_MAP_TYPE(KeyT, ValueT) \ 3698 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 3700 #else // #if VMA_USE_STL_UNORDERED_MAP 3702 template<
typename T1,
typename T2>
3708 VmaPair() : first(), second() { }
3709 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
3715 template<
typename KeyT,
typename ValueT>
3719 typedef VmaPair<KeyT, ValueT> PairType;
3720 typedef PairType* iterator;
3722 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
3724 iterator begin() {
return m_Vector.begin(); }
3725 iterator end() {
return m_Vector.end(); }
3727 void insert(
const PairType& pair);
3728 iterator find(
const KeyT& key);
3729 void erase(iterator it);
3732 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
3735 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 3737 template<
typename FirstT,
typename SecondT>
3738 struct VmaPairFirstLess
3740 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 3742 return lhs.first < rhs.first;
3744 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 3746 return lhs.first < rhsFirst;
3750 template<
typename KeyT,
typename ValueT>
3751 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
3753 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3755 m_Vector.data() + m_Vector.size(),
3757 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
3758 VmaVectorInsert(m_Vector, indexToInsert, pair);
3761 template<
typename KeyT,
typename ValueT>
3762 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
3764 PairType* it = VmaBinaryFindFirstNotLess(
3766 m_Vector.data() + m_Vector.size(),
3768 VmaPairFirstLess<KeyT, ValueT>());
3769 if((it != m_Vector.end()) && (it->first == key))
3775 return m_Vector.end();
3779 template<
typename KeyT,
typename ValueT>
3780 void VmaMap<KeyT, ValueT>::erase(iterator it)
3782 VmaVectorRemove(m_Vector, it - m_Vector.begin());
// Forward declaration; the device-memory block class is defined later in the
// implementation section.
3785 #endif // #if VMA_USE_STL_UNORDERED_MAP 3791 class VmaDeviceMemoryBlock;
// Selects between the flush and invalidate paths for mapped-memory ranges.
3793 enum VMA_CACHE_OPERATION { VMA_CACHE_FLUSH, VMA_CACHE_INVALIDATE };
3795 struct VmaAllocation_T
3797 VMA_CLASS_NO_COPY(VmaAllocation_T)
3799 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
3803 FLAG_USER_DATA_STRING = 0x01,
3807 enum ALLOCATION_TYPE
3809 ALLOCATION_TYPE_NONE,
3810 ALLOCATION_TYPE_BLOCK,
3811 ALLOCATION_TYPE_DEDICATED,
3814 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
3817 m_pUserData(VMA_NULL),
3818 m_LastUseFrameIndex(currentFrameIndex),
3819 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
3820 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
3822 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
3824 #if VMA_STATS_STRING_ENABLED 3825 m_CreationFrameIndex = currentFrameIndex;
3826 m_BufferImageUsage = 0;
3832 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
3835 VMA_ASSERT(m_pUserData == VMA_NULL);
// --- Interior of struct/class VmaAllocation_T (class header is outside this view) ---
// Represents a single allocation, which is either a sub-range of a larger
// VkDeviceMemory block (ALLOCATION_TYPE_BLOCK) or its own dedicated
// VkDeviceMemory (ALLOCATION_TYPE_DEDICATED). The two cases share storage via
// the BlockAllocation / DedicatedAllocation members below.
// NOTE(review): this text is a lossy extraction — embedded original line
// numbers jump (e.g. 3842→3844), so parameters and statements are missing
// throughout. Comments below only state what the visible code shows.

// Transitions this allocation from NONE to BLOCK type and records its place
// inside a memory block. Parameters `size`, `mapped`, `canBecomeLost` and
// `hPool` are referenced in the body but their declarations were lost in
// extraction — TODO confirm against the original header.
3838 void InitBlockAllocation(
3840 VmaDeviceMemoryBlock* block,
3841 VkDeviceSize offset,
3842 VkDeviceSize alignment,
3844 VmaSuballocationType suballocationType,
// Init must happen exactly once: the allocation starts as ALLOCATION_TYPE_NONE.
3848 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3849 VMA_ASSERT(block != VMA_NULL);
3850 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3851 m_Alignment = alignment;
// Persistently-mapped allocations carry a dedicated flag bit in m_MapCount.
3853 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3854 m_SuballocationType = (uint8_t)suballocationType;
3855 m_BlockAllocation.m_hPool = hPool;
3856 m_BlockAllocation.m_Block = block;
3857 m_BlockAllocation.m_Offset = offset;
3858 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
// Body of InitLost() (its signature line was lost in extraction): initializes
// an allocation that is already in the "lost" state — no backing block,
// frame index must already be VMA_FRAME_INDEX_LOST.
3863 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3864 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
3865 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3866 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
3867 m_BlockAllocation.m_Block = VMA_NULL;
3868 m_BlockAllocation.m_Offset = 0;
3869 m_BlockAllocation.m_CanBecomeLost =
true;
// Declaration only; definition appears later in the file. NOTE(review): an
// extra parameter (presumably VmaAllocator) seems to be missing here —
// the out-of-line definition takes one more argument line than shown.
3872 void ChangeBlockAllocation(
3874 VmaDeviceMemoryBlock* block,
3875 VkDeviceSize offset);
// Transitions this allocation from NONE to DEDICATED type. `pMappedData` is
// used in the body but its parameter declaration was lost in extraction.
3878 void InitDedicatedAllocation(
3879 uint32_t memoryTypeIndex,
3880 VkDeviceMemory hMemory,
3881 VmaSuballocationType suballocationType,
3885 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3886 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
3887 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
3890 m_SuballocationType = (uint8_t)suballocationType;
3891 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3892 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
3893 m_DedicatedAllocation.m_hMemory = hMemory;
3894 m_DedicatedAllocation.m_pMappedData = pMappedData;
// --- Simple inline accessors ---
3897 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
3898 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
3899 VkDeviceSize GetSize()
const {
return m_Size; }
// True when pUserData is treated as an owned, heap-copied string rather than
// an opaque pointer (see SetUserData / FreeUserDataString below).
3900 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
3901 void* GetUserData()
const {
return m_pUserData; }
3902 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
3903 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
// Only valid for block allocations.
3905 VmaDeviceMemoryBlock* GetBlock()
const 3907 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3908 return m_BlockAllocation.m_Block;
3910 VkDeviceSize GetOffset()
const;
3911 VkDeviceMemory GetMemory()
const;
3912 uint32_t GetMemoryTypeIndex()
const;
3913 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
3914 void* GetMappedData()
const;
3915 bool CanBecomeLost()
const;
// Lock-free read of the last frame this allocation was used in.
3918 uint32_t GetLastUseFrameIndex()
const 3920 return m_LastUseFrameIndex.load();
// CAS used by the lost-allocation machinery; weak form may fail spuriously,
// callers are expected to retry (see MakeLost definition later in the file).
3922 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
3924 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
3934 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3936 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
3938 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
3949 void BlockAllocMap();
3950 void BlockAllocUnmap();
3951 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
// Stats-only bookkeeping, compiled in only with VMA_STATS_STRING_ENABLED.
3954 #if VMA_STATS_STRING_ENABLED 3955 uint32_t GetCreationFrameIndex()
const {
return m_CreationFrameIndex; }
3956 uint32_t GetBufferImageUsage()
const {
return m_BufferImageUsage; }
// May be called at most once after creation (asserts the field is still 0).
3958 void InitBufferImageUsage(uint32_t bufferImageUsage)
3960 VMA_ASSERT(m_BufferImageUsage == 0);
3961 m_BufferImageUsage = bufferImageUsage;
3964 void PrintParameters(
class VmaJsonWriter& json)
const;
// --- Data members ---
3968 VkDeviceSize m_Alignment;
3969 VkDeviceSize m_Size;
3971 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
3973 uint8_t m_SuballocationType;
// State for an allocation placed inside a shared VkDeviceMemory block.
3980 struct BlockAllocation
3983 VmaDeviceMemoryBlock* m_Block;
3984 VkDeviceSize m_Offset;
3985 bool m_CanBecomeLost;
// State for an allocation backed by its own VkDeviceMemory object.
3989 struct DedicatedAllocation
3991 uint32_t m_MemoryTypeIndex;
3992 VkDeviceMemory m_hMemory;
3993 void* m_pMappedData;
// NOTE(review): in the original these two live in an anonymous union keyed
// on m_Type — the union keyword was lost in extraction; verify before editing.
3999 BlockAllocation m_BlockAllocation;
4001 DedicatedAllocation m_DedicatedAllocation;
4004 #if VMA_STATS_STRING_ENABLED 4005 uint32_t m_CreationFrameIndex;
4006 uint32_t m_BufferImageUsage;
// One contiguous region inside a memory block: either a free gap or a live
// allocation. NOTE(review): members `size` and `hAllocation` are used by code
// later in this file but their declarations were lost in extraction.
4016 struct VmaSuballocation
4018 VkDeviceSize offset;
4021 VmaSuballocationType type;
4024 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost weight (1 MiB) charged per allocation that would have to be made lost
// to satisfy a request; used by CalcCost() below to compare candidates.
4027 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// Parameters of a planned allocation inside a block, produced by
// VmaBlockMetadata::CreateAllocationRequest and consumed by Alloc.
4042 struct VmaAllocationRequest
4044 VkDeviceSize offset;
4045 VkDeviceSize sumFreeSize;
4046 VkDeviceSize sumItemSize;
4047 VmaSuballocationList::iterator item;
4048 size_t itemsToMakeLostCount;
// Lower cost = better candidate: bytes of live allocations sacrificed plus a
// fixed penalty per allocation made lost.
4050 VkDeviceSize CalcCost()
const 4052 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// Bookkeeping for the contents of a single VkDeviceMemory block: an ordered
// list of suballocations (free and taken) plus a by-size index of free
// regions for best-fit search. Does not own the VkDeviceMemory itself.
4060 class VmaBlockMetadata
4062 VMA_CLASS_NO_COPY(VmaBlockMetadata)
4065 ~VmaBlockMetadata();
// Must be called once with the block's full size before any other use.
4066 void Init(VkDeviceSize size);
// Consistency check of all internal structures; returns false on corruption.
4069 bool Validate()
const;
4070 VkDeviceSize GetSize()
const {
return m_Size; }
4071 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
4072 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
4073 VkDeviceSize GetUnusedRangeSizeMax()
const;
4075 bool IsEmpty()
const;
4077 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
4080 #if VMA_STATS_STRING_ENABLED 4081 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// Shortcut for an empty block: the whole block becomes one request.
4085 void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
// Tries to find a place for an allocation; may plan to make other
// allocations lost when canMakeOtherLost is true. Returns true on success.
4090 bool CreateAllocationRequest(
4091 uint32_t currentFrameIndex,
4092 uint32_t frameInUseCount,
4093 VkDeviceSize bufferImageGranularity,
4094 VkDeviceSize allocSize,
4095 VkDeviceSize allocAlignment,
4096 VmaSuballocationType allocType,
4097 bool canMakeOtherLost,
4098 VmaAllocationRequest* pAllocationRequest);
4100 bool MakeRequestedAllocationsLost(
4101 uint32_t currentFrameIndex,
4102 uint32_t frameInUseCount,
4103 VmaAllocationRequest* pAllocationRequest);
4105 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// Commits a previously computed request. NOTE(review): the Alloc() signature
// line and its hAllocation parameter were lost in extraction.
4109 const VmaAllocationRequest& request,
4110 VmaSuballocationType type,
4111 VkDeviceSize allocSize,
4116 void FreeAtOffset(VkDeviceSize offset);
4119 VkDeviceSize m_Size;
4120 uint32_t m_FreeCount;
4121 VkDeviceSize m_SumFreeSize;
4122 VmaSuballocationList m_Suballocations;
// Free suballocations sorted by size, ascending — enables binary search for
// best-fit (only regions above a registration threshold are indexed).
4125 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
4127 bool ValidateFreeSuballocationList()
const;
// Checks whether a request of given size/alignment/type fits at/after
// suballocItem, honoring bufferImageGranularity; outputs offset and the cost
// (bytes + count of allocations that would become lost).
4131 bool CheckAllocation(
4132 uint32_t currentFrameIndex,
4133 uint32_t frameInUseCount,
4134 VkDeviceSize bufferImageGranularity,
4135 VkDeviceSize allocSize,
4136 VkDeviceSize allocAlignment,
4137 VmaSuballocationType allocType,
4138 VmaSuballocationList::const_iterator suballocItem,
4139 bool canMakeOtherLost,
4140 VkDeviceSize* pOffset,
4141 size_t* itemsToMakeLostCount,
4142 VkDeviceSize* pSumFreeSize,
4143 VkDeviceSize* pSumItemSize)
const;
4145 void MergeFreeWithNext(VmaSuballocationList::iterator item);
4149 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
4152 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
4155 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// Wraps one VkDeviceMemory object together with its VmaBlockMetadata and a
// reference-counted persistent mapping. Owned by a VmaBlockVector.
4164 class VmaDeviceMemoryBlock
4166 VMA_CLASS_NO_COPY(VmaDeviceMemoryBlock)
4168 VmaBlockMetadata m_Metadata;
// Destruction requires the block to be fully unmapped and already Destroy()ed
// (memory handle released) — both are programmer errors otherwise.
4172 ~VmaDeviceMemoryBlock()
4174 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
4175 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// Init() parameter list — NOTE(review): the function header and trailing
// parameters (e.g. block id) were lost in extraction.
4180 uint32_t newMemoryTypeIndex,
4181 VkDeviceMemory newMemory,
4182 VkDeviceSize newSize,
4187 VkDeviceMemory GetDeviceMemory()
const {
return m_hMemory; }
4188 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
4189 uint32_t GetId()
const {
return m_Id; }
4190 void* GetMappedData()
const {
return m_pMappedData; }
4193 bool Validate()
const;
// Maps the whole block `count` times (reference counted); ppData may be null.
4196 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
4199 VkResult BindBufferMemory(
4203 VkResult BindImageMemory(
4209 uint32_t m_MemoryTypeIndex;
4211 VkDeviceMemory m_hMemory;
// Map reference counter and cached mapped pointer; protected by a mutex in
// the original (the mutex member line was lost in extraction — verify).
4216 uint32_t m_MapCount;
4217 void* m_pMappedData;
// Orders raw pointers — used as comparator for pointer-keyed collections.
4220 struct VmaPointerLess
4222 bool operator()(
const void* lhs,
const void* rhs)
const 4228 class VmaDefragmentator;
// A growable sequence of VmaDeviceMemoryBlock for one memory type — the core
// of both default pools and custom VmaPool objects. Thread safety is provided
// by a mutex member (lost in extraction — verify against original).
4236 struct VmaBlockVector
4238 VMA_CLASS_NO_COPY(VmaBlockVector)
// Constructor parameter list; header line lost in extraction.
4242 uint32_t memoryTypeIndex,
4243 VkDeviceSize preferredBlockSize,
4244 size_t minBlockCount,
4245 size_t maxBlockCount,
4246 VkDeviceSize bufferImageGranularity,
4247 uint32_t frameInUseCount,
// Pre-creates minBlockCount empty blocks so a pool can guarantee capacity.
4251 VkResult CreateMinBlocks();
4253 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
4254 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
4255 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
4256 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
4260 bool IsEmpty()
const {
return m_Blocks.empty(); }
// Allocate() parameter fragment — several parameter lines lost in extraction.
4264 uint32_t currentFrameIndex,
4266 VkDeviceSize alignment,
4268 VmaSuballocationType suballocType,
4277 #if VMA_STATS_STRING_ENABLED 4278 void PrintDetailedMap(
class VmaJsonWriter& json);
4281 void MakePoolAllocationsLost(
4282 uint32_t currentFrameIndex,
4283 size_t* pLostAllocationCount);
// Lazily creates the per-vector defragmentator (at most one at a time).
4285 VmaDefragmentator* EnsureDefragmentator(
4287 uint32_t currentFrameIndex);
4289 VkResult Defragment(
4291 VkDeviceSize& maxBytesToMove,
4292 uint32_t& maxAllocationsToMove);
4294 void DestroyDefragmentator();
4297 friend class VmaDefragmentator;
4300 const uint32_t m_MemoryTypeIndex;
4301 const VkDeviceSize m_PreferredBlockSize;
4302 const size_t m_MinBlockCount;
4303 const size_t m_MaxBlockCount;
4304 const VkDeviceSize m_BufferImageGranularity;
4305 const uint32_t m_FrameInUseCount;
// True for user-created VmaPool, false for the allocator's default vectors.
4306 const bool m_IsCustomPool;
// Incrementally sorted by available size, ascending (see
// IncrementallySortBlocks) so search starts with the fullest blocks.
4309 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
// Tracks whether one empty block is being kept alive to avoid free/alloc churn.
4313 bool m_HasEmptyBlock;
4314 VmaDefragmentator* m_pDefragmentator;
4315 uint32_t m_NextBlockId;
4317 VkDeviceSize CalcMaxBlockSize()
const;
4320 void Remove(VmaDeviceMemoryBlock* pBlock);
4324 void IncrementallySortBlocks();
4326 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// --- VmaPool_T: a user-visible custom pool; thin wrapper over VmaBlockVector.
4331 VMA_CLASS_NO_COPY(VmaPool_T)
4333 VmaBlockVector m_BlockVector;
4340 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
4341 uint32_t GetId()
const {
return m_Id; }
// Id is assigned exactly once after construction (asserted still 0).
4342 void SetId(uint32_t
id) { VMA_ASSERT(m_Id == 0); m_Id = id; }
4344 #if VMA_STATS_STRING_ENABLED 4352 class VmaDefragmentator
4344 #if VMA_STATS_STRING_ENABLED 4352 class VmaDefragmentator
4354 VMA_CLASS_NO_COPY(VmaDefragmentator)
4357 VmaBlockVector*
const m_pBlockVector;
4358 uint32_t m_CurrentFrameIndex;
4359 VkDeviceSize m_BytesMoved;
4360 uint32_t m_AllocationsMoved;
4362 struct AllocationInfo
4365 VkBool32* m_pChanged;
4368 m_hAllocation(VK_NULL_HANDLE),
4369 m_pChanged(VMA_NULL)
4374 struct AllocationInfoSizeGreater
4376 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 4378 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
4383 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
4387 VmaDeviceMemoryBlock* m_pBlock;
4388 bool m_HasNonMovableAllocations;
4389 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
4391 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
4393 m_HasNonMovableAllocations(true),
4394 m_Allocations(pAllocationCallbacks),
4395 m_pMappedDataForDefragmentation(VMA_NULL)
4399 void CalcHasNonMovableAllocations()
4401 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
4402 const size_t defragmentAllocCount = m_Allocations.size();
4403 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
4406 void SortAllocationsBySizeDescecnding()
4408 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
4411 VkResult EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData);
4416 void* m_pMappedDataForDefragmentation;
4419 struct BlockPointerLess
4421 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 4423 return pLhsBlockInfo->m_pBlock < pRhsBlock;
4425 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 4427 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
4433 struct BlockInfoCompareMoveDestination
4435 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 4437 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
4441 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
4445 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
4453 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
4454 BlockInfoVector m_Blocks;
4456 VkResult DefragmentRound(
4457 VkDeviceSize maxBytesToMove,
4458 uint32_t maxAllocationsToMove);
4460 static bool MoveMakesSense(
4461 size_t dstBlockIndex, VkDeviceSize dstOffset,
4462 size_t srcBlockIndex, VkDeviceSize srcOffset);
4467 VmaBlockVector* pBlockVector,
4468 uint32_t currentFrameIndex);
4470 ~VmaDefragmentator();
4472 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
4473 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
4475 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
4477 VkResult Defragment(
4478 VkDeviceSize maxBytesToMove,
4479 uint32_t maxAllocationsToMove);
// The allocator implementation behind the public VmaAllocator handle.
// Holds per-memory-type block vectors, dedicated-allocation registries,
// cached device properties, and the dispatch table of Vulkan functions.
4483 struct VmaAllocator_T
4485 VMA_CLASS_NO_COPY(VmaAllocator_T)
4488 bool m_UseKhrDedicatedAllocation;
4490 bool m_AllocationCallbacksSpecified;
4491 VkAllocationCallbacks m_AllocationCallbacks;
// Optional per-heap byte budgets, guarded by their own mutex.
4495 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
4496 VMA_MUTEX m_HeapSizeLimitMutex;
4498 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
4499 VkPhysicalDeviceMemoryProperties m_MemProps;
// Default pools, one (possibly null) per memory type.
4502 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
// Registry of dedicated allocations per memory type, each with its own lock.
4505 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
4506 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
4507 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns user callbacks if supplied at creation, else null (Vulkan default).
4512 const VkAllocationCallbacks* GetAllocationCallbacks()
const 4514 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
4518 return m_VulkanFunctions;
// Effective granularity = max(debug override, device limit). NOTE(review):
// the VMA_MAX( line was lost in extraction.
4521 VkDeviceSize GetBufferImageGranularity()
const 4524 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
4525 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
4528 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
4529 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
4531 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 4533 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
4534 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// Non-coherent = host-visible without host-coherent; such memory needs
// explicit flush/invalidate and nonCoherentAtomSize alignment (below).
4537 bool IsMemoryTypeNonCoherent(uint32_t memTypeIndex)
const 4539 return (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & (VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT)) ==
4540 VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
4543 VkDeviceSize GetMemoryTypeMinAlignment(uint32_t memTypeIndex)
const 4545 return IsMemoryTypeNonCoherent(memTypeIndex) ?
4546 VMA_MAX((VkDeviceSize)VMA_DEBUG_ALIGNMENT, m_PhysicalDeviceProperties.limits.nonCoherentAtomSize) :
4547 (VkDeviceSize)VMA_DEBUG_ALIGNMENT;
4550 bool IsIntegratedGpu()
const 4552 return m_PhysicalDeviceProperties.deviceType == VK_PHYSICAL_DEVICE_TYPE_INTEGRATED_GPU;
// Wrappers over vkGet*MemoryRequirements(2KHR) that also report whether a
// dedicated allocation is required/preferred for the resource.
4555 void GetBufferMemoryRequirements(
4557 VkMemoryRequirements& memReq,
4558 bool& requiresDedicatedAllocation,
4559 bool& prefersDedicatedAllocation)
const;
4560 void GetImageMemoryRequirements(
4562 VkMemoryRequirements& memReq,
4563 bool& requiresDedicatedAllocation,
4564 bool& prefersDedicatedAllocation)
const;
// Main entry point: chooses memory type and either a block suballocation or
// a dedicated allocation. Several parameter lines lost in extraction.
4567 VkResult AllocateMemory(
4568 const VkMemoryRequirements& vkMemReq,
4569 bool requiresDedicatedAllocation,
4570 bool prefersDedicatedAllocation,
4571 VkBuffer dedicatedBuffer,
4572 VkImage dedicatedImage,
4574 VmaSuballocationType suballocType,
4580 void CalculateStats(
VmaStats* pStats);
4582 #if VMA_STATS_STRING_ENABLED 4583 void PrintDetailedMap(
class VmaJsonWriter& json);
4586 VkResult Defragment(
4588 size_t allocationCount,
4589 VkBool32* pAllocationsChanged,
4597 void DestroyPool(
VmaPool pool);
4600 void SetCurrentFrameIndex(uint32_t frameIndex);
4602 void MakePoolAllocationsLost(
4604 size_t* pLostAllocationCount);
// Low-level vkAllocateMemory/vkFreeMemory wrappers that also honor heap size
// limits and device-memory callbacks.
4608 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
4609 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
4614 VkResult BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer);
4615 VkResult BindImageMemory(
VmaAllocation hAllocation, VkImage hImage);
4617 void FlushOrInvalidateAllocation(
4619 VkDeviceSize offset, VkDeviceSize size,
4620 VMA_CACHE_OPERATION op);
4623 VkDeviceSize m_PreferredLargeHeapBlockSize;
4625 VkPhysicalDevice m_PhysicalDevice;
4626 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
4628 VMA_MUTEX m_PoolsMutex;
// All custom pools, protected by m_PoolsMutex.
4630 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
4631 uint32_t m_NextPoolId;
4637 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
4639 VkResult AllocateMemoryOfType(
4641 VkDeviceSize alignment,
4642 bool dedicatedAllocation,
4643 VkBuffer dedicatedBuffer,
4644 VkImage dedicatedImage,
4646 uint32_t memTypeIndex,
4647 VmaSuballocationType suballocType,
4651 VkResult AllocateDedicatedMemory(
4653 VmaSuballocationType suballocType,
4654 uint32_t memTypeIndex,
4656 bool isUserDataString,
4658 VkBuffer dedicatedBuffer,
4659 VkImage dedicatedImage,
// Allocator-aware new/delete helpers: route all internal heap traffic through
// the allocator's VkAllocationCallbacks.
4669 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
4671 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
4674 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
4676 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// vma_new_array's Allocate helper — NOTE(review): the function name line for
// this first template was lost in extraction.
4679 template<
typename T>
4682 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
4685 template<
typename T>
4686 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
4688 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// NOTE(review): the explicit destructor calls (ptr->~T()) that precede the
// frees in the original were lost in extraction (line numbers jump 4692→4697
// and 4706→4708); the raw memory free alone does not run destructors.
4691 template<
typename T>
4692 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
4697 VmaFree(hAllocator, ptr);
4701 template<
typename T>
4702 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
4706 for(
size_t i = count; i--; )
4708 VmaFree(hAllocator, ptr);
// Minimal append-only string buffer used to build the JSON stats string;
// backed by VmaVector so it honors the allocator's allocation callbacks.
// Entire section compiled only when VMA_STATS_STRING_ENABLED.
4715 #if VMA_STATS_STRING_ENABLED 4717 class VmaStringBuilder
4720 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
4721 size_t GetLength()
const {
return m_Data.size(); }
// Note: the buffer is NOT null-terminated; length comes from GetLength().
4722 const char* GetData()
const {
return m_Data.data(); }
4724 void Add(
char ch) { m_Data.push_back(ch); }
4725 void Add(
const char* pStr);
4726 void AddNewLine() { Add(
'\n'); }
4727 void AddNumber(uint32_t num);
4728 void AddNumber(uint64_t num);
4729 void AddPointer(
const void* ptr);
4732 VmaVector< char, VmaStlAllocator<char> > m_Data;
// Appends a C string by bulk resize + memcpy. NOTE(review): the empty-string
// guard around the resize was lost in extraction (line numbers jump
// 4737→4740).
4735 void VmaStringBuilder::Add(
const char* pStr)
4737 const size_t strLen = strlen(pStr);
4740 const size_t oldCount = m_Data.size();
4741 m_Data.resize(oldCount + strLen);
4742 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Number/pointer formatters: convert into a stack buffer then append.
// NOTE(review): the buffer declarations and trailing Add(buf) calls were
// lost in extraction for all three functions below.
4746 void VmaStringBuilder::AddNumber(uint32_t num)
4749 VmaUint32ToStr(buf,
sizeof(buf), num);
4753 void VmaStringBuilder::AddNumber(uint64_t num)
4756 VmaUint64ToStr(buf,
sizeof(buf), num);
4760 void VmaStringBuilder::AddPointer(
const void* ptr)
4763 VmaPtrToStr(buf,
sizeof(buf), ptr);
// Streaming JSON writer over a VmaStringBuilder. Maintains a stack of open
// objects/arrays to emit commas, quotes and indentation correctly. Object
// members must be written as alternating key string / value pairs.
4767 #endif // #if VMA_STATS_STRING_ENABLED 4772 #if VMA_STATS_STRING_ENABLED 4776 VMA_CLASS_NO_COPY(VmaJsonWriter)
4778 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
4781 void BeginObject(
bool singleLine =
false);
4784 void BeginArray(
bool singleLine =
false);
// WriteString = BeginString + EndString in one call.
4787 void WriteString(
const char* pStr);
// Begin/Continue/EndString allow composing one JSON string from pieces.
4788 void BeginString(
const char* pStr = VMA_NULL);
4789 void ContinueString(
const char* pStr);
4790 void ContinueString(uint32_t n);
4791 void ContinueString(uint64_t n);
4792 void ContinueString_Pointer(
const void* ptr);
4793 void EndString(
const char* pStr = VMA_NULL);
4795 void WriteNumber(uint32_t n);
4796 void WriteNumber(uint64_t n);
4797 void WriteBool(
bool b);
4801 static const char*
const INDENT;
4803 enum COLLECTION_TYPE
4805 COLLECTION_TYPE_OBJECT,
4806 COLLECTION_TYPE_ARRAY,
// One stack entry per currently-open object/array; valueCount drives comma
// placement and (for objects) key/value alternation.
4810 COLLECTION_TYPE type;
4811 uint32_t valueCount;
4812 bool singleLineMode;
4815 VmaStringBuilder& m_SB;
4816 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
4817 bool m_InsideString;
4819 void BeginValue(
bool isString);
4820 void WriteIndent(
bool oneLess =
false);
4823 const char*
const VmaJsonWriter::INDENT =
"  ";
// Constructor — NOTE(review): the m_SB(sb) initializer line was lost in
// extraction (line numbers jump 4825→4827).
4825 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
4827 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
4828 m_InsideString(false)
4832 VmaJsonWriter::~VmaJsonWriter()
4834 VMA_ASSERT(!m_InsideString);
4835 VMA_ASSERT(m_Stack.empty());
// --- Out-of-line VmaJsonWriter method definitions ---
// NOTE(review): throughout this section the extraction dropped lines
// (visible as jumps in the embedded line numbers), including the characters
// actually emitted ('{', '}', '[', ']', digits) — comments note gaps.

// Opens a JSON object; the '{'-emitting and BeginValue lines were lost
// (numbers jump 4840→4846). A new StackItem is pushed to track members.
4838 void VmaJsonWriter::BeginObject(
bool singleLine)
4840 VMA_ASSERT(!m_InsideString);
4846 item.type = COLLECTION_TYPE_OBJECT;
4847 item.valueCount = 0;
4848 item.singleLineMode = singleLine;
4849 m_Stack.push_back(item);
// Closes the current object; asserts the innermost open collection is an
// object. Emission and pop lines lost (4854→4859 gap).
4852 void VmaJsonWriter::EndObject()
4854 VMA_ASSERT(!m_InsideString);
4859 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
// Array begin/end mirror the object versions.
4863 void VmaJsonWriter::BeginArray(
bool singleLine)
4865 VMA_ASSERT(!m_InsideString);
4871 item.type = COLLECTION_TYPE_ARRAY;
4872 item.valueCount = 0;
4873 item.singleLineMode = singleLine;
4874 m_Stack.push_back(item);
4877 void VmaJsonWriter::EndArray()
4879 VMA_ASSERT(!m_InsideString);
4884 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// Convenience: full quoted string in one call (body lost in extraction).
4888 void VmaJsonWriter::WriteString(
const char* pStr)
// Starts a quoted string; optional initial content appended immediately.
4894 void VmaJsonWriter::BeginString(
const char* pStr)
4896 VMA_ASSERT(!m_InsideString);
4900 m_InsideString =
true;
4901 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4903 ContinueString(pStr);
// Appends raw content inside an open string, escaping per JSON rules; the
// per-character switch (lines 4913–4944) was lost in extraction; unsupported
// control characters hit the assert below.
4907 void VmaJsonWriter::ContinueString(
const char* pStr)
4909 VMA_ASSERT(m_InsideString);
4911 const size_t strLen = strlen(pStr);
4912 for(
size_t i = 0; i < strLen; ++i)
4945 VMA_ASSERT(0 &&
"Character not currently supported.");
// Numeric ContinueString overloads; the m_SB.AddNumber(n) calls were lost.
4951 void VmaJsonWriter::ContinueString(uint32_t n)
4953 VMA_ASSERT(m_InsideString);
4957 void VmaJsonWriter::ContinueString(uint64_t n)
4959 VMA_ASSERT(m_InsideString);
4963 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
4965 VMA_ASSERT(m_InsideString);
4966 m_SB.AddPointer(ptr);
// Closes an open string, optionally appending final content first.
// NOTE(review): the closing-quote emission line was lost in extraction
// (numbers jump 4974→4977).
4969 void VmaJsonWriter::EndString(
const char* pStr)
4971 VMA_ASSERT(m_InsideString);
4972 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4974 ContinueString(pStr);
4977 m_InsideString =
false;
// Bare-value writers; BeginValue/m_SB emission lines lost in extraction.
4980 void VmaJsonWriter::WriteNumber(uint32_t n)
4982 VMA_ASSERT(!m_InsideString);
4987 void VmaJsonWriter::WriteNumber(uint64_t n)
4989 VMA_ASSERT(!m_InsideString);
4994 void VmaJsonWriter::WriteBool(
bool b)
4996 VMA_ASSERT(!m_InsideString);
4998 m_SB.Add(b ?
"true" :
"false");
5001 void VmaJsonWriter::WriteNull()
5003 VMA_ASSERT(!m_InsideString);
// Emits separators before a value: inside an object, even valueCount means a
// key is expected (must be a string); odd means this is the value after a
// key. The actual ':'/','/indent emission lines were lost in extraction.
5008 void VmaJsonWriter::BeginValue(
bool isString)
5010 if(!m_Stack.empty())
5012 StackItem& currItem = m_Stack.back();
5013 if(currItem.type == COLLECTION_TYPE_OBJECT &&
5014 currItem.valueCount % 2 == 0)
5016 VMA_ASSERT(isString);
5019 if(currItem.type == COLLECTION_TYPE_OBJECT &&
5020 currItem.valueCount % 2 != 0)
5024 else if(currItem.valueCount > 0)
5033 ++currItem.valueCount;
// Emits newline + one INDENT per open multi-line collection; oneLess is used
// when writing a closing bracket at the outer level.
5037 void VmaJsonWriter::WriteIndent(
bool oneLess)
5039 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
5043 size_t count = m_Stack.size();
5044 if(count > 0 && oneLess)
5048 for(
size_t i = 0; i < count; ++i)
5055 #endif // #if VMA_STATS_STRING_ENABLED 5059 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
5061 if(IsUserDataString())
5063 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
5065 FreeUserDataString(hAllocator);
5067 if(pUserData != VMA_NULL)
5069 const char*
const newStrSrc = (
char*)pUserData;
5070 const size_t newStrLen = strlen(newStrSrc);
5071 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
5072 memcpy(newStrDst, newStrSrc, newStrLen + 1);
5073 m_pUserData = newStrDst;
5078 m_pUserData = pUserData;
// Re-points a block allocation at a new block/offset (used by the
// defragmentator after moving the data). If the allocation is persistently
// mapped, the mapping reference count is transferred: old block unmapped,
// new block mapped the same number of times. NOTE(review): the VmaAllocator
// parameter line was lost in extraction (numbers jump 5082→5084) — the body
// uses `hAllocator`.
5082 void VmaAllocation_T::ChangeBlockAllocation(
5084 VmaDeviceMemoryBlock* block,
5085 VkDeviceSize offset)
5087 VMA_ASSERT(block != VMA_NULL);
5088 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5091 if(block != m_BlockAllocation.m_Block)
5093 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
// Persistent mappings count one extra reference beyond the flag bit.
// NOTE(review): the `++mapRefCount;` line appears to be missing (5094→5096).
5094 if(IsPersistentMap())
5096 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
5097 block->Map(hAllocator, mapRefCount, VMA_NULL);
5100 m_BlockAllocation.m_Block = block;
5101 m_BlockAllocation.m_Offset = offset;
// --- Type-dispatched accessors: switch bodies below lost their `switch`,
// `default:` and some return lines in extraction. ---
// Offset within the VkDeviceMemory; 0 for dedicated allocations.
5104 VkDeviceSize VmaAllocation_T::GetOffset()
const 5108 case ALLOCATION_TYPE_BLOCK:
5109 return m_BlockAllocation.m_Offset;
5110 case ALLOCATION_TYPE_DEDICATED:
5118 VkDeviceMemory VmaAllocation_T::GetMemory()
const 5122 case ALLOCATION_TYPE_BLOCK:
5123 return m_BlockAllocation.m_Block->GetDeviceMemory();
5124 case ALLOCATION_TYPE_DEDICATED:
5125 return m_DedicatedAllocation.m_hMemory;
5128 return VK_NULL_HANDLE;
5132 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 5136 case ALLOCATION_TYPE_BLOCK:
5137 return m_BlockAllocation.m_Block->GetMemoryTypeIndex();
5138 case ALLOCATION_TYPE_DEDICATED:
5139 return m_DedicatedAllocation.m_MemoryTypeIndex;
// Returns a pointer into the mapped block adjusted by the allocation's
// offset, or the dedicated mapping; null when not mapped (gap 5150→5153
// hides the m_MapCount check).
5146 void* VmaAllocation_T::GetMappedData()
const 5150 case ALLOCATION_TYPE_BLOCK:
5153 void* pBlockData = m_BlockAllocation.m_Block->GetMappedData();
5154 VMA_ASSERT(pBlockData != VMA_NULL);
5155 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
5162 case ALLOCATION_TYPE_DEDICATED:
5163 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
5164 return m_DedicatedAllocation.m_pMappedData;
// Dedicated allocations can never become lost.
5171 bool VmaAllocation_T::CanBecomeLost()
const 5175 case ALLOCATION_TYPE_BLOCK:
5176 return m_BlockAllocation.m_CanBecomeLost;
5177 case ALLOCATION_TYPE_DEDICATED:
5185 VmaPool VmaAllocation_T::GetPool()
const 5187 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
5188 return m_BlockAllocation.m_hPool;
// Attempts to mark this allocation as lost so its space can be reused.
// Fails when the allocation was used within the last frameInUseCount frames.
// NOTE(review): the surrounding retry loop, return statements and braces were
// lost in extraction (line numbers jump 5193→5199, 5202→5207, etc.) — the
// visible lines are only the CAS-loop skeleton.
5191 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5193 VMA_ASSERT(CanBecomeLost());
5199 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
// Already lost: nothing to do (original asserts and returns false here).
5202 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Still in use within the protected frame window: cannot be lost.
5207 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
// Weak CAS may fail spuriously; on failure the enclosing loop retries with
// the refreshed localLastUseFrameIndex.
5213 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Human-readable names indexed by VmaSuballocationType, used by the JSON
// dump (entries lost in extraction).
5223 #if VMA_STATS_STRING_ENABLED 5226 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
// Writes this allocation's properties as key/value pairs into an already-open
// JSON object. Stats-string builds only.
5235 void VmaAllocation_T::PrintParameters(
class VmaJsonWriter& json)
const 5237 json.WriteString(
"Type");
5238 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[m_SuballocationType]);
5240 json.WriteString(
"Size");
5241 json.WriteNumber(m_Size);
5243 if(m_pUserData != VMA_NULL)
5245 json.WriteString(
"UserData");
// String user data is emitted verbatim; opaque pointers are emitted as a
// formatted pointer value (the Begin/EndString lines around the pointer
// branch were lost in extraction).
5246 if(IsUserDataString())
5248 json.WriteString((
const char*)m_pUserData);
5253 json.ContinueString_Pointer(m_pUserData);
5258 json.WriteString(
"CreationFrameIndex");
5259 json.WriteNumber(m_CreationFrameIndex);
5261 json.WriteString(
"LastUseFrameIndex");
5262 json.WriteNumber(GetLastUseFrameIndex());
// Usage flags are only emitted when known (set via InitBufferImageUsage).
5264 if(m_BufferImageUsage != 0)
5266 json.WriteString(
"Usage");
5267 json.WriteNumber(m_BufferImageUsage);
5273 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
5275 VMA_ASSERT(IsUserDataString());
5276 if(m_pUserData != VMA_NULL)
5278 char*
const oldStr = (
char*)m_pUserData;
5279 const size_t oldStrLen = strlen(oldStr);
5280 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
5281 m_pUserData = VMA_NULL;
// Increments the per-allocation map reference count for a block allocation.
// The low 7 bits of m_MapCount hold the count; the top bit is the persistent
// flag, hence the 0x7F saturation check. NOTE(review): the ++m_MapCount line
// was lost in extraction (numbers jump 5289→5295).
5285 void VmaAllocation_T::BlockAllocMap()
5287 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
5289 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
5295 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
// Decrements the map reference count; unbalanced Unmap is a programmer error.
5299 void VmaAllocation_T::BlockAllocUnmap()
5301 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
5303 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
5309 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// Maps a dedicated allocation. Re-mapping an already-mapped allocation just
// bumps the count and returns the cached pointer; the first map calls
// vkMapMemory through the allocator's dispatch table. NOTE(review): several
// lines (count increment, vkMapMemory offset/size/flags arguments, the
// m_MapCount update on success) were lost in extraction.
5313 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
5315 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
5319 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
5321 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
5322 *ppData = m_DedicatedAllocation.m_pMappedData;
5328 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
5329 return VK_ERROR_MEMORY_MAP_FAILED;
5334 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5335 hAllocator->m_hDevice,
5336 m_DedicatedAllocation.m_hMemory,
5341 if(result == VK_SUCCESS)
5343 m_DedicatedAllocation.m_pMappedData = *ppData;
// Unmaps a dedicated allocation: decrements the map count and calls
// vkUnmapMemory only when the count reaches zero. NOTE(review): the
// decrement and the `if(m_MapCount == 0)` lines were lost in extraction
// (numbers jump 5354→5359).
5350 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
5352 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
5354 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
5359 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
5360 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
5361 hAllocator->m_hDevice,
5362 m_DedicatedAllocation.m_hMemory);
5367 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// Serializes one VmaStatInfo as a JSON object. NOTE(review): every
// json.WriteNumber(stat.*) line was lost in extraction — only the key
// strings remain visible.
5371 #if VMA_STATS_STRING_ENABLED 5373 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
5377 json.WriteString(
"Blocks");
5380 json.WriteString(
"Allocations");
5383 json.WriteString(
"UnusedRanges");
5386 json.WriteString(
"UsedBytes");
5389 json.WriteString(
"UnusedBytes");
// Min/Avg/Max sub-objects are emitted only when there is at least one
// allocation / unused range (the guarding ifs were lost in extraction).
5394 json.WriteString(
"AllocationSize");
5395 json.BeginObject(
true);
5396 json.WriteString(
"Min");
5398 json.WriteString(
"Avg");
5400 json.WriteString(
"Max");
5407 json.WriteString(
"UnusedRangeSize");
5408 json.BeginObject(
true);
5409 json.WriteString(
"Min");
5411 json.WriteString(
"Avg");
5413 json.WriteString(
"Max");
// Transparent comparator over suballocation-list iterators, ordered by
// region size; enables binary search of m_FreeSuballocationsBySize both
// against another iterator and against a bare size.
5421 #endif // #if VMA_STATS_STRING_ENABLED 5423 struct VmaSuballocationItemSizeLess
5426 const VmaSuballocationList::iterator lhs,
5427 const VmaSuballocationList::iterator rhs)
const 5429 return lhs->size < rhs->size;
5432 const VmaSuballocationList::iterator lhs,
5433 VkDeviceSize rhsSize)
const 5435 return lhs->size < rhsSize;
// Constructs empty metadata; both containers use the allocator's callbacks.
// NOTE(review): the m_Size/m_FreeCount/m_SumFreeSize initializer lines were
// lost in extraction (numbers jump 5442→5446).
5442 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
5446 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
5447 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
5451 VmaBlockMetadata::~VmaBlockMetadata()
// Initializes metadata for a block of `size` bytes: a single free
// suballocation covering the whole block, registered in the by-size index.
// NOTE(review): the m_Size = size, m_FreeCount = 1 and --suballocItem lines
// were lost in extraction.
5455 void VmaBlockMetadata::Init(VkDeviceSize size)
5459 m_SumFreeSize = size;
5461 VmaSuballocation suballoc = {};
5462 suballoc.offset = 0;
5463 suballoc.size = size;
5464 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5465 suballoc.hAllocation = VK_NULL_HANDLE;
5467 m_Suballocations.push_back(suballoc);
5468 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
5470 m_FreeSuballocationsBySize.push_back(suballocItem);
// Full consistency check of the metadata: offsets must tile the block with
// no gaps, no two free regions may be adjacent (they should have merged),
// per-allocation data must match, and cached counters must equal recomputed
// ones. Returns false on any violation. NOTE(review): most `return false;`
// lines inside the checks were lost in extraction.
5473 bool VmaBlockMetadata::Validate()
const 5475 if(m_Suballocations.empty())
// Recomputed values to compare against the cached members at the end.
5481 VkDeviceSize calculatedOffset = 0;
5483 uint32_t calculatedFreeCount = 0;
5485 VkDeviceSize calculatedSumFreeSize = 0;
5488 size_t freeSuballocationsToRegister = 0;
5490 bool prevFree =
false;
5492 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5493 suballocItem != m_Suballocations.cend();
5496 const VmaSuballocation& subAlloc = *suballocItem;
// Each suballocation must start exactly where the previous one ended.
5499 if(subAlloc.offset != calculatedOffset)
5504 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Two free neighbors indicate a missed MergeFreeWithNext.
5506 if(prevFree && currFree)
// Free regions carry no allocation handle; taken regions must carry one.
5511 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
5518 calculatedSumFreeSize += subAlloc.size;
5519 ++calculatedFreeCount;
// Only free regions at or above the threshold are indexed by size.
5520 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5522 ++freeSuballocationsToRegister;
// Taken region: the allocation's own record must agree with the metadata.
5527 if(subAlloc.hAllocation->GetOffset() != subAlloc.offset)
5531 if(subAlloc.hAllocation->GetSize() != subAlloc.size)
5537 calculatedOffset += subAlloc.size;
5538 prevFree = currFree;
5543 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
// The by-size index must contain only free regions, sorted ascending.
5548 VkDeviceSize lastSize = 0;
5549 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
5551 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
5554 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
5559 if(suballocItem->size < lastSize)
5564 lastSize = suballocItem->size;
// Final cross-check of cached aggregates against recomputed values.
5568 if(!ValidateFreeSuballocationList() ||
5569 (calculatedOffset != m_Size) ||
5570 (calculatedSumFreeSize != m_SumFreeSize) ||
5571 (calculatedFreeCount != m_FreeCount))
// Largest free region = last element of the ascending by-size index.
// NOTE(review): the `else return 0;` branch was lost in extraction.
5579 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 5581 if(!m_FreeSuballocationsBySize.empty())
5583 return m_FreeSuballocationsBySize.back()->size;
5591 bool VmaBlockMetadata::IsEmpty()
const 5593 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Fills a VmaStatInfo for this block by walking all suballocations.
// NOTE(review): the initialization of outInfo fields and the per-iteration
// accumulation lines were lost in extraction (numbers jump 5600→5612 and
// 5617→5630).
5596 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 5600 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5612 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5613 suballocItem != m_Suballocations.cend();
5616 const VmaSuballocation& suballoc = *suballocItem;
5617 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Accumulates this block's totals into a VmaPoolStats (used/unused counts
// were on lines lost in extraction).
5630 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 5632 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5634 inoutStats.
size += m_Size;
// Dumps this block's layout as JSON: aggregate counters followed by a
// "Suballocations" array with one object per region, in address order.
// Stats-string builds only.
5641 #if VMA_STATS_STRING_ENABLED 5643 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 5647 json.WriteString(
"TotalBytes");
5648 json.WriteNumber(m_Size);
5650 json.WriteString(
"UnusedBytes");
5651 json.WriteNumber(m_SumFreeSize);
5653 json.WriteString(
"Allocations");
// Allocation count = total regions minus free regions.
5654 json.WriteNumber((uint64_t)m_Suballocations.size() - m_FreeCount);
5656 json.WriteString(
"UnusedRanges");
5657 json.WriteNumber(m_FreeCount);
5659 json.WriteString(
"Suballocations");
// NOTE(review): the BeginArray() call and the `size_t i = 0` declaration were
// lost in extraction (numbers jump 5659→5662).
5662 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5663 suballocItem != m_Suballocations.cend();
5664 ++suballocItem, ++i)
5666 json.BeginObject(
true);
5668 json.WriteString(
"Offset");
5669 json.WriteNumber(suballocItem->offset);
// Free regions are printed inline; taken regions delegate to the
// allocation's own PrintParameters (which writes Type/Size/UserData/etc.).
5671 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5673 json.WriteString(
"Type");
5674 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[VMA_SUBALLOCATION_TYPE_FREE]);
5676 json.WriteString(
"Size");
5677 json.WriteNumber(suballocItem->size);
5681 suballocItem->hAllocation->PrintParameters(json);
5691 #endif // #if VMA_STATS_STRING_ENABLED 5703 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
5705 VMA_ASSERT(IsEmpty());
5706 pAllocationRequest->offset = 0;
5707 pAllocationRequest->sumFreeSize = m_SumFreeSize;
5708 pAllocationRequest->sumItemSize = 0;
5709 pAllocationRequest->item = m_Suballocations.begin();
5710 pAllocationRequest->itemsToMakeLostCount = 0;
// Tries to find space for a new allocation of allocSize/allocAlignment in
// this block. Returns an allocation request via pAllocationRequest.
// Two strategies:
//  1. Search registered free suballocations (binary search by size, then
//     validate each candidate with CheckAllocation).
//  2. If canMakeOtherLost, additionally consider evicting "lost-able"
//     allocations, picking the candidate with the lowest CalcCost().
// NOTE(review): several call sites here have elided argument lines
// (presumably calls to CheckAllocation) — verify against upstream.
5713 bool VmaBlockMetadata::CreateAllocationRequest(
5714 uint32_t currentFrameIndex,
5715 uint32_t frameInUseCount,
5716 VkDeviceSize bufferImageGranularity,
5717 VkDeviceSize allocSize,
5718 VkDeviceSize allocAlignment,
5719 VmaSuballocationType allocType,
5720 bool canMakeOtherLost,
5721 VmaAllocationRequest* pAllocationRequest)
5723 VMA_ASSERT(allocSize > 0);
5724 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5725 VMA_ASSERT(pAllocationRequest != VMA_NULL);
5726 VMA_HEAVY_ASSERT(Validate());
// Early out: without the make-lost option, total free space must suffice.
5729 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
5735 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
5736 if(freeSuballocCount > 0)
// Best-fit path: binary-search the size-sorted vector for the first free
// suballocation not smaller than the request, then scan upward.
5741 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5742 m_FreeSuballocationsBySize.data(),
5743 m_FreeSuballocationsBySize.data() + freeSuballocCount,
5745 VmaSuballocationItemSizeLess());
5746 size_t index = it - m_FreeSuballocationsBySize.data();
5747 for(; index < freeSuballocCount; ++index)
5752 bufferImageGranularity,
5756 m_FreeSuballocationsBySize[index],
5758 &pAllocationRequest->offset,
5759 &pAllocationRequest->itemsToMakeLostCount,
5760 &pAllocationRequest->sumFreeSize,
5761 &pAllocationRequest->sumItemSize))
5763 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Worst-fit fallback: scan all free suballocations from largest down.
5771 for(
size_t index = freeSuballocCount; index--; )
5776 bufferImageGranularity,
5780 m_FreeSuballocationsBySize[index],
5782 &pAllocationRequest->offset,
5783 &pAllocationRequest->itemsToMakeLostCount,
5784 &pAllocationRequest->sumFreeSize,
5785 &pAllocationRequest->sumItemSize))
5787 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
5794 if(canMakeOtherLost)
// Brute-force search over all suballocations, allowing eviction of
// allocations that can become lost; track the cheapest candidate.
// VK_WHOLE_SIZE acts as "worst possible cost" sentinel.
5798 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
5799 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
5801 VmaAllocationRequest tmpAllocRequest = {};
5802 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
5803 suballocIt != m_Suballocations.end();
5806 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
5807 suballocIt->hAllocation->CanBecomeLost())
5812 bufferImageGranularity,
5818 &tmpAllocRequest.offset,
5819 &tmpAllocRequest.itemsToMakeLostCount,
5820 &tmpAllocRequest.sumFreeSize,
5821 &tmpAllocRequest.sumItemSize))
5823 tmpAllocRequest.item = suballocIt;
5825 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
5827 *pAllocationRequest = tmpAllocRequest;
// Success if any candidate was found (sentinel was overwritten).
5833 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Executes the "make lost" part of a previously computed allocation
// request: walks forward from request->item, marking lost-able
// allocations as lost and merging them into free space, until
// itemsToMakeLostCount reaches zero.
5842 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
5843 uint32_t currentFrameIndex,
5844 uint32_t frameInUseCount,
5845 VmaAllocationRequest* pAllocationRequest)
5847 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Skip over already-free ranges; only live allocations can be made lost.
5849 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
5851 ++pAllocationRequest->item;
5853 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5854 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
5855 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
5856 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation merges the freed range with neighbors and returns
// an iterator to the resulting free suballocation.
5858 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
5859 --pAllocationRequest->itemsToMakeLostCount;
// Postcondition: the request now points at a free suballocation.
5867 VMA_HEAVY_ASSERT(Validate());
5868 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5869 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Marks every allocation in this block that can become lost (and is old
// enough per frameInUseCount) as lost, freeing its range. Returns how
// many allocations were lost.
5874 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5876 uint32_t lostAllocationCount = 0;
5877 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
5878 it != m_Suballocations.end();
5881 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
5882 it->hAllocation->CanBecomeLost() &&
5883 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; it returns the surviving iterator.
5885 it = FreeSuballocation(it);
5886 ++lostAllocationCount;
5889 return lostAllocationCount;
// Commits a previously validated allocation request: carves allocSize
// bytes out of the free suballocation at request.item, inserting new
// free "padding" suballocations before/after the carved range as needed,
// and updates m_FreeCount / m_SumFreeSize bookkeeping.
5892 void VmaBlockMetadata::Alloc(
5893 const VmaAllocationRequest& request,
5894 VmaSuballocationType type,
5895 VkDeviceSize allocSize,
5898 VMA_ASSERT(request.item != m_Suballocations.end());
5899 VmaSuballocation& suballoc = *request.item;
5901 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5903 VMA_ASSERT(request.offset >= suballoc.offset);
// paddingBegin/paddingEnd are the leftover free bytes on either side of
// the aligned allocation within the chosen free range.
5904 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
5905 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
5906 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The range stops being free: remove it from the size-sorted registry
// before mutating it.
5910 UnregisterFreeSuballocation(request.item);
5912 suballoc.offset = request.offset;
5913 suballoc.size = allocSize;
5914 suballoc.type = type;
5915 suballoc.hAllocation = hAllocation;
// Insert trailing padding as a new free suballocation after the item.
5920 VmaSuballocation paddingSuballoc = {};
5921 paddingSuballoc.offset = request.offset + allocSize;
5922 paddingSuballoc.size = paddingEnd;
5923 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5924 VmaSuballocationList::iterator next = request.item;
5926 const VmaSuballocationList::iterator paddingEndItem =
5927 m_Suballocations.insert(next, paddingSuballoc);
5928 RegisterFreeSuballocation(paddingEndItem);
// Insert leading padding as a new free suballocation before the item.
5934 VmaSuballocation paddingSuballoc = {};
5935 paddingSuballoc.offset = request.offset - paddingBegin;
5936 paddingSuballoc.size = paddingBegin;
5937 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5938 const VmaSuballocationList::iterator paddingBeginItem =
5939 m_Suballocations.insert(request.item, paddingSuballoc);
5940 RegisterFreeSuballocation(paddingBeginItem);
// One free range was consumed; paddings (if any) add back to the count.
5944 m_FreeCount = m_FreeCount - 1;
5945 if(paddingBegin > 0)
5953 m_SumFreeSize -= allocSize;
// NOTE(review): this is the body of VmaBlockMetadata::Free(allocation);
// the signature line was lost in extraction — confirm against upstream.
// Linear-searches the suballocation list for the entry owning the given
// allocation handle and frees it; asserts if not found.
5958 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5959 suballocItem != m_Suballocations.end();
5962 VmaSuballocation& suballoc = *suballocItem;
5963 if(suballoc.hAllocation == allocation)
5965 FreeSuballocation(suballocItem);
5966 VMA_HEAVY_ASSERT(Validate());
// Reaching here means the allocation does not belong to this block.
5970 VMA_ASSERT(0 &&
"Not found!");
// Frees the suballocation that starts exactly at the given offset.
// Linear search; asserts if no suballocation matches the offset.
5973 void VmaBlockMetadata::FreeAtOffset(VkDeviceSize offset)
5975 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5976 suballocItem != m_Suballocations.end();
5979 VmaSuballocation& suballoc = *suballocItem;
5980 if(suballoc.offset == offset)
5982 FreeSuballocation(suballocItem);
5986 VMA_ASSERT(0 &&
"Not found!");
// Sanity-checks m_FreeSuballocationsBySize: every entry must be FREE,
// at least VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER bytes, and the
// vector must be sorted by size ascending (lastSize tracks the previous
// element to verify ordering).
5989 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 5991 VkDeviceSize lastSize = 0;
5992 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5994 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5996 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
6001 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6006 if(it->size < lastSize)
6012 lastSize = it->size;
// Checks whether an allocation of allocSize/allocAlignment/allocType can
// be placed starting at the free (or lost-able) suballocation
// suballocItem. On success writes the final aligned offset to *pOffset
// and, when canMakeOtherLost, how many subsequent allocations must be
// made lost plus the free/item byte sums used for cost comparison.
// Two symmetric code paths: with and without the make-lost option.
6017 bool VmaBlockMetadata::CheckAllocation(
6018 uint32_t currentFrameIndex,
6019 uint32_t frameInUseCount,
6020 VkDeviceSize bufferImageGranularity,
6021 VkDeviceSize allocSize,
6022 VkDeviceSize allocAlignment,
6023 VmaSuballocationType allocType,
6024 VmaSuballocationList::const_iterator suballocItem,
6025 bool canMakeOtherLost,
6026 VkDeviceSize* pOffset,
6027 size_t* itemsToMakeLostCount,
6028 VkDeviceSize* pSumFreeSize,
6029 VkDeviceSize* pSumItemSize)
const 6031 VMA_ASSERT(allocSize > 0);
6032 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
6033 VMA_ASSERT(suballocItem != m_Suballocations.cend());
6034 VMA_ASSERT(pOffset != VMA_NULL);
6036 *itemsToMakeLostCount = 0;
// --- Path 1: the allocation may consume subsequent lost-able items. ---
6040 if(canMakeOtherLost)
6042 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6044 *pSumFreeSize = suballocItem->size;
// A used item is only acceptable if it can be made lost and its last-use
// frame is old enough relative to frameInUseCount.
6048 if(suballocItem->hAllocation->CanBecomeLost() &&
6049 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
6051 ++*itemsToMakeLostCount;
6052 *pSumItemSize = suballocItem->size;
// Remaining space from this item to end of block must fit the request.
6061 if(m_Size - suballocItem->offset < allocSize)
6067 *pOffset = suballocItem->offset;
// Optional debug margin before the allocation (not for the first item).
6070 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
6072 *pOffset += VMA_DEBUG_MARGIN;
6076 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Respect VkPhysicalDeviceLimits::bufferImageGranularity: if a previous
// suballocation of a conflicting type shares the same "page", bump the
// alignment up to the granularity.
6080 if(bufferImageGranularity > 1)
6082 bool bufferImageGranularityConflict =
false;
6083 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
6084 while(prevSuballocItem != m_Suballocations.cbegin())
6087 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
6088 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
6090 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
6092 bufferImageGranularityConflict =
true;
6100 if(bufferImageGranularityConflict)
6102 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Alignment may have pushed the offset past this whole suballocation.
6108 if(*pOffset >= suballocItem->offset + suballocItem->size)
6114 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
6117 VmaSuballocationList::const_iterator next = suballocItem;
6119 const VkDeviceSize requiredEndMargin =
6120 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
6122 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
6124 if(suballocItem->offset + totalSize > m_Size)
// The request may span several items; walk forward accumulating free
// space and lost-able items until the remaining size is covered.
6131 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
6132 if(totalSize > suballocItem->size)
6134 VkDeviceSize remainingSize = totalSize - suballocItem->size;
6135 while(remainingSize > 0)
6138 if(lastSuballocItem == m_Suballocations.cend())
6142 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6144 *pSumFreeSize += lastSuballocItem->size;
6148 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
6149 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
6150 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
6152 ++*itemsToMakeLostCount;
6153 *pSumItemSize += lastSuballocItem->size;
6160 remainingSize = (lastSuballocItem->size < remainingSize) ?
6161 remainingSize - lastSuballocItem->size : 0;
// Granularity check against following suballocations: a conflicting type
// on the same page after us must itself be lost-able, else fail.
6167 if(bufferImageGranularity > 1)
6169 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
6171 while(nextSuballocItem != m_Suballocations.cend())
6173 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
6174 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
6176 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
6178 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
6179 if(nextSuballoc.hAllocation->CanBecomeLost() &&
6180 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
6182 ++*itemsToMakeLostCount;
// --- Path 2: plain placement inside a single free suballocation. ---
6201 const VmaSuballocation& suballoc = *suballocItem;
6202 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
6204 *pSumFreeSize = suballoc.size;
6207 if(suballoc.size < allocSize)
6213 *pOffset = suballoc.offset;
6216 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
6218 *pOffset += VMA_DEBUG_MARGIN;
6222 *pOffset = VmaAlignUp(*pOffset, allocAlignment);
// Same bufferImageGranularity conflict check against predecessors.
6226 if(bufferImageGranularity > 1)
6228 bool bufferImageGranularityConflict =
false;
6229 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
6230 while(prevSuballocItem != m_Suballocations.cbegin())
6233 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
6234 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
6236 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
6238 bufferImageGranularityConflict =
true;
6246 if(bufferImageGranularityConflict)
6248 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
6253 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
6256 VmaSuballocationList::const_iterator next = suballocItem;
6258 const VkDeviceSize requiredEndMargin =
6259 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Everything (leading padding + allocation + end margin) must fit in
// this one free suballocation on the simple path.
6262 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// And a conflicting-type successor on the same page makes this spot
// unusable on the simple path (no eviction allowed here).
6269 if(bufferImageGranularity > 1)
6271 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
6273 while(nextSuballocItem != m_Suballocations.cend())
6275 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
6276 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
6278 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges the free suballocation at `item` with the free suballocation
// immediately following it: sizes are combined into `item` and the
// successor node is erased. Both must already be FREE.
6297 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
6299 VMA_ASSERT(item != m_Suballocations.end());
6300 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6302 VmaSuballocationList::iterator nextItem = item;
6304 VMA_ASSERT(nextItem != m_Suballocations.end());
6305 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
6307 item->size += nextItem->size;
6309 m_Suballocations.erase(nextItem);
// Converts a used suballocation to FREE, coalesces it with free
// neighbors (previous and/or next), updates m_SumFreeSize, and
// re-registers the resulting free range in the size-sorted vector.
// Returns an iterator to the surviving (possibly merged) free item.
6312 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
6315 VmaSuballocation& suballoc = *suballocItem;
6316 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
6317 suballoc.hAllocation = VK_NULL_HANDLE;
6321 m_SumFreeSize += suballoc.size;
6324 bool mergeWithNext =
false;
6325 bool mergeWithPrev =
false;
// Check whether the following suballocation is free and mergeable.
6327 VmaSuballocationList::iterator nextItem = suballocItem;
6329 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
6331 mergeWithNext =
true;
// Check whether the preceding suballocation is free and mergeable.
6334 VmaSuballocationList::iterator prevItem = suballocItem;
6335 if(suballocItem != m_Suballocations.begin())
6338 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6340 mergeWithPrev =
true;
// Neighbors must be unregistered before merging mutates their sizes,
// then the merged range is registered once.
6346 UnregisterFreeSuballocation(nextItem);
6347 MergeFreeWithNext(suballocItem);
6352 UnregisterFreeSuballocation(prevItem);
6353 MergeFreeWithNext(prevItem);
6354 RegisterFreeSuballocation(prevItem);
6359 RegisterFreeSuballocation(suballocItem);
6360 return suballocItem;
// Adds a free suballocation to m_FreeSuballocationsBySize, keeping the
// vector sorted by size. Ranges smaller than
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are deliberately not
// tracked (too small to be worth searching).
6364 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
6366 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6367 VMA_ASSERT(item->size > 0);
6371 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
6373 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6375 if(m_FreeSuballocationsBySize.empty())
6377 m_FreeSuballocationsBySize.push_back(item);
6381 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a free suballocation from m_FreeSuballocationsBySize.
// Binary-searches by size, then scans forward through equal-sized
// entries to find the exact iterator; asserts if it is not present.
6389 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
6391 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6392 VMA_ASSERT(item->size > 0);
6396 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Only ranges large enough to have been registered need removal.
6398 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6400 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
6401 m_FreeSuballocationsBySize.data(),
6402 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
6404 VmaSuballocationItemSizeLess());
6405 for(
size_t index = it - m_FreeSuballocationsBySize.data();
6406 index < m_FreeSuballocationsBySize.size();
6409 if(m_FreeSuballocationsBySize[index] == item)
6411 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Still scanning same-sized entries — past them means the item is absent.
6414 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
6416 VMA_ASSERT(0 &&
"Not found.");
// Constructor: initializes members to "not yet initialized" sentinels;
// real setup happens in Init() after VkDeviceMemory is allocated.
6425 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
6426 m_Metadata(hAllocator),
6427 m_MemoryTypeIndex(UINT32_MAX),
6429 m_hMemory(VK_NULL_HANDLE),
6431 m_pMappedData(VMA_NULL)
// Takes ownership of a freshly allocated VkDeviceMemory handle and
// initializes the suballocation metadata to one free range of newSize.
// Must not be called on an already-initialized block.
6435 void VmaDeviceMemoryBlock::Init(
6436 uint32_t newMemoryTypeIndex,
6437 VkDeviceMemory newMemory,
6438 VkDeviceSize newSize,
6441 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
6443 m_MemoryTypeIndex = newMemoryTypeIndex;
6445 m_hMemory = newMemory;
6447 m_Metadata.Init(newSize);
// Releases the VkDeviceMemory back through the allocator. The block must
// be empty — leaked suballocations indicate user error, hence the assert.
6450 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
6454 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
6456 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
6457 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
6458 m_hMemory = VK_NULL_HANDLE;
// Validates the block: the memory handle must exist, the size must be
// non-zero, and the metadata's own invariants must hold.
6461 bool VmaDeviceMemoryBlock::Validate()
const 6463 if((m_hMemory == VK_NULL_HANDLE) ||
6464 (m_Metadata.GetSize() == 0))
6469 return m_Metadata.Validate();
// Reference-counted map of the whole block. If already mapped, bumps
// m_MapCount and returns the cached pointer; otherwise calls
// vkMapMemory and caches the result. Guarded by the block mutex when
// the allocator was created with mutexes enabled.
6472 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
6479 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
// Already mapped: just increase the reference count.
6482 m_MapCount += count;
6483 VMA_ASSERT(m_pMappedData != VMA_NULL);
6484 if(ppData != VMA_NULL)
6486 *ppData = m_pMappedData;
// First map: go through the dispatched vkMapMemory function pointer.
6492 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6493 hAllocator->m_hDevice,
6499 if(result == VK_SUCCESS)
6501 if(ppData != VMA_NULL)
6503 *ppData = m_pMappedData;
// Reference-counted unmap: decrements m_MapCount by `count` and only
// calls vkUnmapMemory when the count drops to zero. Unbalanced unmaps
// are a user error and trigger the assert.
6511 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
6518 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6519 if(m_MapCount >= count)
6521 m_MapCount -= count;
6524 m_pMappedData = VMA_NULL;
6525 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, m_hMemory);
6530 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Binds a VkBuffer to this block's memory at the allocation's offset.
// The block mutex serializes this with map/unmap because vkBindBufferMemory
// must not race with other operations on the same VkDeviceMemory.
6534 VkResult VmaDeviceMemoryBlock::BindBufferMemory(
6539 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
6540 hAllocation->GetBlock() ==
this);
6542 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6543 return hAllocator->GetVulkanFunctions().vkBindBufferMemory(
6544 hAllocator->m_hDevice,
6547 hAllocation->GetOffset());
// Same as BindBufferMemory, but for VkImage via vkBindImageMemory.
6550 VkResult VmaDeviceMemoryBlock::BindImageMemory(
6555 VMA_ASSERT(hAllocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK &&
6556 hAllocation->GetBlock() ==
this);
6558 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6559 return hAllocator->GetVulkanFunctions().vkBindImageMemory(
6560 hAllocator->m_hDevice,
6563 hAllocation->GetOffset());
// NOTE(review): fragments of two static stat helpers — the zeroing body
// of an init function (signature elided) and the signature of
// VmaPostprocessCalcStatInfo. Bodies largely lost in extraction.
6568 memset(&outInfo, 0,
sizeof(outInfo));
// Post-processing of accumulated VmaStatInfo (e.g. averages) — body elided.
6587 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool_T constructor: forwards the pool create-info fields into the
// embedded block vector (member-initializer list partially elided here).
6595 VmaPool_T::VmaPool_T(
6600 createInfo.memoryTypeIndex,
6601 createInfo.blockSize,
6602 createInfo.minBlockCount,
6603 createInfo.maxBlockCount,
6605 createInfo.frameInUseCount,
6611 VmaPool_T::~VmaPool_T()
// VmaBlockVector: manages the list of VkDeviceMemory blocks for one
// memory type. Constructor only stores configuration; blocks are
// created lazily (or via CreateMinBlocks).
6615 #if VMA_STATS_STRING_ENABLED 6617 #endif // #if VMA_STATS_STRING_ENABLED 6619 VmaBlockVector::VmaBlockVector(
6621 uint32_t memoryTypeIndex,
6622 VkDeviceSize preferredBlockSize,
6623 size_t minBlockCount,
6624 size_t maxBlockCount,
6625 VkDeviceSize bufferImageGranularity,
6626 uint32_t frameInUseCount,
6627 bool isCustomPool) :
6628 m_hAllocator(hAllocator),
6629 m_MemoryTypeIndex(memoryTypeIndex),
6630 m_PreferredBlockSize(preferredBlockSize),
6631 m_MinBlockCount(minBlockCount),
6632 m_MaxBlockCount(maxBlockCount),
6633 m_BufferImageGranularity(bufferImageGranularity),
6634 m_FrameInUseCount(frameInUseCount),
6635 m_IsCustomPool(isCustomPool),
6636 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
6637 m_HasEmptyBlock(false),
6638 m_pDefragmentator(VMA_NULL),
// Destructor: destroys all owned memory blocks in reverse order. A live
// defragmentator at this point is a usage error (must be destroyed first).
6643 VmaBlockVector::~VmaBlockVector()
6645 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
6647 for(
size_t i = m_Blocks.size(); i--; )
6649 m_Blocks[i]->Destroy(m_hAllocator);
6650 vma_delete(m_hAllocator, m_Blocks[i]);
// Eagerly creates m_MinBlockCount blocks of the preferred size (used for
// custom pools with a minimum block count). Stops on first failure.
6654 VkResult VmaBlockVector::CreateMinBlocks()
6656 for(
size_t i = 0; i < m_MinBlockCount; ++i)
6658 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
6659 if(res != VK_SUCCESS)
// Aggregates statistics over all blocks into *pStats, under the vector
// mutex so the block list cannot change mid-iteration.
6667 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
6675 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6677 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6679 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6681 VMA_HEAVY_ASSERT(pBlock->Validate());
6682 pBlock->m_Metadata.AddPoolStats(*pStats);
// Upper bound on make-lost retry iterations in Allocate() below.
6686 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates from this block vector in three escalating phases:
//  1. Try every existing block (without making anything lost).
//  2. If allowed, create a new block — for default pools the size is
//     adaptively halved up to NEW_BLOCK_SIZE_SHIFT_MAX times.
//  3. If canMakeOtherLost, retry up to VMA_ALLOCATION_TRY_COUNT times,
//     each time evicting the cheapest set of lost-able allocations.
// Entire operation runs under the vector mutex.
6688 VkResult VmaBlockVector::Allocate(
6690 uint32_t currentFrameIndex,
6692 VkDeviceSize alignment,
6694 VmaSuballocationType suballocType,
6700 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// ---- Phase 1: existing blocks, no eviction. ----
6704 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6706 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6707 VMA_ASSERT(pCurrBlock);
6708 VmaAllocationRequest currRequest = {};
6709 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6712 m_BufferImageGranularity,
6720 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistently-mapped allocations map the whole block up front.
6724 VkResult res = pCurrBlock->Map(m_hAllocator, 1, VMA_NULL);
6725 if(res != VK_SUCCESS)
6732 if(pCurrBlock->m_Metadata.IsEmpty())
6734 m_HasEmptyBlock =
false;
6737 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6738 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, size, *pAllocation);
6739 (*pAllocation)->InitBlockAllocation(
6748 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
6749 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6750 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// ---- Phase 2: create a new block if the cap allows. ----
6755 const bool canCreateNewBlock =
6757 (m_Blocks.size() < m_MaxBlockCount);
6760 if(canCreateNewBlock)
6763 VkDeviceSize newBlockSize = m_PreferredBlockSize;
6764 uint32_t newBlockSizeShift = 0;
6765 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// For default (non-custom) pools: start below the preferred size when
// the existing blocks are small and the request is small, to avoid
// over-allocating device memory early.
6769 if(m_IsCustomPool ==
false)
6772 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
6773 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
6775 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6776 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= size * 2)
6778 newBlockSize = smallerNewBlockSize;
6779 ++newBlockSizeShift;
6788 size_t newBlockIndex = 0;
6789 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On OOM, retry with progressively halved block sizes (default pools only).
6791 if(m_IsCustomPool ==
false)
6793 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
6795 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6796 if(smallerNewBlockSize >= size)
6798 newBlockSize = smallerNewBlockSize;
6799 ++newBlockSizeShift;
6800 res = CreateBlock(newBlockSize, &newBlockIndex);
6809 if(res == VK_SUCCESS)
6811 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
6812 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= size);
6816 res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
6817 if(res != VK_SUCCESS)
// New block is empty: the trivial first-allocation request suffices.
6824 VmaAllocationRequest allocRequest;
6825 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
6826 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6827 pBlock->m_Metadata.Alloc(allocRequest, suballocType, size, *pAllocation);
6828 (*pAllocation)->InitBlockAllocation(
6831 allocRequest.offset,
6837 VMA_HEAVY_ASSERT(pBlock->Validate());
6838 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
6839 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// ---- Phase 3: evict lost-able allocations and retry. ----
6847 if(canMakeOtherLost)
6849 uint32_t tryIndex = 0;
6850 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
6852 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
6853 VmaAllocationRequest bestRequest = {};
6854 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Find the block whose eviction plan has the lowest cost.
6858 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6860 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6861 VMA_ASSERT(pCurrBlock);
6862 VmaAllocationRequest currRequest = {};
6863 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6866 m_BufferImageGranularity,
6873 const VkDeviceSize currRequestCost = currRequest.CalcCost();
6874 if(pBestRequestBlock == VMA_NULL ||
6875 currRequestCost < bestRequestCost)
6877 pBestRequestBlock = pCurrBlock;
6878 bestRequest = currRequest;
6879 bestRequestCost = currRequestCost;
// Cost 0 means nothing needs to be lost — cannot do better, stop searching.
6881 if(bestRequestCost == 0)
6889 if(pBestRequestBlock != VMA_NULL)
6893 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
6894 if(res != VK_SUCCESS)
// Eviction may fail if another thread touched the allocations meanwhile;
// in that case the outer loop retries from scratch.
6900 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
6906 if(pBestRequestBlock->m_Metadata.IsEmpty())
6908 m_HasEmptyBlock =
false;
6911 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6912 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, size, *pAllocation);
6913 (*pAllocation)->InitBlockAllocation(
6922 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
6923 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6924 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// Gave up after the retry budget: too much concurrent churn.
6938 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
6940 return VK_ERROR_TOO_MANY_OBJECTS;
6944 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees one allocation back to its block. Keeps at most one empty block
// alive as a cache (m_HasEmptyBlock); a second empty block — or the last
// block when another empty one exists — is scheduled for destruction.
// The actual VkDeviceMemory release happens after the mutex is dropped
// (pBlockToDelete), keeping the expensive Vulkan call outside the lock.
6947 void VmaBlockVector::Free(
6950 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
6954 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6956 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
// Balance the implicit Map() done for persistently mapped allocations.
6958 if(hAllocation->IsPersistentMap())
6960 pBlock->Unmap(m_hAllocator, 1);
6963 pBlock->m_Metadata.Free(hAllocation);
6964 VMA_HEAVY_ASSERT(pBlock->Validate());
6966 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
6969 if(pBlock->m_Metadata.IsEmpty())
// Already have one empty block cached — delete this one (if above min).
6972 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
6974 pBlockToDelete = pBlock;
6980 m_HasEmptyBlock =
true;
// This free didn't empty a block, but an empty block may have become
// redundant; if the last block is empty, release it.
6985 else if(m_HasEmptyBlock)
6987 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
6988 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
6990 pBlockToDelete = pLastBlock;
6991 m_Blocks.pop_back();
6992 m_HasEmptyBlock =
false;
6996 IncrementallySortBlocks();
// Destroy outside the lock scope.
7001 if(pBlockToDelete != VMA_NULL)
7003 VMA_DEBUG_LOG(
" Deleted empty allocation");
7004 pBlockToDelete->Destroy(m_hAllocator);
7005 vma_delete(m_hAllocator, pBlockToDelete);
// Returns the size of the largest existing block. Iterates from the end
// and stops early once the preferred block size is reached (it cannot
// matter beyond that — used only by the adaptive-sizing heuristic).
7009 VkDeviceSize VmaBlockVector::CalcMaxBlockSize()
const 7011 VkDeviceSize result = 0;
7012 for(
size_t i = m_Blocks.size(); i--; )
7014 result = VMA_MAX(result, m_Blocks[i]->m_Metadata.GetSize());
7015 if(result >= m_PreferredBlockSize)
// Removes a block pointer from m_Blocks (does not destroy the block).
7023 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
7025 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7027 if(m_Blocks[blockIndex] == pBlock)
7029 VmaVectorRemove(m_Blocks, blockIndex);
// One bubble-sort pass ordering blocks by ascending free space, so that
// fuller blocks are tried first by Allocate(). A single pass per call
// amortizes the sorting cost across operations.
7036 void VmaBlockVector::IncrementallySortBlocks()
7039 for(
size_t i = 1; i < m_Blocks.size(); ++i)
7041 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
7043 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i])
// Allocates a new VkDeviceMemory of blockSize via the allocator, wraps
// it in a VmaDeviceMemoryBlock, appends it to m_Blocks, and optionally
// reports its index through pNewBlockIndex.
7049 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
7051 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
7052 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
7053 allocInfo.allocationSize = blockSize;
7054 VkDeviceMemory mem = VK_NULL_HANDLE;
7055 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
7064 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
7068 allocInfo.allocationSize,
7071 m_Blocks.push_back(pBlock);
7072 if(pNewBlockIndex != VMA_NULL)
7074 *pNewBlockIndex = m_Blocks.size() - 1;
// Serializes this block vector as JSON: custom-pool metadata (memory
// type, block size, count limits, frame-in-use count) or default-pool
// metadata (preferred block size), followed by per-block detail maps
// keyed by block id.
7080 #if VMA_STATS_STRING_ENABLED 7082 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
7084 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Custom-pool branch: full configuration is reported.
7090 json.WriteString(
"MemoryTypeIndex");
7091 json.WriteNumber(m_MemoryTypeIndex);
7093 json.WriteString(
"BlockSize");
7094 json.WriteNumber(m_PreferredBlockSize);
7096 json.WriteString(
"BlockCount");
7097 json.BeginObject(
true);
7098 if(m_MinBlockCount > 0)
7100 json.WriteString(
"Min");
7101 json.WriteNumber((uint64_t)m_MinBlockCount);
7103 if(m_MaxBlockCount < SIZE_MAX)
7105 json.WriteString(
"Max");
7106 json.WriteNumber((uint64_t)m_MaxBlockCount);
7108 json.WriteString(
"Cur");
7109 json.WriteNumber((uint64_t)m_Blocks.size());
7112 if(m_FrameInUseCount > 0)
7114 json.WriteString(
"FrameInUseCount");
7115 json.WriteNumber(m_FrameInUseCount);
// Default-pool branch: only the preferred block size is meaningful.
7120 json.WriteString(
"PreferredBlockSize");
7121 json.WriteNumber(m_PreferredBlockSize);
7124 json.WriteString(
"Blocks");
7126 for(
size_t i = 0; i < m_Blocks.size(); ++i)
7129 json.ContinueString(m_Blocks[i]->GetId());
7132 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
// Lazily creates the defragmentator for this vector (one per vector).
7139 #endif // #if VMA_STATS_STRING_ENABLED 7141 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
7143 uint32_t currentFrameIndex)
7145 if(m_pDefragmentator == VMA_NULL)
7147 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
7153 return m_pDefragmentator;
// Runs the defragmentator against this vector's blocks (within the
// maxBytesToMove/maxAllocationsToMove budgets), accumulates the moved
// totals into pDefragmentationStats, then destroys blocks that became
// empty — keeping at most one empty block and never dropping below
// m_MinBlockCount.
7156 VkResult VmaBlockVector::Defragment(
7158 VkDeviceSize& maxBytesToMove,
7159 uint32_t& maxAllocationsToMove)
7161 if(m_pDefragmentator == VMA_NULL)
7166 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7169 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
7172 if(pDefragmentationStats != VMA_NULL)
7174 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
7175 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
// The defragmentator must respect the caller-provided budgets.
7178 VMA_ASSERT(bytesMoved <= maxBytesToMove);
7179 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Reap empty blocks created by the moves; iterate backwards because
// VmaVectorRemove shifts elements.
7185 m_HasEmptyBlock =
false;
7186 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
7188 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
7189 if(pBlock->m_Metadata.IsEmpty())
7191 if(m_Blocks.size() > m_MinBlockCount)
7193 if(pDefragmentationStats != VMA_NULL)
7196 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
7199 VmaVectorRemove(m_Blocks, blockIndex);
7200 pBlock->Destroy(m_hAllocator);
7201 vma_delete(m_hAllocator, pBlock);
7205 m_HasEmptyBlock =
true;
// Destroys the defragmentator created by EnsureDefragmentator (no-op if
// none exists).
7213 void VmaBlockVector::DestroyDefragmentator()
7215 if(m_pDefragmentator != VMA_NULL)
7217 vma_delete(m_hAllocator, m_pDefragmentator);
7218 m_pDefragmentator = VMA_NULL;
// Makes every lost-able allocation in every block lost (used by
// vmaMakePoolAllocationsLost). Optionally reports the total count.
7222 void VmaBlockVector::MakePoolAllocationsLost(
7223 uint32_t currentFrameIndex,
7224 size_t* pLostAllocationCount)
7226 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7227 size_t lostAllocationCount = 0;
7228 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7230 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
7232 lostAllocationCount += pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
7234 if(pLostAllocationCount != VMA_NULL)
7236 *pLostAllocationCount = lostAllocationCount;
// Accumulates per-block statistics into the global VmaStats: each
// block's stat info is added to the grand total, its memory type's
// bucket, and its memory heap's bucket.
7240 void VmaBlockVector::AddStats(
VmaStats* pStats)
7242 const uint32_t memTypeIndex = m_MemoryTypeIndex;
7243 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
7245 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
7247 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
7249 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
7251 VMA_HEAVY_ASSERT(pBlock->Validate());
7253 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
7254 VmaAddStatInfo(pStats->
total, allocationStatInfo);
7255 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
7256 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
7263 VmaDefragmentator::VmaDefragmentator(
7265 VmaBlockVector* pBlockVector,
7266 uint32_t currentFrameIndex) :
7267 m_hAllocator(hAllocator),
7268 m_pBlockVector(pBlockVector),
7269 m_CurrentFrameIndex(currentFrameIndex),
7271 m_AllocationsMoved(0),
7272 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
7273 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
7277 VmaDefragmentator::~VmaDefragmentator()
7279 for(
size_t i = m_Blocks.size(); i--; )
7281 vma_delete(m_hAllocator, m_Blocks[i]);
7285 void VmaDefragmentator::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
7287 AllocationInfo allocInfo;
7288 allocInfo.m_hAllocation = hAlloc;
7289 allocInfo.m_pChanged = pChanged;
7290 m_Allocations.push_back(allocInfo);
7293 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData)
7296 if(m_pMappedDataForDefragmentation)
7298 *ppMappedData = m_pMappedDataForDefragmentation;
7303 if(m_pBlock->GetMappedData())
7305 *ppMappedData = m_pBlock->GetMappedData();
7310 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
7311 *ppMappedData = m_pMappedDataForDefragmentation;
7315 void VmaDefragmentator::BlockInfo::Unmap(
VmaAllocator hAllocator)
7317 if(m_pMappedDataForDefragmentation != VMA_NULL)
7319 m_pBlock->Unmap(hAllocator, 1);
7323 VkResult VmaDefragmentator::DefragmentRound(
7324 VkDeviceSize maxBytesToMove,
7325 uint32_t maxAllocationsToMove)
7327 if(m_Blocks.empty())
7332 size_t srcBlockIndex = m_Blocks.size() - 1;
7333 size_t srcAllocIndex = SIZE_MAX;
7339 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
7341 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
7344 if(srcBlockIndex == 0)
7351 srcAllocIndex = SIZE_MAX;
7356 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
7360 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
7361 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
7363 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
7364 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
7365 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
7366 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
7369 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
7371 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
7372 VmaAllocationRequest dstAllocRequest;
7373 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
7374 m_CurrentFrameIndex,
7375 m_pBlockVector->GetFrameInUseCount(),
7376 m_pBlockVector->GetBufferImageGranularity(),
7381 &dstAllocRequest) &&
7383 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
7385 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
7388 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
7389 (m_BytesMoved + size > maxBytesToMove))
7391 return VK_INCOMPLETE;
7394 void* pDstMappedData = VMA_NULL;
7395 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
7396 if(res != VK_SUCCESS)
7401 void* pSrcMappedData = VMA_NULL;
7402 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
7403 if(res != VK_SUCCESS)
7410 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
7411 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
7412 static_cast<size_t>(size));
7414 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
7415 pSrcBlockInfo->m_pBlock->m_Metadata.FreeAtOffset(srcOffset);
7417 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
7419 if(allocInfo.m_pChanged != VMA_NULL)
7421 *allocInfo.m_pChanged = VK_TRUE;
7424 ++m_AllocationsMoved;
7425 m_BytesMoved += size;
7427 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
7435 if(srcAllocIndex > 0)
7441 if(srcBlockIndex > 0)
7444 srcAllocIndex = SIZE_MAX;
7454 VkResult VmaDefragmentator::Defragment(
7455 VkDeviceSize maxBytesToMove,
7456 uint32_t maxAllocationsToMove)
7458 if(m_Allocations.empty())
7464 const size_t blockCount = m_pBlockVector->m_Blocks.size();
7465 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7467 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
7468 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
7469 m_Blocks.push_back(pBlockInfo);
7473 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
7476 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
7478 AllocationInfo& allocInfo = m_Allocations[blockIndex];
7480 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7482 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
7483 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
7484 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
7486 (*it)->m_Allocations.push_back(allocInfo);
7494 m_Allocations.clear();
7496 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7498 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
7499 pBlockInfo->CalcHasNonMovableAllocations();
7500 pBlockInfo->SortAllocationsBySizeDescecnding();
7504 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
7507 VkResult result = VK_SUCCESS;
7508 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
7510 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
7514 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7516 m_Blocks[blockIndex]->Unmap(m_hAllocator);
7522 bool VmaDefragmentator::MoveMakesSense(
7523 size_t dstBlockIndex, VkDeviceSize dstOffset,
7524 size_t srcBlockIndex, VkDeviceSize srcOffset)
7526 if(dstBlockIndex < srcBlockIndex)
7530 if(dstBlockIndex > srcBlockIndex)
7534 if(dstOffset < srcOffset)
// NOTE(review): fragment of the VmaAllocator_T constructor - the signature,
// several member initializers and flag handling were lost in extraction.
// What remains: member init list, zeroing of cached state, heap size limit
// setup, device property queries, and creation of per-memory-type default
// block vectors and dedicated-allocation lists.
7547 m_hDevice(pCreateInfo->device),
7548 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
// Fall back to empty (default) CPU allocation callbacks when none provided.
7549 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
7550 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
7551 m_PreferredLargeHeapBlockSize(0),
7552 m_PhysicalDevice(pCreateInfo->physicalDevice),
7553 m_CurrentFrameIndex(0),
7554 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks())),
// Dedicated-allocation flag requested while the extension support is
// compiled out -> hard assert.
7559 #if !(VMA_DEDICATED_ALLOCATION) 7562 VMA_ASSERT(0 &&
"VMA_ALLOCATOR_CREATE_KHR_DEDICATED_ALLOCATION_BIT set but required extensions are disabled by preprocessor macros.");
// Zero-initialize cached structures before querying/filling them.
7566 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
7567 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
7568 memset(&m_MemProps, 0,
sizeof(m_MemProps));
7570 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
7571 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// VK_WHOLE_SIZE sentinel means "no limit" for a heap.
7573 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
7575 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
7586 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
7587 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Apply user-provided per-heap size limits, also clamping the reported
// heap sizes so the rest of the allocator sees the limited value.
7594 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
7596 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
7597 if(limit != VK_WHOLE_SIZE)
7599 m_HeapSizeLimit[heapIndex] = limit;
7600 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
7602 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default VmaBlockVector and one dedicated-allocation list per
// memory type. (Some constructor arguments lost in extraction.)
7608 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7610 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
7612 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
7618 GetBufferImageGranularity(),
7623 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
7628 VmaAllocator_T::~VmaAllocator_T()
7630 VMA_ASSERT(m_Pools.empty());
7632 for(
size_t i = GetMemoryTypeCount(); i--; )
7634 vma_delete(
this, m_pDedicatedAllocations[i]);
7635 vma_delete(
this, m_pBlockVectors[i]);
7639 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
7641 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 7642 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
7643 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
7644 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
7645 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
7646 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
7647 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
7648 m_VulkanFunctions.vkFlushMappedMemoryRanges = &vkFlushMappedMemoryRanges;
7649 m_VulkanFunctions.vkInvalidateMappedMemoryRanges = &vkInvalidateMappedMemoryRanges;
7650 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
7651 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
7652 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
7653 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
7654 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
7655 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
7656 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
7657 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
7658 #if VMA_DEDICATED_ALLOCATION 7659 if(m_UseKhrDedicatedAllocation)
7661 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
7662 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
7663 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
7664 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
7666 #endif // #if VMA_DEDICATED_ALLOCATION 7667 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 7669 #define VMA_COPY_IF_NOT_NULL(funcName) \ 7670 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 7672 if(pVulkanFunctions != VMA_NULL)
7674 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
7675 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
7676 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
7677 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
7678 VMA_COPY_IF_NOT_NULL(vkMapMemory);
7679 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
7680 VMA_COPY_IF_NOT_NULL(vkFlushMappedMemoryRanges);
7681 VMA_COPY_IF_NOT_NULL(vkInvalidateMappedMemoryRanges);
7682 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
7683 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
7684 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
7685 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
7686 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
7687 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
7688 VMA_COPY_IF_NOT_NULL(vkCreateImage);
7689 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
7690 #if VMA_DEDICATED_ALLOCATION 7691 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
7692 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
7696 #undef VMA_COPY_IF_NOT_NULL 7700 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
7701 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
7702 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
7703 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
7704 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
7705 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
7706 VMA_ASSERT(m_VulkanFunctions.vkFlushMappedMemoryRanges != VMA_NULL);
7707 VMA_ASSERT(m_VulkanFunctions.vkInvalidateMappedMemoryRanges != VMA_NULL);
7708 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
7709 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
7710 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
7711 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
7712 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
7713 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
7714 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
7715 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
7716 #if VMA_DEDICATED_ALLOCATION 7717 if(m_UseKhrDedicatedAllocation)
7719 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
7720 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
7725 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
7727 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7728 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
7729 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
7730 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// NOTE(review): garbled fragment - parts of the parameter list, the
// finalCreateInfo setup and several branch bodies were lost in extraction.
// Visible logic: decide between suballocating from the memory type's default
// block vector and making a dedicated vkAllocateMemory call, preferring the
// dedicated path for large (> half preferred block size) or explicitly
// dedicated requests.
7733 VkResult VmaAllocator_T::AllocateMemoryOfType(
7735 VkDeviceSize alignment,
7736 bool dedicatedAllocation,
7737 VkBuffer dedicatedBuffer,
7738 VkImage dedicatedImage,
7740 uint32_t memTypeIndex,
7741 VmaSuballocationType suballocType,
7744 VMA_ASSERT(pAllocation != VMA_NULL);
7745 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Presumably strips mapping-related flags for non-HOST_VISIBLE types -
// TODO confirm against full sources.
7751 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
7756 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
7757 VMA_ASSERT(blockVector);
7759 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
// Heuristic: requests larger than half a block go dedicated.
7760 bool preferDedicatedMemory =
7761 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
7762 dedicatedAllocation ||
7764 size > preferredBlockSize / 2;
// Dedicated path is only legal outside custom pools.
7766 if(preferDedicatedMemory &&
7768 finalCreateInfo.
pool == VK_NULL_HANDLE)
7777 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7781 return AllocateDedicatedMemory(
// Otherwise try suballocation from the block vector first.
7795 VkResult res = blockVector->Allocate(
7797 m_CurrentFrameIndex.load(),
7803 if(res == VK_SUCCESS)
7811 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Block allocation failed - fall back to dedicated memory.
7815 res = AllocateDedicatedMemory(
7821 finalCreateInfo.pUserData,
7825 if(res == VK_SUCCESS)
7828 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
// Both paths failed.
7834 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// NOTE(review): garbled fragment - parts of the parameter list, the map
// condition and several returns were lost in extraction. Visible logic:
// allocate a whole VkDeviceMemory for one resource, chaining
// VkMemoryDedicatedAllocateInfoKHR when the extension is in use, optionally
// persistently map it, then register the new allocation in the sorted
// per-memory-type dedicated-allocations list.
7841 VkResult VmaAllocator_T::AllocateDedicatedMemory(
7843 VmaSuballocationType suballocType,
7844 uint32_t memTypeIndex,
7846 bool isUserDataString,
7848 VkBuffer dedicatedBuffer,
7849 VkImage dedicatedImage,
7852 VMA_ASSERT(pAllocation);
7854 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
7855 allocInfo.memoryTypeIndex = memTypeIndex;
7856 allocInfo.allocationSize = size;
// Chain the dedicated-allocation info for exactly one of buffer/image.
7858 #if VMA_DEDICATED_ALLOCATION 7859 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
7860 if(m_UseKhrDedicatedAllocation)
7862 if(dedicatedBuffer != VK_NULL_HANDLE)
7864 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
7865 dedicatedAllocInfo.buffer = dedicatedBuffer;
7866 allocInfo.pNext = &dedicatedAllocInfo;
7868 else if(dedicatedImage != VK_NULL_HANDLE)
7870 dedicatedAllocInfo.image = dedicatedImage;
7871 allocInfo.pNext = &dedicatedAllocInfo;
7874 #endif // #if VMA_DEDICATED_ALLOCATION 7877 VkDeviceMemory hMemory = VK_NULL_HANDLE;
// Goes through AllocateVulkanMemory to honor heap size limits and
// device-memory callbacks.
7878 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
7881 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Optional persistent mapping (condition lost in extraction); on map
// failure the fresh memory is released again.
7885 void* pMappedData = VMA_NULL;
7888 res = (*m_VulkanFunctions.vkMapMemory)(
7897 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
7898 FreeVulkanMemory(memTypeIndex, size, hMemory);
7903 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
7904 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
7905 (*pAllocation)->SetUserData(
this, pUserData);
// Register in the sorted list so FreeDedicatedMemory can find it.
7909 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7910 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7911 VMA_ASSERT(pDedicatedAllocations);
7912 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
7915 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
7920 void VmaAllocator_T::GetBufferMemoryRequirements(
7922 VkMemoryRequirements& memReq,
7923 bool& requiresDedicatedAllocation,
7924 bool& prefersDedicatedAllocation)
const 7926 #if VMA_DEDICATED_ALLOCATION 7927 if(m_UseKhrDedicatedAllocation)
7929 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
7930 memReqInfo.buffer = hBuffer;
7932 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7934 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7935 memReq2.pNext = &memDedicatedReq;
7937 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7939 memReq = memReq2.memoryRequirements;
7940 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7941 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
7944 #endif // #if VMA_DEDICATED_ALLOCATION 7946 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
7947 requiresDedicatedAllocation =
false;
7948 prefersDedicatedAllocation =
false;
7952 void VmaAllocator_T::GetImageMemoryRequirements(
7954 VkMemoryRequirements& memReq,
7955 bool& requiresDedicatedAllocation,
7956 bool& prefersDedicatedAllocation)
const 7958 #if VMA_DEDICATED_ALLOCATION 7959 if(m_UseKhrDedicatedAllocation)
7961 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
7962 memReqInfo.image = hImage;
7964 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7966 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7967 memReq2.pNext = &memDedicatedReq;
7969 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7971 memReq = memReq2.memoryRequirements;
7972 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7973 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
7976 #endif // #if VMA_DEDICATED_ALLOCATION 7978 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
7979 requiresDedicatedAllocation =
false;
7980 prefersDedicatedAllocation =
false;
// NOTE(review): garbled fragment - flag checks, the pool-allocation argument
// list and the memory-type search loop heads were lost in extraction.
// Visible logic: validate mutually exclusive create flags, route pool
// allocations directly to the pool's block vector, otherwise iterate
// candidate memory types (clearing each failed one from memoryTypeBits) and
// delegate to AllocateMemoryOfType.
7984 VkResult VmaAllocator_T::AllocateMemory(
7985 const VkMemoryRequirements& vkMemReq,
7986 bool requiresDedicatedAllocation,
7987 bool prefersDedicatedAllocation,
7988 VkBuffer dedicatedBuffer,
7989 VkImage dedicatedImage,
7991 VmaSuballocationType suballocType,
// Invalid flag combinations are programming errors: assert and fail.
7997 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
7998 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
8003 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
8004 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Driver-required dedicated allocation conflicts with NEVER_ALLOCATE and
// with custom pools.
8006 if(requiresDedicatedAllocation)
8010 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
8011 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
8013 if(createInfo.
pool != VK_NULL_HANDLE)
8015 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
8016 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
8019 if((createInfo.
pool != VK_NULL_HANDLE) &&
8022 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
8023 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom-pool path: allocate straight from the pool's block vector,
// honoring the memory type's minimum alignment.
8026 if(createInfo.
pool != VK_NULL_HANDLE)
8028 const VkDeviceSize alignmentForPool = VMA_MAX(
8030 GetMemoryTypeMinAlignment(createInfo.
pool->m_BlockVector.GetMemoryTypeIndex()));
8031 return createInfo.
pool->m_BlockVector.Allocate(
8033 m_CurrentFrameIndex.load(),
// Default path: search suitable memory types, best candidate first.
8043 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
8044 uint32_t memTypeIndex = UINT32_MAX;
8046 if(res == VK_SUCCESS)
8048 VkDeviceSize alignmentForMemType = VMA_MAX(
8050 GetMemoryTypeMinAlignment(memTypeIndex));
8052 res = AllocateMemoryOfType(
8054 alignmentForMemType,
8055 requiresDedicatedAllocation || prefersDedicatedAllocation,
8063 if(res == VK_SUCCESS)
// Exclude the failed type and retry with the next candidate.
8073 memoryTypeBits &= ~(1u << memTypeIndex);
8076 if(res == VK_SUCCESS)
8078 alignmentForMemType = VMA_MAX(
8080 GetMemoryTypeMinAlignment(memTypeIndex));
8082 res = AllocateMemoryOfType(
8084 alignmentForMemType,
8085 requiresDedicatedAllocation || prefersDedicatedAllocation,
8093 if(res == VK_SUCCESS)
// No memory type could satisfy the request.
8103 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
8114 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
8116 VMA_ASSERT(allocation);
8118 if(allocation->CanBecomeLost() ==
false ||
8119 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
8121 switch(allocation->GetType())
8123 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8125 VmaBlockVector* pBlockVector = VMA_NULL;
8126 VmaPool hPool = allocation->GetPool();
8127 if(hPool != VK_NULL_HANDLE)
8129 pBlockVector = &hPool->m_BlockVector;
8133 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
8134 pBlockVector = m_pBlockVectors[memTypeIndex];
8136 pBlockVector->Free(allocation);
8139 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8140 FreeDedicatedMemory(allocation);
8147 allocation->SetUserData(
this, VMA_NULL);
8148 vma_delete(
this, allocation);
8151 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
8154 InitStatInfo(pStats->
total);
8155 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
8157 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
8161 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8163 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
8164 VMA_ASSERT(pBlockVector);
8165 pBlockVector->AddStats(pStats);
8170 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8171 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
8173 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
8178 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8180 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
8181 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8182 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
8183 VMA_ASSERT(pDedicatedAllocVector);
8184 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
8187 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
8188 VmaAddStatInfo(pStats->
total, allocationStatInfo);
8189 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
8190 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
8195 VmaPostprocessCalcStatInfo(pStats->
total);
8196 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
8197 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
8198 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
8199 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// PCI vendor ID of AMD (4098 == 0x1002).
8202 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// NOTE(review): garbled fragment - parts of the parameter list, the hAlloc
// definition and loop heads were lost in extraction. Visible logic: clear
// the output arrays, dispatch each movable HOST_VISIBLE block allocation to
// its block vector's defragmentator, run all defragmentators within the
// optional limits from pDefragmentationInfo, then destroy them.
8204 VkResult VmaAllocator_T::Defragment(
8206 size_t allocationCount,
8207 VkBool32* pAllocationsChanged,
// NOTE(review): the memset sizes look truncated by extraction - the full
// source presumably scales by allocationCount; verify against upstream.
8211 if(pAllocationsChanged != VMA_NULL)
8213 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
8215 if(pDefragmentationStats != VMA_NULL)
8217 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
8220 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
// Pools mutex is held for the whole operation.
8222 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
8224 const size_t poolCount = m_Pools.size();
// Dispatch allocations to per-block-vector defragmentators.
8227 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
8231 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
// Only block allocations in HOST_VISIBLE memory that are not lost can move.
8233 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
8235 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
8237 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
8239 VmaBlockVector* pAllocBlockVector = VMA_NULL;
8241 const VmaPool hAllocPool = hAlloc->GetPool();
8243 if(hAllocPool != VK_NULL_HANDLE)
8245 pAllocBlockVector = &hAllocPool->GetBlockVector();
8250 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
8253 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
8255 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
8256 &pAllocationsChanged[allocIndex] : VMA_NULL;
8257 pDefragmentator->AddAllocation(hAlloc, pChanged);
8261 VkResult result = VK_SUCCESS;
// Default limits mean "unbounded"; overridden from pDefragmentationInfo.
8265 VkDeviceSize maxBytesToMove = SIZE_MAX;
8266 uint32_t maxAllocationsToMove = UINT32_MAX;
8267 if(pDefragmentationInfo != VMA_NULL)
// Run defragmentation: default vectors first, then custom pools.
8274 for(uint32_t memTypeIndex = 0;
8275 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
8279 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8281 result = m_pBlockVectors[memTypeIndex]->Defragment(
8282 pDefragmentationStats,
8284 maxAllocationsToMove);
8289 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
8291 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
8292 pDefragmentationStats,
8294 maxAllocationsToMove);
// Destroy defragmentators, in reverse order of creation.
8300 for(
size_t poolIndex = poolCount; poolIndex--; )
8302 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
8306 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
8308 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8310 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// NOTE(review): body fragment of VmaAllocator_T::GetAllocationInfo - the
// signature and several branch/loop constructs were lost in extraction.
// Visible logic: for lost-capable allocations, read the frame indices in a
// CAS retry loop, reporting zeroed/defaulted info when the allocation is
// lost; for ordinary allocations, touch the frame index only when stats
// strings are enabled, then fill *pAllocationInfo from the allocation.
8319 if(hAllocation->CanBecomeLost())
8325 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
8326 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Lost allocation: report no backing memory, only size and user data.
8329 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
8333 pAllocationInfo->
offset = 0;
8334 pAllocationInfo->
size = hAllocation->GetSize();
8336 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report full info.
8339 else if(localLastUseFrameIndex == localCurrFrameIndex)
8341 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
8342 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
8343 pAllocationInfo->
offset = hAllocation->GetOffset();
8344 pAllocationInfo->
size = hAllocation->GetSize();
8346 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise try to advance last-use to the current frame; retry on a
// concurrent update.
8351 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
8353 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable allocation: the frame touch is only needed when stats
// strings are compiled in (usage tracking for reporting).
8360 #if VMA_STATS_STRING_ENABLED 8361 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
8362 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
8365 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
8366 if(localLastUseFrameIndex == localCurrFrameIndex)
8372 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
8374 localLastUseFrameIndex = localCurrFrameIndex;
// Fill in the full allocation info.
8380 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
8381 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
8382 pAllocationInfo->
offset = hAllocation->GetOffset();
8383 pAllocationInfo->
size = hAllocation->GetSize();
8384 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
8385 pAllocationInfo->
pUserData = hAllocation->GetUserData();
8389 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
8392 if(hAllocation->CanBecomeLost())
8394 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
8395 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
8398 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
8402 else if(localLastUseFrameIndex == localCurrFrameIndex)
8408 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
8410 localLastUseFrameIndex = localCurrFrameIndex;
8417 #if VMA_STATS_STRING_ENABLED 8418 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
8419 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
8422 VMA_ASSERT(localLastUseFrameIndex != VMA_FRAME_INDEX_LOST);
8423 if(localLastUseFrameIndex == localCurrFrameIndex)
8429 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
8431 localLastUseFrameIndex = localCurrFrameIndex;
// NOTE(review): fragment of VmaAllocator_T::CreatePool - the signature and
// the newCreateInfo normalization were lost in extraction. Visible logic:
// construct the pool object, pre-create its minimum block count (destroying
// the pool on failure), then register it in the sorted m_Pools list with a
// fresh id.
8443 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
8456 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
// Pre-allocate minBlockCount blocks; failure aborts pool creation.
8458 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
8459 if(res != VK_SUCCESS)
8461 vma_delete(
this, *pPool);
// Register under the pools mutex.
8468 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8469 (*pPool)->SetId(m_NextPoolId++);
8470 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
8476 void VmaAllocator_T::DestroyPool(
VmaPool pool)
8480 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8481 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
8482 VMA_ASSERT(success &&
"Pool not found in Allocator.");
8485 vma_delete(
this, pool);
8490 pool->m_BlockVector.GetPoolStats(pPoolStats);
8493 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
8495 m_CurrentFrameIndex.store(frameIndex);
8498 void VmaAllocator_T::MakePoolAllocationsLost(
8500 size_t* pLostAllocationCount)
8502 hPool->m_BlockVector.MakePoolAllocationsLost(
8503 m_CurrentFrameIndex.load(),
8504 pLostAllocationCount);
8507 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
8509 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
8510 (*pAllocation)->InitLost();
8513 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
8515 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
8518 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
8520 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
8521 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
8523 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
8524 if(res == VK_SUCCESS)
8526 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
8531 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
8536 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
8539 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
8541 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
8547 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
8549 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
8551 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
8554 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
8556 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
8557 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
8559 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
8560 m_HeapSizeLimit[heapIndex] += size;
8564 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
8566 if(hAllocation->CanBecomeLost())
8568 return VK_ERROR_MEMORY_MAP_FAILED;
8571 switch(hAllocation->GetType())
8573 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8575 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
8576 char *pBytes = VMA_NULL;
8577 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
8578 if(res == VK_SUCCESS)
8580 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
8581 hAllocation->BlockAllocMap();
8585 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8586 return hAllocation->DedicatedAllocMap(
this, ppData);
8589 return VK_ERROR_MEMORY_MAP_FAILED;
// NOTE(review): the signature line is missing from this extract; by symmetry
// with Map() above this is presumably VmaAllocator_T::Unmap(VmaAllocation) —
// confirm against the full header.
// Undoes one Map(): for block suballocations, drops the allocation's map record
// then releases one map reference on the owning block; dedicated allocations
// unmap through their own bookkeeping.
8595 switch(hAllocation->GetType())
8597 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8599 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
8600 hAllocation->BlockAllocUnmap();
8601 pBlock->Unmap(
this, 1);
8604 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8605 hAllocation->DedicatedAllocUnmap(
this);
// Binds hBuffer to the device memory behind hAllocation. A dedicated allocation
// binds its VkDeviceMemory directly; a block suballocation delegates to the
// owning VmaDeviceMemoryBlock. Asserts if the allocation has no block (e.g. lost).
8612 VkResult VmaAllocator_T::BindBufferMemory(
VmaAllocation hAllocation, VkBuffer hBuffer)
8614 VkResult res = VK_SUCCESS;
8615 switch(hAllocation->GetType())
8617 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8618 res = GetVulkanFunctions().vkBindBufferMemory(
8621 hAllocation->GetMemory(),
8624 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8626 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
8627 VMA_ASSERT(pBlock &&
"Binding buffer to allocation that doesn't belong to any block. Is the allocation lost?");
8628 res = pBlock->BindBufferMemory(
this, hAllocation, hBuffer);
// Image counterpart of BindBufferMemory(): binds hImage to the memory behind
// hAllocation, dispatching on allocation type (dedicated vs. block).
8637 VkResult VmaAllocator_T::BindImageMemory(
VmaAllocation hAllocation, VkImage hImage)
8639 VkResult res = VK_SUCCESS;
8640 switch(hAllocation->GetType())
8642 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8643 res = GetVulkanFunctions().vkBindImageMemory(
8646 hAllocation->GetMemory(),
8649 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8651 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
8652 VMA_ASSERT(pBlock &&
"Binding image to allocation that doesn't belong to any block. Is the allocation lost?");
8653 res = pBlock->BindImageMemory(
this, hAllocation, hImage);
// Flushes or invalidates (per `op`) the host cache for a sub-range of
// hAllocation — only when the memory type is non-coherent; coherent memory
// needs no explicit synchronization. offset/size are relative to the
// allocation; size == VK_WHOLE_SIZE means "to the end of the allocation".
8662 void VmaAllocator_T::FlushOrInvalidateAllocation(
8664 VkDeviceSize offset, VkDeviceSize size,
8665 VMA_CACHE_OPERATION op)
8667 const uint32_t memTypeIndex = hAllocation->GetMemoryTypeIndex();
8668 if(size > 0 && IsMemoryTypeNonCoherent(memTypeIndex))
8670 const VkDeviceSize allocationSize = hAllocation->GetSize();
8671 VMA_ASSERT(offset <= allocationSize);
// VkMappedMemoryRange offset/size must be aligned to nonCoherentAtomSize.
8673 const VkDeviceSize nonCoherentAtomSize = m_PhysicalDeviceProperties.limits.nonCoherentAtomSize;
8675 VkMappedMemoryRange memRange = { VK_STRUCTURE_TYPE_MAPPED_MEMORY_RANGE };
8676 memRange.memory = hAllocation->GetMemory();
8678 switch(hAllocation->GetType())
8680 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
// Dedicated memory: align the range and clamp it to the allocation.
8681 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
8682 if(size == VK_WHOLE_SIZE)
8684 memRange.size = allocationSize - memRange.offset;
8688 VMA_ASSERT(offset + size <= allocationSize);
8689 memRange.size = VMA_MIN(
8690 VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize),
8691 allocationSize - memRange.offset);
8695 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
// Block suballocation: first compute the aligned range relative to the
// allocation...
8698 memRange.offset = VmaAlignDown(offset, nonCoherentAtomSize);
8699 if(size == VK_WHOLE_SIZE)
8701 size = allocationSize - offset;
8705 VMA_ASSERT(offset + size <= allocationSize);
8707 memRange.size = VmaAlignUp(size + (offset - memRange.offset), nonCoherentAtomSize);
// ...then rebase by the suballocation's offset within the block and clamp
// to the block's total size. The offset itself must already be atom-aligned.
8710 const VkDeviceSize allocationOffset = hAllocation->GetOffset();
8711 VMA_ASSERT(allocationOffset % nonCoherentAtomSize == 0);
8712 const VkDeviceSize blockSize = hAllocation->GetBlock()->m_Metadata.GetSize();
8713 memRange.offset += allocationOffset;
8714 memRange.size = VMA_MIN(memRange.size, blockSize - memRange.offset);
// Dispatch the actual cache operation.
8725 case VMA_CACHE_FLUSH:
8726 (*GetVulkanFunctions().vkFlushMappedMemoryRanges)(m_hDevice, 1, &memRange);
8728 case VMA_CACHE_INVALIDATE:
8729 (*GetVulkanFunctions().vkInvalidateMappedMemoryRanges)(m_hDevice, 1, &memRange);
// Destroys a dedicated (non-block) allocation: removes it from the per-type
// dedicated-allocation registry, unmaps it if still mapped, and frees its
// VkDeviceMemory via FreeVulkanMemory() (which also updates heap budgets).
8738 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
8740 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
8742 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
// Unregister from the sorted per-memory-type vector under its mutex.
8744 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8745 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
8746 VMA_ASSERT(pDedicatedAllocations);
8747 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
8748 VMA_ASSERT(success);
8751 VkDeviceMemory hMemory = allocation->GetMemory();
// A still-mapped (e.g. persistently mapped) allocation must be unmapped
// before the memory is freed.
8753 if(allocation->GetMappedData() != VMA_NULL)
8755 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
8758 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
8760 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Writes the detailed JSON map of the allocator's state (compiled only when
// VMA_STATS_STRING_ENABLED): dedicated allocations per memory type, the
// default block vectors per memory type, and finally the custom pools.
8763 #if VMA_STATS_STRING_ENABLED 8765 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
// Section 1: "DedicatedAllocations" — header is emitted lazily, only once the
// first non-empty per-type vector is found.
8767 bool dedicatedAllocationsStarted =
false;
8768 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8770 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8771 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
8772 VMA_ASSERT(pDedicatedAllocVector);
8773 if(pDedicatedAllocVector->empty() ==
false)
8775 if(dedicatedAllocationsStarted ==
false)
8777 dedicatedAllocationsStarted =
true;
8778 json.WriteString(
"DedicatedAllocations");
8782 json.BeginString(
"Type ");
8783 json.ContinueString(memTypeIndex);
8788 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
8790 json.BeginObject(
true);
8792 hAlloc->PrintParameters(json);
8799 if(dedicatedAllocationsStarted)
// Section 2: "DefaultPools" — per-memory-type block vectors, again with a
// lazily emitted header.
8805 bool allocationsStarted =
false;
8806 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8808 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
8810 if(allocationsStarted ==
false)
8812 allocationsStarted =
true;
8813 json.WriteString(
"DefaultPools");
8817 json.BeginString(
"Type ");
8818 json.ContinueString(memTypeIndex);
8821 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
8824 if(allocationsStarted)
// Section 3: "Pools" — custom pools, keyed by pool id, under the pools mutex.
8831 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8832 const size_t poolCount = m_Pools.size();
8835 json.WriteString(
"Pools");
8837 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
8840 json.ContinueString(m_Pools[poolIndex]->GetId());
8843 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
// Internal helper (several parameter lines are missing from this extract):
// queries the image's memory requirements — including dedicated-allocation
// hints — then forwards to VmaAllocator_T::AllocateMemory().
8850 #endif // #if VMA_STATS_STRING_ENABLED 8852 static VkResult AllocateMemoryForImage(
8856 VmaSuballocationType suballocType,
8859 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
8861 VkMemoryRequirements vkMemReq = {};
// Out-params filled by GetImageMemoryRequirements (uses the KHR dedicated-
// allocation query when available — see VMA_DEDICATED_ALLOCATION in HEAD).
8862 bool requiresDedicatedAllocation =
false;
8863 bool prefersDedicatedAllocation =
false;
8864 allocator->GetImageMemoryRequirements(image, vkMemReq,
8865 requiresDedicatedAllocation, prefersDedicatedAllocation);
8867 return allocator->AllocateMemory(
8869 requiresDedicatedAllocation,
8870 prefersDedicatedAllocation,
8873 *pAllocationCreateInfo,
// NOTE(review): the extract is missing the signature lines of several C-API
// entry points below; identities are inferred from the debug-log strings and
// should be confirmed against the full header.
// --- vmaCreateAllocator (fragment): argument validation. ---
8885 VMA_ASSERT(pCreateInfo && pAllocator);
8886 VMA_DEBUG_LOG(
"vmaCreateAllocator");
// --- vmaDestroyAllocator: the allocation callbacks are copied to the stack
// because they live inside the allocator object being deleted. ---
8894 if(allocator != VK_NULL_HANDLE)
8896 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
8897 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
8898 vma_delete(&allocationCallbacks, allocator);
// --- vmaGetPhysicalDeviceProperties: returns a pointer to the cached props. ---
8904 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
8906 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
8907 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
// --- vmaGetMemoryProperties: returns a pointer to the cached memory props. ---
8912 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
8914 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
8915 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
// --- vmaGetMemoryTypeProperties: property flags for a memory type index. ---
8920 uint32_t memoryTypeIndex,
8921 VkMemoryPropertyFlags* pFlags)
8923 VMA_ASSERT(allocator && pFlags);
8924 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
8925 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
// --- vmaSetCurrentFrameIndex: VMA_FRAME_INDEX_LOST is reserved as a sentinel,
// hence the assert. ---
8930 uint32_t frameIndex)
8932 VMA_ASSERT(allocator);
8933 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
8935 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8937 allocator->SetCurrentFrameIndex(frameIndex);
// --- vmaCalculateStats: forwards to the allocator's stats computation. ---
8944 VMA_ASSERT(allocator && pStats);
8945 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8946 allocator->CalculateStats(pStats);
// --- vmaBuildStatsString (signature line partially missing from extract):
// serializes allocator statistics to a JSON string. The returned buffer is
// NUL-terminated and must be released with vmaFreeStatsString(). ---
8949 #if VMA_STATS_STRING_ENABLED 8953 char** ppStatsString,
8954 VkBool32 detailedMap)
8956 VMA_ASSERT(allocator && ppStatsString);
8957 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8959 VmaStringBuilder sb(allocator);
8961 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
8965 allocator->CalculateStats(&stats);
// Global totals first.
8967 json.WriteString(
"Total");
8968 VmaPrintStatInfo(json, stats.
total);
// One "Heap N" object per memory heap: size, flags, stats, then its types.
8970 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
8972 json.BeginString(
"Heap ");
8973 json.ContinueString(heapIndex);
8977 json.WriteString(
"Size");
8978 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
8980 json.WriteString(
"Flags");
8981 json.BeginArray(
true);
8982 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
8984 json.WriteString(
"DEVICE_LOCAL");
8990 json.WriteString(
"Stats");
8991 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
// Nested "Type N" objects for every memory type belonging to this heap.
8994 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
8996 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
8998 json.BeginString(
"Type ");
8999 json.ContinueString(typeIndex);
9004 json.WriteString(
"Flags");
9005 json.BeginArray(
true);
9006 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
9007 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
9009 json.WriteString(
"DEVICE_LOCAL");
9011 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
9013 json.WriteString(
"HOST_VISIBLE");
9015 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
9017 json.WriteString(
"HOST_COHERENT");
9019 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
9021 json.WriteString(
"HOST_CACHED");
9023 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
9025 json.WriteString(
"LAZILY_ALLOCATED");
9031 json.WriteString(
"Stats");
9032 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
// Optional expensive detailed map of every allocation.
9041 if(detailedMap == VK_TRUE)
9043 allocator->PrintDetailedMap(json);
// Copy the builder's contents into a caller-owned, NUL-terminated buffer
// allocated with the allocator's own CPU allocation callbacks.
9049 const size_t len = sb.GetLength();
9050 char*
const pChars = vma_new_array(allocator,
char, len + 1);
9053 memcpy(pChars, sb.GetData(), len);
9056 *ppStatsString = pChars;
// --- vmaFreeStatsString: releases a string produced above; the +1 accounts
// for the NUL terminator included in the original array size. ---
9063 if(pStatsString != VMA_NULL)
9065 VMA_ASSERT(allocator);
9066 size_t len = strlen(pStatsString);
9067 vma_delete_array(allocator, pStatsString, len + 1);
// --- vmaFindMemoryTypeIndex (leading signature lines missing from extract):
// given acceptable memoryTypeBits and a VmaAllocationCreateInfo, picks the
// memory type whose property flags satisfy all required flags and miss the
// fewest preferred flags. Returns VK_ERROR_FEATURE_NOT_PRESENT if none fits. ---
9071 #endif // #if VMA_STATS_STRING_ENABLED 9078 uint32_t memoryTypeBits,
9080 uint32_t* pMemoryTypeIndex)
9082 VMA_ASSERT(allocator != VK_NULL_HANDLE);
9083 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
9084 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
9091 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
9097 preferredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
// Translate VmaMemoryUsage into required/preferred Vulkan property flags.
// On integrated GPUs DEVICE_LOCAL is not preferred when HOST_VISIBLE is
// already preferred, since all memory is effectively device-local there.
9101 switch(pAllocationCreateInfo->
usage)
9106 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
9108 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
9112 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
9115 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
9116 if(!allocator->IsIntegratedGpu() || (preferredFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
9118 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
9122 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
9123 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
// Scan all candidate types; cost = number of preferred flags the type lacks.
9129 *pMemoryTypeIndex = UINT32_MAX;
9130 uint32_t minCost = UINT32_MAX;
9131 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
9132 memTypeIndex < allocator->GetMemoryTypeCount();
9133 ++memTypeIndex, memTypeBit <<= 1)
9136 if((memTypeBit & memoryTypeBits) != 0)
9138 const VkMemoryPropertyFlags currFlags =
9139 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
// All required flags must be present in the type...
9141 if((requiredFlags & ~currFlags) == 0)
// ...and among those, keep the one missing the fewest preferred flags.
9144 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
9146 if(currCost < minCost)
9148 *pMemoryTypeIndex = memTypeIndex;
9158 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
// --- vmaFindMemoryTypeIndexForBufferInfo: creates a temporary VkBuffer just to
// query its memoryTypeBits, finds a suitable memory type, then destroys the
// buffer again. ---
9163 const VkBufferCreateInfo* pBufferCreateInfo,
9165 uint32_t* pMemoryTypeIndex)
9167 VMA_ASSERT(allocator != VK_NULL_HANDLE);
9168 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
9169 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
9170 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
9172 const VkDevice hDev = allocator->m_hDevice;
9173 VkBuffer hBuffer = VK_NULL_HANDLE;
9174 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
9175 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
9176 if(res == VK_SUCCESS)
9178 VkMemoryRequirements memReq = {};
9179 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
9180 hDev, hBuffer, &memReq);
9184 memReq.memoryTypeBits,
9185 pAllocationCreateInfo,
// The temporary buffer is destroyed regardless of the lookup's outcome.
9188 allocator->GetVulkanFunctions().vkDestroyBuffer(
9189 hDev, hBuffer, allocator->GetAllocationCallbacks());
// --- vmaFindMemoryTypeIndexForImageInfo: identical pattern with a temporary
// VkImage. ---
9196 const VkImageCreateInfo* pImageCreateInfo,
9198 uint32_t* pMemoryTypeIndex)
9200 VMA_ASSERT(allocator != VK_NULL_HANDLE);
9201 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
9202 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
9203 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
9205 const VkDevice hDev = allocator->m_hDevice;
9206 VkImage hImage = VK_NULL_HANDLE;
9207 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
9208 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
9209 if(res == VK_SUCCESS)
9211 VkMemoryRequirements memReq = {};
9212 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
9213 hDev, hImage, &memReq);
9217 memReq.memoryTypeBits,
9218 pAllocationCreateInfo,
9221 allocator->GetVulkanFunctions().vkDestroyImage(
9222 hDev, hImage, allocator->GetAllocationCallbacks());
// NOTE(review): signature lines of these pool-API entry points are missing from
// this extract; identities are inferred from the debug-log strings.
// --- vmaCreatePool: validates and forwards to VmaAllocator_T::CreatePool. ---
9232 VMA_ASSERT(allocator && pCreateInfo && pPool);
9234 VMA_DEBUG_LOG(
"vmaCreatePool");
9236 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9238 return allocator->CreatePool(pCreateInfo, pPool);
// --- vmaDestroyPool: a null pool handle is a no-op. ---
9245 VMA_ASSERT(allocator);
9247 if(pool == VK_NULL_HANDLE)
9252 VMA_DEBUG_LOG(
"vmaDestroyPool");
9254 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9256 allocator->DestroyPool(pool);
// --- vmaGetPoolStats: forwards to the allocator. ---
9264 VMA_ASSERT(allocator && pool && pPoolStats);
9266 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9268 allocator->GetPoolStats(pool, pPoolStats);
// --- vmaMakePoolAllocationsLost: pLostAllocationCount is optional output. ---
9274 size_t* pLostAllocationCount)
9276 VMA_ASSERT(allocator && pool);
9278 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9280 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
// --- vmaAllocateMemory (leading lines missing from extract): allocates for
// caller-supplied VkMemoryRequirements, suballocation type UNKNOWN; fills
// pAllocationInfo only on success and only if requested. ---
9285 const VkMemoryRequirements* pVkMemoryRequirements,
9290 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
9292 VMA_DEBUG_LOG(
"vmaAllocateMemory");
9294 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9296 VkResult result = allocator->AllocateMemory(
9297 *pVkMemoryRequirements,
9303 VMA_SUBALLOCATION_TYPE_UNKNOWN,
9306 if(pAllocationInfo && result == VK_SUCCESS)
9308 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// --- vmaAllocateMemoryForBuffer: queries the buffer's requirements (with
// dedicated-allocation hints) before allocating with type BUFFER. ---
9321 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
9323 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
9325 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9327 VkMemoryRequirements vkMemReq = {};
9328 bool requiresDedicatedAllocation =
false;
9329 bool prefersDedicatedAllocation =
false;
9330 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
9331 requiresDedicatedAllocation,
9332 prefersDedicatedAllocation);
9334 VkResult result = allocator->AllocateMemory(
9336 requiresDedicatedAllocation,
9337 prefersDedicatedAllocation,
9341 VMA_SUBALLOCATION_TYPE_BUFFER,
9344 if(pAllocationInfo && result == VK_SUCCESS)
9346 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// --- vmaAllocateMemoryForImage: delegates to the static AllocateMemoryForImage
// helper with suballocation type IMAGE_UNKNOWN. ---
9359 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
9361 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
9363 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9365 VkResult result = AllocateMemoryForImage(
9369 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
9372 if(pAllocationInfo && result == VK_SUCCESS)
9374 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// --- vmaFreeMemory: a null allocation handle is a no-op. ---
9384 VMA_ASSERT(allocator);
9385 VMA_DEBUG_LOG(
"vmaFreeMemory");
9386 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9387 if(allocation != VK_NULL_HANDLE)
9389 allocator->FreeMemory(allocation);
// NOTE(review): signature lines missing from this extract; identities inferred
// from debug-log strings and forwarded calls. These are thin C-API wrappers.
// --- vmaGetAllocationInfo ---
9398 VMA_ASSERT(allocator && allocation && pAllocationInfo);
9400 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9402 allocator->GetAllocationInfo(allocation, pAllocationInfo);
// --- vmaTouchAllocation ---
9409 VMA_ASSERT(allocator && allocation);
9411 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9413 return allocator->TouchAllocation(allocation);
// --- vmaSetAllocationUserData ---
9421 VMA_ASSERT(allocator && allocation);
9423 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9425 allocation->SetUserData(allocator, pUserData);
// --- vmaCreateLostAllocation ---
9432 VMA_ASSERT(allocator && pAllocation);
9434 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
9436 allocator->CreateLostAllocation(pAllocation);
// --- vmaMapMemory ---
9444 VMA_ASSERT(allocator && allocation && ppData);
9446 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9448 return allocator->Map(allocation, ppData);
// --- vmaUnmapMemory ---
9455 VMA_ASSERT(allocator && allocation);
9457 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9459 allocator->Unmap(allocation);
// --- vmaFlushAllocation: flush variant of FlushOrInvalidateAllocation. ---
9464 VMA_ASSERT(allocator && allocation);
9466 VMA_DEBUG_LOG(
"vmaFlushAllocation");
9468 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9470 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_FLUSH);
// --- vmaInvalidateAllocation: invalidate variant of the same. ---
9475 VMA_ASSERT(allocator && allocation);
9477 VMA_DEBUG_LOG(
"vmaInvalidateAllocation");
9479 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9481 allocator->FlushOrInvalidateAllocation(allocation, offset, size, VMA_CACHE_INVALIDATE);
// --- vmaDefragment (leading parameter lines missing from extract): forwards
// the allocation list and optional info/stats structs to the allocator. ---
9487 size_t allocationCount,
9488 VkBool32* pAllocationsChanged,
9492 VMA_ASSERT(allocator && pAllocations);
9494 VMA_DEBUG_LOG(
"vmaDefragment");
9496 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9498 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
// --- vmaBindBufferMemory: thin wrapper over VmaAllocator_T::BindBufferMemory. ---
9506 VMA_ASSERT(allocator && allocation && buffer);
9508 VMA_DEBUG_LOG(
"vmaBindBufferMemory");
9510 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9512 return allocator->BindBufferMemory(allocation, buffer);
// --- vmaBindImageMemory: thin wrapper over VmaAllocator_T::BindImageMemory. ---
9520 VMA_ASSERT(allocator && allocation && image);
9522 VMA_DEBUG_LOG(
"vmaBindImageMemory");
9524 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9526 return allocator->BindImageMemory(allocation, image);
// --- vmaCreateBuffer: creates the VkBuffer, allocates suitable memory, binds
// them, and unwinds (free memory / destroy buffer) on any intermediate failure
// so no partial state leaks to the caller. ---
9531 const VkBufferCreateInfo* pBufferCreateInfo,
9537 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
9539 VMA_DEBUG_LOG(
"vmaCreateBuffer");
9541 VMA_DEBUG_GLOBAL_MUTEX_LOCK
// Out-handles are pre-cleared so failure paths leave them null.
9543 *pBuffer = VK_NULL_HANDLE;
9544 *pAllocation = VK_NULL_HANDLE;
9547 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
9548 allocator->m_hDevice,
9550 allocator->GetAllocationCallbacks(),
9555 VkMemoryRequirements vkMemReq = {};
9556 bool requiresDedicatedAllocation =
false;
9557 bool prefersDedicatedAllocation =
false;
9558 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
9559 requiresDedicatedAllocation, prefersDedicatedAllocation);
// Sanity checks: the driver-reported alignment should be compatible with the
// device's minimum offset alignments for each declared buffer usage.
9563 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
9565 VMA_ASSERT(vkMemReq.alignment %
9566 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
9568 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
9570 VMA_ASSERT(vkMemReq.alignment %
9571 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
9573 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
9575 VMA_ASSERT(vkMemReq.alignment %
9576 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
9580 res = allocator->AllocateMemory(
9582 requiresDedicatedAllocation,
9583 prefersDedicatedAllocation,
9586 *pAllocationCreateInfo,
9587 VMA_SUBALLOCATION_TYPE_BUFFER,
9592 res = allocator->BindBufferMemory(*pAllocation, *pBuffer);
// Record usage for the JSON stats dump (stats builds only).
9596 #if VMA_STATS_STRING_ENABLED 9597 (*pAllocation)->InitBufferImageUsage(pBufferCreateInfo->usage);
9599 if(pAllocationInfo != VMA_NULL)
9601 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// Failure after allocation: free memory, then destroy the buffer.
9605 allocator->FreeMemory(*pAllocation);
9606 *pAllocation = VK_NULL_HANDLE;
9607 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
9608 *pBuffer = VK_NULL_HANDLE;
// Failure before allocation: only the buffer needs destroying.
9611 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
9612 *pBuffer = VK_NULL_HANDLE;
// --- vmaDestroyBuffer: destroys the buffer and frees its allocation; either
// handle may be null independently. ---
9623 VMA_ASSERT(allocator);
9624 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
9625 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9626 if(buffer != VK_NULL_HANDLE)
9628 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
9630 if(allocation != VK_NULL_HANDLE)
9632 allocator->FreeMemory(allocation);
// --- vmaCreateImage: image counterpart of vmaCreateBuffer — create image,
// allocate memory (suballocation type depends on tiling), bind, and unwind on
// any intermediate failure. ---
9638 const VkImageCreateInfo* pImageCreateInfo,
9644 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
9646 VMA_DEBUG_LOG(
"vmaCreateImage");
9648 VMA_DEBUG_GLOBAL_MUTEX_LOCK
// Out-handles are pre-cleared so failure paths leave them null.
9650 *pImage = VK_NULL_HANDLE;
9651 *pAllocation = VK_NULL_HANDLE;
9654 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
9655 allocator->m_hDevice,
9657 allocator->GetAllocationCallbacks(),
// Optimal vs. linear tiling matters for suballocation placement (buffer-image
// granularity), hence distinct suballocation types.
9661 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
9662 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
9663 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
9666 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
9670 res = allocator->BindImageMemory(*pAllocation, *pImage);
// Record usage for the JSON stats dump (stats builds only).
9674 #if VMA_STATS_STRING_ENABLED 9675 (*pAllocation)->InitBufferImageUsage(pImageCreateInfo->usage);
9677 if(pAllocationInfo != VMA_NULL)
9679 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
// Failure after allocation: free memory, then destroy the image.
9683 allocator->FreeMemory(*pAllocation);
9684 *pAllocation = VK_NULL_HANDLE;
9685 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
9686 *pImage = VK_NULL_HANDLE;
// Failure before allocation: only the image needs destroying.
9689 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
9690 *pImage = VK_NULL_HANDLE;
// --- vmaDestroyImage: destroys the image and frees its allocation; either
// handle may be null independently. ---
9701 VMA_ASSERT(allocator);
9702 VMA_DEBUG_LOG(
"vmaDestroyImage");
9703 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9704 if(image != VK_NULL_HANDLE)
9706 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
9708 if(allocation != VK_NULL_HANDLE)
9710 allocator->FreeMemory(allocation);
9714 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1171
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1437
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1200
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
void vmaInvalidateAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Invalidates memory of given allocation.
Represents single memory allocation.
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1183
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1394
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1175
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1767
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1197
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1994
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1613
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1667
Definition: vk_mem_alloc.h:1474
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1164
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1512
Definition: vk_mem_alloc.h:1421
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1209
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1262
Description of an Allocator to be created.
Definition: vk_mem_alloc.h:1194
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1425
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1327
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1180
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1326
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1998
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1226
VmaStatInfo total
Definition: vk_mem_alloc.h:1336
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:2006
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1496
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1989
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1181
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:1106
Represents the main object of this library, initialized via vmaCreateAllocator().
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1203
VkResult vmaBindBufferMemory(VmaAllocator allocator, VmaAllocation allocation, VkBuffer buffer)
Binds buffer to allocation.
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1621
Definition: vk_mem_alloc.h:1615
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1777
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1176
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1533
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1637
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1673
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1162
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1624
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VmaMemoryUsage
Definition: vk_mem_alloc.h:1372
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
void vmaFlushAllocation(VmaAllocator allocator, VmaAllocation allocation, VkDeviceSize offset, VkDeviceSize size)
Flushes memory of given allocation.
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1984
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:2002
Definition: vk_mem_alloc.h:1411
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1520
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1179
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1332
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:1112
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:1133
VkResult vmaBindImageMemory(VmaAllocator allocator, VmaAllocation allocation, VkImage image)
Binds image to allocation.
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:1138
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:2004
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1507
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1683
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1172
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1315
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1632
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:1125
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1481
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1328
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:1129
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1627
Definition: vk_mem_alloc.h:1420
PFN_vkInvalidateMappedMemoryRanges vkInvalidateMappedMemoryRanges
Definition: vk_mem_alloc.h:1178
struct VmaPoolStats VmaPoolStats
Describes parameters of an existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1502
Definition: vk_mem_alloc.h:1493
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1318
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1174
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:1645
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1212
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1676
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1491
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1526
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1250
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1334
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1461
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1327
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1185
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:1127
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1184
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1659
PFN_vkFlushMappedMemoryRanges vkFlushMappedMemoryRanges
Definition: vk_mem_alloc.h:1177
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of an Allocator to be created.
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1791
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1206
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1327
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1324
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Describes parameters of an existing VmaPool.
Definition: vk_mem_alloc.h:1664
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1772
Definition: vk_mem_alloc.h:1489
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:2000
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1170
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1322
Definition: vk_mem_alloc.h:1377
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1617
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1320
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1182
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1186
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1448
Definition: vk_mem_alloc.h:1404
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1786
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1160
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1173
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1753
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1595
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1328
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
Definition: vk_mem_alloc.h:1487
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1335
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1670
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1328
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1758