23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 943 #include <vulkan/vulkan.h> 960 VkDeviceMemory memory,
966 VkDeviceMemory memory,
1127 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1135 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1145 uint32_t memoryTypeIndex,
1146 VkMemoryPropertyFlags* pFlags);
1158 uint32_t frameIndex);
1191 #define VMA_STATS_STRING_ENABLED 1 1193 #if VMA_STATS_STRING_ENABLED 1200 char** ppStatsString,
1201 VkBool32 detailedMap);
1205 char* pStatsString);
1207 #endif // #if VMA_STATS_STRING_ENABLED 1401 uint32_t memoryTypeBits,
1403 uint32_t* pMemoryTypeIndex);
1419 const VkBufferCreateInfo* pBufferCreateInfo,
1421 uint32_t* pMemoryTypeIndex);
1437 const VkImageCreateInfo* pImageCreateInfo,
1439 uint32_t* pMemoryTypeIndex);
1570 size_t* pLostAllocationCount);
1653 const VkMemoryRequirements* pVkMemoryRequirements,
1913 size_t allocationCount,
1914 VkBool32* pAllocationsChanged,
1946 const VkBufferCreateInfo* pBufferCreateInfo,
1971 const VkImageCreateInfo* pImageCreateInfo,
1997 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 2000 #ifdef __INTELLISENSE__ 2001 #define VMA_IMPLEMENTATION 2004 #ifdef VMA_IMPLEMENTATION 2005 #undef VMA_IMPLEMENTATION 2027 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 2028 #define VMA_STATIC_VULKAN_FUNCTIONS 1 2040 #if VMA_USE_STL_CONTAINERS 2041 #define VMA_USE_STL_VECTOR 1 2042 #define VMA_USE_STL_UNORDERED_MAP 1 2043 #define VMA_USE_STL_LIST 1 2046 #if VMA_USE_STL_VECTOR 2050 #if VMA_USE_STL_UNORDERED_MAP 2051 #include <unordered_map> 2054 #if VMA_USE_STL_LIST 2063 #include <algorithm> 2067 #if !defined(_WIN32) && !defined(__APPLE__) 2073 #define VMA_NULL nullptr 2076 #if defined(__APPLE__) || defined(__ANDROID__) 2078 void *aligned_alloc(
size_t alignment,
size_t size)
2081 if(alignment <
sizeof(
void*))
2083 alignment =
sizeof(
void*);
2087 if(posix_memalign(&pointer, alignment, size) == 0)
2096 #define VMA_ASSERT(expr) assert(expr) 2098 #define VMA_ASSERT(expr) 2104 #ifndef VMA_HEAVY_ASSERT 2106 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 2108 #define VMA_HEAVY_ASSERT(expr) 2112 #ifndef VMA_ALIGN_OF 2113 #define VMA_ALIGN_OF(type) (__alignof(type)) 2116 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 2118 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 2120 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 2124 #ifndef VMA_SYSTEM_FREE 2126 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 2128 #define VMA_SYSTEM_FREE(ptr) free(ptr) 2133 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 2137 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 2141 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 2145 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 2148 #ifndef VMA_DEBUG_LOG 2149 #define VMA_DEBUG_LOG(format, ...) 2159 #if VMA_STATS_STRING_ENABLED 2160 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
2162 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Formats a 64-bit unsigned integer as decimal text into outStr
// (at most strLen bytes, always NUL-terminated by snprintf).
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    snprintf(outStr, strLen, "%llu", static_cast<unsigned long long>(num));
}
// Formats a pointer value as text ("%p", representation is
// implementation-defined) into outStr, at most strLen bytes.
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
2180 void Lock() { m_Mutex.lock(); }
2181 void Unlock() { m_Mutex.unlock(); }
2185 #define VMA_MUTEX VmaMutex 2196 #ifndef VMA_ATOMIC_UINT32 2197 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 2200 #ifndef VMA_BEST_FIT 2213 #define VMA_BEST_FIT (1) 2216 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 2221 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 2224 #ifndef VMA_DEBUG_ALIGNMENT 2229 #define VMA_DEBUG_ALIGNMENT (1) 2232 #ifndef VMA_DEBUG_MARGIN 2237 #define VMA_DEBUG_MARGIN (0) 2240 #ifndef VMA_DEBUG_GLOBAL_MUTEX 2245 #define VMA_DEBUG_GLOBAL_MUTEX (0) 2248 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 2253 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 2256 #ifndef VMA_SMALL_HEAP_MAX_SIZE 2257 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 2261 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 2262 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 2266 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
2272 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
2273 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (population count),
// using the classic parallel bit-counting method.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
// Rounds val up to the nearest multiple of align.
// Assumes align > 0; intended for unsigned integer types.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return ((val + align - 1) / align) * align;
}
// Division with mathematical rounding to nearest integer.
// Intended for unsigned integer types; assumes y > 0.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
// Lomuto-style partition step for VmaQuickSort: picks the last element as
// pivot, moves all elements cmp-less than the pivot before it, and returns
// an iterator to the pivot's final position.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
2326 template<
typename Iterator,
typename Compare>
2327 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
2331 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
2332 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
2333 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
2337 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 2339 #endif // #ifndef VMA_SORT 2348 static inline bool VmaBlocksOnSamePage(
2349 VkDeviceSize resourceAOffset,
2350 VkDeviceSize resourceASize,
2351 VkDeviceSize resourceBOffset,
2352 VkDeviceSize pageSize)
2354 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
2355 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
2356 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
2357 VkDeviceSize resourceBStart = resourceBOffset;
2358 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
2359 return resourceAEndPage == resourceBStartPage;
// Kind of resource placed in a suballocation. The numeric order matters:
// VmaIsBufferImageGranularityConflict normalizes its two arguments so the
// smaller value comes first before checking for conflicts.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
2379 static inline bool VmaIsBufferImageGranularityConflict(
2380 VmaSuballocationType suballocType1,
2381 VmaSuballocationType suballocType2)
2383 if(suballocType1 > suballocType2)
2385 VMA_SWAP(suballocType1, suballocType2);
2388 switch(suballocType1)
2390 case VMA_SUBALLOCATION_TYPE_FREE:
2392 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
2394 case VMA_SUBALLOCATION_TYPE_BUFFER:
2396 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2397 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2398 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
2400 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2401 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
2402 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2403 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
2405 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2406 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
2418 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
2419 m_pMutex(useMutex ? &mutex : VMA_NULL)
2436 VMA_MUTEX* m_pMutex;
2439 #if VMA_DEBUG_GLOBAL_MUTEX 2440 static VMA_MUTEX gDebugGlobalMutex;
2441 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 2443 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 2447 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Binary search over the sorted range [beg, end): returns an iterator to the
first element that is NOT less than key (like std::lower_bound), or end if
no such element exists. cmp(a, b) must return true when a is less than b.
*/
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
2480 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
2482 if((pAllocationCallbacks != VMA_NULL) &&
2483 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
2485 return (*pAllocationCallbacks->pfnAllocation)(
2486 pAllocationCallbacks->pUserData,
2489 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2493 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
2497 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
2499 if((pAllocationCallbacks != VMA_NULL) &&
2500 (pAllocationCallbacks->pfnFree != VMA_NULL))
2502 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
2506 VMA_SYSTEM_FREE(ptr);
2510 template<
typename T>
2511 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
2513 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
2516 template<
typename T>
2517 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
2519 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
2522 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 2524 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 2526 template<
typename T>
2527 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
2530 VmaFree(pAllocationCallbacks, ptr);
2533 template<
typename T>
2534 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
2538 for(
size_t i = count; i--; )
2542 VmaFree(pAllocationCallbacks, ptr);
2547 template<
typename T>
2548 class VmaStlAllocator
2551 const VkAllocationCallbacks*
const m_pCallbacks;
2552 typedef T value_type;
2554 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
2555 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
2557 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
2558 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
2560 template<
typename U>
2561 bool operator==(
const VmaStlAllocator<U>& rhs)
const 2563 return m_pCallbacks == rhs.m_pCallbacks;
2565 template<
typename U>
2566 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 2568 return m_pCallbacks != rhs.m_pCallbacks;
2571 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
2574 #if VMA_USE_STL_VECTOR 2576 #define VmaVector std::vector 2578 template<
typename T,
typename allocatorT>
2579 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
2581 vec.insert(vec.begin() + index, item);
2584 template<
typename T,
typename allocatorT>
2585 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
2587 vec.erase(vec.begin() + index);
2590 #else // #if VMA_USE_STL_VECTOR 2595 template<
typename T,
typename AllocatorT>
2599 typedef T value_type;
2601 VmaVector(
const AllocatorT& allocator) :
2602 m_Allocator(allocator),
2609 VmaVector(
size_t count,
const AllocatorT& allocator) :
2610 m_Allocator(allocator),
2611 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
2617 VmaVector(
const VmaVector<T, AllocatorT>& src) :
2618 m_Allocator(src.m_Allocator),
2619 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
2620 m_Count(src.m_Count),
2621 m_Capacity(src.m_Count)
2625 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
2631 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2634 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
2638 resize(rhs.m_Count);
2641 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
2647 bool empty()
const {
return m_Count == 0; }
2648 size_t size()
const {
return m_Count; }
2649 T* data() {
return m_pArray; }
2650 const T* data()
const {
return m_pArray; }
2652 T& operator[](
size_t index)
2654 VMA_HEAVY_ASSERT(index < m_Count);
2655 return m_pArray[index];
2657 const T& operator[](
size_t index)
const 2659 VMA_HEAVY_ASSERT(index < m_Count);
2660 return m_pArray[index];
2665 VMA_HEAVY_ASSERT(m_Count > 0);
2668 const T& front()
const 2670 VMA_HEAVY_ASSERT(m_Count > 0);
2675 VMA_HEAVY_ASSERT(m_Count > 0);
2676 return m_pArray[m_Count - 1];
2678 const T& back()
const 2680 VMA_HEAVY_ASSERT(m_Count > 0);
2681 return m_pArray[m_Count - 1];
2684 void reserve(
size_t newCapacity,
bool freeMemory =
false)
2686 newCapacity = VMA_MAX(newCapacity, m_Count);
2688 if((newCapacity < m_Capacity) && !freeMemory)
2690 newCapacity = m_Capacity;
2693 if(newCapacity != m_Capacity)
2695 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
2698 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
2700 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2701 m_Capacity = newCapacity;
2702 m_pArray = newArray;
2706 void resize(
size_t newCount,
bool freeMemory =
false)
2708 size_t newCapacity = m_Capacity;
2709 if(newCount > m_Capacity)
2711 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
2715 newCapacity = newCount;
2718 if(newCapacity != m_Capacity)
2720 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
2721 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
2722 if(elementsToCopy != 0)
2724 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
2726 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2727 m_Capacity = newCapacity;
2728 m_pArray = newArray;
2734 void clear(
bool freeMemory =
false)
2736 resize(0, freeMemory);
2739 void insert(
size_t index,
const T& src)
2741 VMA_HEAVY_ASSERT(index <= m_Count);
2742 const size_t oldCount = size();
2743 resize(oldCount + 1);
2744 if(index < oldCount)
2746 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
2748 m_pArray[index] = src;
2751 void remove(
size_t index)
2753 VMA_HEAVY_ASSERT(index < m_Count);
2754 const size_t oldCount = size();
2755 if(index < oldCount - 1)
2757 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
2759 resize(oldCount - 1);
2762 void push_back(
const T& src)
2764 const size_t newIndex = size();
2765 resize(newIndex + 1);
2766 m_pArray[newIndex] = src;
2771 VMA_HEAVY_ASSERT(m_Count > 0);
2775 void push_front(
const T& src)
2782 VMA_HEAVY_ASSERT(m_Count > 0);
2786 typedef T* iterator;
2788 iterator begin() {
return m_pArray; }
2789 iterator end() {
return m_pArray + m_Count; }
2792 AllocatorT m_Allocator;
2798 template<
typename T,
typename allocatorT>
2799 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2801 vec.insert(index, item);
2804 template<
typename T,
typename allocatorT>
2805 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2810 #endif // #if VMA_USE_STL_VECTOR 2812 template<
typename CmpLess,
typename VectorT>
2813 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2815 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2817 vector.data() + vector.size(),
2819 CmpLess()) - vector.data();
2820 VmaVectorInsert(vector, indexToInsert, value);
2821 return indexToInsert;
// Removes the first element equal to value (equality derived from CmpLess)
// from a sorted vector. Returns true if an element was found and removed.
template<typename CmpLess, typename VectorT>
bool VmaVectorRemoveSorted(VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
        vector.begin(),
        vector.end(),
        value,
        comparator);
    // Equality test via the ordering: !(a < b) && !(b < a).
    if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
    {
        size_t indexToRemove = it - vector.begin();
        VmaVectorRemove(vector, indexToRemove);
        return true;
    }
    return false;
}
// Returns the index of the first element equal to value in a sorted vector,
// or vector.size() if not found.
// Fixed: the original compared an iterator against vector.size() and called
// begin() on a const vector - it only went unnoticed because the template
// was never instantiated. Use data()-based pointers consistently instead.
template<typename CmpLess, typename VectorT>
size_t VmaVectorFindSorted(const VectorT& vector, const typename VectorT::value_type& value)
{
    CmpLess comparator;
    const typename VectorT::value_type* const dataBeg = vector.data();
    const typename VectorT::value_type* const dataEnd = dataBeg + vector.size();
    const typename VectorT::value_type* it = VmaBinaryFindFirstNotLess(
        dataBeg,
        dataEnd,
        value,
        comparator);
    if((it != dataEnd) && !comparator(*it, value) && !comparator(value, *it))
    {
        return it - dataBeg;
    }
    else
    {
        return vector.size();
    }
}
2869 template<
typename T>
2870 class VmaPoolAllocator
2873 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2874 ~VmaPoolAllocator();
2882 uint32_t NextFreeIndex;
2889 uint32_t FirstFreeIndex;
2892 const VkAllocationCallbacks* m_pAllocationCallbacks;
2893 size_t m_ItemsPerBlock;
2894 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2896 ItemBlock& CreateNewBlock();
2899 template<
typename T>
2900 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
2901 m_pAllocationCallbacks(pAllocationCallbacks),
2902 m_ItemsPerBlock(itemsPerBlock),
2903 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
2905 VMA_ASSERT(itemsPerBlock > 0);
2908 template<
typename T>
2909 VmaPoolAllocator<T>::~VmaPoolAllocator()
2914 template<
typename T>
2915 void VmaPoolAllocator<T>::Clear()
2917 for(
size_t i = m_ItemBlocks.size(); i--; )
2918 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
2919 m_ItemBlocks.clear();
2922 template<
typename T>
2923 T* VmaPoolAllocator<T>::Alloc()
2925 for(
size_t i = m_ItemBlocks.size(); i--; )
2927 ItemBlock& block = m_ItemBlocks[i];
2929 if(block.FirstFreeIndex != UINT32_MAX)
2931 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
2932 block.FirstFreeIndex = pItem->NextFreeIndex;
2933 return &pItem->Value;
2938 ItemBlock& newBlock = CreateNewBlock();
2939 Item*
const pItem = &newBlock.pItems[0];
2940 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
2941 return &pItem->Value;
2944 template<
typename T>
2945 void VmaPoolAllocator<T>::Free(T* ptr)
2948 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2950 ItemBlock& block = m_ItemBlocks[i];
2954 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2957 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2959 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2960 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2961 block.FirstFreeIndex = index;
2965 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
2968 template<
typename T>
2969 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
2971 ItemBlock newBlock = {
2972 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
2974 m_ItemBlocks.push_back(newBlock);
2977 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
2978 newBlock.pItems[i].NextFreeIndex = i + 1;
2979 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
2980 return m_ItemBlocks.back();
2986 #if VMA_USE_STL_LIST 2988 #define VmaList std::list 2990 #else // #if VMA_USE_STL_LIST 2992 template<
typename T>
3001 template<
typename T>
3005 typedef VmaListItem<T> ItemType;
3007 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
3011 size_t GetCount()
const {
return m_Count; }
3012 bool IsEmpty()
const {
return m_Count == 0; }
3014 ItemType* Front() {
return m_pFront; }
3015 const ItemType* Front()
const {
return m_pFront; }
3016 ItemType* Back() {
return m_pBack; }
3017 const ItemType* Back()
const {
return m_pBack; }
3019 ItemType* PushBack();
3020 ItemType* PushFront();
3021 ItemType* PushBack(
const T& value);
3022 ItemType* PushFront(
const T& value);
3027 ItemType* InsertBefore(ItemType* pItem);
3029 ItemType* InsertAfter(ItemType* pItem);
3031 ItemType* InsertBefore(ItemType* pItem,
const T& value);
3032 ItemType* InsertAfter(ItemType* pItem,
const T& value);
3034 void Remove(ItemType* pItem);
3037 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
3038 VmaPoolAllocator<ItemType> m_ItemAllocator;
3044 VmaRawList(
const VmaRawList<T>& src);
3045 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
3048 template<
typename T>
3049 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
3050 m_pAllocationCallbacks(pAllocationCallbacks),
3051 m_ItemAllocator(pAllocationCallbacks, 128),
3058 template<
typename T>
3059 VmaRawList<T>::~VmaRawList()
3065 template<
typename T>
3066 void VmaRawList<T>::Clear()
3068 if(IsEmpty() ==
false)
3070 ItemType* pItem = m_pBack;
3071 while(pItem != VMA_NULL)
3073 ItemType*
const pPrevItem = pItem->pPrev;
3074 m_ItemAllocator.Free(pItem);
3077 m_pFront = VMA_NULL;
3083 template<
typename T>
3084 VmaListItem<T>* VmaRawList<T>::PushBack()
3086 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3087 pNewItem->pNext = VMA_NULL;
3090 pNewItem->pPrev = VMA_NULL;
3091 m_pFront = pNewItem;
3097 pNewItem->pPrev = m_pBack;
3098 m_pBack->pNext = pNewItem;
3105 template<
typename T>
3106 VmaListItem<T>* VmaRawList<T>::PushFront()
3108 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
3109 pNewItem->pPrev = VMA_NULL;
3112 pNewItem->pNext = VMA_NULL;
3113 m_pFront = pNewItem;
3119 pNewItem->pNext = m_pFront;
3120 m_pFront->pPrev = pNewItem;
3121 m_pFront = pNewItem;
3127 template<
typename T>
3128 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
3130 ItemType*
const pNewItem = PushBack();
3131 pNewItem->Value = value;
3135 template<
typename T>
3136 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
3138 ItemType*
const pNewItem = PushFront();
3139 pNewItem->Value = value;
3143 template<
typename T>
3144 void VmaRawList<T>::PopBack()
3146 VMA_HEAVY_ASSERT(m_Count > 0);
3147 ItemType*
const pBackItem = m_pBack;
3148 ItemType*
const pPrevItem = pBackItem->pPrev;
3149 if(pPrevItem != VMA_NULL)
3151 pPrevItem->pNext = VMA_NULL;
3153 m_pBack = pPrevItem;
3154 m_ItemAllocator.Free(pBackItem);
3158 template<
typename T>
3159 void VmaRawList<T>::PopFront()
3161 VMA_HEAVY_ASSERT(m_Count > 0);
3162 ItemType*
const pFrontItem = m_pFront;
3163 ItemType*
const pNextItem = pFrontItem->pNext;
3164 if(pNextItem != VMA_NULL)
3166 pNextItem->pPrev = VMA_NULL;
3168 m_pFront = pNextItem;
3169 m_ItemAllocator.Free(pFrontItem);
3173 template<
typename T>
3174 void VmaRawList<T>::Remove(ItemType* pItem)
3176 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
3177 VMA_HEAVY_ASSERT(m_Count > 0);
3179 if(pItem->pPrev != VMA_NULL)
3181 pItem->pPrev->pNext = pItem->pNext;
3185 VMA_HEAVY_ASSERT(m_pFront == pItem);
3186 m_pFront = pItem->pNext;
3189 if(pItem->pNext != VMA_NULL)
3191 pItem->pNext->pPrev = pItem->pPrev;
3195 VMA_HEAVY_ASSERT(m_pBack == pItem);
3196 m_pBack = pItem->pPrev;
3199 m_ItemAllocator.Free(pItem);
3203 template<
typename T>
3204 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
3206 if(pItem != VMA_NULL)
3208 ItemType*
const prevItem = pItem->pPrev;
3209 ItemType*
const newItem = m_ItemAllocator.Alloc();
3210 newItem->pPrev = prevItem;
3211 newItem->pNext = pItem;
3212 pItem->pPrev = newItem;
3213 if(prevItem != VMA_NULL)
3215 prevItem->pNext = newItem;
3219 VMA_HEAVY_ASSERT(m_pFront == pItem);
3229 template<
typename T>
3230 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
3232 if(pItem != VMA_NULL)
3234 ItemType*
const nextItem = pItem->pNext;
3235 ItemType*
const newItem = m_ItemAllocator.Alloc();
3236 newItem->pNext = nextItem;
3237 newItem->pPrev = pItem;
3238 pItem->pNext = newItem;
3239 if(nextItem != VMA_NULL)
3241 nextItem->pPrev = newItem;
3245 VMA_HEAVY_ASSERT(m_pBack == pItem);
3255 template<
typename T>
3256 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
3258 ItemType*
const newItem = InsertBefore(pItem);
3259 newItem->Value = value;
3263 template<
typename T>
3264 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
3266 ItemType*
const newItem = InsertAfter(pItem);
3267 newItem->Value = value;
3271 template<
typename T,
typename AllocatorT>
3284 T& operator*()
const 3286 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3287 return m_pItem->Value;
3289 T* operator->()
const 3291 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3292 return &m_pItem->Value;
3295 iterator& operator++()
3297 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3298 m_pItem = m_pItem->pNext;
3301 iterator& operator--()
3303 if(m_pItem != VMA_NULL)
3305 m_pItem = m_pItem->pPrev;
3309 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3310 m_pItem = m_pList->Back();
3315 iterator operator++(
int)
3317 iterator result = *
this;
3321 iterator operator--(
int)
3323 iterator result = *
this;
3328 bool operator==(
const iterator& rhs)
const 3330 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3331 return m_pItem == rhs.m_pItem;
3333 bool operator!=(
const iterator& rhs)
const 3335 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3336 return m_pItem != rhs.m_pItem;
3340 VmaRawList<T>* m_pList;
3341 VmaListItem<T>* m_pItem;
3343 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
3349 friend class VmaList<T, AllocatorT>;
3352 class const_iterator
3361 const_iterator(
const iterator& src) :
3362 m_pList(src.m_pList),
3363 m_pItem(src.m_pItem)
3367 const T& operator*()
const 3369 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3370 return m_pItem->Value;
3372 const T* operator->()
const 3374 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3375 return &m_pItem->Value;
3378 const_iterator& operator++()
3380 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3381 m_pItem = m_pItem->pNext;
3384 const_iterator& operator--()
3386 if(m_pItem != VMA_NULL)
3388 m_pItem = m_pItem->pPrev;
3392 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3393 m_pItem = m_pList->Back();
3398 const_iterator operator++(
int)
3400 const_iterator result = *
this;
3404 const_iterator operator--(
int)
3406 const_iterator result = *
this;
3411 bool operator==(
const const_iterator& rhs)
const 3413 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3414 return m_pItem == rhs.m_pItem;
3416 bool operator!=(
const const_iterator& rhs)
const 3418 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3419 return m_pItem != rhs.m_pItem;
3423 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
3429 const VmaRawList<T>* m_pList;
3430 const VmaListItem<T>* m_pItem;
3432 friend class VmaList<T, AllocatorT>;
3435 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
3437 bool empty()
const {
return m_RawList.IsEmpty(); }
3438 size_t size()
const {
return m_RawList.GetCount(); }
3440 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
3441 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
3443 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
3444 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
3446 void clear() { m_RawList.Clear(); }
3447 void push_back(
const T& value) { m_RawList.PushBack(value); }
3448 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
3449 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
3452 VmaRawList<T> m_RawList;
3455 #endif // #if VMA_USE_STL_LIST 3463 #if VMA_USE_STL_UNORDERED_MAP 3465 #define VmaPair std::pair 3467 #define VMA_MAP_TYPE(KeyT, ValueT) \ 3468 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 3470 #else // #if VMA_USE_STL_UNORDERED_MAP 3472 template<
typename T1,
typename T2>
3478 VmaPair() : first(), second() { }
3479 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
3485 template<
typename KeyT,
typename ValueT>
3489 typedef VmaPair<KeyT, ValueT> PairType;
3490 typedef PairType* iterator;
3492 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
3494 iterator begin() {
return m_Vector.begin(); }
3495 iterator end() {
return m_Vector.end(); }
3497 void insert(
const PairType& pair);
3498 iterator find(
const KeyT& key);
3499 void erase(iterator it);
3502 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
3505 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 3507 template<
typename FirstT,
typename SecondT>
3508 struct VmaPairFirstLess
3510 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 3512 return lhs.first < rhs.first;
3514 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 3516 return lhs.first < rhsFirst;
3520 template<
typename KeyT,
typename ValueT>
3521 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
3523 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3525 m_Vector.data() + m_Vector.size(),
3527 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
3528 VmaVectorInsert(m_Vector, indexToInsert, pair);
3531 template<
typename KeyT,
typename ValueT>
3532 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
3534 PairType* it = VmaBinaryFindFirstNotLess(
3536 m_Vector.data() + m_Vector.size(),
3538 VmaPairFirstLess<KeyT, ValueT>());
3539 if((it != m_Vector.end()) && (it->first == key))
3545 return m_Vector.end();
3549 template<
typename KeyT,
typename ValueT>
3550 void VmaMap<KeyT, ValueT>::erase(iterator it)
3552 VmaVectorRemove(m_Vector, it - m_Vector.begin());
3555 #endif // #if VMA_USE_STL_UNORDERED_MAP 3561 class VmaDeviceMemoryBlock;
3563 struct VmaAllocation_T
3566 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
3570 FLAG_USER_DATA_STRING = 0x01,
3574 enum ALLOCATION_TYPE
3576 ALLOCATION_TYPE_NONE,
3577 ALLOCATION_TYPE_BLOCK,
3578 ALLOCATION_TYPE_DEDICATED,
3581 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
3584 m_pUserData(VMA_NULL),
3585 m_LastUseFrameIndex(currentFrameIndex),
3586 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
3587 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
3589 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
3595 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
3598 VMA_ASSERT(m_pUserData == VMA_NULL);
3601 void InitBlockAllocation(
3603 VmaDeviceMemoryBlock* block,
3604 VkDeviceSize offset,
3605 VkDeviceSize alignment,
3607 VmaSuballocationType suballocationType,
3611 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3612 VMA_ASSERT(block != VMA_NULL);
3613 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3614 m_Alignment = alignment;
3616 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3617 m_SuballocationType = (uint8_t)suballocationType;
3618 m_BlockAllocation.m_hPool = hPool;
3619 m_BlockAllocation.m_Block = block;
3620 m_BlockAllocation.m_Offset = offset;
3621 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
3626 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3627 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
3628 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3629 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
3630 m_BlockAllocation.m_Block = VMA_NULL;
3631 m_BlockAllocation.m_Offset = 0;
3632 m_BlockAllocation.m_CanBecomeLost =
true;
3635 void ChangeBlockAllocation(
3637 VmaDeviceMemoryBlock* block,
3638 VkDeviceSize offset);
3641 void InitDedicatedAllocation(
3642 uint32_t memoryTypeIndex,
3643 VkDeviceMemory hMemory,
3644 VmaSuballocationType suballocationType,
3648 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3649 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
3650 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
3653 m_SuballocationType = (uint8_t)suballocationType;
3654 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3655 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
3656 m_DedicatedAllocation.m_hMemory = hMemory;
3657 m_DedicatedAllocation.m_pMappedData = pMappedData;
// Trivial accessors of VmaAllocation_T (inline bodies lost their braces in
// extraction; tokens preserved verbatim).
3660 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
3661 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
3662 VkDeviceSize GetSize()
const {
return m_Size; }
// True when pUserData is treated as an owned, heap-copied C string
// (see SetUserData/FreeUserDataString below).
3663 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
3664 void* GetUserData()
const {
return m_pUserData; }
3665 void SetUserData(
VmaAllocator hAllocator,
void* pUserData);
3666 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
// Valid only for block allocations (asserted).
3668 VmaDeviceMemoryBlock* GetBlock()
const 3670 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3671 return m_BlockAllocation.m_Block;
3673 VkDeviceSize GetOffset()
const;
3674 VkDeviceMemory GetMemory()
const;
3675 uint32_t GetMemoryTypeIndex()
const;
// Persistent map == mapped for the allocation's whole lifetime (flag bit in m_MapCount).
3676 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
3677 void* GetMappedData()
const;
3678 bool CanBecomeLost()
const;
// Lost-allocation support: last-use frame index is atomic because it is read
// and CAS-updated from multiple threads (see MakeLost below).
3681 uint32_t GetLastUseFrameIndex()
const 3683 return m_LastUseFrameIndex.load();
3685 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
3687 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
3697 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3699 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
3701 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
// Map reference counting, split by allocation type.
3712 void BlockAllocMap();
3713 void BlockAllocUnmap();
3714 VkResult DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData);
// Data members. m_MapCount's low 7 bits are the ref count; the high bit is
// MAP_COUNT_FLAG_PERSISTENT_MAP (see BlockAllocMap / DedicatedAllocMap).
3718 VkDeviceSize m_Alignment;
3719 VkDeviceSize m_Size;
3721 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
3723 uint8_t m_SuballocationType;
// State for an allocation placed inside a larger VmaDeviceMemoryBlock.
3730 struct BlockAllocation
3733 VmaDeviceMemoryBlock* m_Block;
3734 VkDeviceSize m_Offset;
3735 bool m_CanBecomeLost;
// State for a dedicated allocation owning its own VkDeviceMemory.
3739 struct DedicatedAllocation
3741 uint32_t m_MemoryTypeIndex;
3742 VkDeviceMemory m_hMemory;
3743 void* m_pMappedData;
// NOTE(review): presumably these two members share an anonymous union
// discriminated by m_Type — elided lines prevent confirming here.
3749 BlockAllocation m_BlockAllocation;
3751 DedicatedAllocation m_DedicatedAllocation;
// A single region (used or free) inside one memory block's suballocation list.
3761 struct VmaSuballocation
3763 VkDeviceSize offset;
3766 VmaSuballocationType type;
3769 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost (in bytes, 1 MiB) charged per allocation that would have to be made
// "lost" — used to compare candidate allocation requests in CalcCost().
3772 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// Parameters of a planned allocation inside a block, produced by
// VmaBlockMetadata::CreateAllocationRequest.
3787 struct VmaAllocationRequest
3789 VkDeviceSize offset;
3790 VkDeviceSize sumFreeSize;
3791 VkDeviceSize sumItemSize;
3792 VmaSuballocationList::iterator item;
3793 size_t itemsToMakeLostCount;
// Lower cost == better candidate: bytes occupied by allocations that must be lost,
// plus a fixed penalty per lost allocation.
3795 VkDeviceSize CalcCost()
const 3797 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// Bookkeeping for the interior of one VkDeviceMemory block: an ordered list of
// suballocations plus a by-size index of the free ones. Not thread-safe by
// itself; callers synchronize externally.
3805 class VmaBlockMetadata
3809 ~VmaBlockMetadata();
3810 void Init(VkDeviceSize size);
// Full consistency check of internal structures; returns false on corruption.
3813 bool Validate()
const;
3814 VkDeviceSize GetSize()
const {
return m_Size; }
3815 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
3816 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
3817 VkDeviceSize GetUnusedRangeSizeMax()
const;
3819 bool IsEmpty()
const;
3821 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
3824 #if VMA_STATS_STRING_ENABLED 3825 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// Shortcut for a brand-new empty block: the whole block is one free range.
3829 void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
// Finds a place for a new allocation; may plan to make other allocations lost
// when canMakeOtherLost is set. Returns false if nothing fits.
3834 bool CreateAllocationRequest(
3835 uint32_t currentFrameIndex,
3836 uint32_t frameInUseCount,
3837 VkDeviceSize bufferImageGranularity,
3838 VkDeviceSize allocSize,
3839 VkDeviceSize allocAlignment,
3840 VmaSuballocationType allocType,
3841 bool canMakeOtherLost,
3842 VmaAllocationRequest* pAllocationRequest);
3844 bool MakeRequestedAllocationsLost(
3845 uint32_t currentFrameIndex,
3846 uint32_t frameInUseCount,
3847 VmaAllocationRequest* pAllocationRequest);
3849 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3853 const VmaAllocationRequest& request,
3854 VmaSuballocationType type,
3855 VkDeviceSize allocSize,
3860 void FreeAtOffset(VkDeviceSize offset);
3863 VkDeviceSize m_Size;
3864 uint32_t m_FreeCount;
3865 VkDeviceSize m_SumFreeSize;
3866 VmaSuballocationList m_Suballocations;
// Free suballocations >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER, sorted by
// size ascending — enables best-fit binary search (see CreateAllocationRequest).
3869 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3871 bool ValidateFreeSuballocationList()
const;
// Checks whether an allocation fits at/after suballocItem; outputs the chosen
// offset and, when making others lost is allowed, the cost of doing so.
3875 bool CheckAllocation(
3876 uint32_t currentFrameIndex,
3877 uint32_t frameInUseCount,
3878 VkDeviceSize bufferImageGranularity,
3879 VkDeviceSize allocSize,
3880 VkDeviceSize allocAlignment,
3881 VmaSuballocationType allocType,
3882 VmaSuballocationList::const_iterator suballocItem,
3883 bool canMakeOtherLost,
3884 VkDeviceSize* pOffset,
3885 size_t* itemsToMakeLostCount,
3886 VkDeviceSize* pSumFreeSize,
3887 VkDeviceSize* pSumItemSize)
const;
// Free-list maintenance helpers.
3889 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3893 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3896 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3899 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// Reference-counted vkMapMemory wrapper shared by all allocations in one block:
// the block is actually mapped only while m_MapCount > 0.
3903 class VmaDeviceMemoryMapping
3906 VmaDeviceMemoryMapping();
3907 ~VmaDeviceMemoryMapping();
3909 void* GetMappedData()
const {
return m_pMappedData; }
// 'count' is the number of references to add/remove in one call.
3912 VkResult Map(
VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count,
void **ppData);
3913 void Unmap(
VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count);
3917 uint32_t m_MapCount;
3918 void* m_pMappedData;
// One VkDeviceMemory allocation sub-divided by m_Metadata; mapping is shared
// through m_Mapping. Destroy() (elided here) must release m_hMemory first,
// hence the destructor assertion.
3927 class VmaDeviceMemoryBlock
3930 uint32_t m_MemoryTypeIndex;
3931 VkDeviceMemory m_hMemory;
3932 VmaDeviceMemoryMapping m_Mapping;
3933 VmaBlockMetadata m_Metadata;
3937 ~VmaDeviceMemoryBlock()
3939 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
3944 uint32_t newMemoryTypeIndex,
3945 VkDeviceMemory newMemory,
3946 VkDeviceSize newSize);
3951 bool Validate()
const;
// ppData may be null when the caller only wants the ref count raised.
3954 VkResult Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData);
// Orders raw pointers — comparator for sorted pointer containers.
3958 struct VmaPointerLess
3960 bool operator()(
const void* lhs,
const void* rhs)
const 3966 class VmaDefragmentator;
// A growable sequence of VmaDeviceMemoryBlock of one memory type — the backing
// store for both default pools and custom VmaPool objects (m_IsCustomPool).
3974 struct VmaBlockVector
3978 uint32_t memoryTypeIndex,
3979 VkDeviceSize preferredBlockSize,
3980 size_t minBlockCount,
3981 size_t maxBlockCount,
3982 VkDeviceSize bufferImageGranularity,
3983 uint32_t frameInUseCount,
3987 VkResult CreateMinBlocks();
3989 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3990 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3991 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3992 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3996 bool IsEmpty()
const {
return m_Blocks.empty(); }
// (Allocate signature partially elided.)
4000 uint32_t currentFrameIndex,
4001 const VkMemoryRequirements& vkMemReq,
4003 VmaSuballocationType suballocType,
4012 #if VMA_STATS_STRING_ENABLED 4013 void PrintDetailedMap(
class VmaJsonWriter& json);
4016 void MakePoolAllocationsLost(
4017 uint32_t currentFrameIndex,
4018 size_t* pLostAllocationCount);
// Lazily creates m_pDefragmentator for the given frame.
4020 VmaDefragmentator* EnsureDefragmentator(
4022 uint32_t currentFrameIndex);
4024 VkResult Defragment(
4026 VkDeviceSize& maxBytesToMove,
4027 uint32_t& maxAllocationsToMove);
4029 void DestroyDefragmentator();
4032 friend class VmaDefragmentator;
4035 const uint32_t m_MemoryTypeIndex;
4036 const VkDeviceSize m_PreferredBlockSize;
4037 const size_t m_MinBlockCount;
4038 const size_t m_MaxBlockCount;
4039 const VkDeviceSize m_BufferImageGranularity;
4040 const uint32_t m_FrameInUseCount;
4041 const bool m_IsCustomPool;
4044 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
// True while an empty block is kept alive to avoid alloc/free thrashing.
4048 bool m_HasEmptyBlock;
4049 VmaDefragmentator* m_pDefragmentator;
4051 size_t CalcMaxBlockSize()
const;
4054 void Remove(VmaDeviceMemoryBlock* pBlock);
// Keeps m_Blocks roughly sorted without a full O(n log n) pass per call.
4058 void IncrementallySortBlocks();
4060 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// --- VmaPool_T fragment: a custom pool is a thin wrapper over one block vector.
4066 VmaBlockVector m_BlockVector;
4074 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
// Moves allocations between blocks of one VmaBlockVector to compact memory,
// bounded by maxBytesToMove / maxAllocationsToMove per Defragment() call.
4076 #if VMA_STATS_STRING_ENABLED 4081 class VmaDefragmentator
4084 VmaBlockVector*
const m_pBlockVector;
4085 uint32_t m_CurrentFrameIndex;
4086 VkDeviceSize m_BytesMoved;
4087 uint32_t m_AllocationsMoved;
// One allocation scheduled for moving; *m_pChanged is set when it actually moved.
4089 struct AllocationInfo
4092 VkBool32* m_pChanged;
4095 m_hAllocation(VK_NULL_HANDLE),
4096 m_pChanged(VMA_NULL)
// Sort predicate: biggest allocations first.
4101 struct AllocationInfoSizeGreater
4103 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 4105 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
4110 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
// Per-block working state during one defragmentation pass (struct BlockInfo;
// its opening line is elided).
4114 VmaDeviceMemoryBlock* m_pBlock;
4115 bool m_HasNonMovableAllocations;
4116 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
4118 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
4120 m_HasNonMovableAllocations(true),
4121 m_Allocations(pAllocationCallbacks),
4122 m_pMappedDataForDefragmentation(VMA_NULL)
// Non-movable == block holds allocations the user did not register for moving.
4126 void CalcHasNonMovableAllocations()
4128 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
4129 const size_t defragmentAllocCount = m_Allocations.size();
4130 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
// NOTE(review): "Descecnding" typo is in upstream VMA; renaming would break callers.
4133 void SortAllocationsBySizeDescecnding()
4135 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
4138 VkResult EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData);
4143 void* m_pMappedDataForDefragmentation;
// Heterogeneous comparators so BlockInfo* vectors can be binary-searched by block.
4146 struct BlockPointerLess
4148 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 4150 return pLhsBlockInfo->m_pBlock < pRhsBlock;
4152 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 4154 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Ordering for choosing destination blocks: movable-only blocks first, then
// (per elided lines) by free space.
4160 struct BlockInfoCompareMoveDestination
4162 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 4164 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
4168 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
4172 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
4180 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
4181 BlockInfoVector m_Blocks;
4183 VkResult DefragmentRound(
4184 VkDeviceSize maxBytesToMove,
4185 uint32_t maxAllocationsToMove);
// Heuristic: moving only "makes sense" toward an earlier block / lower offset.
4187 static bool MoveMakesSense(
4188 size_t dstBlockIndex, VkDeviceSize dstOffset,
4189 size_t srcBlockIndex, VkDeviceSize srcOffset);
4194 VmaBlockVector* pBlockVector,
4195 uint32_t currentFrameIndex);
4197 ~VmaDefragmentator();
4199 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
4200 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
4202 void AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged);
4204 VkResult Defragment(
4205 VkDeviceSize maxBytesToMove,
4206 uint32_t maxAllocationsToMove);
// Central allocator state behind the public VmaAllocator handle: per-memory-type
// block vectors, per-type dedicated-allocation lists, custom pools, and cached
// device/physical-device properties.
4210 struct VmaAllocator_T
4213 bool m_UseKhrDedicatedAllocation;
4215 bool m_AllocationCallbacksSpecified;
4216 VkAllocationCallbacks m_AllocationCallbacks;
// Optional per-heap budget caps, guarded by its own mutex.
4220 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
4221 VMA_MUTEX m_HeapSizeLimitMutex;
4223 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
4224 VkPhysicalDeviceMemoryProperties m_MemProps;
4227 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
// Dedicated allocations tracked per memory type, each list with its own mutex.
4230 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
4231 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
4232 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns user callbacks, or null so Vulkan uses its defaults.
4237 const VkAllocationCallbacks* GetAllocationCallbacks()
const 4239 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
4243 return m_VulkanFunctions;
// Effective granularity: device limit, raised to the debug minimum if larger.
4246 VkDeviceSize GetBufferImageGranularity()
const 4249 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
4250 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
4253 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
4254 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
4256 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 4258 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
4259 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// Query requirements plus dedicated-allocation preference (KHR extension path).
4262 void GetBufferMemoryRequirements(
4264 VkMemoryRequirements& memReq,
4265 bool& requiresDedicatedAllocation,
4266 bool& prefersDedicatedAllocation)
const;
4267 void GetImageMemoryRequirements(
4269 VkMemoryRequirements& memReq,
4270 bool& requiresDedicatedAllocation,
4271 bool& prefersDedicatedAllocation)
const;
// Main allocation entry point used by vmaCreateBuffer/Image and friends.
4274 VkResult AllocateMemory(
4275 const VkMemoryRequirements& vkMemReq,
4276 bool requiresDedicatedAllocation,
4277 bool prefersDedicatedAllocation,
4278 VkBuffer dedicatedBuffer,
4279 VkImage dedicatedImage,
4281 VmaSuballocationType suballocType,
4287 void CalculateStats(
VmaStats* pStats);
4289 #if VMA_STATS_STRING_ENABLED 4290 void PrintDetailedMap(
class VmaJsonWriter& json);
4293 VkResult Defragment(
4295 size_t allocationCount,
4296 VkBool32* pAllocationsChanged,
4304 void DestroyPool(
VmaPool pool);
4307 void SetCurrentFrameIndex(uint32_t frameIndex);
4309 void MakePoolAllocationsLost(
4311 size_t* pLostAllocationCount);
// Raw vkAllocateMemory/vkFreeMemory wrappers (apply heap limits, callbacks).
4315 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
4316 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
4322 VkDeviceSize m_PreferredLargeHeapBlockSize;
4324 VkPhysicalDevice m_PhysicalDevice;
4325 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
4327 VMA_MUTEX m_PoolsMutex;
4329 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
4335 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
// Tries block-vector allocation for one memory type; may fall back to dedicated.
4337 VkResult AllocateMemoryOfType(
4338 const VkMemoryRequirements& vkMemReq,
4339 bool dedicatedAllocation,
4340 VkBuffer dedicatedBuffer,
4341 VkImage dedicatedImage,
4343 uint32_t memTypeIndex,
4344 VmaSuballocationType suballocType,
4348 VkResult AllocateDedicatedMemory(
4350 VmaSuballocationType suballocType,
4351 uint32_t memTypeIndex,
4353 bool isUserDataString,
4355 VkBuffer dedicatedBuffer,
4356 VkImage dedicatedImage,
// Allocator-aware new/delete helpers: route every internal allocation through
// the VkAllocationCallbacks stored in the VmaAllocator.
4366 static void* VmaMalloc(
VmaAllocator hAllocator,
size_t size,
size_t alignment)
4368 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
4371 static void VmaFree(
VmaAllocator hAllocator,
void* ptr)
4373 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Typed single-object allocation (placement-new of T elided in extraction).
4376 template<
typename T>
4379 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
4382 template<
typename T>
4383 static T* VmaAllocateArray(
VmaAllocator hAllocator,
size_t count)
4385 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// Destroy + free a single object (the ptr->~T() call is elided here).
4388 template<
typename T>
4389 static void vma_delete(
VmaAllocator hAllocator, T* ptr)
4394 VmaFree(hAllocator, ptr);
// Destroy + free an array. NOTE(review): upstream runs ptr[i].~T() inside the
// loop and frees once after it; the destructor line is elided in this dump.
4398 template<
typename T>
4399 static void vma_delete_array(
VmaAllocator hAllocator, T* ptr,
size_t count)
4403 for(
size_t i = count; i--; )
4405 VmaFree(hAllocator, ptr);
// Minimal append-only string buffer used for building the JSON stats string.
// Backed by VmaVector so it honors the allocator's callbacks; data is NOT
// NUL-terminated (GetLength/GetData give the raw span).
4412 #if VMA_STATS_STRING_ENABLED 4414 class VmaStringBuilder
4417 VmaStringBuilder(
VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
4418 size_t GetLength()
const {
return m_Data.size(); }
4419 const char* GetData()
const {
return m_Data.data(); }
4421 void Add(
char ch) { m_Data.push_back(ch); }
4422 void Add(
const char* pStr);
4423 void AddNewLine() { Add(
'\n'); }
4424 void AddNumber(uint32_t num);
4425 void AddNumber(uint64_t num);
4426 void AddPointer(
const void* ptr);
4429 VmaVector< char, VmaStlAllocator<char> > m_Data;
// Appends pStr without its terminator (resize + memcpy, one growth per call).
4432 void VmaStringBuilder::Add(
const char* pStr)
4434 const size_t strLen = strlen(pStr);
4437 const size_t oldCount = m_Data.size();
4438 m_Data.resize(oldCount + strLen);
4439 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Numeric/pointer formatting goes through the VmaUint32ToStr-family helpers
// into a stack buffer (declaration elided), then appended as text.
4443 void VmaStringBuilder::AddNumber(uint32_t num)
4446 VmaUint32ToStr(buf,
sizeof(buf), num);
4450 void VmaStringBuilder::AddNumber(uint64_t num)
4453 VmaUint64ToStr(buf,
sizeof(buf), num);
4457 void VmaStringBuilder::AddPointer(
const void* ptr)
4460 VmaPtrToStr(buf,
sizeof(buf), ptr);
// Streaming JSON writer over VmaStringBuilder. Objects/arrays are tracked on
// m_Stack; inside an object, even-numbered values must be key strings
// (asserted in BeginValue).
4464 #endif // #if VMA_STATS_STRING_ENABLED 4469 #if VMA_STATS_STRING_ENABLED 4474 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
4477 void BeginObject(
bool singleLine =
false);
4480 void BeginArray(
bool singleLine =
false);
// WriteString = BeginString + EndString; Continue* append to an open string.
4483 void WriteString(
const char* pStr);
4484 void BeginString(
const char* pStr = VMA_NULL);
4485 void ContinueString(
const char* pStr);
4486 void ContinueString(uint32_t n);
4487 void ContinueString(uint64_t n);
4488 void ContinueString_Pointer(
const void* ptr);
4489 void EndString(
const char* pStr = VMA_NULL);
4491 void WriteNumber(uint32_t n);
4492 void WriteNumber(uint64_t n);
4493 void WriteBool(
bool b);
4497 static const char*
const INDENT;
4499 enum COLLECTION_TYPE
4501 COLLECTION_TYPE_OBJECT,
4502 COLLECTION_TYPE_ARRAY,
// One stack frame per open object/array.
4506 COLLECTION_TYPE type;
4507 uint32_t valueCount;
4508 bool singleLineMode;
4511 VmaStringBuilder& m_SB;
4512 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
4513 bool m_InsideString;
4515 void BeginValue(
bool isString);
4516 void WriteIndent(
bool oneLess =
false);
// Two spaces per nesting level.
4519 const char*
const VmaJsonWriter::INDENT =
"  ";
// VmaJsonWriter member definitions. Many bodies are partially elided; the
// assertions that remain document the invariants: no structural calls while a
// string is open, and balanced Begin/End pairs by collection type.
4521 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
4523 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
4524 m_InsideString(false)
// Destructor checks the document was fully closed.
4528 VmaJsonWriter::~VmaJsonWriter()
4530 VMA_ASSERT(!m_InsideString);
4531 VMA_ASSERT(m_Stack.empty());
4534 void VmaJsonWriter::BeginObject(
bool singleLine)
4536 VMA_ASSERT(!m_InsideString);
4542 item.type = COLLECTION_TYPE_OBJECT;
4543 item.valueCount = 0;
4544 item.singleLineMode = singleLine;
4545 m_Stack.push_back(item);
4548 void VmaJsonWriter::EndObject()
4550 VMA_ASSERT(!m_InsideString);
4555 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
4559 void VmaJsonWriter::BeginArray(
bool singleLine)
4561 VMA_ASSERT(!m_InsideString);
4567 item.type = COLLECTION_TYPE_ARRAY;
4568 item.valueCount = 0;
4569 item.singleLineMode = singleLine;
4570 m_Stack.push_back(item);
4573 void VmaJsonWriter::EndArray()
4575 VMA_ASSERT(!m_InsideString);
4580 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
4584 void VmaJsonWriter::WriteString(
const char* pStr)
4590 void VmaJsonWriter::BeginString(
const char* pStr)
4592 VMA_ASSERT(!m_InsideString);
4596 m_InsideString =
true;
4597 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4599 ContinueString(pStr);
// Appends pStr with JSON escaping; the per-character switch is elided, only
// the unsupported-character assertion survives.
4603 void VmaJsonWriter::ContinueString(
const char* pStr)
4605 VMA_ASSERT(m_InsideString);
4607 const size_t strLen = strlen(pStr);
4608 for(
size_t i = 0; i < strLen; ++i)
4641 VMA_ASSERT(0 &&
"Character not currently supported.");
4647 void VmaJsonWriter::ContinueString(uint32_t n)
4649 VMA_ASSERT(m_InsideString);
4653 void VmaJsonWriter::ContinueString(uint64_t n)
4655 VMA_ASSERT(m_InsideString);
4659 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
4661 VMA_ASSERT(m_InsideString);
4662 m_SB.AddPointer(ptr);
4665 void VmaJsonWriter::EndString(
const char* pStr)
4667 VMA_ASSERT(m_InsideString);
4668 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4670 ContinueString(pStr);
4673 m_InsideString =
false;
4676 void VmaJsonWriter::WriteNumber(uint32_t n)
4678 VMA_ASSERT(!m_InsideString);
4683 void VmaJsonWriter::WriteNumber(uint64_t n)
4685 VMA_ASSERT(!m_InsideString);
4690 void VmaJsonWriter::WriteBool(
bool b)
4692 VMA_ASSERT(!m_InsideString);
4694 m_SB.Add(b ?
"true" :
"false");
4697 void VmaJsonWriter::WriteNull()
4699 VMA_ASSERT(!m_InsideString);
// Emits separators/indentation before a value; inside an object, even-indexed
// values (valueCount % 2 == 0) must be key strings.
4704 void VmaJsonWriter::BeginValue(
bool isString)
4706 if(!m_Stack.empty())
4708 StackItem& currItem = m_Stack.back();
4709 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4710 currItem.valueCount % 2 == 0)
4712 VMA_ASSERT(isString);
4715 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4716 currItem.valueCount % 2 != 0)
4720 else if(currItem.valueCount > 0)
4729 ++currItem.valueCount;
// Newline + one INDENT per stack level (minus one when closing a collection).
4733 void VmaJsonWriter::WriteIndent(
bool oneLess)
4735 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
4739 size_t count = m_Stack.size();
4740 if(count > 0 && oneLess)
4744 for(
size_t i = 0; i < count; ++i)
// SetUserData: in string mode (FLAG_USER_DATA_STRING) the allocation OWNS a
// heap copy of the string — the old copy is freed and the new one duplicated;
// otherwise pUserData is stored as an opaque pointer.
4751 #endif // #if VMA_STATS_STRING_ENABLED 4755 void VmaAllocation_T::SetUserData(
VmaAllocator hAllocator,
void* pUserData)
4757 if(IsUserDataString())
4759 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
4761 FreeUserDataString(hAllocator);
4763 if(pUserData != VMA_NULL)
4765 const char*
const newStrSrc = (
char*)pUserData;
4766 const size_t newStrLen = strlen(newStrSrc);
4767 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
// +1 copies the NUL terminator as well.
4768 memcpy(newStrDst, newStrSrc, newStrLen + 1);
4769 m_pUserData = newStrDst;
4774 m_pUserData = pUserData;
// Moves the allocation to a new block/offset (defragmentation). If the
// allocation is persistently mapped, its map references are transferred:
// unmap old block, map new block with the same ref count.
4778 void VmaAllocation_T::ChangeBlockAllocation(
4780 VmaDeviceMemoryBlock* block,
4781 VkDeviceSize offset)
4783 VMA_ASSERT(block != VMA_NULL);
4784 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4787 if(block != m_BlockAllocation.m_Block)
4789 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
4790 if(IsPersistentMap())
4792 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
4793 block->Map(hAllocator, mapRefCount, VMA_NULL);
4796 m_BlockAllocation.m_Block = block;
4797 m_BlockAllocation.m_Offset = offset;
// Type-dispatched getters (the switch(m_Type) headers are elided; only the
// case labels survive). Dedicated allocations have offset 0 within their own
// VkDeviceMemory; block allocations delegate to the owning block.
4800 VkDeviceSize VmaAllocation_T::GetOffset()
const 4804 case ALLOCATION_TYPE_BLOCK:
4805 return m_BlockAllocation.m_Offset;
4806 case ALLOCATION_TYPE_DEDICATED:
4814 VkDeviceMemory VmaAllocation_T::GetMemory()
const 4818 case ALLOCATION_TYPE_BLOCK:
4819 return m_BlockAllocation.m_Block->m_hMemory;
4820 case ALLOCATION_TYPE_DEDICATED:
4821 return m_DedicatedAllocation.m_hMemory;
4824 return VK_NULL_HANDLE;
4828 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 4832 case ALLOCATION_TYPE_BLOCK:
4833 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
4834 case ALLOCATION_TYPE_DEDICATED:
4835 return m_DedicatedAllocation.m_MemoryTypeIndex;
// Mapped pointer: block data pointer + this allocation's offset, or the
// dedicated allocation's own mapped pointer.
4842 void* VmaAllocation_T::GetMappedData()
const 4846 case ALLOCATION_TYPE_BLOCK:
4849 void* pBlockData = m_BlockAllocation.m_Block->m_Mapping.GetMappedData();
4850 VMA_ASSERT(pBlockData != VMA_NULL);
4851 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
4858 case ALLOCATION_TYPE_DEDICATED:
4859 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
4860 return m_DedicatedAllocation.m_pMappedData;
4867 bool VmaAllocation_T::CanBecomeLost()
const 4871 case ALLOCATION_TYPE_BLOCK:
4872 return m_BlockAllocation.m_CanBecomeLost;
4873 case ALLOCATION_TYPE_DEDICATED:
4881 VmaPool VmaAllocation_T::GetPool()
const 4883 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4884 return m_BlockAllocation.m_hPool;
// Tries to atomically mark this allocation lost. Fails (elided return paths)
// when it is already lost or was used within the last frameInUseCount frames;
// CAS-loop structure handles concurrent updates of the last-use frame index.
4887 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4889 VMA_ASSERT(CanBecomeLost());
4895 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
4898 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
4903 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
4909 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
// Releases the owned user-data string copy (see SetUserData).
4919 void VmaAllocation_T::FreeUserDataString(
VmaAllocator hAllocator)
4921 VMA_ASSERT(IsUserDataString());
4922 if(m_pUserData != VMA_NULL)
4924 char*
const oldStr = (
char*)m_pUserData;
4925 const size_t oldStrLen = strlen(oldStr);
// +1 matches the NUL byte included at allocation time.
4926 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
4927 m_pUserData = VMA_NULL;
// Map/unmap reference counting. The low 7 bits of m_MapCount are the ref count
// (capped at 0x7F); the top bit is the persistent-map flag and is masked out.
4931 void VmaAllocation_T::BlockAllocMap()
4933 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4935 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4941 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
4945 void VmaAllocation_T::BlockAllocUnmap()
4947 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4949 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4955 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// Dedicated map: if already mapped, bump the ref count and return the cached
// pointer; otherwise call vkMapMemory and cache the result.
4959 VkResult VmaAllocation_T::DedicatedAllocMap(
VmaAllocator hAllocator,
void** ppData)
4961 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4965 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4967 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
4968 *ppData = m_DedicatedAllocation.m_pMappedData;
4974 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
4975 return VK_ERROR_MEMORY_MAP_FAILED;
4980 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4981 hAllocator->m_hDevice,
4982 m_DedicatedAllocation.m_hMemory,
4987 if(result == VK_SUCCESS)
4989 m_DedicatedAllocation.m_pMappedData = *ppData;
// Dedicated unmap: when the last reference drops, clears the cached pointer
// and calls vkUnmapMemory.
4996 void VmaAllocation_T::DedicatedAllocUnmap(
VmaAllocator hAllocator)
4998 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
5000 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
5005 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
5006 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
5007 hAllocator->m_hDevice,
5008 m_DedicatedAllocation.m_hMemory);
5013 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// Serializes one VmaStatInfo as a JSON object: counters, byte totals, and
// min/avg/max sub-objects for allocation and unused-range sizes. The
// WriteNumber(...) calls that follow each key are elided in this dump.
5017 #if VMA_STATS_STRING_ENABLED 5020 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
5029 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
5033 json.WriteString(
"Blocks");
5036 json.WriteString(
"Allocations");
5039 json.WriteString(
"UnusedRanges");
5042 json.WriteString(
"UsedBytes");
5045 json.WriteString(
"UnusedBytes");
5050 json.WriteString(
"AllocationSize");
5051 json.BeginObject(
true);
5052 json.WriteString(
"Min");
5054 json.WriteString(
"Avg");
5056 json.WriteString(
"Max");
5063 json.WriteString(
"UnusedRangeSize");
5064 json.BeginObject(
true);
5065 json.WriteString(
"Min");
5067 json.WriteString(
"Avg");
5069 json.WriteString(
"Max");
// Comparator for m_FreeSuballocationsBySize: orders iterators by suballocation
// size; the second overload enables binary search against a raw size key.
5077 #endif // #if VMA_STATS_STRING_ENABLED 5079 struct VmaSuballocationItemSizeLess
5082 const VmaSuballocationList::iterator lhs,
5083 const VmaSuballocationList::iterator rhs)
const 5085 return lhs->size < rhs->size;
5088 const VmaSuballocationList::iterator lhs,
5089 VkDeviceSize rhsSize)
const 5091 return lhs->size < rhsSize;
// Containers use the allocator's callbacks via VmaStlAllocator.
5098 VmaBlockMetadata::VmaBlockMetadata(
VmaAllocator hAllocator) :
5102 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
5103 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
5107 VmaBlockMetadata::~VmaBlockMetadata()
// Init: the whole block starts as a single FREE suballocation covering
// [0, size), registered in the by-size index.
5111 void VmaBlockMetadata::Init(VkDeviceSize size)
5115 m_SumFreeSize = size;
5117 VmaSuballocation suballoc = {};
5118 suballoc.offset = 0;
5119 suballoc.size = size;
5120 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5121 suballoc.hAllocation = VK_NULL_HANDLE;
5123 m_Suballocations.push_back(suballoc);
// NOTE(review): upstream decrements this end() iterator before registering it
// (the --suballocItem line is elided here).
5124 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
5126 m_FreeSuballocationsBySize.push_back(suballocItem);
// Validate(): walks the suballocation list recomputing offset/free counters and
// cross-checks every invariant: contiguous offsets, no two adjacent free
// ranges, hAllocation null iff free, allocation offset/size consistent with the
// list entry, and the by-size index complete and sorted ascending. The early
// "return false" lines for each failed check are elided in this dump.
5129 bool VmaBlockMetadata::Validate()
const 5131 if(m_Suballocations.empty())
5137 VkDeviceSize calculatedOffset = 0;
5139 uint32_t calculatedFreeCount = 0;
5141 VkDeviceSize calculatedSumFreeSize = 0;
5144 size_t freeSuballocationsToRegister = 0;
5146 bool prevFree =
false;
5148 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5149 suballocItem != m_Suballocations.cend();
5152 const VmaSuballocation& subAlloc = *suballocItem;
// Offsets must be contiguous.
5155 if(subAlloc.offset != calculatedOffset)
5160 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Adjacent free ranges must have been merged.
5162 if(prevFree && currFree)
5167 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
5174 calculatedSumFreeSize += subAlloc.size;
5175 ++calculatedFreeCount;
// Only free ranges at or above the registration threshold appear in the index.
5176 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5178 ++freeSuballocationsToRegister;
5183 if(subAlloc.hAllocation->GetOffset() != subAlloc.offset)
5187 if(subAlloc.hAllocation->GetSize() != subAlloc.size)
5193 calculatedOffset += subAlloc.size;
5194 prevFree = currFree;
5199 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
// Index must reference only free ranges and be sorted by size ascending.
5204 VkDeviceSize lastSize = 0;
5205 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
5207 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
5210 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
5215 if(suballocItem->size < lastSize)
5220 lastSize = suballocItem->size;
5224 if(!ValidateFreeSuballocationList() ||
5225 (calculatedOffset != m_Size) ||
5226 (calculatedSumFreeSize != m_SumFreeSize) ||
5227 (calculatedFreeCount != m_FreeCount))
// Index is sorted ascending, so the largest free range is at the back.
5235 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 5237 if(!m_FreeSuballocationsBySize.empty())
5239 return m_FreeSuballocationsBySize.back()->size;
// Empty == exactly one suballocation and it is free.
5247 bool VmaBlockMetadata::IsEmpty()
const 5249 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
5252 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 5256 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5268 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5269 suballocItem != m_Suballocations.cend();
5272 const VmaSuballocation& suballoc = *suballocItem;
5273 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Accumulates this block's totals into pool-level statistics.
5286 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 5288 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5290 inoutStats.
size += m_Size;
// JSON dump of this block: totals plus one object per suballocation.
5297 #if VMA_STATS_STRING_ENABLED 5299 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 5303 json.WriteString(
"TotalBytes");
5304 json.WriteNumber(m_Size);
5306 json.WriteString(
"UnusedBytes");
5307 json.WriteNumber(m_SumFreeSize);
5309 json.WriteString(
"Allocations");
5310 json.WriteNumber((uint64_t)m_Suballocations.size() - m_FreeCount);
5312 json.WriteString(
"UnusedRanges");
5313 json.WriteNumber(m_FreeCount);
5315 json.WriteString(
"Suballocations");
5318 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5319 suballocItem != m_Suballocations.cend();
5320 ++suballocItem, ++i)
5322 json.BeginObject(
true);
5324 json.WriteString(
"Type");
5325 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
5327 json.WriteString(
"Size");
5328 json.WriteNumber(suballocItem->size);
5330 json.WriteString(
"Offset");
5331 json.WriteNumber(suballocItem->offset);
// User data: emitted verbatim for string mode, as a pointer otherwise.
5333 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
5335 const void* pUserData = suballocItem->hAllocation->GetUserData();
5336 if(pUserData != VMA_NULL)
5338 json.WriteString(
"UserData");
5339 if(suballocItem->hAllocation->IsUserDataString())
5341 json.WriteString((
const char*)pUserData);
5346 json.ContinueString_Pointer(pUserData);
// Fast path for an empty block: the single free range starting at offset 0.
5359 #endif // #if VMA_STATS_STRING_ENABLED 5371 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
5373 VMA_ASSERT(IsEmpty());
5374 pAllocationRequest->offset = 0;
5375 pAllocationRequest->sumFreeSize = m_SumFreeSize;
5376 pAllocationRequest->sumItemSize = 0;
5377 pAllocationRequest->item = m_Suballocations.begin();
5378 pAllocationRequest->itemsToMakeLostCount = 0;
// Finds where a new allocation can go. Strategy: (1) best-fit via binary search
// in the by-size free list (the bufferImageGranularity path that follows
// searches worst-to-best instead); (2) when canMakeOtherLost, scan all
// suballocations and pick the candidate with the lowest CalcCost().
5381 bool VmaBlockMetadata::CreateAllocationRequest(
5382 uint32_t currentFrameIndex,
5383 uint32_t frameInUseCount,
5384 VkDeviceSize bufferImageGranularity,
5385 VkDeviceSize allocSize,
5386 VkDeviceSize allocAlignment,
5387 VmaSuballocationType allocType,
5388 bool canMakeOtherLost,
5389 VmaAllocationRequest* pAllocationRequest)
5391 VMA_ASSERT(allocSize > 0);
5392 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5393 VMA_ASSERT(pAllocationRequest != VMA_NULL);
5394 VMA_HEAVY_ASSERT(Validate());
// Early out: without making others lost, total free space must at least cover the request.
5397 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
5403 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
5404 if(freeSuballocCount > 0)
// Best-fit: first free range whose size is not less than the request.
5409 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5410 m_FreeSuballocationsBySize.data(),
5411 m_FreeSuballocationsBySize.data() + freeSuballocCount,
5413 VmaSuballocationItemSizeLess());
5414 size_t index = it - m_FreeSuballocationsBySize.data();
5415 for(; index < freeSuballocCount; ++index)
5420 bufferImageGranularity,
5424 m_FreeSuballocationsBySize[index],
5426 &pAllocationRequest->offset,
5427 &pAllocationRequest->itemsToMakeLostCount,
5428 &pAllocationRequest->sumFreeSize,
5429 &pAllocationRequest->sumItemSize))
5431 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Alternate search order: largest free range first (iterate the sorted index backwards).
5439 for(
size_t index = freeSuballocCount; index--; )
5444 bufferImageGranularity,
5448 m_FreeSuballocationsBySize[index],
5450 &pAllocationRequest->offset,
5451 &pAllocationRequest->itemsToMakeLostCount,
5452 &pAllocationRequest->sumFreeSize,
5453 &pAllocationRequest->sumItemSize))
5455 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Brute-force pass allowed to evict: consider every free-or-losable start
// position and keep the cheapest plan (fewest bytes/allocations to lose).
5462 if(canMakeOtherLost)
5466 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
5467 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
5469 VmaAllocationRequest tmpAllocRequest = {};
5470 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
5471 suballocIt != m_Suballocations.end();
5474 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
5475 suballocIt->hAllocation->CanBecomeLost())
5480 bufferImageGranularity,
5486 &tmpAllocRequest.offset,
5487 &tmpAllocRequest.itemsToMakeLostCount,
5488 &tmpAllocRequest.sumFreeSize,
5489 &tmpAllocRequest.sumItemSize))
5491 tmpAllocRequest.item = suballocIt;
5493 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
5495 *pAllocationRequest = tmpAllocRequest;
// Success iff some candidate replaced the VK_WHOLE_SIZE sentinel.
5501 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// VmaBlockMetadata::MakeRequestedAllocationsLost — walks forward from the request
// item, marking itemsToMakeLostCount non-free allocations as lost (freeing their
// suballocations) so the previously computed request becomes usable.
// Returns false (lines lost in extraction) if some allocation refuses MakeLost().
5510 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
5511 uint32_t currentFrameIndex,
5512 uint32_t frameInUseCount,
5513 VmaAllocationRequest* pAllocationRequest)
5515 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Skip over already-free suballocations; only occupied ones are made lost.
5517 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
5519 ++pAllocationRequest->item;
5521 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5522 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
5523 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
5524 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; it returns the resulting free item.
5526 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
5527 --pAllocationRequest->itemsToMakeLostCount;
5535 VMA_HEAVY_ASSERT(Validate());
5536 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5537 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// VmaBlockMetadata::MakeAllocationsLost — tries to make every lost-able allocation
// in this block lost; returns how many were actually freed.
5542 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5544 uint32_t lostAllocationCount = 0;
5545 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
5546 it != m_Suballocations.end();
5549 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
5550 it->hAllocation->CanBecomeLost() &&
5551 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation returns the merged free item; iteration continues from it.
5553 it = FreeSuballocation(it);
5554 ++lostAllocationCount;
5557 return lostAllocationCount;
// VmaBlockMetadata::Alloc — commits a previously computed VmaAllocationRequest:
// carves the target free suballocation into [paddingBegin][alloc][paddingEnd],
// re-registering the padding pieces as free entries and updating free counters.
5560 void VmaBlockMetadata::Alloc(
5561 const VmaAllocationRequest& request,
5562 VmaSuballocationType type,
5563 VkDeviceSize allocSize,
5566 VMA_ASSERT(request.item != m_Suballocations.end());
5567 VmaSuballocation& suballoc = *request.item;
5569 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5571 VMA_ASSERT(request.offset >= suballoc.offset);
// Alignment may shift the allocation inside the free range, leaving padding
// before (paddingBegin) and/or after (paddingEnd) the allocated bytes.
5572 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
5573 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
5574 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The item leaves the free-by-size list before being repurposed.
5578 UnregisterFreeSuballocation(request.item);
5580 suballoc.offset = request.offset;
5581 suballoc.size = allocSize;
5582 suballoc.type = type;
5583 suballoc.hAllocation = hAllocation;
// Insert trailing padding as a new FREE suballocation after the allocation.
5588 VmaSuballocation paddingSuballoc = {};
5589 paddingSuballoc.offset = request.offset + allocSize;
5590 paddingSuballoc.size = paddingEnd;
5591 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5592 VmaSuballocationList::iterator next = request.item;
5594 const VmaSuballocationList::iterator paddingEndItem =
5595 m_Suballocations.insert(next, paddingSuballoc);
5596 RegisterFreeSuballocation(paddingEndItem);
// Insert leading padding as a new FREE suballocation before the allocation.
5602 VmaSuballocation paddingSuballoc = {};
5603 paddingSuballoc.offset = request.offset - paddingBegin;
5604 paddingSuballoc.size = paddingBegin;
5605 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5606 const VmaSuballocationList::iterator paddingBeginItem =
5607 m_Suballocations.insert(request.item, paddingSuballoc);
5608 RegisterFreeSuballocation(paddingBeginItem);
// One free range consumed; padding pieces re-add to the count (the
// increments for paddingBegin/paddingEnd were on lines lost in extraction).
5612 m_FreeCount = m_FreeCount - 1;
5613 if(paddingBegin > 0)
5621 m_SumFreeSize -= allocSize;
// Body of VmaBlockMetadata::Free(allocation) — the signature line was lost in
// extraction. Linear-scans the suballocation list for the entry owning the given
// allocation and frees it; asserts if the allocation is not in this block.
5626 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5627 suballocItem != m_Suballocations.end();
5630 VmaSuballocation& suballoc = *suballocItem;
5631 if(suballoc.hAllocation == allocation)
5633 FreeSuballocation(suballocItem);
5634 VMA_HEAVY_ASSERT(Validate());
5638 VMA_ASSERT(0 &&
"Not found!");
// VmaBlockMetadata::FreeAtOffset — same as Free() but identifies the suballocation
// by its byte offset within the block; asserts if no suballocation starts there.
5641 void VmaBlockMetadata::FreeAtOffset(VkDeviceSize offset)
5643 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5644 suballocItem != m_Suballocations.end();
5647 VmaSuballocation& suballoc = *suballocItem;
5648 if(suballoc.offset == offset)
5650 FreeSuballocation(suballocItem);
5654 VMA_ASSERT(0 &&
"Not found!");
// VmaBlockMetadata::ValidateFreeSuballocationList — debug invariant check for
// m_FreeSuballocationsBySize: every entry must be FREE, at least the minimum
// registrable size, and sorted by ascending size. Failure returns false
// (return lines lost in extraction).
5657 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 5659 VkDeviceSize lastSize = 0;
5660 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5662 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5664 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
5669 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5674 if(it->size < lastSize)
5680 lastSize = it->size;
// VmaBlockMetadata::CheckAllocation — tests whether an allocation of
// allocSize/allocAlignment/allocType can start at (or be aligned up from)
// suballocItem. Outputs the final offset, how many existing allocations would
// have to be made lost, and the free/占 item byte sums used for cost comparison.
// Two major branches: canMakeOtherLost (may span multiple suballocations,
// counting lost-able ones) and the plain free-suballocation case.
// NOTE(review): extraction dropped braces and several return/continue lines.
5685 bool VmaBlockMetadata::CheckAllocation(
5686 uint32_t currentFrameIndex,
5687 uint32_t frameInUseCount,
5688 VkDeviceSize bufferImageGranularity,
5689 VkDeviceSize allocSize,
5690 VkDeviceSize allocAlignment,
5691 VmaSuballocationType allocType,
5692 VmaSuballocationList::const_iterator suballocItem,
5693 bool canMakeOtherLost,
5694 VkDeviceSize* pOffset,
5695 size_t* itemsToMakeLostCount,
5696 VkDeviceSize* pSumFreeSize,
5697 VkDeviceSize* pSumItemSize)
const 5699 VMA_ASSERT(allocSize > 0);
5700 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5701 VMA_ASSERT(suballocItem != m_Suballocations.cend());
5702 VMA_ASSERT(pOffset != VMA_NULL);
5704 *itemsToMakeLostCount = 0;
// ---- Branch 1: allocation may displace lost-able allocations. ----
5708 if(canMakeOtherLost)
5710 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5712 *pSumFreeSize = suballocItem->size;
// Occupied item: usable only if its allocation can be made lost and is stale
// (last use older than currentFrameIndex - frameInUseCount).
5716 if(suballocItem->hAllocation->CanBecomeLost() &&
5717 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5719 ++*itemsToMakeLostCount;
5720 *pSumItemSize = suballocItem->size;
// Reject if the remaining space to the end of the block cannot fit allocSize.
5729 if(m_Size - suballocItem->offset < allocSize)
5735 *pOffset = suballocItem->offset;
// Optional debug margin before the allocation (not applied at block start).
5738 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5740 *pOffset += VMA_DEBUG_MARGIN;
5744 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5745 *pOffset = VmaAlignUp(*pOffset, alignment);
// Respect bufferImageGranularity: if a previous suballocation of conflicting
// type shares the same "page", bump the offset up to a granularity boundary.
5749 if(bufferImageGranularity > 1)
5751 bool bufferImageGranularityConflict =
false;
5752 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5753 while(prevSuballocItem != m_Suballocations.cbegin())
5756 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5757 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5759 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5761 bufferImageGranularityConflict =
true;
5769 if(bufferImageGranularityConflict)
5771 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)
5777 if(*pOffset >= suballocItem->offset + suballocItem->size)
5783 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
5786 VmaSuballocationList::const_iterator next = suballocItem;
5788 const VkDeviceSize requiredEndMargin =
5789 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5791 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
5793 if(suballocItem->offset + totalSize > m_Size)
// Walk forward over following suballocations until totalSize is covered,
// accumulating free bytes and lost-able item bytes along the way.
5800 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
5801 if(totalSize > suballocItem->size)
5803 VkDeviceSize remainingSize = totalSize - suballocItem->size;
5804 while(remainingSize > 0)
5807 if(lastSuballocItem == m_Suballocations.cend())
5811 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5813 *pSumFreeSize += lastSuballocItem->size;
5817 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
5818 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
5819 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5821 ++*itemsToMakeLostCount;
5822 *pSumItemSize += lastSuballocItem->size;
5829 remainingSize = (lastSuballocItem->size < remainingSize) ?
5830 remainingSize - lastSuballocItem->size : 0;
// Check granularity conflicts with following suballocations too; conflicting
// stale allocations on the same page must also be made lost.
5836 if(bufferImageGranularity > 1)
5838 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
5840 while(nextSuballocItem != m_Suballocations.cend())
5842 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5843 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5845 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5847 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
5848 if(nextSuballoc.hAllocation->CanBecomeLost() &&
5849 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5851 ++*itemsToMakeLostCount;
// ---- Branch 2: plain case — suballocItem itself must be a FREE range. ----
5870 const VmaSuballocation& suballoc = *suballocItem;
5871 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5873 *pSumFreeSize = suballoc.size;
5876 if(suballoc.size < allocSize)
5882 *pOffset = suballoc.offset;
5885 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5887 *pOffset += VMA_DEBUG_MARGIN;
5891 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5892 *pOffset = VmaAlignUp(*pOffset, alignment);
// Same backward granularity-conflict scan as in branch 1.
5896 if(bufferImageGranularity > 1)
5898 bool bufferImageGranularityConflict =
false;
5899 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5900 while(prevSuballocItem != m_Suballocations.cbegin())
5903 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5904 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5906 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5908 bufferImageGranularityConflict =
true;
5916 if(bufferImageGranularityConflict)
5918 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5923 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
5926 VmaSuballocationList::const_iterator next = suballocItem;
5928 const VkDeviceSize requiredEndMargin =
5929 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Fail if padding + allocation + end margin exceed this free range.
5932 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward granularity-conflict scan; in this branch a conflict fails the check
// outright (no eviction possible) — the return line was lost in extraction.
5939 if(bufferImageGranularity > 1)
5941 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
5943 while(nextSuballocItem != m_Suballocations.cend())
5945 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5946 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5948 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// VmaBlockMetadata::MergeFreeWithNext — coalesces a FREE suballocation with the
// FREE suballocation immediately after it (both asserted FREE); the next item is
// absorbed into `item` and erased from the list.
5967 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
5969 VMA_ASSERT(item != m_Suballocations.end());
5970 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5972 VmaSuballocationList::iterator nextItem = item;
5974 VMA_ASSERT(nextItem != m_Suballocations.end());
5975 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
5977 item->size += nextItem->size;
5979 m_Suballocations.erase(nextItem);
// VmaBlockMetadata::FreeSuballocation — marks a suballocation FREE, updates the
// free-byte total, merges with adjacent free neighbors (updating the free-by-size
// registry), and returns the iterator of the resulting free suballocation.
5982 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
5985 VmaSuballocation& suballoc = *suballocItem;
5986 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5987 suballoc.hAllocation = VK_NULL_HANDLE;
5991 m_SumFreeSize += suballoc.size;
// Decide whether the previous/next neighbors are FREE and thus mergeable.
5994 bool mergeWithNext =
false;
5995 bool mergeWithPrev =
false;
5997 VmaSuballocationList::iterator nextItem = suballocItem;
5999 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
6001 mergeWithNext =
true;
6004 VmaSuballocationList::iterator prevItem = suballocItem;
6005 if(suballocItem != m_Suballocations.begin())
6008 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
6010 mergeWithPrev =
true;
// Merged neighbors must leave the size-sorted registry before their size changes.
6016 UnregisterFreeSuballocation(nextItem);
6017 MergeFreeWithNext(suballocItem);
// Merging into prev changes prev's size, so it is re-registered afterwards.
6022 UnregisterFreeSuballocation(prevItem);
6023 MergeFreeWithNext(prevItem);
6024 RegisterFreeSuballocation(prevItem);
6029 RegisterFreeSuballocation(suballocItem);
6030 return suballocItem;
// VmaBlockMetadata::RegisterFreeSuballocation — inserts a FREE suballocation into
// m_FreeSuballocationsBySize (sorted by size), but only if it is at least
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER; tiny fragments stay unindexed.
6034 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
6036 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6037 VMA_ASSERT(item->size > 0);
6041 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
6043 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6045 if(m_FreeSuballocationsBySize.empty())
6047 m_FreeSuballocationsBySize.push_back(item);
6051 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// VmaBlockMetadata::UnregisterFreeSuballocation — removes a FREE suballocation
// from the size-sorted registry. Binary-searches to the first entry of equal
// size, then scans the run of equal-size entries for the exact iterator;
// asserts if the item should be registered but is not found.
6059 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
6061 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
6062 VMA_ASSERT(item->size > 0);
6066 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Items below the registration threshold were never added — nothing to remove.
6068 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
6070 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
6071 m_FreeSuballocationsBySize.data(),
6072 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
6074 VmaSuballocationItemSizeLess());
6075 for(
size_t index = it - m_FreeSuballocationsBySize.data();
6076 index < m_FreeSuballocationsBySize.size();
6079 if(m_FreeSuballocationsBySize[index] == item)
6081 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Still inside the equal-size run, otherwise the item is missing.
6084 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
6086 VMA_ASSERT(0 &&
"Not found.");
// VmaDeviceMemoryMapping — reference-counted CPU mapping of one VkDeviceMemory.
// Map/Unmap keep m_MapCount so nested mappings reuse a single vkMapMemory call;
// access is serialized by m_Mutex (honoring the allocator's m_UseMutex flag).
6095 VmaDeviceMemoryMapping::VmaDeviceMemoryMapping() :
6097 m_pMappedData(VMA_NULL)
// Destructor: the block must be fully unmapped before destruction.
6101 VmaDeviceMemoryMapping::~VmaDeviceMemoryMapping()
6103 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
// Map: if already mapped, bump the count and return the cached pointer;
// otherwise call vkMapMemory through the allocator's function table.
6106 VkResult VmaDeviceMemoryMapping::Map(
VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count,
void **ppData)
6113 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6116 m_MapCount += count;
6117 VMA_ASSERT(m_pMappedData != VMA_NULL);
6118 if(ppData != VMA_NULL)
6120 *ppData = m_pMappedData;
6126 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
6127 hAllocator->m_hDevice,
6133 if(result == VK_SUCCESS)
6135 if(ppData != VMA_NULL)
6137 *ppData = m_pMappedData;
// Unmap: decrement the count; the real vkUnmapMemory happens only when it
// reaches zero. Unbalanced Unmap calls trigger the assert below.
6145 void VmaDeviceMemoryMapping::Unmap(
VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count)
6152 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
6153 if(m_MapCount >= count)
6155 m_MapCount -= count;
6158 m_pMappedData = VMA_NULL;
6159 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, hMemory);
6164 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// VmaDeviceMemoryBlock — wraps one VkDeviceMemory plus its suballocation
// metadata (m_Metadata) and mapping state (m_Mapping). Created unbound; Init()
// attaches the actual memory, Destroy() returns it to the allocator.
6171 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(
VmaAllocator hAllocator) :
6172 m_MemoryTypeIndex(UINT32_MAX),
6173 m_hMemory(VK_NULL_HANDLE),
6174 m_Metadata(hAllocator)
// Init: bind freshly allocated VkDeviceMemory of newSize to this block.
6178 void VmaDeviceMemoryBlock::Init(
6179 uint32_t newMemoryTypeIndex,
6180 VkDeviceMemory newMemory,
6181 VkDeviceSize newSize)
6183 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
6185 m_MemoryTypeIndex = newMemoryTypeIndex;
6186 m_hMemory = newMemory;
6188 m_Metadata.Init(newSize);
// Destroy: block must be empty; frees the VkDeviceMemory via the allocator.
6191 void VmaDeviceMemoryBlock::Destroy(
VmaAllocator allocator)
6195 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
6197 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
6198 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
6199 m_hMemory = VK_NULL_HANDLE;
// Validate: sanity-check handle/size, then delegate to metadata validation.
6202 bool VmaDeviceMemoryBlock::Validate()
const 6204 if((m_hMemory == VK_NULL_HANDLE) ||
6205 (m_Metadata.GetSize() == 0))
6210 return m_Metadata.Validate();
// Map/Unmap simply forward to the ref-counted VmaDeviceMemoryMapping.
6213 VkResult VmaDeviceMemoryBlock::Map(
VmaAllocator hAllocator, uint32_t count,
void** ppData)
6215 return m_Mapping.Map(hAllocator, m_hMemory, count, ppData);
6218 void VmaDeviceMemoryBlock::Unmap(
VmaAllocator hAllocator, uint32_t count)
6220 m_Mapping.Unmap(hAllocator, m_hMemory, count);
// NOTE(review): heavily truncated fragments — most statement lines were lost in
// extraction. Visible pieces: zero-initialization of a VmaStatInfo (presumably
// VmaInitStatInfo — TODO confirm against full source), the signature of
// VmaPostprocessCalcStatInfo, and the VmaPool_T constructor forwarding the
// VmaPoolCreateInfo fields into its internal VmaBlockVector, plus the VmaPool_T
// destructor signature. Code left byte-identical.
6225 memset(&outInfo, 0,
sizeof(outInfo));
6244 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool_T ctor: passes pool creation parameters to the block vector.
6252 VmaPool_T::VmaPool_T(
6257 createInfo.memoryTypeIndex,
6258 createInfo.blockSize,
6259 createInfo.minBlockCount,
6260 createInfo.maxBlockCount,
6262 createInfo.frameInUseCount,
6267 VmaPool_T::~VmaPool_T()
// VmaBlockVector constructor — stores the configuration for a sequence of
// VmaDeviceMemoryBlock of one memory type (either the default vector for a
// memory type or a custom pool, per isCustomPool) and starts with no blocks,
// no empty-block flag, and no defragmentator.
6271 #if VMA_STATS_STRING_ENABLED 6273 #endif // #if VMA_STATS_STRING_ENABLED 6275 VmaBlockVector::VmaBlockVector(
6277 uint32_t memoryTypeIndex,
6278 VkDeviceSize preferredBlockSize,
6279 size_t minBlockCount,
6280 size_t maxBlockCount,
6281 VkDeviceSize bufferImageGranularity,
6282 uint32_t frameInUseCount,
6283 bool isCustomPool) :
6284 m_hAllocator(hAllocator),
6285 m_MemoryTypeIndex(memoryTypeIndex),
6286 m_PreferredBlockSize(preferredBlockSize),
6287 m_MinBlockCount(minBlockCount),
6288 m_MaxBlockCount(maxBlockCount),
6289 m_BufferImageGranularity(bufferImageGranularity),
6290 m_FrameInUseCount(frameInUseCount),
6291 m_IsCustomPool(isCustomPool),
// Block vector uses the allocator's callbacks for its own bookkeeping storage.
6292 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
6293 m_HasEmptyBlock(false),
6294 m_pDefragmentator(VMA_NULL)
// ~VmaBlockVector — the defragmentator must already be destroyed; each block
// frees its VkDeviceMemory and is then deleted.
6298 VmaBlockVector::~VmaBlockVector()
6300 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
6302 for(
size_t i = m_Blocks.size(); i--; )
6304 m_Blocks[i]->Destroy(m_hAllocator);
6305 vma_delete(m_hAllocator, m_Blocks[i]);
// CreateMinBlocks — pre-allocates m_MinBlockCount blocks of the preferred size,
// bailing out on the first failure (return lines lost in extraction).
6309 VkResult VmaBlockVector::CreateMinBlocks()
6311 for(
size_t i = 0; i < m_MinBlockCount; ++i)
6313 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
6314 if(res != VK_SUCCESS)
// GetPoolStats — accumulates per-block statistics into *pStats under the mutex.
6322 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
6330 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6332 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6334 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6336 VMA_HEAVY_ASSERT(pBlock->Validate());
6337 pBlock->m_Metadata.AddPoolStats(*pStats);
// Cap on retries of the "make other allocations lost" loop below.
6341 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// VmaBlockVector::Allocate — three-stage allocation strategy:
//   1) try to fit into an existing block,
//   2) create a new block (halving the size a few times for the default pool),
//   3) optionally evict lost-able allocations (canMakeOtherLost), retrying up
//      to VMA_ALLOCATION_TRY_COUNT times.
// Returns VK_ERROR_OUT_OF_DEVICE_MEMORY / VK_ERROR_TOO_MANY_OBJECTS on failure.
// NOTE(review): extraction dropped braces, some call arguments and returns.
6343 VkResult VmaBlockVector::Allocate(
6345 uint32_t currentFrameIndex,
6346 const VkMemoryRequirements& vkMemReq,
6348 VmaSuballocationType suballocType,
6354 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// --- Stage 1: search existing blocks for space (no eviction allowed here). ---
6358 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6360 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6361 VMA_ASSERT(pCurrBlock);
6362 VmaAllocationRequest currRequest = {};
6363 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6366 m_BufferImageGranularity,
6374 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistently-mapped allocations keep the block mapped (ref-counted).
6378 VkResult res = pCurrBlock->Map(m_hAllocator, 1, VMA_NULL);
6379 if(res != VK_SUCCESS)
6386 if(pCurrBlock->m_Metadata.IsEmpty())
6388 m_HasEmptyBlock =
false;
6391 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6392 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
6393 (*pAllocation)->InitBlockAllocation(
6402 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
6403 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6404 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// --- Stage 2: create a new block if the block-count limit allows. ---
6409 const bool canCreateNewBlock =
6411 (m_Blocks.size() < m_MaxBlockCount);
6414 if(canCreateNewBlock)
6417 VkDeviceSize newBlockSize = m_PreferredBlockSize;
6418 uint32_t newBlockSizeShift = 0;
6419 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Default (non-custom) pools heuristically start with a smaller block when the
// existing blocks are small and the request fits comfortably.
6423 if(m_IsCustomPool ==
false)
6426 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
6427 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
6429 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6430 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= vkMemReq.size * 2)
6432 newBlockSize = smallerNewBlockSize;
6433 ++newBlockSizeShift;
6442 size_t newBlockIndex = 0;
6443 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On device-memory failure, retry with progressively halved block sizes.
6445 if(m_IsCustomPool ==
false)
6447 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
6449 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6450 if(smallerNewBlockSize >= vkMemReq.size)
6452 newBlockSize = smallerNewBlockSize;
6453 ++newBlockSizeShift;
6454 res = CreateBlock(newBlockSize, &newBlockIndex);
6463 if(res == VK_SUCCESS)
6465 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
6466 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
6470 res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
6471 if(res != VK_SUCCESS)
// Brand-new block: take the whole-block first allocation request.
6478 VmaAllocationRequest allocRequest;
6479 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
6480 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6481 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
6482 (*pAllocation)->InitBlockAllocation(
6485 allocRequest.offset,
6491 VMA_HEAVY_ASSERT(pBlock->Validate());
6492 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
6493 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// --- Stage 3: evict lost-able allocations and retry. ---
6501 if(canMakeOtherLost)
6503 uint32_t tryIndex = 0;
6504 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
6506 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
6507 VmaAllocationRequest bestRequest = {};
6508 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Pick the cheapest eviction plan across all blocks (cost 0 = free space).
6512 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6514 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6515 VMA_ASSERT(pCurrBlock);
6516 VmaAllocationRequest currRequest = {};
6517 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6520 m_BufferImageGranularity,
6527 const VkDeviceSize currRequestCost = currRequest.CalcCost();
6528 if(pBestRequestBlock == VMA_NULL ||
6529 currRequestCost < bestRequestCost)
6531 pBestRequestBlock = pCurrBlock;
6532 bestRequest = currRequest;
6533 bestRequestCost = currRequestCost;
6535 if(bestRequestCost == 0)
6543 if(pBestRequestBlock != VMA_NULL)
6547 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
6548 if(res != VK_SUCCESS)
// The eviction may race with frame progress; on failure the loop retries.
6554 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
6560 if(pBestRequestBlock->m_Metadata.IsEmpty())
6562 m_HasEmptyBlock =
false;
6565 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6566 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
6567 (*pAllocation)->InitBlockAllocation(
6576 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
6577 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6578 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
6592 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
6594 return VK_ERROR_TOO_MANY_OBJECTS;
6598 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// VmaBlockVector::Free — returns an allocation to its block. Keeps at most one
// empty block alive (heuristic to avoid vkAllocate/vkFree churn): a second empty
// block, or one beyond m_MinBlockCount, is destroyed outside the mutex scope.
6601 void VmaBlockVector::Free(
6604 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
6608 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6610 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
// Drop the persistent-map reference this allocation held on the block.
6612 if(hAllocation->IsPersistentMap())
6614 pBlock->m_Mapping.Unmap(m_hAllocator, pBlock->m_hMemory, 1);
6617 pBlock->m_Metadata.Free(hAllocation);
6618 VMA_HEAVY_ASSERT(pBlock->Validate());
6620 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
6623 if(pBlock->m_Metadata.IsEmpty())
// Already have an empty block? Then this one is surplus — schedule deletion.
6626 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
6628 pBlockToDelete = pBlock;
6634 m_HasEmptyBlock =
true;
// This block is non-empty; if an empty block exists and sorting has moved it
// to the back, it can now be reclaimed.
6639 else if(m_HasEmptyBlock)
6641 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
6642 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
6644 pBlockToDelete = pLastBlock;
6645 m_Blocks.pop_back();
6646 m_HasEmptyBlock =
false;
6650 IncrementallySortBlocks();
// Destruction of VkDeviceMemory happens after releasing the mutex.
6655 if(pBlockToDelete != VMA_NULL)
6657 VMA_DEBUG_LOG(
" Deleted empty allocation");
6658 pBlockToDelete->Destroy(m_hAllocator);
6659 vma_delete(m_hAllocator, pBlockToDelete);
// CalcMaxBlockSize — largest existing block size, early-exiting once it reaches
// m_PreferredBlockSize (result init/return lines lost in extraction).
6663 size_t VmaBlockVector::CalcMaxBlockSize()
const 6666 for(
size_t i = m_Blocks.size(); i--; )
6668 result = VMA_MAX((uint64_t)result, (uint64_t)m_Blocks[i]->m_Metadata.GetSize());
6669 if(result >= m_PreferredBlockSize)
// Remove — erases the given block pointer from m_Blocks.
6677 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
6679 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6681 if(m_Blocks[blockIndex] == pBlock)
6683 VmaVectorRemove(m_Blocks, blockIndex);
// IncrementallySortBlocks — one bubble pass keeping blocks ordered by ascending
// free space, so allocation tries fuller blocks first.
6690 void VmaBlockVector::IncrementallySortBlocks()
6693 for(
size_t i = 1; i < m_Blocks.size(); ++i)
6695 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
6697 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// VmaBlockVector::CreateBlock — allocates VkDeviceMemory of blockSize for this
// vector's memory type, wraps it in a new VmaDeviceMemoryBlock appended to
// m_Blocks, and optionally reports its index via *pNewBlockIndex.
6703 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
6705 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6706 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
6707 allocInfo.allocationSize = blockSize;
6708 VkDeviceMemory mem = VK_NULL_HANDLE;
// Failure handling (early return on res < 0) was on lines lost in extraction.
6709 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
6718 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
6722 allocInfo.allocationSize);
6724 m_Blocks.push_back(pBlock);
6725 if(pNewBlockIndex != VMA_NULL)
6727 *pNewBlockIndex = m_Blocks.size() - 1;
// VmaBlockVector::PrintDetailedMap — emits this vector's configuration and each
// block's detailed map as JSON; custom pools and default vectors print slightly
// different key sets (the branch condition line was lost in extraction).
6733 #if VMA_STATS_STRING_ENABLED 6735 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
6737 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// Custom-pool style output: explicit memory type, block size, count limits.
6743 json.WriteString(
"MemoryTypeIndex");
6744 json.WriteNumber(m_MemoryTypeIndex);
6746 json.WriteString(
"BlockSize");
6747 json.WriteNumber(m_PreferredBlockSize);
6749 json.WriteString(
"BlockCount");
6750 json.BeginObject(
true);
6751 if(m_MinBlockCount > 0)
6753 json.WriteString(
"Min");
6754 json.WriteNumber((uint64_t)m_MinBlockCount);
6756 if(m_MaxBlockCount < SIZE_MAX)
6758 json.WriteString(
"Max");
6759 json.WriteNumber((uint64_t)m_MaxBlockCount);
6761 json.WriteString(
"Cur");
6762 json.WriteNumber((uint64_t)m_Blocks.size());
6765 if(m_FrameInUseCount > 0)
6767 json.WriteString(
"FrameInUseCount");
6768 json.WriteNumber(m_FrameInUseCount);
// Default-vector style output: only the preferred block size.
6773 json.WriteString(
"PreferredBlockSize");
6774 json.WriteNumber(m_PreferredBlockSize);
// Per-block detailed maps.
6777 json.WriteString(
"Blocks");
6779 for(
size_t i = 0; i < m_Blocks.size(); ++i)
6781 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
// VmaBlockVector::EnsureDefragmentator — lazily creates the per-vector
// VmaDefragmentator (constructor argument lines lost in extraction) and
// returns it.
6788 #endif // #if VMA_STATS_STRING_ENABLED 6790 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
6792 uint32_t currentFrameIndex)
6794 if(m_pDefragmentator == VMA_NULL)
6796 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
6802 return m_pDefragmentator;
// VmaBlockVector::Defragment — runs the previously created defragmentator under
// the mutex, accumulates moved-bytes/moved-allocation stats, shrinks the caller's
// remaining budgets, then destroys any blocks emptied by the moves (down to
// m_MinBlockCount), re-flagging m_HasEmptyBlock for survivors.
6805 VkResult VmaBlockVector::Defragment(
6807 VkDeviceSize& maxBytesToMove,
6808 uint32_t& maxAllocationsToMove)
6810 if(m_pDefragmentator == VMA_NULL)
6815 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6818 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
6821 if(pDefragmentationStats != VMA_NULL)
6823 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
6824 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
// The defragmentator must respect the budgets it was given.
6827 VMA_ASSERT(bytesMoved <= maxBytesToMove);
6828 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Reclaim blocks that became empty; iterate backwards so removal is safe.
6834 m_HasEmptyBlock =
false;
6835 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
6837 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
6838 if(pBlock->m_Metadata.IsEmpty())
6840 if(m_Blocks.size() > m_MinBlockCount)
6842 if(pDefragmentationStats != VMA_NULL)
6845 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
6848 VmaVectorRemove(m_Blocks, blockIndex);
6849 pBlock->Destroy(m_hAllocator);
6850 vma_delete(m_hAllocator, pBlock);
// Empty block retained (at or below min count): remember it exists.
6854 m_HasEmptyBlock =
true;
// VmaBlockVector::DestroyDefragmentator — deletes the lazily created
// defragmentator, if any, and resets the pointer.
6862 void VmaBlockVector::DestroyDefragmentator()
6864 if(m_pDefragmentator != VMA_NULL)
6866 vma_delete(m_hAllocator, m_pDefragmentator);
6867 m_pDefragmentator = VMA_NULL;
// VmaBlockVector::MakePoolAllocationsLost — makes every lost-able allocation in
// every block lost, optionally reporting the total count via *pLostAllocationCount.
6871 void VmaBlockVector::MakePoolAllocationsLost(
6872 uint32_t currentFrameIndex,
6873 size_t* pLostAllocationCount)
6875 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6876 size_t lostAllocationCount = 0;
6877 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6879 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6881 lostAllocationCount += pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
6883 if(pLostAllocationCount != VMA_NULL)
6885 *pLostAllocationCount = lostAllocationCount;
// VmaBlockVector::AddStats — folds every block's allocation statistics into the
// global totals, the per-memory-type bucket, and the per-heap bucket of *pStats.
6889 void VmaBlockVector::AddStats(
VmaStats* pStats)
6891 const uint32_t memTypeIndex = m_MemoryTypeIndex;
6892 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
6894 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6896 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6898 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6900 VMA_HEAVY_ASSERT(pBlock->Validate());
6902 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
6903 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6904 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6905 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructor: binds the defragmentator to one block vector and records the
// frame index at which the defragmentation pass runs. Both container members
// use the allocator's custom allocation callbacks via VmaStlAllocator.
// NOTE(review): the `VmaAllocator hAllocator` parameter line and the
// `m_BytesMoved(0)` initializer appear to have been dropped by this
// extraction — the initializer list references hAllocator, so the parameter
// exists in the original; confirm against upstream before editing.
6912 VmaDefragmentator::VmaDefragmentator(
6914 VmaBlockVector* pBlockVector,
6915 uint32_t currentFrameIndex) :
6916 m_hAllocator(hAllocator),
6917 m_pBlockVector(pBlockVector),
6918 m_CurrentFrameIndex(currentFrameIndex),
6920 m_AllocationsMoved(0),
6921 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
6922 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
// Destructor: frees every BlockInfo this defragmentator allocated in
// Defragment(). Iterates in reverse (size(); i--;) which is the idiom used
// throughout this file for destruction loops.
6926 VmaDefragmentator::~VmaDefragmentator()
6928 for(
size_t i = m_Blocks.size(); i--; )
6930 vma_delete(m_hAllocator, m_Blocks[i]);
// Registers one allocation as a candidate for defragmentation.
// pChanged (may be null) is an out-flag the move loop sets to VK_TRUE if the
// allocation is actually relocated.
6934 void VmaDefragmentator::AddAllocation(
VmaAllocation hAlloc, VkBool32* pChanged)
6936 AllocationInfo allocInfo;
6937 allocInfo.m_hAllocation = hAlloc;
6938 allocInfo.m_pChanged = pChanged;
6939 m_Allocations.push_back(allocInfo);
// Returns (via *ppMappedData) a CPU pointer to this block's memory, mapping it
// if necessary. Resolution order:
//   1. a mapping previously created by this defragmentator,
//   2. a mapping already held by the block itself (persistently mapped),
//   3. a fresh Map() whose pointer is cached in m_pMappedDataForDefragmentation
//      so Unmap() below can undo it.
// NOTE(review): the early `return VK_SUCCESS;` statements after cases 1 and 2,
// and the final `return res;`, were dropped by this extraction.
6942 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(
VmaAllocator hAllocator,
void** ppMappedData)
6945 if(m_pMappedDataForDefragmentation)
6947 *ppMappedData = m_pMappedDataForDefragmentation;
6952 if(m_pBlock->m_Mapping.GetMappedData())
6954 *ppMappedData = m_pBlock->m_Mapping.GetMappedData();
6959 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
6960 *ppMappedData = m_pMappedDataForDefragmentation;
// Releases the mapping created by EnsureMapping() (case 3 only): if this
// defragmentator mapped the block itself, balance that with one Unmap.
// Mappings that pre-existed on the block are deliberately left alone.
6964 void VmaDefragmentator::BlockInfo::Unmap(
VmaAllocator hAllocator)
6966 if(m_pMappedDataForDefragmentation != VMA_NULL)
6968 m_pBlock->Unmap(hAllocator, 1);
// One round of the defragmentation algorithm: repeatedly takes the current
// source allocation (walking blocks from the back of m_Blocks, i.e. the
// preferred-to-empty blocks) and tries to re-place it in an earlier block
// (dstBlockIndex <= srcBlockIndex). A move is performed by:
//   - creating an allocation request in the destination metadata,
//   - memcpy-ing the bytes between the two mapped blocks,
//   - Alloc() in the destination + FreeAtOffset() in the source,
//   - retargeting the VmaAllocation handle (ChangeBlockAllocation).
// Stops with VK_INCOMPLETE once either budget (maxBytesToMove /
// maxAllocationsToMove) would be exceeded; otherwise continues until no
// source allocations remain.
// NOTE(review): this extraction dropped many structural lines (the outer
// while(true), several returns/breaks, brace lines, and the memcpy call at
// original line 7058); treat the visible statements as the skeleton only.
6972 VkResult VmaDefragmentator::DefragmentRound(
6973 VkDeviceSize maxBytesToMove,
6974 uint32_t maxAllocationsToMove)
6976 if(m_Blocks.empty())
// Source cursor starts past-the-end of the last block's allocation list;
// SIZE_MAX is a sentinel meaning "reset to the back of the current block".
6981 size_t srcBlockIndex = m_Blocks.size() - 1;
6982 size_t srcAllocIndex = SIZE_MAX;
6988 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
6990 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
6993 if(srcBlockIndex == 0)
7000 srcAllocIndex = SIZE_MAX;
7005 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
7009 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
7010 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
7012 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
7013 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
7014 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
7015 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front; MoveMakesSense() rejects moves that
// would not compact (same block, later offset, or a later block).
7018 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
7020 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
7021 VmaAllocationRequest dstAllocRequest;
7022 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
7023 m_CurrentFrameIndex,
7024 m_pBlockVector->GetFrameInUseCount(),
7025 m_pBlockVector->GetBufferImageGranularity(),
7030 &dstAllocRequest) &&
7032 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
7034 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check happens before the move so limits are never exceeded.
7037 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
7038 (m_BytesMoved + size > maxBytesToMove))
7040 return VK_INCOMPLETE;
7043 void* pDstMappedData = VMA_NULL;
7044 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
7045 if(res != VK_SUCCESS)
7050 void* pSrcMappedData = VMA_NULL;
7051 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
7052 if(res != VK_SUCCESS)
// These are the memcpy arguments (dst, src, size); the memcpy call line
// itself was lost in extraction.
7059 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
7060 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
7061 static_cast<size_t>(size));
7063 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
7064 pSrcBlockInfo->m_pBlock->m_Metadata.FreeAtOffset(srcOffset);
7066 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
7068 if(allocInfo.m_pChanged != VMA_NULL)
7070 *allocInfo.m_pChanged = VK_TRUE;
7073 ++m_AllocationsMoved;
7074 m_BytesMoved += size;
7076 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance the source cursor: previous allocation in this block, else the
// previous block (resetting srcAllocIndex to the SIZE_MAX sentinel).
7084 if(srcAllocIndex > 0)
7090 if(srcBlockIndex > 0)
7093 srcAllocIndex = SIZE_MAX;
// Top-level driver for one defragmentation pass over the bound block vector:
//   1. builds a BlockInfo per device-memory block and sorts them by pointer so
//      allocations can be binned to their block via binary search,
//   2. distributes every registered (non-lost) allocation into its block's
//      list, then clears m_Allocations,
//   3. precomputes per-block flags and sorts allocations largest-first and
//      blocks by move-destination preference,
//   4. runs up to 2 DefragmentRound() passes within the given budgets,
//   5. unmaps any block mappings the rounds created.
// Returns the result of the last round (VK_SUCCESS or VK_INCOMPLETE / error).
7103 VkResult VmaDefragmentator::Defragment(
7104 VkDeviceSize maxBytesToMove,
7105 uint32_t maxAllocationsToMove)
7107 if(m_Allocations.empty())
7113 const size_t blockCount = m_pBlockVector->m_Blocks.size();
7114 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7116 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
7117 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
7118 m_Blocks.push_back(pBlockInfo);
7122 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
// Note: the loop variable is named blockIndex but actually indexes
// m_Allocations (allocCount bound) — it iterates allocations, not blocks.
7125 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
7127 AllocationInfo& allocInfo = m_Allocations[blockIndex];
// Allocations already lost are skipped — nothing to move.
7129 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7131 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
7132 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
7133 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
7135 (*it)->m_Allocations.push_back(allocInfo);
7143 m_Allocations.clear();
7145 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7147 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
7148 pBlockInfo->CalcHasNonMovableAllocations();
7149 pBlockInfo->SortAllocationsBySizeDescecnding();
7153 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// At most 2 rounds; stop early on VK_INCOMPLETE or error.
7156 VkResult result = VK_SUCCESS;
7157 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
7159 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
7163 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
7165 m_Blocks[blockIndex]->Unmap(m_hAllocator);
// Decides whether moving an allocation from (srcBlock, srcOffset) to
// (dstBlock, dstOffset) compacts memory: moves to an earlier block always
// make sense, to a later block never do, and within the same block only a
// move toward a lower offset qualifies.
// NOTE(review): the `return true;` / `return false;` lines for each branch
// were dropped by this extraction.
7171 bool VmaDefragmentator::MoveMakesSense(
7172 size_t dstBlockIndex, VkDeviceSize dstOffset,
7173 size_t srcBlockIndex, VkDeviceSize srcOffset)
7175 if(dstBlockIndex < srcBlockIndex)
7179 if(dstBlockIndex > srcBlockIndex)
7183 if(dstOffset < srcOffset)
// VmaAllocator_T constructor (the signature line `VmaAllocator_T::
// VmaAllocator_T(const VmaAllocatorCreateInfo* pCreateInfo) :` and several
// initializers were dropped by this extraction). Visible work:
//   - copies device/physical-device handles and (optional) allocation
//     callbacks from the create info,
//   - zero-initializes callback/property/vector member storage,
//   - defaults every heap's size limit to VK_WHOLE_SIZE (= unlimited), then
//     applies any per-heap limits from pCreateInfo->pHeapSizeLimit, clamping
//     the reported heap sizes accordingly,
//   - queries physical-device properties/memory properties through the
//     imported function pointers,
//   - creates one default VmaBlockVector and one dedicated-allocation vector
//     per memory type.
7196 m_hDevice(pCreateInfo->device),
7197 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
7198 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
7199 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
7200 m_PreferredLargeHeapBlockSize(0),
7201 m_PhysicalDevice(pCreateInfo->physicalDevice),
7202 m_CurrentFrameIndex(0),
7203 m_Pools(VmaStlAllocator<
VmaPool>(GetAllocationCallbacks()))
7207 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
7208 memset(&m_MemProps, 0,
sizeof(m_MemProps));
7209 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
7211 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
7212 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
7214 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
7216 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
7227 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
7228 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Apply user-specified per-heap limits (guarded in the original by a check
// that pHeapSizeLimit != NULL, dropped here).
7235 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
7237 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
7238 if(limit != VK_WHOLE_SIZE)
7240 m_HeapSizeLimit[heapIndex] = limit;
7241 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
7243 m_MemProps.memoryHeaps[heapIndex].size = limit;
7249 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7251 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
7253 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
7259 GetBufferImageGranularity(),
7264 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
// Destructor: asserts all user pools were destroyed first, then frees the
// per-memory-type dedicated-allocation vectors and block vectors in reverse
// order of creation.
7268 VmaAllocator_T::~VmaAllocator_T()
7270 VMA_ASSERT(m_Pools.empty());
7272 for(
size_t i = GetMemoryTypeCount(); i--; )
7274 vma_delete(
this, m_pDedicatedAllocations[i]);
7275 vma_delete(
this, m_pBlockVectors[i]);
// Fills m_VulkanFunctions:
//   1. When statically linked against Vulkan (VMA_STATIC_VULKAN_FUNCTIONS),
//      take the addresses of the global prototypes; the two
//      *MemoryRequirements2KHR entry points are fetched via
//      vkGetDeviceProcAddr because they come from the
//      VK_KHR_get_memory_requirements2 / dedicated_allocation extensions.
//   2. Then override any entry with a non-null pointer the user supplied in
//      pVulkanFunctions (so user-supplied pointers always win).
//   3. Finally assert that every required entry point is resolved; the 2KHR
//      pair is only required when dedicated allocation is enabled.
7279 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
7281 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 7282 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
7283 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
7284 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
7285 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
7286 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
7287 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
7288 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
7289 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
7290 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
7291 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
7292 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
7293 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
7294 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
7295 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
7296 if(m_UseKhrDedicatedAllocation)
7298 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
7299 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
7300 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
7301 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
7303 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 7305 #define VMA_COPY_IF_NOT_NULL(funcName) \ 7306 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 7308 if(pVulkanFunctions != VMA_NULL)
7310 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
7311 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
7312 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
7313 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
7314 VMA_COPY_IF_NOT_NULL(vkMapMemory);
7315 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
7316 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
7317 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
7318 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
7319 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
7320 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
7321 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
7322 VMA_COPY_IF_NOT_NULL(vkCreateImage);
7323 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
7324 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
7325 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
7328 #undef VMA_COPY_IF_NOT_NULL 7332 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
7333 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
7334 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
7335 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
7336 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
7337 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
7338 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
7339 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
7340 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
7341 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
7342 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
7343 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
7344 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
7345 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
7346 if(m_UseKhrDedicatedAllocation)
7348 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
7349 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
// Chooses the default device-memory block size for one memory type:
// 1/8 of the heap for "small" heaps (<= VMA_SMALL_HEAP_MAX_SIZE), otherwise
// the configured preferred large-heap block size.
7353 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
7355 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7356 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
7357 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
7358 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates memory from one specific memory type. Strategy:
//   1. If a dedicated allocation is preferred (forced by debug flag, required
//      by the caller, or the request is larger than half the preferred block
//      size) and allowed, go straight to AllocateDedicatedMemory().
//   2. Otherwise try suballocation from the type's default block vector.
//   3. On failure, fall back to a dedicated allocation unless the caller
//      forbade new device allocations (NEVER_ALLOCATE), in which case
//      VK_ERROR_OUT_OF_DEVICE_MEMORY is returned.
// NOTE(review): many argument lines inside the AllocateDedicatedMemory /
// blockVector->Allocate calls, plus the createInfo/flag checks, were dropped
// by this extraction — consult upstream before modifying.
7361 VkResult VmaAllocator_T::AllocateMemoryOfType(
7362 const VkMemoryRequirements& vkMemReq,
7363 bool dedicatedAllocation,
7364 VkBuffer dedicatedBuffer,
7365 VkImage dedicatedImage,
7367 uint32_t memTypeIndex,
7368 VmaSuballocationType suballocType,
7371 VMA_ASSERT(pAllocation != VMA_NULL);
7372 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Mapped allocations only make sense on HOST_VISIBLE memory; this check
// strips/rejects the mapped flag for non-host-visible types.
7378 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
7383 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
7384 VMA_ASSERT(blockVector);
7386 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
7387 bool preferDedicatedMemory =
7388 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
7389 dedicatedAllocation ||
// Heuristic: requests over half a block would waste block space.
7391 vkMemReq.size > preferredBlockSize / 2;
7393 if(preferDedicatedMemory &&
7395 finalCreateInfo.
pool == VK_NULL_HANDLE)
7404 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7408 return AllocateDedicatedMemory(
7422 VkResult res = blockVector->Allocate(
7424 m_CurrentFrameIndex.load(),
7429 if(res == VK_SUCCESS)
7437 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7441 res = AllocateDedicatedMemory(
7447 finalCreateInfo.pUserData,
7451 if(res == VK_SUCCESS)
7454 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
7460 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Creates a standalone (non-suballocated) VkDeviceMemory for one resource:
//   - chains VkMemoryDedicatedAllocateInfoKHR when the dedicated-allocation
//     extension is in use and a buffer XOR image handle was supplied,
//   - allocates through AllocateVulkanMemory() (which enforces heap limits),
//   - optionally maps the memory persistently (mapping branch guarded by the
//     MAPPED flag in the original; the guard line was dropped here),
//   - wraps the result in a VmaAllocation_T and registers it in the sorted
//     per-memory-type dedicated-allocations vector (under its mutex).
7467 VkResult VmaAllocator_T::AllocateDedicatedMemory(
7469 VmaSuballocationType suballocType,
7470 uint32_t memTypeIndex,
7472 bool isUserDataString,
7474 VkBuffer dedicatedBuffer,
7475 VkImage dedicatedImage,
7478 VMA_ASSERT(pAllocation);
7480 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
7481 allocInfo.memoryTypeIndex = memTypeIndex;
7482 allocInfo.allocationSize = size;
7484 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
7485 if(m_UseKhrDedicatedAllocation)
7487 if(dedicatedBuffer != VK_NULL_HANDLE)
7489 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
7490 dedicatedAllocInfo.buffer = dedicatedBuffer;
7491 allocInfo.pNext = &dedicatedAllocInfo;
7493 else if(dedicatedImage != VK_NULL_HANDLE)
7495 dedicatedAllocInfo.image = dedicatedImage;
7496 allocInfo.pNext = &dedicatedAllocInfo;
7501 VkDeviceMemory hMemory = VK_NULL_HANDLE;
7502 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
7505 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
7509 void* pMappedData = VMA_NULL;
7512 res = (*m_VulkanFunctions.vkMapMemory)(
// On map failure the freshly allocated memory is released before returning.
7521 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
7522 FreeVulkanMemory(memTypeIndex, size, hMemory);
7527 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
7528 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
7529 (*pAllocation)->SetUserData(
this, pUserData);
7533 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7534 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7535 VMA_ASSERT(pDedicatedAllocations);
7536 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
7539 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries memory requirements for a buffer. With the dedicated-allocation
// extension enabled it uses vkGetBufferMemoryRequirements2KHR and chains
// VkMemoryDedicatedRequirementsKHR to also learn whether the driver
// requires/prefers a dedicated allocation; otherwise it falls back to the
// core entry point and reports both flags as false.
7544 void VmaAllocator_T::GetBufferMemoryRequirements(
7546 VkMemoryRequirements& memReq,
7547 bool& requiresDedicatedAllocation,
7548 bool& prefersDedicatedAllocation)
const 7550 if(m_UseKhrDedicatedAllocation)
7552 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
7553 memReqInfo.buffer = hBuffer;
7555 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7557 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7558 memReq2.pNext = &memDedicatedReq;
7560 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7562 memReq = memReq2.memoryRequirements;
7563 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7564 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
7568 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
7569 requiresDedicatedAllocation =
false;
7570 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements(): uses
// vkGetImageMemoryRequirements2KHR + VkMemoryDedicatedRequirementsKHR when
// the extension is enabled, otherwise the core query with both dedicated
// flags reported as false.
7574 void VmaAllocator_T::GetImageMemoryRequirements(
7576 VkMemoryRequirements& memReq,
7577 bool& requiresDedicatedAllocation,
7578 bool& prefersDedicatedAllocation)
const 7580 if(m_UseKhrDedicatedAllocation)
7582 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
7583 memReqInfo.image = hImage;
7585 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7587 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7588 memReq2.pNext = &memDedicatedReq;
7590 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7592 memReq = memReq2.memoryRequirements;
7593 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7594 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
7598 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
7599 requiresDedicatedAllocation =
false;
7600 prefersDedicatedAllocation =
false;
// Main allocation entry point. Validates mutually exclusive create-flag
// combinations (dedicated+never-allocate, mapped+can-become-lost, dedicated
// requirement vs. never-allocate or custom pool), then either:
//   - forwards to the custom pool's block vector when createInfo.pool is set,
//   - or walks candidate memory types: FindMemoryTypeIndex (call lines
//     dropped by extraction) picks the best type, AllocateMemoryOfType tries
//     it, and on failure the type's bit is cleared from memoryTypeBits and the
//     next-best type is tried until success or no types remain.
// NOTE(review): several flag-test lines and the FindMemoryTypeIndex calls are
// missing from this extraction; the visible `res == VK_SUCCESS` checks frame
// that retry loop.
7604 VkResult VmaAllocator_T::AllocateMemory(
7605 const VkMemoryRequirements& vkMemReq,
7606 bool requiresDedicatedAllocation,
7607 bool prefersDedicatedAllocation,
7608 VkBuffer dedicatedBuffer,
7609 VkImage dedicatedImage,
7611 VmaSuballocationType suballocType,
7617 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
7618 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7623 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
7624 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7626 if(requiresDedicatedAllocation)
7630 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
7631 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7633 if(createInfo.
pool != VK_NULL_HANDLE)
7635 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
7636 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7639 if((createInfo.
pool != VK_NULL_HANDLE) &&
7642 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
7643 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom-pool path: delegate directly to the pool's block vector.
7646 if(createInfo.
pool != VK_NULL_HANDLE)
7648 return createInfo.
pool->m_BlockVector.Allocate(
7650 m_CurrentFrameIndex.load(),
// Default path: iterate candidate memory types from vkMemReq.memoryTypeBits.
7659 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
7660 uint32_t memTypeIndex = UINT32_MAX;
7662 if(res == VK_SUCCESS)
7664 res = AllocateMemoryOfType(
7666 requiresDedicatedAllocation || prefersDedicatedAllocation,
7674 if(res == VK_SUCCESS)
// This type failed: remove it from the candidate mask and retry.
7684 memoryTypeBits &= ~(1u << memTypeIndex);
7687 if(res == VK_SUCCESS)
7689 res = AllocateMemoryOfType(
7691 requiresDedicatedAllocation || prefersDedicatedAllocation,
7699 if(res == VK_SUCCESS)
7709 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees one allocation. If it has not already become lost, the backing
// storage is released according to its type: block allocations return their
// range to the owning block vector (the custom pool's, or the default one for
// the memory type); dedicated allocations go through FreeDedicatedMemory().
// In all cases the user-data is cleared and the handle object destroyed.
7720 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
7722 VMA_ASSERT(allocation);
7724 if(allocation->CanBecomeLost() ==
false ||
7725 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7727 switch(allocation->GetType())
7729 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7731 VmaBlockVector* pBlockVector = VMA_NULL;
7732 VmaPool hPool = allocation->GetPool();
7733 if(hPool != VK_NULL_HANDLE)
7735 pBlockVector = &hPool->m_BlockVector;
7739 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7740 pBlockVector = m_pBlockVectors[memTypeIndex];
7742 pBlockVector->Free(allocation);
7745 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7746 FreeDedicatedMemory(allocation);
7753 allocation->SetUserData(
this, VMA_NULL);
7754 vma_delete(
this, allocation);
// Computes global statistics into *pStats in four phases:
//   1. initialize the total and every per-type / per-heap StatInfo,
//   2. add stats from every default block vector and every custom pool,
//   3. add every dedicated allocation (per memory type, under its mutex),
//   4. post-process (derive averages etc.) for total, types, and heaps.
// NOTE(review): the InitStatInfo calls for the per-type/per-heap arrays and
// the `allocationStatInfo` local declaration were dropped by this extraction.
7757 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
7760 InitStatInfo(pStats->
total);
7761 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
7763 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
7767 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7769 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
7770 VMA_ASSERT(pBlockVector);
7771 pBlockVector->AddStats(pStats);
7776 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7777 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
7779 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
7784 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7786 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7787 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7788 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7789 VMA_ASSERT(pDedicatedAllocVector);
7790 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
7793 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
7794 VmaAddStatInfo(pStats->
total, allocationStatInfo);
7795 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
7796 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
7801 VmaPostprocessCalcStatInfo(pStats->
total);
7802 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
7803 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
7804 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
7805 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
7808 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Allocator-wide defragmentation over an explicit list of allocations:
//   1. zero the caller's pAllocationsChanged array and stats struct,
//   2. for each eligible allocation (block-type, HOST_VISIBLE memory,
//      not lost) register it with the defragmentator of its owning block
//      vector (custom pool's or the default one), creating it on demand,
//   3. run Defragment() on every HOST_VISIBLE default block vector and on
//      every custom pool, stopping at the first non-VK_SUCCESS result,
//   4. destroy all defragmentators.
// NOTE(review): the memset at 7819 covers only sizeof(*pAllocationsChanged)
// in this extraction — upstream multiplies by allocationCount; and
// maxBytesToMove (a VkDeviceSize) defaults to SIZE_MAX here rather than
// VK_WHOLE_SIZE, which differs on 32-bit size_t. Confirm both against
// upstream before relying on this copy.
7810 VkResult VmaAllocator_T::Defragment(
7812 size_t allocationCount,
7813 VkBool32* pAllocationsChanged,
7817 if(pAllocationsChanged != VMA_NULL)
7819 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
7821 if(pDefragmentationStats != VMA_NULL)
7823 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
7826 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
7828 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
7830 const size_t poolCount = m_Pools.size();
7833 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
7837 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
// Only suballocated, host-visible, non-lost allocations can be moved.
7839 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
7841 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
7843 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
7845 VmaBlockVector* pAllocBlockVector = VMA_NULL;
7847 const VmaPool hAllocPool = hAlloc->GetPool();
7849 if(hAllocPool != VK_NULL_HANDLE)
7851 pAllocBlockVector = &hAllocPool->GetBlockVector();
7856 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
7859 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
7861 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
7862 &pAllocationsChanged[allocIndex] : VMA_NULL;
7863 pDefragmentator->AddAllocation(hAlloc, pChanged);
7867 VkResult result = VK_SUCCESS;
// Budgets default to "unlimited" unless pDefragmentationInfo overrides them.
7871 VkDeviceSize maxBytesToMove = SIZE_MAX;
7872 uint32_t maxAllocationsToMove = UINT32_MAX;
7873 if(pDefragmentationInfo != VMA_NULL)
7880 for(uint32_t memTypeIndex = 0;
7881 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
7885 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7887 result = m_pBlockVectors[memTypeIndex]->Defragment(
7888 pDefragmentationStats,
7890 maxAllocationsToMove);
7895 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
7897 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
7898 pDefragmentationStats,
7900 maxAllocationsToMove);
7906 for(
size_t poolIndex = poolCount; poolIndex--; )
7908 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
7912 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
7914 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7916 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// Body of VmaAllocator_T::GetAllocationInfo (the signature line taking
// `VmaAllocation hAllocation, VmaAllocationInfo* pAllocationInfo` was dropped
// by this extraction). For lost-capable allocations it reads the last-use
// frame index in a CAS retry loop: already-lost allocations report zeroed
// memory fields, allocations used this frame report their real location, and
// otherwise the frame index is bumped to the current frame before reporting.
// Non-lost allocations (final branch) report their fields directly.
7925 if(hAllocation->CanBecomeLost())
7931 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7932 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
7935 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7939 pAllocationInfo->
offset = 0;
7940 pAllocationInfo->
size = hAllocation->GetSize();
7942 pAllocationInfo->
pUserData = hAllocation->GetUserData();
7945 else if(localLastUseFrameIndex == localCurrFrameIndex)
7947 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7948 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7949 pAllocationInfo->
offset = hAllocation->GetOffset();
7950 pAllocationInfo->
size = hAllocation->GetSize();
7952 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// CAS "touch": try to stamp the current frame; on contention, retry with
// the freshly observed value.
7957 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7959 localLastUseFrameIndex = localCurrFrameIndex;
7966 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7967 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7968 pAllocationInfo->
offset = hAllocation->GetOffset();
7969 pAllocationInfo->
size = hAllocation->GetSize();
7970 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
7971 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Lightweight variant of GetAllocationInfo: same CAS retry loop on the
// last-use frame index, but only returns whether the allocation is still
// valid (not lost) without filling an info struct. The return statements for
// each branch were dropped by this extraction.
7975 bool VmaAllocator_T::TouchAllocation(
VmaAllocation hAllocation)
7978 if(hAllocation->CanBecomeLost())
7980 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7981 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
7984 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7988 else if(localLastUseFrameIndex == localCurrFrameIndex)
7994 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7996 localLastUseFrameIndex = localCurrFrameIndex;
// Body of VmaAllocator_T::CreatePool (signature and the newCreateInfo
// normalization lines were dropped by this extraction): constructs a
// VmaPool_T, pre-creates its minimum block count, destroys the pool again on
// failure, and on success registers it in the sorted m_Pools vector under the
// pools mutex.
8009 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
8022 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
8024 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
8025 if(res != VK_SUCCESS)
8027 vma_delete(
this, *pPool);
8034 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8035 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
// Unregisters a pool from m_Pools (under the pools mutex, asserting it was
// actually registered) and destroys it.
8041 void VmaAllocator_T::DestroyPool(
VmaPool pool)
8045 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8046 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
8047 VMA_ASSERT(success &&
"Pool not found in Allocator.");
8050 vma_delete(
this, pool);
// Fragment of VmaAllocator_T::GetPoolStats (signature dropped by this
// extraction): delegates to the pool's block vector.
8055 pool->m_BlockVector.GetPoolStats(pPoolStats);
// Atomically publishes the application's current frame index, used by the
// lost-allocation machinery.
8058 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
8060 m_CurrentFrameIndex.store(frameIndex);
// Forwards to the pool's block vector, supplying the current frame index.
// The `VmaPool hPool` parameter line was dropped by this extraction.
8063 void VmaAllocator_T::MakePoolAllocationsLost(
8065 size_t* pLostAllocationCount)
8067 hPool->m_BlockVector.MakePoolAllocationsLost(
8068 m_CurrentFrameIndex.load(),
8069 pLostAllocationCount);
// Creates a dummy allocation that is born lost (frame index
// VMA_FRAME_INDEX_LOST, no user-data string) for use as a placeholder handle.
8072 void VmaAllocator_T::CreateLostAllocation(
VmaAllocation* pAllocation)
8074 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
8075 (*pAllocation)->InitLost();
// Wrapper around vkAllocateMemory that enforces the optional per-heap size
// limit: when a limit is active for the target heap, the remaining budget is
// checked and decremented under m_HeapSizeLimitMutex; an over-budget request
// fails with VK_ERROR_OUT_OF_DEVICE_MEMORY without calling the driver.
// On success the registered pfnAllocate device-memory callback is invoked.
// NOTE(review): the `VkResult res;` declaration and final `return res;` were
// dropped by this extraction.
8078 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
8080 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
8083 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
8085 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
8086 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
8088 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
8089 if(res == VK_SUCCESS)
8091 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
8096 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
8101 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
8104 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
8106 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
// Counterpart of AllocateVulkanMemory: fires the pfnFree callback (before the
// actual free), releases the memory via vkFreeMemory, and returns the size to
// the heap's remaining budget when a per-heap limit is active.
8112 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
8114 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
8116 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
8119 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
8121 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
8122 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
8124 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
8125 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation for CPU access. Lost-capable allocations cannot be
// mapped (their memory may vanish). Block allocations map the whole owning
// block (reference-counted via pBlock->Map) and return a pointer offset to
// this allocation; the per-allocation map count is tracked with
// BlockAllocMap(). Dedicated allocations delegate to DedicatedAllocMap().
8129 VkResult VmaAllocator_T::Map(
VmaAllocation hAllocation,
void** ppData)
8131 if(hAllocation->CanBecomeLost())
8133 return VK_ERROR_MEMORY_MAP_FAILED;
8136 switch(hAllocation->GetType())
8138 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8140 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
8141 char *pBytes = VMA_NULL;
8142 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
8143 if(res == VK_SUCCESS)
8145 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
8146 hAllocation->BlockAllocMap();
8150 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8151 return hAllocation->DedicatedAllocMap(
this, ppData);
8154 return VK_ERROR_MEMORY_MAP_FAILED;
// Body of VmaAllocator_T::Unmap (the signature line taking
// `VmaAllocation hAllocation` was dropped by this extraction). Mirrors Map():
// block allocations decrement their map count and release one block-level
// mapping; dedicated allocations delegate to DedicatedAllocUnmap().
8160 switch(hAllocation->GetType())
8162 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
8164 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
8165 hAllocation->BlockAllocUnmap();
8166 pBlock->Unmap(
this, 1);
8169 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
8170 hAllocation->DedicatedAllocUnmap(
this);
// Releases a dedicated allocation: removes it from the sorted per-type
// registry (under its mutex), unmaps the VkDeviceMemory if it was mapped,
// and frees the memory through FreeVulkanMemory() (which also restores any
// per-heap budget and fires the free callback).
8177 void VmaAllocator_T::FreeDedicatedMemory(
VmaAllocation allocation)
8179 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
8181 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
8183 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8184 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
8185 VMA_ASSERT(pDedicatedAllocations);
8186 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
8187 VMA_ASSERT(success);
8190 VkDeviceMemory hMemory = allocation->GetMemory();
8192 if(allocation->GetMappedData() != VMA_NULL)
8194 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
8197 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
8199 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Writes the detailed JSON dump (stats-string feature) in three sections:
//   "DedicatedAllocations" — per memory type, each dedicated allocation's
//     suballocation type, size, and optional user data (string or pointer);
//   "DefaultPools" — per memory type, the default block vector's map;
//   "Pools" — every custom pool's block vector map.
// Section headers are emitted lazily (the *Started flags) so empty sections
// are omitted. Many Begin/End-Object/Array calls and the `hAlloc` loop
// variable declaration were dropped by this extraction.
8202 #if VMA_STATS_STRING_ENABLED 8204 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
8206 bool dedicatedAllocationsStarted =
false;
8207 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8209 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
8210 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
8211 VMA_ASSERT(pDedicatedAllocVector);
8212 if(pDedicatedAllocVector->empty() ==
false)
8214 if(dedicatedAllocationsStarted ==
false)
8216 dedicatedAllocationsStarted =
true;
8217 json.WriteString(
"DedicatedAllocations");
8221 json.BeginString(
"Type ");
8222 json.ContinueString(memTypeIndex);
8227 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
8230 json.BeginObject(
true);
8232 json.WriteString(
"Type");
8233 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
8235 json.WriteString(
"Size");
8236 json.WriteNumber(hAlloc->GetSize());
8238 const void* pUserData = hAlloc->GetUserData();
8239 if(pUserData != VMA_NULL)
8241 json.WriteString(
"UserData");
8242 if(hAlloc->IsUserDataString())
8244 json.WriteString((
const char*)pUserData);
8249 json.ContinueString_Pointer(pUserData);
8260 if(dedicatedAllocationsStarted)
8266 bool allocationsStarted =
false;
8267 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8269 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
8271 if(allocationsStarted ==
false)
8273 allocationsStarted =
true;
8274 json.WriteString(
"DefaultPools");
8278 json.BeginString(
"Type ");
8279 json.ContinueString(memTypeIndex);
8282 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
8285 if(allocationsStarted)
8292 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8293 const size_t poolCount = m_Pools.size();
8296 json.WriteString(
"Pools");
8298 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
8300 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
8307 #endif // #if VMA_STATS_STRING_ENABLED 8309 static VkResult AllocateMemoryForImage(
8313 VmaSuballocationType suballocType,
8316 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
8318 VkMemoryRequirements vkMemReq = {};
8319 bool requiresDedicatedAllocation =
false;
8320 bool prefersDedicatedAllocation =
false;
8321 allocator->GetImageMemoryRequirements(image, vkMemReq,
8322 requiresDedicatedAllocation, prefersDedicatedAllocation);
8324 return allocator->AllocateMemory(
8326 requiresDedicatedAllocation,
8327 prefersDedicatedAllocation,
8330 *pAllocationCreateInfo,
8342 VMA_ASSERT(pCreateInfo && pAllocator);
8343 VMA_DEBUG_LOG(
"vmaCreateAllocator");
8351 if(allocator != VK_NULL_HANDLE)
8353 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
8354 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
8355 vma_delete(&allocationCallbacks, allocator);
8361 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
8363 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
8364 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
8369 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
8371 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
8372 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
8377 uint32_t memoryTypeIndex,
8378 VkMemoryPropertyFlags* pFlags)
8380 VMA_ASSERT(allocator && pFlags);
8381 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
8382 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
8387 uint32_t frameIndex)
8389 VMA_ASSERT(allocator);
8390 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
8392 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8394 allocator->SetCurrentFrameIndex(frameIndex);
8401 VMA_ASSERT(allocator && pStats);
8402 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8403 allocator->CalculateStats(pStats);
8406 #if VMA_STATS_STRING_ENABLED 8410 char** ppStatsString,
8411 VkBool32 detailedMap)
8413 VMA_ASSERT(allocator && ppStatsString);
8414 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8416 VmaStringBuilder sb(allocator);
8418 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
8422 allocator->CalculateStats(&stats);
8424 json.WriteString(
"Total");
8425 VmaPrintStatInfo(json, stats.
total);
8427 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
8429 json.BeginString(
"Heap ");
8430 json.ContinueString(heapIndex);
8434 json.WriteString(
"Size");
8435 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
8437 json.WriteString(
"Flags");
8438 json.BeginArray(
true);
8439 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
8441 json.WriteString(
"DEVICE_LOCAL");
8447 json.WriteString(
"Stats");
8448 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
8451 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
8453 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
8455 json.BeginString(
"Type ");
8456 json.ContinueString(typeIndex);
8461 json.WriteString(
"Flags");
8462 json.BeginArray(
true);
8463 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
8464 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
8466 json.WriteString(
"DEVICE_LOCAL");
8468 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8470 json.WriteString(
"HOST_VISIBLE");
8472 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
8474 json.WriteString(
"HOST_COHERENT");
8476 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
8478 json.WriteString(
"HOST_CACHED");
8480 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
8482 json.WriteString(
"LAZILY_ALLOCATED");
8488 json.WriteString(
"Stats");
8489 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
8498 if(detailedMap == VK_TRUE)
8500 allocator->PrintDetailedMap(json);
8506 const size_t len = sb.GetLength();
8507 char*
const pChars = vma_new_array(allocator,
char, len + 1);
8510 memcpy(pChars, sb.GetData(), len);
8513 *ppStatsString = pChars;
8520 if(pStatsString != VMA_NULL)
8522 VMA_ASSERT(allocator);
8523 size_t len = strlen(pStatsString);
8524 vma_delete_array(allocator, pStatsString, len + 1);
8528 #endif // #if VMA_STATS_STRING_ENABLED 8535 uint32_t memoryTypeBits,
8537 uint32_t* pMemoryTypeIndex)
8539 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8540 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8541 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8548 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
8552 switch(pAllocationCreateInfo->
usage)
8557 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8560 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
8563 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8564 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8567 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8568 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
8574 *pMemoryTypeIndex = UINT32_MAX;
8575 uint32_t minCost = UINT32_MAX;
8576 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
8577 memTypeIndex < allocator->GetMemoryTypeCount();
8578 ++memTypeIndex, memTypeBit <<= 1)
8581 if((memTypeBit & memoryTypeBits) != 0)
8583 const VkMemoryPropertyFlags currFlags =
8584 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
8586 if((requiredFlags & ~currFlags) == 0)
8589 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
8591 if(currCost < minCost)
8593 *pMemoryTypeIndex = memTypeIndex;
8603 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
8608 const VkBufferCreateInfo* pBufferCreateInfo,
8610 uint32_t* pMemoryTypeIndex)
8612 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8613 VMA_ASSERT(pBufferCreateInfo != VMA_NULL);
8614 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8615 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8617 const VkDevice hDev = allocator->m_hDevice;
8618 VkBuffer hBuffer = VK_NULL_HANDLE;
8619 VkResult res = allocator->GetVulkanFunctions().vkCreateBuffer(
8620 hDev, pBufferCreateInfo, allocator->GetAllocationCallbacks(), &hBuffer);
8621 if(res == VK_SUCCESS)
8623 VkMemoryRequirements memReq = {};
8624 allocator->GetVulkanFunctions().vkGetBufferMemoryRequirements(
8625 hDev, hBuffer, &memReq);
8629 memReq.memoryTypeBits,
8630 pAllocationCreateInfo,
8633 allocator->GetVulkanFunctions().vkDestroyBuffer(
8634 hDev, hBuffer, allocator->GetAllocationCallbacks());
8641 const VkImageCreateInfo* pImageCreateInfo,
8643 uint32_t* pMemoryTypeIndex)
8645 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8646 VMA_ASSERT(pImageCreateInfo != VMA_NULL);
8647 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8648 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8650 const VkDevice hDev = allocator->m_hDevice;
8651 VkImage hImage = VK_NULL_HANDLE;
8652 VkResult res = allocator->GetVulkanFunctions().vkCreateImage(
8653 hDev, pImageCreateInfo, allocator->GetAllocationCallbacks(), &hImage);
8654 if(res == VK_SUCCESS)
8656 VkMemoryRequirements memReq = {};
8657 allocator->GetVulkanFunctions().vkGetImageMemoryRequirements(
8658 hDev, hImage, &memReq);
8662 memReq.memoryTypeBits,
8663 pAllocationCreateInfo,
8666 allocator->GetVulkanFunctions().vkDestroyImage(
8667 hDev, hImage, allocator->GetAllocationCallbacks());
8677 VMA_ASSERT(allocator && pCreateInfo && pPool);
8679 VMA_DEBUG_LOG(
"vmaCreatePool");
8681 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8683 return allocator->CreatePool(pCreateInfo, pPool);
8690 VMA_ASSERT(allocator);
8692 if(pool == VK_NULL_HANDLE)
8697 VMA_DEBUG_LOG(
"vmaDestroyPool");
8699 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8701 allocator->DestroyPool(pool);
8709 VMA_ASSERT(allocator && pool && pPoolStats);
8711 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8713 allocator->GetPoolStats(pool, pPoolStats);
8719 size_t* pLostAllocationCount)
8721 VMA_ASSERT(allocator && pool);
8723 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8725 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
8730 const VkMemoryRequirements* pVkMemoryRequirements,
8735 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
8737 VMA_DEBUG_LOG(
"vmaAllocateMemory");
8739 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8741 VkResult result = allocator->AllocateMemory(
8742 *pVkMemoryRequirements,
8748 VMA_SUBALLOCATION_TYPE_UNKNOWN,
8751 if(pAllocationInfo && result == VK_SUCCESS)
8753 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8766 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8768 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
8770 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8772 VkMemoryRequirements vkMemReq = {};
8773 bool requiresDedicatedAllocation =
false;
8774 bool prefersDedicatedAllocation =
false;
8775 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
8776 requiresDedicatedAllocation,
8777 prefersDedicatedAllocation);
8779 VkResult result = allocator->AllocateMemory(
8781 requiresDedicatedAllocation,
8782 prefersDedicatedAllocation,
8786 VMA_SUBALLOCATION_TYPE_BUFFER,
8789 if(pAllocationInfo && result == VK_SUCCESS)
8791 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8804 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8806 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
8808 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8810 VkResult result = AllocateMemoryForImage(
8814 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
8817 if(pAllocationInfo && result == VK_SUCCESS)
8819 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8829 VMA_ASSERT(allocator && allocation);
8831 VMA_DEBUG_LOG(
"vmaFreeMemory");
8833 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8835 allocator->FreeMemory(allocation);
8843 VMA_ASSERT(allocator && allocation && pAllocationInfo);
8845 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8847 allocator->GetAllocationInfo(allocation, pAllocationInfo);
8854 VMA_ASSERT(allocator && allocation);
8856 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8858 return allocator->TouchAllocation(allocation);
8866 VMA_ASSERT(allocator && allocation);
8868 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8870 allocation->SetUserData(allocator, pUserData);
8877 VMA_ASSERT(allocator && pAllocation);
8879 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
8881 allocator->CreateLostAllocation(pAllocation);
8889 VMA_ASSERT(allocator && allocation && ppData);
8891 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8893 return allocator->Map(allocation, ppData);
8900 VMA_ASSERT(allocator && allocation);
8902 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8904 allocator->Unmap(allocation);
8910 size_t allocationCount,
8911 VkBool32* pAllocationsChanged,
8915 VMA_ASSERT(allocator && pAllocations);
8917 VMA_DEBUG_LOG(
"vmaDefragment");
8919 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8921 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
8926 const VkBufferCreateInfo* pBufferCreateInfo,
8932 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
8934 VMA_DEBUG_LOG(
"vmaCreateBuffer");
8936 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8938 *pBuffer = VK_NULL_HANDLE;
8939 *pAllocation = VK_NULL_HANDLE;
8942 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
8943 allocator->m_hDevice,
8945 allocator->GetAllocationCallbacks(),
8950 VkMemoryRequirements vkMemReq = {};
8951 bool requiresDedicatedAllocation =
false;
8952 bool prefersDedicatedAllocation =
false;
8953 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
8954 requiresDedicatedAllocation, prefersDedicatedAllocation);
8958 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
8960 VMA_ASSERT(vkMemReq.alignment %
8961 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
8963 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
8965 VMA_ASSERT(vkMemReq.alignment %
8966 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
8968 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
8970 VMA_ASSERT(vkMemReq.alignment %
8971 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
8975 res = allocator->AllocateMemory(
8977 requiresDedicatedAllocation,
8978 prefersDedicatedAllocation,
8981 *pAllocationCreateInfo,
8982 VMA_SUBALLOCATION_TYPE_BUFFER,
8987 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
8988 allocator->m_hDevice,
8990 (*pAllocation)->GetMemory(),
8991 (*pAllocation)->GetOffset());
8995 if(pAllocationInfo != VMA_NULL)
8997 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9001 allocator->FreeMemory(*pAllocation);
9002 *pAllocation = VK_NULL_HANDLE;
9003 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
9004 *pBuffer = VK_NULL_HANDLE;
9007 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
9008 *pBuffer = VK_NULL_HANDLE;
9019 if(buffer != VK_NULL_HANDLE)
9021 VMA_ASSERT(allocator);
9023 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
9025 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9027 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
9029 allocator->FreeMemory(allocation);
9035 const VkImageCreateInfo* pImageCreateInfo,
9041 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
9043 VMA_DEBUG_LOG(
"vmaCreateImage");
9045 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9047 *pImage = VK_NULL_HANDLE;
9048 *pAllocation = VK_NULL_HANDLE;
9051 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
9052 allocator->m_hDevice,
9054 allocator->GetAllocationCallbacks(),
9058 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
9059 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
9060 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
9063 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
9067 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
9068 allocator->m_hDevice,
9070 (*pAllocation)->GetMemory(),
9071 (*pAllocation)->GetOffset());
9075 if(pAllocationInfo != VMA_NULL)
9077 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
9081 allocator->FreeMemory(*pAllocation);
9082 *pAllocation = VK_NULL_HANDLE;
9083 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
9084 *pImage = VK_NULL_HANDLE;
9087 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
9088 *pImage = VK_NULL_HANDLE;
9099 if(image != VK_NULL_HANDLE)
9101 VMA_ASSERT(allocator);
9103 VMA_DEBUG_LOG(
"vmaDestroyImage");
9105 VMA_DEBUG_GLOBAL_MUTEX_LOCK
9107 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
9109 allocator->FreeMemory(allocation);
9113 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:1022
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1284
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:1047
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
Represents single memory allocation.
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:1032
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1241
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:1026
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1614
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:1044
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1813
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1460
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1514
Definition: vk_mem_alloc.h:1321
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:1015
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1359
Definition: vk_mem_alloc.h:1268
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks. Optional.
Definition: vk_mem_alloc.h:1056
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:1109
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:1041
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1272
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation and atomically marks it as used in current fra...
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1174
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:1029
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1173
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:1037
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1817
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1073
VmaStatInfo total
Definition: vk_mem_alloc.h:1183
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1825
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1343
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1808
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:1030
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:957
Represents main object of this library initialized.
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:1050
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1468
Definition: vk_mem_alloc.h:1462
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1624
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:1027
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1380
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1484
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1520
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1013
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1471
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given memoryTypeBits and VmaAllocationCreateInfo.
VmaMemoryUsage
Definition: vk_mem_alloc.h:1219
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1803
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1821
Definition: vk_mem_alloc.h:1258
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1367
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:1028
Represents custom memory pool.
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1179
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:963
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:984
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:989
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1823
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1354
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1530
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:1023
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1162
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1479
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:976
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1328
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1175
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:980
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1474
Definition: vk_mem_alloc.h:1267
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1349
Definition: vk_mem_alloc.h:1340
VkResult vmaFindMemoryTypeIndexForImageInfo(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkImageCreateInfo and VmaAllocationCreateInfo.
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1165
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:1025
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool. Optional.
Definition: vk_mem_alloc.h:1492
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory. Optional.
Definition: vk_mem_alloc.h:1059
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1523
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1338
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1373
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either null or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:1097
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1181
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1308
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1174
VkResult vmaFindMemoryTypeIndexForBufferInfo(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
Helps to find memoryTypeIndex, given VkBufferCreateInfo and VmaAllocationCreateInfo.
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:1034
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:978
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:1033
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1506
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1638
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:1053
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1174
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1171
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1511
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1619
Definition: vk_mem_alloc.h:1336
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1819
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:1021
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:1036
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1169
Definition: vk_mem_alloc.h:1224
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1464
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1167
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:1031
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:1035
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1295
Definition: vk_mem_alloc.h:1251
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1633
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:1011
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:1024
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1600
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1442
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1175
VkBool32 vmaTouchAllocation(VmaAllocator allocator, VmaAllocation allocation)
Returns VK_TRUE if allocation is not lost and atomically marks it as used in current frame...
Definition: vk_mem_alloc.h:1334
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1182
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1517
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1175
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1605