23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 688 #include <vulkan/vulkan.h> 690 VK_DEFINE_HANDLE(VmaAllocator)
694 VmaAllocator allocator,
696 VkDeviceMemory memory,
700 VmaAllocator allocator,
702 VkDeviceMemory memory,
860 VmaAllocator* pAllocator);
864 VmaAllocator allocator);
871 VmaAllocator allocator,
872 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
879 VmaAllocator allocator,
880 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
889 VmaAllocator allocator,
890 uint32_t memoryTypeIndex,
891 VkMemoryPropertyFlags* pFlags);
902 VmaAllocator allocator,
903 uint32_t frameIndex);
933 VmaAllocator allocator,
936 #define VMA_STATS_STRING_ENABLED 1 938 #if VMA_STATS_STRING_ENABLED 944 VmaAllocator allocator,
945 char** ppStatsString,
946 VkBool32 detailedMap);
949 VmaAllocator allocator,
952 #endif // #if VMA_STATS_STRING_ENABLED 954 VK_DEFINE_HANDLE(VmaPool)
1125 VmaAllocator allocator,
1126 uint32_t memoryTypeBits,
1128 uint32_t* pMemoryTypeIndex);
1229 VmaAllocator allocator,
1236 VmaAllocator allocator,
1246 VmaAllocator allocator,
1257 VmaAllocator allocator,
1259 size_t* pLostAllocationCount);
1261 VK_DEFINE_HANDLE(VmaAllocation)
1317 VmaAllocator allocator,
1318 const VkMemoryRequirements* pVkMemoryRequirements,
1320 VmaAllocation* pAllocation,
1330 VmaAllocator allocator,
1333 VmaAllocation* pAllocation,
1338 VmaAllocator allocator,
1341 VmaAllocation* pAllocation,
1346 VmaAllocator allocator,
1347 VmaAllocation allocation);
1351 VmaAllocator allocator,
1352 VmaAllocation allocation,
1369 VmaAllocator allocator,
1370 VmaAllocation allocation,
1384 VmaAllocator allocator,
1385 VmaAllocation* pAllocation);
1422 VmaAllocator allocator,
1423 VmaAllocation allocation,
1431 VmaAllocator allocator,
1432 VmaAllocation allocation);
1537 VmaAllocator allocator,
1538 VmaAllocation* pAllocations,
1539 size_t allocationCount,
1540 VkBool32* pAllocationsChanged,
1571 VmaAllocator allocator,
1572 const VkBufferCreateInfo* pBufferCreateInfo,
1575 VmaAllocation* pAllocation,
1590 VmaAllocator allocator,
1592 VmaAllocation allocation);
1596 VmaAllocator allocator,
1597 const VkImageCreateInfo* pImageCreateInfo,
1600 VmaAllocation* pAllocation,
1615 VmaAllocator allocator,
1617 VmaAllocation allocation);
1623 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1626 #ifdef __INTELLISENSE__ 1627 #define VMA_IMPLEMENTATION 1630 #ifdef VMA_IMPLEMENTATION 1631 #undef VMA_IMPLEMENTATION 1653 #ifndef VMA_STATIC_VULKAN_FUNCTIONS 1654 #define VMA_STATIC_VULKAN_FUNCTIONS 1 1666 #if VMA_USE_STL_CONTAINERS 1667 #define VMA_USE_STL_VECTOR 1 1668 #define VMA_USE_STL_UNORDERED_MAP 1 1669 #define VMA_USE_STL_LIST 1 1672 #if VMA_USE_STL_VECTOR 1676 #if VMA_USE_STL_UNORDERED_MAP 1677 #include <unordered_map> 1680 #if VMA_USE_STL_LIST 1689 #include <algorithm> 1693 #if !defined(_WIN32) 1700 #define VMA_ASSERT(expr) assert(expr) 1702 #define VMA_ASSERT(expr) 1708 #ifndef VMA_HEAVY_ASSERT 1710 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 1712 #define VMA_HEAVY_ASSERT(expr) 1718 #define VMA_NULL nullptr 1721 #ifndef VMA_ALIGN_OF 1722 #define VMA_ALIGN_OF(type) (__alignof(type)) 1725 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 1727 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 1729 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 1733 #ifndef VMA_SYSTEM_FREE 1735 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 1737 #define VMA_SYSTEM_FREE(ptr) free(ptr) 1742 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 1746 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 1750 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 1754 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 1757 #ifndef VMA_DEBUG_LOG 1758 #define VMA_DEBUG_LOG(format, ...) 1768 #if VMA_STATS_STRING_ENABLED 1769 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
1771 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
// Formats num as decimal text into outStr.
// At most strLen bytes are written; snprintf NUL-terminates when strLen > 0.
static inline void VmaUint64ToStr(char* outStr, size_t strLen, uint64_t num)
{
    const unsigned long long value = num;
    snprintf(outStr, strLen, "%llu", value);
}
// Formats a pointer value as text into outStr (at most strLen bytes).
// The exact "%p" representation is implementation-defined.
static inline void VmaPtrToStr(char* outStr, size_t strLen, const void* ptr)
{
    snprintf(outStr, strLen, "%p", ptr);
}
1789 void Lock() { m_Mutex.lock(); }
1790 void Unlock() { m_Mutex.unlock(); }
1794 #define VMA_MUTEX VmaMutex 1805 #ifndef VMA_ATOMIC_UINT32 1806 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 1809 #ifndef VMA_BEST_FIT 1822 #define VMA_BEST_FIT (1) 1825 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 1830 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 1833 #ifndef VMA_DEBUG_ALIGNMENT 1838 #define VMA_DEBUG_ALIGNMENT (1) 1841 #ifndef VMA_DEBUG_MARGIN 1846 #define VMA_DEBUG_MARGIN (0) 1849 #ifndef VMA_DEBUG_GLOBAL_MUTEX 1854 #define VMA_DEBUG_GLOBAL_MUTEX (0) 1857 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 1862 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 1865 #ifndef VMA_SMALL_HEAP_MAX_SIZE 1866 #define VMA_SMALL_HEAP_MAX_SIZE (512 * 1024 * 1024) 1870 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 1871 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256 * 1024 * 1024) 1875 #ifndef VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE 1876 #define VMA_DEFAULT_SMALL_HEAP_BLOCK_SIZE (64 * 1024 * 1024) 1880 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
// Callbacks struct with every member null — presumably substituted where the
// user supplied no custom VkAllocationCallbacks (TODO confirm at use sites).
1886 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
1887 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v (population count).
// Classic branch-free SWAR reduction: sum adjacent bit pairs, then 4-bit,
// 8-bit and 16-bit groups. The visible source was truncated before the final
// return, making a non-void function fall off its end (UB); restored here.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
// Rounds val up to the nearest multiple of align.
// Pure integer arithmetic, so align need not be a power of two; align must be
// nonzero. Intended for nonnegative values.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    const T bumped = val + align - 1;
    return (bumped / align) * align;
}
// Integer division of x by y, rounded to the nearest whole number
// (i.e. (x + y/2) / y). Intended for nonnegative operands; y must be nonzero.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    const T half = y / (T)2;
    return (x + half) / y;
}
// Lomuto-style partition step for the VMA_SORT quick sort fallback.
// Uses the last element (*(end-1)) as the pivot; after the call, every element
// before the returned iterator satisfies cmp(elem, pivot), and the pivot
// itself has been swapped into the returned position.
// The visible source was truncated: it never advanced insertIndex inside the
// loop and had no return statement — both restored here.
template<typename Iterator, typename Compare>
Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
{
    Iterator centerValue = end; --centerValue;
    Iterator insertIndex = beg;
    for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
    {
        if(cmp(*memTypeIndex, *centerValue))
        {
            if(insertIndex != memTypeIndex)
            {
                VMA_SWAP(*memTypeIndex, *insertIndex);
            }
            ++insertIndex;
        }
    }
    if(insertIndex != centerValue)
    {
        VMA_SWAP(*insertIndex, *centerValue);
    }
    return insertIndex;
}
1940 template<
typename Iterator,
typename Compare>
1941 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
1945 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
1946 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
1947 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
1951 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 1953 #endif // #ifndef VMA_SORT 1962 static inline bool VmaBlocksOnSamePage(
1963 VkDeviceSize resourceAOffset,
1964 VkDeviceSize resourceASize,
1965 VkDeviceSize resourceBOffset,
1966 VkDeviceSize pageSize)
1968 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
1969 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
1970 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
1971 VkDeviceSize resourceBStart = resourceBOffset;
1972 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
1973 return resourceAEndPage == resourceBStartPage;
// Category of a suballocation within a device memory block.
// NOTE: the numeric ordering is significant — VmaIsBufferImageGranularityConflict
// normalizes its two arguments by comparing these values.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,            // unoccupied region
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,         // occupied, contents unknown
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,   // image, tiling unknown
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    // Forces the enum's underlying type to be at least 32 bits wide.
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
1993 static inline bool VmaIsBufferImageGranularityConflict(
1994 VmaSuballocationType suballocType1,
1995 VmaSuballocationType suballocType2)
1997 if(suballocType1 > suballocType2)
1999 VMA_SWAP(suballocType1, suballocType2);
2002 switch(suballocType1)
2004 case VMA_SUBALLOCATION_TYPE_FREE:
2006 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
2008 case VMA_SUBALLOCATION_TYPE_BUFFER:
2010 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2011 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2012 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
2014 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2015 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
2016 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2017 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
2019 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2020 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
2032 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
2033 m_pMutex(useMutex ? &mutex : VMA_NULL)
2050 VMA_MUTEX* m_pMutex;
2053 #if VMA_DEBUG_GLOBAL_MUTEX 2054 static VMA_MUTEX gDebugGlobalMutex;
2055 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 2057 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 2061 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
// Binary search over sorted range [beg, end): returns an iterator to the
// first element NOT less than key (per cmp), or end if all are less —
// the same contract as std::lower_bound. The visible source was truncated
// after the first comparison; the loop's else-branch, termination and final
// return are restored here.
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
2094 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
2096 if((pAllocationCallbacks != VMA_NULL) &&
2097 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
2099 return (*pAllocationCallbacks->pfnAllocation)(
2100 pAllocationCallbacks->pUserData,
2103 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2107 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
2111 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
2113 if((pAllocationCallbacks != VMA_NULL) &&
2114 (pAllocationCallbacks->pfnFree != VMA_NULL))
2116 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
2120 VMA_SYSTEM_FREE(ptr);
2124 template<
typename T>
2125 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
2127 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
2130 template<
typename T>
2131 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
2133 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
2136 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 2138 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 2140 template<
typename T>
2141 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
2144 VmaFree(pAllocationCallbacks, ptr);
2147 template<
typename T>
2148 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
2152 for(
size_t i = count; i--; )
2156 VmaFree(pAllocationCallbacks, ptr);
2161 template<
typename T>
2162 class VmaStlAllocator
2165 const VkAllocationCallbacks*
const m_pCallbacks;
2166 typedef T value_type;
2168 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
2169 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
2171 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
2172 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
2174 template<
typename U>
2175 bool operator==(
const VmaStlAllocator<U>& rhs)
const 2177 return m_pCallbacks == rhs.m_pCallbacks;
2179 template<
typename U>
2180 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 2182 return m_pCallbacks != rhs.m_pCallbacks;
2185 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
2188 #if VMA_USE_STL_VECTOR 2190 #define VmaVector std::vector 2192 template<
typename T,
typename allocatorT>
2193 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
2195 vec.insert(vec.begin() + index, item);
2198 template<
typename T,
typename allocatorT>
2199 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
2201 vec.erase(vec.begin() + index);
2204 #else // #if VMA_USE_STL_VECTOR 2209 template<
typename T,
typename AllocatorT>
2213 typedef T value_type;
2215 VmaVector(
const AllocatorT& allocator) :
2216 m_Allocator(allocator),
2223 VmaVector(
size_t count,
const AllocatorT& allocator) :
2224 m_Allocator(allocator),
2225 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
2231 VmaVector(
const VmaVector<T, AllocatorT>& src) :
2232 m_Allocator(src.m_Allocator),
2233 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
2234 m_Count(src.m_Count),
2235 m_Capacity(src.m_Count)
2239 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
2245 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2248 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
2252 resize(rhs.m_Count);
2255 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
2261 bool empty()
const {
return m_Count == 0; }
2262 size_t size()
const {
return m_Count; }
2263 T* data() {
return m_pArray; }
2264 const T* data()
const {
return m_pArray; }
2266 T& operator[](
size_t index)
2268 VMA_HEAVY_ASSERT(index < m_Count);
2269 return m_pArray[index];
2271 const T& operator[](
size_t index)
const 2273 VMA_HEAVY_ASSERT(index < m_Count);
2274 return m_pArray[index];
2279 VMA_HEAVY_ASSERT(m_Count > 0);
2282 const T& front()
const 2284 VMA_HEAVY_ASSERT(m_Count > 0);
2289 VMA_HEAVY_ASSERT(m_Count > 0);
2290 return m_pArray[m_Count - 1];
2292 const T& back()
const 2294 VMA_HEAVY_ASSERT(m_Count > 0);
2295 return m_pArray[m_Count - 1];
2298 void reserve(
size_t newCapacity,
bool freeMemory =
false)
2300 newCapacity = VMA_MAX(newCapacity, m_Count);
2302 if((newCapacity < m_Capacity) && !freeMemory)
2304 newCapacity = m_Capacity;
2307 if(newCapacity != m_Capacity)
2309 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
2312 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
2314 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2315 m_Capacity = newCapacity;
2316 m_pArray = newArray;
2320 void resize(
size_t newCount,
bool freeMemory =
false)
2322 size_t newCapacity = m_Capacity;
2323 if(newCount > m_Capacity)
2325 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
2329 newCapacity = newCount;
2332 if(newCapacity != m_Capacity)
2334 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
2335 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
2336 if(elementsToCopy != 0)
2338 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
2340 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2341 m_Capacity = newCapacity;
2342 m_pArray = newArray;
2348 void clear(
bool freeMemory =
false)
2350 resize(0, freeMemory);
2353 void insert(
size_t index,
const T& src)
2355 VMA_HEAVY_ASSERT(index <= m_Count);
2356 const size_t oldCount = size();
2357 resize(oldCount + 1);
2358 if(index < oldCount)
2360 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
2362 m_pArray[index] = src;
2365 void remove(
size_t index)
2367 VMA_HEAVY_ASSERT(index < m_Count);
2368 const size_t oldCount = size();
2369 if(index < oldCount - 1)
2371 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
2373 resize(oldCount - 1);
2376 void push_back(
const T& src)
2378 const size_t newIndex = size();
2379 resize(newIndex + 1);
2380 m_pArray[newIndex] = src;
2385 VMA_HEAVY_ASSERT(m_Count > 0);
2389 void push_front(
const T& src)
2396 VMA_HEAVY_ASSERT(m_Count > 0);
2400 typedef T* iterator;
2402 iterator begin() {
return m_pArray; }
2403 iterator end() {
return m_pArray + m_Count; }
2406 AllocatorT m_Allocator;
2412 template<
typename T,
typename allocatorT>
2413 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2415 vec.insert(index, item);
2418 template<
typename T,
typename allocatorT>
2419 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2424 #endif // #if VMA_USE_STL_VECTOR 2426 template<
typename CmpLess,
typename VectorT>
2427 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2429 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2431 vector.data() + vector.size(),
2433 CmpLess()) - vector.data();
2434 VmaVectorInsert(vector, indexToInsert, value);
2435 return indexToInsert;
2438 template<
typename CmpLess,
typename VectorT>
2439 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
2442 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2447 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
2449 size_t indexToRemove = it - vector.begin();
2450 VmaVectorRemove(vector, indexToRemove);
2456 template<
typename CmpLess,
typename VectorT>
2457 size_t VmaVectorFindSorted(
const VectorT& vector,
const typename VectorT::value_type& value)
2460 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2462 vector.data() + vector.size(),
2465 if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
2467 return it - vector.begin();
2471 return vector.size();
2483 template<
typename T>
2484 class VmaPoolAllocator
2487 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2488 ~VmaPoolAllocator();
2496 uint32_t NextFreeIndex;
2503 uint32_t FirstFreeIndex;
2506 const VkAllocationCallbacks* m_pAllocationCallbacks;
2507 size_t m_ItemsPerBlock;
2508 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2510 ItemBlock& CreateNewBlock();
2513 template<
typename T>
2514 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
2515 m_pAllocationCallbacks(pAllocationCallbacks),
2516 m_ItemsPerBlock(itemsPerBlock),
2517 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
2519 VMA_ASSERT(itemsPerBlock > 0);
2522 template<
typename T>
2523 VmaPoolAllocator<T>::~VmaPoolAllocator()
2528 template<
typename T>
2529 void VmaPoolAllocator<T>::Clear()
2531 for(
size_t i = m_ItemBlocks.size(); i--; )
2532 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
2533 m_ItemBlocks.clear();
2536 template<
typename T>
2537 T* VmaPoolAllocator<T>::Alloc()
2539 for(
size_t i = m_ItemBlocks.size(); i--; )
2541 ItemBlock& block = m_ItemBlocks[i];
2543 if(block.FirstFreeIndex != UINT32_MAX)
2545 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
2546 block.FirstFreeIndex = pItem->NextFreeIndex;
2547 return &pItem->Value;
2552 ItemBlock& newBlock = CreateNewBlock();
2553 Item*
const pItem = &newBlock.pItems[0];
2554 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
2555 return &pItem->Value;
2558 template<
typename T>
2559 void VmaPoolAllocator<T>::Free(T* ptr)
2562 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2564 ItemBlock& block = m_ItemBlocks[i];
2568 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2571 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2573 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2574 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2575 block.FirstFreeIndex = index;
2579 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
2582 template<
typename T>
2583 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
2585 ItemBlock newBlock = {
2586 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
2588 m_ItemBlocks.push_back(newBlock);
2591 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
2592 newBlock.pItems[i].NextFreeIndex = i + 1;
2593 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
2594 return m_ItemBlocks.back();
2600 #if VMA_USE_STL_LIST 2602 #define VmaList std::list 2604 #else // #if VMA_USE_STL_LIST 2606 template<
typename T>
2615 template<
typename T>
2619 typedef VmaListItem<T> ItemType;
2621 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2625 size_t GetCount()
const {
return m_Count; }
2626 bool IsEmpty()
const {
return m_Count == 0; }
2628 ItemType* Front() {
return m_pFront; }
2629 const ItemType* Front()
const {
return m_pFront; }
2630 ItemType* Back() {
return m_pBack; }
2631 const ItemType* Back()
const {
return m_pBack; }
2633 ItemType* PushBack();
2634 ItemType* PushFront();
2635 ItemType* PushBack(
const T& value);
2636 ItemType* PushFront(
const T& value);
2641 ItemType* InsertBefore(ItemType* pItem);
2643 ItemType* InsertAfter(ItemType* pItem);
2645 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2646 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2648 void Remove(ItemType* pItem);
2651 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2652 VmaPoolAllocator<ItemType> m_ItemAllocator;
2658 VmaRawList(
const VmaRawList<T>& src);
2659 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
2662 template<
typename T>
2663 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
2664 m_pAllocationCallbacks(pAllocationCallbacks),
2665 m_ItemAllocator(pAllocationCallbacks, 128),
2672 template<
typename T>
2673 VmaRawList<T>::~VmaRawList()
2679 template<
typename T>
2680 void VmaRawList<T>::Clear()
2682 if(IsEmpty() ==
false)
2684 ItemType* pItem = m_pBack;
2685 while(pItem != VMA_NULL)
2687 ItemType*
const pPrevItem = pItem->pPrev;
2688 m_ItemAllocator.Free(pItem);
2691 m_pFront = VMA_NULL;
2697 template<
typename T>
2698 VmaListItem<T>* VmaRawList<T>::PushBack()
2700 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2701 pNewItem->pNext = VMA_NULL;
2704 pNewItem->pPrev = VMA_NULL;
2705 m_pFront = pNewItem;
2711 pNewItem->pPrev = m_pBack;
2712 m_pBack->pNext = pNewItem;
2719 template<
typename T>
2720 VmaListItem<T>* VmaRawList<T>::PushFront()
2722 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2723 pNewItem->pPrev = VMA_NULL;
2726 pNewItem->pNext = VMA_NULL;
2727 m_pFront = pNewItem;
2733 pNewItem->pNext = m_pFront;
2734 m_pFront->pPrev = pNewItem;
2735 m_pFront = pNewItem;
2741 template<
typename T>
2742 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
2744 ItemType*
const pNewItem = PushBack();
2745 pNewItem->Value = value;
2749 template<
typename T>
2750 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
2752 ItemType*
const pNewItem = PushFront();
2753 pNewItem->Value = value;
2757 template<
typename T>
2758 void VmaRawList<T>::PopBack()
2760 VMA_HEAVY_ASSERT(m_Count > 0);
2761 ItemType*
const pBackItem = m_pBack;
2762 ItemType*
const pPrevItem = pBackItem->pPrev;
2763 if(pPrevItem != VMA_NULL)
2765 pPrevItem->pNext = VMA_NULL;
2767 m_pBack = pPrevItem;
2768 m_ItemAllocator.Free(pBackItem);
2772 template<
typename T>
2773 void VmaRawList<T>::PopFront()
2775 VMA_HEAVY_ASSERT(m_Count > 0);
2776 ItemType*
const pFrontItem = m_pFront;
2777 ItemType*
const pNextItem = pFrontItem->pNext;
2778 if(pNextItem != VMA_NULL)
2780 pNextItem->pPrev = VMA_NULL;
2782 m_pFront = pNextItem;
2783 m_ItemAllocator.Free(pFrontItem);
2787 template<
typename T>
2788 void VmaRawList<T>::Remove(ItemType* pItem)
2790 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
2791 VMA_HEAVY_ASSERT(m_Count > 0);
2793 if(pItem->pPrev != VMA_NULL)
2795 pItem->pPrev->pNext = pItem->pNext;
2799 VMA_HEAVY_ASSERT(m_pFront == pItem);
2800 m_pFront = pItem->pNext;
2803 if(pItem->pNext != VMA_NULL)
2805 pItem->pNext->pPrev = pItem->pPrev;
2809 VMA_HEAVY_ASSERT(m_pBack == pItem);
2810 m_pBack = pItem->pPrev;
2813 m_ItemAllocator.Free(pItem);
2817 template<
typename T>
2818 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
2820 if(pItem != VMA_NULL)
2822 ItemType*
const prevItem = pItem->pPrev;
2823 ItemType*
const newItem = m_ItemAllocator.Alloc();
2824 newItem->pPrev = prevItem;
2825 newItem->pNext = pItem;
2826 pItem->pPrev = newItem;
2827 if(prevItem != VMA_NULL)
2829 prevItem->pNext = newItem;
2833 VMA_HEAVY_ASSERT(m_pFront == pItem);
2843 template<
typename T>
2844 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
2846 if(pItem != VMA_NULL)
2848 ItemType*
const nextItem = pItem->pNext;
2849 ItemType*
const newItem = m_ItemAllocator.Alloc();
2850 newItem->pNext = nextItem;
2851 newItem->pPrev = pItem;
2852 pItem->pNext = newItem;
2853 if(nextItem != VMA_NULL)
2855 nextItem->pPrev = newItem;
2859 VMA_HEAVY_ASSERT(m_pBack == pItem);
2869 template<
typename T>
2870 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
2872 ItemType*
const newItem = InsertBefore(pItem);
2873 newItem->Value = value;
2877 template<
typename T>
2878 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
2880 ItemType*
const newItem = InsertAfter(pItem);
2881 newItem->Value = value;
2885 template<
typename T,
typename AllocatorT>
2898 T& operator*()
const 2900 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2901 return m_pItem->Value;
2903 T* operator->()
const 2905 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2906 return &m_pItem->Value;
2909 iterator& operator++()
2911 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2912 m_pItem = m_pItem->pNext;
2915 iterator& operator--()
2917 if(m_pItem != VMA_NULL)
2919 m_pItem = m_pItem->pPrev;
2923 VMA_HEAVY_ASSERT(!m_pList.IsEmpty());
2924 m_pItem = m_pList->Back();
2929 iterator operator++(
int)
2931 iterator result = *
this;
2935 iterator operator--(
int)
2937 iterator result = *
this;
2942 bool operator==(
const iterator& rhs)
const 2944 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2945 return m_pItem == rhs.m_pItem;
2947 bool operator!=(
const iterator& rhs)
const 2949 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2950 return m_pItem != rhs.m_pItem;
2954 VmaRawList<T>* m_pList;
2955 VmaListItem<T>* m_pItem;
2957 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
2963 friend class VmaList<T, AllocatorT>;
2966 class const_iterator
2975 const_iterator(
const iterator& src) :
2976 m_pList(src.m_pList),
2977 m_pItem(src.m_pItem)
2981 const T& operator*()
const 2983 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2984 return m_pItem->Value;
2986 const T* operator->()
const 2988 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2989 return &m_pItem->Value;
2992 const_iterator& operator++()
2994 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2995 m_pItem = m_pItem->pNext;
2998 const_iterator& operator--()
3000 if(m_pItem != VMA_NULL)
3002 m_pItem = m_pItem->pPrev;
3006 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3007 m_pItem = m_pList->Back();
3012 const_iterator operator++(
int)
3014 const_iterator result = *
this;
3018 const_iterator operator--(
int)
3020 const_iterator result = *
this;
3025 bool operator==(
const const_iterator& rhs)
const 3027 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3028 return m_pItem == rhs.m_pItem;
3030 bool operator!=(
const const_iterator& rhs)
const 3032 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3033 return m_pItem != rhs.m_pItem;
3037 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
3043 const VmaRawList<T>* m_pList;
3044 const VmaListItem<T>* m_pItem;
3046 friend class VmaList<T, AllocatorT>;
3049 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
3051 bool empty()
const {
return m_RawList.IsEmpty(); }
3052 size_t size()
const {
return m_RawList.GetCount(); }
3054 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
3055 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
3057 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
3058 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
3060 void clear() { m_RawList.Clear(); }
3061 void push_back(
const T& value) { m_RawList.PushBack(value); }
3062 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
3063 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
3066 VmaRawList<T> m_RawList;
3069 #endif // #if VMA_USE_STL_LIST 3077 #if VMA_USE_STL_UNORDERED_MAP 3079 #define VmaPair std::pair 3081 #define VMA_MAP_TYPE(KeyT, ValueT) \ 3082 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 3084 #else // #if VMA_USE_STL_UNORDERED_MAP 3086 template<
typename T1,
typename T2>
3092 VmaPair() : first(), second() { }
3093 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
3099 template<
typename KeyT,
typename ValueT>
3103 typedef VmaPair<KeyT, ValueT> PairType;
3104 typedef PairType* iterator;
3106 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
3108 iterator begin() {
return m_Vector.begin(); }
3109 iterator end() {
return m_Vector.end(); }
3111 void insert(
const PairType& pair);
3112 iterator find(
const KeyT& key);
3113 void erase(iterator it);
3116 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
3119 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 3121 template<
typename FirstT,
typename SecondT>
3122 struct VmaPairFirstLess
3124 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 3126 return lhs.first < rhs.first;
3128 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 3130 return lhs.first < rhsFirst;
3134 template<
typename KeyT,
typename ValueT>
3135 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
3137 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3139 m_Vector.data() + m_Vector.size(),
3141 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
3142 VmaVectorInsert(m_Vector, indexToInsert, pair);
3145 template<
typename KeyT,
typename ValueT>
3146 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
3148 PairType* it = VmaBinaryFindFirstNotLess(
3150 m_Vector.data() + m_Vector.size(),
3152 VmaPairFirstLess<KeyT, ValueT>());
3153 if((it != m_Vector.end()) && (it->first == key))
3159 return m_Vector.end();
3163 template<
typename KeyT,
typename ValueT>
3164 void VmaMap<KeyT, ValueT>::erase(iterator it)
3166 VmaVectorRemove(m_Vector, it - m_Vector.begin());
3169 #endif // #if VMA_USE_STL_UNORDERED_MAP 3175 class VmaDeviceMemoryBlock;
3177 struct VmaAllocation_T
3180 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
3184 FLAG_USER_DATA_STRING = 0x01,
3188 enum ALLOCATION_TYPE
3190 ALLOCATION_TYPE_NONE,
3191 ALLOCATION_TYPE_BLOCK,
3192 ALLOCATION_TYPE_DEDICATED,
3195 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
3198 m_pUserData(VMA_NULL),
3199 m_LastUseFrameIndex(currentFrameIndex),
3200 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
3201 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
3203 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
3209 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
3212 VMA_ASSERT(m_pUserData == VMA_NULL);
3215 void InitBlockAllocation(
3217 VmaDeviceMemoryBlock* block,
3218 VkDeviceSize offset,
3219 VkDeviceSize alignment,
3221 VmaSuballocationType suballocationType,
3225 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3226 VMA_ASSERT(block != VMA_NULL);
3227 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3228 m_Alignment = alignment;
3230 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3231 m_SuballocationType = (uint8_t)suballocationType;
3232 m_BlockAllocation.m_hPool = hPool;
3233 m_BlockAllocation.m_Block = block;
3234 m_BlockAllocation.m_Offset = offset;
3235 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
3240 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3241 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
3242 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3243 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
3244 m_BlockAllocation.m_Block = VMA_NULL;
3245 m_BlockAllocation.m_Offset = 0;
3246 m_BlockAllocation.m_CanBecomeLost =
true;
3249 void ChangeBlockAllocation(
3250 VmaDeviceMemoryBlock* block,
3251 VkDeviceSize offset)
3253 VMA_ASSERT(block != VMA_NULL);
3254 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3255 m_BlockAllocation.m_Block = block;
3256 m_BlockAllocation.m_Offset = offset;
// Initializes this allocation as a dedicated (own VkDeviceMemory) allocation.
// NOTE(review): the parameter list is incomplete — the body reads pMappedData,
// which is not among the parameters visible here; lines dropped by extraction.
3260 void InitDedicatedAllocation(
3261 uint32_t memoryTypeIndex,
3262 VkDeviceMemory hMemory,
3263 VmaSuballocationType suballocationType,
3267 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3268 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
3269 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
3272 m_SuballocationType = (uint8_t)suballocationType;
// A dedicated allocation created already mapped is marked persistently mapped.
3273 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3274 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
3275 m_DedicatedAllocation.m_hMemory = hMemory;
3276 m_DedicatedAllocation.m_pMappedData = pMappedData;
// --- Simple inline accessors over the allocation's stored state. ---
3279 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
3280 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
3281 VkDeviceSize GetSize()
const {
return m_Size; }
// True when m_pUserData holds an owned, heap-copied string (see SetUserData).
3282 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
3283 void* GetUserData()
const {
return m_pUserData; }
3284 void SetUserData(VmaAllocator hAllocator,
void* pUserData);
3285 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
// Only valid for block allocations.
3287 VmaDeviceMemoryBlock* GetBlock()
const 3289 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3290 return m_BlockAllocation.m_Block;
3292 VkDeviceSize GetOffset()
const;
3293 VkDeviceMemory GetMemory()
const;
3294 uint32_t GetMemoryTypeIndex()
const;
3295 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
3296 void* GetMappedData()
const;
3297 bool CanBecomeLost()
const;
3298 VmaPool GetPool()
const;
// Atomic access to the last-use frame index, used by the lost-allocation
// mechanism (VMA_FRAME_INDEX_LOST marks an already-lost allocation).
3300 uint32_t GetLastUseFrameIndex()
const 3302 return m_LastUseFrameIndex.load();
3304 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
3306 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
3316 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// NOTE(review): body of DedicatedAllocCalcStatsInfo is mostly missing here.
3318 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
3320 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
3331 void BlockAllocMap();
3332 void BlockAllocUnmap();
3333 VkResult DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData);
3334 void DedicatedAllocUnmap(VmaAllocator hAllocator);
// --- VmaAllocation_T data members. Block and dedicated state presumably
// live in an anonymous union (union keyword not visible in this extraction).
3337 VkDeviceSize m_Alignment;
3338 VkDeviceSize m_Size;
// Atomically updated; VMA_FRAME_INDEX_LOST means the allocation is lost.
3340 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
3342 uint8_t m_SuballocationType;
// State used when the allocation is a suballocation of a memory block.
3349 struct BlockAllocation
3352 VmaDeviceMemoryBlock* m_Block;
3353 VkDeviceSize m_Offset;
3354 bool m_CanBecomeLost;
// State used when the allocation owns its VkDeviceMemory (dedicated).
3358 struct DedicatedAllocation
3360 uint32_t m_MemoryTypeIndex;
3361 VkDeviceMemory m_hMemory;
3362 void* m_pMappedData;
3368 BlockAllocation m_BlockAllocation;
3370 DedicatedAllocation m_DedicatedAllocation;
// Frees the heap-owned user-data string (see IsUserDataString()).
3373 void FreeUserDataString(VmaAllocator hAllocator);
// One region inside a memory block: either free, or owned by an hAllocation.
// NOTE(review): a `VkDeviceSize size;` member appears to have been dropped by
// extraction — later code reads suballoc.size. Confirm against the original.
3380 struct VmaSuballocation
3382 VkDeviceSize offset;
3384 VmaAllocation hAllocation;
3385 VmaSuballocationType type;
3388 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Artificial cost (in bytes) charged per allocation that must be made lost,
// so requests preferring free space win over requests evicting others.
3391 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// Result of a search for a place to put a new allocation inside a block.
3406 struct VmaAllocationRequest
3408 VkDeviceSize offset;
3409 VkDeviceSize sumFreeSize;
3410 VkDeviceSize sumItemSize;
3411 VmaSuballocationList::iterator item;
3412 size_t itemsToMakeLostCount;
// Lower cost is better: size of allocations to evict plus a fixed penalty
// per evicted allocation.
3414 VkDeviceSize CalcCost()
const 3416 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
// Bookkeeping for the contents of a single VmaDeviceMemoryBlock: a list of
// suballocations plus a size-sorted index of the free ones.
3424 class VmaBlockMetadata
3427 VmaBlockMetadata(VmaAllocator hAllocator);
3428 ~VmaBlockMetadata();
// Must be called after construction with the block's total size.
3429 void Init(VkDeviceSize size);
// Consistency check over the whole suballocation list; used in heavy asserts.
3432 bool Validate()
const;
3433 VkDeviceSize GetSize()
const {
return m_Size; }
3434 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
3435 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
3436 VkDeviceSize GetUnusedRangeSizeMax()
const;
3438 bool IsEmpty()
const;
3440 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
3443 #if VMA_STATS_STRING_ENABLED 3444 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
// Trivial request covering the whole block; only valid when IsEmpty().
3448 void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
// Searches for space for a new allocation, optionally by making other
// (lost-enabled) allocations lost. Returns true on success.
3453 bool CreateAllocationRequest(
3454 uint32_t currentFrameIndex,
3455 uint32_t frameInUseCount,
3456 VkDeviceSize bufferImageGranularity,
3457 VkDeviceSize allocSize,
3458 VkDeviceSize allocAlignment,
3459 VmaSuballocationType allocType,
3460 bool canMakeOtherLost,
3461 VmaAllocationRequest* pAllocationRequest);
3463 bool MakeRequestedAllocationsLost(
3464 uint32_t currentFrameIndex,
3465 uint32_t frameInUseCount,
3466 VmaAllocationRequest* pAllocationRequest);
3468 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
// NOTE(review): the name of the method taking `request` (presumably Alloc)
// was dropped by extraction.
3472 const VmaAllocationRequest& request,
3473 VmaSuballocationType type,
3474 VkDeviceSize allocSize,
3475 VmaAllocation hAllocation);
3478 void Free(
const VmaAllocation allocation);
// --- Data members ---
3481 VkDeviceSize m_Size;
3482 uint32_t m_FreeCount;
3483 VkDeviceSize m_SumFreeSize;
3484 VmaSuballocationList m_Suballocations;
// Free suballocations above a registration threshold, sorted by size, so a
// best-fit search can binary-search them.
3487 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3489 bool ValidateFreeSuballocationList()
const;
// Checks whether an allocation fits at/after suballocItem; outputs the chosen
// offset and the cost (free size, evicted size, evicted count) of doing so.
3493 bool CheckAllocation(
3494 uint32_t currentFrameIndex,
3495 uint32_t frameInUseCount,
3496 VkDeviceSize bufferImageGranularity,
3497 VkDeviceSize allocSize,
3498 VkDeviceSize allocAlignment,
3499 VmaSuballocationType allocType,
3500 VmaSuballocationList::const_iterator suballocItem,
3501 bool canMakeOtherLost,
3502 VkDeviceSize* pOffset,
3503 size_t* itemsToMakeLostCount,
3504 VkDeviceSize* pSumFreeSize,
3505 VkDeviceSize* pSumItemSize)
const;
3507 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3511 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3514 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3517 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// Reference-counted CPU mapping of one VkDeviceMemory: Map/Unmap nest, and
// the actual vkMapMemory/vkUnmapMemory happen on the 0<->1 transitions.
// NOTE(review): a mutex member is likely missing from this extraction.
3521 class VmaDeviceMemoryMapping
3524 VmaDeviceMemoryMapping();
3525 ~VmaDeviceMemoryMapping();
3527 void* GetMappedData()
const {
return m_pMappedData; }
3530 VkResult Map(VmaAllocator hAllocator, VkDeviceMemory hMemory,
void **ppData);
3531 void Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory);
// Nesting depth of Map() calls.
3535 uint32_t m_MapCount;
3536 void* m_pMappedData;
// One VkDeviceMemory chunk plus its suballocation metadata and shared mapping.
3545 class VmaDeviceMemoryBlock
3548 uint32_t m_MemoryTypeIndex;
3549 VkDeviceMemory m_hMemory;
3550 VmaDeviceMemoryMapping m_Mapping;
3551 VmaBlockMetadata m_Metadata;
3553 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
// Destroy() must have been called first, releasing the VkDeviceMemory.
3555 ~VmaDeviceMemoryBlock()
3557 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// NOTE(review): the Init() method name line was dropped by extraction;
// these are its parameters.
3562 uint32_t newMemoryTypeIndex,
3563 VkDeviceMemory newMemory,
3564 VkDeviceSize newSize);
3566 void Destroy(VmaAllocator allocator);
3569 bool Validate()
const;
// Delegates to m_Mapping so nested maps share one vkMapMemory.
3572 VkResult Map(VmaAllocator hAllocator,
void** ppData);
3573 void Unmap(VmaAllocator hAllocator);
// Ordering functor comparing raw pointers; body dropped by extraction.
3576 struct VmaPointerLess
3578 bool operator()(
const void* lhs,
const void* rhs)
const 3584 class VmaDefragmentator;
// Sequence of VmaDeviceMemoryBlock for one memory type — the backing store of
// either a default pool or a custom VmaPool.
// NOTE(review): the constructor name line was dropped; these are its params.
3592 struct VmaBlockVector
3595 VmaAllocator hAllocator,
3596 uint32_t memoryTypeIndex,
3597 VkDeviceSize preferredBlockSize,
3598 size_t minBlockCount,
3599 size_t maxBlockCount,
3600 VkDeviceSize bufferImageGranularity,
3601 uint32_t frameInUseCount,
// Pre-creates minBlockCount empty blocks.
3605 VkResult CreateMinBlocks();
3607 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3608 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3609 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3610 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3614 bool IsEmpty()
const {
return m_Blocks.empty(); }
// NOTE(review): Allocate()'s name line was dropped; these are its params.
3617 VmaPool hCurrentPool,
3618 uint32_t currentFrameIndex,
3619 const VkMemoryRequirements& vkMemReq,
3621 VmaSuballocationType suballocType,
3622 VmaAllocation* pAllocation);
3625 VmaAllocation hAllocation);
3630 #if VMA_STATS_STRING_ENABLED 3631 void PrintDetailedMap(
class VmaJsonWriter& json);
3634 void MakePoolAllocationsLost(
3635 uint32_t currentFrameIndex,
3636 size_t* pLostAllocationCount);
// Lazily creates the per-vector defragmentator.
3638 VmaDefragmentator* EnsureDefragmentator(
3639 VmaAllocator hAllocator,
3640 uint32_t currentFrameIndex);
3642 VkResult Defragment(
3644 VkDeviceSize& maxBytesToMove,
3645 uint32_t& maxAllocationsToMove);
3647 void DestroyDefragmentator();
3650 friend class VmaDefragmentator;
// --- Immutable configuration, fixed at construction. ---
3652 const VmaAllocator m_hAllocator;
3653 const uint32_t m_MemoryTypeIndex;
3654 const VkDeviceSize m_PreferredBlockSize;
3655 const size_t m_MinBlockCount;
3656 const size_t m_MaxBlockCount;
3657 const VkDeviceSize m_BufferImageGranularity;
3658 const uint32_t m_FrameInUseCount;
3659 const bool m_IsCustomPool;
// Incrementally sorted by available space (see IncrementallySortBlocks).
3662 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
// True when one block is known empty, so a second empty block can be freed.
3666 bool m_HasEmptyBlock;
3667 VmaDefragmentator* m_pDefragmentator;
3670 void Remove(VmaDeviceMemoryBlock* pBlock);
3674 void IncrementallySortBlocks();
3676 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
// NOTE(review): remnants of class VmaPool_T follow; its class-header line
// was dropped by extraction.
3682 VmaBlockVector m_BlockVector;
3686 VmaAllocator hAllocator,
3690 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
// Moves allocations between blocks of one VmaBlockVector to compact memory,
// within per-call byte/count budgets.
3692 #if VMA_STATS_STRING_ENABLED 3697 class VmaDefragmentator
3699 const VmaAllocator m_hAllocator;
3700 VmaBlockVector*
const m_pBlockVector;
3701 uint32_t m_CurrentFrameIndex;
// Running totals reported via GetBytesMoved()/GetAllocationsMoved().
3702 VkDeviceSize m_BytesMoved;
3703 uint32_t m_AllocationsMoved;
// One allocation registered for defragmentation; m_pChanged (optional)
// receives VK_TRUE when the allocation was actually moved.
3705 struct AllocationInfo
3707 VmaAllocation m_hAllocation;
3708 VkBool32* m_pChanged;
3711 m_hAllocation(VK_NULL_HANDLE),
3712 m_pChanged(VMA_NULL)
// Sorts candidate allocations largest-first.
3717 struct AllocationInfoSizeGreater
3719 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3721 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3726 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
// Per-block working state during a defragmentation pass.
// NOTE(review): the BlockInfo struct-header line was dropped by extraction.
3730 VmaDeviceMemoryBlock* m_pBlock;
3731 bool m_HasNonMovableAllocations;
3732 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3734 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3736 m_HasNonMovableAllocations(true),
3737 m_Allocations(pAllocationCallbacks),
3738 m_pMappedDataForDefragmentation(VMA_NULL)
// A block has non-movable allocations when not every allocation in it was
// registered with the defragmentator.
3742 void CalcHasNonMovableAllocations()
3744 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
3745 const size_t defragmentAllocCount = m_Allocations.size();
3746 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
3749 void SortAllocationsBySizeDescecnding()
3751 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
3754 VkResult EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData);
3755 void Unmap(VmaAllocator hAllocator);
3759 void* m_pMappedDataForDefragmentation;
// Heterogeneous ordering so BlockInfo* can be searched by raw block pointer.
3762 struct BlockPointerLess
3764 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 3766 return pLhsBlockInfo->m_pBlock < pRhsBlock;
3768 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3770 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
// Ordering for choosing move destinations: blocks with non-movable
// allocations first, then by free space. NOTE(review): return statements of
// the branches were dropped by extraction.
3776 struct BlockInfoCompareMoveDestination
3778 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3780 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3784 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3788 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
3796 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3797 BlockInfoVector m_Blocks;
3799 VkResult DefragmentRound(
3800 VkDeviceSize maxBytesToMove,
3801 uint32_t maxAllocationsToMove);
3803 static bool MoveMakesSense(
3804 size_t dstBlockIndex, VkDeviceSize dstOffset,
3805 size_t srcBlockIndex, VkDeviceSize srcOffset);
// NOTE(review): the public constructor's name line was dropped; these are
// its parameters.
3809 VmaAllocator hAllocator,
3810 VmaBlockVector* pBlockVector,
3811 uint32_t currentFrameIndex);
3813 ~VmaDefragmentator();
3815 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
3816 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
3818 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
3820 VkResult Defragment(
3821 VkDeviceSize maxBytesToMove,
3822 uint32_t maxAllocationsToMove);
// The allocator object behind the opaque VmaAllocator handle: device state,
// per-memory-type block vectors, dedicated-allocation lists, and custom pools.
3826 struct VmaAllocator_T
3829 bool m_UseKhrDedicatedAllocation;
3831 bool m_AllocationCallbacksSpecified;
3832 VkAllocationCallbacks m_AllocationCallbacks;
// Optional per-heap size limits; guarded by m_HeapSizeLimitMutex.
3836 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
3837 VMA_MUTEX m_HeapSizeLimitMutex;
3839 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
3840 VkPhysicalDeviceMemoryProperties m_MemProps;
// Default (non-custom-pool) block vectors, one per memory type.
3843 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
// Dedicated allocations, tracked per memory type under per-type mutexes.
3846 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
3847 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
3848 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
// Returns user callbacks if supplied, else null for Vulkan defaults.
3853 const VkAllocationCallbacks* GetAllocationCallbacks()
const 3855 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
3859 return m_VulkanFunctions;
// Effective granularity: device limit, floored by the debug minimum.
// NOTE(review): the surrounding VMA_MAX(...) call line was dropped.
3862 VkDeviceSize GetBufferImageGranularity()
const 3865 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
3866 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
3869 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
3870 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
3872 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 3874 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
3875 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
// Wrappers over vkGet*MemoryRequirements(2KHR), also reporting whether a
// dedicated allocation is required/preferred.
3878 void GetBufferMemoryRequirements(
3880 VkMemoryRequirements& memReq,
3881 bool& requiresDedicatedAllocation,
3882 bool& prefersDedicatedAllocation)
const;
3883 void GetImageMemoryRequirements(
3885 VkMemoryRequirements& memReq,
3886 bool& requiresDedicatedAllocation,
3887 bool& prefersDedicatedAllocation)
const;
// Main allocation entry point used by vmaAllocateMemory* and friends.
3890 VkResult AllocateMemory(
3891 const VkMemoryRequirements& vkMemReq,
3892 bool requiresDedicatedAllocation,
3893 bool prefersDedicatedAllocation,
3894 VkBuffer dedicatedBuffer,
3895 VkImage dedicatedImage,
3897 VmaSuballocationType suballocType,
3898 VmaAllocation* pAllocation);
3901 void FreeMemory(
const VmaAllocation allocation);
3903 void CalculateStats(
VmaStats* pStats);
3905 #if VMA_STATS_STRING_ENABLED 3906 void PrintDetailedMap(
class VmaJsonWriter& json);
3909 VkResult Defragment(
3910 VmaAllocation* pAllocations,
3911 size_t allocationCount,
3912 VkBool32* pAllocationsChanged,
3916 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
3919 void DestroyPool(VmaPool pool);
3920 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
3922 void SetCurrentFrameIndex(uint32_t frameIndex);
3924 void MakePoolAllocationsLost(
3926 size_t* pLostAllocationCount);
3928 void CreateLostAllocation(VmaAllocation* pAllocation);
// Raw vkAllocateMemory/vkFreeMemory wrappers that also enforce heap limits.
3930 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
3931 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
3933 VkResult Map(VmaAllocation hAllocation,
void** ppData);
3934 void Unmap(VmaAllocation hAllocation);
3937 VkDeviceSize m_PreferredLargeHeapBlockSize;
3938 VkDeviceSize m_PreferredSmallHeapBlockSize;
3940 VkPhysicalDevice m_PhysicalDevice;
3941 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
// Custom pools; guarded by m_PoolsMutex.
3943 VMA_MUTEX m_PoolsMutex;
3945 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
3951 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
// Allocation within one specific memory type (block-vector path with
// dedicated fallback).
3953 VkResult AllocateMemoryOfType(
3954 const VkMemoryRequirements& vkMemReq,
3955 bool dedicatedAllocation,
3956 VkBuffer dedicatedBuffer,
3957 VkImage dedicatedImage,
3959 uint32_t memTypeIndex,
3960 VmaSuballocationType suballocType,
3961 VmaAllocation* pAllocation);
3964 VkResult AllocateDedicatedMemory(
3966 VmaSuballocationType suballocType,
3967 uint32_t memTypeIndex,
3969 bool isUserDataString,
3971 VkBuffer dedicatedBuffer,
3972 VkImage dedicatedImage,
3973 VmaAllocation* pAllocation);
3976 void FreeDedicatedMemory(VmaAllocation allocation);
// Convenience overloads that route allocator-object allocations through the
// user-supplied VkAllocationCallbacks stored on the allocator.
3982 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
3984 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
3987 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
3989 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
// Typed raw-memory allocation (no constructor call — callers placement-new).
3992 template<
typename T>
3993 static T* VmaAllocate(VmaAllocator hAllocator)
3995 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
3998 template<
typename T>
3999 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
4001 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
// Destroys *ptr then frees it. NOTE(review): the explicit destructor-call
// lines appear to have been dropped from both deleters by extraction.
4004 template<
typename T>
4005 static void vma_delete(VmaAllocator hAllocator, T* ptr)
4010 VmaFree(hAllocator, ptr);
4014 template<
typename T>
4015 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
// Destroy elements in reverse order, mirroring delete[] semantics.
4019 for(
size_t i = count; i--; )
4021 VmaFree(hAllocator, ptr);
// Minimal append-only string buffer used to build the JSON stats string.
// The buffer is NOT NUL-terminated; use GetLength() with GetData().
4028 #if VMA_STATS_STRING_ENABLED 4030 class VmaStringBuilder
4033 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
4034 size_t GetLength()
const {
return m_Data.size(); }
4035 const char* GetData()
const {
return m_Data.data(); }
4037 void Add(
char ch) { m_Data.push_back(ch); }
4038 void Add(
const char* pStr);
4039 void AddNewLine() { Add(
'\n'); }
4040 void AddNumber(uint32_t num);
4041 void AddNumber(uint64_t num);
4042 void AddPointer(
const void* ptr);
4045 VmaVector< char, VmaStlAllocator<char> > m_Data;
4048 void VmaStringBuilder::Add(
const char* pStr)
4050 const size_t strLen = strlen(pStr);
4053 const size_t oldCount = m_Data.size();
4054 m_Data.resize(oldCount + strLen);
4055 memcpy(m_Data.data() + oldCount, pStr, strLen);
// Formats numbers/pointers into a stack buffer and appends the result.
// NOTE(review): the buffer declarations and the trailing Add(buf) calls were
// dropped from all three definitions by extraction.
4059 void VmaStringBuilder::AddNumber(uint32_t num)
4062 VmaUint32ToStr(buf,
sizeof(buf), num);
4066 void VmaStringBuilder::AddNumber(uint64_t num)
4069 VmaUint64ToStr(buf,
sizeof(buf), num);
4073 void VmaStringBuilder::AddPointer(
const void* ptr)
4076 VmaPtrToStr(buf,
sizeof(buf), ptr);
// Streaming JSON writer over a VmaStringBuilder. Objects/arrays nest via an
// explicit stack; strings may be built incrementally between BeginString and
// EndString.
4080 #endif // #if VMA_STATS_STRING_ENABLED 4085 #if VMA_STATS_STRING_ENABLED 4090 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
4093 void BeginObject(
bool singleLine =
false);
4096 void BeginArray(
bool singleLine =
false);
4099 void WriteString(
const char* pStr);
// Incremental string building: BeginString, any ContinueString*, EndString.
4100 void BeginString(
const char* pStr = VMA_NULL);
4101 void ContinueString(
const char* pStr);
4102 void ContinueString(uint32_t n);
4103 void ContinueString(uint64_t n);
4104 void ContinueString_Pointer(
const void* ptr);
4105 void EndString(
const char* pStr = VMA_NULL);
4107 void WriteNumber(uint32_t n);
4108 void WriteNumber(uint64_t n);
4109 void WriteBool(
bool b);
4113 static const char*
const INDENT;
4115 enum COLLECTION_TYPE
4117 COLLECTION_TYPE_OBJECT,
4118 COLLECTION_TYPE_ARRAY,
// One stack frame per currently open object/array.
4122 COLLECTION_TYPE type;
4123 uint32_t valueCount;
4124 bool singleLineMode;
4127 VmaStringBuilder& m_SB;
4128 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
4129 bool m_InsideString;
// Emits separators/indentation before each value; isString asserts object
// keys are strings.
4131 void BeginValue(
bool isString);
4132 void WriteIndent(
bool oneLess =
false);
// Two-space indentation unit.
4135 const char*
const VmaJsonWriter::INDENT =
" ";
// --- VmaJsonWriter out-of-line definitions. NOTE(review): many statement
// lines (m_SB.Add calls, braces, pop_back calls) were dropped by extraction
// throughout this section; the remaining lines show the control skeleton.
4137 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
4139 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
4140 m_InsideString(false)
// Destructor asserts the document was fully closed.
4144 VmaJsonWriter::~VmaJsonWriter()
4146 VMA_ASSERT(!m_InsideString);
4147 VMA_ASSERT(m_Stack.empty());
// Opens "{" and pushes an object frame.
4150 void VmaJsonWriter::BeginObject(
bool singleLine)
4152 VMA_ASSERT(!m_InsideString);
4158 item.type = COLLECTION_TYPE_OBJECT;
4159 item.valueCount = 0;
4160 item.singleLineMode = singleLine;
4161 m_Stack.push_back(item);
// Closes "}" — top of stack must be an object.
4164 void VmaJsonWriter::EndObject()
4166 VMA_ASSERT(!m_InsideString);
4171 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
// Opens "[" and pushes an array frame.
4175 void VmaJsonWriter::BeginArray(
bool singleLine)
4177 VMA_ASSERT(!m_InsideString);
4183 item.type = COLLECTION_TYPE_ARRAY;
4184 item.valueCount = 0;
4185 item.singleLineMode = singleLine;
4186 m_Stack.push_back(item);
// Closes "]" — top of stack must be an array.
4189 void VmaJsonWriter::EndArray()
4191 VMA_ASSERT(!m_InsideString);
4196 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
// Convenience: BeginString + EndString around a complete string.
4200 void VmaJsonWriter::WriteString(
const char* pStr)
4206 void VmaJsonWriter::BeginString(
const char* pStr)
4208 VMA_ASSERT(!m_InsideString);
4212 m_InsideString =
true;
4213 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4215 ContinueString(pStr);
// Appends raw characters, escaping per JSON. NOTE(review): the per-character
// escape switch was dropped by extraction; only its default-case assert
// remains visible.
4219 void VmaJsonWriter::ContinueString(
const char* pStr)
4221 VMA_ASSERT(m_InsideString);
4223 const size_t strLen = strlen(pStr);
4224 for(
size_t i = 0; i < strLen; ++i)
4257 VMA_ASSERT(0 &&
"Character not currently supported.");
4263 void VmaJsonWriter::ContinueString(uint32_t n)
4265 VMA_ASSERT(m_InsideString);
4269 void VmaJsonWriter::ContinueString(uint64_t n)
4271 VMA_ASSERT(m_InsideString);
4275 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
4277 VMA_ASSERT(m_InsideString);
4278 m_SB.AddPointer(ptr);
// Optionally appends pStr, then emits the closing quote.
4281 void VmaJsonWriter::EndString(
const char* pStr)
4283 VMA_ASSERT(m_InsideString);
4284 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4286 ContinueString(pStr);
4289 m_InsideString =
false;
4292 void VmaJsonWriter::WriteNumber(uint32_t n)
4294 VMA_ASSERT(!m_InsideString);
4299 void VmaJsonWriter::WriteNumber(uint64_t n)
4301 VMA_ASSERT(!m_InsideString);
4306 void VmaJsonWriter::WriteBool(
bool b)
4308 VMA_ASSERT(!m_InsideString);
4310 m_SB.Add(b ?
"true" :
"false");
4313 void VmaJsonWriter::WriteNull()
4315 VMA_ASSERT(!m_InsideString);
// Emits the comma/colon/indent that must precede the next value. Inside an
// object, even-indexed values are keys and must be strings.
4320 void VmaJsonWriter::BeginValue(
bool isString)
4322 if(!m_Stack.empty())
4324 StackItem& currItem = m_Stack.back();
4325 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4326 currItem.valueCount % 2 == 0)
4328 VMA_ASSERT(isString);
4331 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4332 currItem.valueCount % 2 != 0)
4336 else if(currItem.valueCount > 0)
4345 ++currItem.valueCount;
// Newline plus one INDENT per open (non-single-line) collection; oneLess is
// used when closing a collection.
4349 void VmaJsonWriter::WriteIndent(
bool oneLess)
4351 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
4355 size_t count = m_Stack.size();
4356 if(count > 0 && oneLess)
4360 for(
size_t i = 0; i < count; ++i)
// Stores user data on the allocation. In string mode the incoming pointer is
// treated as a NUL-terminated string and deep-copied (the old copy is freed);
// otherwise the raw pointer is stored as-is.
4367 #endif // #if VMA_STATS_STRING_ENABLED 4371 void VmaAllocation_T::SetUserData(VmaAllocator hAllocator,
void* pUserData)
4373 if(IsUserDataString())
// Passing the currently-stored pointer back in would be a use-after-free
// once the old string is released below.
4375 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
4377 FreeUserDataString(hAllocator);
4379 if(pUserData != VMA_NULL)
4381 const char*
const newStrSrc = (
char*)pUserData;
4382 const size_t newStrLen = strlen(newStrSrc);
// +1 copies the NUL terminator as well.
4383 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
4384 memcpy(newStrDst, newStrSrc, newStrLen + 1);
4385 m_pUserData = newStrDst;
// Non-string mode: store the opaque pointer without taking ownership.
4390 m_pUserData = pUserData;
// --- Type-dispatched getters. NOTE(review): the switch(m_Type) scaffolding
// and some default/return lines were dropped by extraction; the case labels
// below show the per-type behavior.
// Offset within the block for block allocations; dedicated allocations
// presumably return 0 (their return line is not visible here).
4394 VkDeviceSize VmaAllocation_T::GetOffset()
const 4398 case ALLOCATION_TYPE_BLOCK:
4399 return m_BlockAllocation.m_Offset;
4400 case ALLOCATION_TYPE_DEDICATED:
// The VkDeviceMemory backing this allocation.
4408 VkDeviceMemory VmaAllocation_T::GetMemory()
const 4412 case ALLOCATION_TYPE_BLOCK:
4413 return m_BlockAllocation.m_Block->m_hMemory;
4414 case ALLOCATION_TYPE_DEDICATED:
4415 return m_DedicatedAllocation.m_hMemory;
4418 return VK_NULL_HANDLE;
4422 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 4426 case ALLOCATION_TYPE_BLOCK:
4427 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
4428 case ALLOCATION_TYPE_DEDICATED:
4429 return m_DedicatedAllocation.m_MemoryTypeIndex;
// Mapped CPU pointer, adjusted by the suballocation offset for block
// allocations.
4436 void* VmaAllocation_T::GetMappedData()
const 4440 case ALLOCATION_TYPE_BLOCK:
4443 void* pBlockData = m_BlockAllocation.m_Block->m_Mapping.GetMappedData();
4444 VMA_ASSERT(pBlockData != VMA_NULL);
4445 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
4452 case ALLOCATION_TYPE_DEDICATED:
// Mapped pointer and map count must agree for dedicated allocations.
4453 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
4454 return m_DedicatedAllocation.m_pMappedData;
// Only block allocations can ever be lost.
4461 bool VmaAllocation_T::CanBecomeLost()
const 4465 case ALLOCATION_TYPE_BLOCK:
4466 return m_BlockAllocation.m_CanBecomeLost;
4467 case ALLOCATION_TYPE_DEDICATED:
4475 VmaPool VmaAllocation_T::GetPool()
const 4477 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4478 return m_BlockAllocation.m_hPool;
// Attempts to mark this allocation lost via a CAS loop on the last-use frame
// index. Fails when already lost or still in use within frameInUseCount
// frames. NOTE(review): the enclosing for(;;) retry loop and the return
// statements of the branches were dropped by extraction.
4481 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4483 VMA_ASSERT(CanBecomeLost());
4489 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
// Already lost: nothing to do.
4492 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Still potentially in use by the GPU: cannot be made lost yet.
4497 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
// CAS succeeds -> allocation is now lost; otherwise retry with fresh value.
4503 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
4513 void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
4515 VMA_ASSERT(IsUserDataString());
4516 if(m_pUserData != VMA_NULL)
4518 char*
const oldStr = (
char*)m_pUserData;
4519 const size_t oldStrLen = strlen(oldStr);
4520 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
4521 m_pUserData = VMA_NULL;
// --- Map-count bookkeeping. The low 7 bits of m_MapCount count nested maps;
// the high flag bit marks persistent mapping. NOTE(review): the ++/--
// statements on m_MapCount were dropped from these bodies by extraction.
4525 void VmaAllocation_T::BlockAllocMap()
4527 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
// Guard against counter overflow into the persistent-map flag bit.
4529 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4535 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
4539 void VmaAllocation_T::BlockAllocUnmap()
4541 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4543 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4549 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
// Maps a dedicated allocation: reuses the existing mapping when one exists,
// otherwise calls vkMapMemory and caches the pointer.
4553 VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData)
4555 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4559 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4561 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
4562 *ppData = m_DedicatedAllocation.m_pMappedData;
4568 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
4569 return VK_ERROR_MEMORY_MAP_FAILED;
// First map: perform the actual vkMapMemory via the function table.
4574 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4575 hAllocator->m_hDevice,
4576 m_DedicatedAllocation.m_hMemory,
4581 if(result == VK_SUCCESS)
4583 m_DedicatedAllocation.m_pMappedData = *ppData;
// Unmaps a dedicated allocation; vkUnmapMemory is called only when the
// (visible) count indicates the last user is gone.
4590 void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
4592 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4594 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4599 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
4600 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
4601 hAllocator->m_hDevice,
4602 m_DedicatedAllocation.m_hMemory);
4607 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
// Serializes one VmaStatInfo as a JSON object (counts, byte totals, and
// min/avg/max of allocation and unused-range sizes). NOTE(review): the
// WriteNumber(...) value lines between the keys were dropped by extraction.
4611 #if VMA_STATS_STRING_ENABLED 4614 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
4623 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
4627 json.WriteString(
"Blocks");
4630 json.WriteString(
"Allocations");
4633 json.WriteString(
"UnusedRanges");
4636 json.WriteString(
"UsedBytes");
4639 json.WriteString(
"UnusedBytes");
4644 json.WriteString(
"AllocationSize");
4645 json.BeginObject(
true);
4646 json.WriteString(
"Min");
4648 json.WriteString(
"Avg");
4650 json.WriteString(
"Max");
4657 json.WriteString(
"UnusedRangeSize");
4658 json.BeginObject(
true);
4659 json.WriteString(
"Min");
4661 json.WriteString(
"Avg");
4663 json.WriteString(
"Max");
// Heterogeneous comparator ordering suballocation-list iterators by the size
// of the suballocation they point to; the VkDeviceSize overload enables
// binary searches keyed directly on a size.
4671 #endif // #if VMA_STATS_STRING_ENABLED 4673 struct VmaSuballocationItemSizeLess
4676 const VmaSuballocationList::iterator lhs,
4677 const VmaSuballocationList::iterator rhs)
const 4679 return lhs->size < rhs->size;
4682 const VmaSuballocationList::iterator lhs,
4683 VkDeviceSize rhsSize)
const 4685 return lhs->size < rhsSize;
// Constructs empty metadata; containers use the allocator's callbacks.
4692 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
4696 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4697 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
4701 VmaBlockMetadata::~VmaBlockMetadata()
// Initializes the block as one single free suballocation spanning [0, size).
4705 void VmaBlockMetadata::Init(VkDeviceSize size)
4709 m_SumFreeSize = size;
4711 VmaSuballocation suballoc = {};
4712 suballoc.offset = 0;
4713 suballoc.size = size;
4714 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4715 suballoc.hAllocation = VK_NULL_HANDLE;
4717 m_Suballocations.push_back(suballoc);
// NOTE(review): a decrement of this end() iterator (to reach the element
// just pushed) appears to have been dropped by extraction.
4718 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
4720 m_FreeSuballocationsBySize.push_back(suballocItem);
// Full consistency check: offsets are contiguous, no two free neighbors,
// free flags match null hAllocation, the size-sorted free index matches the
// list, and the cached counters agree with recomputed values.
// NOTE(review): the `return false;` bodies of the failed checks were dropped
// by extraction; each bare if below originally bailed out with false.
4723 bool VmaBlockMetadata::Validate()
const 4725 if(m_Suballocations.empty())
// Expected values recomputed from scratch for comparison with cached state.
4731 VkDeviceSize calculatedOffset = 0;
4733 uint32_t calculatedFreeCount = 0;
4735 VkDeviceSize calculatedSumFreeSize = 0;
// Free suballocations large enough to appear in m_FreeSuballocationsBySize.
4738 size_t freeSuballocationsToRegister = 0;
4740 bool prevFree =
false;
4742 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4743 suballocItem != m_Suballocations.cend();
4746 const VmaSuballocation& subAlloc = *suballocItem;
// Every suballocation must start exactly where the previous one ended.
4749 if(subAlloc.offset != calculatedOffset)
4754 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
// Adjacent free suballocations must have been merged.
4756 if(prevFree && currFree)
4760 prevFree = currFree;
// Free iff it has no owning allocation handle.
4762 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
4769 calculatedSumFreeSize += subAlloc.size;
4770 ++calculatedFreeCount;
4771 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4773 ++freeSuballocationsToRegister;
4777 calculatedOffset += subAlloc.size;
4782 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
// The free index must reference only free items and be sorted by size.
4787 VkDeviceSize lastSize = 0;
4788 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
4790 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
4793 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4798 if(suballocItem->size < lastSize)
4803 lastSize = suballocItem->size;
// Final verdict: free-list valid and all cached aggregates consistent.
4808 ValidateFreeSuballocationList() &&
4809 (calculatedOffset == m_Size) &&
4810 (calculatedSumFreeSize == m_SumFreeSize) &&
4811 (calculatedFreeCount == m_FreeCount);
// Largest free range; the index is sorted ascending so it is the last entry.
// NOTE(review): the `return 0;` for the empty case was dropped by extraction.
4814 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 4816 if(!m_FreeSuballocationsBySize.empty())
4818 return m_FreeSuballocationsBySize.back()->size;
4826 bool VmaBlockMetadata::IsEmpty()
const 4828 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
// Fills a VmaStatInfo for this block. NOTE(review): the accumulation lines
// between the visible statements were dropped by extraction.
4831 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 4835 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4847 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4848 suballocItem != m_Suballocations.cend();
4851 const VmaSuballocation& suballoc = *suballocItem;
// Used vs. unused ranges are accumulated into different fields.
4852 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
// Accumulates this block's totals into a VmaPoolStats.
4865 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 4867 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4869 inoutStats.
size += m_Size;
// Dumps this block as a JSON object: totals plus one entry per suballocation.
4876 #if VMA_STATS_STRING_ENABLED 4878 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 4882 json.WriteString(
"TotalBytes");
4883 json.WriteNumber(m_Size);
4885 json.WriteString(
"UnusedBytes");
4886 json.WriteNumber(m_SumFreeSize);
4888 json.WriteString(
"Allocations");
4889 json.WriteNumber(m_Suballocations.size() - m_FreeCount);
4891 json.WriteString(
"UnusedRanges");
4892 json.WriteNumber(m_FreeCount);
4894 json.WriteString(
"Suballocations");
4897 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4898 suballocItem != m_Suballocations.cend();
4899 ++suballocItem, ++i)
4901 json.BeginObject(
true);
4903 json.WriteString(
"Type");
4904 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
4906 json.WriteString(
"Size");
4907 json.WriteNumber(suballocItem->size);
4909 json.WriteString(
"Offset");
4910 json.WriteNumber(suballocItem->offset);
// For used ranges, also emit the user data (string or pointer form).
4912 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4914 const void* pUserData = suballocItem->hAllocation->GetUserData();
4915 if(pUserData != VMA_NULL)
4917 json.WriteString(
"UserData");
4918 if(suballocItem->hAllocation->IsUserDataString())
4920 json.WriteString((
const char*)pUserData);
4925 json.ContinueString_Pointer(pUserData);
// Builds the trivial allocation request for a completely empty block: offset
// 0, all free space available, nothing to make lost. Caller must have checked
// IsEmpty() (asserted here).
4938 #endif // #if VMA_STATS_STRING_ENABLED 4950 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
4952 VMA_ASSERT(IsEmpty());
4953 pAllocationRequest->offset = 0;
4954 pAllocationRequest->sumFreeSize = m_SumFreeSize;
4955 pAllocationRequest->sumItemSize = 0;
// The single (free) suballocation is the target item.
4956 pAllocationRequest->item = m_Suballocations.begin();
4957 pAllocationRequest->itemsToMakeLostCount = 0;
// Finds a place for a new allocation. First searches the size-sorted free
// list (best-fit via binary search, or worst-fit from the largest end — the
// selector between the two strategies is not visible in this extraction);
// when canMakeOtherLost is set, additionally scans all suballocations for
// the cheapest request that evicts lost-enabled allocations. Returns true
// when pAllocationRequest was filled. NOTE(review): several call-argument
// and return lines were dropped by extraction.
4960 bool VmaBlockMetadata::CreateAllocationRequest(
4961 uint32_t currentFrameIndex,
4962 uint32_t frameInUseCount,
4963 VkDeviceSize bufferImageGranularity,
4964 VkDeviceSize allocSize,
4965 VkDeviceSize allocAlignment,
4966 VmaSuballocationType allocType,
4967 bool canMakeOtherLost,
4968 VmaAllocationRequest* pAllocationRequest)
4970 VMA_ASSERT(allocSize > 0);
4971 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
4972 VMA_ASSERT(pAllocationRequest != VMA_NULL);
4973 VMA_HEAVY_ASSERT(Validate());
// Early out: without eviction, total free space must at least cover the size.
4976 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
4982 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
4983 if(freeSuballocCount > 0)
// Best-fit: binary-search the first free suballocation big enough, then
// probe forward until one satisfies alignment/granularity checks.
4988 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
4989 m_FreeSuballocationsBySize.data(),
4990 m_FreeSuballocationsBySize.data() + freeSuballocCount,
4992 VmaSuballocationItemSizeLess());
4993 size_t index = it - m_FreeSuballocationsBySize.data();
4994 for(; index < freeSuballocCount; ++index)
4999 bufferImageGranularity,
5003 m_FreeSuballocationsBySize[index],
5005 &pAllocationRequest->offset,
5006 &pAllocationRequest->itemsToMakeLostCount,
5007 &pAllocationRequest->sumFreeSize,
5008 &pAllocationRequest->sumItemSize))
5010 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Worst-fit: walk the free list from the largest entry downwards.
5018 for(
size_t index = freeSuballocCount; index--; )
5023 bufferImageGranularity,
5027 m_FreeSuballocationsBySize[index],
5029 &pAllocationRequest->offset,
5030 &pAllocationRequest->itemsToMakeLostCount,
5031 &pAllocationRequest->sumFreeSize,
5032 &pAllocationRequest->sumItemSize))
5034 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Eviction path: try every suballocation (free or lost-enabled) as a start
// point and keep the cheapest viable request.
5041 if(canMakeOtherLost)
// Sentinel "worst possible" cost so any real request wins the comparison.
5045 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
5046 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
5048 VmaAllocationRequest tmpAllocRequest = {};
5049 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
5050 suballocIt != m_Suballocations.end();
5053 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
5054 suballocIt->hAllocation->CanBecomeLost())
5059 bufferImageGranularity,
5065 &tmpAllocRequest.offset,
5066 &tmpAllocRequest.itemsToMakeLostCount,
5067 &tmpAllocRequest.sumFreeSize,
5068 &tmpAllocRequest.sumItemSize))
5070 tmpAllocRequest.item = suballocIt;
5072 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
5074 *pAllocationRequest = tmpAllocRequest;
// Success iff some candidate replaced the sentinel cost.
5080 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Evicts ("makes lost") the allocations counted in
// pAllocationRequest->itemsToMakeLostCount so the request can be satisfied.
// Walks forward from pAllocationRequest->item, skipping free suballocations,
// calling MakeLost() on each candidate and merging the freed space back via
// FreeSuballocation. After the loop the request item must be a free
// suballocation (asserted below).
5089 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
5090 uint32_t currentFrameIndex,
5091 uint32_t frameInUseCount,
5092 VmaAllocationRequest* pAllocationRequest)
5094 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Skip over suballocations that are already free.
5096 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
5098 ++pAllocationRequest->item;
5100 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5101 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
5102 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
5103 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; it returns the (possibly new)
// iterator for the freed region.
5105 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
5106 --pAllocationRequest->itemsToMakeLostCount;
5114 VMA_HEAVY_ASSERT(Validate());
5115 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5116 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Makes lost every allocation in this block that can become lost for the
// given frame window. Returns the number of allocations actually evicted.
5121 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5123 uint32_t lostAllocationCount = 0;
5124 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
5125 it != m_Suballocations.end();
5128 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
5129 it->hAllocation->CanBecomeLost() &&
5130 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge with neighbors; continue from its result.
5132 it = FreeSuballocation(it);
5133 ++lostAllocationCount;
5136 return lostAllocationCount;
// Commits a previously validated allocation request: carves allocSize bytes
// out of the free suballocation pointed to by request.item, inserting new
// free suballocations for any leading/trailing padding, and updates the
// free-count / free-size bookkeeping.
5139 void VmaBlockMetadata::Alloc(
5140 const VmaAllocationRequest& request,
5141 VmaSuballocationType type,
5142 VkDeviceSize allocSize,
5143 VmaAllocation hAllocation)
5145 VMA_ASSERT(request.item != m_Suballocations.end());
5146 VmaSuballocation& suballoc = *request.item;
5148 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5150 VMA_ASSERT(request.offset >= suballoc.offset);
// paddingBegin/paddingEnd: slack before and after the allocation inside the
// chosen free suballocation (e.g. from alignment).
5151 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
5152 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
5153 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// The item stops being free, so drop it from the size-sorted free list
// before mutating it.
5157 UnregisterFreeSuballocation(request.item);
5159 suballoc.offset = request.offset;
5160 suballoc.size = allocSize;
5161 suballoc.type = type;
5162 suballoc.hAllocation = hAllocation;
// Trailing padding becomes a new free suballocation after the item.
5167 VmaSuballocation paddingSuballoc = {};
5168 paddingSuballoc.offset = request.offset + allocSize;
5169 paddingSuballoc.size = paddingEnd;
5170 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5171 VmaSuballocationList::iterator next = request.item;
5173 const VmaSuballocationList::iterator paddingEndItem =
5174 m_Suballocations.insert(next, paddingSuballoc);
5175 RegisterFreeSuballocation(paddingEndItem);
// Leading padding becomes a new free suballocation before the item.
5181 VmaSuballocation paddingSuballoc = {};
5182 paddingSuballoc.offset = request.offset - paddingBegin;
5183 paddingSuballoc.size = paddingBegin;
5184 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5185 const VmaSuballocationList::iterator paddingBeginItem =
5186 m_Suballocations.insert(request.item, paddingSuballoc);
5187 RegisterFreeSuballocation(paddingBeginItem);
// One free suballocation consumed; padding insertions adjust the count
// (increments follow in the stripped lines), and total free size shrinks
// by exactly the allocated amount.
5191 m_FreeCount = m_FreeCount - 1;
5192 if(paddingBegin > 0)
5200 m_SumFreeSize -= allocSize;
// Frees the suballocation owning the given allocation handle. Linear search
// over the suballocation list; asserts if the handle is not found in this
// block.
5203 void VmaBlockMetadata::Free(
const VmaAllocation allocation)
5205 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5206 suballocItem != m_Suballocations.end();
5209 VmaSuballocation& suballoc = *suballocItem;
5210 if(suballoc.hAllocation == allocation)
5212 FreeSuballocation(suballocItem);
5213 VMA_HEAVY_ASSERT(Validate());
5217 VMA_ASSERT(0 &&
"Not found!");
// Debug validation of m_FreeSuballocationsBySize: every entry must be FREE,
// at least VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER bytes, and the vector
// must be sorted by ascending size.
5220 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 5222 VkDeviceSize lastSize = 0;
5223 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5225 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5227 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
5232 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5237 if(it->size < lastSize)
5243 lastSize = it->size;
// Core placement check: decides whether an allocation of allocSize /
// allocAlignment / allocType can start inside the suballocation at
// suballocItem. On success writes the chosen *pOffset and, in the
// canMakeOtherLost path, how many existing allocations must be made lost
// (*itemsToMakeLostCount) plus the free/item byte sums used for cost ranking.
// Two largely parallel code paths follow: the first handles
// canMakeOtherLost == true (the starting item may be occupied), the second
// requires suballocItem to already be FREE.
5248 bool VmaBlockMetadata::CheckAllocation(
5249 uint32_t currentFrameIndex,
5250 uint32_t frameInUseCount,
5251 VkDeviceSize bufferImageGranularity,
5252 VkDeviceSize allocSize,
5253 VkDeviceSize allocAlignment,
5254 VmaSuballocationType allocType,
5255 VmaSuballocationList::const_iterator suballocItem,
5256 bool canMakeOtherLost,
5257 VkDeviceSize* pOffset,
5258 size_t* itemsToMakeLostCount,
5259 VkDeviceSize* pSumFreeSize,
5260 VkDeviceSize* pSumItemSize)
const 5262 VMA_ASSERT(allocSize > 0);
5263 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5264 VMA_ASSERT(suballocItem != m_Suballocations.cend());
5265 VMA_ASSERT(pOffset != VMA_NULL);
5267 *itemsToMakeLostCount = 0;
// ---- Path 1: the starting suballocation may be occupied but evictable. ----
5271 if(canMakeOtherLost)
5273 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5275 *pSumFreeSize = suballocItem->size;
// Occupied: usable only if its allocation can be made lost and is old
// enough relative to the in-use frame window.
5279 if(suballocItem->hAllocation->CanBecomeLost() &&
5280 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5282 ++*itemsToMakeLostCount;
5283 *pSumItemSize = suballocItem->size;
// Quick reject: not enough bytes left in the block from this offset.
5292 if(m_Size - suballocItem->offset < allocSize)
5298 *pOffset = suballocItem->offset;
// Leave a debug margin before the allocation (except at block start).
5301 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5303 *pOffset += VMA_DEBUG_MARGIN;
// Apply the stricter of requested alignment and debug alignment.
5307 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5308 *pOffset = VmaAlignUp(*pOffset, alignment);
// Respect bufferImageGranularity: if a previous suballocation of a
// conflicting type (buffer vs. image) shares the same "page", bump the
// offset up to the granularity boundary.
5312 if(bufferImageGranularity > 1)
5314 bool bufferImageGranularityConflict =
false;
5315 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5316 while(prevSuballocItem != m_Suballocations.cbegin())
5319 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5320 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5322 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5324 bufferImageGranularityConflict =
true;
5332 if(bufferImageGranularityConflict)
5334 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity)<br/>;
5340 if(*pOffset >= suballocItem->offset + suballocItem->size)
5346 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
5349 VmaSuballocationList::const_iterator next = suballocItem;
// Debug margin required after the allocation unless it ends the block.
5351 const VkDeviceSize requiredEndMargin =
5352 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5354 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
5356 if(suballocItem->offset + totalSize > m_Size)
// The allocation may span several consecutive suballocations; walk forward
// accumulating free space and evictable allocations until totalSize is
// covered.
5363 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
5364 if(totalSize > suballocItem->size)
5366 VkDeviceSize remainingSize = totalSize - suballocItem->size;
5367 while(remainingSize > 0)
5370 if(lastSuballocItem == m_Suballocations.cend())
5374 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5376 *pSumFreeSize += lastSuballocItem->size;
5380 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
5381 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
5382 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5384 ++*itemsToMakeLostCount;
5385 *pSumItemSize += lastSuballocItem->size;
5392 remainingSize = (lastSuballocItem->size < remainingSize) ?
5393 remainingSize - lastSuballocItem->size : 0;
// Following suballocations of conflicting type on the same granularity
// page must also be evictable, otherwise this placement fails.
5399 if(bufferImageGranularity > 1)
5401 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
5403 while(nextSuballocItem != m_Suballocations.cend())
5405 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5406 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5408 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5410 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
5411 if(nextSuballoc.hAllocation->CanBecomeLost() &&
5412 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5414 ++*itemsToMakeLostCount;
// ---- Path 2: no eviction allowed; the item itself must be FREE. ----
5433 const VmaSuballocation& suballoc = *suballocItem;
5434 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5436 *pSumFreeSize = suballoc.size;
5439 if(suballoc.size < allocSize)
5445 *pOffset = suballoc.offset;
5448 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5450 *pOffset += VMA_DEBUG_MARGIN;
5454 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5455 *pOffset = VmaAlignUp(*pOffset, alignment);
// Same backward granularity-conflict scan as in path 1.
5459 if(bufferImageGranularity > 1)
5461 bool bufferImageGranularityConflict =
false;
5462 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5463 while(prevSuballocItem != m_Suballocations.cbegin())
5466 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5467 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5469 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5471 bufferImageGranularityConflict =
true;
5479 if(bufferImageGranularityConflict)
5481 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5486 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
5489 VmaSuballocationList::const_iterator next = suballocItem;
5491 const VkDeviceSize requiredEndMargin =
5492 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Fail if allocation plus paddings does not fit inside this single free item.
5495 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward granularity-conflict scan: a conflicting neighbor on the same
// page makes this placement unusable in the no-eviction path.
5502 if(bufferImageGranularity > 1)
5504 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
5506 while(nextSuballocItem != m_Suballocations.cend())
5508 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5509 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5511 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Merges the free suballocation at 'item' with the free suballocation that
// follows it: the next item's size is folded into 'item' and the next item is
// erased. Both must be FREE (asserted).
5530 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
5532 VMA_ASSERT(item != m_Suballocations.end());
5533 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5535 VmaSuballocationList::iterator nextItem = item;
5537 VMA_ASSERT(nextItem != m_Suballocations.end());
5538 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
5540 item->size += nextItem->size;
5542 m_Suballocations.erase(nextItem);
// Turns an occupied suballocation into a FREE one, coalesces it with free
// neighbors on either side, and (re)registers the resulting free region in
// the size-sorted free list. Returns an iterator to the merged free
// suballocation.
5545 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
5548 VmaSuballocation& suballoc = *suballocItem;
5549 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5550 suballoc.hAllocation = VK_NULL_HANDLE;
5554 m_SumFreeSize += suballoc.size;
// Detect free neighbors before merging.
5557 bool mergeWithNext =
false;
5558 bool mergeWithPrev =
false;
5560 VmaSuballocationList::iterator nextItem = suballocItem;
5562 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
5564 mergeWithNext =
true;
5567 VmaSuballocationList::iterator prevItem = suballocItem;
5568 if(suballocItem != m_Suballocations.begin())
5571 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5573 mergeWithPrev =
true;
// Neighbors being absorbed are first removed from the size-sorted list,
// then merged; the surviving item is re-registered with its new size.
5579 UnregisterFreeSuballocation(nextItem);
5580 MergeFreeWithNext(suballocItem);
5585 UnregisterFreeSuballocation(prevItem);
5586 MergeFreeWithNext(prevItem);
5587 RegisterFreeSuballocation(prevItem);
5592 RegisterFreeSuballocation(suballocItem);
5593 return suballocItem;
// Adds a free suballocation to m_FreeSuballocationsBySize, keeping that
// vector sorted by size. Suballocations below
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are deliberately not tracked.
5597 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
5599 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5600 VMA_ASSERT(item->size > 0);
5604 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5606 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5608 if(m_FreeSuballocationsBySize.empty())
5610 m_FreeSuballocationsBySize.push_back(item);
5614 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
// Removes a free suballocation from m_FreeSuballocationsBySize. Binary-search
// for the first entry of equal size, then linear scan across the run of
// equal-sized entries to find the exact iterator; asserts if not present.
5622 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
5624 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5625 VMA_ASSERT(item->size > 0);
5629 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
// Entries below the registration threshold were never added, so nothing to do.
5631 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5633 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5634 m_FreeSuballocationsBySize.data(),
5635 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
5637 VmaSuballocationItemSizeLess());
5638 for(
size_t index = it - m_FreeSuballocationsBySize.data();
5639 index < m_FreeSuballocationsBySize.size();
5642 if(m_FreeSuballocationsBySize[index] == item)
5644 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Still inside the run of equal-sized entries, otherwise the item is missing.
5647 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
5649 VMA_ASSERT(0 &&
"Not found.");
// Default constructor: no CPU mapping yet.
5658 VmaDeviceMemoryMapping::VmaDeviceMemoryMapping() :
5660 m_pMappedData(VMA_NULL)
// Destructor: the map count must have returned to zero, i.e. every Map() was
// balanced by an Unmap() before the owning block is destroyed.
5664 VmaDeviceMemoryMapping::~VmaDeviceMemoryMapping()
5666 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
// Reference-counted map of a VkDeviceMemory object, guarded by m_Mutex when
// the allocator uses mutexes. If already mapped, reuses the cached pointer;
// otherwise calls the allocator's vkMapMemory function pointer. ppData may be
// null when the caller only needs the mapping to exist.
5669 VkResult VmaDeviceMemoryMapping::Map(VmaAllocator hAllocator, VkDeviceMemory hMemory,
void **ppData)
5671 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
// Already mapped: hand back the cached pointer.
5675 VMA_ASSERT(m_pMappedData != VMA_NULL);
5676 if(ppData != VMA_NULL)
5678 *ppData = m_pMappedData;
// First map: go through the dispatched Vulkan entry point.
5684 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5685 hAllocator->m_hDevice,
5691 if(result == VK_SUCCESS)
5693 if(ppData != VMA_NULL)
5695 *ppData = m_pMappedData;
// Reference-counted unmap: only when the count drops to zero is vkUnmapMemory
// actually called and the cached pointer cleared. Asserts on unbalanced Unmap.
5703 void VmaDeviceMemoryMapping::Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory)
5705 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
5708 if(--m_MapCount == 0)
5710 m_pMappedData = VMA_NULL;
5711 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, hMemory);
5716 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// Constructor: block starts uninitialized (no memory handle, sentinel memory
// type); real setup happens in Init().
5723 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
5724 m_MemoryTypeIndex(UINT32_MAX),
5725 m_hMemory(VK_NULL_HANDLE),
5726 m_Metadata(hAllocator)
// Binds this block to an already-allocated VkDeviceMemory object and
// initializes the suballocation metadata for its size. Must be called exactly
// once (asserted by the null-handle check).
5730 void VmaDeviceMemoryBlock::Init(
5731 uint32_t newMemoryTypeIndex,
5732 VkDeviceMemory newMemory,
5733 VkDeviceSize newSize)
5735 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5737 m_MemoryTypeIndex = newMemoryTypeIndex;
5738 m_hMemory = newMemory;
5740 m_Metadata.Init(newSize);
// Releases the underlying VkDeviceMemory back through the allocator. All
// suballocations must already be freed (asserted).
5743 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
5747 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
5749 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
5750 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
5751 m_hMemory = VK_NULL_HANDLE;
// Debug validation: the block must have a live memory handle and non-zero
// size; then defers to the metadata's own Validate().
5754 bool VmaDeviceMemoryBlock::Validate()
const 5756 if((m_hMemory == VK_NULL_HANDLE) ||
5757 (m_Metadata.GetSize() == 0))
5762 return m_Metadata.Validate();
5765 VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator,
void** ppData)
5767 return m_Mapping.Map(hAllocator, m_hMemory, ppData);
5770 void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator)
5772 m_Mapping.Unmap(hAllocator, m_hMemory);
// NOTE(review): the surrounding function headers/bodies for the statistics
// helpers appear truncated here; the memset zero-initializes a VmaStatInfo
// ('outInfo') presumably at the start of an init helper, and
// VmaPostprocessCalcStatInfo finalizes derived fields of an accumulated
// VmaStatInfo — confirm against the full file.
5777 memset(&outInfo, 0,
sizeof(outInfo));
5796 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool_T constructor: forwards the pool's creation parameters (memory
// type, block size, min/max block counts, frame-in-use count) to the internal
// VmaBlockVector member. Several initializer lines are missing in this view.
5804 VmaPool_T::VmaPool_T(
5805 VmaAllocator hAllocator,
5809 createInfo.memoryTypeIndex,
5810 createInfo.blockSize,
5811 createInfo.minBlockCount,
5812 createInfo.maxBlockCount,
5814 createInfo.frameInUseCount,
// Destructor body not visible here.
5819 VmaPool_T::~VmaPool_T()
// VmaBlockVector constructor: a sequence of VkDeviceMemory blocks of one
// memory type, growable between minBlockCount and maxBlockCount. isCustomPool
// distinguishes user-created pools from the allocator's default vectors. The
// block array uses the allocator's allocation callbacks; defragmentator is
// created lazily.
5823 #if VMA_STATS_STRING_ENABLED 5825 #endif // #if VMA_STATS_STRING_ENABLED 5827 VmaBlockVector::VmaBlockVector(
5828 VmaAllocator hAllocator,
5829 uint32_t memoryTypeIndex,
5830 VkDeviceSize preferredBlockSize,
5831 size_t minBlockCount,
5832 size_t maxBlockCount,
5833 VkDeviceSize bufferImageGranularity,
5834 uint32_t frameInUseCount,
5835 bool isCustomPool) :
5836 m_hAllocator(hAllocator),
5837 m_MemoryTypeIndex(memoryTypeIndex),
5838 m_PreferredBlockSize(preferredBlockSize),
5839 m_MinBlockCount(minBlockCount),
5840 m_MaxBlockCount(maxBlockCount),
5841 m_BufferImageGranularity(bufferImageGranularity),
5842 m_FrameInUseCount(frameInUseCount),
5843 m_IsCustomPool(isCustomPool),
5844 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
5845 m_HasEmptyBlock(false),
5846 m_pDefragmentator(VMA_NULL)
// Destructor: defragmentator must already be destroyed; frees every block's
// device memory and deletes the block objects (reverse order).
5850 VmaBlockVector::~VmaBlockVector()
5852 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
5854 for(
size_t i = m_Blocks.size(); i--; )
5856 m_Blocks[i]->Destroy(m_hAllocator);
5857 vma_delete(m_hAllocator, m_Blocks[i]);
// Pre-allocates m_MinBlockCount blocks of the preferred size, bailing out on
// the first failure.
5861 VkResult VmaBlockVector::CreateMinBlocks()
5863 for(
size_t i = 0; i < m_MinBlockCount; ++i)
5865 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
5866 if(res != VK_SUCCESS)
// Accumulates pool statistics across all blocks under the vector's mutex.
5874 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
5882 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5884 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5886 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5888 VMA_HEAVY_ASSERT(pBlock->Validate());
5889 pBlock->m_Metadata.AddPoolStats(*pStats);
// Upper bound on retry iterations in the "make other allocations lost"
// allocation loop in VmaBlockVector::Allocate.
5893 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
// Allocates from this block vector in three escalating stages, all under the
// vector's mutex:
//   1. Try to place the allocation in an existing block without evicting
//      anything (itemsToMakeLostCount must be 0).
//   2. If allowed, create a new block (retrying with smaller block sizes when
//      this is not a custom pool and the initial vkAllocateMemory fails).
//   3. If the caller permits making other allocations lost, repeatedly pick
//      the block/request with the lowest eviction cost, evict, and allocate —
//      bounded by VMA_ALLOCATION_TRY_COUNT attempts.
// Returns VK_ERROR_TOO_MANY_OBJECTS when retries are exhausted and
// VK_ERROR_OUT_OF_DEVICE_MEMORY when nothing fits.
5895 VkResult VmaBlockVector::Allocate(
5896 VmaPool hCurrentPool,
5897 uint32_t currentFrameIndex,
5898 const VkMemoryRequirements& vkMemReq,
5900 VmaSuballocationType suballocType,
5901 VmaAllocation* pAllocation)
5906 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// --- Stage 1: search existing blocks, no eviction allowed. ---
5910 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
5912 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
5913 VMA_ASSERT(pCurrBlock);
5914 VmaAllocationRequest currRequest = {};
5915 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
5918 m_BufferImageGranularity,
5926 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistently-mapped allocations keep the block mapped for their lifetime.
5930 VkResult res = pCurrBlock->Map(m_hAllocator,
nullptr);
5931 if(res != VK_SUCCESS)
// The block stops being empty once it receives an allocation.
5938 if(pCurrBlock->m_Metadata.IsEmpty())
5940 m_HasEmptyBlock =
false;
5943 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
5944 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
5945 (*pAllocation)->InitBlockAllocation(
5954 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
5955 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
5956 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// --- Stage 2: create a new block if the vector may still grow. ---
5961 const bool canCreateNewBlock =
5963 (m_Blocks.size() < m_MaxBlockCount);
5966 if(canCreateNewBlock)
5969 VkDeviceSize blockSize = m_PreferredBlockSize;
5970 size_t newBlockIndex = 0;
5971 VkResult res = CreateBlock(blockSize, &newBlockIndex);
// For default (non-custom-pool) vectors, retry with progressively smaller
// block sizes as long as the request still fits.
5974 if(res < 0 && m_IsCustomPool ==
false)
5978 if(blockSize >= vkMemReq.size)
5980 res = CreateBlock(blockSize, &newBlockIndex);
5985 if(blockSize >= vkMemReq.size)
5987 res = CreateBlock(blockSize, &newBlockIndex)<br/>;
5992 if(res == VK_SUCCESS)
5994 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
5995 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
5999 res = pBlock->Map(m_hAllocator,
nullptr);
6000 if(res != VK_SUCCESS)
// Fresh block: the whole block becomes the first allocation request.
6007 VmaAllocationRequest allocRequest;
6008 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
6009 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6010 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
6011 (*pAllocation)->InitBlockAllocation(
6014 allocRequest.offset,
6020 VMA_HEAVY_ASSERT(pBlock->Validate());
6021 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
6022 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// --- Stage 3: evict lost-capable allocations to make room. ---
6030 if(canMakeOtherLost)
6032 uint32_t tryIndex = 0;
6033 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
6035 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
6036 VmaAllocationRequest bestRequest = {};
6037 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Pick the block whose request has the lowest eviction cost.
6041 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6043 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6044 VMA_ASSERT(pCurrBlock);
6045 VmaAllocationRequest currRequest = {};
6046 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6049 m_BufferImageGranularity,
6056 const VkDeviceSize currRequestCost = currRequest.CalcCost();
6057 if(pBestRequestBlock == VMA_NULL ||
6058 currRequestCost < bestRequestCost)
6060 pBestRequestBlock = pCurrBlock;
6061 bestRequest = currRequest;
6062 bestRequestCost = currRequestCost;
// Cost 0 means nothing needs to be evicted — cannot do better.
6064 if(bestRequestCost == 0)
6072 if(pBestRequestBlock != VMA_NULL)
6076 VkResult res = pBestRequestBlock->Map(m_hAllocator,
nullptr);
6077 if(res != VK_SUCCESS)
// Eviction can fail (e.g. an allocation was used again this frame),
// in which case the whole try is restarted.
6083 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
6089 if(pBestRequestBlock->m_Metadata.IsEmpty())
6091 m_HasEmptyBlock =
false;
6094 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6095 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
6096 (*pAllocation)->InitBlockAllocation(
// NOTE(review): 'pBlock' below looks out of scope in this branch — the
// local here is pBestRequestBlock; verify against the full file.
6105 VMA_HEAVY_ASSERT(pBlock->Validate());
6106 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6107 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
6121 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
6123 return VK_ERROR_TOO_MANY_OBJECTS;
6127 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Returns an allocation to its block. Under the mutex: unmaps persistent
// mappings, frees the suballocation, and applies the "keep at most one empty
// block" policy — an emptied block is deleted only if another empty block
// already exists and we are above m_MinBlockCount. The actual VkDeviceMemory
// release (Destroy + delete) is deferred until after the lock is dropped.
6130 void VmaBlockVector::Free(
6131 VmaAllocation hAllocation)
6133 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
6137 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6139 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
// Persistently-mapped allocations hold a map reference on the block.
6141 if(hAllocation->IsPersistentMap())
6143 pBlock->m_Mapping.Unmap(m_hAllocator, pBlock->m_hMemory);
6146 pBlock->m_Metadata.Free(hAllocation);
6147 VMA_HEAVY_ASSERT(pBlock->Validate());
// NOTE(review): 'memTypeIndex' is not declared in the visible scope —
// VMA_DEBUG_LOG is typically compiled out; confirm against the full file.
6149 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
6152 if(pBlock->m_Metadata.IsEmpty())
// Already have an empty block: this one is redundant, schedule deletion.
6155 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
6157 pBlockToDelete = pBlock;
6163 m_HasEmptyBlock =
true;
// This free did not empty pBlock, but if an empty block exists at the
// back, it can now be trimmed.
6168 else if(m_HasEmptyBlock)
6170 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
6171 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
6173 pBlockToDelete = pLastBlock;
6174 m_Blocks.pop_back();
6175 m_HasEmptyBlock =
false;
6179 IncrementallySortBlocks();
// Destruction happens outside the lock to avoid holding it across
// vkFreeMemory.
6184 if(pBlockToDelete != VMA_NULL)
6186 VMA_DEBUG_LOG(
" Deleted empty allocation");
6187 pBlockToDelete->Destroy(m_hAllocator);
6188 vma_delete(m_hAllocator, pBlockToDelete);
// Removes the given block pointer from m_Blocks (linear search). Does not
// destroy the block; ownership passes to the caller.
6192 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
6194 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6196 if(m_Blocks[blockIndex] == pBlock)
6198 VmaVectorRemove(m_Blocks, blockIndex);
6205 void VmaBlockVector::IncrementallySortBlocks()
6208 for(
size_t i = 1; i < m_Blocks.size(); ++i)
6210 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
6212 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
// Allocates a new VkDeviceMemory of blockSize for this vector's memory type,
// wraps it in a VmaDeviceMemoryBlock, appends it to m_Blocks, and optionally
// reports its index via pNewBlockIndex.
6218 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
6220 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6221 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
6222 allocInfo.allocationSize = blockSize;
6223 VkDeviceMemory mem = VK_NULL_HANDLE;
6224 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
6233 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
6237 allocInfo.allocationSize);
6239 m_Blocks.push_back(pBlock);
6240 if(pNewBlockIndex != VMA_NULL)
6242 *pNewBlockIndex = m_Blocks.size() - 1;
// Writes this vector's state as JSON (under the mutex): for custom pools the
// memory type, block size and min/max/current block counts; then each block's
// detailed map keyed by index.
6248 #if VMA_STATS_STRING_ENABLED 6250 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
6252 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6258 json.WriteString(
"MemoryTypeIndex");
6259 json.WriteNumber(m_MemoryTypeIndex);
6261 json.WriteString(
"BlockSize");
6262 json.WriteNumber(m_PreferredBlockSize);
6264 json.WriteString(
"BlockCount");
6265 json.BeginObject(
true);
6266 if(m_MinBlockCount > 0)
6268 json.WriteString(
"Min");
6269 json.WriteNumber(m_MinBlockCount);
6271 if(m_MaxBlockCount < SIZE_MAX)
6273 json.WriteString(
"Max");
6274 json.WriteNumber(m_MaxBlockCount);
6276 json.WriteString(
"Cur");
6277 json.WriteNumber(m_Blocks.size());
6280 if(m_FrameInUseCount > 0)
6282 json.WriteString(
"FrameInUseCount");
6283 json.WriteNumber(m_FrameInUseCount);
// Non-custom-pool (default) vectors report only the preferred block size.
6288 json.WriteString(
"PreferredBlockSize");
6289 json.WriteNumber(m_PreferredBlockSize);
6292 json.WriteString(
"Blocks");
6294 for(
size_t i = 0; i < m_Blocks.size(); ++i)
6296 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
// Lazily creates the defragmentator for this block vector (at most one) and
// returns it.
6303 #endif // #if VMA_STATS_STRING_ENABLED 6305 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
6306 VmaAllocator hAllocator,
6307 uint32_t currentFrameIndex)
6309 if(m_pDefragmentator == VMA_NULL)
6311 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
6317 return m_pDefragmentator;
// Runs the defragmentator within the given byte/allocation move budgets,
// accumulates moved-bytes/moved-allocations into pDefragmentationStats, and
// afterwards destroys blocks that became empty (down to m_MinBlockCount),
// crediting their size to bytesFreed. Requires EnsureDefragmentator to have
// been called first.
6320 VkResult VmaBlockVector::Defragment(
6322 VkDeviceSize& maxBytesToMove,
6323 uint32_t& maxAllocationsToMove)
6325 if(m_pDefragmentator == VMA_NULL)
6330 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6333 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
6336 if(pDefragmentationStats != VMA_NULL)
6338 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
6339 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
// The defragmentator must not exceed the caller-imposed budgets.
6342 VMA_ASSERT(bytesMoved <= maxBytesToMove);
6343 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Free empty blocks left behind by the moves; iterate backwards because
// VmaVectorRemove shifts subsequent elements.
6349 m_HasEmptyBlock =
false;
6350 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
6352 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
6353 if(pBlock->m_Metadata.IsEmpty())
6355 if(m_Blocks.size() > m_MinBlockCount)
6357 if(pDefragmentationStats != VMA_NULL)
6360 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
6363 VmaVectorRemove(m_Blocks, blockIndex);
6364 pBlock->Destroy(m_hAllocator);
6365 vma_delete(m_hAllocator, pBlock);
// Could not delete (at min block count): remember an empty block remains.
6369 m_HasEmptyBlock =
true;
6377 void VmaBlockVector::DestroyDefragmentator()
6379 if(m_pDefragmentator != VMA_NULL)
6381 vma_delete(m_hAllocator, m_pDefragmentator);
6382 m_pDefragmentator = VMA_NULL;
// Makes lost all lost-capable allocations in every block of this vector,
// under the mutex. The per-block lost counts feed pLostAllocationCount
// (accumulation lines not visible in this view).
6386 void VmaBlockVector::MakePoolAllocationsLost(
6387 uint32_t currentFrameIndex,
6388 size_t* pLostAllocationCount)
6390 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6392 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6394 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6396 pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
// Accumulates per-block statistics into the global stats structure: each
// block's stat info is added to the totals, to its memory type bucket, and to
// its memory heap bucket.
6400 void VmaBlockVector::AddStats(
VmaStats* pStats)
6402 const uint32_t memTypeIndex = m_MemoryTypeIndex;
6403 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
6405 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6407 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6409 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6411 VMA_HEAVY_ASSERT(pBlock->Validate());
6413 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
6414 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6415 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6416 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Constructs a defragmentator bound to one block vector. Both containers use
// the allocator's custom allocation callbacks via VmaStlAllocator.
// NOTE(review): the initializer for m_BytesMoved (original line 6430) is
// missing from this extract; it is zero-initialized in the full source.
6423 VmaDefragmentator::VmaDefragmentator(
6424 VmaAllocator hAllocator,
6425 VmaBlockVector* pBlockVector,
6426 uint32_t currentFrameIndex) :
6427 m_hAllocator(hAllocator),
6428 m_pBlockVector(pBlockVector),
6429 m_CurrentFrameIndex(currentFrameIndex),
6431 m_AllocationsMoved(0),
6432 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
6433 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
6437 VmaDefragmentator::~VmaDefragmentator()
6439 for(
size_t i = m_Blocks.size(); i--; )
6441 vma_delete(m_hAllocator, m_Blocks[i]);
6445 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
6447 AllocationInfo allocInfo;
6448 allocInfo.m_hAllocation = hAlloc;
6449 allocInfo.m_pChanged = pChanged;
6450 m_Allocations.push_back(allocInfo);
// Returns a CPU pointer to the block's memory, in order of preference:
// (1) a mapping already created for defragmentation, (2) the block's existing
// persistent mapping, (3) a fresh Map() whose pointer is cached for later
// Unmap(). Early paths return without mapping again.
6453 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData)
// Case 1: already mapped by a previous EnsureMapping call in this pass.
6456 if(m_pMappedDataForDefragmentation)
6458 *ppMappedData = m_pMappedDataForDefragmentation;
// Case 2: block is persistently mapped by the application.
6463 if(m_pBlock->m_Mapping.GetMappedData())
6465 *ppMappedData = m_pBlock->m_Mapping.GetMappedData();
// Case 3: map now; cached pointer makes Unmap() know we own this mapping.
6470 VkResult res = m_pBlock->Map(hAllocator, &m_pMappedDataForDefragmentation);
6471 *ppMappedData = m_pMappedDataForDefragmentation;
// Undoes a mapping created by EnsureMapping case 3 (only when this BlockInfo
// owns the mapping); block mappings owned by the application are left alone.
// NOTE(review): the reset of m_pMappedDataForDefragmentation after unmap is
// not visible in this extract — confirm against full source.
6475 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
6477 if(m_pMappedDataForDefragmentation != VMA_NULL)
6479 m_pBlock->Unmap(hAllocator);
// One pass of defragmentation: repeatedly takes the last allocation of the
// last block (largest-index source) and tries to re-place it into an earlier
// block / smaller offset, memcpy-ing the data and updating block metadata.
// Stops with VK_INCOMPLETE when either budget (bytes or allocation count)
// would be exceeded; several early-return lines are missing from this extract.
6483 VkResult VmaDefragmentator::DefragmentRound(
6484 VkDeviceSize maxBytesToMove,
6485 uint32_t maxAllocationsToMove)
6487 if(m_Blocks.empty())
// Source cursor starts at the end; srcAllocIndex == SIZE_MAX means
// "not yet positioned" and is re-derived below.
6492 size_t srcBlockIndex = m_Blocks.size() - 1;
6493 size_t srcAllocIndex = SIZE_MAX;
// Advance the source cursor backwards to the next block that still has
// movable allocations; terminates the round when block 0 is exhausted.
6499 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
6501 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
6504 if(srcBlockIndex == 0)
6511 srcAllocIndex = SIZE_MAX;
6516 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
6520 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
6521 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
6523 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
6524 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
6525 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
6526 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try destination blocks from the front, up to and including the source
// block itself (same-block moves still compact toward offset 0).
6529 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
6531 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
6532 VmaAllocationRequest dstAllocRequest;
6533 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
6534 m_CurrentFrameIndex,
6535 m_pBlockVector->GetFrameInUseCount(),
6536 m_pBlockVector->GetBufferImageGranularity(),
6541 &dstAllocRequest) &&
// MoveMakesSense filters out moves that would not improve compaction.
6543 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
6545 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Budget check: abort the round before exceeding either limit.
6548 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
6549 (m_BytesMoved + size > maxBytesToMove))
6551 return VK_INCOMPLETE;
// Both blocks must be host-visible-mapped for the CPU-side copy.
6554 void* pDstMappedData = VMA_NULL;
6555 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
6556 if(res != VK_SUCCESS)
6561 void* pSrcMappedData = VMA_NULL;
6562 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
6563 if(res != VK_SUCCESS)
// memcpy of the allocation payload (call line missing from extract).
6570 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
6571 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
6572 static_cast<size_t>(size));
// Metadata bookkeeping: allocate at destination, free at source, then
// repoint the allocation handle at its new block/offset.
6574 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
6575 pSrcBlockInfo->m_pBlock->m_Metadata.Free(allocInfo.m_hAllocation);
6577 allocInfo.m_hAllocation->ChangeBlockAllocation(pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
// Report the move to the caller-supplied per-allocation flag.
6579 if(allocInfo.m_pChanged != VMA_NULL)
6581 *allocInfo.m_pChanged = VK_TRUE;
6584 ++m_AllocationsMoved;
6585 m_BytesMoved += size;
6587 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Cursor advance for the next iteration (bodies partly missing).
6595 if(srcAllocIndex > 0)
6601 if(srcBlockIndex > 0)
6604 srcAllocIndex = SIZE_MAX;
// Full defragmentation driver: builds per-block BlockInfo structures, buckets
// the registered (non-lost) allocations into their owning blocks, sorts blocks
// into move-destination order, runs up to two DefragmentRound passes, then
// unmaps anything mapped for the copy. No-op when no allocations registered.
6614 VkResult VmaDefragmentator::Defragment(
6615 VkDeviceSize maxBytesToMove,
6616 uint32_t maxAllocationsToMove)
6618 if(m_Allocations.empty())
// One BlockInfo per block of the underlying vector.
6624 const size_t blockCount = m_pBlockVector->m_Blocks.size();
6625 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6627 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
6628 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
6629 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so allocations can be bucketed via binary search.
6633 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
6636 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
6638 AllocationInfo& allocInfo = m_Allocations[blockIndex];
// Lost allocations are skipped — there is nothing left to move.
6640 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6642 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
6643 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
6644 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
6646 (*it)->m_Allocations.push_back(allocInfo);
6654 m_Allocations.clear();
// Per-block preprocessing: flag unmovable content, order candidates
// largest-first so big allocations are placed before fragmentation grows.
6656 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6658 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
6659 pBlockInfo->CalcHasNonMovableAllocations();
6660 pBlockInfo->SortAllocationsBySizeDescecnding();
// Reorder blocks so preferred move destinations come first.
6664 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// At most 2 rounds; stops early on VK_INCOMPLETE or failure.
6667 VkResult result = VK_SUCCESS;
6668 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
6670 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Release any mappings EnsureMapping created during the copy phase.
6674 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6676 m_Blocks[blockIndex]->Unmap(m_hAllocator);
6682 bool VmaDefragmentator::MoveMakesSense(
6683 size_t dstBlockIndex, VkDeviceSize dstOffset,
6684 size_t srcBlockIndex, VkDeviceSize srcOffset)
6686 if(dstBlockIndex < srcBlockIndex)
6690 if(dstBlockIndex > srcBlockIndex)
6694 if(dstOffset < srcOffset)
// Allocator constructor (signature at original lines 6705-6706 is missing from
// this extract). Caches device handles and callbacks, zeroes all cached
// structures, imports Vulkan entry points, queries device/memory properties,
// applies optional per-heap size limits, then creates one default block
// vector and one dedicated-allocation list per memory type.
6707 m_PhysicalDevice(pCreateInfo->physicalDevice),
6708 m_hDevice(pCreateInfo->device),
6709 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
// Fall back to empty (default) CPU allocation callbacks when none given.
6710 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
6711 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
6712 m_PreferredLargeHeapBlockSize(0),
6713 m_PreferredSmallHeapBlockSize(0),
6714 m_CurrentFrameIndex(0),
6715 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
// Zero every cached struct/array before filling them below.
6719 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
6720 memset(&m_MemProps, 0,
sizeof(m_MemProps));
6721 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
6723 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
6724 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// VK_WHOLE_SIZE sentinel == "no limit" for this heap.
6726 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6728 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
// Query device properties through the imported function pointers.
6739 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
6740 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Apply caller-provided per-heap limits, also shrinking the reported heap
// size so block-size heuristics respect the limit.
6749 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
6751 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
6752 if(limit != VK_WHOLE_SIZE)
6754 m_HeapSizeLimit[heapIndex] = limit;
6755 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
6757 m_MemProps.memoryHeaps[heapIndex].size = limit;
// Per-memory-type default containers (constructor argument lines partly
// missing from this extract).
6763 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6765 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
6767 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
6773 GetBufferImageGranularity(),
6778 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
6782 VmaAllocator_T::~VmaAllocator_T()
6784 VMA_ASSERT(m_Pools.empty());
6786 for(
size_t i = GetMemoryTypeCount(); i--; )
6788 vma_delete(
this, m_pDedicatedAllocations[i]);
6789 vma_delete(
this, m_pBlockVectors[i]);
// Fills m_VulkanFunctions. With static linking (VMA_STATIC_VULKAN_FUNCTIONS)
// the global vk* entry points are taken directly, and the two KHR dedicated-
// allocation functions are fetched via vkGetDeviceProcAddr when that
// extension is enabled. Any caller-supplied pointers then override the
// defaults, and finally every required pointer is asserted non-null.
6793 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
6795 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6796 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
6797 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
6798 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
6799 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
6800 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
6801 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
6802 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
6803 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
6804 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
6805 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
6806 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
6807 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
6808 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
6809 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// Extension functions are device-level: must come from vkGetDeviceProcAddr.
6810 if(m_UseKhrDedicatedAllocation)
6812 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
6813 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
6814 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
6815 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// Caller-supplied overrides win over any statically resolved pointer.
6817 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6819 #define VMA_COPY_IF_NOT_NULL(funcName) \ 6820 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 6822 if(pVulkanFunctions != VMA_NULL)
6824 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
6825 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
6826 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
6827 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
6828 VMA_COPY_IF_NOT_NULL(vkMapMemory);
6829 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
6830 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
6831 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
6832 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
6833 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
6834 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
6835 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
6836 VMA_COPY_IF_NOT_NULL(vkCreateImage);
6837 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
6838 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
6839 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Sanity: every core function must be resolved one way or another.
6842 #undef VMA_COPY_IF_NOT_NULL 6846 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
6847 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
6848 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
6849 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
6850 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
6851 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
6852 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
6853 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
6854 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
6855 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
6856 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
6857 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
6858 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
6859 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
// KHR functions are only required when the extension is actually used.
6860 if(m_UseKhrDedicatedAllocation)
6862 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
6863 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
6867 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
6869 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
6870 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
6871 return (heapSize <= VMA_SMALL_HEAP_MAX_SIZE) ?
6872 m_PreferredSmallHeapBlockSize : m_PreferredLargeHeapBlockSize;
// Allocates memory of one specific memory type: prefers a dedicated
// VkDeviceMemory for large or explicitly-dedicated requests, otherwise
// suballocates from the type's default block vector, falling back to a
// dedicated allocation if block allocation fails. Several lines (createInfo
// fix-ups, argument lists, early returns) are missing from this extract.
6875 VkResult VmaAllocator_T::AllocateMemoryOfType(
6876 const VkMemoryRequirements& vkMemReq,
6877 bool dedicatedAllocation,
6878 VkBuffer dedicatedBuffer,
6879 VkImage dedicatedImage,
6881 uint32_t memTypeIndex,
6882 VmaSuballocationType suballocType,
6883 VmaAllocation* pAllocation)
6885 VMA_ASSERT(pAllocation != VMA_NULL);
6886 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Mapping flags are meaningless on non-HOST_VISIBLE types (context partly
// missing — presumably the MAPPED flag is stripped here; confirm).
6892 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
6897 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
6898 VMA_ASSERT(blockVector);
// Heuristic: requests over half the preferred block size go dedicated.
6900 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
6901 bool preferDedicatedMemory =
6902 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
6903 dedicatedAllocation ||
6905 vkMemReq.size > preferredBlockSize / 2;
6907 if(preferDedicatedMemory &&
6909 finalCreateInfo.
pool == VK_NULL_HANDLE)
// NEVER_ALLOCATE forbids new device memory -> fail (condition line missing).
6918 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6922 return AllocateDedicatedMemory(
// Primary path: suballocate from the default block vector.
6936 VkResult res = blockVector->Allocate(
6938 m_CurrentFrameIndex.load(),
6943 if(res == VK_SUCCESS)
// Block allocation failed; NEVER_ALLOCATE also forbids the fallback.
6951 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6955 res = AllocateDedicatedMemory(
6961 finalCreateInfo.pUserData,
6965 if(res == VK_SUCCESS)
6968 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
6974 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates one dedicated VkDeviceMemory for a single resource, optionally
// chaining VkMemoryDedicatedAllocateInfoKHR when the extension is enabled,
// optionally mapping it persistently, then registers the allocation in the
// sorted per-type dedicated-allocation list. Some parameter lines and the
// mapping-condition lines are missing from this extract.
6981 VkResult VmaAllocator_T::AllocateDedicatedMemory(
6983 VmaSuballocationType suballocType,
6984 uint32_t memTypeIndex,
6986 bool isUserDataString,
6988 VkBuffer dedicatedBuffer,
6989 VkImage dedicatedImage,
6990 VmaAllocation* pAllocation)
6992 VMA_ASSERT(pAllocation);
6994 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6995 allocInfo.memoryTypeIndex = memTypeIndex;
6996 allocInfo.allocationSize = size;
// Chain dedicated-allocation info for exactly one of buffer/image.
6998 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
6999 if(m_UseKhrDedicatedAllocation)
7001 if(dedicatedBuffer != VK_NULL_HANDLE)
7003 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
7004 dedicatedAllocInfo.buffer = dedicatedBuffer;
7005 allocInfo.pNext = &dedicatedAllocInfo;
7007 else if(dedicatedImage != VK_NULL_HANDLE)
7009 dedicatedAllocInfo.image = dedicatedImage;
7010 allocInfo.pNext = &dedicatedAllocInfo;
// Device memory allocation goes through the heap-size-limit wrapper.
7015 VkDeviceMemory hMemory = VK_NULL_HANDLE;
7016 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
7019 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Optional persistent map (the condition guarding this is missing here;
// presumably a "map" flag — confirm against full source).
7023 void* pMappedData =
nullptr;
7026 res = (*m_VulkanFunctions.vkMapMemory)(
7035 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
// Mapping failed: release the freshly allocated memory before returning.
7036 FreeVulkanMemory(memTypeIndex, size, hMemory);
7041 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
7042 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
7043 (*pAllocation)->SetUserData(
this, pUserData);
// Register in the per-type list (kept sorted for fast removal).
7047 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7048 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7049 VMA_ASSERT(pDedicatedAllocations);
7050 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
7053 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
// Queries memory requirements for a buffer. With VK_KHR_dedicated_allocation
// enabled, uses vkGetBufferMemoryRequirements2KHR and also reports whether a
// dedicated allocation is required/preferred; otherwise falls back to the
// core function and reports false for both flags.
// (The VkBuffer parameter line is missing from this extract.)
7058 void VmaAllocator_T::GetBufferMemoryRequirements(
7060 VkMemoryRequirements& memReq,
7061 bool& requiresDedicatedAllocation,
7062 bool& prefersDedicatedAllocation)
const 7064 if(m_UseKhrDedicatedAllocation)
7066 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
7067 memReqInfo.buffer = hBuffer;
// Chain the dedicated-requirements struct to receive the two flags.
7069 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7071 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7072 memReq2.pNext = &memDedicatedReq;
7074 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7076 memReq = memReq2.memoryRequirements;
7077 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7078 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core query; dedicated-allocation hints unavailable.
7082 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
7083 requiresDedicatedAllocation =
false;
7084 prefersDedicatedAllocation =
false;
// Image counterpart of GetBufferMemoryRequirements: uses
// vkGetImageMemoryRequirements2KHR when VK_KHR_dedicated_allocation is
// enabled (reporting the required/preferred dedicated flags), otherwise the
// core function with both flags false.
// (The VkImage parameter line is missing from this extract.)
7088 void VmaAllocator_T::GetImageMemoryRequirements(
7090 VkMemoryRequirements& memReq,
7091 bool& requiresDedicatedAllocation,
7092 bool& prefersDedicatedAllocation)
const 7094 if(m_UseKhrDedicatedAllocation)
7096 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
7097 memReqInfo.image = hImage;
7099 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7101 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7102 memReq2.pNext = &memDedicatedReq;
7104 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7106 memReq = memReq2.memoryRequirements;
7107 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7108 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
// Fallback: core query; dedicated-allocation hints unavailable.
7112 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
7113 requiresDedicatedAllocation =
false;
7114 prefersDedicatedAllocation =
false;
// Top-level allocation entry point: validates mutually exclusive create
// flags, routes pool allocations directly to the pool's block vector, and
// otherwise iterates candidate memory types (best first), masking out each
// failed type and retrying until success or no types remain. Several flag
// checks, the createInfo parameter line, and the type-selection calls are
// missing from this extract.
7118 VkResult VmaAllocator_T::AllocateMemory(
7119 const VkMemoryRequirements& vkMemReq,
7120 bool requiresDedicatedAllocation,
7121 bool prefersDedicatedAllocation,
7122 VkBuffer dedicatedBuffer,
7123 VkImage dedicatedImage,
7125 VmaSuballocationType suballocType,
7126 VmaAllocation* pAllocation)
// Invalid flag combinations are programmer errors: assert, then fail.
7131 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
7132 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7137 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
7138 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Requests that require dedicated memory are incompatible with
// NEVER_ALLOCATE and with custom pools.
7140 if(requiresDedicatedAllocation)
7144 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
7145 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7147 if(createInfo.
pool != VK_NULL_HANDLE)
7149 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
7150 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7153 if((createInfo.
pool != VK_NULL_HANDLE) &&
7156 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
7157 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom pool: delegate straight to the pool's block vector.
7160 if(createInfo.
pool != VK_NULL_HANDLE)
7162 return createInfo.
pool->m_BlockVector.Allocate(
7164 m_CurrentFrameIndex.load(),
// Default path: pick the best memory type from the requirement bits
// (the vmaFindMemoryTypeIndex-style call is missing from this extract),
// then retry with the next-best type on failure.
7173 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
7174 uint32_t memTypeIndex = UINT32_MAX;
7176 if(res == VK_SUCCESS)
7178 res = AllocateMemoryOfType(
7180 requiresDedicatedAllocation || prefersDedicatedAllocation,
7188 if(res == VK_SUCCESS)
// Exclude the failed type and look for the next candidate.
7198 memoryTypeBits &= ~(1u << memTypeIndex);
7201 if(res == VK_SUCCESS)
7203 res = AllocateMemoryOfType(
7205 requiresDedicatedAllocation || prefersDedicatedAllocation,
7213 if(res == VK_SUCCESS)
// No memory type could satisfy the request.
7223 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation: block allocations return to their owning block vector
// (the pool's, or the default per-type one), dedicated allocations release
// their VkDeviceMemory. Already-lost allocations skip the release step. In
// all cases user data is cleared and the handle object is destroyed.
7234 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
7236 VMA_ASSERT(allocation);
// Lost allocations have no backing memory left to release.
7238 if(allocation->CanBecomeLost() ==
false ||
7239 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7241 switch(allocation->GetType())
7243 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7245 VmaBlockVector* pBlockVector = VMA_NULL;
7246 VmaPool hPool = allocation->GetPool();
// Pool allocations free into the pool's vector, others into the
// default vector of their memory type.
7247 if(hPool != VK_NULL_HANDLE)
7249 pBlockVector = &hPool->m_BlockVector;
7253 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7254 pBlockVector = m_pBlockVectors[memTypeIndex];
7256 pBlockVector->Free(allocation);
7259 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7260 FreeDedicatedMemory(allocation);
// Destroy the handle object itself (frees user-data string if owned).
7267 allocation->SetUserData(
this, VMA_NULL);
7268 vma_delete(
this, allocation);
// Fills pStats with aggregate statistics: initializes all stat-infos, then
// accumulates default block vectors, custom pools, and dedicated allocations
// into total / per-memory-type / per-heap buckets, and finally postprocesses
// (averages etc.) each bucket. Some init/loop-body lines are missing from
// this extract.
7271 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
// Reset all output buckets before accumulation.
7274 InitStatInfo(pStats->
total);
7275 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
7277 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default block vectors, one per memory type.
7281 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7283 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7284 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
7285 VMA_ASSERT(pBlockVector);
7286 pBlockVector->AddStats(pStats);
// Custom pools (under the pools mutex).
7291 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7292 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
7294 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
// Dedicated allocations (under each per-type mutex).
7299 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7301 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7302 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7303 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7304 VMA_ASSERT(pDedicatedAllocVector);
7305 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
// NOTE(review): the declaration of allocationStatInfo appears to be
// on a line missing from this extract.
7308 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
7309 VmaAddStatInfo(pStats->
total, allocationStatInfo);
7310 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
7311 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Postprocess: derive averages and other summary fields per bucket.
7316 VmaPostprocessCalcStatInfo(pStats->
total);
7317 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
7318 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
7319 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
7320 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// 4098 == 0x1002, the PCI vendor ID of AMD (used to detect AMD hardware).
7323 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Allocator-wide defragmentation: zeroes the output arrays/stats, registers
// each eligible allocation (block-type, HOST_VISIBLE, not lost) with the
// defragmentator of its owning block vector, runs Defragment on every
// host-visible default vector and on every pool, then tears down all
// defragmentators. Some parameter lines and loop increments are missing
// from this extract.
7325 VkResult VmaAllocator_T::Defragment(
7326 VmaAllocation* pAllocations,
7327 size_t allocationCount,
7328 VkBool32* pAllocationsChanged,
7332 if(pAllocationsChanged != VMA_NULL)
// NOTE(review): sizeof(*pAllocationsChanged) clears only ONE VkBool32,
// not the whole array of allocationCount elements — likely should be
// allocationCount * sizeof(VkBool32); confirm against full source.
7334 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
7336 if(pDefragmentationStats != VMA_NULL)
7338 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
7341 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
// Pools are locked for the whole operation so the pool set is stable.
7343 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
7345 const size_t poolCount = m_Pools.size();
// Distribute the caller's allocations to per-vector defragmentators.
7348 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
7350 VmaAllocation hAlloc = pAllocations[allocIndex];
7352 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
// Only block-suballocated, host-visible, non-lost allocations can move.
7354 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
7356 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
7358 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
7360 VmaBlockVector* pAllocBlockVector =
nullptr;
7362 const VmaPool hAllocPool = hAlloc->GetPool();
7364 if(hAllocPool != VK_NULL_HANDLE)
7366 pAllocBlockVector = &hAllocPool->GetBlockVector();
7371 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
7374 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
7376 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
7377 &pAllocationsChanged[allocIndex] : VMA_NULL;
7378 pDefragmentator->AddAllocation(hAlloc, pChanged);
7382 VkResult result = VK_SUCCESS;
// Unlimited budgets unless the caller provided limits.
7386 VkDeviceSize maxBytesToMove = SIZE_MAX;
7387 uint32_t maxAllocationsToMove = UINT32_MAX;
7388 if(pDefragmentationInfo != VMA_NULL)
// Run defragmentation on every host-visible default vector...
7395 for(uint32_t memTypeIndex = 0;
7396 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
7400 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7402 result = m_pBlockVectors[memTypeIndex]->Defragment(
7403 pDefragmentationStats,
7405 maxAllocationsToMove);
// ...and on every custom pool.
7410 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
7412 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
7413 pDefragmentationStats,
7415 maxAllocationsToMove);
// Cleanup: defragmentators are one-shot objects.
7421 for(
size_t poolIndex = poolCount; poolIndex--; )
7423 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
7427 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
7429 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7431 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// Fills pAllocationInfo. For allocations that can become lost, this also
// "touches" the allocation: it atomically bumps the last-use frame index to
// the current frame (compare-exchange retry loop) unless the allocation is
// already lost, in which case memoryType/deviceMemory are reported as lost
// (those assignment lines are missing from this extract).
7438 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
7440 if(hAllocation->CanBecomeLost())
// Retry loop: re-read the last-use frame until we either observe LOST
// or successfully advance it to the current frame.
7446 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7447 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
7450 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Lost: report size/userData but no usable memory location.
7454 pAllocationInfo->
offset = 0;
7455 pAllocationInfo->
size = hAllocation->GetSize();
7457 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report the real location.
7460 else if(localLastUseFrameIndex == localCurrFrameIndex)
7462 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7463 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7464 pAllocationInfo->
offset = hAllocation->GetOffset();
7465 pAllocationInfo->
size = hAllocation->GetSize();
7467 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise try to advance the frame index; on failure, loop again.
7472 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7474 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable allocations: plain read of all fields.
7481 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7482 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7483 pAllocationInfo->
offset = hAllocation->GetOffset();
7484 pAllocationInfo->
size = hAllocation->GetSize();
7485 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
7486 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Creates a custom memory pool: constructs the VmaPool_T (the newCreateInfo
// fix-up lines, e.g. defaulting block counts, are missing from this extract),
// pre-creates its minimum number of blocks, and on success registers the pool
// in the sorted m_Pools list; on failure the pool object is destroyed.
7490 VkResult VmaAllocator_T::CreatePool(
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
7492 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
7505 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
// Eagerly allocate the pool's minimum block count; roll back on failure.
7507 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
7508 if(res != VK_SUCCESS)
7510 vma_delete(
this, *pPool);
// Register under the pools mutex (sorted for fast removal).
7517 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7518 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
7524 void VmaAllocator_T::DestroyPool(VmaPool pool)
7528 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7529 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
7530 VMA_ASSERT(success &&
"Pool not found in Allocator.");
7533 vma_delete(
this, pool);
7536 void VmaAllocator_T::GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats)
7538 pool->m_BlockVector.GetPoolStats(pPoolStats);
7541 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
7543 m_CurrentFrameIndex.store(frameIndex);
// Forwards to the pool's block vector, marking its eligible allocations as
// lost relative to the current frame index.
// (The VmaPool hPool parameter line is missing from this extract.)
7546 void VmaAllocator_T::MakePoolAllocationsLost(
7548 size_t* pLostAllocationCount)
7550 hPool->m_BlockVector.MakePoolAllocationsLost(
7551 m_CurrentFrameIndex.load(),
7552 pLostAllocationCount);
7555 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
7557 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
7558 (*pAllocation)->InitLost();
// Wrapper around vkAllocateMemory that enforces the optional per-heap size
// limit: when the heap is limited, the allocation is accounted against the
// remaining budget under the limit mutex (failing with OUT_OF_DEVICE_MEMORY
// when the budget is exceeded); unlimited heaps call straight through. On
// success the user's pfnAllocate callback is invoked. The declaration of
// `res` and the final return are on lines missing from this extract.
7561 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
7563 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
// Limited heap: budget check + decrement must be atomic w.r.t. frees.
7566 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7568 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7569 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
7571 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7572 if(res == VK_SUCCESS)
7574 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
// Budget exhausted: simulate device-out-of-memory.
7579 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Unlimited heap: plain allocation.
7584 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
// Notify the application's device-memory allocation callback.
7587 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
7589 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
7595 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
7597 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
7599 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
7602 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
7604 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
7605 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7607 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7608 m_HeapSizeLimit[heapIndex] += size;
// Maps an allocation for CPU access. Lost-capable allocations cannot be
// mapped. Block allocations map the whole block (reference-counted via
// BlockAllocMap) and offset the returned pointer; dedicated allocations
// delegate to their own map. Some return lines and the default case are
// missing from this extract.
7612 VkResult VmaAllocator_T::Map(VmaAllocation hAllocation,
void** ppData)
7614 if(hAllocation->CanBecomeLost())
7616 return VK_ERROR_MEMORY_MAP_FAILED;
7619 switch(hAllocation->GetType())
7621 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7623 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
7624 char *pBytes =
nullptr;
7625 VkResult res = pBlock->Map(
this, (
void**)&pBytes);
7626 if(res == VK_SUCCESS)
// Caller sees a pointer to this allocation's bytes, not block start.
7628 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
7629 hAllocation->BlockAllocMap();
7633 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7634 return hAllocation->DedicatedAllocMap(
this, ppData);
7637 return VK_ERROR_MEMORY_MAP_FAILED;
// Reverses Map(): block allocations decrement their map reference count and
// unmap the owning block; dedicated allocations unmap directly. The break
// statements / default case are on lines missing from this extract.
7641 void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
7643 switch(hAllocation->GetType())
7645 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7647 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
7648 hAllocation->BlockAllocUnmap();
7649 pBlock->Unmap(
this);
7652 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7653 hAllocation->DedicatedAllocUnmap(
this);
// Releases a dedicated allocation: removes it from the per-type sorted list
// (under that type's mutex), unmaps it if it was persistently mapped, and
// frees the VkDeviceMemory through the heap-budget-aware wrapper.
7660 void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
7662 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
7664 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
// Unregister under the per-memory-type mutex (scoped block in full source).
7666 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7667 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7668 VMA_ASSERT(pDedicatedAllocations);
7669 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
7670 VMA_ASSERT(success);
7673 VkDeviceMemory hMemory = allocation->GetMemory();
// Persistently mapped dedicated memory must be unmapped before freeing.
7675 if(allocation->GetMappedData() != VMA_NULL)
7677 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
7680 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
7682 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Writes a detailed JSON description of the allocator state (only compiled
// when VMA_STATS_STRING_ENABLED): dedicated allocations per memory type,
// non-empty default block vectors, then custom pools. Headings are emitted
// lazily on first non-empty section. Many json.Begin/End calls are on lines
// missing from this extract.
7685 #if VMA_STATS_STRING_ENABLED 7687 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
// Section 1: dedicated allocations, grouped by memory type.
7689 bool dedicatedAllocationsStarted =
false;
7690 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7692 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7693 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7694 VMA_ASSERT(pDedicatedAllocVector);
7695 if(pDedicatedAllocVector->empty() ==
false)
// Emit the section heading only once, before the first entry.
7697 if(dedicatedAllocationsStarted ==
false)
7699 dedicatedAllocationsStarted =
true;
7700 json.WriteString(
"DedicatedAllocations");
7704 json.BeginString(
"Type ");
7705 json.ContinueString(memTypeIndex);
7710 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
7712 const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
7713 json.BeginObject(
true);
7715 json.WriteString(
"Type");
7716 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
7718 json.WriteString(
"Size");
7719 json.WriteNumber(hAlloc->GetSize());
// User data is emitted as a string or as a pointer value.
7721 const void* pUserData = hAlloc->GetUserData();
7722 if(pUserData != VMA_NULL)
7724 json.WriteString(
"UserData");
7725 if(hAlloc->IsUserDataString())
7727 json.WriteString((
const char*)pUserData);
7732 json.ContinueString_Pointer(pUserData);
7743 if(dedicatedAllocationsStarted)
// Section 2: non-empty default block vectors, one per memory type.
7749 bool allocationsStarted =
false;
7750 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7752 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
7754 if(allocationsStarted ==
false)
7756 allocationsStarted =
true;
7757 json.WriteString(
"DefaultPools");
7761 json.BeginString(
"Type ");
7762 json.ContinueString(memTypeIndex);
7765 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
7768 if(allocationsStarted)
// Section 3: custom pools (under the pools mutex).
7775 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7776 const size_t poolCount = m_Pools.size();
7779 json.WriteString(
"Pools");
7781 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
7783 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
7790 #endif // #if VMA_STATS_STRING_ENABLED 7792 static VkResult AllocateMemoryForImage(
7793 VmaAllocator allocator,
7796 VmaSuballocationType suballocType,
7797 VmaAllocation* pAllocation)
7799 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
7801 VkMemoryRequirements vkMemReq = {};
7802 bool requiresDedicatedAllocation =
false;
7803 bool prefersDedicatedAllocation =
false;
7804 allocator->GetImageMemoryRequirements(image, vkMemReq,
7805 requiresDedicatedAllocation, prefersDedicatedAllocation);
7807 return allocator->AllocateMemory(
7809 requiresDedicatedAllocation,
7810 prefersDedicatedAllocation,
7813 *pAllocationCreateInfo,
7823 VmaAllocator* pAllocator)
7825 VMA_ASSERT(pCreateInfo && pAllocator);
7826 VMA_DEBUG_LOG(
"vmaCreateAllocator");
7832 VmaAllocator allocator)
7834 if(allocator != VK_NULL_HANDLE)
7836 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
7837 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
7838 vma_delete(&allocationCallbacks, allocator);
7843 VmaAllocator allocator,
7844 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
7846 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
7847 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
7851 VmaAllocator allocator,
7852 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
7854 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
7855 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
7859 VmaAllocator allocator,
7860 uint32_t memoryTypeIndex,
7861 VkMemoryPropertyFlags* pFlags)
7863 VMA_ASSERT(allocator && pFlags);
7864 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
7865 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
7869 VmaAllocator allocator,
7870 uint32_t frameIndex)
7872 VMA_ASSERT(allocator);
7873 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
7875 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7877 allocator->SetCurrentFrameIndex(frameIndex);
7881 VmaAllocator allocator,
7884 VMA_ASSERT(allocator && pStats);
7885 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7886 allocator->CalculateStats(pStats);
7889 #if VMA_STATS_STRING_ENABLED 7892 VmaAllocator allocator,
7893 char** ppStatsString,
7894 VkBool32 detailedMap)
7896 VMA_ASSERT(allocator && ppStatsString);
7897 VMA_DEBUG_GLOBAL_MUTEX_LOCK
7899 VmaStringBuilder sb(allocator);
7901 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
7905 allocator->CalculateStats(&stats);
7907 json.WriteString(
"Total");
7908 VmaPrintStatInfo(json, stats.
total);
7910 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
7912 json.BeginString(
"Heap ");
7913 json.ContinueString(heapIndex);
7917 json.WriteString(
"Size");
7918 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
7920 json.WriteString(
"Flags");
7921 json.BeginArray(
true);
7922 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
7924 json.WriteString(
"DEVICE_LOCAL");
7930 json.WriteString(
"Stats");
7931 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
7934 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
7936 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
7938 json.BeginString(
"Type ");
7939 json.ContinueString(typeIndex);
7944 json.WriteString(
"Flags");
7945 json.BeginArray(
true);
7946 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
7947 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
7949 json.WriteString(
"DEVICE_LOCAL");
7951 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7953 json.WriteString(
"HOST_VISIBLE");
7955 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
7957 json.WriteString(
"HOST_COHERENT");
7959 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
7961 json.WriteString(
"HOST_CACHED");
7963 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
7965 json.WriteString(
"LAZILY_ALLOCATED");
7971 json.WriteString(
"Stats");
7972 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
7981 if(detailedMap == VK_TRUE)
7983 allocator->PrintDetailedMap(json);
7989 const size_t len = sb.GetLength();
7990 char*
const pChars = vma_new_array(allocator,
char, len + 1);
7993 memcpy(pChars, sb.GetData(), len);
7996 *ppStatsString = pChars;
8000 VmaAllocator allocator,
8003 if(pStatsString != VMA_NULL)
8005 VMA_ASSERT(allocator);
8006 size_t len = strlen(pStatsString);
8007 vma_delete_array(allocator, pStatsString, len + 1);
8011 #endif // #if VMA_STATS_STRING_ENABLED 8017 VmaAllocator allocator,
8018 uint32_t memoryTypeBits,
8020 uint32_t* pMemoryTypeIndex)
8022 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8023 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8024 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8031 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
8035 switch(pAllocationCreateInfo->
usage)
8040 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8043 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
8046 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8047 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8050 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8051 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
8057 *pMemoryTypeIndex = UINT32_MAX;
8058 uint32_t minCost = UINT32_MAX;
8059 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
8060 memTypeIndex < allocator->GetMemoryTypeCount();
8061 ++memTypeIndex, memTypeBit <<= 1)
8064 if((memTypeBit & memoryTypeBits) != 0)
8066 const VkMemoryPropertyFlags currFlags =
8067 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
8069 if((requiredFlags & ~currFlags) == 0)
8072 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
8074 if(currCost < minCost)
8076 *pMemoryTypeIndex = memTypeIndex;
8086 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
8090 VmaAllocator allocator,
8094 VMA_ASSERT(allocator && pCreateInfo && pPool);
8096 VMA_DEBUG_LOG(
"vmaCreatePool");
8098 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8100 return allocator->CreatePool(pCreateInfo, pPool);
8104 VmaAllocator allocator,
8107 VMA_ASSERT(allocator);
8109 if(pool == VK_NULL_HANDLE)
8114 VMA_DEBUG_LOG(
"vmaDestroyPool");
8116 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8118 allocator->DestroyPool(pool);
8122 VmaAllocator allocator,
8126 VMA_ASSERT(allocator && pool && pPoolStats);
8128 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8130 allocator->GetPoolStats(pool, pPoolStats);
8134 VmaAllocator allocator,
8136 size_t* pLostAllocationCount)
8138 VMA_ASSERT(allocator && pool);
8140 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8142 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
8146 VmaAllocator allocator,
8147 const VkMemoryRequirements* pVkMemoryRequirements,
8149 VmaAllocation* pAllocation,
8152 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
8154 VMA_DEBUG_LOG(
"vmaAllocateMemory");
8156 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8158 VkResult result = allocator->AllocateMemory(
8159 *pVkMemoryRequirements,
8165 VMA_SUBALLOCATION_TYPE_UNKNOWN,
8168 if(pAllocationInfo && result == VK_SUCCESS)
8170 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8177 VmaAllocator allocator,
8180 VmaAllocation* pAllocation,
8183 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8185 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
8187 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8189 VkMemoryRequirements vkMemReq = {};
8190 bool requiresDedicatedAllocation =
false;
8191 bool prefersDedicatedAllocation =
false;
8192 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
8193 requiresDedicatedAllocation,
8194 prefersDedicatedAllocation);
8196 VkResult result = allocator->AllocateMemory(
8198 requiresDedicatedAllocation,
8199 prefersDedicatedAllocation,
8203 VMA_SUBALLOCATION_TYPE_BUFFER,
8206 if(pAllocationInfo && result == VK_SUCCESS)
8208 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8215 VmaAllocator allocator,
8218 VmaAllocation* pAllocation,
8221 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8223 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
8225 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8227 VkResult result = AllocateMemoryForImage(
8231 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
8234 if(pAllocationInfo && result == VK_SUCCESS)
8236 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8243 VmaAllocator allocator,
8244 VmaAllocation allocation)
8246 VMA_ASSERT(allocator && allocation);
8248 VMA_DEBUG_LOG(
"vmaFreeMemory");
8250 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8252 allocator->FreeMemory(allocation);
8256 VmaAllocator allocator,
8257 VmaAllocation allocation,
8260 VMA_ASSERT(allocator && allocation && pAllocationInfo);
8262 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8264 allocator->GetAllocationInfo(allocation, pAllocationInfo);
8268 VmaAllocator allocator,
8269 VmaAllocation allocation,
8272 VMA_ASSERT(allocator && allocation);
8274 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8276 allocation->SetUserData(allocator, pUserData);
8280 VmaAllocator allocator,
8281 VmaAllocation* pAllocation)
8283 VMA_ASSERT(allocator && pAllocation);
8285 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
8287 allocator->CreateLostAllocation(pAllocation);
8291 VmaAllocator allocator,
8292 VmaAllocation allocation,
8295 VMA_ASSERT(allocator && allocation && ppData);
8297 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8299 return allocator->Map(allocation, ppData);
8303 VmaAllocator allocator,
8304 VmaAllocation allocation)
8306 VMA_ASSERT(allocator && allocation);
8308 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8310 allocator->Unmap(allocation);
8314 VmaAllocator allocator,
8315 VmaAllocation* pAllocations,
8316 size_t allocationCount,
8317 VkBool32* pAllocationsChanged,
8321 VMA_ASSERT(allocator && pAllocations);
8323 VMA_DEBUG_LOG(
"vmaDefragment");
8325 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8327 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
8331 VmaAllocator allocator,
8332 const VkBufferCreateInfo* pBufferCreateInfo,
8335 VmaAllocation* pAllocation,
8338 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
8340 VMA_DEBUG_LOG(
"vmaCreateBuffer");
8342 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8344 *pBuffer = VK_NULL_HANDLE;
8345 *pAllocation = VK_NULL_HANDLE;
8348 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
8349 allocator->m_hDevice,
8351 allocator->GetAllocationCallbacks(),
8356 VkMemoryRequirements vkMemReq = {};
8357 bool requiresDedicatedAllocation =
false;
8358 bool prefersDedicatedAllocation =
false;
8359 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
8360 requiresDedicatedAllocation, prefersDedicatedAllocation);
8363 res = allocator->AllocateMemory(
8365 requiresDedicatedAllocation,
8366 prefersDedicatedAllocation,
8369 *pAllocationCreateInfo,
8370 VMA_SUBALLOCATION_TYPE_BUFFER,
8375 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
8376 allocator->m_hDevice,
8378 (*pAllocation)->GetMemory(),
8379 (*pAllocation)->GetOffset());
8383 if(pAllocationInfo != VMA_NULL)
8385 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8389 allocator->FreeMemory(*pAllocation);
8390 *pAllocation = VK_NULL_HANDLE;
8391 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8392 *pBuffer = VK_NULL_HANDLE;
8395 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8396 *pBuffer = VK_NULL_HANDLE;
8403 VmaAllocator allocator,
8405 VmaAllocation allocation)
8407 if(buffer != VK_NULL_HANDLE)
8409 VMA_ASSERT(allocator);
8411 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
8413 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8415 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
8417 allocator->FreeMemory(allocation);
8422 VmaAllocator allocator,
8423 const VkImageCreateInfo* pImageCreateInfo,
8426 VmaAllocation* pAllocation,
8429 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
8431 VMA_DEBUG_LOG(
"vmaCreateImage");
8433 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8435 *pImage = VK_NULL_HANDLE;
8436 *pAllocation = VK_NULL_HANDLE;
8439 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
8440 allocator->m_hDevice,
8442 allocator->GetAllocationCallbacks(),
8446 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
8447 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
8448 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
8451 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
8455 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
8456 allocator->m_hDevice,
8458 (*pAllocation)->GetMemory(),
8459 (*pAllocation)->GetOffset());
8463 if(pAllocationInfo != VMA_NULL)
8465 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8469 allocator->FreeMemory(*pAllocation);
8470 *pAllocation = VK_NULL_HANDLE;
8471 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8472 *pImage = VK_NULL_HANDLE;
8475 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8476 *pImage = VK_NULL_HANDLE;
8483 VmaAllocator allocator,
8485 VmaAllocation allocation)
8487 if(image != VK_NULL_HANDLE)
8489 VMA_ASSERT(allocator);
8491 VMA_DEBUG_LOG(
"vmaDestroyImage");
8493 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8495 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
8497 allocator->FreeMemory(allocation);
8501 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:764
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1011
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:789
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:774
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:974
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:768
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1279
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:786
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1445
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1149
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1203
Definition: vk_mem_alloc.h:1048
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:757
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1086
Definition: vk_mem_alloc.h:995
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:801
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:854
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:783
VkDeviceSize preferredSmallHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from small heaps <= 512 MB...
Definition: vk_mem_alloc.h:798
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:999
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:919
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:771
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:918
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:779
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1449
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:818
VmaStatInfo total
Definition: vk_mem_alloc.h:928
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1457
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1070
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1440
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:772
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:693
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:792
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1157
Definition: vk_mem_alloc.h:1151
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1289
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:769
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1107
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1173
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1209
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:755
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1160
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
VmaMemoryUsage
Definition: vk_mem_alloc.h:956
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1435
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1453
Definition: vk_mem_alloc.h:989
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1094
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:770
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:924
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:699
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:720
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:725
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1455
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1081
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1219
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:765
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:907
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1168
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:712
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1055
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:920
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:716
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1163
Definition: vk_mem_alloc.h:994
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1076
Definition: vk_mem_alloc.h:1067
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:910
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:767
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:1181
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:804
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1212
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1065
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1100
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:842
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:926
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1035
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:919
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:776
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:714
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:775
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1195
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1303
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps. ...
Definition: vk_mem_alloc.h:795
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:919
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:916
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1200
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1284
Definition: vk_mem_alloc.h:1063
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1451
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:763
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:778
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:914
Definition: vk_mem_alloc.h:961
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1153
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:912
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:773
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:777
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1022
Definition: vk_mem_alloc.h:983
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1298
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:753
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:766
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1265
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1131
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:920
Definition: vk_mem_alloc.h:1061
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:927
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1206
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:920
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1270