23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 826 #include <vulkan/vulkan.h> 828 VK_DEFINE_HANDLE(VmaAllocator)
832 VmaAllocator allocator,
834 VkDeviceMemory memory,
838 VmaAllocator allocator,
840 VkDeviceMemory memory,
989 VmaAllocator* pAllocator);
993 VmaAllocator allocator);
1000 VmaAllocator allocator,
1001 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
1008 VmaAllocator allocator,
1009 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
1018 VmaAllocator allocator,
1019 uint32_t memoryTypeIndex,
1020 VkMemoryPropertyFlags* pFlags);
1031 VmaAllocator allocator,
1032 uint32_t frameIndex);
1062 VmaAllocator allocator,
1065 #define VMA_STATS_STRING_ENABLED 1 1067 #if VMA_STATS_STRING_ENABLED 1073 VmaAllocator allocator,
1074 char** ppStatsString,
1075 VkBool32 detailedMap);
1078 VmaAllocator allocator,
1079 char* pStatsString);
1081 #endif // #if VMA_STATS_STRING_ENABLED 1083 VK_DEFINE_HANDLE(VmaPool)
1264 VmaAllocator allocator,
1265 uint32_t memoryTypeBits,
1267 uint32_t* pMemoryTypeIndex);
1368 VmaAllocator allocator,
1375 VmaAllocator allocator,
1385 VmaAllocator allocator,
1396 VmaAllocator allocator,
1398 size_t* pLostAllocationCount);
1400 VK_DEFINE_HANDLE(VmaAllocation)
1456 VmaAllocator allocator,
1457 const VkMemoryRequirements* pVkMemoryRequirements,
1459 VmaAllocation* pAllocation,
1469 VmaAllocator allocator,
1472 VmaAllocation* pAllocation,
1477 VmaAllocator allocator,
1480 VmaAllocation* pAllocation,
1485 VmaAllocator allocator,
1486 VmaAllocation allocation);
1490 VmaAllocator allocator,
1491 VmaAllocation allocation,
1508 VmaAllocator allocator,
1509 VmaAllocation allocation,
1523 VmaAllocator allocator,
1524 VmaAllocation* pAllocation);
1561 VmaAllocator allocator,
1562 VmaAllocation allocation,
1570 VmaAllocator allocator,
1571 VmaAllocation allocation);
1682 VmaAllocator allocator,
1683 VmaAllocation* pAllocations,
1684 size_t allocationCount,
1685 VkBool32* pAllocationsChanged,
1716 VmaAllocator allocator,
1717 const VkBufferCreateInfo* pBufferCreateInfo,
1720 VmaAllocation* pAllocation,
1735 VmaAllocator allocator,
1737 VmaAllocation allocation);
1741 VmaAllocator allocator,
1742 const VkImageCreateInfo* pImageCreateInfo,
1745 VmaAllocation* pAllocation,
1760 VmaAllocator allocator,
1762 VmaAllocation allocation);
1768 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1771 #ifdef __INTELLISENSE__ 1772 #define VMA_IMPLEMENTATION 1775 #ifdef VMA_IMPLEMENTATION 1776 #undef VMA_IMPLEMENTATION 1798 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 1799 #define VMA_STATIC_VULKAN_FUNCTIONS 1 1811 #if VMA_USE_STL_CONTAINERS 1812 #define VMA_USE_STL_VECTOR 1 1813 #define VMA_USE_STL_UNORDERED_MAP 1 1814 #define VMA_USE_STL_LIST 1 1817 #if VMA_USE_STL_VECTOR 1821 #if VMA_USE_STL_UNORDERED_MAP 1822 #include <unordered_map> 1825 #if VMA_USE_STL_LIST 1834 #include <algorithm> 1838 #if !defined(_WIN32) && !defined(__APPLE__) 1842 #if defined(__APPLE__) 1844 void *aligned_alloc(
size_t alignment,
size_t size)
1847 if(alignment <
sizeof(
void*))
1849 alignment =
sizeof(
void*);
1853 if(posix_memalign(&pointer, alignment, size) == 0)
1862 #define VMA_ASSERT(expr) assert(expr) 1864 #define VMA_ASSERT(expr) 1870 #ifndef VMA_HEAVY_ASSERT 1872 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 1874 #define VMA_HEAVY_ASSERT(expr) 1880 #define VMA_NULL nullptr 1883 #ifndef VMA_ALIGN_OF 1884 #define VMA_ALIGN_OF(type) (__alignof(type)) 1887 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 1889 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 1891 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 1895 #ifndef VMA_SYSTEM_FREE 1897 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 1899 #define VMA_SYSTEM_FREE(ptr) free(ptr) 1904 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 1908 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 1912 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 1916 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 1919 #ifndef VMA_DEBUG_LOG 1920 #define VMA_DEBUG_LOG(format, ...) 1930 #if VMA_STATS_STRING_ENABLED 1931 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
1933 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
1935 static inline void VmaUint64ToStr(
char* outStr,
size_t strLen, uint64_t num)
1937 snprintf(outStr, strLen,
"%llu", static_cast<unsigned long long>(num));
1939 static inline void VmaPtrToStr(
char* outStr,
size_t strLen,
const void* ptr)
1941 snprintf(outStr, strLen,
"%p", ptr);
1951 void Lock() { m_Mutex.lock(); }
1952 void Unlock() { m_Mutex.unlock(); }
1956 #define VMA_MUTEX VmaMutex 1967 #ifndef VMA_ATOMIC_UINT32 1968 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 1971 #ifndef VMA_BEST_FIT 1984 #define VMA_BEST_FIT (1) 1987 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 1992 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 1995 #ifndef VMA_DEBUG_ALIGNMENT 2000 #define VMA_DEBUG_ALIGNMENT (1) 2003 #ifndef VMA_DEBUG_MARGIN 2008 #define VMA_DEBUG_MARGIN (0) 2011 #ifndef VMA_DEBUG_GLOBAL_MUTEX 2016 #define VMA_DEBUG_GLOBAL_MUTEX (0) 2019 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 2024 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 2027 #ifndef VMA_SMALL_HEAP_MAX_SIZE 2028 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 2032 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 2033 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 2037 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
2043 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
2044 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
2047 static inline uint32_t VmaCountBitsSet(uint32_t v)
2049 uint32_t c = v - ((v >> 1) & 0x55555555);
2050 c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
2051 c = ((c >> 4) + c) & 0x0F0F0F0F;
2052 c = ((c >> 8) + c) & 0x00FF00FF;
2053 c = ((c >> 16) + c) & 0x0000FFFF;
2059 template <
typename T>
2060 static inline T VmaAlignUp(T val, T align)
2062 return (val + align - 1) / align * align;
2066 template <
typename T>
2067 inline T VmaRoundDiv(T x, T y)
2069 return (x + (y / (T)2)) / y;
2074 template<
typename Iterator,
typename Compare>
2075 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
2077 Iterator centerValue = end; --centerValue;
2078 Iterator insertIndex = beg;
2079 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
2081 if(cmp(*memTypeIndex, *centerValue))
2083 if(insertIndex != memTypeIndex)
2085 VMA_SWAP(*memTypeIndex, *insertIndex);
2090 if(insertIndex != centerValue)
2092 VMA_SWAP(*insertIndex, *centerValue);
2097 template<
typename Iterator,
typename Compare>
2098 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
2102 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
2103 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
2104 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
2108 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 2110 #endif // #ifndef VMA_SORT 2119 static inline bool VmaBlocksOnSamePage(
2120 VkDeviceSize resourceAOffset,
2121 VkDeviceSize resourceASize,
2122 VkDeviceSize resourceBOffset,
2123 VkDeviceSize pageSize)
2125 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
2126 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
2127 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
2128 VkDeviceSize resourceBStart = resourceBOffset;
2129 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
2130 return resourceAEndPage == resourceBStartPage;
2133 enum VmaSuballocationType
2135 VMA_SUBALLOCATION_TYPE_FREE = 0,
2136 VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
2137 VMA_SUBALLOCATION_TYPE_BUFFER = 2,
2138 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3,
2139 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
2140 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
2141 VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
2150 static inline bool VmaIsBufferImageGranularityConflict(
2151 VmaSuballocationType suballocType1,
2152 VmaSuballocationType suballocType2)
2154 if(suballocType1 > suballocType2)
2156 VMA_SWAP(suballocType1, suballocType2);
2159 switch(suballocType1)
2161 case VMA_SUBALLOCATION_TYPE_FREE:
2163 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
2165 case VMA_SUBALLOCATION_TYPE_BUFFER:
2167 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2168 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2169 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
2171 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2172 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
2173 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2174 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
2176 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2177 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
2189 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
2190 m_pMutex(useMutex ? &mutex : VMA_NULL)
2207 VMA_MUTEX* m_pMutex;
2210 #if VMA_DEBUG_GLOBAL_MUTEX 2211 static VMA_MUTEX gDebugGlobalMutex;
2212 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 2214 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 2218 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
2229 template <
typename IterT,
typename KeyT,
typename CmpT>
2230 static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end,
const KeyT &key, CmpT cmp)
2232 size_t down = 0, up = (end - beg);
2235 const size_t mid = (down + up) / 2;
2236 if(cmp(*(beg+mid), key))
2251 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
2253 if((pAllocationCallbacks != VMA_NULL) &&
2254 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
2256 return (*pAllocationCallbacks->pfnAllocation)(
2257 pAllocationCallbacks->pUserData,
2260 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2264 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
2268 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
2270 if((pAllocationCallbacks != VMA_NULL) &&
2271 (pAllocationCallbacks->pfnFree != VMA_NULL))
2273 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
2277 VMA_SYSTEM_FREE(ptr);
2281 template<
typename T>
2282 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
2284 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
2287 template<
typename T>
2288 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
2290 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
2293 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 2295 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 2297 template<
typename T>
2298 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
2301 VmaFree(pAllocationCallbacks, ptr);
2304 template<
typename T>
2305 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
2309 for(
size_t i = count; i--; )
2313 VmaFree(pAllocationCallbacks, ptr);
2318 template<
typename T>
2319 class VmaStlAllocator
2322 const VkAllocationCallbacks*
const m_pCallbacks;
2323 typedef T value_type;
2325 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
2326 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
2328 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
2329 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
2331 template<
typename U>
2332 bool operator==(
const VmaStlAllocator<U>& rhs)
const 2334 return m_pCallbacks == rhs.m_pCallbacks;
2336 template<
typename U>
2337 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 2339 return m_pCallbacks != rhs.m_pCallbacks;
2342 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
2345 #if VMA_USE_STL_VECTOR 2347 #define VmaVector std::vector 2349 template<
typename T,
typename allocatorT>
2350 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
2352 vec.insert(vec.begin() + index, item);
2355 template<
typename T,
typename allocatorT>
2356 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
2358 vec.erase(vec.begin() + index);
2361 #else // #if VMA_USE_STL_VECTOR 2366 template<
typename T,
typename AllocatorT>
2370 typedef T value_type;
2372 VmaVector(
const AllocatorT& allocator) :
2373 m_Allocator(allocator),
2380 VmaVector(
size_t count,
const AllocatorT& allocator) :
2381 m_Allocator(allocator),
2382 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
2388 VmaVector(
const VmaVector<T, AllocatorT>& src) :
2389 m_Allocator(src.m_Allocator),
2390 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
2391 m_Count(src.m_Count),
2392 m_Capacity(src.m_Count)
2396 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
2402 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2405 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
2409 resize(rhs.m_Count);
2412 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
2418 bool empty()
const {
return m_Count == 0; }
2419 size_t size()
const {
return m_Count; }
2420 T* data() {
return m_pArray; }
2421 const T* data()
const {
return m_pArray; }
2423 T& operator[](
size_t index)
2425 VMA_HEAVY_ASSERT(index < m_Count);
2426 return m_pArray[index];
2428 const T& operator[](
size_t index)
const 2430 VMA_HEAVY_ASSERT(index < m_Count);
2431 return m_pArray[index];
2436 VMA_HEAVY_ASSERT(m_Count > 0);
2439 const T& front()
const 2441 VMA_HEAVY_ASSERT(m_Count > 0);
2446 VMA_HEAVY_ASSERT(m_Count > 0);
2447 return m_pArray[m_Count - 1];
2449 const T& back()
const 2451 VMA_HEAVY_ASSERT(m_Count > 0);
2452 return m_pArray[m_Count - 1];
2455 void reserve(
size_t newCapacity,
bool freeMemory =
false)
2457 newCapacity = VMA_MAX(newCapacity, m_Count);
2459 if((newCapacity < m_Capacity) && !freeMemory)
2461 newCapacity = m_Capacity;
2464 if(newCapacity != m_Capacity)
2466 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
2469 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
2471 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2472 m_Capacity = newCapacity;
2473 m_pArray = newArray;
2477 void resize(
size_t newCount,
bool freeMemory =
false)
2479 size_t newCapacity = m_Capacity;
2480 if(newCount > m_Capacity)
2482 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
2486 newCapacity = newCount;
2489 if(newCapacity != m_Capacity)
2491 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
2492 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
2493 if(elementsToCopy != 0)
2495 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
2497 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2498 m_Capacity = newCapacity;
2499 m_pArray = newArray;
2505 void clear(
bool freeMemory =
false)
2507 resize(0, freeMemory);
2510 void insert(
size_t index,
const T& src)
2512 VMA_HEAVY_ASSERT(index <= m_Count);
2513 const size_t oldCount = size();
2514 resize(oldCount + 1);
2515 if(index < oldCount)
2517 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
2519 m_pArray[index] = src;
2522 void remove(
size_t index)
2524 VMA_HEAVY_ASSERT(index < m_Count);
2525 const size_t oldCount = size();
2526 if(index < oldCount - 1)
2528 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
2530 resize(oldCount - 1);
2533 void push_back(
const T& src)
2535 const size_t newIndex = size();
2536 resize(newIndex + 1);
2537 m_pArray[newIndex] = src;
2542 VMA_HEAVY_ASSERT(m_Count > 0);
2546 void push_front(
const T& src)
2553 VMA_HEAVY_ASSERT(m_Count > 0);
2557 typedef T* iterator;
2559 iterator begin() {
return m_pArray; }
2560 iterator end() {
return m_pArray + m_Count; }
2563 AllocatorT m_Allocator;
2569 template<
typename T,
typename allocatorT>
2570 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2572 vec.insert(index, item);
2575 template<
typename T,
typename allocatorT>
2576 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2581 #endif // #if VMA_USE_STL_VECTOR 2583 template<
typename CmpLess,
typename VectorT>
2584 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2586 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2588 vector.data() + vector.size(),
2590 CmpLess()) - vector.data();
2591 VmaVectorInsert(vector, indexToInsert, value);
2592 return indexToInsert;
2595 template<
typename CmpLess,
typename VectorT>
2596 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
2599 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2604 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
2606 size_t indexToRemove = it - vector.begin();
2607 VmaVectorRemove(vector, indexToRemove);
2613 template<
typename CmpLess,
typename VectorT>
2614 size_t VmaVectorFindSorted(
const VectorT& vector,
const typename VectorT::value_type& value)
2617 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2619 vector.data() + vector.size(),
2622 if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
2624 return it - vector.begin();
2628 return vector.size();
2640 template<
typename T>
2641 class VmaPoolAllocator
2644 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2645 ~VmaPoolAllocator();
2653 uint32_t NextFreeIndex;
2660 uint32_t FirstFreeIndex;
2663 const VkAllocationCallbacks* m_pAllocationCallbacks;
2664 size_t m_ItemsPerBlock;
2665 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2667 ItemBlock& CreateNewBlock();
2670 template<
typename T>
2671 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
2672 m_pAllocationCallbacks(pAllocationCallbacks),
2673 m_ItemsPerBlock(itemsPerBlock),
2674 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
2676 VMA_ASSERT(itemsPerBlock > 0);
2679 template<
typename T>
2680 VmaPoolAllocator<T>::~VmaPoolAllocator()
2685 template<
typename T>
2686 void VmaPoolAllocator<T>::Clear()
2688 for(
size_t i = m_ItemBlocks.size(); i--; )
2689 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
2690 m_ItemBlocks.clear();
2693 template<
typename T>
2694 T* VmaPoolAllocator<T>::Alloc()
2696 for(
size_t i = m_ItemBlocks.size(); i--; )
2698 ItemBlock& block = m_ItemBlocks[i];
2700 if(block.FirstFreeIndex != UINT32_MAX)
2702 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
2703 block.FirstFreeIndex = pItem->NextFreeIndex;
2704 return &pItem->Value;
2709 ItemBlock& newBlock = CreateNewBlock();
2710 Item*
const pItem = &newBlock.pItems[0];
2711 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
2712 return &pItem->Value;
2715 template<
typename T>
2716 void VmaPoolAllocator<T>::Free(T* ptr)
2719 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2721 ItemBlock& block = m_ItemBlocks[i];
2725 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2728 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2730 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2731 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2732 block.FirstFreeIndex = index;
2736 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
2739 template<
typename T>
2740 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
2742 ItemBlock newBlock = {
2743 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
2745 m_ItemBlocks.push_back(newBlock);
2748 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
2749 newBlock.pItems[i].NextFreeIndex = i + 1;
2750 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
2751 return m_ItemBlocks.back();
2757 #if VMA_USE_STL_LIST 2759 #define VmaList std::list 2761 #else // #if VMA_USE_STL_LIST 2763 template<
typename T>
2772 template<
typename T>
2776 typedef VmaListItem<T> ItemType;
2778 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2782 size_t GetCount()
const {
return m_Count; }
2783 bool IsEmpty()
const {
return m_Count == 0; }
2785 ItemType* Front() {
return m_pFront; }
2786 const ItemType* Front()
const {
return m_pFront; }
2787 ItemType* Back() {
return m_pBack; }
2788 const ItemType* Back()
const {
return m_pBack; }
2790 ItemType* PushBack();
2791 ItemType* PushFront();
2792 ItemType* PushBack(
const T& value);
2793 ItemType* PushFront(
const T& value);
2798 ItemType* InsertBefore(ItemType* pItem);
2800 ItemType* InsertAfter(ItemType* pItem);
2802 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2803 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2805 void Remove(ItemType* pItem);
2808 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2809 VmaPoolAllocator<ItemType> m_ItemAllocator;
2815 VmaRawList(
const VmaRawList<T>& src);
2816 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
2819 template<
typename T>
2820 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
2821 m_pAllocationCallbacks(pAllocationCallbacks),
2822 m_ItemAllocator(pAllocationCallbacks, 128),
2829 template<
typename T>
2830 VmaRawList<T>::~VmaRawList()
2836 template<
typename T>
2837 void VmaRawList<T>::Clear()
2839 if(IsEmpty() ==
false)
2841 ItemType* pItem = m_pBack;
2842 while(pItem != VMA_NULL)
2844 ItemType*
const pPrevItem = pItem->pPrev;
2845 m_ItemAllocator.Free(pItem);
2848 m_pFront = VMA_NULL;
2854 template<
typename T>
2855 VmaListItem<T>* VmaRawList<T>::PushBack()
2857 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2858 pNewItem->pNext = VMA_NULL;
2861 pNewItem->pPrev = VMA_NULL;
2862 m_pFront = pNewItem;
2868 pNewItem->pPrev = m_pBack;
2869 m_pBack->pNext = pNewItem;
2876 template<
typename T>
2877 VmaListItem<T>* VmaRawList<T>::PushFront()
2879 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2880 pNewItem->pPrev = VMA_NULL;
2883 pNewItem->pNext = VMA_NULL;
2884 m_pFront = pNewItem;
2890 pNewItem->pNext = m_pFront;
2891 m_pFront->pPrev = pNewItem;
2892 m_pFront = pNewItem;
2898 template<
typename T>
2899 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
2901 ItemType*
const pNewItem = PushBack();
2902 pNewItem->Value = value;
2906 template<
typename T>
2907 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
2909 ItemType*
const pNewItem = PushFront();
2910 pNewItem->Value = value;
2914 template<
typename T>
2915 void VmaRawList<T>::PopBack()
2917 VMA_HEAVY_ASSERT(m_Count > 0);
2918 ItemType*
const pBackItem = m_pBack;
2919 ItemType*
const pPrevItem = pBackItem->pPrev;
2920 if(pPrevItem != VMA_NULL)
2922 pPrevItem->pNext = VMA_NULL;
2924 m_pBack = pPrevItem;
2925 m_ItemAllocator.Free(pBackItem);
2929 template<
typename T>
2930 void VmaRawList<T>::PopFront()
2932 VMA_HEAVY_ASSERT(m_Count > 0);
2933 ItemType*
const pFrontItem = m_pFront;
2934 ItemType*
const pNextItem = pFrontItem->pNext;
2935 if(pNextItem != VMA_NULL)
2937 pNextItem->pPrev = VMA_NULL;
2939 m_pFront = pNextItem;
2940 m_ItemAllocator.Free(pFrontItem);
2944 template<
typename T>
2945 void VmaRawList<T>::Remove(ItemType* pItem)
2947 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
2948 VMA_HEAVY_ASSERT(m_Count > 0);
2950 if(pItem->pPrev != VMA_NULL)
2952 pItem->pPrev->pNext = pItem->pNext;
2956 VMA_HEAVY_ASSERT(m_pFront == pItem);
2957 m_pFront = pItem->pNext;
2960 if(pItem->pNext != VMA_NULL)
2962 pItem->pNext->pPrev = pItem->pPrev;
2966 VMA_HEAVY_ASSERT(m_pBack == pItem);
2967 m_pBack = pItem->pPrev;
2970 m_ItemAllocator.Free(pItem);
2974 template<
typename T>
2975 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
2977 if(pItem != VMA_NULL)
2979 ItemType*
const prevItem = pItem->pPrev;
2980 ItemType*
const newItem = m_ItemAllocator.Alloc();
2981 newItem->pPrev = prevItem;
2982 newItem->pNext = pItem;
2983 pItem->pPrev = newItem;
2984 if(prevItem != VMA_NULL)
2986 prevItem->pNext = newItem;
2990 VMA_HEAVY_ASSERT(m_pFront == pItem);
3000 template<
typename T>
3001 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
3003 if(pItem != VMA_NULL)
3005 ItemType*
const nextItem = pItem->pNext;
3006 ItemType*
const newItem = m_ItemAllocator.Alloc();
3007 newItem->pNext = nextItem;
3008 newItem->pPrev = pItem;
3009 pItem->pNext = newItem;
3010 if(nextItem != VMA_NULL)
3012 nextItem->pPrev = newItem;
3016 VMA_HEAVY_ASSERT(m_pBack == pItem);
3026 template<
typename T>
3027 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
3029 ItemType*
const newItem = InsertBefore(pItem);
3030 newItem->Value = value;
3034 template<
typename T>
3035 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
3037 ItemType*
const newItem = InsertAfter(pItem);
3038 newItem->Value = value;
3042 template<
typename T,
typename AllocatorT>
3055 T& operator*()
const 3057 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3058 return m_pItem->Value;
3060 T* operator->()
const 3062 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3063 return &m_pItem->Value;
3066 iterator& operator++()
3068 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3069 m_pItem = m_pItem->pNext;
3072 iterator& operator--()
3074 if(m_pItem != VMA_NULL)
3076 m_pItem = m_pItem->pPrev;
3080 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3081 m_pItem = m_pList->Back();
3086 iterator operator++(
int)
3088 iterator result = *
this;
3092 iterator operator--(
int)
3094 iterator result = *
this;
3099 bool operator==(
const iterator& rhs)
const 3101 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3102 return m_pItem == rhs.m_pItem;
3104 bool operator!=(
const iterator& rhs)
const 3106 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3107 return m_pItem != rhs.m_pItem;
3111 VmaRawList<T>* m_pList;
3112 VmaListItem<T>* m_pItem;
3114 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
3120 friend class VmaList<T, AllocatorT>;
3123 class const_iterator
3132 const_iterator(
const iterator& src) :
3133 m_pList(src.m_pList),
3134 m_pItem(src.m_pItem)
3138 const T& operator*()
const 3140 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3141 return m_pItem->Value;
3143 const T* operator->()
const 3145 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3146 return &m_pItem->Value;
3149 const_iterator& operator++()
3151 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3152 m_pItem = m_pItem->pNext;
3155 const_iterator& operator--()
3157 if(m_pItem != VMA_NULL)
3159 m_pItem = m_pItem->pPrev;
3163 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3164 m_pItem = m_pList->Back();
3169 const_iterator operator++(
int)
3171 const_iterator result = *
this;
3175 const_iterator operator--(
int)
3177 const_iterator result = *
this;
3182 bool operator==(
const const_iterator& rhs)
const 3184 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3185 return m_pItem == rhs.m_pItem;
3187 bool operator!=(
const const_iterator& rhs)
const 3189 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3190 return m_pItem != rhs.m_pItem;
3194 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
3200 const VmaRawList<T>* m_pList;
3201 const VmaListItem<T>* m_pItem;
3203 friend class VmaList<T, AllocatorT>;
3206 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
3208 bool empty()
const {
return m_RawList.IsEmpty(); }
3209 size_t size()
const {
return m_RawList.GetCount(); }
3211 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
3212 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
3214 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
3215 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
3217 void clear() { m_RawList.Clear(); }
3218 void push_back(
const T& value) { m_RawList.PushBack(value); }
3219 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
3220 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
3223 VmaRawList<T> m_RawList;
3226 #endif // #if VMA_USE_STL_LIST 3234 #if VMA_USE_STL_UNORDERED_MAP 3236 #define VmaPair std::pair 3238 #define VMA_MAP_TYPE(KeyT, ValueT) \ 3239 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 3241 #else // #if VMA_USE_STL_UNORDERED_MAP 3243 template<
typename T1,
typename T2>
3249 VmaPair() : first(), second() { }
3250 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
3256 template<
typename KeyT,
typename ValueT>
3260 typedef VmaPair<KeyT, ValueT> PairType;
3261 typedef PairType* iterator;
3263 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
3265 iterator begin() {
return m_Vector.begin(); }
3266 iterator end() {
return m_Vector.end(); }
3268 void insert(
const PairType& pair);
3269 iterator find(
const KeyT& key);
3270 void erase(iterator it);
3273 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
3276 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 3278 template<
typename FirstT,
typename SecondT>
3279 struct VmaPairFirstLess
3281 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 3283 return lhs.first < rhs.first;
3285 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 3287 return lhs.first < rhsFirst;
3291 template<
typename KeyT,
typename ValueT>
3292 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
3294 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3296 m_Vector.data() + m_Vector.size(),
3298 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
3299 VmaVectorInsert(m_Vector, indexToInsert, pair);
3302 template<
typename KeyT,
typename ValueT>
3303 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
3305 PairType* it = VmaBinaryFindFirstNotLess(
3307 m_Vector.data() + m_Vector.size(),
3309 VmaPairFirstLess<KeyT, ValueT>());
3310 if((it != m_Vector.end()) && (it->first == key))
3316 return m_Vector.end();
3320 template<
typename KeyT,
typename ValueT>
3321 void VmaMap<KeyT, ValueT>::erase(iterator it)
3323 VmaVectorRemove(m_Vector, it - m_Vector.begin());
3326 #endif // #if VMA_USE_STL_UNORDERED_MAP 3332 class VmaDeviceMemoryBlock;
3334 struct VmaAllocation_T
3337 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
3341 FLAG_USER_DATA_STRING = 0x01,
3345 enum ALLOCATION_TYPE
3347 ALLOCATION_TYPE_NONE,
3348 ALLOCATION_TYPE_BLOCK,
3349 ALLOCATION_TYPE_DEDICATED,
3352 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
3355 m_pUserData(VMA_NULL),
3356 m_LastUseFrameIndex(currentFrameIndex),
3357 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
3358 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
3360 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
3366 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
3369 VMA_ASSERT(m_pUserData == VMA_NULL);
3372 void InitBlockAllocation(
3374 VmaDeviceMemoryBlock* block,
3375 VkDeviceSize offset,
3376 VkDeviceSize alignment,
3378 VmaSuballocationType suballocationType,
3382 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3383 VMA_ASSERT(block != VMA_NULL);
3384 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3385 m_Alignment = alignment;
3387 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3388 m_SuballocationType = (uint8_t)suballocationType;
3389 m_BlockAllocation.m_hPool = hPool;
3390 m_BlockAllocation.m_Block = block;
3391 m_BlockAllocation.m_Offset = offset;
3392 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
3397 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3398 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
3399 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3400 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
3401 m_BlockAllocation.m_Block = VMA_NULL;
3402 m_BlockAllocation.m_Offset = 0;
3403 m_BlockAllocation.m_CanBecomeLost =
true;
3406 void ChangeBlockAllocation(
3407 VmaAllocator hAllocator,
3408 VmaDeviceMemoryBlock* block,
3409 VkDeviceSize offset);
3412 void InitDedicatedAllocation(
3413 uint32_t memoryTypeIndex,
3414 VkDeviceMemory hMemory,
3415 VmaSuballocationType suballocationType,
3419 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3420 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
3421 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
3424 m_SuballocationType = (uint8_t)suballocationType;
3425 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3426 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
3427 m_DedicatedAllocation.m_hMemory = hMemory;
3428 m_DedicatedAllocation.m_pMappedData = pMappedData;
3431 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
3432 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
3433 VkDeviceSize GetSize()
const {
return m_Size; }
3434 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
3435 void* GetUserData()
const {
return m_pUserData; }
3436 void SetUserData(VmaAllocator hAllocator,
void* pUserData);
3437 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
3439 VmaDeviceMemoryBlock* GetBlock()
const 3441 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3442 return m_BlockAllocation.m_Block;
3444 VkDeviceSize GetOffset()
const;
3445 VkDeviceMemory GetMemory()
const;
3446 uint32_t GetMemoryTypeIndex()
const;
3447 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
3448 void* GetMappedData()
const;
3449 bool CanBecomeLost()
const;
3450 VmaPool GetPool()
const;
3452 uint32_t GetLastUseFrameIndex()
const 3454 return m_LastUseFrameIndex.load();
3456 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
3458 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
3468 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3470 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
3472 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
3483 void BlockAllocMap();
3484 void BlockAllocUnmap();
3485 VkResult DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData);
3486 void DedicatedAllocUnmap(VmaAllocator hAllocator);
3489 VkDeviceSize m_Alignment;
3490 VkDeviceSize m_Size;
3492 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
3494 uint8_t m_SuballocationType;
3501 struct BlockAllocation
3504 VmaDeviceMemoryBlock* m_Block;
3505 VkDeviceSize m_Offset;
3506 bool m_CanBecomeLost;
3510 struct DedicatedAllocation
3512 uint32_t m_MemoryTypeIndex;
3513 VkDeviceMemory m_hMemory;
3514 void* m_pMappedData;
3520 BlockAllocation m_BlockAllocation;
3522 DedicatedAllocation m_DedicatedAllocation;
3525 void FreeUserDataString(VmaAllocator hAllocator);
3532 struct VmaSuballocation
3534 VkDeviceSize offset;
3536 VmaAllocation hAllocation;
3537 VmaSuballocationType type;
3540 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
3543 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
3558 struct VmaAllocationRequest
3560 VkDeviceSize offset;
3561 VkDeviceSize sumFreeSize;
3562 VkDeviceSize sumItemSize;
3563 VmaSuballocationList::iterator item;
3564 size_t itemsToMakeLostCount;
3566 VkDeviceSize CalcCost()
const 3568 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
3576 class VmaBlockMetadata
3579 VmaBlockMetadata(VmaAllocator hAllocator);
3580 ~VmaBlockMetadata();
3581 void Init(VkDeviceSize size);
3584 bool Validate()
const;
3585 VkDeviceSize GetSize()
const {
return m_Size; }
3586 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
3587 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
3588 VkDeviceSize GetUnusedRangeSizeMax()
const;
3590 bool IsEmpty()
const;
3592 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
3595 #if VMA_STATS_STRING_ENABLED 3596 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
3600 void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
3605 bool CreateAllocationRequest(
3606 uint32_t currentFrameIndex,
3607 uint32_t frameInUseCount,
3608 VkDeviceSize bufferImageGranularity,
3609 VkDeviceSize allocSize,
3610 VkDeviceSize allocAlignment,
3611 VmaSuballocationType allocType,
3612 bool canMakeOtherLost,
3613 VmaAllocationRequest* pAllocationRequest);
3615 bool MakeRequestedAllocationsLost(
3616 uint32_t currentFrameIndex,
3617 uint32_t frameInUseCount,
3618 VmaAllocationRequest* pAllocationRequest);
3620 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3624 const VmaAllocationRequest& request,
3625 VmaSuballocationType type,
3626 VkDeviceSize allocSize,
3627 VmaAllocation hAllocation);
3630 void Free(
const VmaAllocation allocation);
3631 void FreeAtOffset(VkDeviceSize offset);
3634 VkDeviceSize m_Size;
3635 uint32_t m_FreeCount;
3636 VkDeviceSize m_SumFreeSize;
3637 VmaSuballocationList m_Suballocations;
3640 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3642 bool ValidateFreeSuballocationList()
const;
3646 bool CheckAllocation(
3647 uint32_t currentFrameIndex,
3648 uint32_t frameInUseCount,
3649 VkDeviceSize bufferImageGranularity,
3650 VkDeviceSize allocSize,
3651 VkDeviceSize allocAlignment,
3652 VmaSuballocationType allocType,
3653 VmaSuballocationList::const_iterator suballocItem,
3654 bool canMakeOtherLost,
3655 VkDeviceSize* pOffset,
3656 size_t* itemsToMakeLostCount,
3657 VkDeviceSize* pSumFreeSize,
3658 VkDeviceSize* pSumItemSize)
const;
3660 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3664 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3667 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3670 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
3674 class VmaDeviceMemoryMapping
3677 VmaDeviceMemoryMapping();
3678 ~VmaDeviceMemoryMapping();
3680 void* GetMappedData()
const {
return m_pMappedData; }
3683 VkResult Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count,
void **ppData);
3684 void Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count);
3688 uint32_t m_MapCount;
3689 void* m_pMappedData;
3698 class VmaDeviceMemoryBlock
3701 uint32_t m_MemoryTypeIndex;
3702 VkDeviceMemory m_hMemory;
3703 VmaDeviceMemoryMapping m_Mapping;
3704 VmaBlockMetadata m_Metadata;
3706 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
3708 ~VmaDeviceMemoryBlock()
3710 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
3715 uint32_t newMemoryTypeIndex,
3716 VkDeviceMemory newMemory,
3717 VkDeviceSize newSize);
3719 void Destroy(VmaAllocator allocator);
3722 bool Validate()
const;
3725 VkResult Map(VmaAllocator hAllocator, uint32_t count,
void** ppData);
3726 void Unmap(VmaAllocator hAllocator, uint32_t count);
3729 struct VmaPointerLess
3731 bool operator()(
const void* lhs,
const void* rhs)
const 3737 class VmaDefragmentator;
3745 struct VmaBlockVector
3748 VmaAllocator hAllocator,
3749 uint32_t memoryTypeIndex,
3750 VkDeviceSize preferredBlockSize,
3751 size_t minBlockCount,
3752 size_t maxBlockCount,
3753 VkDeviceSize bufferImageGranularity,
3754 uint32_t frameInUseCount,
3758 VkResult CreateMinBlocks();
3760 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3761 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3762 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3763 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3767 bool IsEmpty()
const {
return m_Blocks.empty(); }
3770 VmaPool hCurrentPool,
3771 uint32_t currentFrameIndex,
3772 const VkMemoryRequirements& vkMemReq,
3774 VmaSuballocationType suballocType,
3775 VmaAllocation* pAllocation);
3778 VmaAllocation hAllocation);
3783 #if VMA_STATS_STRING_ENABLED 3784 void PrintDetailedMap(
class VmaJsonWriter& json);
3787 void MakePoolAllocationsLost(
3788 uint32_t currentFrameIndex,
3789 size_t* pLostAllocationCount);
3791 VmaDefragmentator* EnsureDefragmentator(
3792 VmaAllocator hAllocator,
3793 uint32_t currentFrameIndex);
3795 VkResult Defragment(
3797 VkDeviceSize& maxBytesToMove,
3798 uint32_t& maxAllocationsToMove);
3800 void DestroyDefragmentator();
3803 friend class VmaDefragmentator;
3805 const VmaAllocator m_hAllocator;
3806 const uint32_t m_MemoryTypeIndex;
3807 const VkDeviceSize m_PreferredBlockSize;
3808 const size_t m_MinBlockCount;
3809 const size_t m_MaxBlockCount;
3810 const VkDeviceSize m_BufferImageGranularity;
3811 const uint32_t m_FrameInUseCount;
3812 const bool m_IsCustomPool;
3815 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
3819 bool m_HasEmptyBlock;
3820 VmaDefragmentator* m_pDefragmentator;
3822 size_t CalcMaxBlockSize()
const;
3825 void Remove(VmaDeviceMemoryBlock* pBlock);
3829 void IncrementallySortBlocks();
3831 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
3837 VmaBlockVector m_BlockVector;
3841 VmaAllocator hAllocator,
3845 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
3847 #if VMA_STATS_STRING_ENABLED 3852 class VmaDefragmentator
3854 const VmaAllocator m_hAllocator;
3855 VmaBlockVector*
const m_pBlockVector;
3856 uint32_t m_CurrentFrameIndex;
3857 VkDeviceSize m_BytesMoved;
3858 uint32_t m_AllocationsMoved;
3860 struct AllocationInfo
3862 VmaAllocation m_hAllocation;
3863 VkBool32* m_pChanged;
3866 m_hAllocation(VK_NULL_HANDLE),
3867 m_pChanged(VMA_NULL)
3872 struct AllocationInfoSizeGreater
3874 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3876 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3881 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3885 VmaDeviceMemoryBlock* m_pBlock;
3886 bool m_HasNonMovableAllocations;
3887 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3889 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3891 m_HasNonMovableAllocations(true),
3892 m_Allocations(pAllocationCallbacks),
3893 m_pMappedDataForDefragmentation(VMA_NULL)
3897 void CalcHasNonMovableAllocations()
3899 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
3900 const size_t defragmentAllocCount = m_Allocations.size();
3901 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
3904 void SortAllocationsBySizeDescecnding()
3906 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
3909 VkResult EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData);
3910 void Unmap(VmaAllocator hAllocator);
3914 void* m_pMappedDataForDefragmentation;
3917 struct BlockPointerLess
3919 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 3921 return pLhsBlockInfo->m_pBlock < pRhsBlock;
3923 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3925 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
3931 struct BlockInfoCompareMoveDestination
3933 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3935 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3939 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3943 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
3951 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3952 BlockInfoVector m_Blocks;
3954 VkResult DefragmentRound(
3955 VkDeviceSize maxBytesToMove,
3956 uint32_t maxAllocationsToMove);
3958 static bool MoveMakesSense(
3959 size_t dstBlockIndex, VkDeviceSize dstOffset,
3960 size_t srcBlockIndex, VkDeviceSize srcOffset);
3964 VmaAllocator hAllocator,
3965 VmaBlockVector* pBlockVector,
3966 uint32_t currentFrameIndex);
3968 ~VmaDefragmentator();
3970 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
3971 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
3973 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
3975 VkResult Defragment(
3976 VkDeviceSize maxBytesToMove,
3977 uint32_t maxAllocationsToMove);
3981 struct VmaAllocator_T
3984 bool m_UseKhrDedicatedAllocation;
3986 bool m_AllocationCallbacksSpecified;
3987 VkAllocationCallbacks m_AllocationCallbacks;
3991 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
3992 VMA_MUTEX m_HeapSizeLimitMutex;
3994 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
3995 VkPhysicalDeviceMemoryProperties m_MemProps;
3998 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
4001 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
4002 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
4003 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
4008 const VkAllocationCallbacks* GetAllocationCallbacks()
const 4010 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
4014 return m_VulkanFunctions;
4017 VkDeviceSize GetBufferImageGranularity()
const 4020 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
4021 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
4024 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
4025 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
4027 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 4029 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
4030 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
4033 void GetBufferMemoryRequirements(
4035 VkMemoryRequirements& memReq,
4036 bool& requiresDedicatedAllocation,
4037 bool& prefersDedicatedAllocation)
const;
4038 void GetImageMemoryRequirements(
4040 VkMemoryRequirements& memReq,
4041 bool& requiresDedicatedAllocation,
4042 bool& prefersDedicatedAllocation)
const;
4045 VkResult AllocateMemory(
4046 const VkMemoryRequirements& vkMemReq,
4047 bool requiresDedicatedAllocation,
4048 bool prefersDedicatedAllocation,
4049 VkBuffer dedicatedBuffer,
4050 VkImage dedicatedImage,
4052 VmaSuballocationType suballocType,
4053 VmaAllocation* pAllocation);
4056 void FreeMemory(
const VmaAllocation allocation);
4058 void CalculateStats(
VmaStats* pStats);
4060 #if VMA_STATS_STRING_ENABLED 4061 void PrintDetailedMap(
class VmaJsonWriter& json);
4064 VkResult Defragment(
4065 VmaAllocation* pAllocations,
4066 size_t allocationCount,
4067 VkBool32* pAllocationsChanged,
4071 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
4074 void DestroyPool(VmaPool pool);
4075 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
4077 void SetCurrentFrameIndex(uint32_t frameIndex);
4079 void MakePoolAllocationsLost(
4081 size_t* pLostAllocationCount);
4083 void CreateLostAllocation(VmaAllocation* pAllocation);
4085 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
4086 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
4088 VkResult Map(VmaAllocation hAllocation,
void** ppData);
4089 void Unmap(VmaAllocation hAllocation);
4092 VkDeviceSize m_PreferredLargeHeapBlockSize;
4094 VkPhysicalDevice m_PhysicalDevice;
4095 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
4097 VMA_MUTEX m_PoolsMutex;
4099 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
4105 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
4107 VkResult AllocateMemoryOfType(
4108 const VkMemoryRequirements& vkMemReq,
4109 bool dedicatedAllocation,
4110 VkBuffer dedicatedBuffer,
4111 VkImage dedicatedImage,
4113 uint32_t memTypeIndex,
4114 VmaSuballocationType suballocType,
4115 VmaAllocation* pAllocation);
4118 VkResult AllocateDedicatedMemory(
4120 VmaSuballocationType suballocType,
4121 uint32_t memTypeIndex,
4123 bool isUserDataString,
4125 VkBuffer dedicatedBuffer,
4126 VkImage dedicatedImage,
4127 VmaAllocation* pAllocation);
4130 void FreeDedicatedMemory(VmaAllocation allocation);
4136 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
4138 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
4141 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
4143 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
4146 template<
typename T>
4147 static T* VmaAllocate(VmaAllocator hAllocator)
4149 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
4152 template<
typename T>
4153 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
4155 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
4158 template<
typename T>
4159 static void vma_delete(VmaAllocator hAllocator, T* ptr)
4164 VmaFree(hAllocator, ptr);
4168 template<
typename T>
4169 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
4173 for(
size_t i = count; i--; )
4175 VmaFree(hAllocator, ptr);
4182 #if VMA_STATS_STRING_ENABLED 4184 class VmaStringBuilder
4187 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
4188 size_t GetLength()
const {
return m_Data.size(); }
4189 const char* GetData()
const {
return m_Data.data(); }
4191 void Add(
char ch) { m_Data.push_back(ch); }
4192 void Add(
const char* pStr);
4193 void AddNewLine() { Add(
'\n'); }
4194 void AddNumber(uint32_t num);
4195 void AddNumber(uint64_t num);
4196 void AddPointer(
const void* ptr);
4199 VmaVector< char, VmaStlAllocator<char> > m_Data;
4202 void VmaStringBuilder::Add(
const char* pStr)
4204 const size_t strLen = strlen(pStr);
4207 const size_t oldCount = m_Data.size();
4208 m_Data.resize(oldCount + strLen);
4209 memcpy(m_Data.data() + oldCount, pStr, strLen);
4213 void VmaStringBuilder::AddNumber(uint32_t num)
4216 VmaUint32ToStr(buf,
sizeof(buf), num);
4220 void VmaStringBuilder::AddNumber(uint64_t num)
4223 VmaUint64ToStr(buf,
sizeof(buf), num);
4227 void VmaStringBuilder::AddPointer(
const void* ptr)
4230 VmaPtrToStr(buf,
sizeof(buf), ptr);
4234 #endif // #if VMA_STATS_STRING_ENABLED 4239 #if VMA_STATS_STRING_ENABLED 4244 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
4247 void BeginObject(
bool singleLine =
false);
4250 void BeginArray(
bool singleLine =
false);
4253 void WriteString(
const char* pStr);
4254 void BeginString(
const char* pStr = VMA_NULL);
4255 void ContinueString(
const char* pStr);
4256 void ContinueString(uint32_t n);
4257 void ContinueString(uint64_t n);
4258 void ContinueString_Pointer(
const void* ptr);
4259 void EndString(
const char* pStr = VMA_NULL);
4261 void WriteNumber(uint32_t n);
4262 void WriteNumber(uint64_t n);
4263 void WriteBool(
bool b);
4267 static const char*
const INDENT;
4269 enum COLLECTION_TYPE
4271 COLLECTION_TYPE_OBJECT,
4272 COLLECTION_TYPE_ARRAY,
4276 COLLECTION_TYPE type;
4277 uint32_t valueCount;
4278 bool singleLineMode;
4281 VmaStringBuilder& m_SB;
4282 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
4283 bool m_InsideString;
4285 void BeginValue(
bool isString);
4286 void WriteIndent(
bool oneLess =
false);
4289 const char*
const VmaJsonWriter::INDENT =
" ";
4291 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
4293 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
4294 m_InsideString(false)
4298 VmaJsonWriter::~VmaJsonWriter()
4300 VMA_ASSERT(!m_InsideString);
4301 VMA_ASSERT(m_Stack.empty());
4304 void VmaJsonWriter::BeginObject(
bool singleLine)
4306 VMA_ASSERT(!m_InsideString);
4312 item.type = COLLECTION_TYPE_OBJECT;
4313 item.valueCount = 0;
4314 item.singleLineMode = singleLine;
4315 m_Stack.push_back(item);
4318 void VmaJsonWriter::EndObject()
4320 VMA_ASSERT(!m_InsideString);
4325 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
4329 void VmaJsonWriter::BeginArray(
bool singleLine)
4331 VMA_ASSERT(!m_InsideString);
4337 item.type = COLLECTION_TYPE_ARRAY;
4338 item.valueCount = 0;
4339 item.singleLineMode = singleLine;
4340 m_Stack.push_back(item);
4343 void VmaJsonWriter::EndArray()
4345 VMA_ASSERT(!m_InsideString);
4350 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
4354 void VmaJsonWriter::WriteString(
const char* pStr)
4360 void VmaJsonWriter::BeginString(
const char* pStr)
4362 VMA_ASSERT(!m_InsideString);
4366 m_InsideString =
true;
4367 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4369 ContinueString(pStr);
4373 void VmaJsonWriter::ContinueString(
const char* pStr)
4375 VMA_ASSERT(m_InsideString);
4377 const size_t strLen = strlen(pStr);
4378 for(
size_t i = 0; i < strLen; ++i)
4411 VMA_ASSERT(0 &&
"Character not currently supported.");
4417 void VmaJsonWriter::ContinueString(uint32_t n)
4419 VMA_ASSERT(m_InsideString);
4423 void VmaJsonWriter::ContinueString(uint64_t n)
4425 VMA_ASSERT(m_InsideString);
4429 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
4431 VMA_ASSERT(m_InsideString);
4432 m_SB.AddPointer(ptr);
4435 void VmaJsonWriter::EndString(
const char* pStr)
4437 VMA_ASSERT(m_InsideString);
4438 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4440 ContinueString(pStr);
4443 m_InsideString =
false;
4446 void VmaJsonWriter::WriteNumber(uint32_t n)
4448 VMA_ASSERT(!m_InsideString);
4453 void VmaJsonWriter::WriteNumber(uint64_t n)
4455 VMA_ASSERT(!m_InsideString);
4460 void VmaJsonWriter::WriteBool(
bool b)
4462 VMA_ASSERT(!m_InsideString);
4464 m_SB.Add(b ?
"true" :
"false");
4467 void VmaJsonWriter::WriteNull()
4469 VMA_ASSERT(!m_InsideString);
4474 void VmaJsonWriter::BeginValue(
bool isString)
4476 if(!m_Stack.empty())
4478 StackItem& currItem = m_Stack.back();
4479 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4480 currItem.valueCount % 2 == 0)
4482 VMA_ASSERT(isString);
4485 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4486 currItem.valueCount % 2 != 0)
4490 else if(currItem.valueCount > 0)
4499 ++currItem.valueCount;
4503 void VmaJsonWriter::WriteIndent(
bool oneLess)
4505 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
4509 size_t count = m_Stack.size();
4510 if(count > 0 && oneLess)
4514 for(
size_t i = 0; i < count; ++i)
4521 #endif // #if VMA_STATS_STRING_ENABLED 4525 void VmaAllocation_T::SetUserData(VmaAllocator hAllocator,
void* pUserData)
4527 if(IsUserDataString())
4529 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
4531 FreeUserDataString(hAllocator);
4533 if(pUserData != VMA_NULL)
4535 const char*
const newStrSrc = (
char*)pUserData;
4536 const size_t newStrLen = strlen(newStrSrc);
4537 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
4538 memcpy(newStrDst, newStrSrc, newStrLen + 1);
4539 m_pUserData = newStrDst;
4544 m_pUserData = pUserData;
4548 void VmaAllocation_T::ChangeBlockAllocation(
4549 VmaAllocator hAllocator,
4550 VmaDeviceMemoryBlock* block,
4551 VkDeviceSize offset)
4553 VMA_ASSERT(block != VMA_NULL);
4554 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4557 if(block != m_BlockAllocation.m_Block)
4559 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
4560 if(IsPersistentMap())
4562 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
4563 block->Map(hAllocator, mapRefCount, VMA_NULL);
4566 m_BlockAllocation.m_Block = block;
4567 m_BlockAllocation.m_Offset = offset;
4570 VkDeviceSize VmaAllocation_T::GetOffset()
const 4574 case ALLOCATION_TYPE_BLOCK:
4575 return m_BlockAllocation.m_Offset;
4576 case ALLOCATION_TYPE_DEDICATED:
4584 VkDeviceMemory VmaAllocation_T::GetMemory()
const 4588 case ALLOCATION_TYPE_BLOCK:
4589 return m_BlockAllocation.m_Block->m_hMemory;
4590 case ALLOCATION_TYPE_DEDICATED:
4591 return m_DedicatedAllocation.m_hMemory;
4594 return VK_NULL_HANDLE;
4598 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 4602 case ALLOCATION_TYPE_BLOCK:
4603 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
4604 case ALLOCATION_TYPE_DEDICATED:
4605 return m_DedicatedAllocation.m_MemoryTypeIndex;
4612 void* VmaAllocation_T::GetMappedData()
const 4616 case ALLOCATION_TYPE_BLOCK:
4619 void* pBlockData = m_BlockAllocation.m_Block->m_Mapping.GetMappedData();
4620 VMA_ASSERT(pBlockData != VMA_NULL);
4621 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
4628 case ALLOCATION_TYPE_DEDICATED:
4629 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
4630 return m_DedicatedAllocation.m_pMappedData;
4637 bool VmaAllocation_T::CanBecomeLost()
const 4641 case ALLOCATION_TYPE_BLOCK:
4642 return m_BlockAllocation.m_CanBecomeLost;
4643 case ALLOCATION_TYPE_DEDICATED:
4651 VmaPool VmaAllocation_T::GetPool()
const 4653 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4654 return m_BlockAllocation.m_hPool;
4657 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4659 VMA_ASSERT(CanBecomeLost());
4665 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
4668 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
4673 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
4679 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
4689 void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
4691 VMA_ASSERT(IsUserDataString());
4692 if(m_pUserData != VMA_NULL)
4694 char*
const oldStr = (
char*)m_pUserData;
4695 const size_t oldStrLen = strlen(oldStr);
4696 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
4697 m_pUserData = VMA_NULL;
4701 void VmaAllocation_T::BlockAllocMap()
4703 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4705 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4711 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
4715 void VmaAllocation_T::BlockAllocUnmap()
4717 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4719 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4725 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
4729 VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData)
4731 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4735 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4737 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
4738 *ppData = m_DedicatedAllocation.m_pMappedData;
4744 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
4745 return VK_ERROR_MEMORY_MAP_FAILED;
4750 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4751 hAllocator->m_hDevice,
4752 m_DedicatedAllocation.m_hMemory,
4757 if(result == VK_SUCCESS)
4759 m_DedicatedAllocation.m_pMappedData = *ppData;
4766 void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
4768 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4770 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4775 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
4776 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
4777 hAllocator->m_hDevice,
4778 m_DedicatedAllocation.m_hMemory);
4783 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
4787 #if VMA_STATS_STRING_ENABLED 4790 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
4799 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
4803 json.WriteString(
"Blocks");
4806 json.WriteString(
"Allocations");
4809 json.WriteString(
"UnusedRanges");
4812 json.WriteString(
"UsedBytes");
4815 json.WriteString(
"UnusedBytes");
4820 json.WriteString(
"AllocationSize");
4821 json.BeginObject(
true);
4822 json.WriteString(
"Min");
4824 json.WriteString(
"Avg");
4826 json.WriteString(
"Max");
4833 json.WriteString(
"UnusedRangeSize");
4834 json.BeginObject(
true);
4835 json.WriteString(
"Min");
4837 json.WriteString(
"Avg");
4839 json.WriteString(
"Max");
4847 #endif // #if VMA_STATS_STRING_ENABLED 4849 struct VmaSuballocationItemSizeLess
4852 const VmaSuballocationList::iterator lhs,
4853 const VmaSuballocationList::iterator rhs)
const 4855 return lhs->size < rhs->size;
4858 const VmaSuballocationList::iterator lhs,
4859 VkDeviceSize rhsSize)
const 4861 return lhs->size < rhsSize;
4868 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
4872 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4873 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
4877 VmaBlockMetadata::~VmaBlockMetadata()
4881 void VmaBlockMetadata::Init(VkDeviceSize size)
4885 m_SumFreeSize = size;
4887 VmaSuballocation suballoc = {};
4888 suballoc.offset = 0;
4889 suballoc.size = size;
4890 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4891 suballoc.hAllocation = VK_NULL_HANDLE;
4893 m_Suballocations.push_back(suballoc);
4894 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
4896 m_FreeSuballocationsBySize.push_back(suballocItem);
4899 bool VmaBlockMetadata::Validate()
const 4901 if(m_Suballocations.empty())
4907 VkDeviceSize calculatedOffset = 0;
4909 uint32_t calculatedFreeCount = 0;
4911 VkDeviceSize calculatedSumFreeSize = 0;
4914 size_t freeSuballocationsToRegister = 0;
4916 bool prevFree =
false;
4918 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4919 suballocItem != m_Suballocations.cend();
4922 const VmaSuballocation& subAlloc = *suballocItem;
4925 if(subAlloc.offset != calculatedOffset)
4930 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
4932 if(prevFree && currFree)
4937 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
4944 calculatedSumFreeSize += subAlloc.size;
4945 ++calculatedFreeCount;
4946 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4948 ++freeSuballocationsToRegister;
4953 if(subAlloc.hAllocation->GetOffset() != subAlloc.offset)
4957 if(subAlloc.hAllocation->GetSize() != subAlloc.size)
4963 calculatedOffset += subAlloc.size;
4964 prevFree = currFree;
4969 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
4974 VkDeviceSize lastSize = 0;
4975 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
4977 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
4980 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4985 if(suballocItem->size < lastSize)
4990 lastSize = suballocItem->size;
4994 if(!ValidateFreeSuballocationList() ||
4995 (calculatedOffset != m_Size) ||
4996 (calculatedSumFreeSize != m_SumFreeSize) ||
4997 (calculatedFreeCount != m_FreeCount))
5005 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 5007 if(!m_FreeSuballocationsBySize.empty())
5009 return m_FreeSuballocationsBySize.back()->size;
5017 bool VmaBlockMetadata::IsEmpty()
const 5019 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
5022 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 5026 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5038 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5039 suballocItem != m_Suballocations.cend();
5042 const VmaSuballocation& suballoc = *suballocItem;
5043 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
5056 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 5058 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
5060 inoutStats.
size += m_Size;
5067 #if VMA_STATS_STRING_ENABLED 5069 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 5073 json.WriteString(
"TotalBytes");
5074 json.WriteNumber(m_Size);
5076 json.WriteString(
"UnusedBytes");
5077 json.WriteNumber(m_SumFreeSize);
5079 json.WriteString(
"Allocations");
5080 json.WriteNumber((uint64_t)m_Suballocations.size() - m_FreeCount);
5082 json.WriteString(
"UnusedRanges");
5083 json.WriteNumber(m_FreeCount);
5085 json.WriteString(
"Suballocations");
5088 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
5089 suballocItem != m_Suballocations.cend();
5090 ++suballocItem, ++i)
5092 json.BeginObject(
true);
5094 json.WriteString(
"Type");
5095 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
5097 json.WriteString(
"Size");
5098 json.WriteNumber(suballocItem->size);
5100 json.WriteString(
"Offset");
5101 json.WriteNumber(suballocItem->offset);
5103 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
5105 const void* pUserData = suballocItem->hAllocation->GetUserData();
5106 if(pUserData != VMA_NULL)
5108 json.WriteString(
"UserData");
5109 if(suballocItem->hAllocation->IsUserDataString())
5111 json.WriteString((
const char*)pUserData);
5116 json.ContinueString_Pointer(pUserData);
5129 #endif // #if VMA_STATS_STRING_ENABLED 5141 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
5143 VMA_ASSERT(IsEmpty());
5144 pAllocationRequest->offset = 0;
5145 pAllocationRequest->sumFreeSize = m_SumFreeSize;
5146 pAllocationRequest->sumItemSize = 0;
5147 pAllocationRequest->item = m_Suballocations.begin();
5148 pAllocationRequest->itemsToMakeLostCount = 0;
5151 bool VmaBlockMetadata::CreateAllocationRequest(
5152 uint32_t currentFrameIndex,
5153 uint32_t frameInUseCount,
5154 VkDeviceSize bufferImageGranularity,
5155 VkDeviceSize allocSize,
5156 VkDeviceSize allocAlignment,
5157 VmaSuballocationType allocType,
5158 bool canMakeOtherLost,
5159 VmaAllocationRequest* pAllocationRequest)
5161 VMA_ASSERT(allocSize > 0);
5162 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5163 VMA_ASSERT(pAllocationRequest != VMA_NULL);
5164 VMA_HEAVY_ASSERT(Validate());
5167 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
5173 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
5174 if(freeSuballocCount > 0)
5179 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5180 m_FreeSuballocationsBySize.data(),
5181 m_FreeSuballocationsBySize.data() + freeSuballocCount,
5183 VmaSuballocationItemSizeLess());
5184 size_t index = it - m_FreeSuballocationsBySize.data();
5185 for(; index < freeSuballocCount; ++index)
5190 bufferImageGranularity,
5194 m_FreeSuballocationsBySize[index],
5196 &pAllocationRequest->offset,
5197 &pAllocationRequest->itemsToMakeLostCount,
5198 &pAllocationRequest->sumFreeSize,
5199 &pAllocationRequest->sumItemSize))
5201 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
5209 for(
size_t index = freeSuballocCount; index--; )
5214 bufferImageGranularity,
5218 m_FreeSuballocationsBySize[index],
5220 &pAllocationRequest->offset,
5221 &pAllocationRequest->itemsToMakeLostCount,
5222 &pAllocationRequest->sumFreeSize,
5223 &pAllocationRequest->sumItemSize))
5225 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
5232 if(canMakeOtherLost)
5236 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
5237 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
5239 VmaAllocationRequest tmpAllocRequest = {};
5240 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
5241 suballocIt != m_Suballocations.end();
5244 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
5245 suballocIt->hAllocation->CanBecomeLost())
5250 bufferImageGranularity,
5256 &tmpAllocRequest.offset,
5257 &tmpAllocRequest.itemsToMakeLostCount,
5258 &tmpAllocRequest.sumFreeSize,
5259 &tmpAllocRequest.sumItemSize))
5261 tmpAllocRequest.item = suballocIt;
5263 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
5265 *pAllocationRequest = tmpAllocRequest;
5271 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
5280 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
5281 uint32_t currentFrameIndex,
5282 uint32_t frameInUseCount,
5283 VmaAllocationRequest* pAllocationRequest)
5285 while(pAllocationRequest->itemsToMakeLostCount > 0)
5287 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
5289 ++pAllocationRequest->item;
5291 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5292 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
5293 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
5294 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
5296 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
5297 --pAllocationRequest->itemsToMakeLostCount;
5305 VMA_HEAVY_ASSERT(Validate());
5306 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5307 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
5312 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5314 uint32_t lostAllocationCount = 0;
5315 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
5316 it != m_Suballocations.end();
5319 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
5320 it->hAllocation->CanBecomeLost() &&
5321 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
5323 it = FreeSuballocation(it);
5324 ++lostAllocationCount;
5327 return lostAllocationCount;
5330 void VmaBlockMetadata::Alloc(
5331 const VmaAllocationRequest& request,
5332 VmaSuballocationType type,
5333 VkDeviceSize allocSize,
5334 VmaAllocation hAllocation)
5336 VMA_ASSERT(request.item != m_Suballocations.end());
5337 VmaSuballocation& suballoc = *request.item;
5339 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5341 VMA_ASSERT(request.offset >= suballoc.offset);
5342 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
5343 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
5344 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
5348 UnregisterFreeSuballocation(request.item);
5350 suballoc.offset = request.offset;
5351 suballoc.size = allocSize;
5352 suballoc.type = type;
5353 suballoc.hAllocation = hAllocation;
5358 VmaSuballocation paddingSuballoc = {};
5359 paddingSuballoc.offset = request.offset + allocSize;
5360 paddingSuballoc.size = paddingEnd;
5361 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5362 VmaSuballocationList::iterator next = request.item;
5364 const VmaSuballocationList::iterator paddingEndItem =
5365 m_Suballocations.insert(next, paddingSuballoc);
5366 RegisterFreeSuballocation(paddingEndItem);
5372 VmaSuballocation paddingSuballoc = {};
5373 paddingSuballoc.offset = request.offset - paddingBegin;
5374 paddingSuballoc.size = paddingBegin;
5375 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5376 const VmaSuballocationList::iterator paddingBeginItem =
5377 m_Suballocations.insert(request.item, paddingSuballoc);
5378 RegisterFreeSuballocation(paddingBeginItem);
5382 m_FreeCount = m_FreeCount - 1;
5383 if(paddingBegin > 0)
5391 m_SumFreeSize -= allocSize;
5394 void VmaBlockMetadata::Free(
const VmaAllocation allocation)
5396 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5397 suballocItem != m_Suballocations.end();
5400 VmaSuballocation& suballoc = *suballocItem;
5401 if(suballoc.hAllocation == allocation)
5403 FreeSuballocation(suballocItem);
5404 VMA_HEAVY_ASSERT(Validate());
5408 VMA_ASSERT(0 &&
"Not found!");
5411 void VmaBlockMetadata::FreeAtOffset(VkDeviceSize offset)
5413 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5414 suballocItem != m_Suballocations.end();
5417 VmaSuballocation& suballoc = *suballocItem;
5418 if(suballoc.offset == offset)
5420 FreeSuballocation(suballocItem);
5424 VMA_ASSERT(0 &&
"Not found!");
5427 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 5429 VkDeviceSize lastSize = 0;
5430 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5432 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5434 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
5439 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5444 if(it->size < lastSize)
5450 lastSize = it->size;
5455 bool VmaBlockMetadata::CheckAllocation(
5456 uint32_t currentFrameIndex,
5457 uint32_t frameInUseCount,
5458 VkDeviceSize bufferImageGranularity,
5459 VkDeviceSize allocSize,
5460 VkDeviceSize allocAlignment,
5461 VmaSuballocationType allocType,
5462 VmaSuballocationList::const_iterator suballocItem,
5463 bool canMakeOtherLost,
5464 VkDeviceSize* pOffset,
5465 size_t* itemsToMakeLostCount,
5466 VkDeviceSize* pSumFreeSize,
5467 VkDeviceSize* pSumItemSize)
const 5469 VMA_ASSERT(allocSize > 0);
5470 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5471 VMA_ASSERT(suballocItem != m_Suballocations.cend());
5472 VMA_ASSERT(pOffset != VMA_NULL);
5474 *itemsToMakeLostCount = 0;
5478 if(canMakeOtherLost)
5480 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5482 *pSumFreeSize = suballocItem->size;
5486 if(suballocItem->hAllocation->CanBecomeLost() &&
5487 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5489 ++*itemsToMakeLostCount;
5490 *pSumItemSize = suballocItem->size;
5499 if(m_Size - suballocItem->offset < allocSize)
5505 *pOffset = suballocItem->offset;
5508 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5510 *pOffset += VMA_DEBUG_MARGIN;
5514 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5515 *pOffset = VmaAlignUp(*pOffset, alignment);
5519 if(bufferImageGranularity > 1)
5521 bool bufferImageGranularityConflict =
false;
5522 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5523 while(prevSuballocItem != m_Suballocations.cbegin())
5526 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5527 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5529 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5531 bufferImageGranularityConflict =
true;
5539 if(bufferImageGranularityConflict)
5541 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5547 if(*pOffset >= suballocItem->offset + suballocItem->size)
5553 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
5556 VmaSuballocationList::const_iterator next = suballocItem;
5558 const VkDeviceSize requiredEndMargin =
5559 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5561 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
5563 if(suballocItem->offset + totalSize > m_Size)
5570 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
5571 if(totalSize > suballocItem->size)
5573 VkDeviceSize remainingSize = totalSize - suballocItem->size;
5574 while(remainingSize > 0)
5577 if(lastSuballocItem == m_Suballocations.cend())
5581 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5583 *pSumFreeSize += lastSuballocItem->size;
5587 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
5588 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
5589 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5591 ++*itemsToMakeLostCount;
5592 *pSumItemSize += lastSuballocItem->size;
5599 remainingSize = (lastSuballocItem->size < remainingSize) ?
5600 remainingSize - lastSuballocItem->size : 0;
5606 if(bufferImageGranularity > 1)
5608 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
5610 while(nextSuballocItem != m_Suballocations.cend())
5612 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5613 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5615 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5617 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
5618 if(nextSuballoc.hAllocation->CanBecomeLost() &&
5619 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5621 ++*itemsToMakeLostCount;
5640 const VmaSuballocation& suballoc = *suballocItem;
5641 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5643 *pSumFreeSize = suballoc.size;
5646 if(suballoc.size < allocSize)
5652 *pOffset = suballoc.offset;
5655 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5657 *pOffset += VMA_DEBUG_MARGIN;
5661 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5662 *pOffset = VmaAlignUp(*pOffset, alignment);
5666 if(bufferImageGranularity > 1)
5668 bool bufferImageGranularityConflict =
false;
5669 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5670 while(prevSuballocItem != m_Suballocations.cbegin())
5673 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5674 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5676 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5678 bufferImageGranularityConflict =
true;
5686 if(bufferImageGranularityConflict)
5688 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5693 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
5696 VmaSuballocationList::const_iterator next = suballocItem;
5698 const VkDeviceSize requiredEndMargin =
5699 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5702 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
5709 if(bufferImageGranularity > 1)
5711 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
5713 while(nextSuballocItem != m_Suballocations.cend())
5715 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5716 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5718 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5737 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
5739 VMA_ASSERT(item != m_Suballocations.end());
5740 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5742 VmaSuballocationList::iterator nextItem = item;
5744 VMA_ASSERT(nextItem != m_Suballocations.end());
5745 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
5747 item->size += nextItem->size;
5749 m_Suballocations.erase(nextItem);
5752 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
5755 VmaSuballocation& suballoc = *suballocItem;
5756 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5757 suballoc.hAllocation = VK_NULL_HANDLE;
5761 m_SumFreeSize += suballoc.size;
5764 bool mergeWithNext =
false;
5765 bool mergeWithPrev =
false;
5767 VmaSuballocationList::iterator nextItem = suballocItem;
5769 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
5771 mergeWithNext =
true;
5774 VmaSuballocationList::iterator prevItem = suballocItem;
5775 if(suballocItem != m_Suballocations.begin())
5778 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5780 mergeWithPrev =
true;
5786 UnregisterFreeSuballocation(nextItem);
5787 MergeFreeWithNext(suballocItem);
5792 UnregisterFreeSuballocation(prevItem);
5793 MergeFreeWithNext(prevItem);
5794 RegisterFreeSuballocation(prevItem);
5799 RegisterFreeSuballocation(suballocItem);
5800 return suballocItem;
5804 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
5806 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5807 VMA_ASSERT(item->size > 0);
5811 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5813 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5815 if(m_FreeSuballocationsBySize.empty())
5817 m_FreeSuballocationsBySize.push_back(item);
5821 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);
5829 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
5831 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5832 VMA_ASSERT(item->size > 0);
5836 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5838 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5840 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5841 m_FreeSuballocationsBySize.data(),
5842 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
5844 VmaSuballocationItemSizeLess());
5845 for(
size_t index = it - m_FreeSuballocationsBySize.data();
5846 index < m_FreeSuballocationsBySize.size();
5849 if(m_FreeSuballocationsBySize[index] == item)
5851 VmaVectorRemove(m_FreeSuballocationsBySize, index);
5854 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
5856 VMA_ASSERT(0 &&
"Not found.");
5865 VmaDeviceMemoryMapping::VmaDeviceMemoryMapping() :
5867 m_pMappedData(VMA_NULL)
5871 VmaDeviceMemoryMapping::~VmaDeviceMemoryMapping()
5873 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");
5876 VkResult VmaDeviceMemoryMapping::Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count,
void **ppData)
5883 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
5886 m_MapCount += count;
5887 VMA_ASSERT(m_pMappedData != VMA_NULL);
5888 if(ppData != VMA_NULL)
5890 *ppData = m_pMappedData;
5896 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5897 hAllocator->m_hDevice,
5903 if(result == VK_SUCCESS)
5905 if(ppData != VMA_NULL)
5907 *ppData = m_pMappedData;
5915 void VmaDeviceMemoryMapping::Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count)
5922 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
5923 if(m_MapCount >= count)
5925 m_MapCount -= count;
5928 m_pMappedData = VMA_NULL;
5929 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, hMemory);
5934 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
5941 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
5942 m_MemoryTypeIndex(UINT32_MAX),
5943 m_hMemory(VK_NULL_HANDLE),
5944 m_Metadata(hAllocator)
5948 void VmaDeviceMemoryBlock::Init(
5949 uint32_t newMemoryTypeIndex,
5950 VkDeviceMemory newMemory,
5951 VkDeviceSize newSize)
5953 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5955 m_MemoryTypeIndex = newMemoryTypeIndex;
5956 m_hMemory = newMemory;
5958 m_Metadata.Init(newSize);
5961 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
5965 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
5967 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
5968 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
5969 m_hMemory = VK_NULL_HANDLE;
5972 bool VmaDeviceMemoryBlock::Validate()
const 5974 if((m_hMemory == VK_NULL_HANDLE) ||
5975 (m_Metadata.GetSize() == 0))
5980 return m_Metadata.Validate();
5983 VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count,
void** ppData)
5985 return m_Mapping.Map(hAllocator, m_hMemory, count, ppData);
5988 void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
5990 m_Mapping.Unmap(hAllocator, m_hMemory, count);
5995 memset(&outInfo, 0,
sizeof(outInfo));
6014 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
6022 VmaPool_T::VmaPool_T(
6023 VmaAllocator hAllocator,
6027 createInfo.memoryTypeIndex,
6028 createInfo.blockSize,
6029 createInfo.minBlockCount,
6030 createInfo.maxBlockCount,
6032 createInfo.frameInUseCount,
6037 VmaPool_T::~VmaPool_T()
6041 #if VMA_STATS_STRING_ENABLED 6043 #endif // #if VMA_STATS_STRING_ENABLED 6045 VmaBlockVector::VmaBlockVector(
6046 VmaAllocator hAllocator,
6047 uint32_t memoryTypeIndex,
6048 VkDeviceSize preferredBlockSize,
6049 size_t minBlockCount,
6050 size_t maxBlockCount,
6051 VkDeviceSize bufferImageGranularity,
6052 uint32_t frameInUseCount,
6053 bool isCustomPool) :
6054 m_hAllocator(hAllocator),
6055 m_MemoryTypeIndex(memoryTypeIndex),
6056 m_PreferredBlockSize(preferredBlockSize),
6057 m_MinBlockCount(minBlockCount),
6058 m_MaxBlockCount(maxBlockCount),
6059 m_BufferImageGranularity(bufferImageGranularity),
6060 m_FrameInUseCount(frameInUseCount),
6061 m_IsCustomPool(isCustomPool),
6062 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
6063 m_HasEmptyBlock(false),
6064 m_pDefragmentator(VMA_NULL)
6068 VmaBlockVector::~VmaBlockVector()
6070 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
6072 for(
size_t i = m_Blocks.size(); i--; )
6074 m_Blocks[i]->Destroy(m_hAllocator);
6075 vma_delete(m_hAllocator, m_Blocks[i]);
6079 VkResult VmaBlockVector::CreateMinBlocks()
6081 for(
size_t i = 0; i < m_MinBlockCount; ++i)
6083 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
6084 if(res != VK_SUCCESS)
6092 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
6100 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6102 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6104 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6106 VMA_HEAVY_ASSERT(pBlock->Validate());
6107 pBlock->m_Metadata.AddPoolStats(*pStats);
6111 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;
6113 VkResult VmaBlockVector::Allocate(
6114 VmaPool hCurrentPool,
6115 uint32_t currentFrameIndex,
6116 const VkMemoryRequirements& vkMemReq,
6118 VmaSuballocationType suballocType,
6119 VmaAllocation* pAllocation)
6124 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6128 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6130 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6131 VMA_ASSERT(pCurrBlock);
6132 VmaAllocationRequest currRequest = {};
6133 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6136 m_BufferImageGranularity,
6144 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
6148 VkResult res = pCurrBlock->Map(m_hAllocator, 1, VMA_NULL);
6149 if(res != VK_SUCCESS)
6156 if(pCurrBlock->m_Metadata.IsEmpty())
6158 m_HasEmptyBlock =
false;
6161 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6162 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
6163 (*pAllocation)->InitBlockAllocation(
6172 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
6173 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6174 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
6179 const bool canCreateNewBlock =
6181 (m_Blocks.size() < m_MaxBlockCount);
6184 if(canCreateNewBlock)
6187 VkDeviceSize newBlockSize = m_PreferredBlockSize;
6188 uint32_t newBlockSizeShift = 0;
6189 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
6193 if(m_IsCustomPool ==
false)
6196 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
6197 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
6199 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6200 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= vkMemReq.size * 2)
6202 newBlockSize = smallerNewBlockSize;
6203 ++newBlockSizeShift;
6212 size_t newBlockIndex = 0;
6213 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
6215 if(m_IsCustomPool ==
false)
6217 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
6219 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6220 if(smallerNewBlockSize >= vkMemReq.size)
6222 newBlockSize = smallerNewBlockSize;
6223 ++newBlockSizeShift;
6224 res = CreateBlock(newBlockSize, &newBlockIndex);
6233 if(res == VK_SUCCESS)
6235 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
6236 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
6240 res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
6241 if(res != VK_SUCCESS)
6248 VmaAllocationRequest allocRequest;
6249 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
6250 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6251 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
6252 (*pAllocation)->InitBlockAllocation(
6255 allocRequest.offset,
6261 VMA_HEAVY_ASSERT(pBlock->Validate());
6262 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
6263 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
6271 if(canMakeOtherLost)
6273 uint32_t tryIndex = 0;
6274 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
6276 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
6277 VmaAllocationRequest bestRequest = {};
6278 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
6282 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6284 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6285 VMA_ASSERT(pCurrBlock);
6286 VmaAllocationRequest currRequest = {};
6287 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6290 m_BufferImageGranularity,
6297 const VkDeviceSize currRequestCost = currRequest.CalcCost();
6298 if(pBestRequestBlock == VMA_NULL ||
6299 currRequestCost < bestRequestCost)
6301 pBestRequestBlock = pCurrBlock;
6302 bestRequest = currRequest;
6303 bestRequestCost = currRequestCost;
6305 if(bestRequestCost == 0)
6313 if(pBestRequestBlock != VMA_NULL)
6317 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
6318 if(res != VK_SUCCESS)
6324 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
6330 if(pBestRequestBlock->m_Metadata.IsEmpty())
6332 m_HasEmptyBlock =
false;
6335 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6336 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
6337 (*pAllocation)->InitBlockAllocation(
6346 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
6347 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6348 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
6362 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
6364 return VK_ERROR_TOO_MANY_OBJECTS;
6368 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
6371 void VmaBlockVector::Free(
6372 VmaAllocation hAllocation)
6374 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
6378 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6380 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
6382 if(hAllocation->IsPersistentMap())
6384 pBlock->m_Mapping.Unmap(m_hAllocator, pBlock->m_hMemory, 1);
6387 pBlock->m_Metadata.Free(hAllocation);
6388 VMA_HEAVY_ASSERT(pBlock->Validate());
6390 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
6393 if(pBlock->m_Metadata.IsEmpty())
6396 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
6398 pBlockToDelete = pBlock;
6404 m_HasEmptyBlock =
true;
6409 else if(m_HasEmptyBlock)
6411 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
6412 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
6414 pBlockToDelete = pLastBlock;
6415 m_Blocks.pop_back();
6416 m_HasEmptyBlock =
false;
6420 IncrementallySortBlocks();
6425 if(pBlockToDelete != VMA_NULL)
6427 VMA_DEBUG_LOG(
" Deleted empty allocation");
6428 pBlockToDelete->Destroy(m_hAllocator);
6429 vma_delete(m_hAllocator, pBlockToDelete);
6433 size_t VmaBlockVector::CalcMaxBlockSize()
const 6436 for(
size_t i = m_Blocks.size(); i--; )
6438 result = VMA_MAX((uint64_t)result, (uint64_t)m_Blocks[i]->m_Metadata.GetSize());
6439 if(result >= m_PreferredBlockSize)
6447 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
6449 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6451 if(m_Blocks[blockIndex] == pBlock)
6453 VmaVectorRemove(m_Blocks, blockIndex);
6460 void VmaBlockVector::IncrementallySortBlocks()
6463 for(
size_t i = 1; i < m_Blocks.size(); ++i)
6465 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
6467 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);
6473 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
6475 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6476 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
6477 allocInfo.allocationSize = blockSize;
6478 VkDeviceMemory mem = VK_NULL_HANDLE;
6479 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
6488 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
6492 allocInfo.allocationSize);
6494 m_Blocks.push_back(pBlock);
6495 if(pNewBlockIndex != VMA_NULL)
6497 *pNewBlockIndex = m_Blocks.size() - 1;
6503 #if VMA_STATS_STRING_ENABLED 6505 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
6507 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6513 json.WriteString(
"MemoryTypeIndex");
6514 json.WriteNumber(m_MemoryTypeIndex);
6516 json.WriteString(
"BlockSize");
6517 json.WriteNumber(m_PreferredBlockSize);
6519 json.WriteString(
"BlockCount");
6520 json.BeginObject(
true);
6521 if(m_MinBlockCount > 0)
6523 json.WriteString(
"Min");
6524 json.WriteNumber((uint64_t)m_MinBlockCount);
6526 if(m_MaxBlockCount < SIZE_MAX)
6528 json.WriteString(
"Max");
6529 json.WriteNumber((uint64_t)m_MaxBlockCount);
6531 json.WriteString(
"Cur");
6532 json.WriteNumber((uint64_t)m_Blocks.size());
6535 if(m_FrameInUseCount > 0)
6537 json.WriteString(
"FrameInUseCount");
6538 json.WriteNumber(m_FrameInUseCount);
6543 json.WriteString(
"PreferredBlockSize");
6544 json.WriteNumber(m_PreferredBlockSize);
6547 json.WriteString(
"Blocks");
6549 for(
size_t i = 0; i < m_Blocks.size(); ++i)
6551 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
6558 #endif // #if VMA_STATS_STRING_ENABLED 6560 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
6561 VmaAllocator hAllocator,
6562 uint32_t currentFrameIndex)
6564 if(m_pDefragmentator == VMA_NULL)
6566 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
6572 return m_pDefragmentator;
6575 VkResult VmaBlockVector::Defragment(
6577 VkDeviceSize& maxBytesToMove,
6578 uint32_t& maxAllocationsToMove)
6580 if(m_pDefragmentator == VMA_NULL)
6585 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6588 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
6591 if(pDefragmentationStats != VMA_NULL)
6593 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
6594 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
6597 VMA_ASSERT(bytesMoved <= maxBytesToMove);
6598 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
6604 m_HasEmptyBlock =
false;
6605 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
6607 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
6608 if(pBlock->m_Metadata.IsEmpty())
6610 if(m_Blocks.size() > m_MinBlockCount)
6612 if(pDefragmentationStats != VMA_NULL)
6615 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
6618 VmaVectorRemove(m_Blocks, blockIndex);
6619 pBlock->Destroy(m_hAllocator);
6620 vma_delete(m_hAllocator, pBlock);
6624 m_HasEmptyBlock =
true;
6632 void VmaBlockVector::DestroyDefragmentator()
6634 if(m_pDefragmentator != VMA_NULL)
6636 vma_delete(m_hAllocator, m_pDefragmentator);
6637 m_pDefragmentator = VMA_NULL;
6641 void VmaBlockVector::MakePoolAllocationsLost(
6642 uint32_t currentFrameIndex,
6643 size_t* pLostAllocationCount)
6645 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6646 size_t lostAllocationCount = 0;
6647 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6649 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6651 lostAllocationCount += pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
6653 if(pLostAllocationCount != VMA_NULL)
6655 *pLostAllocationCount = lostAllocationCount;
6659 void VmaBlockVector::AddStats(
VmaStats* pStats)
6661 const uint32_t memTypeIndex = m_MemoryTypeIndex;
6662 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
6664 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6666 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6668 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6670 VMA_HEAVY_ASSERT(pBlock->Validate());
6672 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
6673 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6674 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6675 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
6682 VmaDefragmentator::VmaDefragmentator(
6683 VmaAllocator hAllocator,
6684 VmaBlockVector* pBlockVector,
6685 uint32_t currentFrameIndex) :
6686 m_hAllocator(hAllocator),
6687 m_pBlockVector(pBlockVector),
6688 m_CurrentFrameIndex(currentFrameIndex),
6690 m_AllocationsMoved(0),
6691 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
6692 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
6696 VmaDefragmentator::~VmaDefragmentator()
6698 for(
size_t i = m_Blocks.size(); i--; )
6700 vma_delete(m_hAllocator, m_Blocks[i]);
6704 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
6706 AllocationInfo allocInfo;
6707 allocInfo.m_hAllocation = hAlloc;
6708 allocInfo.m_pChanged = pChanged;
6709 m_Allocations.push_back(allocInfo);
6712 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData)
6715 if(m_pMappedDataForDefragmentation)
6717 *ppMappedData = m_pMappedDataForDefragmentation;
6722 if(m_pBlock->m_Mapping.GetMappedData())
6724 *ppMappedData = m_pBlock->m_Mapping.GetMappedData();
6729 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
6730 *ppMappedData = m_pMappedDataForDefragmentation;
6734 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
6736 if(m_pMappedDataForDefragmentation != VMA_NULL)
6738 m_pBlock->Unmap(hAllocator, 1);
6742 VkResult VmaDefragmentator::DefragmentRound(
6743 VkDeviceSize maxBytesToMove,
6744 uint32_t maxAllocationsToMove)
6746 if(m_Blocks.empty())
6751 size_t srcBlockIndex = m_Blocks.size() - 1;
6752 size_t srcAllocIndex = SIZE_MAX;
6758 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
6760 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
6763 if(srcBlockIndex == 0)
6770 srcAllocIndex = SIZE_MAX;
6775 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
6779 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
6780 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
6782 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
6783 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
6784 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
6785 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
6788 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
6790 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
6791 VmaAllocationRequest dstAllocRequest;
6792 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
6793 m_CurrentFrameIndex,
6794 m_pBlockVector->GetFrameInUseCount(),
6795 m_pBlockVector->GetBufferImageGranularity(),
6800 &dstAllocRequest) &&
6802 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
6804 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
6807 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
6808 (m_BytesMoved + size > maxBytesToMove))
6810 return VK_INCOMPLETE;
6813 void* pDstMappedData = VMA_NULL;
6814 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
6815 if(res != VK_SUCCESS)
6820 void* pSrcMappedData = VMA_NULL;
6821 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
6822 if(res != VK_SUCCESS)
6829 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
6830 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
6831 static_cast<size_t>(size));
6833 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
6834 pSrcBlockInfo->m_pBlock->m_Metadata.FreeAtOffset(srcOffset);
6836 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
6838 if(allocInfo.m_pChanged != VMA_NULL)
6840 *allocInfo.m_pChanged = VK_TRUE;
6843 ++m_AllocationsMoved;
6844 m_BytesMoved += size;
6846 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
6854 if(srcAllocIndex > 0)
6860 if(srcBlockIndex > 0)
6863 srcAllocIndex = SIZE_MAX;
6873 VkResult VmaDefragmentator::Defragment(
6874 VkDeviceSize maxBytesToMove,
6875 uint32_t maxAllocationsToMove)
6877 if(m_Allocations.empty())
6883 const size_t blockCount = m_pBlockVector->m_Blocks.size();
6884 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6886 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
6887 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
6888 m_Blocks.push_back(pBlockInfo);
6892 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
6895 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
6897 AllocationInfo& allocInfo = m_Allocations[blockIndex];
6899 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6901 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
6902 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
6903 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
6905 (*it)->m_Allocations.push_back(allocInfo);
6913 m_Allocations.clear();
6915 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6917 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
6918 pBlockInfo->CalcHasNonMovableAllocations();
6919 pBlockInfo->SortAllocationsBySizeDescecnding();
6923 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
6926 VkResult result = VK_SUCCESS;
6927 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
6929 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
6933 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6935 m_Blocks[blockIndex]->Unmap(m_hAllocator);
6941 bool VmaDefragmentator::MoveMakesSense(
6942 size_t dstBlockIndex, VkDeviceSize dstOffset,
6943 size_t srcBlockIndex, VkDeviceSize srcOffset)
6945 if(dstBlockIndex < srcBlockIndex)
6949 if(dstBlockIndex > srcBlockIndex)
6953 if(dstOffset < srcOffset)
6966 m_hDevice(pCreateInfo->device),
6967 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
6968 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
6969 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
6970 m_PreferredLargeHeapBlockSize(0),
6971 m_PhysicalDevice(pCreateInfo->physicalDevice),
6972 m_CurrentFrameIndex(0),
6973 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
6977 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
6978 memset(&m_MemProps, 0,
sizeof(m_MemProps));
6979 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
6981 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
6982 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
6984 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6986 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
6997 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
6998 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
7005 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
7007 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
7008 if(limit != VK_WHOLE_SIZE)
7010 m_HeapSizeLimit[heapIndex] = limit;
7011 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
7013 m_MemProps.memoryHeaps[heapIndex].size = limit;
7019 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7021 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
7023 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
7029 GetBufferImageGranularity(),
7034 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
7038 VmaAllocator_T::~VmaAllocator_T()
7040 VMA_ASSERT(m_Pools.empty());
7042 for(
size_t i = GetMemoryTypeCount(); i--; )
7044 vma_delete(
this, m_pDedicatedAllocations[i]);
7045 vma_delete(
this, m_pBlockVectors[i]);
7049 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
7051 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 7052 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
7053 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
7054 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
7055 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
7056 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
7057 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
7058 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
7059 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
7060 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
7061 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
7062 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
7063 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
7064 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
7065 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
7066 if(m_UseKhrDedicatedAllocation)
7068 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
7069 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
7070 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
7071 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
7073 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 7075 #define VMA_COPY_IF_NOT_NULL(funcName) \ 7076 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 7078 if(pVulkanFunctions != VMA_NULL)
7080 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
7081 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
7082 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
7083 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
7084 VMA_COPY_IF_NOT_NULL(vkMapMemory);
7085 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
7086 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
7087 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
7088 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
7089 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
7090 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
7091 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
7092 VMA_COPY_IF_NOT_NULL(vkCreateImage);
7093 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
7094 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
7095 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
7098 #undef VMA_COPY_IF_NOT_NULL 7102 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
7103 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
7104 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
7105 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
7106 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
7107 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
7108 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
7109 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
7110 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
7111 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
7112 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
7113 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
7114 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
7115 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
7116 if(m_UseKhrDedicatedAllocation)
7118 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
7119 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
7123 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
7125 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7126 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
7127 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
7128 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
7131 VkResult VmaAllocator_T::AllocateMemoryOfType(
7132 const VkMemoryRequirements& vkMemReq,
7133 bool dedicatedAllocation,
7134 VkBuffer dedicatedBuffer,
7135 VkImage dedicatedImage,
7137 uint32_t memTypeIndex,
7138 VmaSuballocationType suballocType,
7139 VmaAllocation* pAllocation)
7141 VMA_ASSERT(pAllocation != VMA_NULL);
7142 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
7148 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
7153 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
7154 VMA_ASSERT(blockVector);
7156 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
7157 bool preferDedicatedMemory =
7158 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
7159 dedicatedAllocation ||
7161 vkMemReq.size > preferredBlockSize / 2;
7163 if(preferDedicatedMemory &&
7165 finalCreateInfo.
pool == VK_NULL_HANDLE)
7174 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7178 return AllocateDedicatedMemory(
7192 VkResult res = blockVector->Allocate(
7194 m_CurrentFrameIndex.load(),
7199 if(res == VK_SUCCESS)
7207 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7211 res = AllocateDedicatedMemory(
7217 finalCreateInfo.pUserData,
7221 if(res == VK_SUCCESS)
7224 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
7230 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
7237 VkResult VmaAllocator_T::AllocateDedicatedMemory(
7239 VmaSuballocationType suballocType,
7240 uint32_t memTypeIndex,
7242 bool isUserDataString,
7244 VkBuffer dedicatedBuffer,
7245 VkImage dedicatedImage,
7246 VmaAllocation* pAllocation)
7248 VMA_ASSERT(pAllocation);
7250 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
7251 allocInfo.memoryTypeIndex = memTypeIndex;
7252 allocInfo.allocationSize = size;
7254 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
7255 if(m_UseKhrDedicatedAllocation)
7257 if(dedicatedBuffer != VK_NULL_HANDLE)
7259 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
7260 dedicatedAllocInfo.buffer = dedicatedBuffer;
7261 allocInfo.pNext = &dedicatedAllocInfo;
7263 else if(dedicatedImage != VK_NULL_HANDLE)
7265 dedicatedAllocInfo.image = dedicatedImage;
7266 allocInfo.pNext = &dedicatedAllocInfo;
7271 VkDeviceMemory hMemory = VK_NULL_HANDLE;
7272 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
7275 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
7279 void* pMappedData = VMA_NULL;
7282 res = (*m_VulkanFunctions.vkMapMemory)(
7291 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
7292 FreeVulkanMemory(memTypeIndex, size, hMemory);
7297 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
7298 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
7299 (*pAllocation)->SetUserData(
this, pUserData);
7303 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7304 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7305 VMA_ASSERT(pDedicatedAllocations);
7306 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
7309 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
7314 void VmaAllocator_T::GetBufferMemoryRequirements(
7316 VkMemoryRequirements& memReq,
7317 bool& requiresDedicatedAllocation,
7318 bool& prefersDedicatedAllocation)
const 7320 if(m_UseKhrDedicatedAllocation)
7322 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
7323 memReqInfo.buffer = hBuffer;
7325 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7327 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7328 memReq2.pNext = &memDedicatedReq;
7330 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7332 memReq = memReq2.memoryRequirements;
7333 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7334 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
7338 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
7339 requiresDedicatedAllocation =
false;
7340 prefersDedicatedAllocation =
false;
7344 void VmaAllocator_T::GetImageMemoryRequirements(
7346 VkMemoryRequirements& memReq,
7347 bool& requiresDedicatedAllocation,
7348 bool& prefersDedicatedAllocation)
const 7350 if(m_UseKhrDedicatedAllocation)
7352 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
7353 memReqInfo.image = hImage;
7355 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7357 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7358 memReq2.pNext = &memDedicatedReq;
7360 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7362 memReq = memReq2.memoryRequirements;
7363 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7364 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
7368 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
7369 requiresDedicatedAllocation =
false;
7370 prefersDedicatedAllocation =
false;
7374 VkResult VmaAllocator_T::AllocateMemory(
7375 const VkMemoryRequirements& vkMemReq,
7376 bool requiresDedicatedAllocation,
7377 bool prefersDedicatedAllocation,
7378 VkBuffer dedicatedBuffer,
7379 VkImage dedicatedImage,
7381 VmaSuballocationType suballocType,
7382 VmaAllocation* pAllocation)
7387 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
7388 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7393 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
7394 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7396 if(requiresDedicatedAllocation)
7400 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
7401 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7403 if(createInfo.
pool != VK_NULL_HANDLE)
7405 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
7406 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7409 if((createInfo.
pool != VK_NULL_HANDLE) &&
7412 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
7413 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7416 if(createInfo.
pool != VK_NULL_HANDLE)
7418 return createInfo.
pool->m_BlockVector.Allocate(
7420 m_CurrentFrameIndex.load(),
7429 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
7430 uint32_t memTypeIndex = UINT32_MAX;
7432 if(res == VK_SUCCESS)
7434 res = AllocateMemoryOfType(
7436 requiresDedicatedAllocation || prefersDedicatedAllocation,
7444 if(res == VK_SUCCESS)
7454 memoryTypeBits &= ~(1u << memTypeIndex);
7457 if(res == VK_SUCCESS)
7459 res = AllocateMemoryOfType(
7461 requiresDedicatedAllocation || prefersDedicatedAllocation,
7469 if(res == VK_SUCCESS)
7479 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7490 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
7492 VMA_ASSERT(allocation);
7494 if(allocation->CanBecomeLost() ==
false ||
7495 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7497 switch(allocation->GetType())
7499 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7501 VmaBlockVector* pBlockVector = VMA_NULL;
7502 VmaPool hPool = allocation->GetPool();
7503 if(hPool != VK_NULL_HANDLE)
7505 pBlockVector = &hPool->m_BlockVector;
7509 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7510 pBlockVector = m_pBlockVectors[memTypeIndex];
7512 pBlockVector->Free(allocation);
7515 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7516 FreeDedicatedMemory(allocation);
7523 allocation->SetUserData(
this, VMA_NULL);
7524 vma_delete(
this, allocation);
7527 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
7530 InitStatInfo(pStats->
total);
7531 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
7533 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
7537 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7539 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
7540 VMA_ASSERT(pBlockVector);
7541 pBlockVector->AddStats(pStats);
7546 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7547 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
7549 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
7554 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7556 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7557 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7558 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7559 VMA_ASSERT(pDedicatedAllocVector);
7560 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
7563 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
7564 VmaAddStatInfo(pStats->
total, allocationStatInfo);
7565 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
7566 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
7571 VmaPostprocessCalcStatInfo(pStats->
total);
7572 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
7573 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
7574 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
7575 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
7578 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
7580 VkResult VmaAllocator_T::Defragment(
7581 VmaAllocation* pAllocations,
7582 size_t allocationCount,
7583 VkBool32* pAllocationsChanged,
7587 if(pAllocationsChanged != VMA_NULL)
7589 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
7591 if(pDefragmentationStats != VMA_NULL)
7593 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
7596 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
7598 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
7600 const size_t poolCount = m_Pools.size();
7603 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
7605 VmaAllocation hAlloc = pAllocations[allocIndex];
7607 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
7609 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
7611 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
7613 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
7615 VmaBlockVector* pAllocBlockVector = VMA_NULL;
7617 const VmaPool hAllocPool = hAlloc->GetPool();
7619 if(hAllocPool != VK_NULL_HANDLE)
7621 pAllocBlockVector = &hAllocPool->GetBlockVector();
7626 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
7629 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
7631 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
7632 &pAllocationsChanged[allocIndex] : VMA_NULL;
7633 pDefragmentator->AddAllocation(hAlloc, pChanged);
7637 VkResult result = VK_SUCCESS;
7641 VkDeviceSize maxBytesToMove = SIZE_MAX;
7642 uint32_t maxAllocationsToMove = UINT32_MAX;
7643 if(pDefragmentationInfo != VMA_NULL)
7650 for(uint32_t memTypeIndex = 0;
7651 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
7655 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7657 result = m_pBlockVectors[memTypeIndex]->Defragment(
7658 pDefragmentationStats,
7660 maxAllocationsToMove);
7665 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
7667 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
7668 pDefragmentationStats,
7670 maxAllocationsToMove);
7676 for(
size_t poolIndex = poolCount; poolIndex--; )
7678 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
7682 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
7684 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7686 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
7693 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
7695 if(hAllocation->CanBecomeLost())
7701 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7702 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
7705 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7709 pAllocationInfo->
offset = 0;
7710 pAllocationInfo->
size = hAllocation->GetSize();
7712 pAllocationInfo->
pUserData = hAllocation->GetUserData();
7715 else if(localLastUseFrameIndex == localCurrFrameIndex)
7717 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7718 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7719 pAllocationInfo->
offset = hAllocation->GetOffset();
7720 pAllocationInfo->
size = hAllocation->GetSize();
7722 pAllocationInfo->
pUserData = hAllocation->GetUserData();
7727 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7729 localLastUseFrameIndex = localCurrFrameIndex;
7736 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7737 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7738 pAllocationInfo->
offset = hAllocation->GetOffset();
7739 pAllocationInfo->
size = hAllocation->GetSize();
7740 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
7741 pAllocationInfo->
pUserData = hAllocation->GetUserData();
7745 VkResult VmaAllocator_T::CreatePool(
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
7747 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
7760 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
7762 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
7763 if(res != VK_SUCCESS)
7765 vma_delete(
this, *pPool);
7772 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7773 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
7779 void VmaAllocator_T::DestroyPool(VmaPool pool)
7783 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7784 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
7785 VMA_ASSERT(success &&
"Pool not found in Allocator.");
7788 vma_delete(
this, pool);
7791 void VmaAllocator_T::GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats)
7793 pool->m_BlockVector.GetPoolStats(pPoolStats);
7796 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
7798 m_CurrentFrameIndex.store(frameIndex);
7801 void VmaAllocator_T::MakePoolAllocationsLost(
7803 size_t* pLostAllocationCount)
7805 hPool->m_BlockVector.MakePoolAllocationsLost(
7806 m_CurrentFrameIndex.load(),
7807 pLostAllocationCount);
7810 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
7812 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
7813 (*pAllocation)->InitLost();
7816 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
7818 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
7821 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7823 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7824 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
7826 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7827 if(res == VK_SUCCESS)
7829 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
7834 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
7839 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7842 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
7844 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
7850 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
7852 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
7854 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
7857 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
7859 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
7860 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7862 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7863 m_HeapSizeLimit[heapIndex] += size;
7867 VkResult VmaAllocator_T::Map(VmaAllocation hAllocation,
void** ppData)
7869 if(hAllocation->CanBecomeLost())
7871 return VK_ERROR_MEMORY_MAP_FAILED;
7874 switch(hAllocation->GetType())
7876 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7878 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
7879 char *pBytes = VMA_NULL;
7880 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
7881 if(res == VK_SUCCESS)
7883 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
7884 hAllocation->BlockAllocMap();
7888 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7889 return hAllocation->DedicatedAllocMap(
this, ppData);
7892 return VK_ERROR_MEMORY_MAP_FAILED;
7896 void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
7898 switch(hAllocation->GetType())
7900 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7902 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
7903 hAllocation->BlockAllocUnmap();
7904 pBlock->Unmap(
this, 1);
7907 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7908 hAllocation->DedicatedAllocUnmap(
this);
7915 void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
7917 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
7919 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7921 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7922 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7923 VMA_ASSERT(pDedicatedAllocations);
7924 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
7925 VMA_ASSERT(success);
7928 VkDeviceMemory hMemory = allocation->GetMemory();
7930 if(allocation->GetMappedData() != VMA_NULL)
7932 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
7935 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
7937 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
7940 #if VMA_STATS_STRING_ENABLED 7942 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
7944 bool dedicatedAllocationsStarted =
false;
7945 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7947 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7948 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7949 VMA_ASSERT(pDedicatedAllocVector);
7950 if(pDedicatedAllocVector->empty() ==
false)
7952 if(dedicatedAllocationsStarted ==
false)
7954 dedicatedAllocationsStarted =
true;
7955 json.WriteString(
"DedicatedAllocations");
7959 json.BeginString(
"Type ");
7960 json.ContinueString(memTypeIndex);
7965 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
7967 const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
7968 json.BeginObject(
true);
7970 json.WriteString(
"Type");
7971 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
7973 json.WriteString(
"Size");
7974 json.WriteNumber(hAlloc->GetSize());
7976 const void* pUserData = hAlloc->GetUserData();
7977 if(pUserData != VMA_NULL)
7979 json.WriteString(
"UserData");
7980 if(hAlloc->IsUserDataString())
7982 json.WriteString((
const char*)pUserData);
7987 json.ContinueString_Pointer(pUserData);
7998 if(dedicatedAllocationsStarted)
8004 bool allocationsStarted =
false;
8005 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
8007 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
8009 if(allocationsStarted ==
false)
8011 allocationsStarted =
true;
8012 json.WriteString(
"DefaultPools");
8016 json.BeginString(
"Type ");
8017 json.ContinueString(memTypeIndex);
8020 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
8023 if(allocationsStarted)
8030 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
8031 const size_t poolCount = m_Pools.size();
8034 json.WriteString(
"Pools");
8036 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
8038 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
8045 #endif // #if VMA_STATS_STRING_ENABLED 8047 static VkResult AllocateMemoryForImage(
8048 VmaAllocator allocator,
8051 VmaSuballocationType suballocType,
8052 VmaAllocation* pAllocation)
8054 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
8056 VkMemoryRequirements vkMemReq = {};
8057 bool requiresDedicatedAllocation =
false;
8058 bool prefersDedicatedAllocation =
false;
8059 allocator->GetImageMemoryRequirements(image, vkMemReq,
8060 requiresDedicatedAllocation, prefersDedicatedAllocation);
8062 return allocator->AllocateMemory(
8064 requiresDedicatedAllocation,
8065 prefersDedicatedAllocation,
8068 *pAllocationCreateInfo,
8078 VmaAllocator* pAllocator)
8080 VMA_ASSERT(pCreateInfo && pAllocator);
8081 VMA_DEBUG_LOG(
"vmaCreateAllocator");
8087 VmaAllocator allocator)
8089 if(allocator != VK_NULL_HANDLE)
8091 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
8092 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
8093 vma_delete(&allocationCallbacks, allocator);
8098 VmaAllocator allocator,
8099 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
8101 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
8102 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
8106 VmaAllocator allocator,
8107 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
8109 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
8110 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
8114 VmaAllocator allocator,
8115 uint32_t memoryTypeIndex,
8116 VkMemoryPropertyFlags* pFlags)
8118 VMA_ASSERT(allocator && pFlags);
8119 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
8120 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
8124 VmaAllocator allocator,
8125 uint32_t frameIndex)
8127 VMA_ASSERT(allocator);
8128 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
8130 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8132 allocator->SetCurrentFrameIndex(frameIndex);
8136 VmaAllocator allocator,
8139 VMA_ASSERT(allocator && pStats);
8140 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8141 allocator->CalculateStats(pStats);
8144 #if VMA_STATS_STRING_ENABLED 8147 VmaAllocator allocator,
8148 char** ppStatsString,
8149 VkBool32 detailedMap)
8151 VMA_ASSERT(allocator && ppStatsString);
8152 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8154 VmaStringBuilder sb(allocator);
8156 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
8160 allocator->CalculateStats(&stats);
8162 json.WriteString(
"Total");
8163 VmaPrintStatInfo(json, stats.
total);
8165 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
8167 json.BeginString(
"Heap ");
8168 json.ContinueString(heapIndex);
8172 json.WriteString(
"Size");
8173 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
8175 json.WriteString(
"Flags");
8176 json.BeginArray(
true);
8177 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
8179 json.WriteString(
"DEVICE_LOCAL");
8185 json.WriteString(
"Stats");
8186 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
8189 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
8191 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
8193 json.BeginString(
"Type ");
8194 json.ContinueString(typeIndex);
8199 json.WriteString(
"Flags");
8200 json.BeginArray(
true);
8201 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
8202 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
8204 json.WriteString(
"DEVICE_LOCAL");
8206 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8208 json.WriteString(
"HOST_VISIBLE");
8210 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
8212 json.WriteString(
"HOST_COHERENT");
8214 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
8216 json.WriteString(
"HOST_CACHED");
8218 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
8220 json.WriteString(
"LAZILY_ALLOCATED");
8226 json.WriteString(
"Stats");
8227 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
8236 if(detailedMap == VK_TRUE)
8238 allocator->PrintDetailedMap(json);
8244 const size_t len = sb.GetLength();
8245 char*
const pChars = vma_new_array(allocator,
char, len + 1);
8248 memcpy(pChars, sb.GetData(), len);
8251 *ppStatsString = pChars;
8255 VmaAllocator allocator,
8258 if(pStatsString != VMA_NULL)
8260 VMA_ASSERT(allocator);
8261 size_t len = strlen(pStatsString);
8262 vma_delete_array(allocator, pStatsString, len + 1);
8266 #endif // #if VMA_STATS_STRING_ENABLED 8272 VmaAllocator allocator,
8273 uint32_t memoryTypeBits,
8275 uint32_t* pMemoryTypeIndex)
8277 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8278 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8279 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8286 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
8290 switch(pAllocationCreateInfo->
usage)
8295 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8298 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
8301 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8302 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8305 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8306 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
8312 *pMemoryTypeIndex = UINT32_MAX;
8313 uint32_t minCost = UINT32_MAX;
8314 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
8315 memTypeIndex < allocator->GetMemoryTypeCount();
8316 ++memTypeIndex, memTypeBit <<= 1)
8319 if((memTypeBit & memoryTypeBits) != 0)
8321 const VkMemoryPropertyFlags currFlags =
8322 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
8324 if((requiredFlags & ~currFlags) == 0)
8327 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
8329 if(currCost < minCost)
8331 *pMemoryTypeIndex = memTypeIndex;
8341 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
8345 VmaAllocator allocator,
8349 VMA_ASSERT(allocator && pCreateInfo && pPool);
8351 VMA_DEBUG_LOG(
"vmaCreatePool");
8353 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8355 return allocator->CreatePool(pCreateInfo, pPool);
8359 VmaAllocator allocator,
8362 VMA_ASSERT(allocator);
8364 if(pool == VK_NULL_HANDLE)
8369 VMA_DEBUG_LOG(
"vmaDestroyPool");
8371 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8373 allocator->DestroyPool(pool);
8377 VmaAllocator allocator,
8381 VMA_ASSERT(allocator && pool && pPoolStats);
8383 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8385 allocator->GetPoolStats(pool, pPoolStats);
8389 VmaAllocator allocator,
8391 size_t* pLostAllocationCount)
8393 VMA_ASSERT(allocator && pool);
8395 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8397 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
8401 VmaAllocator allocator,
8402 const VkMemoryRequirements* pVkMemoryRequirements,
8404 VmaAllocation* pAllocation,
8407 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
8409 VMA_DEBUG_LOG(
"vmaAllocateMemory");
8411 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8413 VkResult result = allocator->AllocateMemory(
8414 *pVkMemoryRequirements,
8420 VMA_SUBALLOCATION_TYPE_UNKNOWN,
8423 if(pAllocationInfo && result == VK_SUCCESS)
8425 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8432 VmaAllocator allocator,
8435 VmaAllocation* pAllocation,
8438 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8440 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
8442 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8444 VkMemoryRequirements vkMemReq = {};
8445 bool requiresDedicatedAllocation =
false;
8446 bool prefersDedicatedAllocation =
false;
8447 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
8448 requiresDedicatedAllocation,
8449 prefersDedicatedAllocation);
8451 VkResult result = allocator->AllocateMemory(
8453 requiresDedicatedAllocation,
8454 prefersDedicatedAllocation,
8458 VMA_SUBALLOCATION_TYPE_BUFFER,
8461 if(pAllocationInfo && result == VK_SUCCESS)
8463 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8470 VmaAllocator allocator,
8473 VmaAllocation* pAllocation,
8476 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8478 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
8480 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8482 VkResult result = AllocateMemoryForImage(
8486 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
8489 if(pAllocationInfo && result == VK_SUCCESS)
8491 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8498 VmaAllocator allocator,
8499 VmaAllocation allocation)
8501 VMA_ASSERT(allocator && allocation);
8503 VMA_DEBUG_LOG(
"vmaFreeMemory");
8505 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8507 allocator->FreeMemory(allocation);
8511 VmaAllocator allocator,
8512 VmaAllocation allocation,
8515 VMA_ASSERT(allocator && allocation && pAllocationInfo);
8517 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8519 allocator->GetAllocationInfo(allocation, pAllocationInfo);
8523 VmaAllocator allocator,
8524 VmaAllocation allocation,
8527 VMA_ASSERT(allocator && allocation);
8529 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8531 allocation->SetUserData(allocator, pUserData);
8535 VmaAllocator allocator,
8536 VmaAllocation* pAllocation)
8538 VMA_ASSERT(allocator && pAllocation);
8540 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
8542 allocator->CreateLostAllocation(pAllocation);
8546 VmaAllocator allocator,
8547 VmaAllocation allocation,
8550 VMA_ASSERT(allocator && allocation && ppData);
8552 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8554 return allocator->Map(allocation, ppData);
8558 VmaAllocator allocator,
8559 VmaAllocation allocation)
8561 VMA_ASSERT(allocator && allocation);
8563 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8565 allocator->Unmap(allocation);
8569 VmaAllocator allocator,
8570 VmaAllocation* pAllocations,
8571 size_t allocationCount,
8572 VkBool32* pAllocationsChanged,
8576 VMA_ASSERT(allocator && pAllocations);
8578 VMA_DEBUG_LOG(
"vmaDefragment");
8580 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8582 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
8586 VmaAllocator allocator,
8587 const VkBufferCreateInfo* pBufferCreateInfo,
8590 VmaAllocation* pAllocation,
8593 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
8595 VMA_DEBUG_LOG(
"vmaCreateBuffer");
8597 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8599 *pBuffer = VK_NULL_HANDLE;
8600 *pAllocation = VK_NULL_HANDLE;
8603 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
8604 allocator->m_hDevice,
8606 allocator->GetAllocationCallbacks(),
8611 VkMemoryRequirements vkMemReq = {};
8612 bool requiresDedicatedAllocation =
false;
8613 bool prefersDedicatedAllocation =
false;
8614 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
8615 requiresDedicatedAllocation, prefersDedicatedAllocation);
8619 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
8621 VMA_ASSERT(vkMemReq.alignment %
8622 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
8624 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
8626 VMA_ASSERT(vkMemReq.alignment %
8627 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
8629 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
8631 VMA_ASSERT(vkMemReq.alignment %
8632 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
8636 res = allocator->AllocateMemory(
8638 requiresDedicatedAllocation,
8639 prefersDedicatedAllocation,
8642 *pAllocationCreateInfo,
8643 VMA_SUBALLOCATION_TYPE_BUFFER,
8648 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
8649 allocator->m_hDevice,
8651 (*pAllocation)->GetMemory(),
8652 (*pAllocation)->GetOffset());
8656 if(pAllocationInfo != VMA_NULL)
8658 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8662 allocator->FreeMemory(*pAllocation);
8663 *pAllocation = VK_NULL_HANDLE;
8664 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8665 *pBuffer = VK_NULL_HANDLE;
8668 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8669 *pBuffer = VK_NULL_HANDLE;
8676 VmaAllocator allocator,
8678 VmaAllocation allocation)
8680 if(buffer != VK_NULL_HANDLE)
8682 VMA_ASSERT(allocator);
8684 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
8686 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8688 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
8690 allocator->FreeMemory(allocation);
8695 VmaAllocator allocator,
8696 const VkImageCreateInfo* pImageCreateInfo,
8699 VmaAllocation* pAllocation,
8702 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
8704 VMA_DEBUG_LOG(
"vmaCreateImage");
8706 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8708 *pImage = VK_NULL_HANDLE;
8709 *pAllocation = VK_NULL_HANDLE;
8712 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
8713 allocator->m_hDevice,
8715 allocator->GetAllocationCallbacks(),
8719 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
8720 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
8721 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
8724 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
8728 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
8729 allocator->m_hDevice,
8731 (*pAllocation)->GetMemory(),
8732 (*pAllocation)->GetOffset());
8736 if(pAllocationInfo != VMA_NULL)
8738 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8742 allocator->FreeMemory(*pAllocation);
8743 *pAllocation = VK_NULL_HANDLE;
8744 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8745 *pImage = VK_NULL_HANDLE;
8748 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8749 *pImage = VK_NULL_HANDLE;
8756 VmaAllocator allocator,
8758 VmaAllocation allocation)
8760 if(image != VK_NULL_HANDLE)
8762 VMA_ASSERT(allocator);
8764 VMA_DEBUG_LOG(
"vmaDestroyImage");
8766 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8768 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
8770 allocator->FreeMemory(allocation);
8774 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:896
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1150
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:921
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:906
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1107
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:900
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1418
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:918
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1584
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1288
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1342
Definition: vk_mem_alloc.h:1187
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:889
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1225
Definition: vk_mem_alloc.h:1134
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:930
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:983
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:915
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1138
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:1048
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:903
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:1047
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:911
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1588
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:947
VmaStatInfo total
Definition: vk_mem_alloc.h:1057
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1596
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1209
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1579
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:904
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:831
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:924
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1296
Definition: vk_mem_alloc.h:1290
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1428
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:901
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1246
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1312
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1348
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:887
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1299
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
VmaMemoryUsage
Definition: vk_mem_alloc.h:1085
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1574
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1592
Definition: vk_mem_alloc.h:1124
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1233
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:902
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1053
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:837
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:858
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:863
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1594
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1220
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1358
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:897
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:1036
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1307
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:850
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1194
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:1049
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:854
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1302
Definition: vk_mem_alloc.h:1133
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1215
Definition: vk_mem_alloc.h:1206
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:1039
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:899
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:1320
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:933
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1351
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1204
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1239
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:971
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:1055
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1174
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:1048
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:908
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:852
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:907
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1334
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1442
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:927
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:1048
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:1045
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1339
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1423
Definition: vk_mem_alloc.h:1202
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1590
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:895
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:910
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:1043
Definition: vk_mem_alloc.h:1090
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1292
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:1041
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:905
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:909
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1161
Definition: vk_mem_alloc.h:1117
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1437
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:885
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:898
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1404
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1270
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:1049
Definition: vk_mem_alloc.h:1200
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:1056
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1345
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:1049
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1409