23 #ifndef AMD_VULKAN_MEMORY_ALLOCATOR_H 24 #define AMD_VULKAN_MEMORY_ALLOCATOR_H 723 #include <vulkan/vulkan.h> 725 VK_DEFINE_HANDLE(VmaAllocator)
729 VmaAllocator allocator,
731 VkDeviceMemory memory,
735 VmaAllocator allocator,
737 VkDeviceMemory memory,
886 VmaAllocator* pAllocator);
890 VmaAllocator allocator);
897 VmaAllocator allocator,
898 const VkPhysicalDeviceProperties** ppPhysicalDeviceProperties);
905 VmaAllocator allocator,
906 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties);
915 VmaAllocator allocator,
916 uint32_t memoryTypeIndex,
917 VkMemoryPropertyFlags* pFlags);
928 VmaAllocator allocator,
929 uint32_t frameIndex);
959 VmaAllocator allocator,
962 #define VMA_STATS_STRING_ENABLED 1 964 #if VMA_STATS_STRING_ENABLED 970 VmaAllocator allocator,
971 char** ppStatsString,
972 VkBool32 detailedMap);
975 VmaAllocator allocator,
978 #endif // #if VMA_STATS_STRING_ENABLED 980 VK_DEFINE_HANDLE(VmaPool)
1161 VmaAllocator allocator,
1162 uint32_t memoryTypeBits,
1164 uint32_t* pMemoryTypeIndex);
1265 VmaAllocator allocator,
1272 VmaAllocator allocator,
1282 VmaAllocator allocator,
1293 VmaAllocator allocator,
1295 size_t* pLostAllocationCount);
1297 VK_DEFINE_HANDLE(VmaAllocation)
1353 VmaAllocator allocator,
1354 const VkMemoryRequirements* pVkMemoryRequirements,
1356 VmaAllocation* pAllocation,
1366 VmaAllocator allocator,
1369 VmaAllocation* pAllocation,
1374 VmaAllocator allocator,
1377 VmaAllocation* pAllocation,
1382 VmaAllocator allocator,
1383 VmaAllocation allocation);
1387 VmaAllocator allocator,
1388 VmaAllocation allocation,
1405 VmaAllocator allocator,
1406 VmaAllocation allocation,
1420 VmaAllocator allocator,
1421 VmaAllocation* pAllocation);
1458 VmaAllocator allocator,
1459 VmaAllocation allocation,
1467 VmaAllocator allocator,
1468 VmaAllocation allocation);
1573 VmaAllocator allocator,
1574 VmaAllocation* pAllocations,
1575 size_t allocationCount,
1576 VkBool32* pAllocationsChanged,
1607 VmaAllocator allocator,
1608 const VkBufferCreateInfo* pBufferCreateInfo,
1611 VmaAllocation* pAllocation,
1626 VmaAllocator allocator,
1628 VmaAllocation allocation);
1632 VmaAllocator allocator,
1633 const VkImageCreateInfo* pImageCreateInfo,
1636 VmaAllocation* pAllocation,
1651 VmaAllocator allocator,
1653 VmaAllocation allocation);
1659 #endif // AMD_VULKAN_MEMORY_ALLOCATOR_H 1662 #ifdef __INTELLISENSE__ 1663 #define VMA_IMPLEMENTATION 1666 #ifdef VMA_IMPLEMENTATION 1667 #undef VMA_IMPLEMENTATION 1689 #if !defined(VMA_STATIC_VULKAN_FUNCTIONS) && !defined(VK_NO_PROTOTYPES) 1690 #define VMA_STATIC_VULKAN_FUNCTIONS 1 1702 #if VMA_USE_STL_CONTAINERS 1703 #define VMA_USE_STL_VECTOR 1 1704 #define VMA_USE_STL_UNORDERED_MAP 1 1705 #define VMA_USE_STL_LIST 1 1708 #if VMA_USE_STL_VECTOR 1712 #if VMA_USE_STL_UNORDERED_MAP 1713 #include <unordered_map> 1716 #if VMA_USE_STL_LIST 1725 #include <algorithm> 1729 #if !defined(_WIN32) 1736 #define VMA_ASSERT(expr) assert(expr) 1738 #define VMA_ASSERT(expr) 1744 #ifndef VMA_HEAVY_ASSERT 1746 #define VMA_HEAVY_ASSERT(expr) //VMA_ASSERT(expr) 1748 #define VMA_HEAVY_ASSERT(expr) 1754 #define VMA_NULL nullptr 1757 #ifndef VMA_ALIGN_OF 1758 #define VMA_ALIGN_OF(type) (__alignof(type)) 1761 #ifndef VMA_SYSTEM_ALIGNED_MALLOC 1763 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (_aligned_malloc((size), (alignment))) 1765 #define VMA_SYSTEM_ALIGNED_MALLOC(size, alignment) (aligned_alloc((alignment), (size) )) 1769 #ifndef VMA_SYSTEM_FREE 1771 #define VMA_SYSTEM_FREE(ptr) _aligned_free(ptr) 1773 #define VMA_SYSTEM_FREE(ptr) free(ptr) 1778 #define VMA_MIN(v1, v2) (std::min((v1), (v2))) 1782 #define VMA_MAX(v1, v2) (std::max((v1), (v2))) 1786 #define VMA_SWAP(v1, v2) std::swap((v1), (v2)) 1790 #define VMA_SORT(beg, end, cmp) std::sort(beg, end, cmp) 1793 #ifndef VMA_DEBUG_LOG 1794 #define VMA_DEBUG_LOG(format, ...) 1804 #if VMA_STATS_STRING_ENABLED 1805 static inline void VmaUint32ToStr(
char* outStr,
size_t strLen, uint32_t num)
1807 snprintf(outStr, strLen,
"%u", static_cast<unsigned int>(num));
1809 static inline void VmaUint64ToStr(
char* outStr,
size_t strLen, uint64_t num)
1811 snprintf(outStr, strLen,
"%llu", static_cast<unsigned long long>(num));
1813 static inline void VmaPtrToStr(
char* outStr,
size_t strLen,
const void* ptr)
1815 snprintf(outStr, strLen,
"%p", ptr);
// Thin wrapper over std::mutex exposing the Lock/Unlock interface that the
// rest of the library (and the VMA_MUTEX override point) expects.
class VmaMutex
{
public:
    VmaMutex() { }
    ~VmaMutex() { }
    void Lock() { m_Mutex.lock(); }
    void Unlock() { m_Mutex.unlock(); }
private:
    std::mutex m_Mutex;
};
1830 #define VMA_MUTEX VmaMutex 1841 #ifndef VMA_ATOMIC_UINT32 1842 #define VMA_ATOMIC_UINT32 std::atomic<uint32_t> 1845 #ifndef VMA_BEST_FIT 1858 #define VMA_BEST_FIT (1) 1861 #ifndef VMA_DEBUG_ALWAYS_DEDICATED_MEMORY 1866 #define VMA_DEBUG_ALWAYS_DEDICATED_MEMORY (0) 1869 #ifndef VMA_DEBUG_ALIGNMENT 1874 #define VMA_DEBUG_ALIGNMENT (1) 1877 #ifndef VMA_DEBUG_MARGIN 1882 #define VMA_DEBUG_MARGIN (0) 1885 #ifndef VMA_DEBUG_GLOBAL_MUTEX 1890 #define VMA_DEBUG_GLOBAL_MUTEX (0) 1893 #ifndef VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY 1898 #define VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY (1) 1901 #ifndef VMA_SMALL_HEAP_MAX_SIZE 1902 #define VMA_SMALL_HEAP_MAX_SIZE (1024ull * 1024 * 1024) 1906 #ifndef VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE 1907 #define VMA_DEFAULT_LARGE_HEAP_BLOCK_SIZE (256ull * 1024 * 1024) 1911 static const uint32_t VMA_FRAME_INDEX_LOST = UINT32_MAX;
1917 static VkAllocationCallbacks VmaEmptyAllocationCallbacks = {
1918 VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL, VMA_NULL };
// Returns the number of bits set to 1 in v.
// Branch-free SWAR popcount: pairwise bit sums are folded into nibble, byte,
// and halfword sums, leaving the total population count in the low bits.
static inline uint32_t VmaCountBitsSet(uint32_t v)
{
    uint32_t c = v - ((v >> 1) & 0x55555555);
    c = ((c >> 2) & 0x33333333) + (c & 0x33333333);
    c = ((c >> 4) + c) & 0x0F0F0F0F;
    c = ((c >> 8) + c) & 0x00FF00FF;
    c = ((c >> 16) + c) & 0x0000FFFF;
    return c;
}
// Aligns val up to the nearest multiple of align, e.g. VmaAlignUp(11, 8) == 16.
// Requires align > 0. NOTE(review): val + align - 1 may overflow T for values
// near the maximum of T — callers appear to pass sizes far below that.
template <typename T>
static inline T VmaAlignUp(T val, T align)
{
    return (val + align - 1) / align * align;
}

// Integer division of x by y with rounding to the nearest integer
// (adds half of y before dividing). Requires y > 0.
template <typename T>
inline T VmaRoundDiv(T x, T y)
{
    return (x + (y / (T)2)) / y;
}
1948 template<
typename Iterator,
typename Compare>
1949 Iterator VmaQuickSortPartition(Iterator beg, Iterator end, Compare cmp)
1951 Iterator centerValue = end; --centerValue;
1952 Iterator insertIndex = beg;
1953 for(Iterator memTypeIndex = beg; memTypeIndex < centerValue; ++memTypeIndex)
1955 if(cmp(*memTypeIndex, *centerValue))
1957 if(insertIndex != memTypeIndex)
1959 VMA_SWAP(*memTypeIndex, *insertIndex);
1964 if(insertIndex != centerValue)
1966 VMA_SWAP(*insertIndex, *centerValue);
1971 template<
typename Iterator,
typename Compare>
1972 void VmaQuickSort(Iterator beg, Iterator end, Compare cmp)
1976 Iterator it = VmaQuickSortPartition<Iterator, Compare>(beg, end, cmp);
1977 VmaQuickSort<Iterator, Compare>(beg, it, cmp);
1978 VmaQuickSort<Iterator, Compare>(it + 1, end, cmp);
1982 #define VMA_SORT(beg, end, cmp) VmaQuickSort(beg, end, cmp) 1984 #endif // #ifndef VMA_SORT 1993 static inline bool VmaBlocksOnSamePage(
1994 VkDeviceSize resourceAOffset,
1995 VkDeviceSize resourceASize,
1996 VkDeviceSize resourceBOffset,
1997 VkDeviceSize pageSize)
1999 VMA_ASSERT(resourceAOffset + resourceASize <= resourceBOffset && resourceASize > 0 && pageSize > 0);
2000 VkDeviceSize resourceAEnd = resourceAOffset + resourceASize - 1;
2001 VkDeviceSize resourceAEndPage = resourceAEnd & ~(pageSize - 1);
2002 VkDeviceSize resourceBStart = resourceBOffset;
2003 VkDeviceSize resourceBStartPage = resourceBStart & ~(pageSize - 1);
2004 return resourceAEndPage == resourceBStartPage;
// Categorizes the content of a suballocation. Used by
// VmaIsBufferImageGranularityConflict to decide whether two neighboring
// suballocations may share a bufferImageGranularity page.
enum VmaSuballocationType
{
    VMA_SUBALLOCATION_TYPE_FREE = 0,
    VMA_SUBALLOCATION_TYPE_UNKNOWN = 1,
    VMA_SUBALLOCATION_TYPE_BUFFER = 2,
    VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN = 3, // Image tiling unknown.
    VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR = 4,
    VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL = 5,
    VMA_SUBALLOCATION_TYPE_MAX_ENUM = 0x7FFFFFFF
};
2024 static inline bool VmaIsBufferImageGranularityConflict(
2025 VmaSuballocationType suballocType1,
2026 VmaSuballocationType suballocType2)
2028 if(suballocType1 > suballocType2)
2030 VMA_SWAP(suballocType1, suballocType2);
2033 switch(suballocType1)
2035 case VMA_SUBALLOCATION_TYPE_FREE:
2037 case VMA_SUBALLOCATION_TYPE_UNKNOWN:
2039 case VMA_SUBALLOCATION_TYPE_BUFFER:
2041 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2042 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2043 case VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN:
2045 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN ||
2046 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR ||
2047 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2048 case VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR:
2050 suballocType2 == VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL;
2051 case VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL:
2063 VmaMutexLock(VMA_MUTEX& mutex,
bool useMutex) :
2064 m_pMutex(useMutex ? &mutex : VMA_NULL)
2081 VMA_MUTEX* m_pMutex;
2084 #if VMA_DEBUG_GLOBAL_MUTEX 2085 static VMA_MUTEX gDebugGlobalMutex;
2086 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK VmaMutexLock debugGlobalMutexLock(gDebugGlobalMutex, true); 2088 #define VMA_DEBUG_GLOBAL_MUTEX_LOCK 2092 static const VkDeviceSize VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER = 16;
/*
Binary search ("lower bound") over the sorted range [beg, end): returns an
iterator to the first element that is NOT less than key according to cmp, or
end when every element compares less. cmp(a, b) must return true when a < b.
*/
template <typename IterT, typename KeyT, typename CmpT>
static IterT VmaBinaryFindFirstNotLess(IterT beg, IterT end, const KeyT &key, CmpT cmp)
{
    size_t down = 0, up = (end - beg);
    while(down < up)
    {
        const size_t mid = (down + up) / 2;
        if(cmp(*(beg+mid), key))
        {
            down = mid + 1;
        }
        else
        {
            up = mid;
        }
    }
    return beg + down;
}
2125 static void* VmaMalloc(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t size,
size_t alignment)
2127 if((pAllocationCallbacks != VMA_NULL) &&
2128 (pAllocationCallbacks->pfnAllocation != VMA_NULL))
2130 return (*pAllocationCallbacks->pfnAllocation)(
2131 pAllocationCallbacks->pUserData,
2134 VK_SYSTEM_ALLOCATION_SCOPE_OBJECT);
2138 return VMA_SYSTEM_ALIGNED_MALLOC(size, alignment);
2142 static void VmaFree(
const VkAllocationCallbacks* pAllocationCallbacks,
void* ptr)
2144 if((pAllocationCallbacks != VMA_NULL) &&
2145 (pAllocationCallbacks->pfnFree != VMA_NULL))
2147 (*pAllocationCallbacks->pfnFree)(pAllocationCallbacks->pUserData, ptr);
2151 VMA_SYSTEM_FREE(ptr);
2155 template<
typename T>
2156 static T* VmaAllocate(
const VkAllocationCallbacks* pAllocationCallbacks)
2158 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T), VMA_ALIGN_OF(T));
2161 template<
typename T>
2162 static T* VmaAllocateArray(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t count)
2164 return (T*)VmaMalloc(pAllocationCallbacks,
sizeof(T) * count, VMA_ALIGN_OF(T));
2167 #define vma_new(allocator, type) new(VmaAllocate<type>(allocator))(type) 2169 #define vma_new_array(allocator, type, count) new(VmaAllocateArray<type>((allocator), (count)))(type) 2171 template<
typename T>
2172 static void vma_delete(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr)
2175 VmaFree(pAllocationCallbacks, ptr);
2178 template<
typename T>
2179 static void vma_delete_array(
const VkAllocationCallbacks* pAllocationCallbacks, T* ptr,
size_t count)
2183 for(
size_t i = count; i--; )
2187 VmaFree(pAllocationCallbacks, ptr);
2192 template<
typename T>
2193 class VmaStlAllocator
2196 const VkAllocationCallbacks*
const m_pCallbacks;
2197 typedef T value_type;
2199 VmaStlAllocator(
const VkAllocationCallbacks* pCallbacks) : m_pCallbacks(pCallbacks) { }
2200 template<
typename U> VmaStlAllocator(
const VmaStlAllocator<U>& src) : m_pCallbacks(src.m_pCallbacks) { }
2202 T* allocate(
size_t n) {
return VmaAllocateArray<T>(m_pCallbacks, n); }
2203 void deallocate(T* p,
size_t n) { VmaFree(m_pCallbacks, p); }
2205 template<
typename U>
2206 bool operator==(
const VmaStlAllocator<U>& rhs)
const 2208 return m_pCallbacks == rhs.m_pCallbacks;
2210 template<
typename U>
2211 bool operator!=(
const VmaStlAllocator<U>& rhs)
const 2213 return m_pCallbacks != rhs.m_pCallbacks;
2216 VmaStlAllocator& operator=(
const VmaStlAllocator& x) =
delete;
2219 #if VMA_USE_STL_VECTOR 2221 #define VmaVector std::vector 2223 template<
typename T,
typename allocatorT>
2224 static void VmaVectorInsert(std::vector<T, allocatorT>& vec,
size_t index,
const T& item)
2226 vec.insert(vec.begin() + index, item);
2229 template<
typename T,
typename allocatorT>
2230 static void VmaVectorRemove(std::vector<T, allocatorT>& vec,
size_t index)
2232 vec.erase(vec.begin() + index);
2235 #else // #if VMA_USE_STL_VECTOR 2240 template<
typename T,
typename AllocatorT>
2244 typedef T value_type;
2246 VmaVector(
const AllocatorT& allocator) :
2247 m_Allocator(allocator),
2254 VmaVector(
size_t count,
const AllocatorT& allocator) :
2255 m_Allocator(allocator),
2256 m_pArray(count ? (T*)VmaAllocateArray<T>(allocator.m_pCallbacks, count) : VMA_NULL),
2262 VmaVector(
const VmaVector<T, AllocatorT>& src) :
2263 m_Allocator(src.m_Allocator),
2264 m_pArray(src.m_Count ? (T*)VmaAllocateArray<T>(src.m_Allocator.m_pCallbacks, src.m_Count) : VMA_NULL),
2265 m_Count(src.m_Count),
2266 m_Capacity(src.m_Count)
2270 memcpy(m_pArray, src.m_pArray, m_Count *
sizeof(T));
2276 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2279 VmaVector& operator=(
const VmaVector<T, AllocatorT>& rhs)
2283 resize(rhs.m_Count);
2286 memcpy(m_pArray, rhs.m_pArray, m_Count *
sizeof(T));
2292 bool empty()
const {
return m_Count == 0; }
2293 size_t size()
const {
return m_Count; }
2294 T* data() {
return m_pArray; }
2295 const T* data()
const {
return m_pArray; }
2297 T& operator[](
size_t index)
2299 VMA_HEAVY_ASSERT(index < m_Count);
2300 return m_pArray[index];
2302 const T& operator[](
size_t index)
const 2304 VMA_HEAVY_ASSERT(index < m_Count);
2305 return m_pArray[index];
2310 VMA_HEAVY_ASSERT(m_Count > 0);
2313 const T& front()
const 2315 VMA_HEAVY_ASSERT(m_Count > 0);
2320 VMA_HEAVY_ASSERT(m_Count > 0);
2321 return m_pArray[m_Count - 1];
2323 const T& back()
const 2325 VMA_HEAVY_ASSERT(m_Count > 0);
2326 return m_pArray[m_Count - 1];
2329 void reserve(
size_t newCapacity,
bool freeMemory =
false)
2331 newCapacity = VMA_MAX(newCapacity, m_Count);
2333 if((newCapacity < m_Capacity) && !freeMemory)
2335 newCapacity = m_Capacity;
2338 if(newCapacity != m_Capacity)
2340 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator, newCapacity) : VMA_NULL;
2343 memcpy(newArray, m_pArray, m_Count *
sizeof(T));
2345 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2346 m_Capacity = newCapacity;
2347 m_pArray = newArray;
2351 void resize(
size_t newCount,
bool freeMemory =
false)
2353 size_t newCapacity = m_Capacity;
2354 if(newCount > m_Capacity)
2356 newCapacity = VMA_MAX(newCount, VMA_MAX(m_Capacity * 3 / 2, (
size_t)8));
2360 newCapacity = newCount;
2363 if(newCapacity != m_Capacity)
2365 T*
const newArray = newCapacity ? VmaAllocateArray<T>(m_Allocator.m_pCallbacks, newCapacity) : VMA_NULL;
2366 const size_t elementsToCopy = VMA_MIN(m_Count, newCount);
2367 if(elementsToCopy != 0)
2369 memcpy(newArray, m_pArray, elementsToCopy *
sizeof(T));
2371 VmaFree(m_Allocator.m_pCallbacks, m_pArray);
2372 m_Capacity = newCapacity;
2373 m_pArray = newArray;
2379 void clear(
bool freeMemory =
false)
2381 resize(0, freeMemory);
2384 void insert(
size_t index,
const T& src)
2386 VMA_HEAVY_ASSERT(index <= m_Count);
2387 const size_t oldCount = size();
2388 resize(oldCount + 1);
2389 if(index < oldCount)
2391 memmove(m_pArray + (index + 1), m_pArray + index, (oldCount - index) *
sizeof(T));
2393 m_pArray[index] = src;
2396 void remove(
size_t index)
2398 VMA_HEAVY_ASSERT(index < m_Count);
2399 const size_t oldCount = size();
2400 if(index < oldCount - 1)
2402 memmove(m_pArray + index, m_pArray + (index + 1), (oldCount - index - 1) *
sizeof(T));
2404 resize(oldCount - 1);
2407 void push_back(
const T& src)
2409 const size_t newIndex = size();
2410 resize(newIndex + 1);
2411 m_pArray[newIndex] = src;
2416 VMA_HEAVY_ASSERT(m_Count > 0);
2420 void push_front(
const T& src)
2427 VMA_HEAVY_ASSERT(m_Count > 0);
2431 typedef T* iterator;
2433 iterator begin() {
return m_pArray; }
2434 iterator end() {
return m_pArray + m_Count; }
2437 AllocatorT m_Allocator;
2443 template<
typename T,
typename allocatorT>
2444 static void VmaVectorInsert(VmaVector<T, allocatorT>& vec,
size_t index,
const T& item)
2446 vec.insert(index, item);
2449 template<
typename T,
typename allocatorT>
2450 static void VmaVectorRemove(VmaVector<T, allocatorT>& vec,
size_t index)
2455 #endif // #if VMA_USE_STL_VECTOR 2457 template<
typename CmpLess,
typename VectorT>
2458 size_t VmaVectorInsertSorted(VectorT& vector,
const typename VectorT::value_type& value)
2460 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
2462 vector.data() + vector.size(),
2464 CmpLess()) - vector.data();
2465 VmaVectorInsert(vector, indexToInsert, value);
2466 return indexToInsert;
2469 template<
typename CmpLess,
typename VectorT>
2470 bool VmaVectorRemoveSorted(VectorT& vector,
const typename VectorT::value_type& value)
2473 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2478 if((it != vector.end()) && !comparator(*it, value) && !comparator(value, *it))
2480 size_t indexToRemove = it - vector.begin();
2481 VmaVectorRemove(vector, indexToRemove);
2487 template<
typename CmpLess,
typename VectorT>
2488 size_t VmaVectorFindSorted(
const VectorT& vector,
const typename VectorT::value_type& value)
2491 typename VectorT::iterator it = VmaBinaryFindFirstNotLess(
2493 vector.data() + vector.size(),
2496 if(it != vector.size() && !comparator(*it, value) && !comparator(value, *it))
2498 return it - vector.begin();
2502 return vector.size();
2514 template<
typename T>
2515 class VmaPoolAllocator
2518 VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock);
2519 ~VmaPoolAllocator();
2527 uint32_t NextFreeIndex;
2534 uint32_t FirstFreeIndex;
2537 const VkAllocationCallbacks* m_pAllocationCallbacks;
2538 size_t m_ItemsPerBlock;
2539 VmaVector< ItemBlock, VmaStlAllocator<ItemBlock> > m_ItemBlocks;
2541 ItemBlock& CreateNewBlock();
2544 template<
typename T>
2545 VmaPoolAllocator<T>::VmaPoolAllocator(
const VkAllocationCallbacks* pAllocationCallbacks,
size_t itemsPerBlock) :
2546 m_pAllocationCallbacks(pAllocationCallbacks),
2547 m_ItemsPerBlock(itemsPerBlock),
2548 m_ItemBlocks(VmaStlAllocator<ItemBlock>(pAllocationCallbacks))
2550 VMA_ASSERT(itemsPerBlock > 0);
2553 template<
typename T>
2554 VmaPoolAllocator<T>::~VmaPoolAllocator()
2559 template<
typename T>
2560 void VmaPoolAllocator<T>::Clear()
2562 for(
size_t i = m_ItemBlocks.size(); i--; )
2563 vma_delete_array(m_pAllocationCallbacks, m_ItemBlocks[i].pItems, m_ItemsPerBlock);
2564 m_ItemBlocks.clear();
2567 template<
typename T>
2568 T* VmaPoolAllocator<T>::Alloc()
2570 for(
size_t i = m_ItemBlocks.size(); i--; )
2572 ItemBlock& block = m_ItemBlocks[i];
2574 if(block.FirstFreeIndex != UINT32_MAX)
2576 Item*
const pItem = &block.pItems[block.FirstFreeIndex];
2577 block.FirstFreeIndex = pItem->NextFreeIndex;
2578 return &pItem->Value;
2583 ItemBlock& newBlock = CreateNewBlock();
2584 Item*
const pItem = &newBlock.pItems[0];
2585 newBlock.FirstFreeIndex = pItem->NextFreeIndex;
2586 return &pItem->Value;
2589 template<
typename T>
2590 void VmaPoolAllocator<T>::Free(T* ptr)
2593 for(
size_t i = 0; i < m_ItemBlocks.size(); ++i)
2595 ItemBlock& block = m_ItemBlocks[i];
2599 memcpy(&pItemPtr, &ptr,
sizeof(pItemPtr));
2602 if((pItemPtr >= block.pItems) && (pItemPtr < block.pItems + m_ItemsPerBlock))
2604 const uint32_t index =
static_cast<uint32_t
>(pItemPtr - block.pItems);
2605 pItemPtr->NextFreeIndex = block.FirstFreeIndex;
2606 block.FirstFreeIndex = index;
2610 VMA_ASSERT(0 &&
"Pointer doesn't belong to this memory pool.");
2613 template<
typename T>
2614 typename VmaPoolAllocator<T>::ItemBlock& VmaPoolAllocator<T>::CreateNewBlock()
2616 ItemBlock newBlock = {
2617 vma_new_array(m_pAllocationCallbacks, Item, m_ItemsPerBlock), 0 };
2619 m_ItemBlocks.push_back(newBlock);
2622 for(uint32_t i = 0; i < m_ItemsPerBlock - 1; ++i)
2623 newBlock.pItems[i].NextFreeIndex = i + 1;
2624 newBlock.pItems[m_ItemsPerBlock - 1].NextFreeIndex = UINT32_MAX;
2625 return m_ItemBlocks.back();
2631 #if VMA_USE_STL_LIST 2633 #define VmaList std::list 2635 #else // #if VMA_USE_STL_LIST 2637 template<
typename T>
2646 template<
typename T>
2650 typedef VmaListItem<T> ItemType;
2652 VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks);
2656 size_t GetCount()
const {
return m_Count; }
2657 bool IsEmpty()
const {
return m_Count == 0; }
2659 ItemType* Front() {
return m_pFront; }
2660 const ItemType* Front()
const {
return m_pFront; }
2661 ItemType* Back() {
return m_pBack; }
2662 const ItemType* Back()
const {
return m_pBack; }
2664 ItemType* PushBack();
2665 ItemType* PushFront();
2666 ItemType* PushBack(
const T& value);
2667 ItemType* PushFront(
const T& value);
2672 ItemType* InsertBefore(ItemType* pItem);
2674 ItemType* InsertAfter(ItemType* pItem);
2676 ItemType* InsertBefore(ItemType* pItem,
const T& value);
2677 ItemType* InsertAfter(ItemType* pItem,
const T& value);
2679 void Remove(ItemType* pItem);
2682 const VkAllocationCallbacks*
const m_pAllocationCallbacks;
2683 VmaPoolAllocator<ItemType> m_ItemAllocator;
2689 VmaRawList(
const VmaRawList<T>& src);
2690 VmaRawList<T>& operator=(
const VmaRawList<T>& rhs);
2693 template<
typename T>
2694 VmaRawList<T>::VmaRawList(
const VkAllocationCallbacks* pAllocationCallbacks) :
2695 m_pAllocationCallbacks(pAllocationCallbacks),
2696 m_ItemAllocator(pAllocationCallbacks, 128),
2703 template<
typename T>
2704 VmaRawList<T>::~VmaRawList()
2710 template<
typename T>
2711 void VmaRawList<T>::Clear()
2713 if(IsEmpty() ==
false)
2715 ItemType* pItem = m_pBack;
2716 while(pItem != VMA_NULL)
2718 ItemType*
const pPrevItem = pItem->pPrev;
2719 m_ItemAllocator.Free(pItem);
2722 m_pFront = VMA_NULL;
2728 template<
typename T>
2729 VmaListItem<T>* VmaRawList<T>::PushBack()
2731 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2732 pNewItem->pNext = VMA_NULL;
2735 pNewItem->pPrev = VMA_NULL;
2736 m_pFront = pNewItem;
2742 pNewItem->pPrev = m_pBack;
2743 m_pBack->pNext = pNewItem;
2750 template<
typename T>
2751 VmaListItem<T>* VmaRawList<T>::PushFront()
2753 ItemType*
const pNewItem = m_ItemAllocator.Alloc();
2754 pNewItem->pPrev = VMA_NULL;
2757 pNewItem->pNext = VMA_NULL;
2758 m_pFront = pNewItem;
2764 pNewItem->pNext = m_pFront;
2765 m_pFront->pPrev = pNewItem;
2766 m_pFront = pNewItem;
2772 template<
typename T>
2773 VmaListItem<T>* VmaRawList<T>::PushBack(
const T& value)
2775 ItemType*
const pNewItem = PushBack();
2776 pNewItem->Value = value;
2780 template<
typename T>
2781 VmaListItem<T>* VmaRawList<T>::PushFront(
const T& value)
2783 ItemType*
const pNewItem = PushFront();
2784 pNewItem->Value = value;
2788 template<
typename T>
2789 void VmaRawList<T>::PopBack()
2791 VMA_HEAVY_ASSERT(m_Count > 0);
2792 ItemType*
const pBackItem = m_pBack;
2793 ItemType*
const pPrevItem = pBackItem->pPrev;
2794 if(pPrevItem != VMA_NULL)
2796 pPrevItem->pNext = VMA_NULL;
2798 m_pBack = pPrevItem;
2799 m_ItemAllocator.Free(pBackItem);
2803 template<
typename T>
2804 void VmaRawList<T>::PopFront()
2806 VMA_HEAVY_ASSERT(m_Count > 0);
2807 ItemType*
const pFrontItem = m_pFront;
2808 ItemType*
const pNextItem = pFrontItem->pNext;
2809 if(pNextItem != VMA_NULL)
2811 pNextItem->pPrev = VMA_NULL;
2813 m_pFront = pNextItem;
2814 m_ItemAllocator.Free(pFrontItem);
2818 template<
typename T>
2819 void VmaRawList<T>::Remove(ItemType* pItem)
2821 VMA_HEAVY_ASSERT(pItem != VMA_NULL);
2822 VMA_HEAVY_ASSERT(m_Count > 0);
2824 if(pItem->pPrev != VMA_NULL)
2826 pItem->pPrev->pNext = pItem->pNext;
2830 VMA_HEAVY_ASSERT(m_pFront == pItem);
2831 m_pFront = pItem->pNext;
2834 if(pItem->pNext != VMA_NULL)
2836 pItem->pNext->pPrev = pItem->pPrev;
2840 VMA_HEAVY_ASSERT(m_pBack == pItem);
2841 m_pBack = pItem->pPrev;
2844 m_ItemAllocator.Free(pItem);
2848 template<
typename T>
2849 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem)
2851 if(pItem != VMA_NULL)
2853 ItemType*
const prevItem = pItem->pPrev;
2854 ItemType*
const newItem = m_ItemAllocator.Alloc();
2855 newItem->pPrev = prevItem;
2856 newItem->pNext = pItem;
2857 pItem->pPrev = newItem;
2858 if(prevItem != VMA_NULL)
2860 prevItem->pNext = newItem;
2864 VMA_HEAVY_ASSERT(m_pFront == pItem);
2874 template<
typename T>
2875 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem)
2877 if(pItem != VMA_NULL)
2879 ItemType*
const nextItem = pItem->pNext;
2880 ItemType*
const newItem = m_ItemAllocator.Alloc();
2881 newItem->pNext = nextItem;
2882 newItem->pPrev = pItem;
2883 pItem->pNext = newItem;
2884 if(nextItem != VMA_NULL)
2886 nextItem->pPrev = newItem;
2890 VMA_HEAVY_ASSERT(m_pBack == pItem);
2900 template<
typename T>
2901 VmaListItem<T>* VmaRawList<T>::InsertBefore(ItemType* pItem,
const T& value)
2903 ItemType*
const newItem = InsertBefore(pItem);
2904 newItem->Value = value;
2908 template<
typename T>
2909 VmaListItem<T>* VmaRawList<T>::InsertAfter(ItemType* pItem,
const T& value)
2911 ItemType*
const newItem = InsertAfter(pItem);
2912 newItem->Value = value;
2916 template<
typename T,
typename AllocatorT>
2929 T& operator*()
const 2931 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2932 return m_pItem->Value;
2934 T* operator->()
const 2936 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2937 return &m_pItem->Value;
2940 iterator& operator++()
2942 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
2943 m_pItem = m_pItem->pNext;
2946 iterator& operator--()
2948 if(m_pItem != VMA_NULL)
2950 m_pItem = m_pItem->pPrev;
2954 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
2955 m_pItem = m_pList->Back();
2960 iterator operator++(
int)
2962 iterator result = *
this;
2966 iterator operator--(
int)
2968 iterator result = *
this;
2973 bool operator==(
const iterator& rhs)
const 2975 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2976 return m_pItem == rhs.m_pItem;
2978 bool operator!=(
const iterator& rhs)
const 2980 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
2981 return m_pItem != rhs.m_pItem;
2985 VmaRawList<T>* m_pList;
2986 VmaListItem<T>* m_pItem;
2988 iterator(VmaRawList<T>* pList, VmaListItem<T>* pItem) :
2994 friend class VmaList<T, AllocatorT>;
2997 class const_iterator
3006 const_iterator(
const iterator& src) :
3007 m_pList(src.m_pList),
3008 m_pItem(src.m_pItem)
3012 const T& operator*()
const 3014 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3015 return m_pItem->Value;
3017 const T* operator->()
const 3019 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3020 return &m_pItem->Value;
3023 const_iterator& operator++()
3025 VMA_HEAVY_ASSERT(m_pItem != VMA_NULL);
3026 m_pItem = m_pItem->pNext;
3029 const_iterator& operator--()
3031 if(m_pItem != VMA_NULL)
3033 m_pItem = m_pItem->pPrev;
3037 VMA_HEAVY_ASSERT(!m_pList->IsEmpty());
3038 m_pItem = m_pList->Back();
3043 const_iterator operator++(
int)
3045 const_iterator result = *
this;
3049 const_iterator operator--(
int)
3051 const_iterator result = *
this;
3056 bool operator==(
const const_iterator& rhs)
const 3058 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3059 return m_pItem == rhs.m_pItem;
3061 bool operator!=(
const const_iterator& rhs)
const 3063 VMA_HEAVY_ASSERT(m_pList == rhs.m_pList);
3064 return m_pItem != rhs.m_pItem;
3068 const_iterator(
const VmaRawList<T>* pList,
const VmaListItem<T>* pItem) :
3074 const VmaRawList<T>* m_pList;
3075 const VmaListItem<T>* m_pItem;
3077 friend class VmaList<T, AllocatorT>;
3080 VmaList(
const AllocatorT& allocator) : m_RawList(allocator.m_pCallbacks) { }
3082 bool empty()
const {
return m_RawList.IsEmpty(); }
3083 size_t size()
const {
return m_RawList.GetCount(); }
3085 iterator begin() {
return iterator(&m_RawList, m_RawList.Front()); }
3086 iterator end() {
return iterator(&m_RawList, VMA_NULL); }
3088 const_iterator cbegin()
const {
return const_iterator(&m_RawList, m_RawList.Front()); }
3089 const_iterator cend()
const {
return const_iterator(&m_RawList, VMA_NULL); }
3091 void clear() { m_RawList.Clear(); }
3092 void push_back(
const T& value) { m_RawList.PushBack(value); }
3093 void erase(iterator it) { m_RawList.Remove(it.m_pItem); }
3094 iterator insert(iterator it,
const T& value) {
return iterator(&m_RawList, m_RawList.InsertBefore(it.m_pItem, value)); }
3097 VmaRawList<T> m_RawList;
3100 #endif // #if VMA_USE_STL_LIST 3108 #if VMA_USE_STL_UNORDERED_MAP 3110 #define VmaPair std::pair 3112 #define VMA_MAP_TYPE(KeyT, ValueT) \ 3113 std::unordered_map< KeyT, ValueT, std::hash<KeyT>, std::equal_to<KeyT>, VmaStlAllocator< std::pair<KeyT, ValueT> > > 3115 #else // #if VMA_USE_STL_UNORDERED_MAP 3117 template<
typename T1,
typename T2>
3123 VmaPair() : first(), second() { }
3124 VmaPair(
const T1& firstSrc,
const T2& secondSrc) : first(firstSrc), second(secondSrc) { }
3130 template<
typename KeyT,
typename ValueT>
3134 typedef VmaPair<KeyT, ValueT> PairType;
3135 typedef PairType* iterator;
3137 VmaMap(
const VmaStlAllocator<PairType>& allocator) : m_Vector(allocator) { }
3139 iterator begin() {
return m_Vector.begin(); }
3140 iterator end() {
return m_Vector.end(); }
3142 void insert(
const PairType& pair);
3143 iterator find(
const KeyT& key);
3144 void erase(iterator it);
3147 VmaVector< PairType, VmaStlAllocator<PairType> > m_Vector;
3150 #define VMA_MAP_TYPE(KeyT, ValueT) VmaMap<KeyT, ValueT> 3152 template<
typename FirstT,
typename SecondT>
3153 struct VmaPairFirstLess
3155 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const VmaPair<FirstT, SecondT>& rhs)
const 3157 return lhs.first < rhs.first;
3159 bool operator()(
const VmaPair<FirstT, SecondT>& lhs,
const FirstT& rhsFirst)
const 3161 return lhs.first < rhsFirst;
3165 template<
typename KeyT,
typename ValueT>
3166 void VmaMap<KeyT, ValueT>::insert(
const PairType& pair)
3168 const size_t indexToInsert = VmaBinaryFindFirstNotLess(
3170 m_Vector.data() + m_Vector.size(),
3172 VmaPairFirstLess<KeyT, ValueT>()) - m_Vector.data();
3173 VmaVectorInsert(m_Vector, indexToInsert, pair);
3176 template<
typename KeyT,
typename ValueT>
3177 VmaPair<KeyT, ValueT>* VmaMap<KeyT, ValueT>::find(
const KeyT& key)
3179 PairType* it = VmaBinaryFindFirstNotLess(
3181 m_Vector.data() + m_Vector.size(),
3183 VmaPairFirstLess<KeyT, ValueT>());
3184 if((it != m_Vector.end()) && (it->first == key))
3190 return m_Vector.end();
3194 template<
typename KeyT,
typename ValueT>
3195 void VmaMap<KeyT, ValueT>::erase(iterator it)
3197 VmaVectorRemove(m_Vector, it - m_Vector.begin());
3200 #endif // #if VMA_USE_STL_UNORDERED_MAP 3206 class VmaDeviceMemoryBlock;
3208 struct VmaAllocation_T
3211 static const uint8_t MAP_COUNT_FLAG_PERSISTENT_MAP = 0x80;
3215 FLAG_USER_DATA_STRING = 0x01,
3219 enum ALLOCATION_TYPE
3221 ALLOCATION_TYPE_NONE,
3222 ALLOCATION_TYPE_BLOCK,
3223 ALLOCATION_TYPE_DEDICATED,
3226 VmaAllocation_T(uint32_t currentFrameIndex,
bool userDataString) :
3229 m_pUserData(VMA_NULL),
3230 m_LastUseFrameIndex(currentFrameIndex),
3231 m_Type((uint8_t)ALLOCATION_TYPE_NONE),
3232 m_SuballocationType((uint8_t)VMA_SUBALLOCATION_TYPE_UNKNOWN),
3234 m_Flags(userDataString ? (uint8_t)FLAG_USER_DATA_STRING : 0)
3240 VMA_ASSERT((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) == 0 &&
"Allocation was not unmapped before destruction.");
3243 VMA_ASSERT(m_pUserData == VMA_NULL);
3246 void InitBlockAllocation(
3248 VmaDeviceMemoryBlock* block,
3249 VkDeviceSize offset,
3250 VkDeviceSize alignment,
3252 VmaSuballocationType suballocationType,
3256 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3257 VMA_ASSERT(block != VMA_NULL);
3258 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3259 m_Alignment = alignment;
3261 m_MapCount = mapped ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3262 m_SuballocationType = (uint8_t)suballocationType;
3263 m_BlockAllocation.m_hPool = hPool;
3264 m_BlockAllocation.m_Block = block;
3265 m_BlockAllocation.m_Offset = offset;
3266 m_BlockAllocation.m_CanBecomeLost = canBecomeLost;
3271 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3272 VMA_ASSERT(m_LastUseFrameIndex.load() == VMA_FRAME_INDEX_LOST);
3273 m_Type = (uint8_t)ALLOCATION_TYPE_BLOCK;
3274 m_BlockAllocation.m_hPool = VK_NULL_HANDLE;
3275 m_BlockAllocation.m_Block = VMA_NULL;
3276 m_BlockAllocation.m_Offset = 0;
3277 m_BlockAllocation.m_CanBecomeLost =
true;
3280 void ChangeBlockAllocation(
3281 VmaAllocator hAllocator,
3282 VmaDeviceMemoryBlock* block,
3283 VkDeviceSize offset);
3286 void InitDedicatedAllocation(
3287 uint32_t memoryTypeIndex,
3288 VkDeviceMemory hMemory,
3289 VmaSuballocationType suballocationType,
3293 VMA_ASSERT(m_Type == ALLOCATION_TYPE_NONE);
3294 VMA_ASSERT(hMemory != VK_NULL_HANDLE);
3295 m_Type = (uint8_t)ALLOCATION_TYPE_DEDICATED;
3298 m_SuballocationType = (uint8_t)suballocationType;
3299 m_MapCount = (pMappedData != VMA_NULL) ? MAP_COUNT_FLAG_PERSISTENT_MAP : 0;
3300 m_DedicatedAllocation.m_MemoryTypeIndex = memoryTypeIndex;
3301 m_DedicatedAllocation.m_hMemory = hMemory;
3302 m_DedicatedAllocation.m_pMappedData = pMappedData;
3305 ALLOCATION_TYPE GetType()
const {
return (ALLOCATION_TYPE)m_Type; }
3306 VkDeviceSize GetAlignment()
const {
return m_Alignment; }
3307 VkDeviceSize GetSize()
const {
return m_Size; }
3308 bool IsUserDataString()
const {
return (m_Flags & FLAG_USER_DATA_STRING) != 0; }
3309 void* GetUserData()
const {
return m_pUserData; }
3310 void SetUserData(VmaAllocator hAllocator,
void* pUserData);
3311 VmaSuballocationType GetSuballocationType()
const {
return (VmaSuballocationType)m_SuballocationType; }
3313 VmaDeviceMemoryBlock* GetBlock()
const 3315 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
3316 return m_BlockAllocation.m_Block;
3318 VkDeviceSize GetOffset()
const;
3319 VkDeviceMemory GetMemory()
const;
3320 uint32_t GetMemoryTypeIndex()
const;
3321 bool IsPersistentMap()
const {
return (m_MapCount & MAP_COUNT_FLAG_PERSISTENT_MAP) != 0; }
3322 void* GetMappedData()
const;
3323 bool CanBecomeLost()
const;
3324 VmaPool GetPool()
const;
3326 uint32_t GetLastUseFrameIndex()
const 3328 return m_LastUseFrameIndex.load();
3330 bool CompareExchangeLastUseFrameIndex(uint32_t& expected, uint32_t desired)
3332 return m_LastUseFrameIndex.compare_exchange_weak(expected, desired);
3342 bool MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3344 void DedicatedAllocCalcStatsInfo(
VmaStatInfo& outInfo)
3346 VMA_ASSERT(m_Type == ALLOCATION_TYPE_DEDICATED);
3357 void BlockAllocMap();
3358 void BlockAllocUnmap();
3359 VkResult DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData);
3360 void DedicatedAllocUnmap(VmaAllocator hAllocator);
3363 VkDeviceSize m_Alignment;
3364 VkDeviceSize m_Size;
3366 VMA_ATOMIC_UINT32 m_LastUseFrameIndex;
3368 uint8_t m_SuballocationType;
3375 struct BlockAllocation
3378 VmaDeviceMemoryBlock* m_Block;
3379 VkDeviceSize m_Offset;
3380 bool m_CanBecomeLost;
3384 struct DedicatedAllocation
3386 uint32_t m_MemoryTypeIndex;
3387 VkDeviceMemory m_hMemory;
3388 void* m_pMappedData;
3394 BlockAllocation m_BlockAllocation;
3396 DedicatedAllocation m_DedicatedAllocation;
3399 void FreeUserDataString(VmaAllocator hAllocator);
// Represents one region (used or free) inside a VmaDeviceMemoryBlock.
// NOTE(review): extraction is lossy here — a `VkDeviceSize size;` member and the
// struct's braces appear to be missing from this view; confirm against the full file.
3406 struct VmaSuballocation
// Offset of this region from the start of the block's VkDeviceMemory.
3408 VkDeviceSize offset;
// Owning allocation handle; VK_NULL_HANDLE when this region is free.
3410 VmaAllocation hAllocation;
// VMA_SUBALLOCATION_TYPE_FREE for free regions, otherwise buffer/image kind.
3411 VmaSuballocationType type;
// Doubly-linked list of suballocations, kept ordered by offset.
3414 typedef VmaList< VmaSuballocation, VmaStlAllocator<VmaSuballocation> > VmaSuballocationList;
// Cost charged (in bytes) per allocation that would have to be made lost;
// used by VmaAllocationRequest::CalcCost to compare candidate placements.
3417 static const VkDeviceSize VMA_LOST_ALLOCATION_COST = 1048576;
// Parameters of a planned allocation inside a block, produced by
// CreateAllocationRequest / CheckAllocation and consumed by Alloc.
3432 struct VmaAllocationRequest
// Byte offset at which the new allocation would be placed.
3434 VkDeviceSize offset;
// Total free size of the suballocation(s) considered for this request.
3435 VkDeviceSize sumFreeSize;
// Total size of allocations that would need to be made lost.
3436 VkDeviceSize sumItemSize;
// Iterator to the suballocation where the allocation starts.
3437 VmaSuballocationList::iterator item;
// Number of existing allocations that must be made lost first.
3438 size_t itemsToMakeLostCount;
// Heuristic cost of this placement: bytes sacrificed plus a fixed penalty
// per allocation made lost. Lower is better.
3440 VkDeviceSize CalcCost()
const 3442 return sumItemSize + itemsToMakeLostCount * VMA_LOST_ALLOCATION_COST;
3450 class VmaBlockMetadata
3453 VmaBlockMetadata(VmaAllocator hAllocator);
3454 ~VmaBlockMetadata();
3455 void Init(VkDeviceSize size);
3458 bool Validate()
const;
3459 VkDeviceSize GetSize()
const {
return m_Size; }
3460 size_t GetAllocationCount()
const {
return m_Suballocations.size() - m_FreeCount; }
3461 VkDeviceSize GetSumFreeSize()
const {
return m_SumFreeSize; }
3462 VkDeviceSize GetUnusedRangeSizeMax()
const;
3464 bool IsEmpty()
const;
3466 void CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const;
3469 #if VMA_STATS_STRING_ENABLED 3470 void PrintDetailedMap(
class VmaJsonWriter& json)
const;
3474 void CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest);
3479 bool CreateAllocationRequest(
3480 uint32_t currentFrameIndex,
3481 uint32_t frameInUseCount,
3482 VkDeviceSize bufferImageGranularity,
3483 VkDeviceSize allocSize,
3484 VkDeviceSize allocAlignment,
3485 VmaSuballocationType allocType,
3486 bool canMakeOtherLost,
3487 VmaAllocationRequest* pAllocationRequest);
3489 bool MakeRequestedAllocationsLost(
3490 uint32_t currentFrameIndex,
3491 uint32_t frameInUseCount,
3492 VmaAllocationRequest* pAllocationRequest);
3494 uint32_t MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount);
3498 const VmaAllocationRequest& request,
3499 VmaSuballocationType type,
3500 VkDeviceSize allocSize,
3501 VmaAllocation hAllocation);
3504 void Free(
const VmaAllocation allocation);
3505 void FreeAtOffset(VkDeviceSize offset);
3508 VkDeviceSize m_Size;
3509 uint32_t m_FreeCount;
3510 VkDeviceSize m_SumFreeSize;
3511 VmaSuballocationList m_Suballocations;
3514 VmaVector< VmaSuballocationList::iterator, VmaStlAllocator< VmaSuballocationList::iterator > > m_FreeSuballocationsBySize;
3516 bool ValidateFreeSuballocationList()
const;
3520 bool CheckAllocation(
3521 uint32_t currentFrameIndex,
3522 uint32_t frameInUseCount,
3523 VkDeviceSize bufferImageGranularity,
3524 VkDeviceSize allocSize,
3525 VkDeviceSize allocAlignment,
3526 VmaSuballocationType allocType,
3527 VmaSuballocationList::const_iterator suballocItem,
3528 bool canMakeOtherLost,
3529 VkDeviceSize* pOffset,
3530 size_t* itemsToMakeLostCount,
3531 VkDeviceSize* pSumFreeSize,
3532 VkDeviceSize* pSumItemSize)
const;
3534 void MergeFreeWithNext(VmaSuballocationList::iterator item);
3538 VmaSuballocationList::iterator FreeSuballocation(VmaSuballocationList::iterator suballocItem);
3541 void RegisterFreeSuballocation(VmaSuballocationList::iterator item);
3544 void UnregisterFreeSuballocation(VmaSuballocationList::iterator item);
// Reference-counted wrapper around vkMapMemory/vkUnmapMemory for one
// VkDeviceMemory: the memory is mapped on the first Map and unmapped only
// when the counter returns to zero.
3548 class VmaDeviceMemoryMapping
3551 VmaDeviceMemoryMapping();
3552 ~VmaDeviceMemoryMapping();
// Returns the host pointer if currently mapped, otherwise null.
3554 void* GetMappedData()
const {
return m_pMappedData; }
// Increases the map counter by `count`; maps hMemory on the 0 -> nonzero
// transition and stores the resulting pointer in *ppData (may be null).
3557 VkResult Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count,
void **ppData);
// Decreases the map counter by `count`; unmaps on the transition to zero.
3558 void Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count);
// Number of outstanding Map calls not yet balanced by Unmap.
3562 uint32_t m_MapCount;
// Host pointer returned by vkMapMemory while m_MapCount != 0.
3563 void* m_pMappedData;
// One VkDeviceMemory object together with the metadata that subdivides it
// into suballocations and the shared persistent-mapping state.
3572 class VmaDeviceMemoryBlock
3575 uint32_t m_MemoryTypeIndex;
3576 VkDeviceMemory m_hMemory;
// Ref-counted mapping shared by all allocations placed in this block.
3577 VmaDeviceMemoryMapping m_Mapping;
// Bookkeeping of used/free ranges inside m_hMemory.
3578 VmaBlockMetadata m_Metadata;
3580 VmaDeviceMemoryBlock(VmaAllocator hAllocator);
// Destruction requires the Vulkan memory to have been released via Destroy()
// first — the destructor only asserts, it does not free.
3582 ~VmaDeviceMemoryBlock()
3584 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
// NOTE(review): the Init(...) declaration line itself is missing from this
// extraction; these are its remaining parameters. Confirm against full file.
3589 uint32_t newMemoryTypeIndex,
3590 VkDeviceMemory newMemory,
3591 VkDeviceSize newSize);
// Frees the underlying VkDeviceMemory; must be called before destruction.
3593 void Destroy(VmaAllocator allocator);
// Validates internal metadata consistency (debug aid).
3596 bool Validate()
const;
// Map/Unmap forward to m_Mapping with this block's m_hMemory.
3599 VkResult Map(VmaAllocator hAllocator, uint32_t count,
void** ppData);
3600 void Unmap(VmaAllocator hAllocator, uint32_t count);
3603 struct VmaPointerLess
3605 bool operator()(
const void* lhs,
const void* rhs)
const 3611 class VmaDefragmentator;
3619 struct VmaBlockVector
3622 VmaAllocator hAllocator,
3623 uint32_t memoryTypeIndex,
3624 VkDeviceSize preferredBlockSize,
3625 size_t minBlockCount,
3626 size_t maxBlockCount,
3627 VkDeviceSize bufferImageGranularity,
3628 uint32_t frameInUseCount,
3632 VkResult CreateMinBlocks();
3634 uint32_t GetMemoryTypeIndex()
const {
return m_MemoryTypeIndex; }
3635 VkDeviceSize GetPreferredBlockSize()
const {
return m_PreferredBlockSize; }
3636 VkDeviceSize GetBufferImageGranularity()
const {
return m_BufferImageGranularity; }
3637 uint32_t GetFrameInUseCount()
const {
return m_FrameInUseCount; }
3641 bool IsEmpty()
const {
return m_Blocks.empty(); }
3644 VmaPool hCurrentPool,
3645 uint32_t currentFrameIndex,
3646 const VkMemoryRequirements& vkMemReq,
3648 VmaSuballocationType suballocType,
3649 VmaAllocation* pAllocation);
3652 VmaAllocation hAllocation);
3657 #if VMA_STATS_STRING_ENABLED 3658 void PrintDetailedMap(
class VmaJsonWriter& json);
3661 void MakePoolAllocationsLost(
3662 uint32_t currentFrameIndex,
3663 size_t* pLostAllocationCount);
3665 VmaDefragmentator* EnsureDefragmentator(
3666 VmaAllocator hAllocator,
3667 uint32_t currentFrameIndex);
3669 VkResult Defragment(
3671 VkDeviceSize& maxBytesToMove,
3672 uint32_t& maxAllocationsToMove);
3674 void DestroyDefragmentator();
3677 friend class VmaDefragmentator;
3679 const VmaAllocator m_hAllocator;
3680 const uint32_t m_MemoryTypeIndex;
3681 const VkDeviceSize m_PreferredBlockSize;
3682 const size_t m_MinBlockCount;
3683 const size_t m_MaxBlockCount;
3684 const VkDeviceSize m_BufferImageGranularity;
3685 const uint32_t m_FrameInUseCount;
3686 const bool m_IsCustomPool;
3689 VmaVector< VmaDeviceMemoryBlock*, VmaStlAllocator<VmaDeviceMemoryBlock*> > m_Blocks;
3693 bool m_HasEmptyBlock;
3694 VmaDefragmentator* m_pDefragmentator;
3696 size_t CalcMaxBlockSize()
const;
3699 void Remove(VmaDeviceMemoryBlock* pBlock);
3703 void IncrementallySortBlocks();
3705 VkResult CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex);
3711 VmaBlockVector m_BlockVector;
3715 VmaAllocator hAllocator,
3719 VmaBlockVector& GetBlockVector() {
return m_BlockVector; }
3721 #if VMA_STATS_STRING_ENABLED 3726 class VmaDefragmentator
3728 const VmaAllocator m_hAllocator;
3729 VmaBlockVector*
const m_pBlockVector;
3730 uint32_t m_CurrentFrameIndex;
3731 VkDeviceSize m_BytesMoved;
3732 uint32_t m_AllocationsMoved;
3734 struct AllocationInfo
3736 VmaAllocation m_hAllocation;
3737 VkBool32* m_pChanged;
3740 m_hAllocation(VK_NULL_HANDLE),
3741 m_pChanged(VMA_NULL)
3746 struct AllocationInfoSizeGreater
3748 bool operator()(
const AllocationInfo& lhs,
const AllocationInfo& rhs)
const 3750 return lhs.m_hAllocation->GetSize() > rhs.m_hAllocation->GetSize();
3755 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3759 VmaDeviceMemoryBlock* m_pBlock;
3760 bool m_HasNonMovableAllocations;
3761 VmaVector< AllocationInfo, VmaStlAllocator<AllocationInfo> > m_Allocations;
3763 BlockInfo(
const VkAllocationCallbacks* pAllocationCallbacks) :
3765 m_HasNonMovableAllocations(true),
3766 m_Allocations(pAllocationCallbacks),
3767 m_pMappedDataForDefragmentation(VMA_NULL)
3771 void CalcHasNonMovableAllocations()
3773 const size_t blockAllocCount = m_pBlock->m_Metadata.GetAllocationCount();
3774 const size_t defragmentAllocCount = m_Allocations.size();
3775 m_HasNonMovableAllocations = blockAllocCount != defragmentAllocCount;
3778 void SortAllocationsBySizeDescecnding()
3780 VMA_SORT(m_Allocations.begin(), m_Allocations.end(), AllocationInfoSizeGreater());
3783 VkResult EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData);
3784 void Unmap(VmaAllocator hAllocator);
3788 void* m_pMappedDataForDefragmentation;
3791 struct BlockPointerLess
3793 bool operator()(
const BlockInfo* pLhsBlockInfo,
const VmaDeviceMemoryBlock* pRhsBlock)
const 3795 return pLhsBlockInfo->m_pBlock < pRhsBlock;
3797 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3799 return pLhsBlockInfo->m_pBlock < pRhsBlockInfo->m_pBlock;
3805 struct BlockInfoCompareMoveDestination
3807 bool operator()(
const BlockInfo* pLhsBlockInfo,
const BlockInfo* pRhsBlockInfo)
const 3809 if(pLhsBlockInfo->m_HasNonMovableAllocations && !pRhsBlockInfo->m_HasNonMovableAllocations)
3813 if(!pLhsBlockInfo->m_HasNonMovableAllocations && pRhsBlockInfo->m_HasNonMovableAllocations)
3817 if(pLhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize() < pRhsBlockInfo->m_pBlock->m_Metadata.GetSumFreeSize())
3825 typedef VmaVector< BlockInfo*, VmaStlAllocator<BlockInfo*> > BlockInfoVector;
3826 BlockInfoVector m_Blocks;
3828 VkResult DefragmentRound(
3829 VkDeviceSize maxBytesToMove,
3830 uint32_t maxAllocationsToMove);
3832 static bool MoveMakesSense(
3833 size_t dstBlockIndex, VkDeviceSize dstOffset,
3834 size_t srcBlockIndex, VkDeviceSize srcOffset);
3838 VmaAllocator hAllocator,
3839 VmaBlockVector* pBlockVector,
3840 uint32_t currentFrameIndex);
3842 ~VmaDefragmentator();
3844 VkDeviceSize GetBytesMoved()
const {
return m_BytesMoved; }
3845 uint32_t GetAllocationsMoved()
const {
return m_AllocationsMoved; }
3847 void AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged);
3849 VkResult Defragment(
3850 VkDeviceSize maxBytesToMove,
3851 uint32_t maxAllocationsToMove);
3855 struct VmaAllocator_T
3858 bool m_UseKhrDedicatedAllocation;
3860 bool m_AllocationCallbacksSpecified;
3861 VkAllocationCallbacks m_AllocationCallbacks;
3865 VkDeviceSize m_HeapSizeLimit[VK_MAX_MEMORY_HEAPS];
3866 VMA_MUTEX m_HeapSizeLimitMutex;
3868 VkPhysicalDeviceProperties m_PhysicalDeviceProperties;
3869 VkPhysicalDeviceMemoryProperties m_MemProps;
3872 VmaBlockVector* m_pBlockVectors[VK_MAX_MEMORY_TYPES];
3875 typedef VmaVector< VmaAllocation, VmaStlAllocator<VmaAllocation> > AllocationVectorType;
3876 AllocationVectorType* m_pDedicatedAllocations[VK_MAX_MEMORY_TYPES];
3877 VMA_MUTEX m_DedicatedAllocationsMutex[VK_MAX_MEMORY_TYPES];
3882 const VkAllocationCallbacks* GetAllocationCallbacks()
const 3884 return m_AllocationCallbacksSpecified ? &m_AllocationCallbacks : 0;
3888 return m_VulkanFunctions;
3891 VkDeviceSize GetBufferImageGranularity()
const 3894 static_cast<VkDeviceSize>(VMA_DEBUG_MIN_BUFFER_IMAGE_GRANULARITY),
3895 m_PhysicalDeviceProperties.limits.bufferImageGranularity);
3898 uint32_t GetMemoryHeapCount()
const {
return m_MemProps.memoryHeapCount; }
3899 uint32_t GetMemoryTypeCount()
const {
return m_MemProps.memoryTypeCount; }
3901 uint32_t MemoryTypeIndexToHeapIndex(uint32_t memTypeIndex)
const 3903 VMA_ASSERT(memTypeIndex < m_MemProps.memoryTypeCount);
3904 return m_MemProps.memoryTypes[memTypeIndex].heapIndex;
3907 void GetBufferMemoryRequirements(
3909 VkMemoryRequirements& memReq,
3910 bool& requiresDedicatedAllocation,
3911 bool& prefersDedicatedAllocation)
const;
3912 void GetImageMemoryRequirements(
3914 VkMemoryRequirements& memReq,
3915 bool& requiresDedicatedAllocation,
3916 bool& prefersDedicatedAllocation)
const;
3919 VkResult AllocateMemory(
3920 const VkMemoryRequirements& vkMemReq,
3921 bool requiresDedicatedAllocation,
3922 bool prefersDedicatedAllocation,
3923 VkBuffer dedicatedBuffer,
3924 VkImage dedicatedImage,
3926 VmaSuballocationType suballocType,
3927 VmaAllocation* pAllocation);
3930 void FreeMemory(
const VmaAllocation allocation);
3932 void CalculateStats(
VmaStats* pStats);
3934 #if VMA_STATS_STRING_ENABLED 3935 void PrintDetailedMap(
class VmaJsonWriter& json);
3938 VkResult Defragment(
3939 VmaAllocation* pAllocations,
3940 size_t allocationCount,
3941 VkBool32* pAllocationsChanged,
3945 void GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo);
3948 void DestroyPool(VmaPool pool);
3949 void GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats);
3951 void SetCurrentFrameIndex(uint32_t frameIndex);
3953 void MakePoolAllocationsLost(
3955 size_t* pLostAllocationCount);
3957 void CreateLostAllocation(VmaAllocation* pAllocation);
3959 VkResult AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory);
3960 void FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory);
3962 VkResult Map(VmaAllocation hAllocation,
void** ppData);
3963 void Unmap(VmaAllocation hAllocation);
3966 VkDeviceSize m_PreferredLargeHeapBlockSize;
3968 VkPhysicalDevice m_PhysicalDevice;
3969 VMA_ATOMIC_UINT32 m_CurrentFrameIndex;
3971 VMA_MUTEX m_PoolsMutex;
3973 VmaVector<VmaPool, VmaStlAllocator<VmaPool> > m_Pools;
3979 VkDeviceSize CalcPreferredBlockSize(uint32_t memTypeIndex);
3981 VkResult AllocateMemoryOfType(
3982 const VkMemoryRequirements& vkMemReq,
3983 bool dedicatedAllocation,
3984 VkBuffer dedicatedBuffer,
3985 VkImage dedicatedImage,
3987 uint32_t memTypeIndex,
3988 VmaSuballocationType suballocType,
3989 VmaAllocation* pAllocation);
3992 VkResult AllocateDedicatedMemory(
3994 VmaSuballocationType suballocType,
3995 uint32_t memTypeIndex,
3997 bool isUserDataString,
3999 VkBuffer dedicatedBuffer,
4000 VkImage dedicatedImage,
4001 VmaAllocation* pAllocation);
4004 void FreeDedicatedMemory(VmaAllocation allocation);
4010 static void* VmaMalloc(VmaAllocator hAllocator,
size_t size,
size_t alignment)
4012 return VmaMalloc(&hAllocator->m_AllocationCallbacks, size, alignment);
4015 static void VmaFree(VmaAllocator hAllocator,
void* ptr)
4017 VmaFree(&hAllocator->m_AllocationCallbacks, ptr);
4020 template<
typename T>
4021 static T* VmaAllocate(VmaAllocator hAllocator)
4023 return (T*)VmaMalloc(hAllocator,
sizeof(T), VMA_ALIGN_OF(T));
4026 template<
typename T>
4027 static T* VmaAllocateArray(VmaAllocator hAllocator,
size_t count)
4029 return (T*)VmaMalloc(hAllocator,
sizeof(T) * count, VMA_ALIGN_OF(T));
4032 template<
typename T>
4033 static void vma_delete(VmaAllocator hAllocator, T* ptr)
4038 VmaFree(hAllocator, ptr);
4042 template<
typename T>
4043 static void vma_delete_array(VmaAllocator hAllocator, T* ptr,
size_t count)
4047 for(
size_t i = count; i--; )
4049 VmaFree(hAllocator, ptr);
4056 #if VMA_STATS_STRING_ENABLED 4058 class VmaStringBuilder
4061 VmaStringBuilder(VmaAllocator alloc) : m_Data(VmaStlAllocator<char>(alloc->GetAllocationCallbacks())) { }
4062 size_t GetLength()
const {
return m_Data.size(); }
4063 const char* GetData()
const {
return m_Data.data(); }
4065 void Add(
char ch) { m_Data.push_back(ch); }
4066 void Add(
const char* pStr);
4067 void AddNewLine() { Add(
'\n'); }
4068 void AddNumber(uint32_t num);
4069 void AddNumber(uint64_t num);
4070 void AddPointer(
const void* ptr);
4073 VmaVector< char, VmaStlAllocator<char> > m_Data;
4076 void VmaStringBuilder::Add(
const char* pStr)
4078 const size_t strLen = strlen(pStr);
4081 const size_t oldCount = m_Data.size();
4082 m_Data.resize(oldCount + strLen);
4083 memcpy(m_Data.data() + oldCount, pStr, strLen);
4087 void VmaStringBuilder::AddNumber(uint32_t num)
4090 VmaUint32ToStr(buf,
sizeof(buf), num);
4094 void VmaStringBuilder::AddNumber(uint64_t num)
4097 VmaUint64ToStr(buf,
sizeof(buf), num);
4101 void VmaStringBuilder::AddPointer(
const void* ptr)
4104 VmaPtrToStr(buf,
sizeof(buf), ptr);
4108 #endif // #if VMA_STATS_STRING_ENABLED 4113 #if VMA_STATS_STRING_ENABLED 4118 VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb);
4121 void BeginObject(
bool singleLine =
false);
4124 void BeginArray(
bool singleLine =
false);
4127 void WriteString(
const char* pStr);
4128 void BeginString(
const char* pStr = VMA_NULL);
4129 void ContinueString(
const char* pStr);
4130 void ContinueString(uint32_t n);
4131 void ContinueString(uint64_t n);
4132 void ContinueString_Pointer(
const void* ptr);
4133 void EndString(
const char* pStr = VMA_NULL);
4135 void WriteNumber(uint32_t n);
4136 void WriteNumber(uint64_t n);
4137 void WriteBool(
bool b);
4141 static const char*
const INDENT;
4143 enum COLLECTION_TYPE
4145 COLLECTION_TYPE_OBJECT,
4146 COLLECTION_TYPE_ARRAY,
4150 COLLECTION_TYPE type;
4151 uint32_t valueCount;
4152 bool singleLineMode;
4155 VmaStringBuilder& m_SB;
4156 VmaVector< StackItem, VmaStlAllocator<StackItem> > m_Stack;
4157 bool m_InsideString;
4159 void BeginValue(
bool isString);
4160 void WriteIndent(
bool oneLess =
false);
4163 const char*
const VmaJsonWriter::INDENT =
" ";
4165 VmaJsonWriter::VmaJsonWriter(
const VkAllocationCallbacks* pAllocationCallbacks, VmaStringBuilder& sb) :
4167 m_Stack(VmaStlAllocator<StackItem>(pAllocationCallbacks)),
4168 m_InsideString(false)
4172 VmaJsonWriter::~VmaJsonWriter()
4174 VMA_ASSERT(!m_InsideString);
4175 VMA_ASSERT(m_Stack.empty());
4178 void VmaJsonWriter::BeginObject(
bool singleLine)
4180 VMA_ASSERT(!m_InsideString);
4186 item.type = COLLECTION_TYPE_OBJECT;
4187 item.valueCount = 0;
4188 item.singleLineMode = singleLine;
4189 m_Stack.push_back(item);
4192 void VmaJsonWriter::EndObject()
4194 VMA_ASSERT(!m_InsideString);
4199 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_OBJECT);
4203 void VmaJsonWriter::BeginArray(
bool singleLine)
4205 VMA_ASSERT(!m_InsideString);
4211 item.type = COLLECTION_TYPE_ARRAY;
4212 item.valueCount = 0;
4213 item.singleLineMode = singleLine;
4214 m_Stack.push_back(item);
4217 void VmaJsonWriter::EndArray()
4219 VMA_ASSERT(!m_InsideString);
4224 VMA_ASSERT(!m_Stack.empty() && m_Stack.back().type == COLLECTION_TYPE_ARRAY);
4228 void VmaJsonWriter::WriteString(
const char* pStr)
4234 void VmaJsonWriter::BeginString(
const char* pStr)
4236 VMA_ASSERT(!m_InsideString);
4240 m_InsideString =
true;
4241 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4243 ContinueString(pStr);
4247 void VmaJsonWriter::ContinueString(
const char* pStr)
4249 VMA_ASSERT(m_InsideString);
4251 const size_t strLen = strlen(pStr);
4252 for(
size_t i = 0; i < strLen; ++i)
4285 VMA_ASSERT(0 &&
"Character not currently supported.");
4291 void VmaJsonWriter::ContinueString(uint32_t n)
4293 VMA_ASSERT(m_InsideString);
4297 void VmaJsonWriter::ContinueString(uint64_t n)
4299 VMA_ASSERT(m_InsideString);
4303 void VmaJsonWriter::ContinueString_Pointer(
const void* ptr)
4305 VMA_ASSERT(m_InsideString);
4306 m_SB.AddPointer(ptr);
4309 void VmaJsonWriter::EndString(
const char* pStr)
4311 VMA_ASSERT(m_InsideString);
4312 if(pStr != VMA_NULL && pStr[0] !=
'\0')
4314 ContinueString(pStr);
4317 m_InsideString =
false;
4320 void VmaJsonWriter::WriteNumber(uint32_t n)
4322 VMA_ASSERT(!m_InsideString);
4327 void VmaJsonWriter::WriteNumber(uint64_t n)
4329 VMA_ASSERT(!m_InsideString);
4334 void VmaJsonWriter::WriteBool(
bool b)
4336 VMA_ASSERT(!m_InsideString);
4338 m_SB.Add(b ?
"true" :
"false");
4341 void VmaJsonWriter::WriteNull()
4343 VMA_ASSERT(!m_InsideString);
4348 void VmaJsonWriter::BeginValue(
bool isString)
4350 if(!m_Stack.empty())
4352 StackItem& currItem = m_Stack.back();
4353 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4354 currItem.valueCount % 2 == 0)
4356 VMA_ASSERT(isString);
4359 if(currItem.type == COLLECTION_TYPE_OBJECT &&
4360 currItem.valueCount % 2 != 0)
4364 else if(currItem.valueCount > 0)
4373 ++currItem.valueCount;
4377 void VmaJsonWriter::WriteIndent(
bool oneLess)
4379 if(!m_Stack.empty() && !m_Stack.back().singleLineMode)
4383 size_t count = m_Stack.size();
4384 if(count > 0 && oneLess)
4388 for(
size_t i = 0; i < count; ++i)
4395 #endif // #if VMA_STATS_STRING_ENABLED 4399 void VmaAllocation_T::SetUserData(VmaAllocator hAllocator,
void* pUserData)
4401 if(IsUserDataString())
4403 VMA_ASSERT(pUserData == VMA_NULL || pUserData != m_pUserData);
4405 FreeUserDataString(hAllocator);
4407 if(pUserData != VMA_NULL)
4409 const char*
const newStrSrc = (
char*)pUserData;
4410 const size_t newStrLen = strlen(newStrSrc);
4411 char*
const newStrDst = vma_new_array(hAllocator,
char, newStrLen + 1);
4412 memcpy(newStrDst, newStrSrc, newStrLen + 1);
4413 m_pUserData = newStrDst;
4418 m_pUserData = pUserData;
4422 void VmaAllocation_T::ChangeBlockAllocation(
4423 VmaAllocator hAllocator,
4424 VmaDeviceMemoryBlock* block,
4425 VkDeviceSize offset)
4427 VMA_ASSERT(block != VMA_NULL);
4428 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4431 if(block != m_BlockAllocation.m_Block)
4433 uint32_t mapRefCount = m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP;
4434 if(IsPersistentMap())
4436 m_BlockAllocation.m_Block->Unmap(hAllocator, mapRefCount);
4437 block->Map(hAllocator, mapRefCount, VMA_NULL);
4440 m_BlockAllocation.m_Block = block;
4441 m_BlockAllocation.m_Offset = offset;
4444 VkDeviceSize VmaAllocation_T::GetOffset()
const 4448 case ALLOCATION_TYPE_BLOCK:
4449 return m_BlockAllocation.m_Offset;
4450 case ALLOCATION_TYPE_DEDICATED:
4458 VkDeviceMemory VmaAllocation_T::GetMemory()
const 4462 case ALLOCATION_TYPE_BLOCK:
4463 return m_BlockAllocation.m_Block->m_hMemory;
4464 case ALLOCATION_TYPE_DEDICATED:
4465 return m_DedicatedAllocation.m_hMemory;
4468 return VK_NULL_HANDLE;
4472 uint32_t VmaAllocation_T::GetMemoryTypeIndex()
const 4476 case ALLOCATION_TYPE_BLOCK:
4477 return m_BlockAllocation.m_Block->m_MemoryTypeIndex;
4478 case ALLOCATION_TYPE_DEDICATED:
4479 return m_DedicatedAllocation.m_MemoryTypeIndex;
4486 void* VmaAllocation_T::GetMappedData()
const 4490 case ALLOCATION_TYPE_BLOCK:
4493 void* pBlockData = m_BlockAllocation.m_Block->m_Mapping.GetMappedData();
4494 VMA_ASSERT(pBlockData != VMA_NULL);
4495 return (
char*)pBlockData + m_BlockAllocation.m_Offset;
4502 case ALLOCATION_TYPE_DEDICATED:
4503 VMA_ASSERT((m_DedicatedAllocation.m_pMappedData != VMA_NULL) == (m_MapCount != 0));
4504 return m_DedicatedAllocation.m_pMappedData;
4511 bool VmaAllocation_T::CanBecomeLost()
const 4515 case ALLOCATION_TYPE_BLOCK:
4516 return m_BlockAllocation.m_CanBecomeLost;
4517 case ALLOCATION_TYPE_DEDICATED:
4525 VmaPool VmaAllocation_T::GetPool()
const 4527 VMA_ASSERT(m_Type == ALLOCATION_TYPE_BLOCK);
4528 return m_BlockAllocation.m_hPool;
// Attempts to mark this allocation as lost by atomically setting its
// last-use frame index to VMA_FRAME_INDEX_LOST. Returns false if it is
// already lost or was used too recently (within frameInUseCount frames).
// NOTE(review): this extraction is missing lines of the body (the retry
// loop around the compare-exchange and the return statements) — the code
// below is only a fragment; consult the full file before editing.
4531 bool VmaAllocation_T::MakeLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
4533 VMA_ASSERT(CanBecomeLost());
4539 uint32_t localLastUseFrameIndex = GetLastUseFrameIndex();
// Already lost — nothing to do.
4542 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
// Still in use within the protected frame window — cannot be made lost.
4547 else if(localLastUseFrameIndex + frameInUseCount >= currentFrameIndex)
// CAS the frame index to LOST; on contention the loop presumably retries
// with the freshly observed value — TODO confirm against full source.
4553 if(CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, VMA_FRAME_INDEX_LOST))
4563 void VmaAllocation_T::FreeUserDataString(VmaAllocator hAllocator)
4565 VMA_ASSERT(IsUserDataString());
4566 if(m_pUserData != VMA_NULL)
4568 char*
const oldStr = (
char*)m_pUserData;
4569 const size_t oldStrLen = strlen(oldStr);
4570 vma_delete_array(hAllocator, oldStr, oldStrLen + 1);
4571 m_pUserData = VMA_NULL;
4575 void VmaAllocation_T::BlockAllocMap()
4577 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4579 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4585 VMA_ASSERT(0 &&
"Allocation mapped too many times simultaneously.");
4589 void VmaAllocation_T::BlockAllocUnmap()
4591 VMA_ASSERT(GetType() == ALLOCATION_TYPE_BLOCK);
4593 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4599 VMA_ASSERT(0 &&
"Unmapping allocation not previously mapped.");
4603 VkResult VmaAllocation_T::DedicatedAllocMap(VmaAllocator hAllocator,
void** ppData)
4605 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4609 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) < 0x7F)
4611 VMA_ASSERT(m_DedicatedAllocation.m_pMappedData != VMA_NULL);
4612 *ppData = m_DedicatedAllocation.m_pMappedData;
4618 VMA_ASSERT(0 &&
"Dedicated allocation mapped too many times simultaneously.");
4619 return VK_ERROR_MEMORY_MAP_FAILED;
4624 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
4625 hAllocator->m_hDevice,
4626 m_DedicatedAllocation.m_hMemory,
4631 if(result == VK_SUCCESS)
4633 m_DedicatedAllocation.m_pMappedData = *ppData;
4640 void VmaAllocation_T::DedicatedAllocUnmap(VmaAllocator hAllocator)
4642 VMA_ASSERT(GetType() == ALLOCATION_TYPE_DEDICATED);
4644 if((m_MapCount & ~MAP_COUNT_FLAG_PERSISTENT_MAP) != 0)
4649 m_DedicatedAllocation.m_pMappedData = VMA_NULL;
4650 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(
4651 hAllocator->m_hDevice,
4652 m_DedicatedAllocation.m_hMemory);
4657 VMA_ASSERT(0 &&
"Unmapping dedicated allocation not previously mapped.");
4661 #if VMA_STATS_STRING_ENABLED 4664 static const char* VMA_SUBALLOCATION_TYPE_NAMES[] = {
4673 static void VmaPrintStatInfo(VmaJsonWriter& json,
const VmaStatInfo& stat)
4677 json.WriteString(
"Blocks");
4680 json.WriteString(
"Allocations");
4683 json.WriteString(
"UnusedRanges");
4686 json.WriteString(
"UsedBytes");
4689 json.WriteString(
"UnusedBytes");
4694 json.WriteString(
"AllocationSize");
4695 json.BeginObject(
true);
4696 json.WriteString(
"Min");
4698 json.WriteString(
"Avg");
4700 json.WriteString(
"Max");
4707 json.WriteString(
"UnusedRangeSize");
4708 json.BeginObject(
true);
4709 json.WriteString(
"Min");
4711 json.WriteString(
"Avg");
4713 json.WriteString(
"Max");
// Comparator used to keep m_FreeSuballocationsBySize ordered by size and to
// binary-search it. Two overloads: iterator-vs-iterator for sorting, and
// iterator-vs-raw-size for lower-bound style lookups.
4721 #endif // #if VMA_STATS_STRING_ENABLED 4723 struct VmaSuballocationItemSizeLess
// Orders two free suballocations by their size.
4726 const VmaSuballocationList::iterator lhs,
4727 const VmaSuballocationList::iterator rhs)
const 4729 return lhs->size < rhs->size;
// Compares a free suballocation against a plain size value.
4732 const VmaSuballocationList::iterator lhs,
4733 VkDeviceSize rhsSize)
const 4735 return lhs->size < rhsSize;
4742 VmaBlockMetadata::VmaBlockMetadata(VmaAllocator hAllocator) :
4746 m_Suballocations(VmaStlAllocator<VmaSuballocation>(hAllocator->GetAllocationCallbacks())),
4747 m_FreeSuballocationsBySize(VmaStlAllocator<VmaSuballocationList::iterator>(hAllocator->GetAllocationCallbacks()))
4751 VmaBlockMetadata::~VmaBlockMetadata()
4755 void VmaBlockMetadata::Init(VkDeviceSize size)
4759 m_SumFreeSize = size;
4761 VmaSuballocation suballoc = {};
4762 suballoc.offset = 0;
4763 suballoc.size = size;
4764 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
4765 suballoc.hAllocation = VK_NULL_HANDLE;
4767 m_Suballocations.push_back(suballoc);
4768 VmaSuballocationList::iterator suballocItem = m_Suballocations.end();
4770 m_FreeSuballocationsBySize.push_back(suballocItem);
4773 bool VmaBlockMetadata::Validate()
const 4775 if(m_Suballocations.empty())
4781 VkDeviceSize calculatedOffset = 0;
4783 uint32_t calculatedFreeCount = 0;
4785 VkDeviceSize calculatedSumFreeSize = 0;
4788 size_t freeSuballocationsToRegister = 0;
4790 bool prevFree =
false;
4792 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4793 suballocItem != m_Suballocations.cend();
4796 const VmaSuballocation& subAlloc = *suballocItem;
4799 if(subAlloc.offset != calculatedOffset)
4804 const bool currFree = (subAlloc.type == VMA_SUBALLOCATION_TYPE_FREE);
4806 if(prevFree && currFree)
4811 if(currFree != (subAlloc.hAllocation == VK_NULL_HANDLE))
4818 calculatedSumFreeSize += subAlloc.size;
4819 ++calculatedFreeCount;
4820 if(subAlloc.size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
4822 ++freeSuballocationsToRegister;
4827 if(subAlloc.hAllocation->GetOffset() != subAlloc.offset)
4831 if(subAlloc.hAllocation->GetSize() != subAlloc.size)
4837 calculatedOffset += subAlloc.size;
4838 prevFree = currFree;
4843 if(m_FreeSuballocationsBySize.size() != freeSuballocationsToRegister)
4848 VkDeviceSize lastSize = 0;
4849 for(
size_t i = 0; i < m_FreeSuballocationsBySize.size(); ++i)
4851 VmaSuballocationList::iterator suballocItem = m_FreeSuballocationsBySize[i];
4854 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4859 if(suballocItem->size < lastSize)
4864 lastSize = suballocItem->size;
4868 if(!ValidateFreeSuballocationList() ||
4869 (calculatedOffset != m_Size) ||
4870 (calculatedSumFreeSize != m_SumFreeSize) ||
4871 (calculatedFreeCount != m_FreeCount))
4879 VkDeviceSize VmaBlockMetadata::GetUnusedRangeSizeMax()
const 4881 if(!m_FreeSuballocationsBySize.empty())
4883 return m_FreeSuballocationsBySize.back()->size;
4891 bool VmaBlockMetadata::IsEmpty()
const 4893 return (m_Suballocations.size() == 1) && (m_FreeCount == 1);
4896 void VmaBlockMetadata::CalcAllocationStatInfo(
VmaStatInfo& outInfo)
const 4900 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4912 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4913 suballocItem != m_Suballocations.cend();
4916 const VmaSuballocation& suballoc = *suballocItem;
4917 if(suballoc.type != VMA_SUBALLOCATION_TYPE_FREE)
4930 void VmaBlockMetadata::AddPoolStats(
VmaPoolStats& inoutStats)
const 4932 const uint32_t rangeCount = (uint32_t)m_Suballocations.size();
4934 inoutStats.
size += m_Size;
// Serializes this block's metadata as JSON for the statistics string:
// block totals first, then one JSON object per suballocation.
// Only compiled when VMA_STATS_STRING_ENABLED is set.
4941 #if VMA_STATS_STRING_ENABLED 4943 void VmaBlockMetadata::PrintDetailedMap(
class VmaJsonWriter& json)
const 4947 json.WriteString(
"TotalBytes");
4948 json.WriteNumber(m_Size);
4950 json.WriteString(
"UnusedBytes");
4951 json.WriteNumber(m_SumFreeSize);
// Used allocation count = total suballocations minus free ones.
4953 json.WriteString(
"Allocations");
4954 json.WriteNumber(m_Suballocations.size() - m_FreeCount);
4956 json.WriteString(
"UnusedRanges");
4957 json.WriteNumber(m_FreeCount);
// Emit each suballocation as {Type, Size, Offset[, UserData]}.
4959 json.WriteString(
"Suballocations");
4962 for(VmaSuballocationList::const_iterator suballocItem = m_Suballocations.cbegin();
4963 suballocItem != m_Suballocations.cend();
4964 ++suballocItem, ++i)
4966 json.BeginObject(
true);
4968 json.WriteString(
"Type");
4969 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[suballocItem->type]);
4971 json.WriteString(
"Size");
4972 json.WriteNumber(suballocItem->size);
4974 json.WriteString(
"Offset");
4975 json.WriteNumber(suballocItem->offset);
// Free ranges carry no allocation handle, so user data exists only for
// used suballocations.
4977 if(suballocItem->type != VMA_SUBALLOCATION_TYPE_FREE)
4979 const void* pUserData = suballocItem->hAllocation->GetUserData();
4980 if(pUserData != VMA_NULL)
4982 json.WriteString(
"UserData");
// User data is either an owned string or an opaque pointer printed as hex.
4983 if(suballocItem->hAllocation->IsUserDataString())
4985 json.WriteString((
const char*)pUserData);
4990 json.ContinueString_Pointer(pUserData);
// Fills a request describing the trivial allocation at offset 0 of a block
// that is known to be completely empty (asserted). Used right after a new
// block is created, bypassing the search in CreateAllocationRequest.
5003 #endif // #if VMA_STATS_STRING_ENABLED 5015 void VmaBlockMetadata::CreateFirstAllocationRequest(VmaAllocationRequest* pAllocationRequest)
5017 VMA_ASSERT(IsEmpty());
5018 pAllocationRequest->offset = 0;
// The whole block is one free range, so its size equals the block's free sum.
5019 pAllocationRequest->sumFreeSize = m_SumFreeSize;
5020 pAllocationRequest->sumItemSize = 0;
5021 pAllocationRequest->item = m_Suballocations.begin();
5022 pAllocationRequest->itemsToMakeLostCount = 0;
// Searches this block for a place to put an allocation of allocSize/allocAlignment.
// Two strategies:
//  1. Scan the size-sorted free list (binary search for first fit >= needed size).
//  2. If canMakeOtherLost, additionally consider evicting "lost-able" allocations,
//     choosing the candidate with the lowest CalcCost().
// Returns true and fills *pAllocationRequest on success.
// NOTE(review): extraction gaps hide the CheckAllocation() call heads inside both
// loops and several returns — the visible argument lists belong to those calls.
5025 bool VmaBlockMetadata::CreateAllocationRequest(
5026 uint32_t currentFrameIndex,
5027 uint32_t frameInUseCount,
5028 VkDeviceSize bufferImageGranularity,
5029 VkDeviceSize allocSize,
5030 VkDeviceSize allocAlignment,
5031 VmaSuballocationType allocType,
5032 bool canMakeOtherLost,
5033 VmaAllocationRequest* pAllocationRequest)
5035 VMA_ASSERT(allocSize > 0);
5036 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5037 VMA_ASSERT(pAllocationRequest != VMA_NULL);
5038 VMA_HEAVY_ASSERT(Validate());
// Early out: without eviction, total free space must at least cover allocSize.
5041 if(canMakeOtherLost ==
false && m_SumFreeSize < allocSize)
5047 const size_t freeSuballocCount = m_FreeSuballocationsBySize.size();
5048 if(freeSuballocCount > 0)
// Best-fit path: binary search the ascending-by-size free list for the first
// free range not smaller than the required size, then probe forward.
5053 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5054 m_FreeSuballocationsBySize.data(),
5055 m_FreeSuballocationsBySize.data() + freeSuballocCount,
5057 VmaSuballocationItemSizeLess());
5058 size_t index = it - m_FreeSuballocationsBySize.data();
5059 for(; index < freeSuballocCount; ++index)
5064 bufferImageGranularity,
5068 m_FreeSuballocationsBySize[index],
5070 &pAllocationRequest->offset,
5071 &pAllocationRequest->itemsToMakeLostCount,
5072 &pAllocationRequest->sumFreeSize,
5073 &pAllocationRequest->sumItemSize))
5075 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Worst-fit fallback path (elided branch selector): walk the free list from
// the largest range downward.
5083 for(
size_t index = freeSuballocCount; index--; )
5088 bufferImageGranularity,
5092 m_FreeSuballocationsBySize[index],
5094 &pAllocationRequest->offset,
5095 &pAllocationRequest->itemsToMakeLostCount,
5096 &pAllocationRequest->sumFreeSize,
5097 &pAllocationRequest->sumItemSize))
5099 pAllocationRequest->item = m_FreeSuballocationsBySize[index];
// Eviction path: brute-force every suballocation that is free or can become
// lost, keeping the cheapest viable request.
5106 if(canMakeOtherLost)
// Sentinel costs so the first viable candidate always wins the comparison.
5110 pAllocationRequest->sumFreeSize = VK_WHOLE_SIZE;
5111 pAllocationRequest->sumItemSize = VK_WHOLE_SIZE;
5113 VmaAllocationRequest tmpAllocRequest = {};
5114 for(VmaSuballocationList::iterator suballocIt = m_Suballocations.begin();
5115 suballocIt != m_Suballocations.end();
5118 if(suballocIt->type == VMA_SUBALLOCATION_TYPE_FREE ||
5119 suballocIt->hAllocation->CanBecomeLost())
5124 bufferImageGranularity,
5130 &tmpAllocRequest.offset,
5131 &tmpAllocRequest.itemsToMakeLostCount,
5132 &tmpAllocRequest.sumFreeSize,
5133 &tmpAllocRequest.sumItemSize))
5135 tmpAllocRequest.item = suballocIt;
5137 if(tmpAllocRequest.CalcCost() < pAllocationRequest->CalcCost())
5139 *pAllocationRequest = tmpAllocRequest;
// Success iff some candidate replaced the VK_WHOLE_SIZE sentinel.
5145 if(pAllocationRequest->sumItemSize != VK_WHOLE_SIZE)
// Executes the eviction plan recorded in pAllocationRequest: walks forward from
// request->item, marking each lost-able allocation as lost and merging the
// freed range back, until itemsToMakeLostCount reaches 0.
// Returns false (elided) if a MakeLost() call fails mid-way.
5154 bool VmaBlockMetadata::MakeRequestedAllocationsLost(
5155 uint32_t currentFrameIndex,
5156 uint32_t frameInUseCount,
5157 VmaAllocationRequest* pAllocationRequest)
5159 while(pAllocationRequest->itemsToMakeLostCount > 0)
// Skip over ranges that are already free; only real allocations count.
5161 if(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE)
5163 ++pAllocationRequest->item;
5165 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5166 VMA_ASSERT(pAllocationRequest->item->hAllocation != VK_NULL_HANDLE);
5167 VMA_ASSERT(pAllocationRequest->item->hAllocation->CanBecomeLost());
5168 if(pAllocationRequest->item->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation merges adjacent free ranges and returns the surviving
// iterator, which becomes the new cursor.
5170 pAllocationRequest->item = FreeSuballocation(pAllocationRequest->item);
5171 --pAllocationRequest->itemsToMakeLostCount;
// Postconditions: cursor points at a valid FREE range ready for Alloc().
5179 VMA_HEAVY_ASSERT(Validate());
5180 VMA_ASSERT(pAllocationRequest->item != m_Suballocations.end());
5181 VMA_ASSERT(pAllocationRequest->item->type == VMA_SUBALLOCATION_TYPE_FREE);
// Marks every allocation in this block that can become lost (and whose frame
// age permits it, per MakeLost) as lost, freeing its range.
// Returns how many allocations were lost.
5186 uint32_t VmaBlockMetadata::MakeAllocationsLost(uint32_t currentFrameIndex, uint32_t frameInUseCount)
5188 uint32_t lostAllocationCount = 0;
5189 for(VmaSuballocationList::iterator it = m_Suballocations.begin();
5190 it != m_Suballocations.end();
5193 if(it->type != VMA_SUBALLOCATION_TYPE_FREE &&
5194 it->hAllocation->CanBecomeLost() &&
5195 it->hAllocation->MakeLost(currentFrameIndex, frameInUseCount))
// FreeSuballocation may merge neighbors; continue from the returned iterator.
5197 it = FreeSuballocation(it);
5198 ++lostAllocationCount;
5201 return lostAllocationCount;
// Commits a previously computed allocation request: carves allocSize bytes out
// of the free suballocation at request.item, inserting new FREE suballocations
// for any padding left before and after the placed allocation, and updates
// free-count / free-size bookkeeping.
5204 void VmaBlockMetadata::Alloc(
5205 const VmaAllocationRequest& request,
5206 VmaSuballocationType type,
5207 VkDeviceSize allocSize,
5208 VmaAllocation hAllocation)
5210 VMA_ASSERT(request.item != m_Suballocations.end());
5211 VmaSuballocation& suballoc = *request.item;
5213 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
// paddingBegin/paddingEnd: alignment slack around the placed allocation
// inside the chosen free range.
5215 VMA_ASSERT(request.offset >= suballoc.offset);
5216 const VkDeviceSize paddingBegin = request.offset - suballoc.offset;
5217 VMA_ASSERT(suballoc.size >= paddingBegin + allocSize);
5218 const VkDeviceSize paddingEnd = suballoc.size - paddingBegin - allocSize;
// Remove the range from the sorted free list before mutating its size.
5222 UnregisterFreeSuballocation(request.item);
// Repurpose the free suballocation in place as the new used allocation.
5224 suballoc.offset = request.offset;
5225 suballoc.size = allocSize;
5226 suballoc.type = type;
5227 suballoc.hAllocation = hAllocation;
// Trailing padding becomes its own FREE suballocation, inserted after item.
5232 VmaSuballocation paddingSuballoc = {};
5233 paddingSuballoc.offset = request.offset + allocSize;
5234 paddingSuballoc.size = paddingEnd;
5235 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5236 VmaSuballocationList::iterator next = request.item;
5238 const VmaSuballocationList::iterator paddingEndItem =
5239 m_Suballocations.insert(next, paddingSuballoc);
5240 RegisterFreeSuballocation(paddingEndItem);
// Leading padding likewise becomes a FREE suballocation before item.
5246 VmaSuballocation paddingSuballoc = {};
5247 paddingSuballoc.offset = request.offset - paddingBegin;
5248 paddingSuballoc.size = paddingBegin;
5249 paddingSuballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5250 const VmaSuballocationList::iterator paddingBeginItem =
5251 m_Suballocations.insert(request.item, paddingSuballoc);
5252 RegisterFreeSuballocation(paddingBeginItem);
// One free range was consumed; paddings (elided increments) add back to the
// free count. Free bytes shrink by exactly the allocation size.
5256 m_FreeCount = m_FreeCount - 1;
5257 if(paddingBegin > 0)
5265 m_SumFreeSize -= allocSize;
// Frees the suballocation owning the given allocation handle.
// Linear search over the suballocation list; asserts if not found.
5268 void VmaBlockMetadata::Free(
const VmaAllocation allocation)
5270 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5271 suballocItem != m_Suballocations.end();
5274 VmaSuballocation& suballoc = *suballocItem;
5275 if(suballoc.hAllocation == allocation)
5277 FreeSuballocation(suballocItem);
5278 VMA_HEAVY_ASSERT(Validate());
// Reaching here means the handle does not live in this block — a caller bug.
5282 VMA_ASSERT(0 &&
"Not found!");

// Same as Free(), but keyed by byte offset instead of allocation handle.
5285 void VmaBlockMetadata::FreeAtOffset(VkDeviceSize offset)
5287 for(VmaSuballocationList::iterator suballocItem = m_Suballocations.begin();
5288 suballocItem != m_Suballocations.end();
5291 VmaSuballocation& suballoc = *suballocItem;
5292 if(suballoc.offset == offset)
5294 FreeSuballocation(suballocItem);
5298 VMA_ASSERT(0 &&
"Not found!");
// Debug validation of m_FreeSuballocationsBySize invariants:
// every entry is FREE, at least the registration threshold in size, and the
// vector is sorted ascending by size. Returns false (elided) on any violation.
5301 bool VmaBlockMetadata::ValidateFreeSuballocationList()
const 5303 VkDeviceSize lastSize = 0;
5304 for(
size_t i = 0, count = m_FreeSuballocationsBySize.size(); i < count; ++i)
5306 const VmaSuballocationList::iterator it = m_FreeSuballocationsBySize[i];
5308 if(it->type != VMA_SUBALLOCATION_TYPE_FREE)
// Ranges below the threshold are never registered here (see Register...).
5313 if(it->size < VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
// Sizes must be non-decreasing to keep binary search valid.
5318 if(it->size < lastSize)
5324 lastSize = it->size;
// Core fit test: can an allocation of allocSize/allocAlignment/allocType be
// placed starting at suballocItem? Computes the final aligned *pOffset,
// honoring VMA_DEBUG_MARGIN, VMA_DEBUG_ALIGNMENT and Vulkan's
// bufferImageGranularity (buffers and images on the same "page" must not
// conflict). Two major paths:
//  - canMakeOtherLost == true: may span multiple suballocations, counting how
//    many lost-able allocations must be evicted (*itemsToMakeLostCount) and
//    the byte cost (*pSumItemSize / *pSumFreeSize).
//  - else: suballocItem must be a single FREE range large enough by itself.
// NOTE(review): several closing braces / early returns are elided between the
// embedded line numbers.
5329 bool VmaBlockMetadata::CheckAllocation(
5330 uint32_t currentFrameIndex,
5331 uint32_t frameInUseCount,
5332 VkDeviceSize bufferImageGranularity,
5333 VkDeviceSize allocSize,
5334 VkDeviceSize allocAlignment,
5335 VmaSuballocationType allocType,
5336 VmaSuballocationList::const_iterator suballocItem,
5337 bool canMakeOtherLost,
5338 VkDeviceSize* pOffset,
5339 size_t* itemsToMakeLostCount,
5340 VkDeviceSize* pSumFreeSize,
5341 VkDeviceSize* pSumItemSize)
const 5343 VMA_ASSERT(allocSize > 0);
5344 VMA_ASSERT(allocType != VMA_SUBALLOCATION_TYPE_FREE);
5345 VMA_ASSERT(suballocItem != m_Suballocations.cend());
5346 VMA_ASSERT(pOffset != VMA_NULL);
5348 *itemsToMakeLostCount = 0;
// --- Path 1: eviction allowed ---
5352 if(canMakeOtherLost)
5354 if(suballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5356 *pSumFreeSize = suballocItem->size;
// Starting range is occupied: it only qualifies if its owner can be lost and
// has aged past frameInUseCount frames.
5360 if(suballocItem->hAllocation->CanBecomeLost() &&
5361 suballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5363 ++*itemsToMakeLostCount;
5364 *pSumItemSize = suballocItem->size;
// Quick reject: not enough bytes remain in the block after this offset.
5373 if(m_Size - suballocItem->offset < allocSize)
5379 *pOffset = suballocItem->offset;
// Leave debug margin before the allocation (except at block start).
5382 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5384 *pOffset += VMA_DEBUG_MARGIN;
// Round the offset up to the required alignment.
5388 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5389 *pOffset = VmaAlignUp(*pOffset, alignment);
// If a preceding suballocation of conflicting type shares the same
// bufferImageGranularity page, push the offset to the next page.
5393 if(bufferImageGranularity > 1)
5395 bool bufferImageGranularityConflict =
false;
5396 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5397 while(prevSuballocItem != m_Suballocations.cbegin())
5400 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5401 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5403 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5405 bufferImageGranularityConflict =
true;
5413 if(bufferImageGranularityConflict)
5415 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
// Alignment may have pushed us entirely past this suballocation.
5421 if(*pOffset >= suballocItem->offset + suballocItem->size)
5427 const VkDeviceSize paddingBegin = *pOffset - suballocItem->offset;
// End margin applies unless the allocation reaches the block's end.
5430 VmaSuballocationList::const_iterator next = suballocItem;
5432 const VkDeviceSize requiredEndMargin =
5433 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
5435 const VkDeviceSize totalSize = paddingBegin + allocSize + requiredEndMargin;
5437 if(suballocItem->offset + totalSize > m_Size)
// The request may span several consecutive suballocations: walk forward,
// accumulating free bytes and lost-able allocation bytes, until covered.
5444 VmaSuballocationList::const_iterator lastSuballocItem = suballocItem;
5445 if(totalSize > suballocItem->size)
5447 VkDeviceSize remainingSize = totalSize - suballocItem->size;
5448 while(remainingSize > 0)
5451 if(lastSuballocItem == m_Suballocations.cend())
5455 if(lastSuballocItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5457 *pSumFreeSize += lastSuballocItem->size;
5461 VMA_ASSERT(lastSuballocItem->hAllocation != VK_NULL_HANDLE);
5462 if(lastSuballocItem->hAllocation->CanBecomeLost() &&
5463 lastSuballocItem->hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5465 ++*itemsToMakeLostCount;
5466 *pSumItemSize += lastSuballocItem->size;
5473 remainingSize = (lastSuballocItem->size < remainingSize) ?
5474 5474 remainingSize - lastSuballocItem->size : 0;
// Following suballocations on the same granularity page must also be
// evictable if their type conflicts with ours.
5480 if(bufferImageGranularity > 1)
5482 VmaSuballocationList::const_iterator nextSuballocItem = lastSuballocItem;
5484 while(nextSuballocItem != m_Suballocations.cend())
5486 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5487 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5489 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
5491 VMA_ASSERT(nextSuballoc.hAllocation != VK_NULL_HANDLE);
5492 if(nextSuballoc.hAllocation->CanBecomeLost() &&
5493 nextSuballoc.hAllocation->GetLastUseFrameIndex() + frameInUseCount < currentFrameIndex)
5495 ++*itemsToMakeLostCount;
// --- Path 2: no eviction — single FREE range must fit everything ---
5514 const VmaSuballocation& suballoc = *suballocItem;
5515 VMA_ASSERT(suballoc.type == VMA_SUBALLOCATION_TYPE_FREE);
5517 *pSumFreeSize = suballoc.size;
5520 if(suballoc.size < allocSize)
5526 *pOffset = suballoc.offset;
5529 if((VMA_DEBUG_MARGIN > 0) && suballocItem != m_Suballocations.cbegin())
5531 *pOffset += VMA_DEBUG_MARGIN;
5535 const VkDeviceSize alignment = VMA_MAX(allocAlignment, static_cast<VkDeviceSize>(VMA_DEBUG_ALIGNMENT));
5536 *pOffset = VmaAlignUp(*pOffset, alignment);
// Same backward granularity-conflict scan as in path 1.
5540 if(bufferImageGranularity > 1)
5542 bool bufferImageGranularityConflict =
false;
5543 VmaSuballocationList::const_iterator prevSuballocItem = suballocItem;
5544 while(prevSuballocItem != m_Suballocations.cbegin())
5547 const VmaSuballocation& prevSuballoc = *prevSuballocItem;
5548 if(VmaBlocksOnSamePage(prevSuballoc.offset, prevSuballoc.size, *pOffset, bufferImageGranularity))
5550 if(VmaIsBufferImageGranularityConflict(prevSuballoc.type, allocType))
5552 bufferImageGranularityConflict =
true;
5560 if(bufferImageGranularityConflict)
5562 *pOffset = VmaAlignUp(*pOffset, bufferImageGranularity);
5567 const VkDeviceSize paddingBegin = *pOffset - suballoc.offset;
5570 VmaSuballocationList::const_iterator next = suballocItem;
5572 const VkDeviceSize requiredEndMargin =
5573 (next != m_Suballocations.cend()) ? VMA_DEBUG_MARGIN : 0;
// Fail if padding + allocation + end margin overflow this free range.
5576 if(paddingBegin + allocSize + requiredEndMargin > suballoc.size)
// Forward granularity scan: any conflicting neighbor on the same page means
// this spot is unusable in the no-eviction path (elided: return false).
5583 if(bufferImageGranularity > 1)
5585 VmaSuballocationList::const_iterator nextSuballocItem = suballocItem;
5587 while(nextSuballocItem != m_Suballocations.cend())
5589 const VmaSuballocation& nextSuballoc = *nextSuballocItem;
5590 if(VmaBlocksOnSamePage(*pOffset, allocSize, nextSuballoc.offset, bufferImageGranularity))
5592 if(VmaIsBufferImageGranularityConflict(allocType, nextSuballoc.type))
// Coalesces a FREE suballocation with its (also FREE) successor: the successor's
// bytes are absorbed into item and the successor node is erased.
// Preconditions (asserted): item and its next are valid and both FREE.
5611 void VmaBlockMetadata::MergeFreeWithNext(VmaSuballocationList::iterator item)
5613 VMA_ASSERT(item != m_Suballocations.end());
5614 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
// (elided: ++nextItem between the declaration and the asserts)
5616 VmaSuballocationList::iterator nextItem = item;
5618 VMA_ASSERT(nextItem != m_Suballocations.end());
5619 VMA_ASSERT(nextItem->type == VMA_SUBALLOCATION_TYPE_FREE);
5621 item->size += nextItem->size;
5623 m_Suballocations.erase(nextItem);
// Converts a used suballocation back to FREE, updates the free-byte total,
// merges with FREE neighbors on either side, and (re)registers the surviving
// range in the size-sorted free list. Returns an iterator to the surviving
// FREE suballocation.
5626 VmaSuballocationList::iterator VmaBlockMetadata::FreeSuballocation(VmaSuballocationList::iterator suballocItem)
5629 VmaSuballocation& suballoc = *suballocItem;
5630 suballoc.type = VMA_SUBALLOCATION_TYPE_FREE;
5631 suballoc.hAllocation = VK_NULL_HANDLE;
5635 m_SumFreeSize += suballoc.size;
// Decide which neighbors are FREE and therefore mergeable.
5638 bool mergeWithNext =
false;
5639 bool mergeWithPrev =
false;
// (elided: ++nextItem before the check)
5641 VmaSuballocationList::iterator nextItem = suballocItem;
5643 if((nextItem != m_Suballocations.end()) && (nextItem->type == VMA_SUBALLOCATION_TYPE_FREE))
5645 mergeWithNext =
true;
5648 VmaSuballocationList::iterator prevItem = suballocItem;
5649 if(suballocItem != m_Suballocations.begin())
5652 if(prevItem->type == VMA_SUBALLOCATION_TYPE_FREE)
5654 mergeWithPrev =
true;
// Merging absorbs the neighbor, so it must leave the sorted free list first.
5660 UnregisterFreeSuballocation(nextItem);
5661 MergeFreeWithNext(suballocItem);
// When merging backward, prev's size changes — unregister, merge, re-register.
5666 UnregisterFreeSuballocation(prevItem);
5667 MergeFreeWithNext(prevItem);
5668 RegisterFreeSuballocation(prevItem);
5673 RegisterFreeSuballocation(suballocItem);
5674 return suballocItem;
// Inserts a FREE suballocation into m_FreeSuballocationsBySize, keeping it
// sorted ascending by size. Ranges below
// VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER are deliberately not tracked.
5678 void VmaBlockMetadata::RegisterFreeSuballocation(VmaSuballocationList::iterator item)
5680 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5681 VMA_ASSERT(item->size > 0);
5685 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5687 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5689 if(m_FreeSuballocationsBySize.empty())
5691 m_FreeSuballocationsBySize.push_back(item);
5695 VmaVectorInsertSorted<VmaSuballocationItemSizeLess>(m_FreeSuballocationsBySize, item);

// Removes a FREE suballocation from the sorted free list. Binary-searches to
// the first entry of equal size, then scans forward through the equal-size run
// for the exact iterator. Asserts if the item is missing (bookkeeping bug).
5703 void VmaBlockMetadata::UnregisterFreeSuballocation(VmaSuballocationList::iterator item)
5705 VMA_ASSERT(item->type == VMA_SUBALLOCATION_TYPE_FREE);
5706 VMA_ASSERT(item->size > 0);
5710 VMA_HEAVY_ASSERT(ValidateFreeSuballocationList());
5712 if(item->size >= VMA_MIN_FREE_SUBALLOCATION_SIZE_TO_REGISTER)
5714 VmaSuballocationList::iterator*
const it = VmaBinaryFindFirstNotLess(
5715 m_FreeSuballocationsBySize.data(),
5716 m_FreeSuballocationsBySize.data() + m_FreeSuballocationsBySize.size(),
5718 VmaSuballocationItemSizeLess());
5719 for(
size_t index = it - m_FreeSuballocationsBySize.data();
5720 index < m_FreeSuballocationsBySize.size();
5723 if(m_FreeSuballocationsBySize[index] == item)
5725 VmaVectorRemove(m_FreeSuballocationsBySize, index);
// Walking past the equal-size run without a match means the item isn't here.
5728 VMA_ASSERT((m_FreeSuballocationsBySize[index]->size == item->size) &&
"Not found.");
5730 VMA_ASSERT(0 &&
"Not found.");
// Reference-counted persistent mapping of one VkDeviceMemory. m_MapCount
// tracks nested Map/Unmap calls under m_Mutex; vkMapMemory is issued only on
// the 0 -> N transition and vkUnmapMemory only when the count returns to 0.

5739 VmaDeviceMemoryMapping::VmaDeviceMemoryMapping() :
5741 m_pMappedData(VMA_NULL)

// A still-mapped block at destruction indicates unbalanced Map/Unmap calls.
5745 VmaDeviceMemoryMapping::~VmaDeviceMemoryMapping()
5747 VMA_ASSERT(m_MapCount == 0 &&
"VkDeviceMemory block is being destroyed while it is still mapped.");

// Maps hMemory (or reuses the existing mapping), bumping the ref-count by
// `count`. On success *ppData (if given) receives the mapped pointer.
5750 VkResult VmaDeviceMemoryMapping::Map(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count,
void **ppData)
// Mutex honored only when the allocator was created thread-safe (m_UseMutex).
5757 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
// Already-mapped fast path: just add references and hand back the pointer.
5760 m_MapCount += count;
5761 VMA_ASSERT(m_pMappedData != VMA_NULL);
5762 if(ppData != VMA_NULL)
5764 *ppData = m_pMappedData;
// First mapping: call the dispatched vkMapMemory function pointer.
5770 VkResult result = (*hAllocator->GetVulkanFunctions().vkMapMemory)(
5771 hAllocator->m_hDevice,
5777 if(result == VK_SUCCESS)
5779 if(ppData != VMA_NULL)
5781 *ppData = m_pMappedData;

// Drops `count` references; unmaps the memory when the count reaches zero.
5789 void VmaDeviceMemoryMapping::Unmap(VmaAllocator hAllocator, VkDeviceMemory hMemory, uint32_t count)
5796 VmaMutexLock lock(m_Mutex, hAllocator->m_UseMutex);
5797 if(m_MapCount >= count)
5799 m_MapCount -= count;
// (elided: the m_MapCount == 0 check guarding the actual unmap)
5802 m_pMappedData = VMA_NULL;
5803 (*hAllocator->GetVulkanFunctions().vkUnmapMemory)(hAllocator->m_hDevice, hMemory);
// More Unmaps than Maps — caller bug.
5808 VMA_ASSERT(0 &&
"VkDeviceMemory block is being unmapped while it was not previously mapped.");
// VmaDeviceMemoryBlock: one VkDeviceMemory chunk plus its suballocation
// metadata (m_Metadata) and persistent-mapping helper (m_Mapping).

// Constructed inert; Init() supplies the actual memory handle later.
5815 VmaDeviceMemoryBlock::VmaDeviceMemoryBlock(VmaAllocator hAllocator) :
5816 m_MemoryTypeIndex(UINT32_MAX),
5817 m_hMemory(VK_NULL_HANDLE),
5818 m_Metadata(hAllocator)

// Takes ownership of freshly allocated device memory of the given size/type.
5822 void VmaDeviceMemoryBlock::Init(
5823 uint32_t newMemoryTypeIndex,
5824 VkDeviceMemory newMemory,
5825 VkDeviceSize newSize)
5827 VMA_ASSERT(m_hMemory == VK_NULL_HANDLE);
5829 m_MemoryTypeIndex = newMemoryTypeIndex;
5830 m_hMemory = newMemory;
5832 m_Metadata.Init(newSize);

// Returns the device memory to the allocator. The block must be empty —
// outstanding suballocations here mean leaked allocations.
5835 void VmaDeviceMemoryBlock::Destroy(VmaAllocator allocator)
5839 VMA_ASSERT(m_Metadata.IsEmpty() &&
"Some allocations were not freed before destruction of this memory block!");
5841 VMA_ASSERT(m_hMemory != VK_NULL_HANDLE);
5842 allocator->FreeVulkanMemory(m_MemoryTypeIndex, m_Metadata.GetSize(), m_hMemory);
5843 m_hMemory = VK_NULL_HANDLE;

// Sanity check: handle present, non-zero size, and valid metadata.
5846 bool VmaDeviceMemoryBlock::Validate()
const 5848 if((m_hMemory == VK_NULL_HANDLE) ||
5849 (m_Metadata.GetSize() == 0))
5854 return m_Metadata.Validate();

// Thin forwarders to the ref-counted mapping helper.
5857 VkResult VmaDeviceMemoryBlock::Map(VmaAllocator hAllocator, uint32_t count,
void** ppData)
5859 return m_Mapping.Map(hAllocator, m_hMemory, count, ppData);
5862 void VmaDeviceMemoryBlock::Unmap(VmaAllocator hAllocator, uint32_t count)
5864 m_Mapping.Unmap(hAllocator, m_hMemory, count);
// NOTE(review): heavily elided fragments. The memset below belongs to a stat
// initialization helper whose signature is not visible; VmaPool_T's
// constructor body (m_BlockVector initialization from VmaPoolCreateInfo
// fields) and destructor body are mostly hidden.
5869 memset(&outInfo, 0,
sizeof(outInfo));
5888 static void VmaPostprocessCalcStatInfo(
VmaStatInfo& inoutInfo)
// VmaPool_T wraps a dedicated VmaBlockVector configured from createInfo.
5896 VmaPool_T::VmaPool_T(
5897 VmaAllocator hAllocator,
5901 createInfo.memoryTypeIndex,
5902 createInfo.blockSize,
5903 createInfo.minBlockCount,
5904 createInfo.maxBlockCount,
5906 createInfo.frameInUseCount,
5911 VmaPool_T::~VmaPool_T()
// VmaBlockVector: a growable sequence of VmaDeviceMemoryBlocks for one memory
// type, used both for default per-type heaps and for custom pools.

// Constructor stores configuration verbatim; no device memory is allocated yet.
5915 #if VMA_STATS_STRING_ENABLED 5917 #endif // #if VMA_STATS_STRING_ENABLED 5919 VmaBlockVector::VmaBlockVector(
5920 VmaAllocator hAllocator,
5921 uint32_t memoryTypeIndex,
5922 VkDeviceSize preferredBlockSize,
5923 size_t minBlockCount,
5924 size_t maxBlockCount,
5925 VkDeviceSize bufferImageGranularity,
5926 uint32_t frameInUseCount,
5927 bool isCustomPool) :
5928 m_hAllocator(hAllocator),
5929 m_MemoryTypeIndex(memoryTypeIndex),
5930 m_PreferredBlockSize(preferredBlockSize),
5931 m_MinBlockCount(minBlockCount),
5932 m_MaxBlockCount(maxBlockCount),
5933 m_BufferImageGranularity(bufferImageGranularity),
5934 m_FrameInUseCount(frameInUseCount),
5935 m_IsCustomPool(isCustomPool),
5936 m_Blocks(VmaStlAllocator<VmaDeviceMemoryBlock*>(hAllocator->GetAllocationCallbacks())),
5937 m_HasEmptyBlock(false),
5938 m_pDefragmentator(VMA_NULL)

// Destructor releases every block; a live defragmentator here is a bug.
5942 VmaBlockVector::~VmaBlockVector()
5944 VMA_ASSERT(m_pDefragmentator == VMA_NULL);
5946 for(
size_t i = m_Blocks.size(); i--; )
5948 m_Blocks[i]->Destroy(m_hAllocator);
5949 vma_delete(m_hAllocator, m_Blocks[i]);

// Pre-creates m_MinBlockCount blocks at the preferred size; stops on first
// failure (elided: returning res / VK_SUCCESS).
5953 VkResult VmaBlockVector::CreateMinBlocks()
5955 for(
size_t i = 0; i < m_MinBlockCount; ++i)
5957 VkResult res = CreateBlock(m_PreferredBlockSize, VMA_NULL);
5958 if(res != VK_SUCCESS)

// Sums per-block metadata stats into *pStats under the vector's mutex.
5966 void VmaBlockVector::GetPoolStats(
VmaPoolStats* pStats)
5974 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
5976 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
5978 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
5980 VMA_HEAVY_ASSERT(pBlock->Validate());
5981 pBlock->m_Metadata.AddPoolStats(*pStats);
// Upper bound on make-lost retry rounds before giving up with
// VK_ERROR_TOO_MANY_OBJECTS.
5985 static const uint32_t VMA_ALLOCATION_TRY_COUNT = 32;

// Allocates from this block vector. Strategy, in order:
//  1. Try every existing block without evicting anything.
//  2. If allowed, create a new block (halving the size up to 3 times on
//     failure for default pools) and allocate from it.
//  3. If eviction is allowed, repeatedly pick the cheapest make-lost plan
//     across all blocks and execute it, up to VMA_ALLOCATION_TRY_COUNT rounds.
// NOTE(review): several elided variables (createInfo, isUserDataString,
// canMakeOtherLost, mapping flags) are referenced below but declared in
// hidden lines.
5987 VkResult VmaBlockVector::Allocate(
5988 VmaPool hCurrentPool,
5989 uint32_t currentFrameIndex,
5990 const VkMemoryRequirements& vkMemReq,
5992 VmaSuballocationType suballocType,
5993 VmaAllocation* pAllocation)
5998 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
// --- Strategy 1: search existing blocks, no eviction ---
6002 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6004 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6005 VMA_ASSERT(pCurrBlock);
6006 VmaAllocationRequest currRequest = {};
6007 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6010 m_BufferImageGranularity,
6018 VMA_ASSERT(currRequest.itemsToMakeLostCount == 0);
// Persistently-mapped allocations bump the block's map ref-count up front.
6022 VkResult res = pCurrBlock->Map(m_hAllocator, 1, VMA_NULL);
6023 if(res != VK_SUCCESS)
// This block is about to hold an allocation, so it is no longer the empty one.
6030 if(pCurrBlock->m_Metadata.IsEmpty())
6032 m_HasEmptyBlock =
false;
6035 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6036 pCurrBlock->m_Metadata.Alloc(currRequest, suballocType, vkMemReq.size, *pAllocation);
6037 (*pAllocation)->InitBlockAllocation(
6046 VMA_HEAVY_ASSERT(pCurrBlock->Validate());
6047 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6048 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// --- Strategy 2: create a new block ---
6053 const bool canCreateNewBlock =
6055 (m_Blocks.size() < m_MaxBlockCount);
6058 if(canCreateNewBlock)
6061 VkDeviceSize newBlockSize = m_PreferredBlockSize;
6062 uint32_t newBlockSizeShift = 0;
6063 const uint32_t NEW_BLOCK_SIZE_SHIFT_MAX = 3;
// Default pools start smaller than the preferred size while the heap is young:
// shrink as long as the halved size still exceeds existing blocks and 2x the
// request.
6067 if(m_IsCustomPool ==
false)
6070 const VkDeviceSize maxExistingBlockSize = CalcMaxBlockSize();
6071 for(uint32_t i = 0; i < NEW_BLOCK_SIZE_SHIFT_MAX; ++i)
6073 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6074 if(smallerNewBlockSize > maxExistingBlockSize && smallerNewBlockSize >= vkMemReq.size * 2)
6076 newBlockSize = smallerNewBlockSize;
6077 ++newBlockSizeShift;
6086 size_t newBlockIndex = 0;
6087 VkResult res = CreateBlock(newBlockSize, &newBlockIndex);
// On OOM, retry with progressively halved sizes (still >= the request).
6089 if(m_IsCustomPool ==
false)
6091 while(res < 0 && newBlockSizeShift < NEW_BLOCK_SIZE_SHIFT_MAX)
6093 const VkDeviceSize smallerNewBlockSize = newBlockSize / 2;
6094 if(smallerNewBlockSize >= vkMemReq.size)
6096 newBlockSize = smallerNewBlockSize;
6097 ++newBlockSizeShift;
6098 res = CreateBlock(newBlockSize, &newBlockIndex);
6107 if(res == VK_SUCCESS)
6109 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[newBlockIndex];
6110 VMA_ASSERT(pBlock->m_Metadata.GetSize() >= vkMemReq.size);
6114 res = pBlock->Map(m_hAllocator, 1, VMA_NULL);
6115 if(res != VK_SUCCESS)
// Fresh block: take the trivial whole-block request at offset 0.
6122 VmaAllocationRequest allocRequest;
6123 pBlock->m_Metadata.CreateFirstAllocationRequest(&allocRequest);
6124 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6125 pBlock->m_Metadata.Alloc(allocRequest, suballocType, vkMemReq.size, *pAllocation);
6126 (*pAllocation)->InitBlockAllocation(
6129 allocRequest.offset,
6135 VMA_HEAVY_ASSERT(pBlock->Validate());
6136 VMA_DEBUG_LOG(
" Created new allocation Size=%llu", allocInfo.allocationSize);
6137 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// --- Strategy 3: evict lost-able allocations ---
6145 if(canMakeOtherLost)
6147 uint32_t tryIndex = 0;
6148 for(; tryIndex < VMA_ALLOCATION_TRY_COUNT; ++tryIndex)
6150 VmaDeviceMemoryBlock* pBestRequestBlock = VMA_NULL;
6151 VmaAllocationRequest bestRequest = {};
6152 VkDeviceSize bestRequestCost = VK_WHOLE_SIZE;
// Pick the globally cheapest eviction plan across all blocks.
6156 for(
size_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex )
6158 VmaDeviceMemoryBlock*
const pCurrBlock = m_Blocks[blockIndex];
6159 VMA_ASSERT(pCurrBlock);
6160 VmaAllocationRequest currRequest = {};
6161 if(pCurrBlock->m_Metadata.CreateAllocationRequest(
6164 m_BufferImageGranularity,
6171 const VkDeviceSize currRequestCost = currRequest.CalcCost();
6172 if(pBestRequestBlock == VMA_NULL ||
6173 currRequestCost < bestRequestCost)
6175 pBestRequestBlock = pCurrBlock;
6176 bestRequest = currRequest;
6177 bestRequestCost = currRequestCost;
// Cost 0 means no eviction needed — can't do better; stop searching.
6179 if(bestRequestCost == 0)
6187 if(pBestRequestBlock != VMA_NULL)
6191 VkResult res = pBestRequestBlock->Map(m_hAllocator, 1, VMA_NULL);
6192 if(res != VK_SUCCESS)
// Eviction may fail (another frame touched a victim); then retry the round.
6198 if(pBestRequestBlock->m_Metadata.MakeRequestedAllocationsLost(
6204 if(pBestRequestBlock->m_Metadata.IsEmpty())
6206 m_HasEmptyBlock =
false;
6209 *pAllocation = vma_new(m_hAllocator, VmaAllocation_T)(currentFrameIndex, isUserDataString);
6210 pBestRequestBlock->m_Metadata.Alloc(bestRequest, suballocType, vkMemReq.size, *pAllocation);
6211 (*pAllocation)->InitBlockAllocation(
6220 VMA_HEAVY_ASSERT(pBestRequestBlock->Validate());
6221 VMA_DEBUG_LOG(
" Returned from existing allocation #%u", (uint32_t)blockIndex);
6222 (*pAllocation)->SetUserData(m_hAllocator, createInfo.
pUserData);
// Exhausted all retry rounds without landing an allocation.
6236 if(tryIndex == VMA_ALLOCATION_TRY_COUNT)
6238 return VK_ERROR_TOO_MANY_OBJECTS;
6242 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Returns an allocation's range to its block. Keeps at most one empty block
// cached (m_HasEmptyBlock) above m_MinBlockCount; a second empty block is
// destroyed. The actual Vulkan free happens after the mutex is released
// (pBlockToDelete is handled outside the locked scope).
6245 void VmaBlockVector::Free(
6246 VmaAllocation hAllocation)
6248 VmaDeviceMemoryBlock* pBlockToDelete = VMA_NULL;
// Scope of the lock (elided braces): mutate vector state, defer the delete.
6252 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6254 VmaDeviceMemoryBlock* pBlock = hAllocation->GetBlock();
// Persistently-mapped allocations release their map reference first.
6256 if(hAllocation->IsPersistentMap())
6258 pBlock->m_Mapping.Unmap(m_hAllocator, pBlock->m_hMemory, 1);
6261 pBlock->m_Metadata.Free(hAllocation);
6262 VMA_HEAVY_ASSERT(pBlock->Validate());
6264 VMA_DEBUG_LOG(
" Freed from MemoryTypeIndex=%u", memTypeIndex);
// Block just became empty: keep it only if it's the first spare and the
// minimum block count allows releasing one.
6267 if(pBlock->m_Metadata.IsEmpty())
6270 if(m_HasEmptyBlock && m_Blocks.size() > m_MinBlockCount)
6272 pBlockToDelete = pBlock;
6278 m_HasEmptyBlock =
true;
// Block didn't become empty, but an older cached empty block may now be
// droppable (blocks are sorted so the emptiest sits at the back).
6283 else if(m_HasEmptyBlock)
6285 VmaDeviceMemoryBlock* pLastBlock = m_Blocks.back();
6286 if(pLastBlock->m_Metadata.IsEmpty() && m_Blocks.size() > m_MinBlockCount)
6288 pBlockToDelete = pLastBlock;
6289 m_Blocks.pop_back();
6290 m_HasEmptyBlock =
false;
6294 IncrementallySortBlocks();
// Outside the lock: destroy the deferred block, if any.
6299 if(pBlockToDelete != VMA_NULL)
6301 VMA_DEBUG_LOG(
" Deleted empty allocation");
6302 pBlockToDelete->Destroy(m_hAllocator);
6303 vma_delete(m_hAllocator, pBlockToDelete);
// Largest block size currently in the vector; early-outs once the preferred
// size is reached since no block can beat that bound meaningfully.
6307 size_t VmaBlockVector::CalcMaxBlockSize()
const 6310 for(
size_t i = m_Blocks.size(); i--; )
6312 result = VMA_MAX(result, m_Blocks[i]->m_Metadata.GetSize());
6313 if(result >= m_PreferredBlockSize)

// Removes (but does not destroy) the given block from the vector.
6321 void VmaBlockVector::Remove(VmaDeviceMemoryBlock* pBlock)
6323 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6325 if(m_Blocks[blockIndex] == pBlock)
6327 VmaVectorRemove(m_Blocks, blockIndex)
;

// One bubble-sort pass ordering blocks by ascending free space, so allocation
// tries fuller blocks first and the emptiest drifts to the back.
6334 void VmaBlockVector::IncrementallySortBlocks()
6337 for(
size_t i = 1; i < m_Blocks.size(); ++i)
6339 if(m_Blocks[i - 1]->m_Metadata.GetSumFreeSize() > m_Blocks[i]->m_Metadata.GetSumFreeSize())
6341 VMA_SWAP(m_Blocks[i - 1], m_Blocks[i]);

// Allocates blockSize bytes of device memory for this vector's memory type,
// wraps it in a new VmaDeviceMemoryBlock, and appends it. Optionally reports
// the new block's index via pNewBlockIndex.
6347 VkResult VmaBlockVector::CreateBlock(VkDeviceSize blockSize,
size_t* pNewBlockIndex)
6349 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
6350 allocInfo.memoryTypeIndex = m_MemoryTypeIndex;
6351 allocInfo.allocationSize = blockSize;
6352 VkDeviceMemory mem = VK_NULL_HANDLE;
6353 VkResult res = m_hAllocator->AllocateVulkanMemory(&allocInfo, &mem);
// (elided: early return on failure)
6362 VmaDeviceMemoryBlock*
const pBlock = vma_new(m_hAllocator, VmaDeviceMemoryBlock)(m_hAllocator);
6366 allocInfo.allocationSize);
6368 m_Blocks.push_back(pBlock);
6369 if(pNewBlockIndex != VMA_NULL)
6371 *pNewBlockIndex = m_Blocks.size() - 1;
// Serializes the whole block vector as JSON under the vector's mutex:
// pool configuration first (custom-pool branch writes MemoryTypeIndex /
// BlockSize / BlockCount / FrameInUseCount; the default branch writes
// PreferredBlockSize), then each block's detailed map.
6377 #if VMA_STATS_STRING_ENABLED 6379 void VmaBlockVector::PrintDetailedMap(
class VmaJsonWriter& json)
6381 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6387 json.WriteString(
"MemoryTypeIndex");
6388 json.WriteNumber(m_MemoryTypeIndex);
6390 json.WriteString(
"BlockSize");
6391 json.WriteNumber(m_PreferredBlockSize);
// BlockCount is a nested object with optional Min/Max and the current count.
6393 json.WriteString(
"BlockCount");
6394 json.BeginObject(
true);
6395 if(m_MinBlockCount > 0)
6397 json.WriteString(
"Min");
6398 json.WriteNumber(m_MinBlockCount);
6400 if(m_MaxBlockCount < SIZE_MAX)
6402 json.WriteString(
"Max");
6403 json.WriteNumber(m_MaxBlockCount);
6405 json.WriteString(
"Cur");
6406 json.WriteNumber(m_Blocks.size());
6409 if(m_FrameInUseCount > 0)
6411 json.WriteString(
"FrameInUseCount");
6412 json.WriteNumber(m_FrameInUseCount);
6417 json.WriteString(
"PreferredBlockSize");
6418 json.WriteNumber(m_PreferredBlockSize);
6421 json.WriteString(
"Blocks");
6423 for(
size_t i = 0; i < m_Blocks.size(); ++i)
6425 m_Blocks[i]->m_Metadata.PrintDetailedMap(json);
// Lazily creates (once) and returns the defragmentator bound to this vector.
6432 #endif // #if VMA_STATS_STRING_ENABLED 6434 VmaDefragmentator* VmaBlockVector::EnsureDefragmentator(
6435 VmaAllocator hAllocator,
6436 uint32_t currentFrameIndex)
6438 if(m_pDefragmentator == VMA_NULL)
6440 m_pDefragmentator = vma_new(m_hAllocator, VmaDefragmentator)(
6446 return m_pDefragmentator;

// Runs the defragmentator under the vector mutex, folds the moved-bytes /
// moved-allocations totals into pDefragmentationStats, then destroys any
// blocks emptied by the moves (keeping m_MinBlockCount and the single-empty-
// block cache policy).
6449 VkResult VmaBlockVector::Defragment(
6451 VkDeviceSize& maxBytesToMove,
6452 uint32_t& maxAllocationsToMove)
// No-op when no defragmentator was ever requested for this vector.
6454 if(m_pDefragmentator == VMA_NULL)
6459 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6462 VkResult result = m_pDefragmentator->Defragment(maxBytesToMove, maxAllocationsToMove);
6465 if(pDefragmentationStats != VMA_NULL)
6467 const VkDeviceSize
bytesMoved = m_pDefragmentator->GetBytesMoved();
6468 const uint32_t
allocationsMoved = m_pDefragmentator->GetAllocationsMoved();
// The defragmentator must respect the caller's budgets.
6471 VMA_ASSERT(bytesMoved <= maxBytesToMove);
6472 VMA_ASSERT(allocationsMoved <= maxAllocationsToMove);
// Re-derive m_HasEmptyBlock while reaping newly empty blocks back-to-front.
6478 m_HasEmptyBlock =
false;
6479 for(
size_t blockIndex = m_Blocks.size(); blockIndex--; )
6481 VmaDeviceMemoryBlock* pBlock = m_Blocks[blockIndex];
6482 if(pBlock->m_Metadata.IsEmpty())
6484 if(m_Blocks.size() > m_MinBlockCount)
6486 if(pDefragmentationStats != VMA_NULL)
6489 pDefragmentationStats->
bytesFreed += pBlock->m_Metadata.GetSize();
6492 VmaVectorRemove(m_Blocks, blockIndex);
6493 pBlock->Destroy(m_hAllocator);
6494 vma_delete(m_hAllocator, pBlock);
// Can't release below the minimum — remember we still hold an empty block.
6498 m_HasEmptyBlock =
true;

// Frees the defragmentator created by EnsureDefragmentator, if any.
6506 void VmaBlockVector::DestroyDefragmentator()
6508 if(m_pDefragmentator != VMA_NULL)
6510 vma_delete(m_hAllocator, m_pDefragmentator);
6511 m_pDefragmentator = VMA_NULL;

// Forces MakeAllocationsLost on every block; optionally reports the total.
6515 void VmaBlockVector::MakePoolAllocationsLost(
6516 uint32_t currentFrameIndex,
6517 size_t* pLostAllocationCount)
6519 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6520 size_t lostAllocationCount = 0;
6521 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6523 VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6525 lostAllocationCount += pBlock->m_Metadata.MakeAllocationsLost(currentFrameIndex, m_FrameInUseCount);
6527 if(pLostAllocationCount != VMA_NULL)
6529 *pLostAllocationCount = lostAllocationCount;

// Folds per-block stat infos into pStats at three levels: grand total,
// per-memory-type, and per-heap (type mapped to heap via the allocator).
6533 void VmaBlockVector::AddStats(
VmaStats* pStats)
6535 const uint32_t memTypeIndex = m_MemoryTypeIndex;
6536 const uint32_t memHeapIndex = m_hAllocator->MemoryTypeIndexToHeapIndex(memTypeIndex);
6538 VmaMutexLock lock(m_Mutex, m_hAllocator->m_UseMutex);
6540 for(uint32_t blockIndex = 0; blockIndex < m_Blocks.size(); ++blockIndex)
6542 const VmaDeviceMemoryBlock*
const pBlock = m_Blocks[blockIndex];
6544 VMA_HEAVY_ASSERT(pBlock->Validate());
6546 pBlock->m_Metadata.CalcAllocationStatInfo(allocationStatInfo);
6547 VmaAddStatInfo(pStats->
total, allocationStatInfo);
6548 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
6549 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
6556 VmaDefragmentator::VmaDefragmentator(
6557 VmaAllocator hAllocator,
6558 VmaBlockVector* pBlockVector,
6559 uint32_t currentFrameIndex) :
6560 m_hAllocator(hAllocator),
6561 m_pBlockVector(pBlockVector),
6562 m_CurrentFrameIndex(currentFrameIndex),
6564 m_AllocationsMoved(0),
6565 m_Allocations(VmaStlAllocator<AllocationInfo>(hAllocator->GetAllocationCallbacks())),
6566 m_Blocks(VmaStlAllocator<BlockInfo*>(hAllocator->GetAllocationCallbacks()))
6570 VmaDefragmentator::~VmaDefragmentator()
6572 for(
size_t i = m_Blocks.size(); i--; )
6574 vma_delete(m_hAllocator, m_Blocks[i]);
6578 void VmaDefragmentator::AddAllocation(VmaAllocation hAlloc, VkBool32* pChanged)
6580 AllocationInfo allocInfo;
6581 allocInfo.m_hAllocation = hAlloc;
6582 allocInfo.m_pChanged = pChanged;
6583 m_Allocations.push_back(allocInfo);
6586 VkResult VmaDefragmentator::BlockInfo::EnsureMapping(VmaAllocator hAllocator,
void** ppMappedData)
6589 if(m_pMappedDataForDefragmentation)
6591 *ppMappedData = m_pMappedDataForDefragmentation;
6596 if(m_pBlock->m_Mapping.GetMappedData())
6598 *ppMappedData = m_pBlock->m_Mapping.GetMappedData();
6603 VkResult res = m_pBlock->Map(hAllocator, 1, &m_pMappedDataForDefragmentation);
6604 *ppMappedData = m_pMappedDataForDefragmentation;
6608 void VmaDefragmentator::BlockInfo::Unmap(VmaAllocator hAllocator)
6610 if(m_pMappedDataForDefragmentation != VMA_NULL)
6612 m_pBlock->Unmap(hAllocator, 1);
// One pass of defragmentation: repeatedly takes the last allocation of the
// last block and tries to re-place it in an earlier block, memcpy-ing the
// data through host-visible mappings, until the byte/allocation budget is
// exhausted (VK_INCOMPLETE) or nothing more can move.
// NOTE(review): gaps in the embedded original numbering show that braces and
// several statements were lost during extraction — this text is not compilable
// as-is; logic below is annotated from the surviving lines only.
6616 VkResult VmaDefragmentator::DefragmentRound(
6617 VkDeviceSize maxBytesToMove,
6618 uint32_t maxAllocationsToMove)
6620 if(m_Blocks.empty())
// Start from the last block; destinations scanned are always <= source index.
6625 size_t srcBlockIndex = m_Blocks.size() - 1;
// SIZE_MAX sentinel: "pick the last allocation of the current source block".
6626 size_t srcAllocIndex = SIZE_MAX;
// Skip backwards over empty source blocks to find the next movable allocation.
6632 while(srcAllocIndex >= m_Blocks[srcBlockIndex]->m_Allocations.size())
6634 if(m_Blocks[srcBlockIndex]->m_Allocations.empty())
6637 if(srcBlockIndex == 0)
6644 srcAllocIndex = SIZE_MAX;
6649 srcAllocIndex = m_Blocks[srcBlockIndex]->m_Allocations.size() - 1;
6653 BlockInfo* pSrcBlockInfo = m_Blocks[srcBlockIndex];
6654 AllocationInfo& allocInfo = pSrcBlockInfo->m_Allocations[srcAllocIndex];
6656 const VkDeviceSize size = allocInfo.m_hAllocation->GetSize();
6657 const VkDeviceSize srcOffset = allocInfo.m_hAllocation->GetOffset();
6658 const VkDeviceSize alignment = allocInfo.m_hAllocation->GetAlignment();
6659 const VmaSuballocationType suballocType = allocInfo.m_hAllocation->GetSuballocationType();
// Try each block from the front as a destination, up to the source block.
6662 for(
size_t dstBlockIndex = 0; dstBlockIndex <= srcBlockIndex; ++dstBlockIndex)
6664 BlockInfo* pDstBlockInfo = m_Blocks[dstBlockIndex];
6665 VmaAllocationRequest dstAllocRequest;
6666 if(pDstBlockInfo->m_pBlock->m_Metadata.CreateAllocationRequest(
6667 m_CurrentFrameIndex,
6668 m_pBlockVector->GetFrameInUseCount(),
6669 m_pBlockVector->GetBufferImageGranularity(),
6674 &dstAllocRequest) &&
// MoveMakesSense() rejects moves that would not reduce fragmentation.
6676 dstBlockIndex, dstAllocRequest.offset, srcBlockIndex, srcOffset))
6678 VMA_ASSERT(dstAllocRequest.itemsToMakeLostCount == 0);
// Enforce caller's byte/count budget before committing the move.
6681 if((m_AllocationsMoved + 1 > maxAllocationsToMove) ||
6682 (m_BytesMoved + size > maxBytesToMove))
6684 return VK_INCOMPLETE;
6687 void* pDstMappedData = VMA_NULL;
6688 VkResult res = pDstBlockInfo->EnsureMapping(m_hAllocator, &pDstMappedData);
6689 if(res != VK_SUCCESS)
6694 void* pSrcMappedData = VMA_NULL;
6695 res = pSrcBlockInfo->EnsureMapping(m_hAllocator, &pSrcMappedData);
6696 if(res != VK_SUCCESS)
// Copy payload, then update metadata: allocate in dst, free in src.
6703 reinterpret_cast<char*>(pDstMappedData) + dstAllocRequest.offset,
6704 reinterpret_cast<char*>(pSrcMappedData) + srcOffset,
6705 static_cast<size_t>(size));
6707 pDstBlockInfo->m_pBlock->m_Metadata.Alloc(dstAllocRequest, suballocType, size, allocInfo.m_hAllocation);
6708 pSrcBlockInfo->m_pBlock->m_Metadata.FreeAtOffset(srcOffset);
6710 allocInfo.m_hAllocation->ChangeBlockAllocation(m_hAllocator, pDstBlockInfo->m_pBlock, dstAllocRequest.offset);
6712 if(allocInfo.m_pChanged != VMA_NULL)
6714 *allocInfo.m_pChanged = VK_TRUE;
6717 ++m_AllocationsMoved;
6718 m_BytesMoved += size;
6720 VmaVectorRemove(pSrcBlockInfo->m_Allocations, srcAllocIndex);
// Advance to the previous allocation / previous block for the next attempt.
6728 if(srcAllocIndex > 0)
6734 if(srcBlockIndex > 0)
6737 srcAllocIndex = SIZE_MAX;
// Full defragmentation driver: builds per-block bookkeeping, distributes the
// registered allocations into their owning blocks, sorts blocks into a
// preferred destination order, then runs up to two DefragmentRound() passes
// and unmaps everything the rounds mapped.
// NOTE(review): gaps in the embedded numbering indicate lost braces/lines.
6747 VkResult VmaDefragmentator::Defragment(
6748 VkDeviceSize maxBytesToMove,
6749 uint32_t maxAllocationsToMove)
6751 if(m_Allocations.empty())
// Create one BlockInfo wrapper per block of the underlying block vector.
6757 const size_t blockCount = m_pBlockVector->m_Blocks.size();
6758 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6760 BlockInfo* pBlockInfo = vma_new(m_hAllocator, BlockInfo)(m_hAllocator->GetAllocationCallbacks());
6761 pBlockInfo->m_pBlock = m_pBlockVector->m_Blocks[blockIndex];
6762 m_Blocks.push_back(pBlockInfo);
// Sort by block pointer so allocations can be binary-searched into blocks.
6766 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockPointerLess());
6769 for(
size_t blockIndex = 0, allocCount = m_Allocations.size(); blockIndex < allocCount; ++blockIndex)
6771 AllocationInfo& allocInfo = m_Allocations[blockIndex];
// Allocations already lost are simply dropped from consideration.
6773 if(allocInfo.m_hAllocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
6775 VmaDeviceMemoryBlock* pBlock = allocInfo.m_hAllocation->GetBlock();
6776 BlockInfoVector::iterator it = VmaBinaryFindFirstNotLess(m_Blocks.begin(), m_Blocks.end(), pBlock, BlockPointerLess());
6777 if(it != m_Blocks.end() && (*it)->m_pBlock == pBlock)
6779 (*it)->m_Allocations.push_back(allocInfo);
6787 m_Allocations.clear();
6789 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6791 BlockInfo* pBlockInfo = m_Blocks[blockIndex];
6792 pBlockInfo->CalcHasNonMovableAllocations();
// (sic) "Descecnding" is the upstream method name — typo preserved.
6793 pBlockInfo->SortAllocationsBySizeDescecnding();
// Re-sort blocks into preferred move-destination order.
6797 VMA_SORT(m_Blocks.begin(), m_Blocks.end(), BlockInfoCompareMoveDestination());
// At most two rounds; stop early on VK_INCOMPLETE or an error.
6800 VkResult result = VK_SUCCESS;
6801 for(
size_t round = 0; (round < 2) && (result == VK_SUCCESS); ++round)
6803 result = DefragmentRound(maxBytesToMove, maxAllocationsToMove);
// Release any mappings DefragmentRound() created via EnsureMapping().
6807 for(
size_t blockIndex = 0; blockIndex < blockCount; ++blockIndex)
6809 m_Blocks[blockIndex]->Unmap(m_hAllocator);
6815 bool VmaDefragmentator::MoveMakesSense(
6816 size_t dstBlockIndex, VkDeviceSize dstOffset,
6817 size_t srcBlockIndex, VkDeviceSize srcOffset)
6819 if(dstBlockIndex < srcBlockIndex)
6823 if(dstBlockIndex > srcBlockIndex)
6827 if(dstOffset < srcOffset)
// Constructor of the allocator implementation object. Copies creation
// parameters, zeroes all cached tables, imports Vulkan function pointers,
// queries device/memory properties, applies optional per-heap size limits,
// and creates one default block vector + dedicated-allocation list per
// memory type.
// NOTE(review): the constructor's signature line and several member
// initializers are missing from this extraction (numbering gaps) — the lines
// below are the surviving fragment, preserved byte-for-byte.
6840 m_hDevice(pCreateInfo->device),
6841 m_AllocationCallbacksSpecified(pCreateInfo->pAllocationCallbacks != VMA_NULL),
// Fall back to VmaEmptyAllocationCallbacks when the user supplies none.
6842 m_AllocationCallbacks(pCreateInfo->pAllocationCallbacks ?
6843 *pCreateInfo->pAllocationCallbacks : VmaEmptyAllocationCallbacks),
6844 m_PreferredLargeHeapBlockSize(0),
6845 m_PhysicalDevice(pCreateInfo->physicalDevice),
6846 m_CurrentFrameIndex(0),
6847 m_Pools(VmaStlAllocator<VmaPool>(GetAllocationCallbacks()))
// Zero all lookup tables before they are filled below.
6851 memset(&m_DeviceMemoryCallbacks, 0 ,
sizeof(m_DeviceMemoryCallbacks));
6852 memset(&m_MemProps, 0,
sizeof(m_MemProps));
6853 memset(&m_PhysicalDeviceProperties, 0,
sizeof(m_PhysicalDeviceProperties));
6855 memset(&m_pBlockVectors, 0,
sizeof(m_pBlockVectors));
6856 memset(&m_pDedicatedAllocations, 0,
sizeof(m_pDedicatedAllocations));
// VK_WHOLE_SIZE means "no limit" for a heap until overridden below.
6858 for(uint32_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
6860 m_HeapSizeLimit[i] = VK_WHOLE_SIZE;
6871 (*m_VulkanFunctions.vkGetPhysicalDeviceProperties)(m_PhysicalDevice, &m_PhysicalDeviceProperties);
6872 (*m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties)(m_PhysicalDevice, &m_MemProps);
// Apply user-requested heap size caps; also shrink the reported heap size so
// the rest of the allocator respects the cap.
6879 for(uint32_t heapIndex = 0; heapIndex < GetMemoryHeapCount(); ++heapIndex)
6881 const VkDeviceSize limit = pCreateInfo->
pHeapSizeLimit[heapIndex];
6882 if(limit != VK_WHOLE_SIZE)
6884 m_HeapSizeLimit[heapIndex] = limit;
6885 if(limit < m_MemProps.memoryHeaps[heapIndex].size)
6887 m_MemProps.memoryHeaps[heapIndex].size = limit;
// One default VmaBlockVector and one dedicated-allocation vector per type.
6893 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
6895 const VkDeviceSize preferredBlockSize = CalcPreferredBlockSize(memTypeIndex);
6897 m_pBlockVectors[memTypeIndex] = vma_new(
this, VmaBlockVector)(
6903 GetBufferImageGranularity(),
6908 m_pDedicatedAllocations[memTypeIndex] = vma_new(
this, AllocationVectorType)(VmaStlAllocator<VmaAllocation>(GetAllocationCallbacks()));
6912 VmaAllocator_T::~VmaAllocator_T()
6914 VMA_ASSERT(m_Pools.empty());
6916 for(
size_t i = GetMemoryTypeCount(); i--; )
6918 vma_delete(
this, m_pDedicatedAllocations[i]);
6919 vma_delete(
this, m_pBlockVectors[i]);
// Fills m_VulkanFunctions: statically linked entry points first (when
// VMA_STATIC_VULKAN_FUNCTIONS == 1), then any user-provided overrides, and
// finally asserts that every required pointer is set.
// NOTE(review): extraction fused several preprocessor directives onto single
// lines (e.g. #if + following statement) — preserved byte-for-byte.
6923 void VmaAllocator_T::ImportVulkanFunctions(
const VmaVulkanFunctions* pVulkanFunctions)
6925 #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6926 m_VulkanFunctions.vkGetPhysicalDeviceProperties = &vkGetPhysicalDeviceProperties;
6927 m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties = &vkGetPhysicalDeviceMemoryProperties;
6928 m_VulkanFunctions.vkAllocateMemory = &vkAllocateMemory;
6929 m_VulkanFunctions.vkFreeMemory = &vkFreeMemory;
6930 m_VulkanFunctions.vkMapMemory = &vkMapMemory;
6931 m_VulkanFunctions.vkUnmapMemory = &vkUnmapMemory;
6932 m_VulkanFunctions.vkBindBufferMemory = &vkBindBufferMemory;
6933 m_VulkanFunctions.vkBindImageMemory = &vkBindImageMemory;
6934 m_VulkanFunctions.vkGetBufferMemoryRequirements = &vkGetBufferMemoryRequirements;
6935 m_VulkanFunctions.vkGetImageMemoryRequirements = &vkGetImageMemoryRequirements;
6936 m_VulkanFunctions.vkCreateBuffer = &vkCreateBuffer;
6937 m_VulkanFunctions.vkDestroyBuffer = &vkDestroyBuffer;
6938 m_VulkanFunctions.vkCreateImage = &vkCreateImage;
6939 m_VulkanFunctions.vkDestroyImage = &vkDestroyImage;
// Extension entry points cannot be statically linked — fetched per-device.
6940 if(m_UseKhrDedicatedAllocation)
6942 m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR =
6943 (PFN_vkGetBufferMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetBufferMemoryRequirements2KHR");
6944 m_VulkanFunctions.vkGetImageMemoryRequirements2KHR =
6945 (PFN_vkGetImageMemoryRequirements2KHR)vkGetDeviceProcAddr(m_hDevice,
"vkGetImageMemoryRequirements2KHR");
// User-supplied function pointers override the static ones, field by field.
6947 #endif // #if VMA_STATIC_VULKAN_FUNCTIONS == 1 6949 #define VMA_COPY_IF_NOT_NULL(funcName) \ 6950 if(pVulkanFunctions->funcName != VMA_NULL) m_VulkanFunctions.funcName = pVulkanFunctions->funcName; 6952 if(pVulkanFunctions != VMA_NULL)
6954 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceProperties);
6955 VMA_COPY_IF_NOT_NULL(vkGetPhysicalDeviceMemoryProperties);
6956 VMA_COPY_IF_NOT_NULL(vkAllocateMemory);
6957 VMA_COPY_IF_NOT_NULL(vkFreeMemory);
6958 VMA_COPY_IF_NOT_NULL(vkMapMemory);
6959 VMA_COPY_IF_NOT_NULL(vkUnmapMemory);
6960 VMA_COPY_IF_NOT_NULL(vkBindBufferMemory);
6961 VMA_COPY_IF_NOT_NULL(vkBindImageMemory);
6962 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements);
6963 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements);
6964 VMA_COPY_IF_NOT_NULL(vkCreateBuffer);
6965 VMA_COPY_IF_NOT_NULL(vkDestroyBuffer);
6966 VMA_COPY_IF_NOT_NULL(vkCreateImage);
6967 VMA_COPY_IF_NOT_NULL(vkDestroyImage);
6968 VMA_COPY_IF_NOT_NULL(vkGetBufferMemoryRequirements2KHR);
6969 VMA_COPY_IF_NOT_NULL(vkGetImageMemoryRequirements2KHR);
// Final validation: every required function pointer must now be non-null.
6972 #undef VMA_COPY_IF_NOT_NULL 6976 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceProperties != VMA_NULL);
6977 VMA_ASSERT(m_VulkanFunctions.vkGetPhysicalDeviceMemoryProperties != VMA_NULL);
6978 VMA_ASSERT(m_VulkanFunctions.vkAllocateMemory != VMA_NULL);
6979 VMA_ASSERT(m_VulkanFunctions.vkFreeMemory != VMA_NULL);
6980 VMA_ASSERT(m_VulkanFunctions.vkMapMemory != VMA_NULL);
6981 VMA_ASSERT(m_VulkanFunctions.vkUnmapMemory != VMA_NULL);
6982 VMA_ASSERT(m_VulkanFunctions.vkBindBufferMemory != VMA_NULL);
6983 VMA_ASSERT(m_VulkanFunctions.vkBindImageMemory != VMA_NULL);
6984 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements != VMA_NULL);
6985 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements != VMA_NULL);
6986 VMA_ASSERT(m_VulkanFunctions.vkCreateBuffer != VMA_NULL);
6987 VMA_ASSERT(m_VulkanFunctions.vkDestroyBuffer != VMA_NULL);
6988 VMA_ASSERT(m_VulkanFunctions.vkCreateImage != VMA_NULL);
6989 VMA_ASSERT(m_VulkanFunctions.vkDestroyImage != VMA_NULL);
// The KHR entry points are only required when the extension is in use.
6990 if(m_UseKhrDedicatedAllocation)
6992 VMA_ASSERT(m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR != VMA_NULL);
6993 VMA_ASSERT(m_VulkanFunctions.vkGetImageMemoryRequirements2KHR != VMA_NULL);
6997 VkDeviceSize VmaAllocator_T::CalcPreferredBlockSize(uint32_t memTypeIndex)
6999 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7000 const VkDeviceSize heapSize = m_MemProps.memoryHeaps[heapIndex].size;
7001 const bool isSmallHeap = heapSize <= VMA_SMALL_HEAP_MAX_SIZE;
7002 return isSmallHeap ? (heapSize / 8) : m_PreferredLargeHeapBlockSize;
// Allocates memory of one specific memory type: prefers a dedicated
// VkDeviceMemory when requested/too large for a block, otherwise sub-allocates
// from the type's default block vector, falling back to dedicated memory if
// the block allocation fails.
// NOTE(review): numbering gaps show lost condition lines, argument lists and
// braces — surviving lines preserved byte-for-byte.
7005 VkResult VmaAllocator_T::AllocateMemoryOfType(
7006 const VkMemoryRequirements& vkMemReq,
7007 bool dedicatedAllocation,
7008 VkBuffer dedicatedBuffer,
7009 VkImage dedicatedImage,
7011 uint32_t memTypeIndex,
7012 VmaSuballocationType suballocType,
7013 VmaAllocation* pAllocation)
7015 VMA_ASSERT(pAllocation != VMA_NULL);
7016 VMA_DEBUG_LOG(
" AllocateMemory: MemoryTypeIndex=%u, Size=%llu", memTypeIndex, vkMemReq.size);
// Mapped-bit handling: the flag is meaningless for non-HOST_VISIBLE types.
7022 (m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) == 0)
7027 VmaBlockVector*
const blockVector = m_pBlockVectors[memTypeIndex];
7028 VMA_ASSERT(blockVector);
// Heuristic: allocations larger than half a block go to dedicated memory.
7030 const VkDeviceSize preferredBlockSize = blockVector->GetPreferredBlockSize();
7031 bool preferDedicatedMemory =
7032 VMA_DEBUG_ALWAYS_DEDICATED_MEMORY ||
7033 dedicatedAllocation ||
7035 vkMemReq.size > preferredBlockSize / 2;
7037 if(preferDedicatedMemory &&
7039 finalCreateInfo.
pool == VK_NULL_HANDLE)
7048 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7052 return AllocateDedicatedMemory(
// Normal path: sub-allocate from the default block vector of this type.
7066 VkResult res = blockVector->Allocate(
7068 m_CurrentFrameIndex.load(),
7073 if(res == VK_SUCCESS)
7081 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Fallback: block allocation failed — try a dedicated allocation instead.
7085 res = AllocateDedicatedMemory(
7091 finalCreateInfo.pUserData,
7095 if(res == VK_SUCCESS)
7098 VMA_DEBUG_LOG(
" Allocated as DedicatedMemory");
7104 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Allocates one whole VkDeviceMemory object for a single allocation, chaining
// VkMemoryDedicatedAllocateInfoKHR when VK_KHR_dedicated_allocation is in
// use, optionally maps it, wraps it in a VmaAllocation_T, and registers it in
// the sorted per-memory-type dedicated-allocation list.
// NOTE(review): numbering gaps show lost parameter/argument lines and braces.
7111 VkResult VmaAllocator_T::AllocateDedicatedMemory(
7113 VmaSuballocationType suballocType,
7114 uint32_t memTypeIndex,
7116 bool isUserDataString,
7118 VkBuffer dedicatedBuffer,
7119 VkImage dedicatedImage,
7120 VmaAllocation* pAllocation)
7122 VMA_ASSERT(pAllocation);
7124 VkMemoryAllocateInfo allocInfo = { VK_STRUCTURE_TYPE_MEMORY_ALLOCATE_INFO };
7125 allocInfo.memoryTypeIndex = memTypeIndex;
7126 allocInfo.allocationSize = size;
// Chain the dedicated-allocation struct for at most one of buffer/image.
7128 VkMemoryDedicatedAllocateInfoKHR dedicatedAllocInfo = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_ALLOCATE_INFO_KHR };
7129 if(m_UseKhrDedicatedAllocation)
7131 if(dedicatedBuffer != VK_NULL_HANDLE)
7133 VMA_ASSERT(dedicatedImage == VK_NULL_HANDLE);
7134 dedicatedAllocInfo.buffer = dedicatedBuffer;
7135 allocInfo.pNext = &dedicatedAllocInfo;
7137 else if(dedicatedImage != VK_NULL_HANDLE)
7139 dedicatedAllocInfo.image = dedicatedImage;
7140 allocInfo.pNext = &dedicatedAllocInfo;
7145 VkDeviceMemory hMemory = VK_NULL_HANDLE;
7146 VkResult res = AllocateVulkanMemory(&allocInfo, &hMemory);
7149 VMA_DEBUG_LOG(
" vkAllocateMemory FAILED");
// Optional persistent mapping; on failure the fresh memory is released.
7153 void* pMappedData = VMA_NULL;
7156 res = (*m_VulkanFunctions.vkMapMemory)(
7165 VMA_DEBUG_LOG(
" vkMapMemory FAILED");
7166 FreeVulkanMemory(memTypeIndex, size, hMemory);
7171 *pAllocation = vma_new(
this, VmaAllocation_T)(m_CurrentFrameIndex.load(), isUserDataString);
7172 (*pAllocation)->InitDedicatedAllocation(memTypeIndex, hMemory, suballocType, pMappedData, size);
7173 (*pAllocation)->SetUserData(
this, pUserData);
// Register in the sorted dedicated list so FreeDedicatedMemory can find it.
7177 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7178 AllocationVectorType* pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7179 VMA_ASSERT(pDedicatedAllocations);
7180 VmaVectorInsertSorted<VmaPointerLess>(*pDedicatedAllocations, *pAllocation);
7183 VMA_DEBUG_LOG(
" Allocated DedicatedMemory MemoryTypeIndex=#%u", memTypeIndex);
7188 void VmaAllocator_T::GetBufferMemoryRequirements(
7190 VkMemoryRequirements& memReq,
7191 bool& requiresDedicatedAllocation,
7192 bool& prefersDedicatedAllocation)
const 7194 if(m_UseKhrDedicatedAllocation)
7196 VkBufferMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_BUFFER_MEMORY_REQUIREMENTS_INFO_2_KHR };
7197 memReqInfo.buffer = hBuffer;
7199 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7201 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7202 memReq2.pNext = &memDedicatedReq;
7204 (*m_VulkanFunctions.vkGetBufferMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7206 memReq = memReq2.memoryRequirements;
7207 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7208 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
7212 (*m_VulkanFunctions.vkGetBufferMemoryRequirements)(m_hDevice, hBuffer, &memReq);
7213 requiresDedicatedAllocation =
false;
7214 prefersDedicatedAllocation =
false;
7218 void VmaAllocator_T::GetImageMemoryRequirements(
7220 VkMemoryRequirements& memReq,
7221 bool& requiresDedicatedAllocation,
7222 bool& prefersDedicatedAllocation)
const 7224 if(m_UseKhrDedicatedAllocation)
7226 VkImageMemoryRequirementsInfo2KHR memReqInfo = { VK_STRUCTURE_TYPE_IMAGE_MEMORY_REQUIREMENTS_INFO_2_KHR };
7227 memReqInfo.image = hImage;
7229 VkMemoryDedicatedRequirementsKHR memDedicatedReq = { VK_STRUCTURE_TYPE_MEMORY_DEDICATED_REQUIREMENTS_KHR };
7231 VkMemoryRequirements2KHR memReq2 = { VK_STRUCTURE_TYPE_MEMORY_REQUIREMENTS_2_KHR };
7232 memReq2.pNext = &memDedicatedReq;
7234 (*m_VulkanFunctions.vkGetImageMemoryRequirements2KHR)(m_hDevice, &memReqInfo, &memReq2);
7236 memReq = memReq2.memoryRequirements;
7237 requiresDedicatedAllocation = (memDedicatedReq.requiresDedicatedAllocation != VK_FALSE);
7238 prefersDedicatedAllocation = (memDedicatedReq.prefersDedicatedAllocation != VK_FALSE);
7242 (*m_VulkanFunctions.vkGetImageMemoryRequirements)(m_hDevice, hImage, &memReq);
7243 requiresDedicatedAllocation =
false;
7244 prefersDedicatedAllocation =
false;
// Top-level allocation entry: validates mutually exclusive create flags,
// routes pool allocations straight to the pool's block vector, otherwise
// iterates acceptable memory types (best first), retrying with the next type
// whenever allocation in the current one fails.
// NOTE(review): numbering gaps show lost flag-test conditions, argument lists
// and braces — surviving lines preserved byte-for-byte.
7248 VkResult VmaAllocator_T::AllocateMemory(
7249 const VkMemoryRequirements& vkMemReq,
7250 bool requiresDedicatedAllocation,
7251 bool prefersDedicatedAllocation,
7252 VkBuffer dedicatedBuffer,
7253 VkImage dedicatedImage,
7255 VmaSuballocationType suballocType,
7256 VmaAllocation* pAllocation)
// Reject contradictory flag combinations up front.
7261 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT together with VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT makes no sense.");
7262 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7267 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_MAPPED_BIT together with VMA_ALLOCATION_CREATE_CAN_BECOME_LOST_BIT is invalid.");
7268 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// A driver-required dedicated allocation is incompatible with NEVER_ALLOCATE
// and with custom pools.
7270 if(requiresDedicatedAllocation)
7274 VMA_ASSERT(0 &&
"VMA_ALLOCATION_CREATE_NEVER_ALLOCATE_BIT specified while dedicated allocation is required.");
7275 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7277 if(createInfo.
pool != VK_NULL_HANDLE)
7279 VMA_ASSERT(0 &&
"Pool specified while dedicated allocation is required.");
7280 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
7283 if((createInfo.
pool != VK_NULL_HANDLE) &&
7286 VMA_ASSERT(0 &&
"Specifying VMA_ALLOCATION_CREATE_DEDICATED_MEMORY_BIT when pool != null is invalid.");
7287 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Custom-pool path: delegate directly to the pool's block vector.
7290 if(createInfo.
pool != VK_NULL_HANDLE)
7292 return createInfo.
pool->m_BlockVector.Allocate(
7294 m_CurrentFrameIndex.load(),
// Default path: pick the best memory type, retry with the next candidate
// (clearing the failed type's bit) while allocation keeps failing.
7303 uint32_t memoryTypeBits = vkMemReq.memoryTypeBits;
7304 uint32_t memTypeIndex = UINT32_MAX;
7306 if(res == VK_SUCCESS)
7308 res = AllocateMemoryOfType(
7310 requiresDedicatedAllocation || prefersDedicatedAllocation,
7318 if(res == VK_SUCCESS)
7328 memoryTypeBits &= ~(1u << memTypeIndex);
7331 if(res == VK_SUCCESS)
7333 res = AllocateMemoryOfType(
7335 requiresDedicatedAllocation || prefersDedicatedAllocation,
7343 if(res == VK_SUCCESS)
7353 return VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Frees an allocation: lost allocations skip the release of device memory;
// block allocations are returned to their owning block vector (custom pool or
// the default one for their memory type), dedicated allocations release their
// whole VkDeviceMemory. Finally the VmaAllocation_T object itself is deleted.
// NOTE(review): numbering gaps show lost braces/`break` lines.
7364 void VmaAllocator_T::FreeMemory(
const VmaAllocation allocation)
7366 VMA_ASSERT(allocation);
// Only touch device memory if the allocation is not (already) lost.
7368 if(allocation->CanBecomeLost() ==
false ||
7369 allocation->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST)
7371 switch(allocation->GetType())
7373 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7375 VmaBlockVector* pBlockVector = VMA_NULL;
7376 VmaPool hPool = allocation->GetPool();
// Pool allocations free through their pool's block vector.
7377 if(hPool != VK_NULL_HANDLE)
7379 pBlockVector = &hPool->m_BlockVector;
7383 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7384 pBlockVector = m_pBlockVectors[memTypeIndex];
7386 pBlockVector->Free(allocation);
7389 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7390 FreeDedicatedMemory(allocation);
// Release user data (may free a copied string) before deleting the object.
7397 allocation->SetUserData(
this, VMA_NULL);
7398 vma_delete(
this, allocation);
// Computes global statistics: initializes all StatInfo buckets, then
// accumulates default block vectors, custom pools, and dedicated allocations,
// and finally post-processes totals and per-type/per-heap buckets.
// NOTE(review): numbering gaps show lost loop bodies (the InitStatInfo calls
// for the per-type/per-heap arrays) and the allocationStatInfo declaration.
7401 void VmaAllocator_T::CalculateStats(
VmaStats* pStats)
7404 InitStatInfo(pStats->
total);
7405 for(
size_t i = 0; i < VK_MAX_MEMORY_TYPES; ++i)
7407 for(
size_t i = 0; i < VK_MAX_MEMORY_HEAPS; ++i)
// Default block vectors, one per memory type.
7411 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7413 VmaBlockVector*
const pBlockVector = m_pBlockVectors[memTypeIndex];
7414 VMA_ASSERT(pBlockVector);
7415 pBlockVector->AddStats(pStats);
// Custom pools, under the pools mutex.
7420 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7421 for(
size_t poolIndex = 0, poolCount = m_Pools.size(); poolIndex < poolCount; ++poolIndex)
7423 m_Pools[poolIndex]->GetBlockVector().AddStats(pStats);
// Dedicated allocations, per memory type under its dedicated-list mutex.
7428 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7430 const uint32_t memHeapIndex = MemoryTypeIndexToHeapIndex(memTypeIndex);
7431 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7432 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7433 VMA_ASSERT(pDedicatedAllocVector);
7434 for(
size_t allocIndex = 0, allocCount = pDedicatedAllocVector->size(); allocIndex < allocCount; ++allocIndex)
7437 (*pDedicatedAllocVector)[allocIndex]->DedicatedAllocCalcStatsInfo(allocationStatInfo);
7438 VmaAddStatInfo(pStats->
total, allocationStatInfo);
7439 VmaAddStatInfo(pStats->
memoryType[memTypeIndex], allocationStatInfo);
7440 VmaAddStatInfo(pStats->
memoryHeap[memHeapIndex], allocationStatInfo);
// Derive averages etc. after all raw sums are in.
7445 VmaPostprocessCalcStatInfo(pStats->
total);
7446 for(
size_t i = 0; i < GetMemoryTypeCount(); ++i)
7447 VmaPostprocessCalcStatInfo(pStats->
memoryType[i]);
7448 for(
size_t i = 0; i < GetMemoryHeapCount(); ++i)
7449 VmaPostprocessCalcStatInfo(pStats->
memoryHeap[i]);
// AMD's PCI vendor ID (4098 == 0x1002), used to detect AMD GPUs.
7452 static const uint32_t VMA_VENDOR_ID_AMD = 4098;
// Public defragmentation driver: zeroes the output arrays, dispatches each
// eligible allocation (block-type, HOST_VISIBLE, not lost) to a
// VmaDefragmentator on its owning block vector, runs defragmentation on all
// default block vectors and custom pools, then destroys the defragmentators.
// NOTE(review): numbering gaps show lost braces and argument lines; memset
// sizes as shown clear only one element — the dropped lines presumably
// multiplied by the counts. Preserved byte-for-byte.
7454 VkResult VmaAllocator_T::Defragment(
7455 VmaAllocation* pAllocations,
7456 size_t allocationCount,
7457 VkBool32* pAllocationsChanged,
7461 if(pAllocationsChanged != VMA_NULL)
7463 memset(pAllocationsChanged, 0,
sizeof(*pAllocationsChanged));
7465 if(pDefragmentationStats != VMA_NULL)
7467 memset(pDefragmentationStats, 0,
sizeof(*pDefragmentationStats));
7470 const uint32_t currentFrameIndex = m_CurrentFrameIndex.load();
// Hold the pools mutex for the whole operation so pools cannot change.
7472 VmaMutexLock poolsLock(m_PoolsMutex, m_UseMutex);
7474 const size_t poolCount = m_Pools.size();
// Dispatch each movable allocation to the defragmentator of its vector.
7477 for(
size_t allocIndex = 0; allocIndex < allocationCount; ++allocIndex)
7479 VmaAllocation hAlloc = pAllocations[allocIndex];
7481 const uint32_t memTypeIndex = hAlloc->GetMemoryTypeIndex();
7483 if((hAlloc->GetType() == VmaAllocation_T::ALLOCATION_TYPE_BLOCK) &&
7485 ((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0) &&
7487 (hAlloc->GetLastUseFrameIndex() != VMA_FRAME_INDEX_LOST))
7489 VmaBlockVector* pAllocBlockVector = VMA_NULL;
7491 const VmaPool hAllocPool = hAlloc->GetPool();
7493 if(hAllocPool != VK_NULL_HANDLE)
7495 pAllocBlockVector = &hAllocPool->GetBlockVector();
7500 pAllocBlockVector = m_pBlockVectors[memTypeIndex];
7503 VmaDefragmentator*
const pDefragmentator = pAllocBlockVector->EnsureDefragmentator(
this, currentFrameIndex);
7505 VkBool32*
const pChanged = (pAllocationsChanged != VMA_NULL) ?
7506 &pAllocationsChanged[allocIndex] : VMA_NULL;
7507 pDefragmentator->AddAllocation(hAlloc, pChanged);
7511 VkResult result = VK_SUCCESS;
// No pDefragmentationInfo means unlimited budget.
7515 VkDeviceSize maxBytesToMove = SIZE_MAX;
7516 uint32_t maxAllocationsToMove = UINT32_MAX;
7517 if(pDefragmentationInfo != VMA_NULL)
// Defragment default block vectors of HOST_VISIBLE types, then custom pools.
7524 for(uint32_t memTypeIndex = 0;
7525 (memTypeIndex < GetMemoryTypeCount()) && (result == VK_SUCCESS);
7529 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7531 result = m_pBlockVectors[memTypeIndex]->Defragment(
7532 pDefragmentationStats,
7534 maxAllocationsToMove);
7539 for(
size_t poolIndex = 0; (poolIndex < poolCount) && (result == VK_SUCCESS); ++poolIndex)
7541 result = m_Pools[poolIndex]->GetBlockVector().Defragment(
7542 pDefragmentationStats,
7544 maxAllocationsToMove);
// Tear down defragmentators in reverse order of creation.
7550 for(
size_t poolIndex = poolCount; poolIndex--; )
7552 m_Pools[poolIndex]->GetBlockVector().DestroyDefragmentator();
7556 for(uint32_t memTypeIndex = GetMemoryTypeCount(); memTypeIndex--; )
7558 if((m_MemProps.memoryTypes[memTypeIndex].propertyFlags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
7560 m_pBlockVectors[memTypeIndex]->DestroyDefragmentator();
// Fills pAllocationInfo for an allocation. For allocations that can become
// lost it runs a compare-exchange loop "touching" the allocation (bumping its
// last-use frame) and reports zeroed memory fields if the allocation is
// already lost; otherwise it simply copies the current values.
// NOTE(review): numbering gaps show lost loop/brace lines and the lost-case
// memoryType/deviceMemory zeroing; surviving lines preserved byte-for-byte.
7567 void VmaAllocator_T::GetAllocationInfo(VmaAllocation hAllocation,
VmaAllocationInfo* pAllocationInfo)
7569 if(hAllocation->CanBecomeLost())
7575 uint32_t localCurrFrameIndex = m_CurrentFrameIndex.load();
7576 uint32_t localLastUseFrameIndex = hAllocation->GetLastUseFrameIndex();
// Already lost: only size and user data remain meaningful.
7579 if(localLastUseFrameIndex == VMA_FRAME_INDEX_LOST)
7583 pAllocationInfo->
offset = 0;
7584 pAllocationInfo->
size = hAllocation->GetSize();
7586 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Already touched this frame: report current state without another CAS.
7589 else if(localLastUseFrameIndex == localCurrFrameIndex)
7591 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7592 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7593 pAllocationInfo->
offset = hAllocation->GetOffset();
7594 pAllocationInfo->
size = hAllocation->GetSize();
7596 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Otherwise try to advance the last-use frame index atomically and retry.
7601 if(hAllocation->CompareExchangeLastUseFrameIndex(localLastUseFrameIndex, localCurrFrameIndex))
7603 localLastUseFrameIndex = localCurrFrameIndex;
// Non-lost-capable allocation: plain copy of all fields.
7610 pAllocationInfo->
memoryType = hAllocation->GetMemoryTypeIndex();
7611 pAllocationInfo->
deviceMemory = hAllocation->GetMemory();
7612 pAllocationInfo->
offset = hAllocation->GetOffset();
7613 pAllocationInfo->
size = hAllocation->GetSize();
7614 pAllocationInfo->
pMappedData = hAllocation->GetMappedData();
7615 pAllocationInfo->
pUserData = hAllocation->GetUserData();
// Creates a custom memory pool: constructs the VmaPool_T, pre-allocates its
// minimum number of blocks, and registers the pool in the sorted pool list.
// NOTE(review): the lines building `newCreateInfo` from *pCreateInfo were
// lost in extraction (numbering gap 7621→7634).
7619 VkResult VmaAllocator_T::CreatePool(
const VmaPoolCreateInfo* pCreateInfo, VmaPool* pPool)
7621 VMA_DEBUG_LOG(
" CreatePool: MemoryTypeIndex=%u", pCreateInfo->
memoryTypeIndex);
7634 *pPool = vma_new(
this, VmaPool_T)(
this, newCreateInfo);
// Pre-create the configured minimum number of blocks; roll back on failure.
7636 VkResult res = (*pPool)->m_BlockVector.CreateMinBlocks();
7637 if(res != VK_SUCCESS)
7639 vma_delete(
this, *pPool);
// Register the pool so CalculateStats/Defragment can find it.
7646 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7647 VmaVectorInsertSorted<VmaPointerLess>(m_Pools, *pPool);
7653 void VmaAllocator_T::DestroyPool(VmaPool pool)
7657 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7658 bool success = VmaVectorRemoveSorted<VmaPointerLess>(m_Pools, pool);
7659 VMA_ASSERT(success &&
"Pool not found in Allocator.");
7662 vma_delete(
this, pool);
7665 void VmaAllocator_T::GetPoolStats(VmaPool pool,
VmaPoolStats* pPoolStats)
7667 pool->m_BlockVector.GetPoolStats(pPoolStats);
7670 void VmaAllocator_T::SetCurrentFrameIndex(uint32_t frameIndex)
7672 m_CurrentFrameIndex.store(frameIndex);
7675 void VmaAllocator_T::MakePoolAllocationsLost(
7677 size_t* pLostAllocationCount)
7679 hPool->m_BlockVector.MakePoolAllocationsLost(
7680 m_CurrentFrameIndex.load(),
7681 pLostAllocationCount);
7684 void VmaAllocator_T::CreateLostAllocation(VmaAllocation* pAllocation)
7686 *pAllocation = vma_new(
this, VmaAllocation_T)(VMA_FRAME_INDEX_LOST,
false);
7687 (*pAllocation)->InitLost();
// Central vkAllocateMemory wrapper: enforces the optional per-heap size
// limit (decrementing the remaining budget on success) and invokes the
// user's pfnAllocate device-memory callback.
// NOTE(review): numbering gaps show lost braces and the `VkResult res`
// declaration; surviving lines preserved byte-for-byte.
7690 VkResult VmaAllocator_T::AllocateVulkanMemory(
const VkMemoryAllocateInfo* pAllocateInfo, VkDeviceMemory* pMemory)
7692 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(pAllocateInfo->memoryTypeIndex);
// Limited heap: check and update the remaining budget under its mutex.
7695 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7697 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7698 if(m_HeapSizeLimit[heapIndex] >= pAllocateInfo->allocationSize)
7700 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
7701 if(res == VK_SUCCESS)
7703 m_HeapSizeLimit[heapIndex] -= pAllocateInfo->allocationSize;
// Budget exceeded: fail without calling the driver.
7708 res = VK_ERROR_OUT_OF_DEVICE_MEMORY;
// Unlimited heap: straight passthrough.
7713 res = (*m_VulkanFunctions.vkAllocateMemory)(m_hDevice, pAllocateInfo, GetAllocationCallbacks(), pMemory);
// Notify the user's allocation callback on success.
7716 if(res == VK_SUCCESS && m_DeviceMemoryCallbacks.
pfnAllocate != VMA_NULL)
7718 (*m_DeviceMemoryCallbacks.
pfnAllocate)(
this, pAllocateInfo->memoryTypeIndex, *pMemory, pAllocateInfo->allocationSize);
7724 void VmaAllocator_T::FreeVulkanMemory(uint32_t memoryType, VkDeviceSize size, VkDeviceMemory hMemory)
7726 if(m_DeviceMemoryCallbacks.
pfnFree != VMA_NULL)
7728 (*m_DeviceMemoryCallbacks.
pfnFree)(
this, memoryType, hMemory, size);
7731 (*m_VulkanFunctions.vkFreeMemory)(m_hDevice, hMemory, GetAllocationCallbacks());
7733 const uint32_t heapIndex = MemoryTypeIndexToHeapIndex(memoryType);
7734 if(m_HeapSizeLimit[heapIndex] != VK_WHOLE_SIZE)
7736 VmaMutexLock lock(m_HeapSizeLimitMutex, m_UseMutex);
7737 m_HeapSizeLimit[heapIndex] += size;
7741 VkResult VmaAllocator_T::Map(VmaAllocation hAllocation,
void** ppData)
7743 if(hAllocation->CanBecomeLost())
7745 return VK_ERROR_MEMORY_MAP_FAILED;
7748 switch(hAllocation->GetType())
7750 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7752 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
7753 char *pBytes = VMA_NULL;
7754 VkResult res = pBlock->Map(
this, 1, (
void**)&pBytes);
7755 if(res == VK_SUCCESS)
7757 *ppData = pBytes + (ptrdiff_t)hAllocation->GetOffset();
7758 hAllocation->BlockAllocMap();
7762 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7763 return hAllocation->DedicatedAllocMap(
this, ppData);
7766 return VK_ERROR_MEMORY_MAP_FAILED;
7770 void VmaAllocator_T::Unmap(VmaAllocation hAllocation)
7772 switch(hAllocation->GetType())
7774 case VmaAllocation_T::ALLOCATION_TYPE_BLOCK:
7776 VmaDeviceMemoryBlock*
const pBlock = hAllocation->GetBlock();
7777 hAllocation->BlockAllocUnmap();
7778 pBlock->Unmap(
this, 1);
7781 case VmaAllocation_T::ALLOCATION_TYPE_DEDICATED:
7782 hAllocation->DedicatedAllocUnmap(
this);
7789 void VmaAllocator_T::FreeDedicatedMemory(VmaAllocation allocation)
7791 VMA_ASSERT(allocation && allocation->GetType() == VmaAllocation_T::ALLOCATION_TYPE_DEDICATED);
7793 const uint32_t memTypeIndex = allocation->GetMemoryTypeIndex();
7795 VmaMutexLock lock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7796 AllocationVectorType*
const pDedicatedAllocations = m_pDedicatedAllocations[memTypeIndex];
7797 VMA_ASSERT(pDedicatedAllocations);
7798 bool success = VmaVectorRemoveSorted<VmaPointerLess>(*pDedicatedAllocations, allocation);
7799 VMA_ASSERT(success);
7802 VkDeviceMemory hMemory = allocation->GetMemory();
7804 if(allocation->GetMappedData() != VMA_NULL)
7806 (*m_VulkanFunctions.vkUnmapMemory)(m_hDevice, hMemory);
7809 FreeVulkanMemory(memTypeIndex, allocation->GetSize(), hMemory);
7811 VMA_DEBUG_LOG(
" Freed DedicatedMemory MemoryTypeIndex=%u", memTypeIndex);
// Writes the allocator's detailed state as JSON: a "DedicatedAllocations"
// section (per memory type), a "DefaultPools" section for non-empty default
// block vectors, and a "Pools" array for custom pools. Sections are opened
// lazily, only when the first non-empty entry is found.
// NOTE(review): numbering gaps show lost json.Begin/EndObject/EndArray calls
// and braces; the surviving lines are preserved byte-for-byte.
7814 #if VMA_STATS_STRING_ENABLED 7816 void VmaAllocator_T::PrintDetailedMap(VmaJsonWriter& json)
7818 bool dedicatedAllocationsStarted =
false;
7819 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7821 VmaMutexLock dedicatedAllocationsLock(m_DedicatedAllocationsMutex[memTypeIndex], m_UseMutex);
7822 AllocationVectorType*
const pDedicatedAllocVector = m_pDedicatedAllocations[memTypeIndex];
7823 VMA_ASSERT(pDedicatedAllocVector);
7824 if(pDedicatedAllocVector->empty() ==
false)
// Open the "DedicatedAllocations" object on the first non-empty type.
7826 if(dedicatedAllocationsStarted ==
false)
7828 dedicatedAllocationsStarted =
true;
7829 json.WriteString(
"DedicatedAllocations");
7833 json.BeginString(
"Type ");
7834 json.ContinueString(memTypeIndex);
7839 for(
size_t i = 0; i < pDedicatedAllocVector->size(); ++i)
7841 const VmaAllocation hAlloc = (*pDedicatedAllocVector)[i];
7842 json.BeginObject(
true);
7844 json.WriteString(
"Type");
7845 json.WriteString(VMA_SUBALLOCATION_TYPE_NAMES[hAlloc->GetSuballocationType()]);
7847 json.WriteString(
"Size");
7848 json.WriteNumber(hAlloc->GetSize());
// User data is emitted as a string or as a pointer value.
7850 const void* pUserData = hAlloc->GetUserData();
7851 if(pUserData != VMA_NULL)
7853 json.WriteString(
"UserData");
7854 if(hAlloc->IsUserDataString())
7856 json.WriteString((
const char*)pUserData);
7861 json.ContinueString_Pointer(pUserData);
7872 if(dedicatedAllocationsStarted)
// Default block vectors, keyed "Type <index>", only if non-empty.
7878 bool allocationsStarted =
false;
7879 for(uint32_t memTypeIndex = 0; memTypeIndex < GetMemoryTypeCount(); ++memTypeIndex)
7881 if(m_pBlockVectors[memTypeIndex]->IsEmpty() ==
false)
7883 if(allocationsStarted ==
false)
7885 allocationsStarted =
true;
7886 json.WriteString(
"DefaultPools");
7890 json.BeginString(
"Type ");
7891 json.ContinueString(memTypeIndex);
7894 m_pBlockVectors[memTypeIndex]->PrintDetailedMap(json);
7897 if(allocationsStarted)
// Custom pools as an array, under the pools mutex.
7904 VmaMutexLock lock(m_PoolsMutex, m_UseMutex);
7905 const size_t poolCount = m_Pools.size();
7908 json.WriteString(
"Pools");
7910 for(
size_t poolIndex = 0; poolIndex < poolCount; ++poolIndex)
7912 m_Pools[poolIndex]->m_BlockVector.PrintDetailedMap(json);
7919 #endif // #if VMA_STATS_STRING_ENABLED 7921 static VkResult AllocateMemoryForImage(
7922 VmaAllocator allocator,
7925 VmaSuballocationType suballocType,
7926 VmaAllocation* pAllocation)
7928 VMA_ASSERT(allocator && (image != VK_NULL_HANDLE) && pAllocationCreateInfo && pAllocation);
7930 VkMemoryRequirements vkMemReq = {};
7931 bool requiresDedicatedAllocation =
false;
7932 bool prefersDedicatedAllocation =
false;
7933 allocator->GetImageMemoryRequirements(image, vkMemReq,
7934 requiresDedicatedAllocation, prefersDedicatedAllocation);
7936 return allocator->AllocateMemory(
7938 requiresDedicatedAllocation,
7939 prefersDedicatedAllocation,
7942 *pAllocationCreateInfo,
7952 VmaAllocator* pAllocator)
7954 VMA_ASSERT(pCreateInfo && pAllocator);
7955 VMA_DEBUG_LOG(
"vmaCreateAllocator");
7961 VmaAllocator allocator)
7963 if(allocator != VK_NULL_HANDLE)
7965 VMA_DEBUG_LOG(
"vmaDestroyAllocator");
7966 VkAllocationCallbacks allocationCallbacks = allocator->m_AllocationCallbacks;
7967 vma_delete(&allocationCallbacks, allocator);
7972 VmaAllocator allocator,
7973 const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
7975 VMA_ASSERT(allocator && ppPhysicalDeviceProperties);
7976 *ppPhysicalDeviceProperties = &allocator->m_PhysicalDeviceProperties;
7980 VmaAllocator allocator,
7981 const VkPhysicalDeviceMemoryProperties** ppPhysicalDeviceMemoryProperties)
7983 VMA_ASSERT(allocator && ppPhysicalDeviceMemoryProperties);
7984 *ppPhysicalDeviceMemoryProperties = &allocator->m_MemProps;
7988 VmaAllocator allocator,
7989 uint32_t memoryTypeIndex,
7990 VkMemoryPropertyFlags* pFlags)
7992 VMA_ASSERT(allocator && pFlags);
7993 VMA_ASSERT(memoryTypeIndex < allocator->GetMemoryTypeCount());
7994 *pFlags = allocator->m_MemProps.memoryTypes[memoryTypeIndex].propertyFlags;
7998 VmaAllocator allocator,
7999 uint32_t frameIndex)
8001 VMA_ASSERT(allocator);
8002 VMA_ASSERT(frameIndex != VMA_FRAME_INDEX_LOST);
8004 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8006 allocator->SetCurrentFrameIndex(frameIndex);
8010 VmaAllocator allocator,
8013 VMA_ASSERT(allocator && pStats);
8014 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8015 allocator->CalculateStats(pStats);
8018 #if VMA_STATS_STRING_ENABLED 8021 VmaAllocator allocator,
8022 char** ppStatsString,
8023 VkBool32 detailedMap)
8025 VMA_ASSERT(allocator && ppStatsString);
8026 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8028 VmaStringBuilder sb(allocator);
8030 VmaJsonWriter json(allocator->GetAllocationCallbacks(), sb);
8034 allocator->CalculateStats(&stats);
8036 json.WriteString(
"Total");
8037 VmaPrintStatInfo(json, stats.
total);
8039 for(uint32_t heapIndex = 0; heapIndex < allocator->GetMemoryHeapCount(); ++heapIndex)
8041 json.BeginString(
"Heap ");
8042 json.ContinueString(heapIndex);
8046 json.WriteString(
"Size");
8047 json.WriteNumber(allocator->m_MemProps.memoryHeaps[heapIndex].size);
8049 json.WriteString(
"Flags");
8050 json.BeginArray(
true);
8051 if((allocator->m_MemProps.memoryHeaps[heapIndex].flags & VK_MEMORY_HEAP_DEVICE_LOCAL_BIT) != 0)
8053 json.WriteString(
"DEVICE_LOCAL");
8059 json.WriteString(
"Stats");
8060 VmaPrintStatInfo(json, stats.
memoryHeap[heapIndex]);
8063 for(uint32_t typeIndex = 0; typeIndex < allocator->GetMemoryTypeCount(); ++typeIndex)
8065 if(allocator->MemoryTypeIndexToHeapIndex(typeIndex) == heapIndex)
8067 json.BeginString(
"Type ");
8068 json.ContinueString(typeIndex);
8073 json.WriteString(
"Flags");
8074 json.BeginArray(
true);
8075 VkMemoryPropertyFlags flags = allocator->m_MemProps.memoryTypes[typeIndex].propertyFlags;
8076 if((flags & VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT) != 0)
8078 json.WriteString(
"DEVICE_LOCAL");
8080 if((flags & VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT) != 0)
8082 json.WriteString(
"HOST_VISIBLE");
8084 if((flags & VK_MEMORY_PROPERTY_HOST_COHERENT_BIT) != 0)
8086 json.WriteString(
"HOST_COHERENT");
8088 if((flags & VK_MEMORY_PROPERTY_HOST_CACHED_BIT) != 0)
8090 json.WriteString(
"HOST_CACHED");
8092 if((flags & VK_MEMORY_PROPERTY_LAZILY_ALLOCATED_BIT) != 0)
8094 json.WriteString(
"LAZILY_ALLOCATED");
8100 json.WriteString(
"Stats");
8101 VmaPrintStatInfo(json, stats.
memoryType[typeIndex]);
8110 if(detailedMap == VK_TRUE)
8112 allocator->PrintDetailedMap(json);
8118 const size_t len = sb.GetLength();
8119 char*
const pChars = vma_new_array(allocator,
char, len + 1);
8122 memcpy(pChars, sb.GetData(), len);
8125 *ppStatsString = pChars;
8129 VmaAllocator allocator,
8132 if(pStatsString != VMA_NULL)
8134 VMA_ASSERT(allocator);
8135 size_t len = strlen(pStatsString);
8136 vma_delete_array(allocator, pStatsString, len + 1);
8140 #endif // #if VMA_STATS_STRING_ENABLED 8146 VmaAllocator allocator,
8147 uint32_t memoryTypeBits,
8149 uint32_t* pMemoryTypeIndex)
8151 VMA_ASSERT(allocator != VK_NULL_HANDLE);
8152 VMA_ASSERT(pAllocationCreateInfo != VMA_NULL);
8153 VMA_ASSERT(pMemoryTypeIndex != VMA_NULL);
8160 uint32_t requiredFlags = pAllocationCreateInfo->
requiredFlags;
8164 switch(pAllocationCreateInfo->
usage)
8169 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8172 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT | VK_MEMORY_PROPERTY_HOST_COHERENT_BIT;
8175 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8176 preferredFlags |= VK_MEMORY_PROPERTY_DEVICE_LOCAL_BIT;
8179 requiredFlags |= VK_MEMORY_PROPERTY_HOST_VISIBLE_BIT;
8180 preferredFlags |= VK_MEMORY_PROPERTY_HOST_COHERENT_BIT | VK_MEMORY_PROPERTY_HOST_CACHED_BIT;
8186 *pMemoryTypeIndex = UINT32_MAX;
8187 uint32_t minCost = UINT32_MAX;
8188 for(uint32_t memTypeIndex = 0, memTypeBit = 1;
8189 memTypeIndex < allocator->GetMemoryTypeCount();
8190 ++memTypeIndex, memTypeBit <<= 1)
8193 if((memTypeBit & memoryTypeBits) != 0)
8195 const VkMemoryPropertyFlags currFlags =
8196 allocator->m_MemProps.memoryTypes[memTypeIndex].propertyFlags;
8198 if((requiredFlags & ~currFlags) == 0)
8201 uint32_t currCost = VmaCountBitsSet(preferredFlags & ~currFlags);
8203 if(currCost < minCost)
8205 *pMemoryTypeIndex = memTypeIndex;
8215 return (*pMemoryTypeIndex != UINT32_MAX) ? VK_SUCCESS : VK_ERROR_FEATURE_NOT_PRESENT;
8219 VmaAllocator allocator,
8223 VMA_ASSERT(allocator && pCreateInfo && pPool);
8225 VMA_DEBUG_LOG(
"vmaCreatePool");
8227 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8229 return allocator->CreatePool(pCreateInfo, pPool);
8233 VmaAllocator allocator,
8236 VMA_ASSERT(allocator);
8238 if(pool == VK_NULL_HANDLE)
8243 VMA_DEBUG_LOG(
"vmaDestroyPool");
8245 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8247 allocator->DestroyPool(pool);
8251 VmaAllocator allocator,
8255 VMA_ASSERT(allocator && pool && pPoolStats);
8257 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8259 allocator->GetPoolStats(pool, pPoolStats);
8263 VmaAllocator allocator,
8265 size_t* pLostAllocationCount)
8267 VMA_ASSERT(allocator && pool);
8269 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8271 allocator->MakePoolAllocationsLost(pool, pLostAllocationCount);
8275 VmaAllocator allocator,
8276 const VkMemoryRequirements* pVkMemoryRequirements,
8278 VmaAllocation* pAllocation,
8281 VMA_ASSERT(allocator && pVkMemoryRequirements && pCreateInfo && pAllocation);
8283 VMA_DEBUG_LOG(
"vmaAllocateMemory");
8285 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8287 VkResult result = allocator->AllocateMemory(
8288 *pVkMemoryRequirements,
8294 VMA_SUBALLOCATION_TYPE_UNKNOWN,
8297 if(pAllocationInfo && result == VK_SUCCESS)
8299 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8306 VmaAllocator allocator,
8309 VmaAllocation* pAllocation,
8312 VMA_ASSERT(allocator && buffer != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8314 VMA_DEBUG_LOG(
"vmaAllocateMemoryForBuffer");
8316 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8318 VkMemoryRequirements vkMemReq = {};
8319 bool requiresDedicatedAllocation =
false;
8320 bool prefersDedicatedAllocation =
false;
8321 allocator->GetBufferMemoryRequirements(buffer, vkMemReq,
8322 requiresDedicatedAllocation,
8323 prefersDedicatedAllocation);
8325 VkResult result = allocator->AllocateMemory(
8327 requiresDedicatedAllocation,
8328 prefersDedicatedAllocation,
8332 VMA_SUBALLOCATION_TYPE_BUFFER,
8335 if(pAllocationInfo && result == VK_SUCCESS)
8337 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8344 VmaAllocator allocator,
8347 VmaAllocation* pAllocation,
8350 VMA_ASSERT(allocator && image != VK_NULL_HANDLE && pCreateInfo && pAllocation);
8352 VMA_DEBUG_LOG(
"vmaAllocateMemoryForImage");
8354 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8356 VkResult result = AllocateMemoryForImage(
8360 VMA_SUBALLOCATION_TYPE_IMAGE_UNKNOWN,
8363 if(pAllocationInfo && result == VK_SUCCESS)
8365 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8372 VmaAllocator allocator,
8373 VmaAllocation allocation)
8375 VMA_ASSERT(allocator && allocation);
8377 VMA_DEBUG_LOG(
"vmaFreeMemory");
8379 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8381 allocator->FreeMemory(allocation);
8385 VmaAllocator allocator,
8386 VmaAllocation allocation,
8389 VMA_ASSERT(allocator && allocation && pAllocationInfo);
8391 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8393 allocator->GetAllocationInfo(allocation, pAllocationInfo);
8397 VmaAllocator allocator,
8398 VmaAllocation allocation,
8401 VMA_ASSERT(allocator && allocation);
8403 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8405 allocation->SetUserData(allocator, pUserData);
8409 VmaAllocator allocator,
8410 VmaAllocation* pAllocation)
8412 VMA_ASSERT(allocator && pAllocation);
8414 VMA_DEBUG_GLOBAL_MUTEX_LOCK;
8416 allocator->CreateLostAllocation(pAllocation);
8420 VmaAllocator allocator,
8421 VmaAllocation allocation,
8424 VMA_ASSERT(allocator && allocation && ppData);
8426 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8428 return allocator->Map(allocation, ppData);
8432 VmaAllocator allocator,
8433 VmaAllocation allocation)
8435 VMA_ASSERT(allocator && allocation);
8437 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8439 allocator->Unmap(allocation);
8443 VmaAllocator allocator,
8444 VmaAllocation* pAllocations,
8445 size_t allocationCount,
8446 VkBool32* pAllocationsChanged,
8450 VMA_ASSERT(allocator && pAllocations);
8452 VMA_DEBUG_LOG(
"vmaDefragment");
8454 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8456 return allocator->Defragment(pAllocations, allocationCount, pAllocationsChanged, pDefragmentationInfo, pDefragmentationStats);
8460 VmaAllocator allocator,
8461 const VkBufferCreateInfo* pBufferCreateInfo,
8464 VmaAllocation* pAllocation,
8467 VMA_ASSERT(allocator && pBufferCreateInfo && pAllocationCreateInfo && pBuffer && pAllocation);
8469 VMA_DEBUG_LOG(
"vmaCreateBuffer");
8471 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8473 *pBuffer = VK_NULL_HANDLE;
8474 *pAllocation = VK_NULL_HANDLE;
8477 VkResult res = (*allocator->GetVulkanFunctions().vkCreateBuffer)(
8478 allocator->m_hDevice,
8480 allocator->GetAllocationCallbacks(),
8485 VkMemoryRequirements vkMemReq = {};
8486 bool requiresDedicatedAllocation =
false;
8487 bool prefersDedicatedAllocation =
false;
8488 allocator->GetBufferMemoryRequirements(*pBuffer, vkMemReq,
8489 requiresDedicatedAllocation, prefersDedicatedAllocation);
8493 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_TEXEL_BUFFER_BIT) != 0)
8495 VMA_ASSERT(vkMemReq.alignment %
8496 allocator->m_PhysicalDeviceProperties.limits.minTexelBufferOffsetAlignment == 0);
8498 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_UNIFORM_BUFFER_BIT) != 0)
8500 VMA_ASSERT(vkMemReq.alignment %
8501 allocator->m_PhysicalDeviceProperties.limits.minUniformBufferOffsetAlignment == 0);
8503 if((pBufferCreateInfo->usage & VK_BUFFER_USAGE_STORAGE_BUFFER_BIT) != 0)
8505 VMA_ASSERT(vkMemReq.alignment %
8506 allocator->m_PhysicalDeviceProperties.limits.minStorageBufferOffsetAlignment == 0);
8510 res = allocator->AllocateMemory(
8512 requiresDedicatedAllocation,
8513 prefersDedicatedAllocation,
8516 *pAllocationCreateInfo,
8517 VMA_SUBALLOCATION_TYPE_BUFFER,
8522 res = (*allocator->GetVulkanFunctions().vkBindBufferMemory)(
8523 allocator->m_hDevice,
8525 (*pAllocation)->GetMemory(),
8526 (*pAllocation)->GetOffset());
8530 if(pAllocationInfo != VMA_NULL)
8532 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8536 allocator->FreeMemory(*pAllocation);
8537 *pAllocation = VK_NULL_HANDLE;
8538 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8539 *pBuffer = VK_NULL_HANDLE;
8542 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, *pBuffer, allocator->GetAllocationCallbacks());
8543 *pBuffer = VK_NULL_HANDLE;
8550 VmaAllocator allocator,
8552 VmaAllocation allocation)
8554 if(buffer != VK_NULL_HANDLE)
8556 VMA_ASSERT(allocator);
8558 VMA_DEBUG_LOG(
"vmaDestroyBuffer");
8560 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8562 (*allocator->GetVulkanFunctions().vkDestroyBuffer)(allocator->m_hDevice, buffer, allocator->GetAllocationCallbacks());
8564 allocator->FreeMemory(allocation);
8569 VmaAllocator allocator,
8570 const VkImageCreateInfo* pImageCreateInfo,
8573 VmaAllocation* pAllocation,
8576 VMA_ASSERT(allocator && pImageCreateInfo && pAllocationCreateInfo && pImage && pAllocation);
8578 VMA_DEBUG_LOG(
"vmaCreateImage");
8580 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8582 *pImage = VK_NULL_HANDLE;
8583 *pAllocation = VK_NULL_HANDLE;
8586 VkResult res = (*allocator->GetVulkanFunctions().vkCreateImage)(
8587 allocator->m_hDevice,
8589 allocator->GetAllocationCallbacks(),
8593 VmaSuballocationType suballocType = pImageCreateInfo->tiling == VK_IMAGE_TILING_OPTIMAL ?
8594 VMA_SUBALLOCATION_TYPE_IMAGE_OPTIMAL :
8595 VMA_SUBALLOCATION_TYPE_IMAGE_LINEAR;
8598 res = AllocateMemoryForImage(allocator, *pImage, pAllocationCreateInfo, suballocType, pAllocation);
8602 res = (*allocator->GetVulkanFunctions().vkBindImageMemory)(
8603 allocator->m_hDevice,
8605 (*pAllocation)->GetMemory(),
8606 (*pAllocation)->GetOffset());
8610 if(pAllocationInfo != VMA_NULL)
8612 allocator->GetAllocationInfo(*pAllocation, pAllocationInfo);
8616 allocator->FreeMemory(*pAllocation);
8617 *pAllocation = VK_NULL_HANDLE;
8618 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8619 *pImage = VK_NULL_HANDLE;
8622 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, *pImage, allocator->GetAllocationCallbacks());
8623 *pImage = VK_NULL_HANDLE;
8630 VmaAllocator allocator,
8632 VmaAllocation allocation)
8634 if(image != VK_NULL_HANDLE)
8636 VMA_ASSERT(allocator);
8638 VMA_DEBUG_LOG(
"vmaDestroyImage");
8640 VMA_DEBUG_GLOBAL_MUTEX_LOCK
8642 (*allocator->GetVulkanFunctions().vkDestroyImage)(allocator->m_hDevice, image, allocator->GetAllocationCallbacks());
8644 allocator->FreeMemory(allocation);
8648 #endif // #ifdef VMA_IMPLEMENTATION PFN_vkGetPhysicalDeviceProperties vkGetPhysicalDeviceProperties
Definition: vk_mem_alloc.h:793
Set this flag if the allocation should have its own memory block.
Definition: vk_mem_alloc.h:1047
void vmaUnmapMemory(VmaAllocator allocator, VmaAllocation allocation)
Unmaps memory represented by given allocation, mapped previously using vmaMapMemory().
VkPhysicalDevice physicalDevice
Vulkan physical device.
Definition: vk_mem_alloc.h:818
VkResult vmaDefragment(VmaAllocator allocator, VmaAllocation *pAllocations, size_t allocationCount, VkBool32 *pAllocationsChanged, const VmaDefragmentationInfo *pDefragmentationInfo, VmaDefragmentationStats *pDefragmentationStats)
Compacts memory by moving allocations.
PFN_vkCreateBuffer vkCreateBuffer
Definition: vk_mem_alloc.h:803
void vmaFreeStatsString(VmaAllocator allocator, char *pStatsString)
struct VmaStats VmaStats
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:1004
PFN_vkMapMemory vkMapMemory
Definition: vk_mem_alloc.h:797
VkDeviceMemory deviceMemory
Handle to Vulkan memory object.
Definition: vk_mem_alloc.h:1315
VmaAllocatorCreateFlags flags
Flags for created allocator. Use VmaAllocatorCreateFlagBits enum.
Definition: vk_mem_alloc.h:815
uint32_t maxAllocationsToMove
Maximum number of allocations that can be moved to different place.
Definition: vk_mem_alloc.h:1481
Use this flag if you always allocate only buffers and linear images or only optimal images out of thi...
Definition: vk_mem_alloc.h:1185
void vmaMakePoolAllocationsLost(VmaAllocator allocator, VmaPool pool, size_t *pLostAllocationCount)
Marks all allocations in given pool as lost if they are not used in current frame or VmaPoolCreateInf...
VkDeviceSize size
Total amount of VkDeviceMemory allocated from Vulkan for this pool, in bytes.
Definition: vk_mem_alloc.h:1239
Definition: vk_mem_alloc.h:1084
VkFlags VmaAllocatorCreateFlags
Definition: vk_mem_alloc.h:786
VkMemoryPropertyFlags preferredFlags
Flags that preferably should be set in a memory type chosen for an allocation.
Definition: vk_mem_alloc.h:1122
Definition: vk_mem_alloc.h:1031
const VkAllocationCallbacks * pAllocationCallbacks
Custom CPU memory allocation callbacks.
Definition: vk_mem_alloc.h:827
void vmaCalculateStats(VmaAllocator allocator, VmaStats *pStats)
Retrieves statistics from current state of the Allocator.
const VmaVulkanFunctions * pVulkanFunctions
Pointers to Vulkan functions. Can be null if you leave define VMA_STATIC_VULKAN_FUNCTIONS 1...
Definition: vk_mem_alloc.h:880
Description of a Allocator to be created.
Definition: vk_mem_alloc.h:812
void vmaDestroyAllocator(VmaAllocator allocator)
Destroys allocator object.
VmaAllocationCreateFlagBits
Flags to be passed as VmaAllocationCreateInfo::flags.
Definition: vk_mem_alloc.h:1035
void vmaGetAllocationInfo(VmaAllocator allocator, VmaAllocation allocation, VmaAllocationInfo *pAllocationInfo)
Returns current information about specified allocation.
VkDeviceSize allocationSizeMax
Definition: vk_mem_alloc.h:945
PFN_vkBindImageMemory vkBindImageMemory
Definition: vk_mem_alloc.h:800
VkDeviceSize unusedBytes
Total number of bytes occupied by unused ranges.
Definition: vk_mem_alloc.h:944
PFN_vkGetImageMemoryRequirements2KHR vkGetImageMemoryRequirements2KHR
Definition: vk_mem_alloc.h:808
Statistics returned by function vmaDefragment().
Definition: vk_mem_alloc.h:1485
void vmaFreeMemory(VmaAllocator allocator, VmaAllocation allocation)
Frees memory previously allocated using vmaAllocateMemory(), vmaAllocateMemoryForBuffer(), or vmaAllocateMemoryForImage().
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:844
VmaStatInfo total
Definition: vk_mem_alloc.h:954
uint32_t deviceMemoryBlocksFreed
Number of empty VkDeviceMemory objects that have been released to the system.
Definition: vk_mem_alloc.h:1493
VmaAllocationCreateFlags flags
Use VmaAllocationCreateFlagBits enum.
Definition: vk_mem_alloc.h:1106
VkDeviceSize maxBytesToMove
Maximum total numbers of bytes that can be copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1476
PFN_vkGetBufferMemoryRequirements vkGetBufferMemoryRequirements
Definition: vk_mem_alloc.h:801
void(VKAPI_PTR * PFN_vmaAllocateDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called after successful vkAllocateMemory.
Definition: vk_mem_alloc.h:728
VkDevice device
Vulkan device.
Definition: vk_mem_alloc.h:821
Describes parameter of created VmaPool.
Definition: vk_mem_alloc.h:1193
Definition: vk_mem_alloc.h:1187
VkDeviceSize size
Size of this allocation, in bytes.
Definition: vk_mem_alloc.h:1325
void vmaGetMemoryTypeProperties(VmaAllocator allocator, uint32_t memoryTypeIndex, VkMemoryPropertyFlags *pFlags)
Given Memory Type Index, returns Property Flags of this memory type.
PFN_vkUnmapMemory vkUnmapMemory
Definition: vk_mem_alloc.h:798
void * pUserData
Custom general-purpose pointer that will be stored in VmaAllocation, can be read as VmaAllocationInfo...
Definition: vk_mem_alloc.h:1143
size_t minBlockCount
Minimum number of blocks to be always allocated in this pool, even if they stay empty.
Definition: vk_mem_alloc.h:1209
size_t allocationCount
Number of VmaAllocation objects created from this pool that were not destroyed or lost...
Definition: vk_mem_alloc.h:1245
struct VmaVulkanFunctions VmaVulkanFunctions
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:784
uint32_t memoryTypeIndex
Vulkan memory type index to allocate this pool from.
Definition: vk_mem_alloc.h:1196
VkResult vmaFindMemoryTypeIndex(VmaAllocator allocator, uint32_t memoryTypeBits, const VmaAllocationCreateInfo *pAllocationCreateInfo, uint32_t *pMemoryTypeIndex)
VmaMemoryUsage
Definition: vk_mem_alloc.h:982
struct VmaAllocationInfo VmaAllocationInfo
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Optional configuration parameters to be passed to function vmaDefragment().
Definition: vk_mem_alloc.h:1471
struct VmaPoolCreateInfo VmaPoolCreateInfo
Describes parameter of created VmaPool.
void vmaDestroyPool(VmaAllocator allocator, VmaPool pool)
Destroys VmaPool object and frees Vulkan device memory.
VkDeviceSize bytesFreed
Total number of bytes that have been released to the system by freeing empty VkDeviceMemory objects...
Definition: vk_mem_alloc.h:1489
Definition: vk_mem_alloc.h:1021
uint32_t memoryTypeBits
Bitmask containing one bit set for every memory type acceptable for this allocation.
Definition: vk_mem_alloc.h:1130
PFN_vkBindBufferMemory vkBindBufferMemory
Definition: vk_mem_alloc.h:799
void vmaGetPoolStats(VmaAllocator allocator, VmaPool pool, VmaPoolStats *pPoolStats)
Retrieves statistics of existing VmaPool object.
struct VmaDefragmentationInfo VmaDefragmentationInfo
Optional configuration parameters to be passed to function vmaDefragment().
General statistics from current state of Allocator.
Definition: vk_mem_alloc.h:950
void(VKAPI_PTR * PFN_vmaFreeDeviceMemoryFunction)(VmaAllocator allocator, uint32_t memoryType, VkDeviceMemory memory, VkDeviceSize size)
Callback function called before vkFreeMemory.
Definition: vk_mem_alloc.h:734
void vmaSetAllocationUserData(VmaAllocator allocator, VmaAllocation allocation, void *pUserData)
Sets pUserData in given allocation to new value.
VkResult vmaCreatePool(VmaAllocator allocator, const VmaPoolCreateInfo *pCreateInfo, VmaPool *pPool)
Allocates Vulkan device memory and creates VmaPool object.
VmaAllocatorCreateFlagBits
Flags for created VmaAllocator.
Definition: vk_mem_alloc.h:755
struct VmaStatInfo VmaStatInfo
Calculated statistics of memory usage in entire allocator.
Allocator and all objects created from it will not be synchronized internally, so you must guarantee ...
Definition: vk_mem_alloc.h:760
uint32_t allocationsMoved
Number of allocations that have been moved to different places.
Definition: vk_mem_alloc.h:1491
void vmaCreateLostAllocation(VmaAllocator allocator, VmaAllocation *pAllocation)
Creates new allocation that is in lost state from the beginning.
VkMemoryPropertyFlags requiredFlags
Flags that must be set in a Memory Type chosen for an allocation.
Definition: vk_mem_alloc.h:1117
VkDeviceSize unusedRangeSizeMax
Size of the largest continuous free memory region.
Definition: vk_mem_alloc.h:1255
void vmaBuildStatsString(VmaAllocator allocator, char **ppStatsString, VkBool32 detailedMap)
Builds and returns statistics as string in JSON format.
PFN_vkGetPhysicalDeviceMemoryProperties vkGetPhysicalDeviceMemoryProperties
Definition: vk_mem_alloc.h:794
Calculated statistics of memory usage in entire allocator.
Definition: vk_mem_alloc.h:933
VkDeviceSize blockSize
Size of a single VkDeviceMemory block to be allocated as part of this pool, in bytes.
Definition: vk_mem_alloc.h:1204
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Definition: vk_mem_alloc.h:747
VkResult vmaCreateBuffer(VmaAllocator allocator, const VkBufferCreateInfo *pBufferCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkBuffer *pBuffer, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Definition: vk_mem_alloc.h:1091
VkDeviceSize unusedRangeSizeMin
Definition: vk_mem_alloc.h:946
PFN_vmaFreeDeviceMemoryFunction pfnFree
Optional, can be null.
Definition: vk_mem_alloc.h:751
VmaPoolCreateFlags flags
Use combination of VmaPoolCreateFlagBits.
Definition: vk_mem_alloc.h:1199
Definition: vk_mem_alloc.h:1030
struct VmaPoolStats VmaPoolStats
Describes parameter of existing VmaPool.
VkResult vmaCreateImage(VmaAllocator allocator, const VkImageCreateInfo *pImageCreateInfo, const VmaAllocationCreateInfo *pAllocationCreateInfo, VkImage *pImage, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaCreateBuffer().
VmaMemoryUsage usage
Intended usage of memory.
Definition: vk_mem_alloc.h:1112
Definition: vk_mem_alloc.h:1103
uint32_t blockCount
Number of VkDeviceMemory Vulkan memory blocks allocated.
Definition: vk_mem_alloc.h:936
PFN_vkFreeMemory vkFreeMemory
Definition: vk_mem_alloc.h:796
size_t maxBlockCount
Maximum number of blocks that can be allocated in this pool.
Definition: vk_mem_alloc.h:1217
const VmaDeviceMemoryCallbacks * pDeviceMemoryCallbacks
Informative callbacks for vkAllocateMemory, vkFreeMemory.
Definition: vk_mem_alloc.h:830
size_t unusedRangeCount
Number of continuous memory ranges in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1248
VkFlags VmaAllocationCreateFlags
Definition: vk_mem_alloc.h:1101
VmaPool pool
Pool that this allocation should be created in.
Definition: vk_mem_alloc.h:1136
void vmaGetMemoryProperties(VmaAllocator allocator, const VkPhysicalDeviceMemoryProperties **ppPhysicalDeviceMemoryProperties)
const VkDeviceSize * pHeapSizeLimit
Either NULL or a pointer to an array of limits on maximum number of bytes that can be allocated out o...
Definition: vk_mem_alloc.h:868
VmaStatInfo memoryType[VK_MAX_MEMORY_TYPES]
Definition: vk_mem_alloc.h:952
Set this flag to use a memory that will be persistently mapped and retrieve pointer to it...
Definition: vk_mem_alloc.h:1071
VkDeviceSize allocationSizeMin
Definition: vk_mem_alloc.h:945
PFN_vkCreateImage vkCreateImage
Definition: vk_mem_alloc.h:805
PFN_vmaAllocateDeviceMemoryFunction pfnAllocate
Optional, can be null.
Definition: vk_mem_alloc.h:749
PFN_vkDestroyBuffer vkDestroyBuffer
Definition: vk_mem_alloc.h:804
VkResult vmaMapMemory(VmaAllocator allocator, VmaAllocation allocation, void **ppData)
Maps memory represented by given allocation and returns pointer to it.
uint32_t frameInUseCount
Maximum number of additional frames that are in use at the same time as current frame.
Definition: vk_mem_alloc.h:1231
VkResult vmaAllocateMemoryForImage(VmaAllocator allocator, VkImage image, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
Function similar to vmaAllocateMemoryForBuffer().
struct VmaAllocatorCreateInfo VmaAllocatorCreateInfo
Description of a Allocator to be created.
void * pUserData
Custom general-purpose pointer that was passed as VmaAllocationCreateInfo::pUserData or set using vma...
Definition: vk_mem_alloc.h:1339
VkDeviceSize preferredLargeHeapBlockSize
Preferred size of a single VkDeviceMemory block to be allocated from large heaps > 1 GiB...
Definition: vk_mem_alloc.h:824
VkDeviceSize allocationSizeAvg
Definition: vk_mem_alloc.h:945
VkDeviceSize usedBytes
Total number of bytes occupied by all allocations.
Definition: vk_mem_alloc.h:942
struct VmaDeviceMemoryCallbacks VmaDeviceMemoryCallbacks
Set of callbacks that the library will call for vkAllocateMemory and vkFreeMemory.
Describes parameter of existing VmaPool.
Definition: vk_mem_alloc.h:1236
VkDeviceSize offset
Offset into deviceMemory object to the beginning of this allocation, in bytes. (deviceMemory, offset) pair is unique to this allocation.
Definition: vk_mem_alloc.h:1320
Definition: vk_mem_alloc.h:1099
VkDeviceSize bytesMoved
Total number of bytes that have been copied while moving allocations to different places...
Definition: vk_mem_alloc.h:1487
Pointers to some Vulkan functions - a subset used by the library.
Definition: vk_mem_alloc.h:792
VkResult vmaCreateAllocator(const VmaAllocatorCreateInfo *pCreateInfo, VmaAllocator *pAllocator)
Creates Allocator object.
PFN_vkGetBufferMemoryRequirements2KHR vkGetBufferMemoryRequirements2KHR
Definition: vk_mem_alloc.h:807
uint32_t unusedRangeCount
Number of free ranges of memory between allocations.
Definition: vk_mem_alloc.h:940
Definition: vk_mem_alloc.h:987
VkFlags VmaPoolCreateFlags
Definition: vk_mem_alloc.h:1189
void vmaGetPhysicalDeviceProperties(VmaAllocator allocator, const VkPhysicalDeviceProperties **ppPhysicalDeviceProperties)
uint32_t allocationCount
Number of VmaAllocation allocation objects allocated.
Definition: vk_mem_alloc.h:938
PFN_vkGetImageMemoryRequirements vkGetImageMemoryRequirements
Definition: vk_mem_alloc.h:802
PFN_vkDestroyImage vkDestroyImage
Definition: vk_mem_alloc.h:806
Set this flag to only try to allocate from existing VkDeviceMemory blocks and never create new such b...
Definition: vk_mem_alloc.h:1058
Definition: vk_mem_alloc.h:1014
void * pMappedData
Pointer to the beginning of this allocation as mapped data.
Definition: vk_mem_alloc.h:1334
void vmaDestroyImage(VmaAllocator allocator, VkImage image, VmaAllocation allocation)
Destroys Vulkan image and frees allocated memory.
Enables usage of VK_KHR_dedicated_allocation extension.
Definition: vk_mem_alloc.h:782
struct VmaDefragmentationStats VmaDefragmentationStats
Statistics returned by function vmaDefragment().
PFN_vkAllocateMemory vkAllocateMemory
Definition: vk_mem_alloc.h:795
Parameters of VmaAllocation objects, that can be retrieved using function vmaGetAllocationInfo().
Definition: vk_mem_alloc.h:1301
VkResult vmaAllocateMemory(VmaAllocator allocator, const VkMemoryRequirements *pVkMemoryRequirements, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
General purpose memory allocation.
void vmaSetCurrentFrameIndex(VmaAllocator allocator, uint32_t frameIndex)
Sets index of the current frame.
struct VmaAllocationCreateInfo VmaAllocationCreateInfo
VkResult vmaAllocateMemoryForBuffer(VmaAllocator allocator, VkBuffer buffer, const VmaAllocationCreateInfo *pCreateInfo, VmaAllocation *pAllocation, VmaAllocationInfo *pAllocationInfo)
VmaPoolCreateFlagBits
Flags to be passed as VmaPoolCreateInfo::flags.
Definition: vk_mem_alloc.h:1167
VkDeviceSize unusedRangeSizeAvg
Definition: vk_mem_alloc.h:946
Definition: vk_mem_alloc.h:1097
VmaStatInfo memoryHeap[VK_MAX_MEMORY_HEAPS]
Definition: vk_mem_alloc.h:953
void vmaDestroyBuffer(VmaAllocator allocator, VkBuffer buffer, VmaAllocation allocation)
Destroys Vulkan buffer and frees allocated memory.
VkDeviceSize unusedSize
Total number of bytes in the pool not used by any VmaAllocation.
Definition: vk_mem_alloc.h:1242
VkDeviceSize unusedRangeSizeMax
Definition: vk_mem_alloc.h:946
uint32_t memoryType
Memory type index that this allocation was allocated from.
Definition: vk_mem_alloc.h:1306